diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6365a165dd..0de14e1892 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -31,7 +31,7 @@ jobs: - name: Checkout uses: actions/checkout@v3 with: - submodules: true + fetch-depth: 2 # to be able to check for changes in subfolder res_syntax later - name: Use OCaml ${{matrix.ocaml_compiler}} uses: ocaml/setup-ocaml@v2 @@ -40,10 +40,35 @@ jobs: opam-pin: false opam-depext: false - - name: Use Node.js - uses: actions/setup-node@v3 - with: - node-version: 16 + - name: "Check if syntax subfolder has changed" + id: syntax-diff + shell: bash + run: | + if git diff --name-only --exit-code HEAD^ HEAD -- res_syntax; then + echo "syntax_status=unchanged" >> $GITHUB_ENV + else + echo "syntax_status=changed" >> $GITHUB_ENV + fi + + - name: "Syntax: install OPAM dependencies" + if: env.syntax_status == 'changed' + run: opam install . --deps-only + working-directory: res_syntax + + - name: "Syntax: build CLI" + if: env.syntax_status == 'changed' + run: opam exec -- dune build + working-directory: res_syntax + + - name: "Syntax: Run roundtrip tests" + if: ${{ env.syntax_status == 'changed' && runner.os != 'Windows' }} + run: opam exec -- make roundtrip-test + working-directory: res_syntax + + - name: "Syntax: Run tests (Windows)" + if: ${{ env.syntax_status == 'changed' && runner.os == 'Windows' }} + run: opam exec -- make test + working-directory: res_syntax # Required for ninja build - name: "Windows: Use MSVC" @@ -55,6 +80,11 @@ jobs: - name: Build ninja run: node scripts/buildNinjaBinary.js + - name: Use Node.js + uses: actions/setup-node@v3 + with: + node-version: 16 + - name: NPM install run: opam exec -- npm ci env: diff --git a/.gitignore b/.gitignore index 0ac6194ee4..ab537df08a 100644 --- a/.gitignore +++ b/.gitignore @@ -55,7 +55,6 @@ coverage ppx_tools/ vendor/ocaml/stdlib/target_camlheader vendor/ocaml/stdlib/target_camlheaderd -reason/ man/ lib/ocaml *.tgz diff --git a/.gitmodules b/.gitmodules deleted file mode 100644 index b370037fae..0000000000 --- a/.gitmodules +++ /dev/null @@ -1,8 +0,0 @@ -[submodule "ocaml"] - path = ocaml - url = https://github.com/rescript-lang/ocaml - ignore = untracked -[submodule "syntax"] - path = syntax - url = https://github.com/rescript-lang/syntax - branch = master diff --git a/.prettierignore b/.prettierignore index 2cba134fbd..4230180348 100644 --- a/.prettierignore +++ b/.prettierignore @@ -2,7 +2,7 @@ jscomp/ lib/ vendor/ ninja/ -syntax/ +res_syntax/ _opam CHANGELOG.md README.md diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index bf6be2b822..fd4d01c4ba 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -4,7 +4,7 @@ Welcome to the ReScript compiler project! This document will give you guidance on how to get up and running to work on the ReScript compiler and toolchain. -(If you want to contribute to the documentation website, check out [rescript-association/rescript-lang.org](https://github.com/reason-association/rescript-lang.org). For contributions to the ReScript syntax, please visit the [rescript-lang/syntax](https://github.com/rescript-lang/syntax) project.) +(If you want to contribute to the documentation website, check out [rescript-association/rescript-lang.org](https://github.com/reason-association/rescript-lang.org). We tried to keep the installation process as simple as possible. In case you are having issues or get stuck in the process, please let us know in the issue tracker. 
diff --git a/dune b/dune index e4f11d99bc..23e70d65c8 100644 --- a/dune +++ b/dune @@ -1 +1 @@ -(dirs jscomp) +(dirs jscomp res_syntax) diff --git a/res_syntax/.gitignore b/res_syntax/.gitignore new file mode 100644 index 0000000000..ee92b33d4b --- /dev/null +++ b/res_syntax/.gitignore @@ -0,0 +1,19 @@ +.DS_STORE +*.cm* +*.o +*.s +lib/* +!lib/README.md +.vscode/settings.json + +# Dune +_build + +# Merlin +.merlin + +# Local opam switch +_opam/ + +# Output dir for bootstrap script +bootstrap/ diff --git a/res_syntax/.ocamlformat b/res_syntax/.ocamlformat new file mode 100644 index 0000000000..3bef5a7b46 --- /dev/null +++ b/res_syntax/.ocamlformat @@ -0,0 +1,11 @@ +profile = default +version = 0.22.4 + +field-space = tight-decl +break-cases = toplevel +module-item-spacing = preserve +cases-exp-indent = 2 +space-around-arrays = false +space-around-lists = false +space-around-records = false +space-around-variants = false diff --git a/res_syntax/.ocamlformat-ignore b/res_syntax/.ocamlformat-ignore new file mode 100644 index 0000000000..0c4403d0c7 --- /dev/null +++ b/res_syntax/.ocamlformat-ignore @@ -0,0 +1 @@ +compiler-libs-406/* diff --git a/res_syntax/CHANGELOG.md b/res_syntax/CHANGELOG.md new file mode 100644 index 0000000000..e12a288505 --- /dev/null +++ b/res_syntax/CHANGELOG.md @@ -0,0 +1,98 @@ +## Master + +> **Tags:** +> +> - :boom: [Breaking Change] +> - :eyeglasses: [Spec Compliance] +> - :rocket: [New Feature] +> - :bug: [Bug Fix] +> - :memo: [Documentation] +> - :house: [Internal] +> - :nail_care: [Polish] + +#### :boom: Breaking Change + +- Emit an error when a `@string` or `@int` attribute is used in a V4 component https://github.com/rescript-lang/rescript-compiler/issues/5724 + +#### :rocket: New Feature + +- Add surface syntax for `async`/`await` https://github.com/rescript-lang/syntax/pull/600 + +- Initial support for JSX V4, still a work in progress. + + - :boom: when V4 is activated, at most one component is allowed for each module. + +- Add support for empty record literal `{}` for records with only optional fields, and type definition of empty record (e.g. `type empty = {}`) https://github.com/rescript-lang/syntax/pull/632 + +- Support the use of spread anywhere in list creation (e.g. `list{...x, 1, ...y, ...z}`). https://github.com/rescript-lang/syntax/pull/692 + +- Add support for the argument of `@react.component` to set a props type from the outside. 
https://github.com/rescript-lang/syntax/pull/699 + +#### :bug: Bug Fix + +- Fix issue in formatting JSX spread props https://github.com/rescript-lang/syntax/pull/644 +- Fix pretty printer where it would print doc comments on the same line as other attributes https://github.com/rescript-lang/syntax/pull/642 +- Fix location issue in error messages with JSX V4 where the body of the component is an application https://github.com/rescript-lang/syntax/pull/633 +- Fix issue where the printer would omit attributes for `->` and `|>` https://github.com/rescript-lang/syntax/pull/629 +- Fix printing of optional fields in records https://github.com/rescript-lang/rescript-compiler/issues/5654 +- Fix printing of comments inside empty blocks https://github.com/rescript-lang/syntax/pull/647 +- Fix location issue in error messages with JSX V4 where multiple props types are defined https://github.com/rescript-lang/syntax/pull/655 +- Fix location issue in the make function in JSX V4 that breaks dead code elimination https://github.com/rescript-lang/syntax/pull/660 +- Fix parsing (hence pretty printing) of expressions with underscore `_` and comments. +- Fix printing of comments inside a JSX tag https://github.com/rescript-lang/syntax/pull/664 +- Fix issue where the formatter erases tail comments inside a JSX tag https://github.com/rescript-lang/syntax/issues/663 +- Fix issue where a JSX prop has a type annotation of a first-class module https://github.com/rescript-lang/syntax/pull/666 +- Fix issue where a spread `...x` in non-last position would not be reported as a syntax error https://github.com/rescript-lang/syntax/pull/673/ +- Fix issue where the formatter would delete `async` in a function with labelled arguments. +- Fix several printing issues with `async`, including an infinite loop https://github.com/rescript-lang/syntax/pull/680 +- Fix issue where certain JSX expressions would be formatted differently in compiler 10.1.0-rc.1 https://github.com/rescript-lang/syntax/issues/675 +- Fix issue where printing nested pipes discards `await` https://github.com/rescript-lang/syntax/issues/687 +- Fix issue where the JSX key type is not an optional string https://github.com/rescript-lang/syntax/pull/693 +- Fix build error for a JSX fragment without children https://github.com/rescript-lang/syntax/pull/704 +- Fix issue where `async` as an identifier cannot be used with application and labelled arguments https://github.com/rescript-lang/syntax/issues/707 +- Treat `await` as an almost-unary operator weaker than pipe, so `await foo->bar` means `await (foo->bar)` https://github.com/rescript-lang/syntax/pull/711 + +#### :eyeglasses: Spec Compliance + +- Functions with consecutive dots now print as multiple arrow functions, like in JavaScript. + +#### :nail_care: Polish + +- Change the internal representation of props for lowercase components to a record. https://github.com/rescript-lang/syntax/pull/665 +- Change the payload of Pconst_char for type safety. https://github.com/rescript-lang/rescript-compiler/pull/5759 +- Specialize the printing of the rhs of a record field assignment for optional values `{x: ? 
e}` https://github.com/rescript-lang/syntax/issues/714 + +## ReScript 10.0 + +- Fix printing for inline nullary functor types [#477](https://github.com/rescript-lang/syntax/pull/477) +- Fix stripping of quotes for empty poly variants [#474](https://github.com/rescript-lang/syntax/pull/474) +- Implement syntax for arity zero vs arity one in uncurried application in [#139](https://github.com/rescript-lang/syntax/pull/139) +- Fix parsing of first class module exprs as part of binary/ternary expr in [#256](https://github.com/rescript-lang/syntax/pull/256) +- Fix formatter hanging on deeply nested function calls [#261](https://github.com/rescript-lang/syntax/issues/261) +- Remove parsing of "import" and "export" which was never officially supported. + +## ReScript 9.0.0 + +- Fix parsing of poly-var typexpr consisting of one tag-spec-first in [#254](https://github.com/rescript-lang/syntax/pull/254) +- Implement new syntax for guards on pattern match cases in [#248](https://github.com/rescript-lang/syntax/pull/248) +- Implement intelligent breaking for poly-var type expressions in [#246](https://github.com/rescript-lang/syntax/pull/246) +- Improve indentation of fast pipe chain in let binding in [#244](https://github.com/rescript-lang/syntax/pull/244) +- Improve printing of non-callback arguments in call expressions with callback in [#241](https://github.com/rescript-lang/syntax/pull/241/files) +- Fix printing of constrained expressions in rhs of js objects [#240](https://github.com/rescript-lang/syntax/pull/240) +- Improve printing of trailing comments under lhs of "pipe" expression in [#329](https://github.com/rescript-lang/syntax/pull/239/files) +- Improve printing of jsx children and props with leading line comment in [#236](https://github.com/rescript-lang/syntax/pull/236) +- Improve conversion of quoted strings from Reason in [#238](https://github.com/rescript-lang/syntax/pull/238) +- Print attributes/extension without bs prefix where possible in [#230](https://github.com/rescript-lang/syntax/pull/230) +- Cleanup gentype attribute printing [fe05e1051aa94b16f6993ddc5ba9651f89e86907](https://github.com/rescript-lang/syntax/commit/fe05e1051aa94b16f6993ddc5ba9651f89e86907) +- Implement light weight syntax for poly-variants [f84c5760b3f743f65e934195c87fc06bf88bff75](https://github.com/rescript-lang/syntax/commit/f84c5760b3f743f65e934195c87fc06bf88bff75) +- Fix bug in fast pipe conversion from Reason. [3d5f2daba5418b821c577ba03e2de1afb0dd66de](https://github.com/rescript-lang/syntax/commit/3d5f2daba5418b821c577ba03e2de1afb0dd66de) +- Improve parsed AST when tilde is missing in arrow expr parameters. [e52a0c89ac39b578a2062ef15fae2be625962e1f](https://github.com/rescript-lang/syntax/commit/e52a0c89ac39b578a2062ef15fae2be625962e1f) +- Improve parser diagnostics for missing tilde in labeled parameters. 
[a0d7689d5d2bfc31dc251e966ac33a3001200171](https://github.com/rescript-lang/syntax/commit/a0d7689d5d2bfc31dc251e966ac33a3001200171) +- Improve printing of uncurried application with a huggable expression in [c8767215186982e171fe9f9101d518150a65f0d7](https://github.com/rescript-lang/syntax/commit/c8767215186982e171fe9f9101d518150a65f0d7) +- Improve printing of uncurried arrow typexpr outcome printer in [4d953b668cf47358deccb8b730566f24de25b9ee](https://github.com/rescript-lang/syntax/commit/4d953b668cf47358deccb8b730566f24de25b9ee) +- Remove support for nativeint syntax in [72d9b7034fc28f317672c94994b322bee520acca](https://github.com/rescript-lang/syntax/commit/72d9b7034fc28f317672c94994b322bee520acca) +- Improve printing of poly variant typexprs with attributes in [bf6561b](https://github.com/rescript-lang/syntax/commit/bf6561bb5d84557b8b6cbbcd40078c39526af4af) + +## ReScript 8.4.2 (December 11, 2020) + +Released in https://github.com/rescript-lang/syntax/releases/tag/v8.4.2 as part of https://github.com/rescript-lang/rescript-compiler/releases/tag/8.4.2 diff --git a/res_syntax/LICENSE b/res_syntax/LICENSE new file mode 100644 index 0000000000..a1b2c17b66 --- /dev/null +++ b/res_syntax/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 ReScript + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/res_syntax/Makefile b/res_syntax/Makefile new file mode 100644 index 0000000000..8579ab747c --- /dev/null +++ b/res_syntax/Makefile @@ -0,0 +1,36 @@ +SHELL = /bin/bash + +build: + dune build + +bootstrap: build + dune exec -- bash ./scripts/bootstrap.sh + +bench: build + dune exec -- bench + +test: build + dune exec -- testrunner + dune exec -- bash ./scripts/test.sh + make reanalyze + bash ./scripts/testok.sh + +roundtrip-test: build + dune exec -- testrunner + ROUNDTRIP_TEST=1 dune exec -- bash ./scripts/test.sh + make reanalyze + bash ./scripts/testok.sh + +reanalyze: build + reanalyze.exe -set-exit-code -all-cmt _build/default -suppress testrunner,compiler-libs-406 -exclude-paths compiler-libs-406 + +format: + dune build @fmt --auto-promote + +checkformat: + dune build @fmt + +clean: + dune clean + +.PHONY: bench clean test roundtrip-test reanalyze bootstrap build diff --git a/res_syntax/README.md b/res_syntax/README.md new file mode 100644 index 0000000000..a1c7d02f7c --- /dev/null +++ b/res_syntax/README.md @@ -0,0 +1,125 @@ +# ReScript Syntax ![Tests](https://github.com/rescript-lang/syntax/workflows/CI/badge.svg) + +Documentation: https://rescript-lang.org/docs/manual/latest/overview + +This repo is the source of truth for the ReScript parser & printer. Issues go here. + +**You don't need this repo to use the ReScript syntax**. This comes with ReScript >=8.1. This repo is for syntax developers. + +## Contribute + +### Why + +A detailed discussion by Jonathan Blow and Casey Muratori on why you would hand-roll a parser for a production-quality programming language: +[Discussion: Making Programming Language Parsers, etc](https://youtu.be/MnctEW1oL-E) + +"One reason why I switched off these parser tools is that the promises didn't really materialize. +The amount of work that I had to do to change a yacc script from one language to a variant of that language +was more than if I hand wrote the code myself. +" +J. Blow. + +### Setup & Usage (For Repo Devs Only) + +Required: + +- OCaml 4.10 or later +- Dune +- Reanalyze +- OS: macOS, Linux or Windows + +```sh +git clone https://github.com/rescript-lang/syntax.git +cd syntax +opam install . --deps-only --with-test +make # or "dune build" +``` + +This will produce the three binaries `rescript`, `tests` and `bench` (with `.exe` extension on Windows). + +We only build production binaries, even in dev mode. No need for a separate dev binary when the build is fast enough. Plus, this encourages proper benchmarking of the (production) binary on each diff. + +After you make a change: + +```sh +make +``` + +Run the core tests: + +```sh +make test +``` + +Run the extended tests (not fully working on Windows yet): + +```sh +make roundtrip-test +``` + +Those will tell you whether you've got a test output difference. If it's intentional, check them in. + +Debug a file: + +```sh +# write code in test.res +dune exec -- rescript test.res # test printer +dune exec -- rescript -print ast test.res # print ast +dune exec -- rescript -print comments test.res # print comment table +dune exec -- rescript -print ml test.res # show ocaml code +dune exec -- rescript -print res -width 80 test.res # test printer and change default print width +``` + +Benchmark: + +```sh +make bench +``` + +Enable stack trace: + +```sh +# Before you run the binary +export OCAMLRUNPARAM="b" +``` + +Tip: add the above line to your shell rc file so that OCaml stack traces are enabled in every shell you start. 
+ +### Development Docs + +#### Folder Structure + +- `src` contains all the parser/printer source code. Don't change folder structure without notice; The [rescript-compiler](https://github.com/rescript-lang/rescript-compiler) repo uses this repo as a submodule and assumes `src`. +- `benchmarks`, `cli` and `tests` contain the source code for the executables used for testing/benchmarking. These are not used by the [rescript-compiler](https://github.com/rescript-lang/rescript-compiler) repo. + +#### Error Reporting Logic + +Right now, ReScript's compiler's error reporting mechanism, for architectural reasons, is independent from this syntax repo's error reporting mechanism. However, we do want a unified look when they report the errors in the terminal. This is currently achieved by (carefully...) duplicating the error report logic from the compiler repo to here (or vice-versa; either way, just keep them in sync). The files to sync are the compiler repo's [super_location.ml](https://github.com/rescript-lang/rescript-compiler/blob/fcb21790dfb0592f609818df7790192061360631/jscomp/super_errors/super_location.ml) and [super_code_frame.ml](https://github.com/rescript-lang/rescript-compiler/blob/fcb21790dfb0592f609818df7790192061360631/jscomp/super_errors/super_code_frame.ml), into this repo's [res_diagnostics_printing_utils.ml](https://github.com/rescript-lang/syntax/blob/ec5cefb23b659b0a7be170ae0ad26f3fe8a05456/src/res_diagnostics_printing_utils.ml). A few notes: + +- Some lines are lightly changed to fit this repo's needs; they're documented in the latter file. +- Please keep these files lightweight and as dependency-less as possible, for easier syncing. +- The syntax logic currently doesn't have warnings, only errors, and potentially more than one. +- In the future, ideally, the error reporting logic would also be unified with GenType and Reanalyze's. It'd be painful to copy paste around all this reporting logic. +- The errors are reported by the parser [here](https://github.com/rescript-lang/syntax/blob/ec5cefb23b659b0a7be170ae0ad26f3fe8a05456/src/res_diagnostics.ml#L146). +- Our editor plugin parses the error report from the compiler and from the syntax [here](https://github.com/rescript-lang/rescript-vscode/blob/0dbf2eb9cdb0bd6d95be1aee88b73830feecb5cc/server/src/utils.ts#L129-L329). + +### Example API usage + +```ocaml +let filename = "foo.res" +let src = FS.readFile filename + +let p = + (* intended for ocaml compiler *) + let mode = Res_parser.ParseForTypeChecker in + (* if you want to target the printer use: let mode = Res_parser.Default in*) + Res_parser.make ~mode src filename + +let structure = Res_core.parseImplementation p +let signature = Res_core.parseSpecification p + +let () = match p.diagnostics with +| [] -> () (* no problems *) +| diagnostics -> (* parser contains problems *) + Res_diagnostics.printReport diagnostics src +``` diff --git a/res_syntax/benchmarks/Benchmark.ml b/res_syntax/benchmarks/Benchmark.ml new file mode 100644 index 0000000000..f4ea6b5fe0 --- /dev/null +++ b/res_syntax/benchmarks/Benchmark.ml @@ -0,0 +1,252 @@ +module ResParser = Res_core +module Doc = Res_doc +module CommentTable = Res_comments_table +module Parser = Res_parser +module Printer = Res_printer + +module IO : sig + val readFile : string -> string +end = struct + (* random chunk size: 2^15, TODO: why do we guess randomly? 
*) + let chunkSize = 32768 + + let readFile filename = + let chan = open_in filename in + let buffer = Buffer.create chunkSize in + let chunk = (Bytes.create [@doesNotRaise]) chunkSize in + let rec loop () = + let len = + try input chan chunk 0 chunkSize with Invalid_argument _ -> 0 + in + if len == 0 then ( + close_in_noerr chan; + Buffer.contents buffer) + else ( + Buffer.add_subbytes buffer chunk 0 len; + loop ()) + in + loop () +end + +module Time : sig + type t + + val now : unit -> t + + val toUint64 : t -> int64 [@@live] + + (* let of_uint64_ns ns = ns *) + + val nanosecond : t [@@live] + val microsecond : t [@@live] + val millisecond : t [@@live] + val second : t [@@live] + val minute : t [@@live] + val hour : t [@@live] + + val zero : t + + val diff : t -> t -> t + val add : t -> t -> t + val print : t -> float +end = struct + (* nanoseconds *) + type t = int64 + + let zero = 0L + + let toUint64 s = s + + let nanosecond = 1L + let microsecond = Int64.mul 1000L nanosecond + let millisecond = Int64.mul 1000L microsecond + let second = Int64.mul 1000L millisecond + let minute = Int64.mul 60L second + let hour = Int64.mul 60L minute + + (* TODO: we could do this inside caml_absolute_time *) + external init : unit -> unit = "caml_mach_initialize" + let () = init () + external now : unit -> t = "caml_mach_absolute_time" + + let diff t1 t2 = Int64.sub t2 t1 + let add t1 t2 = Int64.add t1 t2 + let print t = Int64.to_float t *. 1e-6 +end + +module Benchmark : sig + type t + + val make : name:string -> f:(t -> unit) -> unit -> t + val launch : t -> unit + val report : t -> unit +end = struct + type t = { + name: string; + mutable start: Time.t; + mutable n: int; (* current iterations count *) + mutable duration: Time.t; + benchFunc: t -> unit; + mutable timerOn: bool; + (* mutable result: benchmarkResult; *) + (* The initial states *) + mutable startAllocs: float; + mutable startBytes: float; + (* The net total of this test after being run. *) + mutable netAllocs: float; + mutable netBytes: float; + } + + let report b = + print_endline (Format.sprintf "Benchmark: %s" b.name); + print_endline (Format.sprintf "Nbr of iterations: %d" b.n); + print_endline + (Format.sprintf "Benchmark ran during: %fms" (Time.print b.duration)); + print_endline + (Format.sprintf "Avg time/op: %fms" + (Time.print b.duration /. float_of_int b.n)); + print_endline + (Format.sprintf "Allocs/op: %d" + (int_of_float (b.netAllocs /. float_of_int b.n))); + print_endline + (Format.sprintf "B/op: %d" + (int_of_float (b.netBytes /. float_of_int b.n))); + + (* return (float64(r.Bytes) * float64(r.N) / 1e6) / r.T.Seconds() *) + print_newline (); + () + + let make ~name ~f () = + { + name; + start = Time.zero; + n = 0; + benchFunc = f; + duration = Time.zero; + timerOn = false; + startAllocs = 0.; + startBytes = 0.; + netAllocs = 0.; + netBytes = 0.; + } + + (* total amount of memory allocated by the program since it started in words *) + let mallocs () = + let stats = Gc.quick_stat () in + stats.minor_words +. stats.major_words -. stats.promoted_words + + let startTimer b = + if not b.timerOn then ( + let allocatedWords = mallocs () in + b.startAllocs <- allocatedWords; + b.startBytes <- allocatedWords *. 8.; + b.start <- Time.now (); + b.timerOn <- true) + + let stopTimer b = + if b.timerOn then ( + let allocatedWords = mallocs () in + let diff = Time.diff b.start (Time.now ()) in + b.duration <- Time.add b.duration diff; + b.netAllocs <- b.netAllocs +. (allocatedWords -. b.startAllocs); + b.netBytes <- b.netBytes +. 
((allocatedWords *. 8.) -. b.startBytes); + b.timerOn <- false) + + let resetTimer b = + if b.timerOn then ( + let allocatedWords = mallocs () in + b.startAllocs <- allocatedWords; + b.netAllocs <- allocatedWords *. 8.; + b.start <- Time.now ()); + b.netAllocs <- 0.; + b.netBytes <- 0. + + let runIteration b n = + Gc.full_major (); + b.n <- n; + resetTimer b; + startTimer b; + b.benchFunc b; + stopTimer b + + let launch b = + (* 150 runs * all the benchmarks means around 1m of benchmark time *) + for n = 1 to 150 do + runIteration b n + done +end + +module Benchmarks : sig + val run : unit -> unit +end = struct + type action = Parse | Print + let string_of_action action = + match action with + | Parse -> "parser" + | Print -> "printer" + + (* TODO: we could at Reason here *) + type lang = Ocaml | Rescript + let string_of_lang lang = + match lang with + | Ocaml -> "ocaml" + | Rescript -> "rescript" + + let parseOcaml src filename = + let lexbuf = Lexing.from_string src in + Location.init lexbuf filename; + Parse.implementation lexbuf + + let parseRescript src filename = + let p = Parser.make src filename in + let structure = ResParser.parseImplementation p in + assert (p.diagnostics == []); + structure + + let benchmark filename lang action = + let src = IO.readFile filename in + let name = + filename ^ " " ^ string_of_lang lang ^ " " ^ string_of_action action + in + let benchmarkFn = + match (lang, action) with + | Rescript, Parse -> + fun _ -> + let _ = Sys.opaque_identity (parseRescript src filename) in + () + | Ocaml, Parse -> + fun _ -> + let _ = Sys.opaque_identity (parseOcaml src filename) in + () + | Rescript, Print -> + let p = Parser.make src filename in + let ast = ResParser.parseImplementation p in + fun _ -> + let _ = + Sys.opaque_identity + (let cmtTbl = CommentTable.make () in + let comments = List.rev p.Parser.comments in + let () = CommentTable.walkStructure ast cmtTbl comments in + Doc.toString ~width:80 (Printer.printStructure ast cmtTbl)) + in + () + | _ -> fun _ -> () + in + let b = Benchmark.make ~name ~f:benchmarkFn () in + Benchmark.launch b; + Benchmark.report b + + let run () = + benchmark "./benchmarks/data/RedBlackTree.res" Rescript Parse; + benchmark "./benchmarks/data/RedBlackTree.ml" Ocaml Parse; + benchmark "./benchmarks/data/RedBlackTree.res" Rescript Print; + benchmark "./benchmarks/data/RedBlackTreeNoComments.res" Rescript Print; + benchmark "./benchmarks/data/Napkinscript.res" Rescript Parse; + benchmark "./benchmarks/data/Napkinscript.ml" Ocaml Parse; + benchmark "./benchmarks/data/Napkinscript.res" Rescript Print; + benchmark "./benchmarks/data/HeroGraphic.res" Rescript Parse; + benchmark "./benchmarks/data/HeroGraphic.ml" Ocaml Parse; + benchmark "./benchmarks/data/HeroGraphic.res" Rescript Print +end + +let () = Benchmarks.run () diff --git a/res_syntax/benchmarks/data/HeroGraphic.ml b/res_syntax/benchmarks/data/HeroGraphic.ml new file mode 100644 index 0000000000..08feb30e3d --- /dev/null +++ b/res_syntax/benchmarks/data/HeroGraphic.ml @@ -0,0 +1,8654 @@ +;;[%bs.raw {|require('./HeroGraphic.css')|}] +let make ?(width= "760") ?(height= "380") = + ((svg ~width:((width)) ~height:((height)) + ~viewBox:(("0 0 758 381")) ~fill:(("none") + ) ~xmlns:(("http://www.w3.org/2000/svg") + ) + ~children:[((path + ~d:(("M78.8374 255.364H664.923C664.923 255.364 677.451 256.743 677.451 270.971C677.451 285.2 667.673 288.178 667.673 288.178H579.485C579.485 288.178 592.014 290.163 592.014 304.888C592.014 319.612 582.785 321.101 582.785 321.101H524.544C524.544 321.101 
507.676 322.59 508.776 333.896C509.876 345.201 520.204 346.194 520.204 346.194H626.849C626.849 346.194 644.266 347.683 643.166 363.897C642.066 380.11 632.288 380.11 632.288 380.11H186.032C186.032 380.11 166.964 379.118 167.514 364.393C168.064 349.668 186.582 350.661 186.582 350.661H121.801C121.801 350.661 104.628 351.598 104.628 338.252C104.628 320.715 121.862 322.149 121.862 322.149H142.457C142.457 322.149 159.264 323.362 159.264 306.101C159.264 293.748 144.657 292.7 144.657 292.7H77.6151C77.6151 292.7 56.4084 290.439 56.4695 275.769C56.5918 260.879 66.3089 255.364 78.8374 255.364Z") + ) ~fill:(("#0B1627")) + ~children:[] ()) + [@JSX ]); + ((path ~opacity:(("0.57")) + ~d:(("M393.453 244.555C393.453 244.555 398.709 220.841 391.437 205.344C384.103 189.848 369.68 187.145 369.68 187.145C366.991 161.722 351.59 142.916 317.794 145.288C283.998 147.714 270.614 166.134 270.614 166.134C270.614 166.134 274.342 158.358 267.925 150.251C261.508 142.144 247.757 143.799 247.757 143.799C247.757 143.799 253.502 135.14 241.646 128.578C229.79 122.015 220.378 126.868 220.378 126.868C220.378 126.868 226.55 103.595 203.572 99.1834C180.593 94.7163 163.542 110.379 163.542 110.379C163.542 110.379 153.641 104.753 138.852 109.441C124.001 114.184 122.717 134.203 122.717 134.203C122.717 134.203 90.5713 118.541 55.675 137.457C31.5349 150.527 27.2569 178.377 26.9514 194.425C32.9406 207.495 40.152 215.547 40.152 215.547C31.5349 242.349 51.886 248.14 51.886 248.14H90.0824H358.496L393.453 244.555Z") + ) ~fill:(("#0D1522")) + ~children:[] ()) + [@JSX ]); + ((path ~className:(("HeroGraphic-cloudLeft") + ) ~opacity:(("0.7")) + ~d:(("M0.4885 96.7017H97.599C97.599 96.7017 99.0047 89.1463 88.6152 87.8779C88.6152 87.8779 87.393 74.4216 72.6644 69.6237C57.9359 64.8258 51.2133 72.2157 51.2133 72.2157C51.2133 72.2157 46.6909 67.3075 40.8239 68.0244C34.8958 68.7413 32.4513 75.5246 32.4513 75.5246C32.4513 75.5246 28.9677 73.3187 25.3009 75.1386C21.634 76.9585 21.8174 79.385 21.8174 79.385C21.8174 79.385 17.6005 78.4475 14.6059 78.282C11.6113 78.0615 -2.75056 78.999 0.4885 96.7017Z") + ) ~fill:(("white")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M326.044 228.066H250.141V241.246H326.044V228.066Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path ~opacity:(("0.6")) + ~d:(("M331.606 228.066H291.759V240.198H331.606V228.066Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M432.811 224.15H375.303V240.915H432.811V224.15Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M382.514 226.466H378.419V227.128H382.514V226.466Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M389.053 226.466H384.959V227.128H389.053V226.466Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M395.531 226.466H391.437V227.128H395.531V226.466Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M402.009 226.466H397.915V227.128H402.009V226.466Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M408.488 226.466H404.393V227.128H408.488V226.466Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M414.966 226.466H410.871V227.128H414.966V226.466Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M421.444 226.466H417.349V227.128H421.444V226.466Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M427.983 226.466H423.888V227.128H427.983V226.466Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M382.514 
228.727H378.419V229.389H382.514V228.727Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M389.053 228.727H384.959V229.389H389.053V228.727Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M395.531 228.727H391.437V229.389H395.531V228.727Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M402.009 228.727H397.915V229.389H402.009V228.727Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M408.488 228.727H404.393V229.389H408.488V228.727Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M414.966 228.727H410.871V229.389H414.966V228.727Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M421.444 228.727H417.349V229.389H421.444V228.727Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M427.983 228.727H423.888V229.389H427.983V228.727Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M382.514 230.988H378.419V231.65H382.514V230.988Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M389.053 230.988H384.959V231.65H389.053V230.988Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M395.531 230.988H391.437V231.65H395.531V230.988Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M402.009 230.988H397.915V231.65H402.009V230.988Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M408.488 230.988H404.393V231.65H408.488V230.988Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M414.966 230.988H410.871V231.65H414.966V230.988Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M421.444 230.988H417.349V231.65H421.444V230.988Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M427.983 230.988H423.888V231.65H427.983V230.988Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M382.514 233.25H378.419V233.911H382.514V233.25Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M389.053 233.25H384.959V233.911H389.053V233.25Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M395.531 233.25H391.437V233.911H395.531V233.25Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M402.009 233.25H397.915V233.911H402.009V233.25Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M408.488 233.25H404.393V233.911H408.488V233.25Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M414.966 233.25H410.871V233.911H414.966V233.25Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M421.444 233.25H417.349V233.911H421.444V233.25Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M427.983 233.25H423.888V233.911H427.983V233.25Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M382.514 235.511H378.419V236.172H382.514V235.511Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M389.053 235.511H384.959V236.172H389.053V235.511Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M395.531 235.511H391.437V236.172H395.531V235.511Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M402.009 235.511H397.915V236.172H402.009V235.511Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M408.488 235.511H404.393V236.172H408.488V235.511Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path 
+ ~d:(("M414.966 235.511H410.871V236.172H414.966V235.511Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M421.444 235.511H417.349V236.172H421.444V235.511Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M427.983 235.511H423.888V236.172H427.983V235.511Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M378.969 224.15H375.303V240.915H378.969V224.15Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M178.087 221.999H171.181V225.032H178.087V221.999Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M186.826 221.999H179.92V225.032H186.826V221.999Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M195.566 221.999H188.66V225.032H195.566V221.999Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M178.087 226.411H171.181V229.444H178.087V226.411Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M186.826 226.411H179.92V229.444H186.826V226.411Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M195.566 226.411H188.66V229.444H195.566V226.411Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M160.914 230.933H112.206V244.555H160.914V230.933Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M117.4 220.234H115.689V221.779H117.4V220.234Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M122.412 220.234H120.701V221.779H122.412V220.234Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M127.362 220.234H125.651V221.779H127.362V220.234Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M132.312 220.234H130.601V221.779H132.312V220.234Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M137.263 220.234H135.551V221.779H137.263V220.234Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M142.274 220.234H140.563V221.779H142.274V220.234Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M147.224 220.234H145.513V221.779H147.224V220.234Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M152.174 220.234H150.463V221.779H152.174V220.234Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M157.125 220.234H155.413V221.779H157.125V220.234Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M117.4 223.819H115.689V225.363H117.4V223.819Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M122.412 223.819H120.701V225.363H122.412V223.819Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M127.362 223.819H125.651V225.363H127.362V223.819Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M132.312 223.819H130.601V225.363H132.312V223.819Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M137.263 223.819H135.551V225.363H137.263V223.819Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M142.274 223.819H140.563V225.363H142.274V223.819Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M147.224 223.819H145.513V225.363H147.224V223.819Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M152.174 223.819H150.463V225.363H152.174V223.819Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M157.125 223.819H155.413V225.363H157.125V223.819Z") + ) ~fill:(("#E37056")) + ~children:[] 
()) + [@JSX ]); + ((path ~d:(("M117.4 227.459H115.689V229.003H117.4V227.459Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M122.412 227.459H120.701V229.003H122.412V227.459Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M127.362 227.459H125.651V229.003H127.362V227.459Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M132.312 227.459H130.601V229.003H132.312V227.459Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M137.263 227.459H135.551V229.003H137.263V227.459Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M142.274 227.459H140.563V229.003H142.274V227.459Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M147.224 227.459H145.513V229.003H147.224V227.459Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M152.174 227.459H150.463V229.003H152.174V227.459Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M157.125 227.459H155.413V229.003H157.125V227.459Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~className:(("HeroGraphic-wave")) + ~d:(("M81.5264 248.14H667.612C667.612 248.14 680.14 249.518 680.14 263.747C680.14 277.975 670.362 280.953 670.362 280.953H582.174C582.174 280.953 594.703 282.939 594.703 297.663C594.703 312.388 585.474 313.877 585.474 313.877H527.233C527.233 313.877 510.365 315.366 511.465 326.671C512.565 337.977 522.893 338.969 522.893 338.969H629.538C629.538 338.969 646.955 340.458 645.855 356.672C644.755 372.886 634.977 372.886 634.977 372.886H188.721C188.721 372.886 169.653 371.893 170.203 357.168C170.753 342.444 189.271 343.436 189.271 343.436H124.49C124.49 343.436 107.317 344.374 107.317 331.028C107.317 313.491 124.551 314.925 124.551 314.925H145.146C145.146 314.925 161.953 316.138 161.953 298.876C161.953 286.523 147.346 285.475 147.346 285.475H80.3041C80.3041 285.475 59.0975 283.214 59.1586 268.545C59.2808 253.655 68.998 248.14 81.5264 248.14Z") + ) ~fill:(("url(#paint0_linear)") + ) ~fillOpacity:(("0.7") + ) ~children:[] ()) + [@JSX ]); + ((path ~opacity:(("0.66")) + ~d:(("M112.206 248.14V285.365H147.408C147.408 285.365 151.869 285.696 155.841 288.067H160.914V274.39L169.592 269.923V268.655H181.998H199.477V264.464H201.066V273.949L205.527 274.28V284.979V285.641H203.571V287.35H228.751H252.096V285.641H250.141V284.979V277.424L265.419 278.527H274.831V264.298H279.048V273.674H291.393V286.799H291.82V298.38V299.704H297.26V303.013H300.804V312.002H301.904V303.013H303.31V312.002H304.41V303.013H318.222V312.002H319.322V303.013H320.727V312.002H321.828V303.013H326.228V299.704H331.667V298.38V286.799H341.812V273.839H354.707V273.949H354.89V287.24H390.948V282.497H396.265V290.935H401.948V306.763L405.798 306.983L406.349 307.038L412.582 307.424H412.704L414.966 307.59L415.821 307.645L417.349 307.755L418.205 307.81L419.733 307.921L420.588 307.976L425.233 308.252V304.998H432.933V278.747H439.472L445.034 282.938V293.692H475.285V276.155H508.47V287.295H544.528V282.552H550.639V294.575V295.126H559.806V295.568V297.222H564.757V304.722H566.529V297.167H569.707V295.512V295.071H571.418V308.362H592.319C593.725 305.935 594.703 302.516 594.703 297.608C594.703 282.883 582.174 280.898 582.174 280.898H635.894V248.085H112.206V248.14Z") + ) ~fill:(("#0D1522")) + ~children:[] ()) + [@JSX ]); + ((path ~opacity:(("0.5")) + ~d:(("M125.712 326.34V326.782H126.14V326.34H129.073L130.54 339.19V339.025L131.885 326.34H134.879L136.346 339.19L137.69 
326.34H140.379V326.782H140.929V326.34H141.235V326.23H140.929V321.984H144.169V314.87H124.673C124.673 314.87 123.817 314.814 122.412 314.87V321.929H125.773V326.175V326.34H125.712ZM138.118 321.929H140.379V326.175H137.69L138.118 321.929ZM132.312 321.929H134.329L134.818 326.175H131.823L132.312 321.929ZM126.079 321.929H128.523L129.012 326.175H126.14V321.929H126.079Z") + ) ~fill:(("#0D1522")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M552.717 270.806C552.717 272.57 551.128 274.06 549.111 274.06H480.358C478.402 274.06 476.752 272.626 476.752 270.806C476.752 269.041 478.341 267.552 480.358 267.552H549.173C551.128 267.552 552.717 269.041 552.717 270.806Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M165.008 261.1C165.008 262.313 163.97 263.25 162.625 263.25H153.458C152.113 263.25 151.074 262.313 151.074 261.1C151.074 259.886 152.113 258.949 153.458 258.949H162.625C163.97 258.949 165.008 259.941 165.008 261.1Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M226.673 263.25C226.673 264.464 225.634 265.401 224.289 265.401H192.449C191.104 265.401 190.065 264.464 190.065 263.25C190.065 262.037 191.104 261.1 192.449 261.1H224.289C225.573 261.1 226.673 262.092 226.673 263.25Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M376.036 297.553C376.036 298.27 375.425 298.821 374.63 298.821H361.613C360.819 298.821 360.207 298.27 360.207 297.553C360.207 296.836 360.819 296.284 361.613 296.284H374.63C375.425 296.284 376.036 296.836 376.036 297.553Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path ~className:(("HeroGraphic-wave")) + ~d:(("M391.681 296.836C391.681 295.733 392.659 294.85 393.881 294.85H479.747C480.969 294.85 481.947 295.733 481.947 296.836C481.947 297.939 480.969 298.821 479.747 298.821H393.881C392.659 298.766 391.681 297.884 391.681 296.836Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M208.4 296.174C208.4 296.891 207.727 297.498 206.933 297.498H194.343C193.549 297.498 192.877 296.891 192.877 296.174C192.877 295.457 193.549 294.85 194.343 294.85H206.872C207.727 294.85 208.4 295.457 208.4 296.174Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path ~className:(("HeroGraphic-wave")) + ~d:(("M309.788 324.631C309.788 326.009 308.566 327.168 306.977 327.168H236.634C235.106 327.168 233.823 326.065 233.823 324.631C233.823 323.252 235.045 322.094 236.634 322.094H307.038C308.505 322.094 309.788 323.252 309.788 324.631Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M372.186 328.932C372.186 329.925 371.269 330.752 370.169 330.752H355.074C353.974 330.752 353.057 329.925 353.057 328.932C353.057 327.94 353.974 327.112 355.074 327.112H370.169C371.33 327.112 372.186 327.94 372.186 328.932Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M537.255 360.863C537.255 360.477 537.622 360.146 538.05 360.146H551.984C552.412 360.146 552.778 360.477 552.778 360.863C552.778 361.249 552.412 361.58 551.984 361.58H538.05C537.561 361.58 537.255 361.249 537.255 360.863Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M224.656 359.816C224.656 358.823 223.739 357.996 222.639 357.996H212.678C211.578 357.996 210.661 358.823 210.661 359.816C210.661 360.808 211.578 361.635 212.678 361.635H222.639C223.8 361.58 224.656 360.753 224.656 359.816Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M151.136 332.517C151.136 331.524 150.219 330.697 149.119 330.697H141.968C140.868 330.697 139.952 
331.524 139.952 332.517C139.952 333.51 140.868 334.337 141.968 334.337H149.119C150.219 334.337 151.136 333.51 151.136 332.517Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path ~opacity:(("0.6")) + ~d:(("M250.202 228.066H205.649V241.246H250.202V228.066Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M630.149 117.052H611.326V120.802H630.149V117.052Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M630.149 119.588H611.326V120.857H630.149V119.588Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path ~opacity:(("0.6")) + ~d:(("M199.538 230.933H160.914V240.529H199.538V230.933Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M339.795 222.606H337.473V224.702H339.795V222.606Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M345.234 222.606H342.912V224.702H345.234V222.606Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M350.673 222.606H348.351V224.702H350.673V222.606Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M356.052 222.606H353.79V224.702H356.052V222.606Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M361.552 222.606H359.229V224.702H361.552V222.606Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M366.991 222.606H364.669V224.702H366.991V222.606Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M372.43 222.606H370.108V224.702H372.43V222.606Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M480.175 221.007H478.769V222.661H480.175V221.007Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M483.536 221.007H482.13V222.661H483.536V221.007Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M486.836 221.007H485.43V222.661H486.836V221.007Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M490.197 221.007H488.792V222.661H490.197V221.007Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M493.497 221.007H492.092V222.661H493.497V221.007Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M496.859 221.007H495.453V222.661H496.859V221.007Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M500.22 221.007H498.814V222.661H500.22V221.007Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M503.337 221.007H501.931V222.661H503.337V221.007Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M507.493 221.007H506.087V222.661H507.493V221.007Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M480.175 224.26H478.769V225.915H480.175V224.26Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M483.536 224.26H482.13V225.915H483.536V224.26Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M486.836 224.26H485.43V225.915H486.836V224.26Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M490.197 224.26H488.792V225.915H490.197V224.26Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M493.497 224.26H492.092V225.915H493.497V224.26Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M496.859 224.26H495.453V225.915H496.859V224.26Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M500.22 224.26H498.814V225.915H500.22V224.26Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + 
~d:(("M503.337 224.26H501.931V225.915H503.337V224.26Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M507.493 224.26H506.087V225.915H507.493V224.26Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M480.175 227.569H478.769V229.224H480.175V227.569Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M483.536 227.569H482.13V229.224H483.536V227.569Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M486.836 227.569H485.43V229.224H486.836V227.569Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M490.197 227.569H488.792V229.224H490.197V227.569Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M493.497 227.569H492.092V229.224H493.497V227.569Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M496.859 227.569H495.453V229.224H496.859V227.569Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M500.22 227.569H498.814V229.224H500.22V227.569Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M503.337 227.569H501.931V229.224H503.337V227.569Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M507.493 227.569H506.087V229.224H507.493V227.569Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M480.175 230.878H478.769V232.533H480.175V230.878Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M483.536 230.878H482.13V232.533H483.536V230.878Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M486.836 230.878H485.43V232.533H486.836V230.878Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M490.197 230.878H488.792V232.533H490.197V230.878Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M493.497 230.878H492.092V232.533H493.497V230.878Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M496.859 230.878H495.453V232.533H496.859V230.878Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M500.22 230.878H498.814V232.533H500.22V230.878Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M503.337 230.878H501.931V232.533H503.337V230.878Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M507.493 230.878H506.087V232.533H507.493V230.878Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M480.175 234.132H478.769V235.786H480.175V234.132Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M483.536 234.132H482.13V235.786H483.536V234.132Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M486.836 234.132H485.43V235.786H486.836V234.132Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M490.197 234.132H488.792V235.786H490.197V234.132Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M493.497 234.132H492.092V235.786H493.497V234.132Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M496.859 234.132H495.453V235.786H496.859V234.132Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M500.22 234.132H498.814V235.786H500.22V234.132Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M503.337 234.132H501.931V235.786H503.337V234.132Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M507.493 234.132H506.087V235.786H507.493V234.132Z") + ) ~fill:(("#E37056")) + ~children:[] 
[… remainder of the added test-expectation hunk (line breaks lost in extraction): several hundred further printed-parsetree elements of the form
  ((path ~d:(("M…")) ~fill:(("#E37056")) ~children:[] ()) [@JSX ]);
interleaved with larger panels filled with "#2484C6", "#1073AA", "#1474AA", "#21477C" and "#1B3B68" (some carrying an ~opacity argument), all SVG path data from the JSX component under test …]
118.871H135.185V119.864H136.285V118.871Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M139.463 118.871H138.363V119.864H139.463V118.871Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M142.58 118.871H141.479V119.864H142.58V118.871Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M145.696 118.871H144.596V119.864H145.696V118.871Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M120.639 122.732H119.539V123.724H120.639V122.732Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M123.756 122.732H122.656V123.724H123.756V122.732Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M126.873 122.732H125.773V123.724H126.873V122.732Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M130.051 122.732H128.951V123.724H130.051V122.732Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M133.168 122.732H132.068V123.724H133.168V122.732Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M136.285 122.732H135.185V123.724H136.285V122.732Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M139.463 122.732H138.363V123.724H139.463V122.732Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M142.58 122.732H141.479V123.724H142.58V122.732Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M145.696 122.732H144.596V123.724H145.696V122.732Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M120.639 126.592H119.539V127.585H120.639V126.592Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M123.756 126.592H122.656V127.585H123.756V126.592Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M126.873 126.592H125.773V127.585H126.873V126.592Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M130.051 126.592H128.951V127.585H130.051V126.592Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M133.168 126.592H132.068V127.585H133.168V126.592Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M136.285 126.592H135.185V127.585H136.285V126.592Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M139.463 126.592H138.363V127.585H139.463V126.592Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M142.58 126.592H141.479V127.585H142.58V126.592Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M145.696 126.592H144.596V127.585H145.696V126.592Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M120.639 130.397H119.539V131.39H120.639V130.397Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M123.756 130.397H122.656V131.39H123.756V130.397Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M126.873 130.397H125.773V131.39H126.873V130.397Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M130.051 130.397H128.951V131.39H130.051V130.397Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M133.168 130.397H132.068V131.39H133.168V130.397Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M136.285 130.397H135.185V131.39H136.285V130.397Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M139.463 130.397H138.363V131.39H139.463V130.397Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX 
]); + ((path ~d:(("M142.58 130.397H141.479V131.39H142.58V130.397Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M145.696 130.397H144.596V131.39H145.696V130.397Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M120.639 134.258H119.539V135.251H120.639V134.258Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M123.756 134.258H122.656V135.251H123.756V134.258Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M126.873 134.258H125.773V135.251H126.873V134.258Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M130.051 134.258H128.951V135.251H130.051V134.258Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M133.168 134.258H132.068V135.251H133.168V134.258Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M136.285 134.258H135.185V135.251H136.285V134.258Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M139.463 134.258H138.363V135.251H139.463V134.258Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M142.58 134.258H141.479V135.251H142.58V134.258Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M145.696 134.258H144.596V135.251H145.696V134.258Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M120.639 138.063H119.539V139.056H120.639V138.063Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M123.756 138.063H122.656V139.056H123.756V138.063Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M126.873 138.063H125.773V139.056H126.873V138.063Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M130.051 138.063H128.951V139.056H130.051V138.063Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M133.168 138.063H132.068V139.056H133.168V138.063Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M136.285 138.063H135.185V139.056H136.285V138.063Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M139.463 138.063H138.363V139.056H139.463V138.063Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M142.58 138.063H141.479V139.056H142.58V138.063Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M145.696 138.063H144.596V139.056H145.696V138.063Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M120.639 141.924H119.539V142.916H120.639V141.924Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M123.756 141.924H122.656V142.916H123.756V141.924Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M126.873 141.924H125.773V142.916H126.873V141.924Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M130.051 141.924H128.951V142.916H130.051V141.924Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M133.168 141.924H132.068V142.916H133.168V141.924Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M136.285 141.924H135.185V142.916H136.285V141.924Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M139.463 141.924H138.363V142.916H139.463V141.924Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M142.58 141.924H141.479V142.916H142.58V141.924Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M145.696 141.924H144.596V142.916H145.696V141.924Z") + ) 
~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M121.556 109.496H119.539V111.316H121.556V109.496Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M123.94 109.496H121.923V111.316H123.94V109.496Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M126.384 109.496H124.367V111.316H126.384V109.496Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M128.768 109.496H126.751V111.316H128.768V109.496Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M131.212 109.496H129.195V111.316H131.212V109.496Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M133.596 109.496H131.579V111.316H133.596V109.496Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M136.04 109.496H134.023V111.316H136.04V109.496Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M138.424 109.496H136.407V111.316H138.424V109.496Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M140.868 109.496H138.851V111.316H140.868V109.496Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M143.252 109.496H141.235V111.316H143.252V109.496Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M145.696 109.496H143.68V111.316H145.696V109.496Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M121.556 111.647H119.539V113.467H121.556V111.647Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M123.94 111.647H121.923V113.467H123.94V111.647Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M126.384 111.647H124.367V113.467H126.384V111.647Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M128.768 111.647H126.751V113.467H128.768V111.647Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M131.212 111.647H129.195V113.467H131.212V111.647Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M133.596 111.647H131.579V113.467H133.596V111.647Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M136.04 111.647H134.023V113.467H136.04V111.647Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M138.424 111.647H136.407V113.467H138.424V111.647Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M140.868 111.647H138.851V113.467H140.868V111.647Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M143.252 111.647H141.235V113.467H143.252V111.647Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M145.696 111.647H143.68V113.467H145.696V111.647Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M117.462 141.317V146.335H144.046H147.835H154.191V141.317H117.462Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M118.928 150.968H115.689V153.891H118.928V150.968Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M122.778 150.968H119.539V153.891H122.778V150.968Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M126.568 150.968H123.328V153.891H126.568V150.968Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M130.418 150.968H127.179V153.891H130.418V150.968Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M134.268 150.968H131.029V153.891H134.268V150.968Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M138.057 
150.968H134.818V153.891H138.057V150.968Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M141.907 150.968H138.668V153.891H141.907V150.968Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M145.696 150.968H142.457V153.891H145.696V150.968Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M149.546 150.968H146.307V153.891H149.546V150.968Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M153.336 150.968H150.097V153.891H153.336V150.968Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M157.186 150.968H153.947V153.891H157.186V150.968Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M118.928 154.442H115.689V157.365H118.928V154.442Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M117.4 158.634H115.689V160.178H117.4V158.634Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M122.412 158.634H120.701V160.178H122.412V158.634Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M127.362 158.634H125.651V160.178H127.362V158.634Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M132.312 158.634H130.601V160.178H132.312V158.634Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M137.263 158.634H135.551V160.178H137.263V158.634Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M142.274 158.634H140.563V160.178H142.274V158.634Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M147.224 158.634H145.513V160.178H147.224V158.634Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M152.174 158.634H150.463V160.178H152.174V158.634Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M157.125 158.634H155.414V160.178H157.125V158.634Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M117.4 162.273H115.689V163.818H117.4V162.273Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M122.412 162.273H120.701V163.818H122.412V162.273Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M127.362 162.273H125.651V163.818H127.362V162.273Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M132.312 162.273H130.601V163.818H132.312V162.273Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M137.263 162.273H135.551V163.818H137.263V162.273Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M142.274 162.273H140.563V163.818H142.274V162.273Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M147.224 162.273H145.513V163.818H147.224V162.273Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M152.174 162.273H150.463V163.818H152.174V162.273Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M157.125 162.273H155.414V163.818H157.125V162.273Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M117.4 165.913H115.689V167.457H117.4V165.913Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M122.412 165.913H120.701V167.457H122.412V165.913Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M127.362 165.913H125.651V167.457H127.362V165.913Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M132.312 165.913H130.601V167.457H132.312V165.913Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + 
((path + ~d:(("M137.263 165.913H135.551V167.457H137.263V165.913Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M142.274 165.913H140.563V167.457H142.274V165.913Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M147.224 165.913H145.513V167.457H147.224V165.913Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M152.174 165.913H150.463V167.457H152.174V165.913Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M157.125 165.913H155.414V167.457H157.125V165.913Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M117.4 169.498H115.689V171.042H117.4V169.498Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M122.412 169.498H120.701V171.042H122.412V169.498Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M127.362 169.498H125.651V171.042H127.362V169.498Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M132.312 169.498H130.601V171.042H132.312V169.498Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M137.263 169.498H135.551V171.042H137.263V169.498Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M142.274 169.498H140.563V171.042H142.274V169.498Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M147.224 169.498H145.513V171.042H147.224V169.498Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M152.174 169.498H150.463V171.042H152.174V169.498Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M157.125 169.498H155.414V171.042H157.125V169.498Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M117.4 173.138H115.689V174.682H117.4V173.138Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M122.412 173.138H120.701V174.682H122.412V173.138Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M127.362 173.138H125.651V174.682H127.362V173.138Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M132.312 173.138H130.601V174.682H132.312V173.138Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M137.263 173.138H135.551V174.682H137.263V173.138Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M142.274 173.138H140.563V174.682H142.274V173.138Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M147.224 173.138H145.513V174.682H147.224V173.138Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M152.174 173.138H150.463V174.682H152.174V173.138Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M157.125 173.138H155.414V174.682H157.125V173.138Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M117.4 176.777H115.689V178.322H117.4V176.777Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M122.412 176.777H120.701V178.322H122.412V176.777Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M127.362 176.777H125.651V178.322H127.362V176.777Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M132.312 176.777H130.601V178.322H132.312V176.777Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M137.263 176.777H135.551V178.322H137.263V176.777Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M142.274 176.777H140.563V178.322H142.274V176.777Z") + ) ~fill:(("#E37056")) + 
~children:[] ()) + [@JSX ]); + ((path + ~d:(("M147.224 176.777H145.513V178.322H147.224V176.777Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M152.174 176.777H150.463V178.322H152.174V176.777Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M157.125 176.777H155.414V178.322H157.125V176.777Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M117.4 180.362H115.689V181.906H117.4V180.362Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M122.412 180.362H120.701V181.906H122.412V180.362Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M127.362 180.362H125.651V181.906H127.362V180.362Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M132.312 180.362H130.601V181.906H132.312V180.362Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M137.263 180.362H135.551V181.906H137.263V180.362Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M142.274 180.362H140.563V181.906H142.274V180.362Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M147.224 180.362H145.513V181.906H147.224V180.362Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M152.174 180.362H150.463V181.906H152.174V180.362Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M157.125 180.362H155.414V181.906H157.125V180.362Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M117.4 184.002H115.689V185.546H117.4V184.002Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M122.412 184.002H120.701V185.546H122.412V184.002Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M127.362 184.002H125.651V185.546H127.362V184.002Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M132.312 184.002H130.601V185.546H132.312V184.002Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M137.263 184.002H135.551V185.546H137.263V184.002Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M142.274 184.002H140.563V185.546H142.274V184.002Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M147.224 184.002H145.513V185.546H147.224V184.002Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M152.174 184.002H150.463V185.546H152.174V184.002Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M157.125 184.002H155.414V185.546H157.125V184.002Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M117.4 187.642H115.689V189.186H117.4V187.642Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M122.412 187.642H120.701V189.186H122.412V187.642Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M127.362 187.642H125.651V189.186H127.362V187.642Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M132.312 187.642H130.601V189.186H132.312V187.642Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M137.263 187.642H135.551V189.186H137.263V187.642Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M142.274 187.642H140.563V189.186H142.274V187.642Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M147.224 187.642H145.513V189.186H147.224V187.642Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M152.174 187.642H150.463V189.186H152.174V187.642Z") + 
) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M157.125 187.642H155.414V189.186H157.125V187.642Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M117.4 191.226H115.689V192.771H117.4V191.226Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M122.412 191.226H120.701V192.771H122.412V191.226Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M127.362 191.226H125.651V192.771H127.362V191.226Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M132.312 191.226H130.601V192.771H132.312V191.226Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M137.263 191.226H135.551V192.771H137.263V191.226Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M142.274 191.226H140.563V192.771H142.274V191.226Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M147.224 191.226H145.513V192.771H147.224V191.226Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M152.174 191.226H150.463V192.771H152.174V191.226Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M157.125 191.226H155.414V192.771H157.125V191.226Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M117.4 194.866H115.689V196.41H117.4V194.866Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M122.412 194.866H120.701V196.41H122.412V194.866Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M127.362 194.866H125.651V196.41H127.362V194.866Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M132.312 194.866H130.601V196.41H132.312V194.866Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M137.263 194.866H135.551V196.41H137.263V194.866Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M142.274 194.866H140.563V196.41H142.274V194.866Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M147.224 194.866H145.513V196.41H147.224V194.866Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M152.174 194.866H150.463V196.41H152.174V194.866Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M157.125 194.866H155.414V196.41H157.125V194.866Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M117.4 198.506H115.689V200.05H117.4V198.506Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M122.412 198.506H120.701V200.05H122.412V198.506Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M127.362 198.506H125.651V200.05H127.362V198.506Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M132.312 198.506H130.601V200.05H132.312V198.506Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M137.263 198.506H135.551V200.05H137.263V198.506Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M142.274 198.506H140.563V200.05H142.274V198.506Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M147.224 198.506H145.513V200.05H147.224V198.506Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M152.174 198.506H150.463V200.05H152.174V198.506Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M157.125 198.506H155.414V200.05H157.125V198.506Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M117.4 202.091H115.689V203.635H117.4V202.091Z") + 
) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M122.412 202.091H120.701V203.635H122.412V202.091Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M127.362 202.091H125.651V203.635H127.362V202.091Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M132.312 202.091H130.601V203.635H132.312V202.091Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M137.263 202.091H135.551V203.635H137.263V202.091Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M142.274 202.091H140.563V203.635H142.274V202.091Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M147.224 202.091H145.513V203.635H147.224V202.091Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M152.174 202.091H150.463V203.635H152.174V202.091Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M157.125 202.091H155.414V203.635H157.125V202.091Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M117.4 205.73H115.689V207.275H117.4V205.73Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M122.412 205.73H120.701V207.275H122.412V205.73Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M127.362 205.73H125.651V207.275H127.362V205.73Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M132.312 205.73H130.601V207.275H132.312V205.73Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M137.263 205.73H135.551V207.275H137.263V205.73Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M142.274 205.73H140.563V207.275H142.274V205.73Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M147.224 205.73H145.513V207.275H147.224V205.73Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M152.174 205.73H150.463V207.275H152.174V205.73Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M157.125 205.73H155.414V207.275H157.125V205.73Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M117.4 209.37H115.689V210.914H117.4V209.37Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M122.412 209.37H120.701V210.914H122.412V209.37Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M127.362 209.37H125.651V210.914H127.362V209.37Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M132.312 209.37H130.601V210.914H132.312V209.37Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M137.263 209.37H135.551V210.914H137.263V209.37Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M142.274 209.37H140.563V210.914H142.274V209.37Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M147.224 209.37H145.513V210.914H147.224V209.37Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M152.174 209.37H150.463V210.914H152.174V209.37Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M157.125 209.37H155.414V210.914H157.125V209.37Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M117.4 212.955H115.689V214.499H117.4V212.955Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M122.412 212.955H120.701V214.499H122.412V212.955Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M127.362 212.955H125.651V214.499H127.362V212.955Z") + ) 
~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M132.312 212.955H130.601V214.499H132.312V212.955Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M137.263 212.955H135.551V214.499H137.263V212.955Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M142.274 212.955H140.563V214.499H142.274V212.955Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M147.224 212.955H145.513V214.499H147.224V212.955Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M152.174 212.955H150.463V214.499H152.174V212.955Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M157.125 212.955H155.414V214.499H157.125V212.955Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M117.4 216.595H115.689V218.139H117.4V216.595Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M122.412 216.595H120.701V218.139H122.412V216.595Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M127.362 216.595H125.651V218.139H127.362V216.595Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M132.312 216.595H130.601V218.139H132.312V216.595Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M137.263 216.595H135.551V218.139H137.263V216.595Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M142.274 216.595H140.563V218.139H142.274V216.595Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M147.224 216.595H145.513V218.139H147.224V216.595Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M152.174 216.595H150.463V218.139H152.174V216.595Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M157.125 216.595H155.414V218.139H157.125V216.595Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M122.778 154.442H119.539V157.365H122.778V154.442Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M126.568 154.442H123.328V157.365H126.568V154.442Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M130.418 154.442H127.179V157.365H130.418V154.442Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M134.268 154.442H131.029V157.365H134.268V154.442Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M138.057 154.442H134.818V157.365H138.057V154.442Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M141.907 154.442H138.668V157.365H141.907V154.442Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M145.696 154.442H142.457V157.365H145.696V154.442Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M149.546 154.442H146.307V157.365H149.546V154.442Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M153.336 154.442H150.097V157.365H153.336V154.442Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M157.186 154.442H153.947V157.365H157.186V154.442Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M509.998 178.597H476.08V240.474H509.998V178.597Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M250.141 150.968H205.588V241.246H250.141V150.968Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M213.594 157.475H209.377V159.351H213.594V157.475Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M220.378 
160.288H216.161V162.163H220.378V160.288Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M237.368 160.288H233.151V162.163H237.368V160.288Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M228.751 157.475H224.534V159.351H228.751V157.475Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M245.863 157.475H241.646V159.351H245.863V157.475Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M213.594 164.204H209.377V166.079H213.594V164.204Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M220.378 167.016H216.161V168.891H220.378V167.016Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M237.368 167.016H233.151V168.891H237.368V167.016Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M228.751 164.204H224.534V166.079H228.751V164.204Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M245.863 164.204H241.646V166.079H245.863V164.204Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M213.594 170.932H209.377V172.807H213.594V170.932Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M220.378 173.689H216.161V175.564H220.378V173.689Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M237.368 173.689H233.151V175.564H237.368V173.689Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M228.751 170.932H224.534V172.807H228.751V170.932Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M245.863 170.932H241.646V172.807H245.863V170.932Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M213.594 177.605H209.377V179.48H213.594V177.605Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M220.378 180.417H216.161V182.292H220.378V180.417Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M237.368 180.417H233.151V182.292H237.368V180.417Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M228.751 177.605H224.534V179.48H228.751V177.605Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M245.863 177.605H241.646V179.48H245.863V177.605Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M213.594 184.333H209.377V186.208H213.594V184.333Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M220.378 187.145H216.161V189.02H220.378V187.145Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M237.368 187.145H233.151V189.02H237.368V187.145Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M228.751 184.333H224.534V186.208H228.751V184.333Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M245.863 184.333H241.646V186.208H245.863V184.333Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M213.594 191.061H209.377V192.936H213.594V191.061Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M220.378 193.874H216.161V195.749H220.378V193.874Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M237.368 193.874H233.151V195.749H237.368V193.874Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M228.751 191.061H224.534V192.936H228.751V191.061Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M245.863 191.061H241.646V192.936H245.863V191.061Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + 
[@JSX ]); + ((path + ~d:(("M213.594 197.789H209.377V199.664H213.594V197.789Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M220.378 200.602H216.161V202.477H220.378V200.602Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M237.368 200.602H233.151V202.477H237.368V200.602Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M228.751 197.789H224.534V199.664H228.751V197.789Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M245.863 197.789H241.646V199.664H245.863V197.789Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M213.594 204.517H209.377V206.392H213.594V204.517Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M220.378 207.33H216.161V209.205H220.378V207.33Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M237.368 207.33H233.151V209.205H237.368V207.33Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M228.751 204.517H224.534V206.392H228.751V204.517Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M245.863 204.517H241.646V206.392H245.863V204.517Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M213.594 211.245H209.377V213.12H213.594V211.245Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M220.378 214.003H216.161V215.878H220.378V214.003Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M237.368 214.003H233.151V215.878H237.368V214.003Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M228.751 211.245H224.534V213.12H228.751V211.245Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M245.863 211.245H241.646V213.12H245.863V211.245Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M213.594 217.918H209.377V219.793H213.594V217.918Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M220.378 220.731H216.161V222.606H220.378V220.731Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M237.368 220.731H233.151V222.606H237.368V220.731Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M228.751 217.918H224.534V219.793H228.751V217.918Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M245.863 217.918H241.646V219.793H245.863V217.918Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M213.594 224.646H209.377V226.521H213.594V224.646Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M220.378 227.459H216.161V229.334H220.378V227.459Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M237.368 227.459H233.151V229.334H237.368V227.459Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M228.751 224.646H224.534V226.521H228.751V224.646Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M245.863 224.646H241.646V226.521H245.863V224.646Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M476.141 192.55H432.628V241.081H476.141V192.55Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M465.996 185.27H441.306V192.55H465.996V185.27Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M465.996 184.057H441.306V185.27H465.996V184.057Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M465.996 178.873H441.306V180.086H465.996V178.873Z") + ) 
~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M464.163 180.086H442.406V184.057H464.163V180.086Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M464.163 180.086H442.406V180.858H464.163V180.086Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M442.406 178.873L448.884 165.527H458.051L464.957 178.873H442.406Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M531.877 158.964L514.765 181.741V240.364H531.877H549.05V181.741L531.877 158.964Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M549.05 189.241H514.765V189.958H549.05V189.241Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M549.05 190.785H514.765V191.502H549.05V190.785Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M549.05 192.385H514.765V193.101H549.05V192.385Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M549.05 193.984H514.765V194.701H549.05V193.984Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M549.05 195.528H514.765V196.245H549.05V195.528Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M549.05 197.127H514.765V197.844H549.05V197.127Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M549.05 198.671H514.765V199.388H549.05V198.671Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M549.05 200.271H514.765V200.988H549.05V200.271Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M549.05 201.87H514.765V202.587H549.05V201.87Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M549.05 203.414H514.765V204.131H549.05V203.414Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M549.05 205.014H514.765V205.73H549.05V205.014Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M549.05 206.613H514.765V207.33H549.05V206.613Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M549.05 208.157H514.765V208.874H549.05V208.157Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M549.05 209.756H514.765V210.473H549.05V209.756Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M549.05 211.3H514.765V212.017H549.05V211.3Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M549.05 212.9H514.765V213.617H549.05V212.9Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M549.05 214.499H514.765V215.216H549.05V214.499Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M549.05 216.043H514.765V216.76H549.05V216.043Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M549.05 217.643H514.765V218.36H549.05V217.643Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M549.05 219.187H514.765V219.904H549.05V219.187Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M549.05 220.786H514.765V221.503H549.05V220.786Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M549.05 222.385H514.765V223.102H549.05V222.385Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M549.05 223.93H514.765V224.646H549.05V223.93Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M549.05 225.529H514.765V226.246H549.05V225.529Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M549.05 
227.128H514.765V227.845H549.05V227.128Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M549.05 228.672H514.765V229.389H549.05V228.672Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M549.05 230.272H514.765V230.989H549.05V230.272Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M549.05 231.816H514.765V232.533H549.05V231.816Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M549.05 233.415H514.765V234.132H549.05V233.415Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M549.05 235.014H514.765V235.731H549.05V235.014Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M549.05 236.559H514.765V237.275H549.05V236.559Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M549.05 181.741L531.877 158.964L514.765 181.741L530.594 206.282L549.05 181.741Z") + ) ~fill:(("#1073AA")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M578.568 240.75V162.218L590.975 141.593L603.747 160.84V240.75H578.568Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M587.552 161.832H594.825L591.036 143.026L587.552 161.832Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M635.894 120.802H605.581V241.081H635.894V120.802Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path ~opacity:(("0.6")) + ~d:(("M182.059 188.303H169.531V240.695H182.059V188.303Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M250.141 150.968H205.588V153.064H250.141V150.968Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M252.096 147.99H203.572V151.74H252.096V147.99Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path ~opacity:(("0.4")) + ~d:(("M228.751 241.246V147.99H203.572V151.685H205.588V241.246H228.751Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M331.606 121.298H291.759V124.166H331.606V121.298Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M336.801 199.664H334.173V209.701H336.801V199.664Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M336.801 199.664H334.173V205.014H336.801V199.664Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M338.94 211.962H337.106V213.672H338.94V211.962Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M343.34 211.962H341.506V213.672H343.34V211.962Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M347.801 211.962H345.968V213.672H347.801V211.962Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M352.201 211.962H350.368V213.672H352.201V211.962Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M356.602 211.962H354.768V213.672H356.602V211.962Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M361.002 211.962H359.168V213.672H361.002V211.962Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M365.402 211.962H363.569V213.672H365.402V211.962Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M339.795 217.146H337.473V219.242H339.795V217.146Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M345.234 217.146H342.912V219.242H345.234V217.146Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M350.673 217.146H348.351V219.242H350.673V217.146Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + 
~d:(("M356.052 217.146H353.79V219.242H356.052V217.146Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M361.552 217.146H359.229V219.242H361.552V217.146Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M366.991 217.146H364.669V219.242H366.991V217.146Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M372.43 217.146H370.108V219.242H372.43V217.146Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M480.175 181.52H478.769V183.175H480.175V181.52Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M483.536 181.52H482.13V183.175H483.536V181.52Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M486.836 181.52H485.43V183.175H486.836V181.52Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M490.197 181.52H488.792V183.175H490.197V181.52Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M493.498 181.52H492.092V183.175H493.498V181.52Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M496.859 181.52H495.453V183.175H496.859V181.52Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M500.22 181.52H498.814V183.175H500.22V181.52Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M503.337 181.52H501.931V183.175H503.337V181.52Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M507.493 181.52H506.087V183.175H507.493V181.52Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M480.175 184.774H478.769V186.428H480.175V184.774Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M483.536 184.774H482.13V186.428H483.536V184.774Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M486.836 184.774H485.43V186.428H486.836V184.774Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M490.197 184.774H488.792V186.428H490.197V184.774Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M493.498 184.774H492.092V186.428H493.498V184.774Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M496.859 184.774H495.453V186.428H496.859V184.774Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M500.22 184.774H498.814V186.428H500.22V184.774Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M503.337 184.774H501.931V186.428H503.337V184.774Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M507.493 184.774H506.087V186.428H507.493V184.774Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M480.175 188.083H478.769V189.737H480.175V188.083Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M483.536 188.083H482.13V189.737H483.536V188.083Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M486.836 188.083H485.43V189.737H486.836V188.083Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M490.197 188.083H488.792V189.737H490.197V188.083Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M493.498 188.083H492.092V189.737H493.498V188.083Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M496.859 188.083H495.453V189.737H496.859V188.083Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M500.22 188.083H498.814V189.737H500.22V188.083Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path 
+ ~d:(("M503.337 188.083H501.931V189.737H503.337V188.083Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M507.493 188.083H506.087V189.737H507.493V188.083Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M480.175 191.392H478.769V193.046H480.175V191.392Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M483.536 191.392H482.13V193.046H483.536V191.392Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M486.836 191.392H485.43V193.046H486.836V191.392Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M490.197 191.392H488.792V193.046H490.197V191.392Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M493.498 191.392H492.092V193.046H493.498V191.392Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M496.859 191.392H495.453V193.046H496.859V191.392Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M500.22 191.392H498.814V193.046H500.22V191.392Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M503.337 191.392H501.931V193.046H503.337V191.392Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M507.493 191.392H506.087V193.046H507.493V191.392Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M480.175 194.646H478.769V196.3H480.175V194.646Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M483.536 194.646H482.13V196.3H483.536V194.646Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M486.836 194.646H485.43V196.3H486.836V194.646Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M490.197 194.646H488.792V196.3H490.197V194.646Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M493.498 194.646H492.092V196.3H493.498V194.646Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M496.859 194.646H495.453V196.3H496.859V194.646Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M500.22 194.646H498.814V196.3H500.22V194.646Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M503.337 194.646H501.931V196.3H503.337V194.646Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M507.493 194.646H506.087V196.3H507.493V194.646Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M480.175 197.954H478.769V199.609H480.175V197.954Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M483.536 197.954H482.13V199.609H483.536V197.954Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M486.836 197.954H485.43V199.609H486.836V197.954Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M490.197 197.954H488.792V199.609H490.197V197.954Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M493.498 197.954H492.092V199.609H493.498V197.954Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M496.859 197.954H495.453V199.609H496.859V197.954Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M500.22 197.954H498.814V199.609H500.22V197.954Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M503.337 197.954H501.931V199.609H503.337V197.954Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M507.493 197.954H506.087V199.609H507.493V197.954Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + 
((path + ~d:(("M480.175 201.263H478.769V202.918H480.175V201.263Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M483.536 201.263H482.13V202.918H483.536V201.263Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M486.836 201.263H485.43V202.918H486.836V201.263Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M490.197 201.263H488.792V202.918H490.197V201.263Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M493.498 201.263H492.092V202.918H493.498V201.263Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M496.859 201.263H495.453V202.918H496.859V201.263Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M500.22 201.263H498.814V202.918H500.22V201.263Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M503.337 201.263H501.931V202.918H503.337V201.263Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M507.493 201.263H506.087V202.918H507.493V201.263Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M480.175 204.517H478.769V206.172H480.175V204.517Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M483.536 204.517H482.13V206.172H483.536V204.517Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M486.836 204.517H485.43V206.172H486.836V204.517Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M490.197 204.517H488.792V206.172H490.197V204.517Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M493.498 204.517H492.092V206.172H493.498V204.517Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M496.859 204.517H495.453V206.172H496.859V204.517Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M500.22 204.517H498.814V206.172H500.22V204.517Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M503.337 204.517H501.931V206.172H503.337V204.517Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M507.493 204.517H506.087V206.172H507.493V204.517Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M480.175 207.826H478.769V209.481H480.175V207.826Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M483.536 207.826H482.13V209.481H483.536V207.826Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M486.836 207.826H485.43V209.481H486.836V207.826Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M490.197 207.826H488.792V209.481H490.197V207.826Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M493.498 207.826H492.092V209.481H493.498V207.826Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M496.859 207.826H495.453V209.481H496.859V207.826Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M500.22 207.826H498.814V209.481H500.22V207.826Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M503.337 207.826H501.931V209.481H503.337V207.826Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M507.493 207.826H506.087V209.481H507.493V207.826Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M480.175 211.135H478.769V212.789H480.175V211.135Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M483.536 211.135H482.13V212.789H483.536V211.135Z") + ) ~fill:(("#E37056")) + 
~children:[] ()) + [@JSX ]); + ((path + ~d:(("M486.836 211.135H485.43V212.789H486.836V211.135Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M490.197 211.135H488.792V212.789H490.197V211.135Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M493.498 211.135H492.092V212.789H493.498V211.135Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M496.859 211.135H495.453V212.789H496.859V211.135Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M500.22 211.135H498.814V212.789H500.22V211.135Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M503.337 211.135H501.931V212.789H503.337V211.135Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M507.493 211.135H506.087V212.789H507.493V211.135Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M480.175 214.389H478.769V216.043H480.175V214.389Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M483.536 214.389H482.13V216.043H483.536V214.389Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M486.836 214.389H485.43V216.043H486.836V214.389Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M490.197 214.389H488.792V216.043H490.197V214.389Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M493.498 214.389H492.092V216.043H493.498V214.389Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M496.859 214.389H495.453V216.043H496.859V214.389Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M500.22 214.389H498.814V216.043H500.22V214.389Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M503.337 214.389H501.931V216.043H503.337V214.389Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M507.493 214.389H506.087V216.043H507.493V214.389Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M480.175 217.698H478.769V219.352H480.175V217.698Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M483.536 217.698H482.13V219.352H483.536V217.698Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M486.836 217.698H485.43V219.352H486.836V217.698Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M490.197 217.698H488.792V219.352H490.197V217.698Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M493.498 217.698H492.092V219.352H493.498V217.698Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M496.859 217.698H495.453V219.352H496.859V217.698Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M500.22 217.698H498.814V219.352H500.22V217.698Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M503.337 217.698H501.931V219.352H503.337V217.698Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M507.493 217.698H506.087V219.352H507.493V217.698Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M360.88 197.844H334.173V203.911H360.88V197.844Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M337.84 199.333H336.312V200.712H337.84V199.333Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M341.384 199.333H339.856V200.712H341.384V199.333Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M344.929 199.333H343.401V200.712H344.929V199.333Z") 
+ ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M348.473 199.333H346.946V200.712H348.473V199.333Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M352.079 199.333H350.551V200.712H352.079V199.333Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M355.624 199.333H354.096V200.712H355.624V199.333Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M359.168 199.333H357.641V200.712H359.168V199.333Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~opacity:(("0.5")) + ~d:(("M368.274 212.459H334.173V215.216H368.274V212.459Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M446.317 186.594H443.811V188.028H446.317V186.594Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M449.617 186.594H447.112V188.028H449.617V186.594Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M452.918 186.594H450.412V188.028H452.918V186.594Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M456.157 186.594H453.651V188.028H456.157V186.594Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M459.457 186.594H456.951V188.028H459.457V186.594Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M462.757 186.594H460.251V188.028H462.757V186.594Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M446.317 188.524H443.811V189.958H446.317V188.524Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M449.617 188.524H447.112V189.958H449.617V188.524Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M452.918 188.524H450.412V189.958H452.918V188.524Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M456.157 188.524H453.651V189.958H456.157V188.524Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M459.457 188.524H456.951V189.958H459.457V188.524Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M462.757 188.524H460.251V189.958H462.757V188.524Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M446.317 190.454H443.811V191.888H446.317V190.454Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M449.617 190.454H447.112V191.888H449.617V190.454Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M452.918 190.454H450.412V191.888H452.918V190.454Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M456.157 190.454H453.651V191.888H456.157V190.454Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M459.457 190.454H456.951V191.888H459.457V190.454Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M462.757 190.454H460.251V191.888H462.757V190.454Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~opacity:(("0.5")) + ~d:(("M465.996 190.454H441.306V192.55H465.996V190.454Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M444.362 195.473H434.767V198.892H444.362V195.473Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M454.69 195.473H445.095V198.892H454.69V195.473Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M464.957 195.473H455.362V198.892H464.957V195.473Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M475.224 195.473H465.629V198.892H475.224V195.473Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + 
[@JSX ]); + ((path + ~d:(("M444.362 199.609H434.767V203.028H444.362V199.609Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M454.69 199.609H445.095V203.028H454.69V199.609Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M464.957 199.609H455.362V203.028H464.957V199.609Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M475.224 199.609H465.629V203.028H475.224V199.609Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M444.362 203.8H434.767V207.219H444.362V203.8Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M454.69 203.8H445.095V207.219H454.69V203.8Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M464.957 203.8H455.362V207.219H464.957V203.8Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M475.224 203.8H465.629V207.219H475.224V203.8Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M444.362 207.936H434.767V211.356H444.362V207.936Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M454.69 207.936H445.095V211.356H454.69V207.936Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M464.957 207.936H455.362V211.356H464.957V207.936Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M475.224 207.936H465.629V211.356H475.224V207.936Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M444.362 212.073H434.767V215.492H444.362V212.073Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M454.69 212.073H445.095V215.492H454.69V212.073Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M464.957 212.073H455.362V215.492H464.957V212.073Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M475.224 212.073H465.629V215.492H475.224V212.073Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M444.362 216.209H434.767V219.628H444.362V216.209Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M454.69 216.209H445.095V219.628H454.69V216.209Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M464.957 216.209H455.362V219.628H464.957V216.209Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M475.224 216.209H465.629V219.628H475.224V216.209Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~opacity:(("0.6")) + ~d:(("M476.141 192.55H467.402V240.474H476.141V192.55Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M569.646 126.647H559.745V134.313H569.646V126.647Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M569.646 130.287H559.745V133.431H569.646V130.287Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M566.468 110.379H564.695V128.853H566.468V110.379Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M573.863 132.383H550.578V240.474H573.863V132.383Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M573.863 131.17H550.578V239.261H573.863V131.17Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M553.023 140.765H551.678V142.034H553.023V140.765Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M556.262 140.765H554.917V142.034H556.262V140.765Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M559.501 140.765H558.156V142.034H559.501V140.765Z") + ) 
~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M562.74 140.765H561.396V142.034H562.74V140.765Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M565.979 140.765H564.634V142.034H565.979V140.765Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M569.157 140.765H567.812V142.034H569.157V140.765Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M572.396 140.765H571.052V142.034H572.396V140.765Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M553.023 143.743H551.678V145.012H553.023V143.743Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M556.262 143.743H554.917V145.012H556.262V143.743Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M559.501 143.743H558.156V145.012H559.501V143.743Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M562.74 143.743H561.396V145.012H562.74V143.743Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M565.979 143.743H564.634V145.012H565.979V143.743Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M569.157 143.743H567.812V145.012H569.157V143.743Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M572.396 143.743H571.052V145.012H572.396V143.743Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M553.023 149.865H551.678V151.133H553.023V149.865Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M556.262 149.865H554.917V151.133H556.262V149.865Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M559.501 149.865H558.156V151.133H559.501V149.865Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M562.74 149.865H561.396V151.133H562.74V149.865Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M565.979 149.865H564.634V151.133H565.979V149.865Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M569.157 149.865H567.812V151.133H569.157V149.865Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M572.396 149.865H571.052V151.133H572.396V149.865Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M553.023 152.843H551.678V154.111H553.023V152.843Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M556.262 152.843H554.917V154.111H556.262V152.843Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M559.501 152.843H558.156V154.111H559.501V152.843Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M562.74 152.843H561.396V154.111H562.74V152.843Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M565.979 152.843H564.634V154.111H565.979V152.843Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M569.157 152.843H567.812V154.111H569.157V152.843Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M572.396 152.843H571.052V154.111H572.396V152.843Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M553.023 158.964H551.678V160.233H553.023V158.964Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M556.262 158.964H554.917V160.233H556.262V158.964Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M559.501 158.964H558.156V160.233H559.501V158.964Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M562.74 
158.964H561.396V160.233H562.74V158.964Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M565.979 158.964H564.634V160.233H565.979V158.964Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M569.157 158.964H567.812V160.233H569.157V158.964Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M572.396 158.964H571.052V160.233H572.396V158.964Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M553.023 161.942H551.678V163.211H553.023V161.942Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M556.262 161.942H554.917V163.211H556.262V161.942Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M559.501 161.942H558.156V163.211H559.501V161.942Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M562.74 161.942H561.396V163.211H562.74V161.942Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M565.979 161.942H564.634V163.211H565.979V161.942Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M569.157 161.942H567.812V163.211H569.157V161.942Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M572.396 161.942H571.052V163.211H572.396V161.942Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M553.023 168.064H551.678V169.332H553.023V168.064Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M556.262 168.064H554.917V169.332H556.262V168.064Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M559.501 168.064H558.156V169.332H559.501V168.064Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M562.74 168.064H561.396V169.332H562.74V168.064Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M565.979 168.064H564.634V169.332H565.979V168.064Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M569.157 168.064H567.812V169.332H569.157V168.064Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M572.396 168.064H571.052V169.332H572.396V168.064Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M553.023 171.042H551.678V172.31H553.023V171.042Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M556.262 171.042H554.917V172.31H556.262V171.042Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M559.501 171.042H558.156V172.31H559.501V171.042Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M562.74 171.042H561.396V172.31H562.74V171.042Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M565.979 171.042H564.634V172.31H565.979V171.042Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M569.157 171.042H567.812V172.31H569.157V171.042Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M572.396 171.042H571.052V172.31H572.396V171.042Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M553.023 177.163H551.678V178.432H553.023V177.163Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M556.262 177.163H554.917V178.432H556.262V177.163Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M559.501 177.163H558.156V178.432H559.501V177.163Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M562.74 177.163H561.396V178.432H562.74V177.163Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + 
((path + ~d:(("M565.979 177.163H564.634V178.432H565.979V177.163Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M569.157 177.163H567.812V178.432H569.157V177.163Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M572.396 177.163H571.052V178.432H572.396V177.163Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M553.023 180.141H551.678V181.41H553.023V180.141Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M556.262 180.141H554.917V181.41H556.262V180.141Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M559.501 180.141H558.156V181.41H559.501V180.141Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M562.74 180.141H561.396V181.41H562.74V180.141Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M565.979 180.141H564.634V181.41H565.979V180.141Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M569.157 180.141H567.812V181.41H569.157V180.141Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M572.396 180.141H571.052V181.41H572.396V180.141Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M553.023 186.263H551.678V187.531H553.023V186.263Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M556.262 186.263H554.917V187.531H556.262V186.263Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M559.501 186.263H558.156V187.531H559.501V186.263Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M562.74 186.263H561.396V187.531H562.74V186.263Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M565.979 186.263H564.634V187.531H565.979V186.263Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M569.157 186.263H567.812V187.531H569.157V186.263Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M572.396 186.263H571.052V187.531H572.396V186.263Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M553.023 189.241H551.678V190.509H553.023V189.241Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M556.262 189.241H554.917V190.509H556.262V189.241Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M559.501 189.241H558.156V190.509H559.501V189.241Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M562.74 189.241H561.396V190.509H562.74V189.241Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M565.979 189.241H564.634V190.509H565.979V189.241Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M569.157 189.241H567.812V190.509H569.157V189.241Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M572.396 189.241H571.052V190.509H572.396V189.241Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M553.023 195.307H551.678V196.576H553.023V195.307Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M556.262 195.307H554.917V196.576H556.262V195.307Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M559.501 195.307H558.156V196.576H559.501V195.307Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M562.74 195.307H561.396V196.576H562.74V195.307Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M565.979 195.307H564.634V196.576H565.979V195.307Z") + ) ~fill:(("#E37056")) + 
~children:[] ()) + [@JSX ]); + ((path + ~d:(("M569.157 195.307H567.812V196.576H569.157V195.307Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M572.396 195.307H571.052V196.576H572.396V195.307Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M553.023 198.341H551.678V199.609H553.023V198.341Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M556.262 198.341H554.917V199.609H556.262V198.341Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M559.501 198.341H558.156V199.609H559.501V198.341Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M562.74 198.341H561.396V199.609H562.74V198.341Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M565.979 198.341H564.634V199.609H565.979V198.341Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M569.157 198.341H567.812V199.609H569.157V198.341Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M572.396 198.341H571.052V199.609H572.396V198.341Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M553.023 204.407H551.678V205.675H553.023V204.407Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M556.262 204.407H554.917V205.675H556.262V204.407Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M559.501 204.407H558.156V205.675H559.501V204.407Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M562.74 204.407H561.396V205.675H562.74V204.407Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M565.979 204.407H564.634V205.675H565.979V204.407Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M569.157 204.407H567.812V205.675H569.157V204.407Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M572.396 204.407H571.052V205.675H572.396V204.407Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M553.023 207.44H551.678V208.708H553.023V207.44Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M556.262 207.44H554.917V208.708H556.262V207.44Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M559.501 207.44H558.156V208.708H559.501V207.44Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M562.74 207.44H561.396V208.708H562.74V207.44Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M565.979 207.44H564.634V208.708H565.979V207.44Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M569.157 207.44H567.812V208.708H569.157V207.44Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M572.396 207.44H571.052V208.708H572.396V207.44Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M553.023 213.506H551.678V214.775H553.023V213.506Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M556.262 213.506H554.917V214.775H556.262V213.506Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M559.501 213.506H558.156V214.775H559.501V213.506Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M562.74 213.506H561.396V214.775H562.74V213.506Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M565.979 213.506H564.634V214.775H565.979V213.506Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M569.157 213.506H567.812V214.775H569.157V213.506Z") + ) 
~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M572.396 213.506H571.052V214.775H572.396V213.506Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M553.023 216.484H551.678V217.753H553.023V216.484Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M556.262 216.484H554.917V217.753H556.262V216.484Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M559.501 216.484H558.156V217.753H559.501V216.484Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M562.74 216.484H561.396V217.753H562.74V216.484Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M565.979 216.484H564.634V217.753H565.979V216.484Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M569.157 216.484H567.812V217.753H569.157V216.484Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M572.396 216.484H571.052V217.753H572.396V216.484Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M553.023 222.606H551.678V223.874H553.023V222.606Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M556.262 222.606H554.917V223.874H556.262V222.606Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M559.501 222.606H558.156V223.874H559.501V222.606Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M562.74 222.606H561.396V223.874H562.74V222.606Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M565.979 222.606H564.634V223.874H565.979V222.606Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M569.157 222.606H567.812V223.874H569.157V222.606Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M572.396 222.606H571.052V223.874H572.396V222.606Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M553.023 225.584H551.678V226.852H553.023V225.584Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M556.262 225.584H554.917V226.852H556.262V225.584Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M559.501 225.584H558.156V226.852H559.501V225.584Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M562.74 225.584H561.396V226.852H562.74V225.584Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M565.979 225.584H564.634V226.852H565.979V225.584Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M569.157 225.584H567.812V226.852H569.157V225.584Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M572.396 225.584H571.052V226.852H572.396V225.584Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M553.023 231.705H551.678V232.974H553.023V231.705Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M556.262 231.705H554.917V232.974H556.262V231.705Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M559.501 231.705H558.156V232.974H559.501V231.705Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M562.74 231.705H561.396V232.974H562.74V231.705Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M565.979 231.705H564.634V232.974H565.979V231.705Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M569.157 231.705H567.812V232.974H569.157V231.705Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M572.396 
231.705H571.052V232.974H572.396V231.705Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M553.023 234.683H551.678V235.952H553.023V234.683Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M556.262 234.683H554.917V235.952H556.262V234.683Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M559.501 234.683H558.156V235.952H559.501V234.683Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M562.74 234.683H561.396V235.952H562.74V234.683Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M565.979 234.683H564.634V235.952H565.979V234.683Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M569.157 234.683H567.812V235.952H569.157V234.683Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M572.396 234.683H571.052V235.952H572.396V234.683Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~opacity:(("0.3")) + ~d:(("M573.863 230.988H550.578V240.474H573.863V230.988Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path ~opacity:(("0.6")) + ~d:(("M562.068 131.17H550.578V240.364H562.068V131.17Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M531.877 158.964L549.05 181.741L530.594 206.282L514.765 181.741L531.877 158.964Z") + ) ~stroke:(("#E37056")) + ~strokeWidth:(("3")) + ~strokeMiterlimit:(("10")) + ~strokeLinecap:(("round")) + ~strokeLinejoin:(("round")) ~children:[] + ()) + [@JSX ]); + ((path + ~d:(("M579.73 161.667H584.986L590.425 143.247L579.73 161.667Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M602.77 161.667H597.575L591.586 143.247L602.77 161.667Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M590.058 165.251H589.386V180.472H590.058V165.251Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M588.897 165.858H588.225V181.079H588.897V165.858Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M587.797 166.41H587.125V181.631H587.797V166.41Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M586.697 167.016H586.025V182.237H586.697V167.016Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M585.536 167.623H584.863V182.844H585.536V167.623Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M584.435 168.174H583.763V183.395H584.435V168.174Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M583.336 168.781H582.663V184.002H583.336V168.781Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M582.174 169.332H581.502V184.553H582.174V169.332Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M593.542 165.251H592.869V180.472H593.542V165.251Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M594.703 165.858H594.03V181.079H594.703V165.858Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M595.803 166.41H595.13V181.631H595.803V166.41Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M596.903 167.016H596.231V182.237H596.903V167.016Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M598.064 167.623H597.392V182.844H598.064V167.623Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M599.164 168.174H598.492V183.395H599.164V168.174Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M600.264 
168.781H599.592V184.002H600.264V168.781Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M601.425 169.332H600.753V184.553H601.425V169.332Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M590.058 182.734H589.386V197.955H590.058V182.734Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M588.897 183.285H588.225V198.506H588.897V183.285Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M587.797 183.892H587.125V199.113H587.797V183.892Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M586.697 184.443H586.025V199.664H586.697V184.443Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M585.536 185.05H584.863V200.271H585.536V185.05Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M584.435 185.656H583.763V200.877H584.435V185.656Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M583.336 186.208H582.663V201.429H583.336V186.208Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M582.174 186.815H581.502V202.036H582.174V186.815Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M593.542 182.734H592.869V197.955H593.542V182.734Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M594.703 183.285H594.03V198.506H594.703V183.285Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M595.803 183.892H595.13V199.113H595.803V183.892Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M596.903 184.443H596.231V199.664H596.903V184.443Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M598.064 185.05H597.392V200.271H598.064V185.05Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M599.164 185.656H598.492V200.877H599.164V185.656Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M600.264 186.208H599.592V201.429H600.264V186.208Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M601.425 186.814H600.753V202.035H601.425V186.814Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M590.058 200.16H589.386V215.381H590.058V200.16Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M588.897 200.767H588.225V215.988H588.897V200.767Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M587.797 201.319H587.125V216.54H587.797V201.319Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M586.697 201.925H586.025V217.146H586.697V201.925Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M585.536 202.532H584.863V217.753H585.536V202.532Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M584.435 203.083H583.763V218.304H584.435V203.083Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M583.336 203.69H582.663V218.911H583.336V203.69Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M582.174 204.241H581.502V219.462H582.174V204.241Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M593.542 200.16H592.869V215.381H593.542V200.16Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M594.703 200.767H594.03V215.988H594.703V200.767Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M595.803 201.318H595.13V216.54H595.803V201.318Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); 
+ ((path + ~d:(("M596.903 201.925H596.231V217.146H596.903V201.925Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M598.064 202.532H597.392V217.753H598.064V202.532Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M599.164 203.083H598.492V218.304H599.164V203.083Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M600.264 203.69H599.592V218.911H600.264V203.69Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M601.425 204.241H600.753V219.462H601.425V204.241Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M590.058 217.643H589.386V232.864H590.058V217.643Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M588.897 218.194H588.225V233.415H588.897V218.194Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M587.797 218.801H587.125V234.022H587.797V218.801Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M586.697 219.407H586.025V234.628H586.697V219.407Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M585.536 219.959H584.863V235.18H585.536V219.959Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M584.435 220.565H583.763V235.786H584.435V220.565Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M583.336 221.117H582.663V236.338H583.336V221.117Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M582.174 221.724H581.502V236.945H582.174V221.724Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M593.542 217.643H592.869V232.864H593.542V217.643Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M594.703 218.194H594.03V233.415H594.703V218.194Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M595.803 218.801H595.13V234.022H595.803V218.801Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M596.903 219.407H596.231V234.628H596.903V219.407Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M598.064 219.959H597.392V235.18H598.064V219.959Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M599.164 220.565H598.492V235.786H599.164V220.565Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M600.264 221.117H599.592V236.338H600.264V221.117Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M601.425 221.724H600.753V236.945H601.425V221.724Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M609.92 134.258H608.331V135.692H609.92V134.258Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M614.565 134.258H612.976V135.692H614.565V134.258Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M619.21 134.258H617.62V135.692H619.21V134.258Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M623.854 134.258H622.265V135.692H623.854V134.258Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M628.499 134.258H626.91V135.692H628.499V134.258Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M633.144 134.258H631.555V135.692H633.144V134.258Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M609.92 137.622H608.331V139.056H609.92V137.622Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M614.565 137.622H612.976V139.056H614.565V137.622Z") + ) ~fill:(("#E37056")) + 
~children:[] ()) + [@JSX ]); + ((path ~d:(("M619.21 137.622H617.62V139.056H619.21V137.622Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M623.854 137.622H622.265V139.056H623.854V137.622Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M628.499 137.622H626.91V139.056H628.499V137.622Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M633.144 137.622H631.555V139.056H633.144V137.622Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M609.92 141.041H608.331V142.475H609.92V141.041Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M614.565 141.041H612.976V142.475H614.565V141.041Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M619.21 141.041H617.62V142.475H619.21V141.041Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M623.854 141.041H622.265V142.475H623.854V141.041Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M628.499 141.041H626.91V142.475H628.499V141.041Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M633.144 141.041H631.555V142.475H633.144V141.041Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M609.92 144.405H608.331V145.839H609.92V144.405Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M614.565 144.405H612.976V145.839H614.565V144.405Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M619.21 144.405H617.62V145.839H619.21V144.405Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M623.854 144.405H622.265V145.839H623.854V144.405Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M628.499 144.405H626.91V145.839H628.499V144.405Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M633.144 144.405H631.555V145.839H633.144V144.405Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M609.92 147.769H608.331V149.203H609.92V147.769Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M614.565 147.769H612.976V149.203H614.565V147.769Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M619.21 147.769H617.62V149.203H619.21V147.769Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M623.854 147.769H622.265V149.203H623.854V147.769Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M628.499 147.769H626.91V149.203H628.499V147.769Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M633.144 147.769H631.555V149.203H633.144V147.769Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M609.92 151.188H608.331V152.622H609.92V151.188Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M614.565 151.188H612.976V152.622H614.565V151.188Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M619.21 151.188H617.62V152.622H619.21V151.188Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M623.854 151.188H622.265V152.622H623.854V151.188Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M628.499 151.188H626.91V152.622H628.499V151.188Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M633.144 151.188H631.555V152.622H633.144V151.188Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M609.92 154.553H608.331V155.986H609.92V154.553Z") + ) ~fill:(("#E37056")) 
+ ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M614.565 154.553H612.976V155.986H614.565V154.553Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M619.21 154.553H617.62V155.986H619.21V154.553Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M623.854 154.553H622.265V155.986H623.854V154.553Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M628.499 154.553H626.91V155.986H628.499V154.553Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M633.144 154.553H631.555V155.986H633.144V154.553Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M609.92 157.972H608.331V159.406H609.92V157.972Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M614.565 157.972H612.976V159.406H614.565V157.972Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M619.21 157.972H617.62V159.406H619.21V157.972Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M623.854 157.972H622.265V159.406H623.854V157.972Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M628.499 157.972H626.91V159.406H628.499V157.972Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M633.144 157.972H631.555V159.406H633.144V157.972Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M609.92 161.336H608.331V162.77H609.92V161.336Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M614.565 161.336H612.976V162.77H614.565V161.336Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M619.21 161.336H617.62V162.77H619.21V161.336Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M623.854 161.336H622.265V162.77H623.854V161.336Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M628.499 161.336H626.91V162.77H628.499V161.336Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M633.144 161.336H631.555V162.77H633.144V161.336Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M609.92 164.7H608.331V166.134H609.92V164.7Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M614.565 164.7H612.976V166.134H614.565V164.7Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M619.21 164.7H617.62V166.134H619.21V164.7Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M623.854 164.7H622.265V166.134H623.854V164.7Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M628.499 164.7H626.91V166.134H628.499V164.7Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M633.144 164.7H631.555V166.134H633.144V164.7Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M609.92 168.119H608.331V169.553H609.92V168.119Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M614.565 168.119H612.976V169.553H614.565V168.119Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M619.21 168.119H617.62V169.553H619.21V168.119Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M623.854 168.119H622.265V169.553H623.854V168.119Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M628.499 168.119H626.91V169.553H628.499V168.119Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M633.144 168.119H631.555V169.553H633.144V168.119Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + 
((path + ~d:(("M609.92 171.483H608.331V172.917H609.92V171.483Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M614.565 171.483H612.976V172.917H614.565V171.483Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M619.21 171.483H617.62V172.917H619.21V171.483Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M623.854 171.483H622.265V172.917H623.854V171.483Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M628.499 171.483H626.91V172.917H628.499V171.483Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M633.144 171.483H631.555V172.917H633.144V171.483Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M609.92 174.902H608.331V176.336H609.92V174.902Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M614.565 174.902H612.976V176.336H614.565V174.902Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M619.21 174.902H617.62V176.336H619.21V174.902Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M623.854 174.902H622.265V176.336H623.854V174.902Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M628.499 174.902H626.91V176.336H628.499V174.902Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M633.144 174.902H631.555V176.336H633.144V174.902Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M609.92 178.266H608.331V179.7H609.92V178.266Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M614.565 178.266H612.976V179.7H614.565V178.266Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M619.21 178.266H617.62V179.7H619.21V178.266Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M623.854 178.266H622.265V179.7H623.854V178.266Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M628.499 178.266H626.91V179.7H628.499V178.266Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M633.144 178.266H631.555V179.7H633.144V178.266Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M609.92 181.63H608.331V183.064H609.92V181.63Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M614.565 181.63H612.976V183.064H614.565V181.63Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M619.21 181.63H617.62V183.064H619.21V181.63Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M623.854 181.63H622.265V183.064H623.854V181.63Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M628.499 181.63H626.91V183.064H628.499V181.63Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M633.144 181.63H631.555V183.064H633.144V181.63Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M609.92 185.05H608.331V186.484H609.92V185.05Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M614.565 185.05H612.976V186.484H614.565V185.05Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M619.21 185.05H617.62V186.484H619.21V185.05Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M623.854 185.05H622.265V186.484H623.854V185.05Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M628.499 185.05H626.91V186.484H628.499V185.05Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M633.144 
185.05H631.555V186.484H633.144V185.05Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M609.92 188.414H608.331V189.848H609.92V188.414Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M614.565 188.414H612.976V189.848H614.565V188.414Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M619.21 188.414H617.62V189.848H619.21V188.414Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M623.854 188.414H622.265V189.848H623.854V188.414Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M628.499 188.414H626.91V189.848H628.499V188.414Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M633.144 188.414H631.555V189.848H633.144V188.414Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M609.92 191.833H608.331V193.267H609.92V191.833Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M614.565 191.833H612.976V193.267H614.565V191.833Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M619.21 191.833H617.62V193.267H619.21V191.833Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M623.854 191.833H622.265V193.267H623.854V191.833Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M628.499 191.833H626.91V193.267H628.499V191.833Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M633.144 191.833H631.555V193.267H633.144V191.833Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M609.92 195.197H608.331V196.631H609.92V195.197Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M614.565 195.197H612.976V196.631H614.565V195.197Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M619.21 195.197H617.62V196.631H619.21V195.197Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M623.854 195.197H622.265V196.631H623.854V195.197Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M628.499 195.197H626.91V196.631H628.499V195.197Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M633.144 195.197H631.555V196.631H633.144V195.197Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M609.92 198.616H608.331V200.05H609.92V198.616Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M614.565 198.616H612.976V200.05H614.565V198.616Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M619.21 198.616H617.62V200.05H619.21V198.616Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M623.854 198.616H622.265V200.05H623.854V198.616Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M628.499 198.616H626.91V200.05H628.499V198.616Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M633.144 198.616H631.555V200.05H633.144V198.616Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M609.92 201.925H608.331V203.359H609.92V201.925Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M614.565 201.925H612.976V203.359H614.565V201.925Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M619.21 201.925H617.62V203.359H619.21V201.925Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M623.854 201.925H622.265V203.359H623.854V201.925Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M628.499 
201.925H626.91V203.359H628.499V201.925Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M633.144 201.925H631.555V203.359H633.144V201.925Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M609.92 205.344H608.331V206.778H609.92V205.344Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M614.565 205.344H612.976V206.778H614.565V205.344Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M619.21 205.344H617.62V206.778H619.21V205.344Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M623.854 205.344H622.265V206.778H623.854V205.344Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M628.499 205.344H626.91V206.778H628.499V205.344Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M633.144 205.344H631.555V206.778H633.144V205.344Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M609.92 208.708H608.331V210.142H609.92V208.708Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M614.565 208.708H612.976V210.142H614.565V208.708Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M619.21 208.708H617.62V210.142H619.21V208.708Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M623.854 208.708H622.265V210.142H623.854V208.708Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M628.499 208.708H626.91V210.142H628.499V208.708Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M633.144 208.708H631.555V210.142H633.144V208.708Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M609.92 212.073H608.331V213.506H609.92V212.073Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M614.565 212.073H612.976V213.506H614.565V212.073Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M619.21 212.073H617.62V213.506H619.21V212.073Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M623.854 212.073H622.265V213.506H623.854V212.073Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M628.499 212.073H626.91V213.506H628.499V212.073Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M633.144 212.073H631.555V213.506H633.144V212.073Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M609.92 215.492H608.331V216.926H609.92V215.492Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M614.565 215.492H612.976V216.926H614.565V215.492Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M619.21 215.492H617.62V216.926H619.21V215.492Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M623.854 215.492H622.265V216.926H623.854V215.492Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M628.499 215.492H626.91V216.926H628.499V215.492Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M633.144 215.492H631.555V216.926H633.144V215.492Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M609.92 218.856H608.331V220.29H609.92V218.856Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M614.565 218.856H612.976V220.29H614.565V218.856Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M619.21 218.856H617.62V220.29H619.21V218.856Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M623.854 
218.856H622.265V220.29H623.854V218.856Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M628.499 218.856H626.91V220.29H628.499V218.856Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M633.144 218.856H631.555V220.29H633.144V218.856Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M609.92 222.275H608.331V223.709H609.92V222.275Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M614.565 222.275H612.976V223.709H614.565V222.275Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M619.21 222.275H617.62V223.709H619.21V222.275Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M623.854 222.275H622.265V223.709H623.854V222.275Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M628.499 222.275H626.91V223.709H628.499V222.275Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M633.144 222.275H631.555V223.709H633.144V222.275Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M609.92 225.639H608.331V227.073H609.92V225.639Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M614.565 225.639H612.976V227.073H614.565V225.639Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M619.21 225.639H617.62V227.073H619.21V225.639Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M623.854 225.639H622.265V227.073H623.854V225.639Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M628.499 225.639H626.91V227.073H628.499V225.639Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M633.144 225.639H631.555V227.073H633.144V225.639Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M609.92 229.003H608.331V230.437H609.92V229.003Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M614.565 229.003H612.976V230.437H614.565V229.003Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M619.21 229.003H617.62V230.437H619.21V229.003Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M623.854 229.003H622.265V230.437H623.854V229.003Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M628.499 229.003H626.91V230.437H628.499V229.003Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M633.144 229.003H631.555V230.437H633.144V229.003Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M609.92 232.422H608.331V233.856H609.92V232.422Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M614.565 232.422H612.976V233.856H614.565V232.422Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~d:(("M619.21 232.422H617.62V233.856H619.21V232.422Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M623.854 232.422H622.265V233.856H623.854V232.422Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M628.499 232.422H626.91V233.856H628.499V232.422Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M633.144 232.422H631.555V233.856H633.144V232.422Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~opacity:(("0.7")) + ~d:(("M635.894 230.713H605.581V240.474H635.894V230.713Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path ~className:(("HeroGraphic-cloudRight") + ) ~opacity:(("0.7")) + ~d:(("M609.126 61.903H757.878C757.878 61.903 760.078 
50.3218 744.127 48.3364C744.127 48.3364 742.232 27.7108 719.742 20.3761C697.252 13.0413 686.924 24.3468 686.924 24.3468C686.924 24.3468 680.018 16.8466 670.973 17.8944C661.928 18.9422 658.139 29.4205 658.139 29.4205C658.139 29.4205 652.822 26.0564 647.2 28.869C641.577 31.6815 641.883 35.3765 641.883 35.3765C641.883 35.3765 635.405 33.9426 630.821 33.6669C626.238 33.3912 604.237 34.825 609.126 61.903Z") + ) ~fill:(("white")) + ~children:[] ()) + [@JSX ]); + ((path ~opacity:(("0.5")) + ~d:(("M406.715 74.3666H472.413C472.413 74.3666 472.169 68.2451 465.079 68.2451C465.079 68.2451 463.063 64.8258 452.918 62.7302C442.773 60.6897 432.139 65.4876 432.139 65.4876C432.139 65.4876 429.45 63.8883 426.455 63.8883C423.461 63.8883 422.422 67.3075 422.422 67.3075C422.422 67.3075 406.715 64.881 406.715 74.3666Z") + ) ~fill:(("white")) + ~children:[] ()) + [@JSX ]); + ((path ~opacity:(("0.5")) + ~d:(("M233.09 126.647H283.02C283.02 126.647 277.581 121.022 267.192 117.493C256.802 113.963 250.141 117.493 250.141 117.493C250.141 117.493 247.207 114.68 242.807 117.493C238.407 120.305 239.385 121.298 239.385 121.298C239.385 121.298 231.99 120.36 233.09 126.647Z") + ) ~fill:(("white")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M173.992 45.193C173.992 39.0715 178.392 33.9427 184.504 32.1779C183.037 31.7367 181.509 31.5161 179.859 31.5161C171.487 31.5161 164.703 37.6376 164.703 45.193C164.703 52.7483 171.487 58.8698 179.859 58.8698C181.509 58.8698 183.037 58.6492 184.504 58.208C178.454 56.4433 173.992 51.3145 173.992 45.193Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M245.863 68.4656L246.474 69.5686L247.818 69.734L246.84 70.5613L247.085 71.7194L245.863 71.1679L244.701 71.7194L244.946 70.5613L243.968 69.734L245.313 69.5686L245.863 68.4656Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M334.173 43.2627L334.784 44.4208L336.189 44.5863L335.15 45.5238L335.395 46.7371L334.173 46.1856L332.889 46.7371L333.134 45.5238L332.156 44.5863L333.561 44.4208L334.173 43.2627Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M377.136 23.9056L377.686 24.9534L378.969 25.1188L378.053 25.8909L378.236 27.0491L377.136 26.4976L376.036 27.0491L376.219 25.8909L375.303 25.1188L376.586 24.9534L377.136 23.9056Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M498.815 134.644L499.365 135.692L500.587 135.857L499.67 136.629L499.915 137.732L498.815 137.236L497.653 137.732L497.898 136.629L496.981 135.857L498.203 135.692L498.815 134.644Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M354.096 0.90863L354.646 1.90131L355.868 2.06675L355.013 2.89398L355.196 3.99695L354.096 3.50061L352.935 3.99695L353.179 2.89398L352.263 2.06675L353.546 1.90131L354.096 0.90863Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M160.914 17.9495L161.464 18.9422L162.747 19.1076L161.831 19.9349L162.075 21.0378L160.914 20.4864L159.814 21.0378L159.997 19.9349L159.142 19.1076L160.364 18.9422L160.914 17.9495Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M40.8243 164.314L41.3743 165.362L42.6577 165.527L41.741 166.299L41.9244 167.457L40.8243 166.906L39.7243 167.457L39.9076 166.299L38.9909 165.527L40.2743 165.362L40.8243 164.314Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M380.497 53.9064L381.108 55.0093L382.453 55.1748L381.475 56.002L381.658 57.2153L380.497 56.6638L379.275 57.2153L379.519 56.002L378.542 55.1748L379.886 55.0093L380.497 53.9064Z") + ) 
~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M216.161 102.272L216.833 103.54L218.361 103.706L217.261 104.643L217.506 106.022L216.161 105.36L214.817 106.022L215.122 104.643L214.022 103.706L215.489 103.54L216.161 102.272Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M97.6606 109.993L98.1495 110.985L99.3718 111.151L98.5162 111.923L98.6995 112.971L97.6606 112.474L96.5605 112.971L96.7439 111.923L95.8883 111.151L97.1105 110.985L97.6606 109.993Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M90.9991 146.28L91.5491 147.273L92.7714 147.438L91.8547 148.21L92.0992 149.258L90.9991 148.762L89.9602 149.258L90.1435 148.21L89.2879 147.438L90.4491 147.273L90.9991 146.28Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M105.239 55.7263L105.728 56.6087L106.828 56.7741L106.033 57.4359L106.217 58.4286L105.239 57.9323L104.261 58.4286L104.444 57.4359L103.711 56.7741L104.75 56.6087L105.239 55.7263Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M115.689 19.4937L116.3 20.5415L117.584 20.7069L116.606 21.5342L116.85 22.6371L115.689 22.0856L114.589 22.6371L114.772 21.5342L113.856 20.7069L115.139 20.5415L115.689 19.4937Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M486.164 101.831L486.347 102.217L486.836 102.272L486.469 102.603L486.592 102.989L486.164 102.823L485.736 102.989L485.797 102.603L485.492 102.272L485.981 102.217L486.164 101.831Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M418.999 18.3907L419.305 18.887L419.977 18.9973L419.488 19.4385L419.61 19.99L418.999 19.7143L418.449 19.99L418.51 19.4385L418.082 18.9973L418.694 18.887L418.999 18.3907Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M558.156 67.9141L558.89 69.2377L560.418 69.4031L559.318 70.451L559.562 71.8848L558.156 71.1679L556.751 71.8848L556.995 70.451L555.834 69.4031L557.423 69.2377L558.156 67.9141Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~className:(("HeroGraphic-starCenterLeft") + ) + ~d:(("M380.497 100.893L381.17 102.161L382.759 102.382L381.658 103.375L381.903 104.809L380.497 104.147L379.092 104.809L379.336 103.375L378.175 102.382L379.764 102.161L380.497 100.893Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~className:(("HeroGraphic-startRight") + ) + ~d:(("M160.914 73.9805L161.647 75.2489L163.236 75.4695L162.075 76.4621L162.319 77.896L160.914 77.2342L159.508 77.896L159.814 76.4621L158.653 75.4695L160.242 75.2489L160.914 73.9805Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M348.473 94.3854L349.146 95.709L350.735 95.8744L349.635 96.9223L349.879 98.3561L348.473 97.6392L347.068 98.3561L347.312 96.9223L346.151 95.8744L347.74 95.709L348.473 94.3854Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M527.783 46.7922L528.271 47.7849L529.494 47.9504L528.638 48.7224L528.822 49.7703L527.783 49.2739L526.682 49.7703L526.866 48.7224L526.01 47.9504L527.233 47.7849L527.783 46.7922Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~className:(("HeroGraphic-starCenterLeft") + ) + ~d:(("M531.877 90.9662L532.427 91.9589L533.649 92.1244L532.733 92.8964L532.977 93.9443L531.877 93.4479L530.838 93.9443L531.022 92.8964L530.166 92.1244L531.388 91.9589L531.877 90.9662Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M632.349 87.8779L632.716 88.4846L633.449 88.5949L632.899 89.0912L633.021 89.753L632.349 
89.4221L631.677 89.753L631.799 89.0912L631.249 88.5949L632.043 88.4846L632.349 87.8779Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M683.196 143.357L683.991 144.846L685.824 145.067L684.48 146.225L684.785 147.824L683.196 147.052L681.607 147.824L681.913 146.225L680.629 145.067L682.402 144.846L683.196 143.357Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~className:(("HeroGraphic-starRight") + ) + ~d:(("M669.873 94.3854L671.095 96.7017L673.907 97.0326L671.89 98.7973L672.379 101.334L669.873 100.121L667.368 101.334L667.856 98.7973L665.84 97.0326L668.59 96.7017L669.873 94.3854Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path ~className:(("HeroGraphic-starCenter") + ) + ~d:(("M261.997 28.0417L263.28 30.3028L266.03 30.6888L264.014 32.4536L264.502 34.9904L261.997 33.7771L259.552 34.9904L259.98 32.4536L257.963 30.6888L260.774 30.3028L261.997 28.0417Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M608.331 26.0012L608.698 26.6079L609.431 26.7182L608.881 27.2145L609.003 27.8763L608.331 27.6005L607.659 27.8763L607.781 27.2145L607.231 26.7182L607.964 26.6079L608.331 26.0012Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M656.123 163.211L656.489 163.873L657.284 163.983L656.673 164.479L656.856 165.141L656.123 164.81L655.45 165.141L655.572 164.479L655.022 163.983L655.817 163.873L656.123 163.211Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M497.959 92.4553L498.326 93.117L499.12 93.2273L498.509 93.7237L498.692 94.3855L497.959 94.0546L497.287 94.3855L497.409 93.7237L496.859 93.2273L497.653 93.117L497.959 92.4553Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M506.087 78.8887L506.393 79.4953L507.187 79.6056L506.637 80.1019L506.759 80.8189L506.087 80.488L505.354 80.8189L505.476 80.1019L504.926 79.6056L505.72 79.4953L506.087 78.8887Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M546.239 110.048L546.973 111.426L548.745 111.647L547.461 112.75L547.767 114.294L546.239 113.577L544.711 114.294L544.956 112.75L543.733 111.647L545.445 111.426L546.239 110.048Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M481.947 38.52L482.436 39.4023L483.536 39.5678L482.742 40.2847L482.925 41.2222L481.947 40.7811L480.969 41.2222L481.153 40.2847L480.358 39.5678L481.458 39.4023L481.947 38.52Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M304.349 67.142L304.838 68.0244L305.938 68.1347L305.143 68.8516L305.327 69.8443L304.349 69.348L303.371 69.8443L303.554 68.8516L302.76 68.1347L303.86 68.0244L304.349 67.142Z") + ) ~fill:(("#E37056")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M643.105 244.279C643.105 246.43 641.15 248.195 638.766 248.195H90.0824C87.699 248.195 85.7433 246.43 85.7433 244.279C85.7433 242.129 87.699 240.364 90.0824 240.364H638.766C641.211 240.364 643.105 242.129 643.105 244.279Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M643.105 244.22C643.105 246.371 641.15 248.135 638.766 248.135H90.0824C87.699 248.135 85.7433 246.371 85.7433 244.22C85.7433 242.069 87.699 240.304 90.0824 240.304H638.766C641.211 240.304 643.105 242.069 643.105 244.22Z") + ) ~fill:(("#2484C6")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M124.612 231.485C124.612 231.485 123.817 225.86 127.118 223.985C130.418 222.165 131.64 222.165 131.64 222.165C131.64 222.165 130.54 218.911 133.351 217.477C136.163 216.043 137.446 217.477 137.446 217.477C137.446 217.477 
137.752 212.128 141.541 210.97C145.269 209.867 146.246 210.97 146.246 210.97C146.246 210.97 147.958 203.58 153.458 202.697C158.958 201.815 158.958 203.635 158.958 203.635C158.958 203.635 159.569 202.532 162.686 202.422C165.803 202.311 165.681 204.021 165.681 204.021C165.681 204.021 167.697 203.028 170.387 203.58C173.076 204.131 173.076 206.558 173.076 206.558C173.076 206.558 174.298 204.738 179.676 208.543C184.993 212.348 184.198 218.856 184.198 218.856C184.198 218.856 188.293 219.021 188.721 220.676C189.21 222.385 188.11 223.929 188.11 223.929C188.11 223.929 192.938 223.488 193.427 226.025C193.916 228.562 192.815 229.83 192.815 229.83C192.815 229.83 194.527 230.823 194.71 232.919C194.893 235.014 194.71 236.007 194.71 236.007C194.71 236.007 196.299 236.448 196.605 239.371C196.91 242.349 195.504 244.445 195.504 244.445C195.504 244.445 195.504 247.643 191.899 248.195C188.293 248.801 119.112 248.195 119.112 248.195C119.112 248.195 117.095 245.272 118.317 243.728C119.539 242.184 120.334 242.184 120.334 242.184C120.334 242.184 120.028 240.198 120.028 239.757C120.028 239.316 122.351 238.489 122.351 238.489C122.351 238.489 120.762 237.386 121.556 236.393C122.351 235.4 123.451 234.959 123.451 234.959C123.451 234.959 120.578 232.367 124.612 231.485Z") + ) ~fill:(("#0D1522")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M191.654 238.599C191.654 238.599 191.776 232.919 195.443 232.367C199.11 231.816 199.416 232.367 199.416 232.367C199.416 232.367 199.721 230.327 201.433 229.996C203.144 229.61 203.572 229.996 203.572 229.996C203.572 229.996 204.61 226.852 207.85 226.08C211.089 225.308 212.739 229.389 212.739 229.389C212.739 229.389 214.694 228.562 216.283 229.113C217.872 229.665 217.261 231.209 217.261 231.209C217.261 231.209 218.545 230.823 218.85 231.761C219.156 232.698 218.85 233.139 218.85 233.139C218.85 233.139 220.5 232.422 221.417 234.132C222.334 235.897 222.028 236.889 222.028 236.889C222.028 236.889 223.556 235.897 224.045 237.441C224.534 238.985 224.534 239.537 224.534 239.537C224.534 239.537 227.773 238.709 228.017 239.978C228.262 241.246 226.062 242.459 226.062 242.459C226.062 242.459 227.467 243.452 227.467 244.665C227.467 245.879 226.734 246.044 226.734 246.32C226.734 246.596 227.223 247.588 227.162 248.36C227.039 249.077 189.271 248.36 189.271 248.36L191.654 238.599Z") + ) ~fill:(("#0D1522")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M247.329 238.599C247.329 238.599 247.452 232.919 251.118 232.367C254.785 231.816 255.091 232.367 255.091 232.367C255.091 232.367 255.396 230.327 257.108 229.996C258.819 229.61 259.247 229.996 259.247 229.996C259.247 229.996 260.286 226.852 263.525 226.08C266.764 225.308 268.414 229.389 268.414 229.389C268.414 229.389 270.369 228.562 271.958 229.113C273.547 229.665 272.936 231.209 272.936 231.209C272.936 231.209 274.22 230.823 274.525 231.761C274.831 232.698 274.525 233.139 274.525 233.139C274.525 233.139 276.175 232.422 277.092 234.132C278.009 235.897 277.703 236.889 277.703 236.889C277.703 236.889 279.231 235.897 279.72 237.441C280.209 238.985 280.209 239.537 280.209 239.537C280.209 239.537 283.448 238.709 283.692 239.978C283.937 241.246 281.737 242.459 281.737 242.459C281.737 242.459 283.142 243.452 283.142 244.665C283.142 245.879 282.409 246.044 282.409 246.32C282.409 246.596 282.898 247.588 282.837 248.36C282.714 249.077 244.946 248.36 244.946 248.36L247.329 238.599Z") + ) ~fill:(("#0D1522")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M222.089 241.687C222.089 241.687 219.828 233.967 220.989 232.202C222.089 230.437 222.089 230.437 222.089 
230.437C222.089 230.437 220.989 228.617 222.089 227.955C223.189 227.294 223.189 227.294 223.189 227.294C223.189 227.294 223.189 224.702 224.839 224.095C226.489 223.433 226.978 224.095 226.978 224.095C226.978 224.095 227.101 220.124 228.69 219.959C230.34 219.793 230.523 220.676 230.523 220.676C230.523 220.676 230.34 218.029 232.54 217.091C234.801 216.154 236.512 218.47 236.512 218.47C236.512 218.47 239.14 218.58 239.568 220.29C239.996 222.054 239.568 223.047 239.568 223.047C239.568 223.047 241.707 223.654 241.401 225.915C241.096 228.176 241.096 228.176 241.096 228.176C241.096 228.176 244.946 226.687 245.985 228.176C247.024 229.665 245.985 230.547 245.985 230.547C245.985 230.547 247.696 230.382 247.696 231.926C247.696 233.47 247.696 234.297 247.696 234.297C247.696 234.297 249.713 233.856 249.957 235.566C250.141 237.331 249.041 238.489 249.041 238.489C249.041 238.489 250.08 239.867 249.957 241.081C249.835 242.294 249.102 242.901 249.102 242.901C249.102 242.901 251.057 242.625 251.546 244.445C252.035 246.265 251.057 246.926 251.057 246.926L251.73 248.526H223.8L222.089 241.687Z") + ) ~fill:(("#0D1522")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M488.547 236.889C488.547 236.889 487.997 233.029 490.258 231.761C492.52 230.492 493.375 230.492 493.375 230.492C493.375 230.492 492.642 228.286 494.536 227.294C496.492 226.301 497.348 227.294 497.348 227.294C497.348 227.294 497.592 223.599 500.159 222.827C502.726 222.054 503.398 222.827 503.398 222.827C503.398 222.827 504.559 217.753 508.348 217.146C512.137 216.54 512.137 217.808 512.137 217.808C512.137 217.808 512.565 217.036 514.704 216.981C516.843 216.926 516.721 218.084 516.721 218.084C516.721 218.084 518.127 217.422 519.96 217.808C521.793 218.194 521.793 219.849 521.793 219.849C521.793 219.849 522.649 218.58 526.316 221.227C529.983 223.819 529.433 228.286 529.433 228.286C529.433 228.286 532.244 228.397 532.55 229.555C532.855 230.713 532.122 231.761 532.122 231.761C532.122 231.761 535.483 231.485 535.789 233.194C536.094 234.959 535.361 235.786 535.361 235.786C535.361 235.786 536.522 236.448 536.644 237.937C536.766 239.371 536.644 240.088 536.644 240.088C536.644 240.088 537.744 240.364 537.928 242.404C538.172 244.445 537.194 245.879 537.194 245.879C537.194 245.879 537.194 248.085 534.75 248.471C532.305 248.912 484.758 248.471 484.758 248.471C484.758 248.471 483.352 246.485 484.208 245.382C485.064 244.334 485.614 244.334 485.614 244.334C485.614 244.334 485.369 242.956 485.369 242.68C485.369 242.404 486.958 241.798 486.958 241.798C486.958 241.798 485.858 241.026 486.408 240.364C486.958 239.702 487.692 239.371 487.692 239.371C487.692 239.371 485.797 237.441 488.547 236.889Z") + ) ~fill:(("#0D1522")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M533.038 238.599C533.038 238.599 533.161 232.919 536.827 232.367C540.494 231.816 540.8 232.367 540.8 232.367C540.8 232.367 541.105 230.327 542.817 229.996C544.528 229.61 544.956 229.996 544.956 229.996C544.956 229.996 545.995 226.852 549.234 226.08C552.473 225.308 554.123 229.389 554.123 229.389C554.123 229.389 556.078 228.562 557.667 229.113C559.318 229.665 558.645 231.209 558.645 231.209C558.645 231.209 559.929 230.823 560.234 231.761C560.54 232.698 560.234 233.139 560.234 233.139C560.234 233.139 561.884 232.422 562.801 234.132C563.718 235.897 563.412 236.889 563.412 236.889C563.412 236.889 564.94 235.897 565.429 237.441C565.918 238.985 565.918 239.537 565.918 239.537C565.918 239.537 569.157 238.709 569.401 239.978C569.585 241.246 567.446 242.459 567.446 242.459C567.446 242.459 568.851 243.452 568.851 
244.665C568.851 245.879 568.118 246.044 568.118 246.32C568.118 246.596 568.607 247.588 568.546 248.36C568.424 249.077 530.655 248.36 530.655 248.36L533.038 238.599Z") + ) ~fill:(("#0D1522")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M588.713 238.599C588.713 238.599 588.836 232.919 592.503 232.367C596.169 231.816 596.475 232.367 596.475 232.367C596.475 232.367 596.781 230.327 598.492 229.996C600.203 229.61 600.631 229.996 600.631 229.996C600.631 229.996 601.67 226.852 604.909 226.08C608.148 225.308 609.798 229.389 609.798 229.389C609.798 229.389 611.754 228.562 613.343 229.113C614.993 229.665 614.32 231.209 614.32 231.209C614.32 231.209 615.604 230.823 615.909 231.761C616.215 232.698 615.909 233.139 615.909 233.139C615.909 233.139 617.559 232.422 618.476 234.132C619.393 235.897 619.087 236.889 619.087 236.889C619.087 236.889 620.615 235.897 621.104 237.441C621.593 238.985 621.593 239.537 621.593 239.537C621.593 239.537 624.832 238.709 625.077 239.978C625.26 241.246 623.121 242.459 623.121 242.459C623.121 242.459 624.527 243.452 624.527 244.665C624.527 245.879 623.793 246.044 623.793 246.32C623.793 246.596 624.282 247.588 624.221 248.36C624.099 249.077 586.33 248.36 586.33 248.36L588.713 238.599Z") + ) ~fill:(("#0D1522")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M490.808 240.805C490.808 240.805 490.747 236.338 487.875 235.897C485.003 235.456 484.758 235.897 484.758 235.897C484.758 235.897 484.514 234.297 483.169 234.022C481.825 233.746 481.519 234.022 481.519 234.022C481.519 234.022 480.725 231.54 478.158 230.933C475.591 230.327 474.308 233.525 474.308 233.525C474.308 233.525 472.78 232.864 471.496 233.305C470.213 233.746 470.702 234.959 470.702 234.959C470.702 234.959 469.724 234.684 469.48 235.4C469.235 236.117 469.48 236.503 469.48 236.503C469.48 236.503 468.196 235.952 467.463 237.276C466.729 238.654 466.974 239.426 466.974 239.426C466.974 239.426 465.752 238.654 465.385 239.868C464.957 241.081 464.957 241.522 464.957 241.522C464.957 241.522 462.39 240.86 462.268 241.908C462.085 242.901 463.796 243.838 463.796 243.838C463.796 243.838 462.696 244.61 462.696 245.548C462.696 246.485 463.246 246.651 463.246 246.816C463.246 246.982 462.818 247.809 462.94 248.416C463.001 248.967 492.581 248.416 492.581 248.416L490.808 240.805Z") + ) ~fill:(("#0D1522")) + ~children:[] ()) + [@JSX ]); + ((path + ~d:(("M563.473 241.687C563.473 241.687 561.212 233.967 562.373 232.202C563.473 230.437 563.473 230.437 563.473 230.437C563.473 230.437 562.373 228.617 563.473 227.955C564.573 227.294 564.573 227.294 564.573 227.294C564.573 227.294 564.573 224.702 566.223 224.095C567.874 223.433 568.362 224.095 568.362 224.095C568.362 224.095 568.485 220.124 570.074 219.959C571.724 219.793 571.907 220.676 571.907 220.676C571.907 220.676 571.724 218.029 573.924 217.091C576.185 216.154 577.896 218.47 577.896 218.47C577.896 218.47 580.524 218.58 580.952 220.29C581.38 222.054 580.952 223.047 580.952 223.047C580.952 223.047 583.091 223.654 582.785 225.915C582.48 228.176 582.48 228.176 582.48 228.176C582.48 228.176 586.33 226.687 587.369 228.176C588.408 229.665 587.369 230.547 587.369 230.547C587.369 230.547 589.08 230.382 589.08 231.926C589.08 233.47 589.08 234.297 589.08 234.297C589.08 234.297 591.097 233.856 591.341 235.566C591.525 237.331 590.425 238.489 590.425 238.489C590.425 238.489 591.464 239.867 591.341 241.081C591.219 242.294 590.486 242.901 590.486 242.901C590.486 242.901 592.442 242.625 592.93 244.445C593.419 246.265 592.442 246.926 592.442 246.926L593.114 248.526H565.184L563.473 241.687Z") + ) 
~fill:(("#0D1522")) + ~children:[] ()) + [@JSX ]); + ((defs + ~children:[((linearGradient ~id:(("paint0_linear") + ) ~x1:(("374.5") + ) ~y1:(("180.64") + ) ~x2:(("362.765") + ) ~y2:(("435.722") + ) + ~gradientUnits:(("userSpaceOnUse") + ) + ~children:[((stop ~offset:(("0.0658436") + ) + ~stopColor:(("#3A7DDD") + ) + ~children:[] ()) + [@JSX ]); + ((stop ~offset:(("0.4001") + ) + ~stopColor:(("#265291") + ) + ~children:[] ()) + [@JSX ]); + ((stop ~offset:(("0.571") + ) + ~stopColor:(("#1D3E6E") + ) + ~children:[] ()) + [@JSX ]); + ((stop ~offset:(("0.7224") + ) + ~stopColor:(("#173156") + ) + ~children:[] ()) + [@JSX ]); + ((stop ~offset:(("0.8486") + ) + ~stopColor:(("#10213A") + ) + ~children:[] ()) + [@JSX ]); + ((stop ~offset:(("0.9342") + ) + ~stopColor:(("#091321") + ) + ~children:[] ()) + [@JSX ])] ()) + [@JSX ])] ()) + [@JSX ])] ()) + [@JSX ])[@@react.component ] \ No newline at end of file diff --git a/res_syntax/benchmarks/data/HeroGraphic.res b/res_syntax/benchmarks/data/HeroGraphic.res new file mode 100644 index 0000000000..53decfa8a0 --- /dev/null +++ b/res_syntax/benchmarks/data/HeroGraphic.res @@ -0,0 +1,2009 @@ +%bs.raw(`require('./HeroGraphic.css')`) + +@react.component +let make = (~width="760", ~height="380") => + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/res_syntax/benchmarks/data/Napkinscript.ml b/res_syntax/benchmarks/data/Napkinscript.ml new file mode 100644 index 0000000000..f36dd9af5f --- /dev/null +++ b/res_syntax/benchmarks/data/Napkinscript.ml @@ -0,0 +1,19541 @@ +module MiniBuffer : sig + type t + val add_char : t -> char -> unit + val add_string : t -> string -> unit + val contents : t -> string + val create : int -> t + val flush_newline : t -> unit + val length : t -> int + val unsafe_get : t -> int -> char +end = struct + type t = { + mutable buffer : bytes; + mutable position : int; + mutable length : int; + } + + let create n = + let n = if n < 1 then 1 else n in + let s = (Bytes.create [@doesNotRaise]) n in + {buffer = s; position = 0; length = n} + + let contents b = Bytes.sub_string b.buffer 0 b.position + + let unsafe_get b ofs = + Bytes.unsafe_get b.buffer ofs + + let length b = b.position + + (* Can't be called directly, don't add to the interface *) + let resize_internal b more = + let len = b.length in + let new_len = ref len in + while b.position + more > !new_len do new_len := 2 * !new_len done; + if !new_len > Sys.max_string_length then begin + if b.position + more <= Sys.max_string_length + then new_len := Sys.max_string_length + end; + let new_buffer = (Bytes.create [@doesNotRaise]) !new_len in + (* PR#6148: let's keep using [blit] rather than [unsafe_blit] in + this tricky function that is slow anyway. *) + Bytes.blit b.buffer 0 new_buffer 0 b.position [@doesNotRaise]; + b.buffer <- new_buffer; + b.length <- !new_len + + let add_char b c = + let pos = b.position in + if pos >= b.length then resize_internal b 1; + Bytes.unsafe_set b.buffer pos c; + b.position <- pos + 1 + + let add_string b s = + let len = String.length s in + let new_position = b.position + len in + if new_position > b.length then resize_internal b len; + Bytes.blit_string s 0 b.buffer b.position len [@doesNotRaise]; + b.position <- new_position + + (* adds newline and trims all preceding whitespace *) + let flush_newline b = + let position = ref (b.position) in + while (Bytes.unsafe_get b.buffer (!position - 1)) = ' ' && !position >= 0 do + position := !position - 1; + done; + b.position <- !position; + add_char b '\n' +end + +module Doc = struct + type mode = Break | Flat + + type lineStyle = + | Classic (* fits? 
-> replace with space *) + | Soft (* fits? -> replaced with nothing *) + | Hard (* always included, forces breaks in parents *) + + type t = + | Nil + | Text of string + | Concat of t list + | Indent of t + | IfBreaks of {yes: t; no: t} + | LineSuffix of t + | LineBreak of lineStyle + | Group of {shouldBreak: bool; doc: t} + | CustomLayout of t list + | BreakParent + (* | Cursor *) + + let nil = Nil + let line = LineBreak Classic + let hardLine = LineBreak Hard + let softLine = LineBreak Soft + let text s = Text s + let concat l = Concat l + let indent d = Indent d + let ifBreaks t f = IfBreaks {yes = t; no = f} + let lineSuffix d = LineSuffix d + let group d = Group {shouldBreak = false; doc = d} + let breakableGroup ~forceBreak d = Group {shouldBreak = forceBreak; doc = d} + let customLayout gs = CustomLayout gs + let breakParent = BreakParent + (* let cursor = Cursor *) + + let space = Text " " + let comma = Text "," + let dot = Text "." + let dotdot = Text ".." + let dotdotdot = Text "..." + let lessThan = Text "<" + let greaterThan = Text ">" + let lbrace = Text "{" + let rbrace = Text "}" + let lparen = Text "(" + let rparen = Text ")" + let lbracket = Text "[" + let rbracket = Text "]" + let question = Text "?" + let tilde = Text "~" + let equal = Text "=" + let trailingComma = IfBreaks {yes = comma; no = nil} + let doubleQuote = Text "\"" + + let propagateForcedBreaks doc = + let rec walk doc = match doc with + | Text _ | Nil | LineSuffix _ -> + (false, doc) + | BreakParent -> + (true, Nil) + | LineBreak Hard -> + (true, doc) + | LineBreak (Classic | Soft) -> + (false, doc) + | Indent children -> + let (childForcesBreak, newChildren) = walk children in + (childForcesBreak, Indent newChildren) + | IfBreaks {yes = trueDoc; no = falseDoc} -> + let (falseForceBreak, falseDoc) = walk falseDoc in + if falseForceBreak then + let (_, trueDoc) = walk trueDoc in + (true, trueDoc) + else + let forceBreak, trueDoc = walk trueDoc in + (forceBreak, IfBreaks {yes = trueDoc; no = falseDoc}) + | Group {shouldBreak = forceBreak; doc = children} -> + let (childForcesBreak, newChildren) = walk children in + let shouldBreak = forceBreak || childForcesBreak in + (shouldBreak, Group {shouldBreak; doc = newChildren}) + | Concat children -> + let (forceBreak, newChildren) = List.fold_left (fun (forceBreak, newChildren) child -> + let (childForcesBreak, newChild) = walk child in + (forceBreak || childForcesBreak, newChild::newChildren) + ) (false, []) children + in + (forceBreak, Concat (List.rev newChildren)) + | CustomLayout children -> + (* When using CustomLayout, we don't want to propagate forced breaks + * from the children up. By definition it picks the first layout that fits + * otherwise it takes the last of the list. + * However we do want to propagate forced breaks in the sublayouts. They + * might need to be broken. 
We just don't propagate them any higher here *) + let children = match walk (Concat children) with + | (_, Concat children) -> children + | _ -> assert false + in + (false, CustomLayout children) + in + let (_, processedDoc) = walk doc in + processedDoc + + let join ~sep docs = + let rec loop acc sep docs = + match docs with + | [] -> List.rev acc + | [x] -> List.rev (x::acc) + | x::xs -> loop (sep::x::acc) sep xs + in + Concat(loop [] sep docs) + + let rec fits w doc = match doc with + | _ when w < 0 -> false + | [] -> true + | (_ind, _mode, Text txt)::rest -> fits (w - String.length txt) rest + | (ind, mode, Indent doc)::rest -> fits w ((ind + 2, mode, doc)::rest) + | (_ind, Flat, LineBreak break)::rest -> + if break = Hard then true + else + let w = if break = Classic then w - 1 else w in + fits w rest + | (_ind, _mode, Nil)::rest -> fits w rest + | (_ind, Break, LineBreak _break)::_rest -> true + | (ind, mode, Group {shouldBreak = forceBreak; doc})::rest -> + let mode = if forceBreak then Break else mode in + fits w ((ind, mode, doc)::rest) + | (ind, mode, IfBreaks {yes = breakDoc; no = flatDoc})::rest -> + if mode = Break then + fits w ((ind, mode, breakDoc)::rest) + else + fits w ((ind, mode, flatDoc)::rest) + | (ind, mode, Concat docs)::rest -> + let ops = List.map (fun doc -> (ind, mode, doc)) docs in + fits w (List.append ops rest) + (* | (_ind, _mode, Cursor)::rest -> fits w rest *) + | (_ind, _mode, LineSuffix _)::rest -> fits w rest + | (_ind, _mode, BreakParent)::rest -> fits w rest + | (ind, mode, CustomLayout (hd::_))::rest -> + (* TODO: if we have nested custom layouts, what we should do here? *) + fits w ((ind, mode, hd)::rest) + | (_ind, _mode, CustomLayout _)::rest -> + fits w rest + + let toString ~width doc = + let doc = propagateForcedBreaks doc in + let buffer = MiniBuffer.create 1000 in + + let rec process ~pos lineSuffices stack = + match stack with + | ((ind, mode, doc) as cmd)::rest -> + begin match doc with + | Nil | BreakParent -> + process ~pos lineSuffices rest + | Text txt -> + MiniBuffer.add_string buffer txt; + process ~pos:(String.length txt + pos) lineSuffices rest + | LineSuffix doc -> + process ~pos ((ind, mode, doc)::lineSuffices) rest + | Concat docs -> + let ops = List.map (fun doc -> (ind, mode, doc)) docs in + process ~pos lineSuffices (List.append ops rest) + | Indent doc -> + process ~pos lineSuffices ((ind + 2, mode, doc)::rest) + | IfBreaks {yes = breakDoc; no = flatDoc} -> + if mode = Break then + process ~pos lineSuffices ((ind, mode, breakDoc)::rest) + else + process ~pos lineSuffices ((ind, mode, flatDoc)::rest) + | LineBreak lineStyle -> + if mode = Break then ( + begin match lineSuffices with + | [] -> + MiniBuffer.flush_newline buffer; + MiniBuffer.add_string buffer (String.make ind ' ' [@doesNotRaise]); + process ~pos:ind [] rest + | _docs -> + process ~pos:ind [] (List.concat [List.rev lineSuffices; cmd::rest]) + end + ) else (* mode = Flat *) ( + let pos = match lineStyle with + | Classic -> MiniBuffer.add_string buffer " "; pos + 1 + | Hard -> MiniBuffer.flush_newline buffer; 0 + | Soft -> pos + in + process ~pos lineSuffices rest + ) + | Group {shouldBreak; doc} -> + if shouldBreak || not (fits (width - pos) ((ind, Flat, doc)::rest)) then + process ~pos lineSuffices ((ind, Break, doc)::rest) + else + process ~pos lineSuffices ((ind, Flat, doc)::rest) + | CustomLayout docs -> + let rec findGroupThatFits groups = match groups with + | [] -> Nil + | [lastGroup] -> lastGroup + | doc::docs -> + if (fits (width - pos) ((ind, Flat, 
doc)::rest)) then + doc + else + findGroupThatFits docs + in + let doc = findGroupThatFits docs in + process ~pos lineSuffices ((ind, Flat, doc)::rest) + end + | [] -> + begin match lineSuffices with + | [] -> () + | suffices -> + process ~pos:0 [] (List.rev suffices) + end + in + process ~pos:0 [] [0, Flat, doc]; + + let len = MiniBuffer.length buffer in + if len > 0 && MiniBuffer.unsafe_get buffer (len - 1) != '\n' then + MiniBuffer.add_char buffer '\n'; + MiniBuffer.contents buffer + + + let debug t = + let rec toDoc = function + | Nil -> text "nil" + | BreakParent -> text "breakparent" + | Text txt -> text ("text(" ^ txt ^ ")") + | LineSuffix doc -> group( + concat [ + text "linesuffix("; + indent ( + concat [line; toDoc doc] + ); + line; + text ")" + ] + ) + | Concat docs -> group( + concat [ + text "concat("; + indent ( + concat [ + line; + join ~sep:(concat [text ","; line]) + (List.map toDoc docs) ; + ] + ); + line; + text ")" + ] + ) + | CustomLayout docs -> group( + concat [ + text "customLayout("; + indent ( + concat [ + line; + join ~sep:(concat [text ","; line]) + (List.map toDoc docs) ; + ] + ); + line; + text ")" + ] + ) + | Indent doc -> + concat [ + text "indent("; + softLine; + toDoc doc; + softLine; + text ")"; + ] + | IfBreaks {yes = trueDoc; no = falseDoc} -> + group( + concat [ + text "ifBreaks("; + indent ( + concat [ + line; + toDoc trueDoc; + concat [text ","; line]; + toDoc falseDoc; + ] + ); + line; + text ")" + ] + ) + | LineBreak break -> + let breakTxt = match break with + | Classic -> "Classic" + | Soft -> "Soft" + | Hard -> "Hard" + in + text ("LineBreak(" ^ breakTxt ^ ")") + | Group {shouldBreak; doc} -> + group( + concat [ + text "Group("; + indent ( + concat [ + line; + text ("shouldBreak: " ^ (string_of_bool shouldBreak)); + concat [text ","; line]; + toDoc doc; + ] + ); + line; + text ")" + ] + ) + in + let doc = toDoc t in + toString ~width:10 doc |> print_endline + [@@live] +end + +module Sexp: sig + type t + + val atom: string -> t + val list: t list -> t + val toString: t -> string +end = struct + type t = + | Atom of string + | List of t list + + let atom s = Atom s + let list l = List l + + let rec toDoc t = + match t with + | Atom s -> Doc.text s + | List [] -> Doc.text "()" + | List [sexpr] -> Doc.concat [Doc.lparen; toDoc sexpr; Doc.rparen;] + | List (hd::tail) -> + Doc.group ( + Doc.concat [ + Doc.lparen; + toDoc hd; + Doc.indent ( + Doc.concat [ + Doc.line; + Doc.join ~sep:Doc.line (List.map toDoc tail); + ] + ); + Doc.rparen; + ] + ) + + let toString sexpr = + let doc = toDoc sexpr in + Doc.toString ~width:80 doc +end + +module SexpAst: sig + val implementation: Parsetree.structure -> Sexp.t + val interface: Parsetree.signature -> Sexp.t +end = struct + open Parsetree + + let mapEmpty ~f items = + match items with + | [] -> [Sexp.list []] + | items -> List.map f items + + let string txt = + Sexp.atom ("\"" ^ txt ^ "\"") + + let char c = + Sexp.atom ("'" ^ (Char.escaped c) ^ "'") + + let optChar oc = + match oc with + | None -> Sexp.atom "None" + | Some c -> + Sexp.list [ + Sexp.atom "Some"; + char c + ] + + let longident l = + let rec loop l = match l with + | Longident.Lident ident -> Sexp.list [ + Sexp.atom "Lident"; + string ident; + ] + | Longident.Ldot (lident, txt) -> + Sexp.list [ + Sexp.atom "Ldot"; + loop lident; + string txt; + ] + | Longident.Lapply (l1, l2) -> + Sexp.list [ + Sexp.atom "Lapply"; + loop l1; + loop l2; + ] + in + Sexp.list [ + Sexp.atom "longident"; + loop l; + ] + + let closedFlag flag = match flag with + | 
Asttypes.Closed -> Sexp.atom "Closed" + | Open -> Sexp.atom "Open" + + let directionFlag flag = match flag with + | Asttypes.Upto -> Sexp.atom "Upto" + | Downto -> Sexp.atom "Downto" + + let recFlag flag = match flag with + | Asttypes.Recursive -> Sexp.atom "Recursive" + | Nonrecursive -> Sexp.atom "Nonrecursive" + + let overrideFlag flag = match flag with + | Asttypes.Override -> Sexp.atom "Override" + | Fresh -> Sexp.atom "Fresh" + + let privateFlag flag = match flag with + | Asttypes.Public -> Sexp.atom "Public" + | Private -> Sexp.atom "Private" + + let mutableFlag flag = match flag with + | Asttypes.Immutable -> Sexp.atom "Immutable" + | Mutable -> Sexp.atom "Mutable" + + let variance v = match v with + | Asttypes.Covariant -> Sexp.atom "Covariant" + | Contravariant -> Sexp.atom "Contravariant" + | Invariant -> Sexp.atom "Invariant" + + let argLabel lbl = match lbl with + | Asttypes.Nolabel -> Sexp.atom "Nolabel" + | Labelled txt -> Sexp.list [ + Sexp.atom "Labelled"; + string txt; + ] + | Optional txt -> Sexp.list [ + Sexp.atom "Optional"; + string txt; + ] + + let constant c = + let sexpr = match c with + | Pconst_integer (txt, tag) -> + Sexp.list [ + Sexp.atom "Pconst_integer"; + string txt; + optChar tag; + ] + | Pconst_char c -> + Sexp.list [ + Sexp.atom "Pconst_char"; + Sexp.atom (Char.escaped c); + ] + | Pconst_string (txt, tag) -> + Sexp.list [ + Sexp.atom "Pconst_string"; + string txt; + match tag with + | Some txt -> Sexp.list [ + Sexp.atom "Some"; + string txt; + ] + | None -> Sexp.atom "None"; + ] + | Pconst_float (txt, tag) -> + Sexp.list [ + Sexp.atom "Pconst_float"; + string txt; + optChar tag; + ] + in + Sexp.list [ + Sexp.atom "constant"; + sexpr + ] + + let rec structure s = + Sexp.list ( + (Sexp.atom "structure")::(List.map structureItem s) + ) + + and structureItem si = + let desc = match si.pstr_desc with + | Pstr_eval (expr, attrs) -> + Sexp.list [ + Sexp.atom "Pstr_eval"; + expression expr; + attributes attrs; + ] + | Pstr_value (flag, vbs) -> + Sexp.list [ + Sexp.atom "Pstr_value"; + recFlag flag; + Sexp.list (mapEmpty ~f:valueBinding vbs) + ] + | Pstr_primitive (vd) -> + Sexp.list [ + Sexp.atom "Pstr_primitive"; + valueDescription vd; + ] + | Pstr_type (flag, tds) -> + Sexp.list [ + Sexp.atom "Pstr_type"; + recFlag flag; + Sexp.list (mapEmpty ~f:typeDeclaration tds) + ] + | Pstr_typext typext -> + Sexp.list [ + Sexp.atom "Pstr_type"; + typeExtension typext; + ] + | Pstr_exception ec -> + Sexp.list [ + Sexp.atom "Pstr_exception"; + extensionConstructor ec; + ] + | Pstr_module mb -> + Sexp.list [ + Sexp.atom "Pstr_module"; + moduleBinding mb; + ] + | Pstr_recmodule mbs -> + Sexp.list [ + Sexp.atom "Pstr_recmodule"; + Sexp.list (mapEmpty ~f:moduleBinding mbs); + ] + | Pstr_modtype modTypDecl -> + Sexp.list [ + Sexp.atom "Pstr_modtype"; + moduleTypeDeclaration modTypDecl; + ] + | Pstr_open openDesc -> + Sexp.list [ + Sexp.atom "Pstr_open"; + openDescription openDesc; + ] + | Pstr_class _ -> Sexp.atom "Pstr_class" + | Pstr_class_type _ -> Sexp.atom "Pstr_class_type" + | Pstr_include id -> + Sexp.list [ + Sexp.atom "Pstr_include"; + includeDeclaration id; + ] + | Pstr_attribute attr -> + Sexp.list [ + Sexp.atom "Pstr_attribute"; + attribute attr; + ] + | Pstr_extension (ext, attrs) -> + Sexp.list [ + Sexp.atom "Pstr_extension"; + extension ext; + attributes attrs; + ] + in + Sexp.list [ + Sexp.atom "structure_item"; + desc; + ] + + and includeDeclaration id = + Sexp.list [ + Sexp.atom "include_declaration"; + moduleExpression id.pincl_mod; + attributes 
id.pincl_attributes; + ] + + and openDescription od = + Sexp.list [ + Sexp.atom "open_description"; + longident od.popen_lid.Asttypes.txt; + attributes od.popen_attributes; + ] + + and moduleTypeDeclaration mtd = + Sexp.list [ + Sexp.atom "module_type_declaration"; + string mtd.pmtd_name.Asttypes.txt; + (match mtd.pmtd_type with + | None -> Sexp.atom "None" + | Some modType -> Sexp.list [ + Sexp.atom "Some"; + moduleType modType; + ]); + attributes mtd.pmtd_attributes; + ] + + and moduleBinding mb = + Sexp.list [ + Sexp.atom "module_binding"; + string mb.pmb_name.Asttypes.txt; + moduleExpression mb.pmb_expr; + attributes mb.pmb_attributes; + ] + + and moduleExpression me = + let desc = match me.pmod_desc with + | Pmod_ident modName -> + Sexp.list [ + Sexp.atom "Pmod_ident"; + longident modName.Asttypes.txt; + ] + | Pmod_structure s -> + Sexp.list [ + Sexp.atom "Pmod_structure"; + structure s; + ] + | Pmod_functor (lbl, optModType, modExpr) -> + Sexp.list [ + Sexp.atom "Pmod_functor"; + string lbl.Asttypes.txt; + (match optModType with + | None -> Sexp.atom "None" + | Some modType -> Sexp.list [ + Sexp.atom "Some"; + moduleType modType; + ]); + moduleExpression modExpr; + ] + | Pmod_apply (callModExpr, modExprArg) -> + Sexp.list [ + Sexp.atom "Pmod_apply"; + moduleExpression callModExpr; + moduleExpression modExprArg; + ] + | Pmod_constraint (modExpr, modType) -> + Sexp.list [ + Sexp.atom "Pmod_constraint"; + moduleExpression modExpr; + moduleType modType; + ] + | Pmod_unpack expr -> + Sexp.list [ + Sexp.atom "Pmod_unpack"; + expression expr; + ] + | Pmod_extension ext -> + Sexp.list [ + Sexp.atom "Pmod_extension"; + extension ext; + ] + in + Sexp.list [ + Sexp.atom "module_expr"; + desc; + attributes me.pmod_attributes; + ] + + and moduleType mt = + let desc = match mt.pmty_desc with + | Pmty_ident longidentLoc -> + Sexp.list [ + Sexp.atom "Pmty_ident"; + longident longidentLoc.Asttypes.txt; + ] + | Pmty_signature s -> + Sexp.list [ + Sexp.atom "Pmty_signature"; + signature s; + ] + | Pmty_functor (lbl, optModType, modType) -> + Sexp.list [ + Sexp.atom "Pmty_functor"; + string lbl.Asttypes.txt; + (match optModType with + | None -> Sexp.atom "None" + | Some modType -> Sexp.list [ + Sexp.atom "Some"; + moduleType modType; + ]); + moduleType modType; + ] + | Pmty_alias longidentLoc -> + Sexp.list [ + Sexp.atom "Pmty_alias"; + longident longidentLoc.Asttypes.txt; + ] + | Pmty_extension ext -> + Sexp.list [ + Sexp.atom "Pmty_extension"; + extension ext; + ] + | Pmty_typeof modExpr -> + Sexp.list [ + Sexp.atom "Pmty_typeof"; + moduleExpression modExpr; + ] + | Pmty_with (modType, withConstraints) -> + Sexp.list [ + Sexp.atom "Pmty_with"; + moduleType modType; + Sexp.list (mapEmpty ~f:withConstraint withConstraints); + ] + in + Sexp.list [ + Sexp.atom "module_type"; + desc; + attributes mt.pmty_attributes; + ] + + and withConstraint wc = match wc with + | Pwith_type (longidentLoc, td) -> + Sexp.list [ + Sexp.atom "Pmty_with"; + longident longidentLoc.Asttypes.txt; + typeDeclaration td; + ] + | Pwith_module (l1, l2) -> + Sexp.list [ + Sexp.atom "Pwith_module"; + longident l1.Asttypes.txt; + longident l2.Asttypes.txt; + ] + | Pwith_typesubst (longidentLoc, td) -> + Sexp.list [ + Sexp.atom "Pwith_typesubst"; + longident longidentLoc.Asttypes.txt; + typeDeclaration td; + ] + | Pwith_modsubst (l1, l2) -> + Sexp.list [ + Sexp.atom "Pwith_modsubst"; + longident l1.Asttypes.txt; + longident l2.Asttypes.txt; + ] + + and signature s = + Sexp.list ( + (Sexp.atom "signature")::(List.map signatureItem s) + ) 
+ + and signatureItem si = + let descr = match si.psig_desc with + | Psig_value vd -> + Sexp.list [ + Sexp.atom "Psig_value"; + valueDescription vd; + ] + | Psig_type (flag, typeDeclarations) -> + Sexp.list [ + Sexp.atom "Psig_type"; + recFlag flag; + Sexp.list (mapEmpty ~f:typeDeclaration typeDeclarations); + ] + | Psig_typext typExt -> + Sexp.list [ + Sexp.atom "Psig_typext"; + typeExtension typExt; + ] + | Psig_exception extConstr -> + Sexp.list [ + Sexp.atom "Psig_exception"; + extensionConstructor extConstr; + ] + | Psig_module modDecl -> + Sexp.list [ + Sexp.atom "Psig_module"; + moduleDeclaration modDecl; + ] + | Psig_recmodule modDecls -> + Sexp.list [ + Sexp.atom "Psig_recmodule"; + Sexp.list (mapEmpty ~f:moduleDeclaration modDecls); + ] + | Psig_modtype modTypDecl -> + Sexp.list [ + Sexp.atom "Psig_modtype"; + moduleTypeDeclaration modTypDecl; + ] + | Psig_open openDesc -> + Sexp.list [ + Sexp.atom "Psig_open"; + openDescription openDesc; + ] + | Psig_include inclDecl -> + Sexp.list [ + Sexp.atom "Psig_include"; + includeDescription inclDecl + ] + | Psig_class _ -> Sexp.list [Sexp.atom "Psig_class";] + | Psig_class_type _ -> Sexp.list [ Sexp.atom "Psig_class_type"; ] + | Psig_attribute attr -> + Sexp.list [ + Sexp.atom "Psig_attribute"; + attribute attr; + ] + | Psig_extension (ext, attrs) -> + Sexp.list [ + Sexp.atom "Psig_extension"; + extension ext; + attributes attrs; + ] + in + Sexp.list [ + Sexp.atom "signature_item"; + descr; + ] + + and includeDescription id = + Sexp.list [ + Sexp.atom "include_description"; + moduleType id.pincl_mod; + attributes id.pincl_attributes; + ] + + and moduleDeclaration md = + Sexp.list [ + Sexp.atom "module_declaration"; + string md.pmd_name.Asttypes.txt; + moduleType md.pmd_type; + attributes md.pmd_attributes; + ] + + and valueBinding vb = + Sexp.list [ + Sexp.atom "value_binding"; + pattern vb.pvb_pat; + expression vb.pvb_expr; + attributes vb.pvb_attributes; + ] + + and valueDescription vd = + Sexp.list [ + Sexp.atom "value_description"; + string vd.pval_name.Asttypes.txt; + coreType vd.pval_type; + Sexp.list (mapEmpty ~f:string vd.pval_prim); + attributes vd.pval_attributes; + ] + + and typeDeclaration td = + Sexp.list [ + Sexp.atom "type_declaration"; + string td.ptype_name.Asttypes.txt; + Sexp.list [ + Sexp.atom "ptype_params"; + Sexp.list (mapEmpty ~f:(fun (typexpr, var) -> + Sexp.list [ + coreType typexpr; + variance var; + ]) td.ptype_params) + ]; + Sexp.list [ + Sexp.atom "ptype_cstrs"; + Sexp.list (mapEmpty ~f:(fun (typ1, typ2, _loc) -> + Sexp.list [ + coreType typ1; + coreType typ2; + ]) td.ptype_cstrs) + ]; + Sexp.list [ + Sexp.atom "ptype_kind"; + typeKind td.ptype_kind; + ]; + Sexp.list [ + Sexp.atom "ptype_manifest"; + match td.ptype_manifest with + | None -> Sexp.atom "None" + | Some typ -> Sexp.list [ + Sexp.atom "Some"; + coreType typ; + ] + ]; + Sexp.list [ + Sexp.atom "ptype_private"; + privateFlag td.ptype_private; + ]; + attributes td.ptype_attributes; + ] + + and extensionConstructor ec = + Sexp.list [ + Sexp.atom "extension_constructor"; + string ec.pext_name.Asttypes.txt; + extensionConstructorKind ec.pext_kind; + attributes ec.pext_attributes; + ] + + and extensionConstructorKind kind = match kind with + | Pext_decl (args, optTypExpr) -> + Sexp.list [ + Sexp.atom "Pext_decl"; + constructorArguments args; + match optTypExpr with + | None -> Sexp.atom "None" + | Some typ -> Sexp.list [ + Sexp.atom "Some"; + coreType typ; + ] + ] + | Pext_rebind longidentLoc -> + Sexp.list [ + Sexp.atom "Pext_rebind"; + longident 
longidentLoc.Asttypes.txt; + ] + + and typeExtension te = + Sexp.list [ + Sexp.atom "type_extension"; + Sexp.list [ + Sexp.atom "ptyext_path"; + longident te.ptyext_path.Asttypes.txt; + ]; + Sexp.list [ + Sexp.atom "ptyext_parms"; + Sexp.list (mapEmpty ~f:(fun (typexpr, var) -> + Sexp.list [ + coreType typexpr; + variance var; + ]) te.ptyext_params) + ]; + Sexp.list [ + Sexp.atom "ptyext_constructors"; + Sexp.list (mapEmpty ~f:extensionConstructor te.ptyext_constructors); + ]; + Sexp.list [ + Sexp.atom "ptyext_private"; + privateFlag te.ptyext_private; + ]; + attributes te.ptyext_attributes; + ] + + and typeKind kind = match kind with + | Ptype_abstract -> Sexp.atom "Ptype_abstract" + | Ptype_variant constrDecls -> + Sexp.list [ + Sexp.atom "Ptype_variant"; + Sexp.list (mapEmpty ~f:constructorDeclaration constrDecls); + ] + | Ptype_record lblDecls -> + Sexp.list [ + Sexp.atom "Ptype_record"; + Sexp.list (mapEmpty ~f:labelDeclaration lblDecls); + ] + | Ptype_open -> Sexp.atom "Ptype_open" + + and constructorDeclaration cd = + Sexp.list [ + Sexp.atom "constructor_declaration"; + string cd.pcd_name.Asttypes.txt; + Sexp.list [ + Sexp.atom "pcd_args"; + constructorArguments cd.pcd_args; + ]; + Sexp.list [ + Sexp.atom "pcd_res"; + match cd.pcd_res with + | None -> Sexp.atom "None" + | Some typ -> Sexp.list [ + Sexp.atom "Some"; + coreType typ; + ] + ]; + attributes cd.pcd_attributes; + ] + + and constructorArguments args = match args with + | Pcstr_tuple types -> + Sexp.list [ + Sexp.atom "Pcstr_tuple"; + Sexp.list (mapEmpty ~f:coreType types) + ] + | Pcstr_record lds -> + Sexp.list [ + Sexp.atom "Pcstr_record"; + Sexp.list (mapEmpty ~f:labelDeclaration lds) + ] + + and labelDeclaration ld = + Sexp.list [ + Sexp.atom "label_declaration"; + string ld.pld_name.Asttypes.txt; + mutableFlag ld.pld_mutable; + coreType ld.pld_type; + attributes ld.pld_attributes; + ] + + and expression expr = + let desc = match expr.pexp_desc with + | Pexp_ident longidentLoc -> + Sexp.list [ + Sexp.atom "Pexp_ident"; + longident longidentLoc.Asttypes.txt; + ] + | Pexp_constant c -> + Sexp.list [ + Sexp.atom "Pexp_constant"; + constant c + ] + | Pexp_let (flag, vbs, expr) -> + Sexp.list [ + Sexp.atom "Pexp_let"; + recFlag flag; + Sexp.list (mapEmpty ~f:valueBinding vbs); + expression expr; + ] + | Pexp_function cases -> + Sexp.list [ + Sexp.atom "Pexp_function"; + Sexp.list (mapEmpty ~f:case cases); + ] + | Pexp_fun (argLbl, exprOpt, pat, expr) -> + Sexp.list [ + Sexp.atom "Pexp_fun"; + argLabel argLbl; + (match exprOpt with + | None -> Sexp.atom "None" + | Some expr -> Sexp.list [ + Sexp.atom "Some"; + expression expr; + ]); + pattern pat; + expression expr; + ] + | Pexp_apply (expr, args) -> + Sexp.list [ + Sexp.atom "Pexp_apply"; + expression expr; + Sexp.list (mapEmpty ~f:(fun (argLbl, expr) -> Sexp.list [ + argLabel argLbl; + expression expr + ]) args); + ] + | Pexp_match (expr, cases) -> + Sexp.list [ + Sexp.atom "Pexp_match"; + expression expr; + Sexp.list (mapEmpty ~f:case cases); + ] + | Pexp_try (expr, cases) -> + Sexp.list [ + Sexp.atom "Pexp_try"; + expression expr; + Sexp.list (mapEmpty ~f:case cases); + ] + | Pexp_tuple exprs -> + Sexp.list [ + Sexp.atom "Pexp_tuple"; + Sexp.list (mapEmpty ~f:expression exprs); + ] + | Pexp_construct (longidentLoc, exprOpt) -> + Sexp.list [ + Sexp.atom "Pexp_construct"; + longident longidentLoc.Asttypes.txt; + match exprOpt with + | None -> Sexp.atom "None" + | Some expr -> + Sexp.list [ + Sexp.atom "Some"; + expression expr; + ] + ] + | Pexp_variant (lbl, exprOpt) -> + 
Sexp.list [ + Sexp.atom "Pexp_variant"; + string lbl; + match exprOpt with + | None -> Sexp.atom "None" + | Some expr -> + Sexp.list [ + Sexp.atom "Some"; + expression expr; + ] + ] + | Pexp_record (rows, optExpr) -> + Sexp.list [ + Sexp.atom "Pexp_record"; + Sexp.list (mapEmpty ~f:(fun (longidentLoc, expr) -> Sexp.list [ + longident longidentLoc.Asttypes.txt; + expression expr; + ]) rows); + (match optExpr with + | None -> Sexp.atom "None" + | Some expr -> + Sexp.list [ + Sexp.atom "Some"; + expression expr; + ]); + ] + | Pexp_field (expr, longidentLoc) -> + Sexp.list [ + Sexp.atom "Pexp_field"; + expression expr; + longident longidentLoc.Asttypes.txt; + ] + | Pexp_setfield (expr1, longidentLoc, expr2) -> + Sexp.list [ + Sexp.atom "Pexp_setfield"; + expression expr1; + longident longidentLoc.Asttypes.txt; + expression expr2; + ] + | Pexp_array exprs -> + Sexp.list [ + Sexp.atom "Pexp_array"; + Sexp.list (mapEmpty ~f:expression exprs); + ] + | Pexp_ifthenelse (expr1, expr2, optExpr) -> + Sexp.list [ + Sexp.atom "Pexp_ifthenelse"; + expression expr1; + expression expr2; + (match optExpr with + | None -> Sexp.atom "None" + | Some expr -> + Sexp.list [ + Sexp.atom "Some"; + expression expr; + ]); + ] + | Pexp_sequence (expr1, expr2) -> + Sexp.list [ + Sexp.atom "Pexp_sequence"; + expression expr1; + expression expr2; + ] + | Pexp_while (expr1, expr2) -> + Sexp.list [ + Sexp.atom "Pexp_while"; + expression expr1; + expression expr2; + ] + | Pexp_for (pat, e1, e2, flag, e3) -> + Sexp.list [ + Sexp.atom "Pexp_for"; + pattern pat; + expression e1; + expression e2; + directionFlag flag; + expression e3; + ] + | Pexp_constraint (expr, typexpr) -> + Sexp.list [ + Sexp.atom "Pexp_constraint"; + expression expr; + coreType typexpr; + ] + | Pexp_coerce (expr, optTyp, typexpr) -> + Sexp.list [ + Sexp.atom "Pexp_coerce"; + expression expr; + (match optTyp with + | None -> Sexp.atom "None" + | Some typ -> Sexp.list [ + Sexp.atom "Some"; + coreType typ; + ]); + coreType typexpr; + ] + | Pexp_send _ -> + Sexp.list [ + Sexp.atom "Pexp_send"; + ] + | Pexp_new _ -> + Sexp.list [ + Sexp.atom "Pexp_new"; + ] + | Pexp_setinstvar _ -> + Sexp.list [ + Sexp.atom "Pexp_setinstvar"; + ] + | Pexp_override _ -> + Sexp.list [ + Sexp.atom "Pexp_override"; + ] + | Pexp_letmodule (modName, modExpr, expr) -> + Sexp.list [ + Sexp.atom "Pexp_letmodule"; + string modName.Asttypes.txt; + moduleExpression modExpr; + expression expr; + ] + | Pexp_letexception (extConstr, expr) -> + Sexp.list [ + Sexp.atom "Pexp_letexception"; + extensionConstructor extConstr; + expression expr; + ] + | Pexp_assert expr -> + Sexp.list [ + Sexp.atom "Pexp_assert"; + expression expr; + ] + | Pexp_lazy expr -> + Sexp.list [ + Sexp.atom "Pexp_lazy"; + expression expr; + ] + | Pexp_poly _ -> + Sexp.list [ + Sexp.atom "Pexp_poly"; + ] + | Pexp_object _ -> + Sexp.list [ + Sexp.atom "Pexp_object"; + ] + | Pexp_newtype (lbl, expr) -> + Sexp.list [ + Sexp.atom "Pexp_newtype"; + string lbl.Asttypes.txt; + expression expr; + ] + | Pexp_pack modExpr -> + Sexp.list [ + Sexp.atom "Pexp_pack"; + moduleExpression modExpr; + ] + | Pexp_open (flag, longidentLoc, expr) -> + Sexp.list [ + Sexp.atom "Pexp_open"; + overrideFlag flag; + longident longidentLoc.Asttypes.txt; + expression expr; + ] + | Pexp_extension ext -> + Sexp.list [ + Sexp.atom "Pexp_extension"; + extension ext; + ] + | Pexp_unreachable -> Sexp.atom "Pexp_unreachable" + in + Sexp.list [ + Sexp.atom "expression"; + desc; + ] + + and case c = + Sexp.list [ + Sexp.atom "case"; + Sexp.list [ + Sexp.atom 
"pc_lhs"; + pattern c.pc_lhs; + ]; + Sexp.list [ + Sexp.atom "pc_guard"; + match c.pc_guard with + | None -> Sexp.atom "None" + | Some expr -> Sexp.list [ + Sexp.atom "Some"; + expression expr; + ] + ]; + Sexp.list [ + Sexp.atom "pc_rhs"; + expression c.pc_rhs; + ] + ] + + and pattern p = + let descr = match p.ppat_desc with + | Ppat_any -> + Sexp.atom "Ppat_any" + | Ppat_var var -> + Sexp.list [ + Sexp.atom "Ppat_var"; + string var.Location.txt; + ] + | Ppat_alias (p, alias) -> + Sexp.list [ + Sexp.atom "Ppat_alias"; + pattern p; + string alias.txt; + ] + | Ppat_constant c -> + Sexp.list [ + Sexp.atom "Ppat_constant"; + constant c; + ] + | Ppat_interval (lo, hi) -> + Sexp.list [ + Sexp.atom "Ppat_interval"; + constant lo; + constant hi; + ] + | Ppat_tuple (patterns) -> + Sexp.list [ + Sexp.atom "Ppat_tuple"; + Sexp.list (mapEmpty ~f:pattern patterns); + ] + | Ppat_construct (longidentLoc, optPattern) -> + Sexp.list [ + Sexp.atom "Ppat_construct"; + longident longidentLoc.Location.txt; + match optPattern with + | None -> Sexp.atom "None" + | Some p -> Sexp.list [ + Sexp.atom "some"; + pattern p; + ] + ] + | Ppat_variant (lbl, optPattern) -> + Sexp.list [ + Sexp.atom "Ppat_variant"; + string lbl; + match optPattern with + | None -> Sexp.atom "None" + | Some p -> Sexp.list [ + Sexp.atom "Some"; + pattern p; + ] + ] + | Ppat_record (rows, flag) -> + Sexp.list [ + Sexp.atom "Ppat_record"; + closedFlag flag; + Sexp.list (mapEmpty ~f:(fun (longidentLoc, p) -> + Sexp.list [ + longident longidentLoc.Location.txt; + pattern p; + ] + ) rows) + ] + | Ppat_array patterns -> + Sexp.list [ + Sexp.atom "Ppat_array"; + Sexp.list (mapEmpty ~f:pattern patterns); + ] + | Ppat_or (p1, p2) -> + Sexp.list [ + Sexp.atom "Ppat_or"; + pattern p1; + pattern p2; + ] + | Ppat_constraint (p, typexpr) -> + Sexp.list [ + Sexp.atom "Ppat_constraint"; + pattern p; + coreType typexpr; + ] + | Ppat_type longidentLoc -> + Sexp.list [ + Sexp.atom "Ppat_type"; + longident longidentLoc.Location.txt + ] + | Ppat_lazy p -> + Sexp.list [ + Sexp.atom "Ppat_lazy"; + pattern p; + ] + | Ppat_unpack stringLoc -> + Sexp.list [ + Sexp.atom "Ppat_unpack"; + string stringLoc.Location.txt; + ] + | Ppat_exception p -> + Sexp.list [ + Sexp.atom "Ppat_exception"; + pattern p; + ] + | Ppat_extension ext -> + Sexp.list [ + Sexp.atom "Ppat_extension"; + extension ext; + ] + | Ppat_open (longidentLoc, p) -> + Sexp.list [ + Sexp.atom "Ppat_open"; + longident longidentLoc.Location.txt; + pattern p; + ] + in + Sexp.list [ + Sexp.atom "pattern"; + descr; + ] + + and objectField field = match field with + | Otag (lblLoc, attrs, typexpr) -> + Sexp.list [ + Sexp.atom "Otag"; + string lblLoc.txt; + attributes attrs; + coreType typexpr; + ] + | Oinherit typexpr -> + Sexp.list [ + Sexp.atom "Oinherit"; + coreType typexpr; + ] + + and rowField field = match field with + | Rtag (labelLoc, attrs, truth, types) -> + Sexp.list [ + Sexp.atom "Rtag"; + string labelLoc.txt; + attributes attrs; + Sexp.atom (if truth then "true" else "false"); + Sexp.list (mapEmpty ~f:coreType types); + ] + | Rinherit typexpr -> + Sexp.list [ + Sexp.atom "Rinherit"; + coreType typexpr; + ] + + and packageType (modNameLoc, packageConstraints) = + Sexp.list [ + Sexp.atom "package_type"; + longident modNameLoc.Asttypes.txt; + Sexp.list (mapEmpty ~f:(fun (modNameLoc, typexpr) -> + Sexp.list [ + longident modNameLoc.Asttypes.txt; + coreType typexpr; + ] + ) packageConstraints) + ] + + and coreType typexpr = + let desc = match typexpr.ptyp_desc with + | Ptyp_any -> Sexp.atom "Ptyp_any" + | 
Ptyp_var var -> Sexp.list [ + Sexp.atom "Ptyp_var"; + string var + ] + | Ptyp_arrow (argLbl, typ1, typ2) -> + Sexp.list [ + Sexp.atom "Ptyp_arrow"; + argLabel argLbl; + coreType typ1; + coreType typ2; + ] + | Ptyp_tuple types -> + Sexp.list [ + Sexp.atom "Ptyp_tuple"; + Sexp.list (mapEmpty ~f:coreType types); + ] + | Ptyp_constr (longidentLoc, types) -> + Sexp.list [ + Sexp.atom "Ptyp_constr"; + longident longidentLoc.txt; + Sexp.list (mapEmpty ~f:coreType types); + ] + | Ptyp_alias (typexpr, alias) -> + Sexp.list [ + Sexp.atom "Ptyp_alias"; + coreType typexpr; + string alias; + ] + | Ptyp_object (fields, flag) -> + Sexp.list [ + Sexp.atom "Ptyp_object"; + closedFlag flag; + Sexp.list (mapEmpty ~f:objectField fields) + ] + | Ptyp_class (longidentLoc, types) -> + Sexp.list [ + Sexp.atom "Ptyp_class"; + longident longidentLoc.Location.txt; + Sexp.list (mapEmpty ~f:coreType types) + ] + | Ptyp_variant (fields, flag, optLabels) -> + Sexp.list [ + Sexp.atom "Ptyp_variant"; + Sexp.list (mapEmpty ~f:rowField fields); + closedFlag flag; + match optLabels with + | None -> Sexp.atom "None" + | Some lbls -> Sexp.list (mapEmpty ~f:string lbls); + ] + | Ptyp_poly (lbls, typexpr) -> + Sexp.list [ + Sexp.atom "Ptyp_poly"; + Sexp.list (mapEmpty ~f:(fun lbl -> string lbl.Asttypes.txt) lbls); + coreType typexpr; + ] + | Ptyp_package (package) -> + Sexp.list [ + Sexp.atom "Ptyp_package"; + packageType package; + ] + | Ptyp_extension (ext) -> + Sexp.list [ + Sexp.atom "Ptyp_extension"; + extension ext; + ] + in + Sexp.list [ + Sexp.atom "core_type"; + desc; + ] + + and payload p = + match p with + | PStr s -> + Sexp.list ( + (Sexp.atom "PStr")::(mapEmpty ~f:structureItem s) + ) + | PSig s -> + Sexp.list [ + Sexp.atom "PSig"; + signature s; + ] + | PTyp ct -> + Sexp.list [ + Sexp.atom "PTyp"; + coreType ct + ] + | PPat (pat, optExpr) -> + Sexp.list [ + Sexp.atom "PPat"; + pattern pat; + match optExpr with + | Some expr -> Sexp.list [ + Sexp.atom "Some"; + expression expr; + ] + | None -> Sexp.atom "None"; + ] + + and attribute (stringLoc, p) = + Sexp.list [ + Sexp.atom "attribute"; + Sexp.atom stringLoc.Asttypes.txt; + payload p; + ] + + and extension (stringLoc, p) = + Sexp.list [ + Sexp.atom "extension"; + Sexp.atom stringLoc.Asttypes.txt; + payload p; + ] + + and attributes attrs = + let sexprs = mapEmpty ~f:attribute attrs in + Sexp.list ((Sexp.atom "attributes")::sexprs) + + let implementation = structure + let interface = signature +end + +module IO: sig + val readFile: string -> string + val readStdin: unit -> string +end = struct + (* random chunk size: 2^15, TODO: why do we guess randomly? 
*) + let chunkSize = 32768 + + let readFile filename = + let chan = open_in filename in + let buffer = Buffer.create chunkSize in + let chunk = (Bytes.create [@doesNotRaise]) chunkSize in + let rec loop () = + let len = try input chan chunk 0 chunkSize with Invalid_argument _ -> 0 in + if len == 0 then ( + close_in_noerr chan; + Buffer.contents buffer + ) else ( + Buffer.add_subbytes buffer chunk 0 len; + loop () + ) + in + loop () + + let readStdin () = + let buffer = Buffer.create chunkSize in + let chunk = (Bytes.create [@doesNotRaise]) chunkSize in + let rec loop () = + let len = try input stdin chunk 0 chunkSize with Invalid_argument _ -> 0 in + if len == 0 then ( + close_in_noerr stdin; + Buffer.contents buffer + ) else ( + Buffer.add_subbytes buffer chunk 0 len; + loop () + ) + in + loop () +end + +module CharacterCodes = struct + let eof = -1 + + let space = 0x0020 + let newline = 0x0A (* \n *) [@@live] + let lineFeed = 0x0A (* \n *) + let carriageReturn = 0x0D (* \r *) + let lineSeparator = 0x2028 + let paragraphSeparator = 0x2029 + + let tab = 0x09 + + let bang = 0x21 + let dot = 0x2E + let colon = 0x3A + let comma = 0x2C + let backtick = 0x60 + (* let question = 0x3F *) + let semicolon = 0x3B + let underscore = 0x5F + let singleQuote = 0x27 + let doubleQuote = 0x22 + let equal = 0x3D + let bar = 0x7C + let tilde = 0x7E + let question = 0x3F + let ampersand = 0x26 + let at = 0x40 + let dollar = 0x24 + let percent = 0x25 + + let lparen = 0x28 + let rparen = 0x29 + let lbracket = 0x5B + let rbracket = 0x5D + let lbrace = 0x7B + let rbrace = 0x7D + + let forwardslash = 0x2F (* / *) + let backslash = 0x5C (* \ *) + + let greaterThan = 0x3E + let hash = 0x23 + let lessThan = 0x3C + + let minus = 0x2D + let plus = 0x2B + let asterisk = 0x2A + + let _0 = 0x30 + let _1 = 0x31 [@@live] + let _2 = 0x32 [@@live] + let _3 = 0x33 [@@live] + let _4 = 0x34 [@@live] + let _5 = 0x35 [@@live] + let _6 = 0x36 [@@live] + let _7 = 0x37 [@@live] + let _8 = 0x38 [@@live] + let _9 = 0x39 + + module Lower = struct + let a = 0x61 + let b = 0x62 + let c = 0x63 [@@live] + let d = 0x64 [@@live] + let e = 0x65 + let f = 0x66 + let g = 0x67 + let h = 0x68 [@@live] + let i = 0x69 [@@live] + let j = 0x6A [@@live] + let k = 0x6B [@@live] + let l = 0x6C [@@live] + let m = 0x6D [@@live] + let n = 0x6E + let o = 0x6F + let p = 0x70 + let q = 0x71 [@@live] + let r = 0x72 + let s = 0x73 [@@live] + let t = 0x74 + let u = 0x75 [@@live] + let v = 0x76 [@@live] + let w = 0x77 [@@live] + let x = 0x78 + let y = 0x79 [@@live] + let z = 0x7A + end + + module Upper = struct + let a = 0x41 + (* let b = 0x42 *) + let c = 0x43 [@@live] + let d = 0x44 [@@live] + let e = 0x45 [@@live] + let f = 0x46 [@@live] + let g = 0x47 + let h = 0x48 [@@live] + let i = 0x49 [@@live] + let j = 0x4A [@@live] + let k = 0x4B [@@live] + let l = 0x4C [@@live] + let m = 0x4D [@@live] + let b = 0x4E [@@live] + let o = 0x4F [@@live] + let p = 0x50 [@@live] + let q = 0x51 [@@live] + let r = 0x52 [@@live] + let s = 0x53 [@@live] + let t = 0x54 [@@live] + let u = 0x55 [@@live] + let v = 0x56 [@@live] + let w = 0x57 [@@live] + let x = 0x58 [@@live] + let y = 0x59 [@@live] + let z = 0x5a + end + + (* returns lower-case ch, ch should be ascii *) + let lower ch = + (* if ch >= Lower.a && ch <= Lower.z then ch else ch + 32 *) + 32 lor ch + + let isLetter ch = + Lower.a <= ch && ch <= Lower.z || + Upper.a <= ch && ch <= Upper.z + + let isUpperCase ch = + Upper.a <= ch && ch <= Upper.z + + let isDigit ch = _0 <= ch && ch <= _9 + + let isHex ch = + (_0 <= ch && ch 
<= _9) || + (Lower.a <= (lower ch) && (lower ch) <= Lower.f) + + (* + // ES5 7.3: + // The ECMAScript line terminator characters are listed in Table 3. + // Table 3: Line Terminator Characters + // Code Unit Value Name Formal Name + // \u000A Line Feed + // \u000D Carriage Return + // \u2028 Line separator + // \u2029 Paragraph separator + // Only the characters in Table 3 are treated as line terminators. Other new line or line + // breaking characters are treated as white space but not as line terminators. + *) + let isLineBreak ch = + ch == lineFeed + || ch == carriageReturn + || ch == lineSeparator + || ch == paragraphSeparator + + let digitValue ch = + if _0 <= ch && ch <= _9 then + ch - 48 + else if Lower.a <= (lower ch) && (lower ch) <= Lower.f then + (lower ch) - Lower.a + 10 + else + 16 (* larger than any legal value *) +end + +module Comment: sig + type t + + val toString: t -> string + + val loc: t -> Location.t + val txt: t -> string + val prevTokEndPos: t -> Lexing.position + + val setPrevTokEndPos: t -> Lexing.position -> unit + + val isSingleLineComment: t -> bool + + val makeSingleLineComment: loc:Location.t -> string -> t + val makeMultiLineComment: loc:Location.t -> string -> t + val fromOcamlComment: + loc:Location.t -> txt:string -> prevTokEndPos:Lexing.position -> t + val trimSpaces: string -> string +end = struct + type style = + | SingleLine + | MultiLine + + let styleToString s = match s with + | SingleLine -> "SingleLine" + | MultiLine -> "MultiLine" + + type t = { + txt: string; + style: style; + loc: Location.t; + mutable prevTokEndPos: Lexing.position; + } + + let loc t = t.loc + let txt t = t.txt + let prevTokEndPos t = t.prevTokEndPos + + let setPrevTokEndPos t pos = + t.prevTokEndPos <- pos + + let isSingleLineComment t = match t.style with + | SingleLine -> true + | MultiLine -> false + + let toString t = + Format.sprintf + "(txt: %s\nstyle: %s\nlines: %d-%d)" + t.txt + (styleToString t.style) + t.loc.loc_start.pos_lnum + t.loc.loc_end.pos_lnum + + let makeSingleLineComment ~loc txt = { + txt; + loc; + style = SingleLine; + prevTokEndPos = Lexing.dummy_pos; + } + + let makeMultiLineComment ~loc txt = { + txt; + loc; + style = MultiLine; + prevTokEndPos = Lexing.dummy_pos; + } + + let fromOcamlComment ~loc ~txt ~prevTokEndPos = { + txt; + loc; + style = MultiLine; + prevTokEndPos = prevTokEndPos + } + + let trimSpaces s = + let len = String.length s in + if len = 0 then s + else if String.unsafe_get s 0 = ' ' || String.unsafe_get s (len - 1) = ' ' then ( + let b = Bytes.of_string s in + let i = ref 0 in + while !i < len && (Bytes.unsafe_get b !i) = ' ' do + incr i + done; + let j = ref (len - 1) in + while !j >= !i && (Bytes.unsafe_get b !j) = ' ' do + decr j + done; + if !j >= !i then + (Bytes.sub [@doesNotRaise]) b !i (!j - !i + 1) |> Bytes.to_string + else + "" + ) else s +end + +module Token = struct + type t = + | Open + | True | False + | Character of char + | Int of {i: string; suffix: char option} + | Float of {f: string; suffix: char option} + | String of string + | Lident of string + | Uident of string + | As + | Dot | DotDot | DotDotDot + | Bang + | Semicolon + | Let + | And + | Rec + | Underscore + | SingleQuote + | Equal | EqualEqual | EqualEqualEqual + | Bar + | Lparen + | Rparen + | Lbracket + | Rbracket + | Lbrace + | Rbrace + | Colon + | Comma + | Eof + | Exception + | Backslash [@live] + | Forwardslash | ForwardslashDot + | Asterisk | AsteriskDot | Exponentiation + | Minus | MinusDot + | Plus | PlusDot | PlusPlus | PlusEqual + | 
ColonGreaterThan + | GreaterThan + | LessThan + | LessThanSlash + | Hash | HashEqual | HashHash + | Assert + | Lazy + | Tilde + | Question + | If | Else | For | In | To | Downto | While | Switch + | When + | EqualGreater | MinusGreater + | External + | Typ + | Private + | Mutable + | Constraint + | Include + | Module + | Of + | With + | Land | Lor + | Band (* Bitwise and: & *) + | BangEqual | BangEqualEqual + | LessEqual | GreaterEqual + | ColonEqual + | At | AtAt + | Percent | PercentPercent + | Comment of Comment.t + | List + | TemplateTail of string + | TemplatePart of string + | Backtick + | BarGreater + | Try | Catch + | Import + | Export + + let precedence = function + | HashEqual | ColonEqual -> 1 + | Lor -> 2 + | Land -> 3 + | Equal | EqualEqual | EqualEqualEqual | LessThan | GreaterThan + | BangEqual | BangEqualEqual | LessEqual | GreaterEqual | BarGreater -> 4 + | Plus | PlusDot | Minus | MinusDot | PlusPlus -> 5 + | Asterisk | AsteriskDot | Forwardslash | ForwardslashDot -> 6 + | Exponentiation -> 7 + | MinusGreater -> 8 + | Dot -> 9 + | _ -> 0 + + let toString = function + | Open -> "open" + | True -> "true" | False -> "false" + | Character c -> "'" ^ (Char.escaped c) ^ "'" + | String s -> s + | Lident str -> str + | Uident str -> str + | Dot -> "." | DotDot -> ".." | DotDotDot -> "..." + | Int {i} -> "int " ^ i + | Float {f} -> "Float: " ^ f + | Bang -> "!" + | Semicolon -> ";" + | Let -> "let" + | And -> "and" + | Rec -> "rec" + | Underscore -> "_" + | SingleQuote -> "'" + | Equal -> "=" | EqualEqual -> "==" | EqualEqualEqual -> "===" + | Eof -> "eof" + | Bar -> "|" + | As -> "as" + | Lparen -> "(" | Rparen -> ")" + | Lbracket -> "[" | Rbracket -> "]" + | Lbrace -> "{" | Rbrace -> "}" + | ColonGreaterThan -> ":>" + | Colon -> ":" + | Comma -> "," + | Minus -> "-" | MinusDot -> "-." + | Plus -> "+" | PlusDot -> "+." | PlusPlus -> "++" | PlusEqual -> "+=" + | Backslash -> "\\" + | Forwardslash -> "/" | ForwardslashDot -> "/." + | Exception -> "exception" + | Hash -> "#" | HashHash -> "##" | HashEqual -> "#=" + | GreaterThan -> ">" + | LessThan -> "<" + | LessThanSlash -> "</" + | Asterisk -> "*" | AsteriskDot -> "*." | Exponentiation -> "**" + | Assert -> "assert" + | Lazy -> "lazy" + | Tilde -> "tilde" + | Question -> "?" 
+ | If -> "if" + | Else -> "else" + | For -> "for" + | In -> "in" + | To -> "to" + | Downto -> "downto" + | While -> "while" + | Switch -> "switch" + | When -> "when" + | EqualGreater -> "=>" | MinusGreater -> "->" + | External -> "external" + | Typ -> "type" + | Private -> "private" + | Constraint -> "constraint" + | Mutable -> "mutable" + | Include -> "include" + | Module -> "module" + | Of -> "of" + | With -> "with" + | Lor -> "||" + | Band -> "&" | Land -> "&&" + | BangEqual -> "!=" | BangEqualEqual -> "!==" + | GreaterEqual -> ">=" | LessEqual -> "<=" + | ColonEqual -> ":=" + | At -> "@" | AtAt -> "@@" + | Percent -> "%" | PercentPercent -> "%%" + | Comment c -> "Comment(" ^ (Comment.toString c) ^ ")" + | List -> "list" + | TemplatePart text -> text ^ "${" + | TemplateTail text -> "TemplateTail(" ^ text ^ ")" + | Backtick -> "`" + | BarGreater -> "|>" + | Try -> "try" | Catch -> "catch" + | Import -> "import" + | Export -> "export" + + let keywordTable = function + | "true" -> True + | "false" -> False + | "open" -> Open + | "let" -> Let + | "rec" -> Rec + | "and" -> And + | "as" -> As + | "exception" -> Exception + | "assert" -> Assert + | "lazy" -> Lazy + | "if" -> If + | "else" -> Else + | "for" -> For + | "in" -> In + | "to" -> To + | "downto" -> Downto + | "while" -> While + | "switch" -> Switch + | "when" -> When + | "external" -> External + | "type" -> Typ + | "private" -> Private + | "mutable" -> Mutable + | "constraint" -> Constraint + | "include" -> Include + | "module" -> Module + | "of" -> Of + | "list" -> List + | "with" -> With + | "try" -> Try + | "catch" -> Catch + | "import" -> Import + | "export" -> Export + | _ -> raise Not_found + [@@raises Not_found] + + let isKeyword = function + | True | False | Open | Let | Rec | And | As + | Exception | Assert | Lazy | If | Else | For | In | To + | Downto | While | Switch | When | External | Typ | Private + | Mutable | Constraint | Include | Module | Of + | Land | Lor | List | With + | Try | Catch | Import | Export -> true + | _ -> false + + let lookupKeyword str = + try keywordTable str with + | Not_found -> + if CharacterCodes.isUpperCase (int_of_char (str.[0] [@doesNotRaise])) then + Uident str + else Lident str + + let isKeywordTxt str = + try let _ = keywordTable str in true with + | Not_found -> false +end + +module Grammar = struct + type t = + | OpenDescription (* open Belt *) + | ModuleLongIdent (* Foo or Foo.Bar *) [@live] + | Ternary (* condExpr ? 
trueExpr : falseExpr *) + | Es6ArrowExpr + | Jsx + | JsxAttribute + | JsxChild [@live] + | ExprOperand + | ExprUnary + | ExprSetField + | ExprBinaryAfterOp of Token.t + | ExprBlock + | ExprCall + | ExprList + | ExprArrayAccess + | ExprArrayMutation + | ExprIf + | IfCondition | IfBranch | ElseBranch + | TypeExpression + | External + | PatternMatching + | PatternMatchCase + | LetBinding + | PatternList + | PatternOcamlList + | PatternRecord + + | TypeDef + | TypeConstrName + | TypeParams + | TypeParam [@live] + | PackageConstraint + + | TypeRepresentation + + | RecordDecl + | ConstructorDeclaration + | ParameterList + | StringFieldDeclarations + | FieldDeclarations + | TypExprList + | FunctorArgs + | ModExprList + | TypeParameters + | RecordRows + | RecordRowsStringKey + | ArgumentList + | Signature + | Specification + | Structure + | Implementation + | Attribute + | TypeConstraint + | Primitive + | AtomicTypExpr + | ListExpr + | JsFfiImport + + let toString = function + | OpenDescription -> "an open description" + | ModuleLongIdent -> "a module identifier" + | Ternary -> "a ternary expression" + | Es6ArrowExpr -> "an es6 arrow function" + | Jsx -> "a jsx expression" + | JsxAttribute -> "a jsx attribute" + | ExprOperand -> "a basic expression" + | ExprUnary -> "a unary expression" + | ExprBinaryAfterOp op -> "an expression after the operator \"" ^ Token.toString op ^ "\"" + | ExprIf -> "an if expression" + | IfCondition -> "the condition of an if expression" + | IfBranch -> "the true-branch of an if expression" + | ElseBranch -> "the else-branch of an if expression" + | TypeExpression -> "a type" + | External -> "an external" + | PatternMatching -> "the cases of a pattern match" + | ExprBlock -> "a block with expressions" + | ExprSetField -> "a record field mutation" + | ExprCall -> "a function application" + | ExprArrayAccess -> "an array access expression" + | ExprArrayMutation -> "an array mutation" + | LetBinding -> "a let binding" + | TypeDef -> "a type definition" + | TypeParams -> "type parameters" + | TypeParam -> "a type parameter" + | TypeConstrName -> "a type-constructor name" + | TypeRepresentation -> "a type representation" + | RecordDecl -> "a record declaration" + | PatternMatchCase -> "a pattern match case" + | ConstructorDeclaration -> "a constructor declaration" + | ExprList -> "multiple expressions" + | PatternList -> "multiple patterns" + | PatternOcamlList -> "a list pattern" + | PatternRecord -> "a record pattern" + | ParameterList -> "parameters" + | StringFieldDeclarations -> "string field declarations" + | FieldDeclarations -> "field declarations" + | TypExprList -> "list of types" + | FunctorArgs -> "functor arguments" + | ModExprList -> "list of module expressions" + | TypeParameters -> "list of type parameters" + | RecordRows -> "rows of a record" + | RecordRowsStringKey -> "rows of a record with string keys" + | ArgumentList -> "arguments" + | Signature -> "signature" + | Specification -> "specification" + | Structure -> "structure" + | Implementation -> "implementation" + | Attribute -> "an attribute" + | TypeConstraint -> "constraints on a type" + | Primitive -> "an external primitive" + | AtomicTypExpr -> "a type" + | ListExpr -> "an ocaml list expr" + | PackageConstraint -> "a package constraint" + | JsFfiImport -> "js ffi import" + | JsxChild -> "jsx child" + + let isSignatureItemStart = function + | Token.At + | Let + | Typ + | External + | Exception + | Open + | Include + | Module + | AtAt + | PercentPercent -> true + | _ -> false + + let 
isAtomicPatternStart = function + | Token.Int _ | String _ | Character _ + | Lparen | Lbracket | Lbrace + | Underscore + | Lident _ | Uident _ | List + | Exception | Lazy + | Percent -> true + | _ -> false + + let isAtomicExprStart = function + | Token.True | False + | Int _ | String _ | Float _ | Character _ + | Backtick + | Uident _ | Lident _ | Hash + | Lparen + | List + | Lbracket + | Lbrace + | LessThan + | Module + | Percent -> true + | _ -> false + + let isAtomicTypExprStart = function + | Token.SingleQuote | Underscore + | Lparen | Lbrace + | Uident _ | Lident _ | List + | Percent -> true + | _ -> false + + let isExprStart = function + | Token.True | False + | Int _ | String _ | Float _ | Character _ | Backtick + | Underscore (* _ => doThings() *) + | Uident _ | Lident _ | Hash + | Lparen | List | Module | Lbracket | Lbrace + | LessThan + | Minus | MinusDot | Plus | PlusDot | Bang + | Percent | At + | If | Switch | While | For | Assert | Lazy | Try -> true + | _ -> false + + let isJsxAttributeStart = function + | Token.Lident _ | Question -> true + | _ -> false + + let isStructureItemStart = function + | Token.Open + | Let + | Typ + | External | Import | Export + | Exception + | Include + | Module + | AtAt + | PercentPercent + | At -> true + | t when isExprStart t -> true + | _ -> false + + let isPatternStart = function + | Token.Int _ | Float _ | String _ | Character _ | True | False | Minus | Plus + | Lparen | Lbracket | Lbrace | List + | Underscore + | Lident _ | Uident _ | Hash | HashHash + | Exception | Lazy | Percent | Module + | At -> true + | _ -> false + + let isParameterStart = function + | Token.Typ | Tilde | Dot -> true + | token when isPatternStart token -> true + | _ -> false + + (* TODO: overparse Uident ? *) + let isStringFieldDeclStart = function + | Token.String _ | At -> true + | _ -> false + + (* TODO: overparse Uident ? 
*) + let isFieldDeclStart = function + | Token.At | Mutable | Lident _ | List -> true + (* recovery, TODO: this is not ideal… *) + | Uident _ -> true + | t when Token.isKeyword t -> true + | _ -> false + + let isRecordDeclStart = function + | Token.At + | Mutable + | Lident _ | List -> true + | _ -> false + + let isTypExprStart = function + | Token.At + | SingleQuote + | Underscore + | Lparen | Lbracket + | Uident _ | Lident _ | List + | Module + | Percent + | Lbrace -> true + | _ -> false + + let isTypeParameterStart = function + | Token.Tilde | Dot -> true + | token when isTypExprStart token -> true + | _ -> false + + let isTypeParamStart = function + | Token.Plus | Minus | SingleQuote | Underscore -> true + | _ -> false + + let isFunctorArgStart = function + | Token.At | Uident _ | Underscore + | Percent + | Lbrace + | Lparen -> true + | _ -> false + + let isModExprStart = function + | Token.At | Percent + | Uident _ | Lbrace | Lparen -> true + | _ -> false + + let isRecordRowStart = function + | Token.DotDotDot -> true + | Token.Uident _ | Lident _ | List -> true + (* TODO *) + | t when Token.isKeyword t -> true + | _ -> false + + let isRecordRowStringKeyStart = function + | Token.String _ -> true + | _ -> false + + let isArgumentStart = function + | Token.Tilde | Dot | Underscore -> true + | t when isExprStart t -> true + | _ -> false + + let isPatternMatchStart = function + | Token.Bar -> true + | t when isPatternStart t -> true + | _ -> false + + let isPatternOcamlListStart = function + | Token.DotDotDot -> true + | t when isPatternStart t -> true + | _ -> false + + let isPatternRecordItemStart = function + | Token.DotDotDot | Uident _ | Lident _ | List | Underscore -> true + | _ -> false + + let isAttributeStart = function + | Token.At -> true + | _ -> false + + let isJsFfiImportStart = function + | Token.Lident _ | At -> true + | _ -> false + + let isJsxChildStart = isAtomicExprStart + + let isBlockExprStart = function + | Token.At | Hash | Percent | Minus | MinusDot | Plus | PlusDot | Bang + | True | False | Int _ | String _ | Character _ | Lident _ | Uident _ + | Lparen | List | Lbracket | Lbrace | Forwardslash | Assert + | Lazy | If | For | While | Switch | Open | Module | Exception | Let + | LessThan | Backtick | Try | Underscore -> true + | _ -> false + + let isListElement grammar token = + match grammar with + | ExprList -> token = Token.DotDotDot || isExprStart token + | ListExpr -> token = DotDotDot || isExprStart token + | PatternList -> token = DotDotDot || isPatternStart token + | ParameterList -> isParameterStart token + | StringFieldDeclarations -> isStringFieldDeclStart token + | FieldDeclarations -> isFieldDeclStart token + | RecordDecl -> isRecordDeclStart token + | TypExprList -> isTypExprStart token || token = Token.LessThan + | TypeParams -> isTypeParamStart token + | FunctorArgs -> isFunctorArgStart token + | ModExprList -> isModExprStart token + | TypeParameters -> isTypeParameterStart token + | RecordRows -> isRecordRowStart token + | RecordRowsStringKey -> isRecordRowStringKeyStart token + | ArgumentList -> isArgumentStart token + | Signature | Specification -> isSignatureItemStart token + | Structure | Implementation -> isStructureItemStart token + | PatternMatching -> isPatternMatchStart token + | PatternOcamlList -> isPatternOcamlListStart token + | PatternRecord -> isPatternRecordItemStart token + | Attribute -> isAttributeStart token + | TypeConstraint -> token = Constraint + | PackageConstraint -> token = And + | ConstructorDeclaration -> token = Bar + 
| Primitive -> begin match token with Token.String _ -> true | _ -> false end + | JsxAttribute -> isJsxAttributeStart token + | JsFfiImport -> isJsFfiImportStart token + | _ -> false + + let isListTerminator grammar token = + match grammar, token with + | _, Token.Eof + | ExprList, (Rparen | Forwardslash | Rbracket) + | ListExpr, Rparen + | ArgumentList, Rparen + | TypExprList, (Rparen | Forwardslash | GreaterThan | Equal) + | ModExprList, Rparen + | (PatternList | PatternOcamlList | PatternRecord), + (Forwardslash | Rbracket | Rparen | EqualGreater (* pattern matching => *) | In (* for expressions *) | Equal (* let {x} = foo *)) + | ExprBlock, Rbrace + | (Structure | Signature), Rbrace + | TypeParams, Rparen + | ParameterList, (EqualGreater | Lbrace) + | JsxAttribute, (Forwardslash | GreaterThan) + | JsFfiImport, Rbrace + | StringFieldDeclarations, Rbrace -> true + + | Attribute, token when token <> At -> true + | TypeConstraint, token when token <> Constraint -> true + | PackageConstraint, token when token <> And -> true + | ConstructorDeclaration, token when token <> Bar -> true + | Primitive, Semicolon -> true + | Primitive, token when isStructureItemStart token -> true + + | _ -> false + + let isPartOfList grammar token = + isListElement grammar token || isListTerminator grammar token +end + +module Reporting = struct + module TerminalDoc = struct + type break = + | Never + | Always + + type document = + | Nil + | Group of {break: break; doc: document} + | Text of string + | Indent of {amount: int; doc: document} + | Append of {doc1: document; doc2: document} + + let group ~break doc = Group {break; doc} + let text txt = Text (txt) + let indent i d = Indent {amount = i; doc = d} + let append d1 d2 = Append {doc1 = d1; doc2 = d2} + let nil = Nil + + type stack = + | Empty + | Cons of {doc: document; stack: stack} + + let push stack doc = Cons {doc; stack} + + type mode = + | Flat + | Break + + let toString (* ~width *) (doc : document) = + let buffer = Buffer.create 100 in + let rec loop stack mode offset = + match stack with + | Empty -> () + | Cons {doc; stack = rest} -> + begin match doc with + | Nil -> loop rest mode offset + | Text txt -> + Buffer.add_string buffer txt; + loop rest mode (offset + (String.length txt)) + | Indent {amount = i; doc} -> + let indentation = (String.make [@doesNotRaise]) i ' ' in + Buffer.add_string buffer indentation; + loop (push rest doc) mode (offset + i) + | Append {doc1; doc2} -> + let rest = push rest doc2 in + let rest = push rest + (match mode = Flat with + | true -> Nil + | false -> text "\n") + in + let rest = push rest doc1 in + loop rest mode offset + | Group {break; doc} -> + let rest = push rest doc in + begin match break with + | Always -> loop rest Break offset + | Never -> loop rest Flat offset + end + end + in + loop (push Empty doc) Flat 0; + Buffer.contents buffer + end + + type color = + | NoColor [@live] + | Red [@live] + + type style = { + underline: bool; [@live] + color: color; [@live] + } + + let highlight ~from ~len txt = + if from < 0 || (String.length txt) == 0 || (from >= String.length txt) then txt else + let before = try String.sub txt 0 from with Invalid_argument _ -> "" in + let content = + "\027[31m" ^ (try String.sub txt from len with Invalid_argument _ -> "") ^ "\027[0m" + in + let after = try String.sub txt (from + len) (String.length txt - (from + len)) with Invalid_argument _ -> "" in + before ^ content ^ after + + let underline ~from ~len txt = + let open TerminalDoc in + let indent = (String.make 
[@doesNotRaise]) from ' ' in + let underline = (String.make [@doesNotRaise]) len '^' in + let line = highlight ~from:0 ~len underline in + group ~break:Always + (append (text txt) (text (indent ^ line))) + + let rec drop n l = + if n == 1 then l + else drop (n - 1) (match l with | _x::xs -> xs | _ -> l) + + let rec take n l = + match l with + | _ when n == 0 -> [] + | [] -> [] + | x::xs -> x::(take (n -1) xs) + + (* TODO: cleanup *) + let renderCodeContext ~missing (src : string) startPos endPos = + let open Lexing in + let startCol = (startPos.pos_cnum - startPos.pos_bol) in + let endCol = endPos.pos_cnum - startPos.pos_cnum + startCol in + let startLine = max 1 (startPos.pos_lnum - 2) in (* 2 lines before *) + let lines = String.split_on_char '\n' src in + let endLine = + let len = List.length lines in + min len (startPos.pos_lnum + 3) (* 2 lines after *) + in + let lines = + lines + |> drop startLine + |> take (endLine - startLine) + |> Array.of_list + in + + let renderLine x ix = + let x = if ix = startPos.pos_lnum then + begin match missing with + | Some _len -> x ^ (String.make 10 ' ' [@doesNotRaise]) + | None -> x + end + else + x + in + + let open TerminalDoc in + let rowNr = + let txt = string_of_int ix in + let len = String.length txt in + if ix = startPos.pos_lnum then + highlight ~from:0 ~len txt + else txt + in + let len = + let len = if endCol >= 0 then + endCol - startCol + else + 1 + in + if (startCol + len) > String.length x then String.length x - startCol - 1 else len + in + let line = + if ix = startPos.pos_lnum then + begin match missing with + | Some len -> + underline + ~from:( + startCol + String.length (String.length (string_of_int ix) |> string_of_int) + 5 + ) ~len x + | None -> + let len = if startCol + len > String.length x then + (String.length x) - startCol + else + len + in + text (highlight ~from:startCol ~len x) + end + else text x + in + group ~break:Never + (append + (append (text rowNr) (text " │")) + (indent 2 line)) + in + + let reportDoc = ref TerminalDoc.nil in + + let linesLen = Array.length lines in + for i = 0 to (linesLen - 1) do + let line = try (Array.get [@doesNotRaise]) lines i with Invalid_argument _ -> "" in + reportDoc := + let open TerminalDoc in + let ix = startLine + i in + group ~break:Always (append !reportDoc (renderLine line ix)) + done; + + TerminalDoc.toString !reportDoc + + type problem = + | Unexpected of Token.t [@live] + | Expected of {token: Token.t; pos: Lexing.position; context: Grammar.t option} [@live] + | Message of string [@live] + | Uident [@live] + | Lident [@live] + | Unbalanced of Token.t [@live] + + type parseError = Lexing.position * problem +end + +module Diagnostics: sig + type t + type category + type report + + type reportStyle + val parseReportStyle: string -> reportStyle + + val unexpected: Token.t -> (Grammar.t * Lexing.position) list -> category + val expected: ?grammar:Grammar.t -> Lexing.position -> Token.t -> category + val uident: Token.t -> category + val lident: Token.t -> category + val unclosedString: category + val unclosedTemplate: category + val unclosedComment: category + val unknownUchar: int -> category + val message: string -> category + + val make: + filename: string + -> startPos: Lexing.position + -> endPos: Lexing.position + -> category + -> t + + val stringOfReport: style:reportStyle -> t list -> string -> string +end = struct + type category = + | Unexpected of {token: Token.t; context: (Grammar.t * Lexing.position) list} + | Expected of {context: Grammar.t option; pos: Lexing.position 
(* prev token end*); token: Token.t} + | Message of string + | Uident of Token.t + | Lident of Token.t + | UnclosedString + | UnclosedTemplate + | UnclosedComment + | UnknownUchar of int + + type t = { + filename: string; + startPos: Lexing.position; + endPos: Lexing.position; + category: category; + } + + type report = t list + + (* TODO: add json here *) + type reportStyle = + | Pretty + | Plain + + let parseReportStyle txt = match (String.lowercase_ascii txt) with + | "plain" -> Plain + | _ -> Pretty + + let defaultUnexpected token = + "I'm not sure what to parse here when looking at \"" ^ (Token.toString token) ^ "\"." + + let explain t = + match t.category with + | Uident currentToken -> + begin match currentToken with + | Lident lident -> + let guess = String.capitalize_ascii lident in + "Did you mean `" ^ guess ^"` instead of `" ^ lident ^ "`?" + | t when Token.isKeyword t -> + let token = Token.toString t in + "`" ^ token ^ "` is a reserved keyword." + | _ -> + "At this point, I'm looking for an uppercased identifier like `Belt` or `Array`" + end + | Lident currentToken -> + begin match currentToken with + | Uident uident -> + let guess = String.uncapitalize_ascii uident in + "Did you mean `" ^ guess ^"` instead of `" ^ uident ^ "`?" + | t when Token.isKeyword t -> + let token = Token.toString t in + "`" ^ token ^ "` is a reserved keyword. Keywords need to be escaped: \\\"" ^ token ^ "\"" + | Underscore -> + "`_` isn't a valid name." + | _ -> + "I'm expecting an lowercased identifier like `name` or `age`" + end + | Message txt -> txt + | UnclosedString -> + "This string is missing a double quote at the end" + | UnclosedTemplate -> + "Did you forget to close this template expression with a backtick?" + | UnclosedComment -> + "This comment seems to be missing a closing `*/`" + | UnknownUchar uchar -> + begin match uchar with + | 94 (* ^ *) -> + "Hmm, not sure what I should do here with this character.\nIf you're trying to deref an expression, use `foo.contents` instead." + | _ -> + "Hmm, I have no idea what this character means…" + end + | Expected {context; token = t} -> + let hint = match context with + | Some grammar -> "It signals the start of " ^ (Grammar.toString grammar) + | None -> "" + in + "Did you forget a `" ^ (Token.toString t) ^ "` here? " ^ hint + | Unexpected {token = t; context = breadcrumbs} -> + let name = (Token.toString t) in + begin match breadcrumbs with + | (AtomicTypExpr, _)::breadcrumbs -> + begin match breadcrumbs, t with + | ((StringFieldDeclarations | FieldDeclarations) , _) :: _, (String _ | At | Rbrace | Comma | Eof) -> + "I'm missing a type here" + | _, t when Grammar.isStructureItemStart t || t = Eof -> + "Missing a type here" + | _ -> + defaultUnexpected t + end + | (ExprOperand, _)::breadcrumbs -> + begin match breadcrumbs, t with + | (ExprBlock, _) :: _, Rbrace -> + "It seems that this expression block is empty" + | (ExprBlock, _) :: _, Bar -> (* Pattern matching *) + "Looks like there might be an expression missing here" + | (ExprSetField, _) :: _, _ -> + "It seems that this record field mutation misses an expression" + | (ExprArrayMutation, _) :: _, _ -> + "Seems that an expression is missing, with what do I mutate the array?" + | ((ExprBinaryAfterOp _ | ExprUnary), _) ::_, _ -> + "Did you forget to write an expression here?" + | (Grammar.LetBinding, _)::_, _ -> + "This let-binding misses an expression" + | _::_, (Rbracket | Rbrace) -> + "Missing expression" + | _ -> + "I'm not sure what to parse here when looking at \"" ^ name ^ "\"." 
+ end + | (TypeParam, _)::_ -> + begin match t with + | Lident ident -> + "Did you mean '" ^ ident ^"? A Type parameter starts with a quote." + | _ -> + "I'm not sure what to parse here when looking at \"" ^ name ^ "\"." + end + | _ -> + (* TODO: match on circumstance to verify Lident needed ? *) + if Token.isKeyword t then + "`" ^ name ^ "` is a reserved keyword. Keywords need to be escaped: \\\"" ^ (Token.toString t) ^ "\"" + else + "I'm not sure what to parse here when looking at \"" ^ name ^ "\"." + end + + let toPlainString t buffer = + Buffer.add_string buffer t.filename; + Buffer.add_char buffer '('; + Buffer.add_string buffer (string_of_int t.startPos.pos_cnum); + Buffer.add_char buffer ','; + Buffer.add_string buffer (string_of_int t.endPos.pos_cnum); + Buffer.add_char buffer ')'; + Buffer.add_char buffer ':'; + Buffer.add_string buffer (explain t) + + let toString t src = + let open Lexing in + let startchar = t.startPos.pos_cnum - t.startPos.pos_bol in + let endchar = t.endPos.pos_cnum - t.startPos.pos_cnum + startchar in + let locationInfo = + Printf.sprintf (* ReasonLanguageServer requires the following format *) + "File \"%s\", line %d, characters %d-%d:" + t.filename + t.startPos.pos_lnum + startchar + endchar + in + let code = + let missing = match t.category with + | Expected {token = t} -> + Some (String.length (Token.toString t)) + | _ -> None + in + Reporting.renderCodeContext ~missing src t.startPos t.endPos + in + let explanation = explain t in + Printf.sprintf "%s\n\n%s\n\n%s\n\n" locationInfo code explanation + + let make ~filename ~startPos ~endPos category = { + filename; + startPos; + endPos; + category + } + + let stringOfReport ~style diagnostics src = + match style with + | Pretty -> + List.fold_left (fun report diagnostic -> + report ^ (toString diagnostic src) ^ "\n" + ) "\n" (List.rev diagnostics) + | Plain -> + let buffer = Buffer.create 100 in + List.iter (fun diagnostic -> + toPlainString diagnostic buffer; + Buffer.add_char buffer '\n'; + ) diagnostics; + Buffer.contents buffer + + let unexpected token context = + Unexpected {token; context} + + let expected ?grammar pos token = + Expected {context = grammar; pos; token} + + let uident currentToken = Uident currentToken + let lident currentToken = Lident currentToken + let unclosedString = UnclosedString + let unclosedComment = UnclosedComment + let unclosedTemplate = UnclosedTemplate + let unknownUchar code = UnknownUchar code + let message txt = Message txt +end + +(* Collection of utilities to view the ast in a more a convenient form, + * allowing for easier processing. + * Example: given a ptyp_arrow type, what are its arguments and what is the + * returnType? *) +module ParsetreeViewer : sig + (* Restructures a nested tree of arrow types into its args & returnType + * The parsetree contains: a => b => c => d, for printing purposes + * we restructure the tree into (a, b, c) and its returnType d *) + val arrowType: Parsetree.core_type -> + Parsetree.attributes * + (Parsetree.attributes * Asttypes.arg_label * Parsetree.core_type) list * + Parsetree.core_type + + val functorType: Parsetree.module_type -> + (Parsetree.attributes * string Asttypes.loc * Parsetree.module_type option) list * + Parsetree.module_type + + (* filters @bs out of the provided attributes *) + val processUncurriedAttribute: Parsetree.attributes -> bool * Parsetree.attributes + + (* if ... else if ... else ... is represented as nested expressions: if ... else { if ... 
} + * The purpose of this function is to flatten nested ifs into one sequence. + * Basically compute: ([if, else if, else if, else if], else) *) + val collectIfExpressions: + Parsetree.expression -> + (Parsetree.expression * Parsetree.expression) list * Parsetree.expression option + + val collectListExpressions: + Parsetree.expression -> (Parsetree.expression list * Parsetree.expression option) + + type funParamKind = + | Parameter of { + attrs: Parsetree.attributes; + lbl: Asttypes.arg_label; + defaultExpr: Parsetree.expression option; + pat: Parsetree.pattern; + } + | NewTypes of {attrs: Parsetree.attributes; locs: string Asttypes.loc list} + + val funExpr: + Parsetree.expression -> + Parsetree.attributes * + funParamKind list * + Parsetree.expression + + (* example: + * `makeCoordinate({ + * x: 1, + * y: 2, + * })` + * Notice howe `({` and `})` "hug" or stick to each other *) + val isHuggableExpression: Parsetree.expression -> bool + + val isHuggablePattern: Parsetree.pattern -> bool + + val isHuggableRhs: Parsetree.expression -> bool + + val operatorPrecedence: string -> int + + val isUnaryExpression: Parsetree.expression -> bool + val isBinaryOperator: string -> bool + val isBinaryExpression: Parsetree.expression -> bool + + val flattenableOperators: string -> string -> bool + + val hasAttributes: Parsetree.attributes -> bool + + val isArrayAccess: Parsetree.expression -> bool + val isTernaryExpr: Parsetree.expression -> bool + + val collectTernaryParts: Parsetree.expression -> ((Parsetree.expression * Parsetree.expression) list * Parsetree.expression) + + val parametersShouldHug: + funParamKind list -> bool + + val filterTernaryAttributes: Parsetree.attributes -> Parsetree.attributes + + val isJsxExpression: Parsetree.expression -> bool + val hasJsxAttribute: Parsetree.attributes -> bool + + val shouldIndentBinaryExpr: Parsetree.expression -> bool + val shouldInlineRhsBinaryExpr: Parsetree.expression -> bool + val filterPrinteableAttributes: Parsetree.attributes -> Parsetree.attributes + val partitionPrinteableAttributes: Parsetree.attributes -> (Parsetree.attributes * Parsetree.attributes) + + val requiresSpecialCallbackPrintingLastArg: (Asttypes.arg_label * Parsetree.expression) list -> bool + val requiresSpecialCallbackPrintingFirstArg: (Asttypes.arg_label * Parsetree.expression) list -> bool + + val modExprApply : Parsetree.module_expr -> ( + Parsetree.module_expr list * Parsetree.module_expr + ) + + val modExprFunctor : Parsetree.module_expr -> ( + (Parsetree.attributes * string Asttypes.loc * Parsetree.module_type option) list * + Parsetree.module_expr + ) + + val splitGenTypeAttr : Parsetree.attributes -> (bool * Parsetree.attributes) + + val collectPatternsFromListConstruct: + Parsetree.pattern list -> Parsetree.pattern -> + (Parsetree.pattern list * Parsetree.pattern) + + val isBlockExpr : Parsetree.expression -> bool + + val isTemplateLiteral: Parsetree.expression -> bool + + val collectOrPatternChain: + Parsetree.pattern -> Parsetree.pattern list + + val processBracesAttr : Parsetree.expression -> (Parsetree.attribute option * Parsetree.expression) + + val filterParsingAttrs : Parsetree.attributes -> Parsetree.attributes + + val isBracedExpr : Parsetree.expression -> bool + + val isPipeExpr : Parsetree.expression -> bool + + val extractValueDescriptionFromModExpr: Parsetree.module_expr -> Parsetree.value_description list + + type jsImportScope = + | JsGlobalImport (* nothing *) + | JsModuleImport of string (* from "path" *) + | JsScopedImport of string list (* 
window.location *) + + val classifyJsImport: Parsetree.value_description -> jsImportScope + + (* (__x) => f(a, __x, c) -----> f(a, _, c) *) + val rewriteUnderscoreApply: Parsetree.expression -> Parsetree.expression + + (* (__x) => f(a, __x, c) -----> f(a, _, c) *) + val isUnderscoreApplySugar: Parsetree.expression -> bool +end = struct + open Parsetree + + let arrowType ct = + let rec process attrsBefore acc typ = match typ with + | {ptyp_desc = Ptyp_arrow (Nolabel as lbl, typ1, typ2); ptyp_attributes = []} -> + let arg = ([], lbl, typ1) in + process attrsBefore (arg::acc) typ2 + | {ptyp_desc = Ptyp_arrow (Nolabel as lbl, typ1, typ2); ptyp_attributes = [({txt ="bs"}, _) ] as attrs} -> + let arg = (attrs, lbl, typ1) in + process attrsBefore (arg::acc) typ2 + | {ptyp_desc = Ptyp_arrow (Nolabel, _typ1, _typ2); ptyp_attributes = _attrs} as returnType -> + let args = List.rev acc in + (attrsBefore, args, returnType) + | {ptyp_desc = Ptyp_arrow ((Labelled _ | Optional _) as lbl, typ1, typ2); ptyp_attributes = attrs} -> + let arg = (attrs, lbl, typ1) in + process attrsBefore (arg::acc) typ2 + | typ -> + (attrsBefore, List.rev acc, typ) + in + begin match ct with + | {ptyp_desc = Ptyp_arrow (Nolabel, _typ1, _typ2); ptyp_attributes = attrs} as typ -> + process attrs [] {typ with ptyp_attributes = []} + | typ -> process [] [] typ + end + + let functorType modtype = + let rec process acc modtype = match modtype with + | {pmty_desc = Pmty_functor (lbl, argType, returnType); pmty_attributes = attrs} -> + let arg = (attrs, lbl, argType) in + process (arg::acc) returnType + | modType -> + (List.rev acc, modType) + in + process [] modtype + + let processUncurriedAttribute attrs = + let rec process uncurriedSpotted acc attrs = + match attrs with + | [] -> (uncurriedSpotted, List.rev acc) + | ({Location.txt = "bs"}, _)::rest -> process true acc rest + | attr::rest -> process uncurriedSpotted (attr::acc) rest + in + process false [] attrs + + let collectIfExpressions expr = + let rec collect acc expr = match expr.pexp_desc with + | Pexp_ifthenelse (ifExpr, thenExpr, Some elseExpr) -> + collect ((ifExpr, thenExpr)::acc) elseExpr + | Pexp_ifthenelse (ifExpr, thenExpr, (None as elseExpr)) -> + let ifs = List.rev ((ifExpr, thenExpr)::acc) in + (ifs, elseExpr) + | _ -> + (List.rev acc, Some expr) + in + collect [] expr + + let collectListExpressions expr = + let rec collect acc expr = match expr.pexp_desc with + | Pexp_construct ({txt = Longident.Lident "[]"}, _) -> + (List.rev acc, None) + | Pexp_construct ( + {txt = Longident.Lident "::"}, + Some {pexp_desc = Pexp_tuple (hd::[tail])} + ) -> + collect (hd::acc) tail + | _ -> + (List.rev acc, Some expr) + in + collect [] expr + + (* (__x) => f(a, __x, c) -----> f(a, _, c) *) + let rewriteUnderscoreApply expr = + match expr.pexp_desc with + | Pexp_fun ( + Nolabel, + None, + {ppat_desc = Ppat_var {txt="__x"}}, + ({pexp_desc = Pexp_apply (callExpr, args)} as e) + ) -> + let newArgs = List.map (fun arg -> + match arg with + | ( + lbl, + ({pexp_desc = Pexp_ident ({txt = Longident.Lident "__x"} as lid)} as argExpr) + ) -> + (lbl, {argExpr with pexp_desc = Pexp_ident ({lid with txt = Longident.Lident "_"})}) + | arg -> arg + ) args in + {e with pexp_desc = Pexp_apply (callExpr, newArgs)} + | _ -> expr + + type funParamKind = + | Parameter of { + attrs: Parsetree.attributes; + lbl: Asttypes.arg_label; + defaultExpr: Parsetree.expression option; + pat: Parsetree.pattern; + } + | NewTypes of {attrs: Parsetree.attributes; locs: string Asttypes.loc list} + + let funExpr expr 
= + (* Turns (type t, type u, type z) into "type t u z" *) + let rec collectNewTypes acc returnExpr = + match returnExpr with + | {pexp_desc = Pexp_newtype (stringLoc, returnExpr); pexp_attributes = []} -> + collectNewTypes (stringLoc::acc) returnExpr + | returnExpr -> + (List.rev acc, returnExpr) + in + let rec collect attrsBefore acc expr = match expr with + | {pexp_desc = Pexp_fun ( + Nolabel, + None, + {ppat_desc = Ppat_var {txt="__x"}}, + {pexp_desc = Pexp_apply _} + )} -> + (attrsBefore, List.rev acc, rewriteUnderscoreApply expr) + | {pexp_desc = Pexp_fun (lbl, defaultExpr, pattern, returnExpr); pexp_attributes = []} -> + let parameter = Parameter { + attrs = []; + lbl = lbl; + defaultExpr = defaultExpr; + pat = pattern; + } in + collect attrsBefore (parameter::acc) returnExpr + | {pexp_desc = Pexp_newtype (stringLoc, rest); pexp_attributes = attrs} -> + let (stringLocs, returnExpr) = collectNewTypes [stringLoc] rest in + let param = NewTypes {attrs; locs = stringLocs} in + collect attrsBefore (param::acc) returnExpr + | {pexp_desc = Pexp_fun (lbl, defaultExpr, pattern, returnExpr); pexp_attributes = [({txt = "bs"}, _)] as attrs} -> + let parameter = Parameter { + attrs = attrs; + lbl = lbl; + defaultExpr = defaultExpr; + pat = pattern; + } in + collect attrsBefore (parameter::acc) returnExpr + | { + pexp_desc = Pexp_fun ((Labelled _ | Optional _) as lbl, defaultExpr, pattern, returnExpr); + pexp_attributes = attrs + } -> + let parameter = Parameter { + attrs = attrs; + lbl = lbl; + defaultExpr = defaultExpr; + pat = pattern; + } in + collect attrsBefore (parameter::acc) returnExpr + | expr -> + (attrsBefore, List.rev acc, expr) + in + begin match expr with + | {pexp_desc = Pexp_fun (Nolabel, _defaultExpr, _pattern, _returnExpr); pexp_attributes = attrs} as expr -> + collect attrs [] {expr with pexp_attributes = []} + | expr -> collect [] [] expr + end + + let processBracesAttr expr = + match expr.pexp_attributes with + | (({txt = "ns.braces"}, _) as attr)::attrs -> + (Some attr, {expr with pexp_attributes = attrs}) + | _ -> + (None, expr) + + let filterParsingAttrs attrs = + List.filter (fun attr -> + match attr with + | ({Location.txt = ("ns.ternary" | "ns.braces" | "bs" | "ns.namedArgLoc")}, _) -> false + | _ -> true + ) attrs + + let isBlockExpr expr = + match expr.pexp_desc with + | Pexp_letmodule _ + | Pexp_letexception _ + | Pexp_let _ + | Pexp_open _ + | Pexp_sequence _ -> true + | _ -> false + + let isBracedExpr expr = + match processBracesAttr expr with + | (Some _, _) -> true + | _ -> false + + let isHuggableExpression expr = + match expr.pexp_desc with + | Pexp_array _ + | Pexp_tuple _ + | Pexp_construct ({txt = Longident.Lident ("::" | "[]")}, _) + | Pexp_extension ({txt = "bs.obj"}, _) + | Pexp_record _ -> true + | _ when isBlockExpr expr -> true + | _ when isBracedExpr expr -> true + | _ -> false + + let isHuggableRhs expr = + match expr.pexp_desc with + | Pexp_array _ + | Pexp_tuple _ + | Pexp_construct ({txt = Longident.Lident ("::" | "[]")}, _) + | Pexp_extension ({txt = "bs.obj"}, _) + | Pexp_record _ -> true + | _ when isBracedExpr expr -> true + | _ -> false + + let isHuggablePattern pattern = + match pattern.ppat_desc with + | Ppat_array _ + | Ppat_tuple _ + | Ppat_record _ + | Ppat_construct _ -> true + | _ -> false + + let operatorPrecedence operator = match operator with + | ":=" -> 1 + | "||" -> 2 + | "&&" -> 3 + | "=" | "==" | "<" | ">" | "!=" | "<>" | "!==" | "<=" | ">=" | "|>" -> 4 + | "+" | "+." | "-" | "-." | "^" -> 5 + | "*" | "*." | "/" | "/." 
-> 6 + | "**" -> 7 + | "#" | "##" | "|." -> 8 + | _ -> 0 + + let isUnaryOperator operator = match operator with + | "~+" | "~+." | "~-" | "~-." | "not" -> true + | _ -> false + + let isUnaryExpression expr = match expr.pexp_desc with + | Pexp_apply( + {pexp_desc = Pexp_ident {txt = Longident.Lident operator}}, + [Nolabel, _arg] + ) when isUnaryOperator operator -> true + | _ -> false + + let isBinaryOperator operator = match operator with + | ":=" + | "||" + | "&&" + | "=" | "==" | "<" | ">" | "!=" | "!==" | "<=" | ">=" | "|>" + | "+" | "+." | "-" | "-." | "^" + | "*" | "*." | "/" | "/." + | "**" + | "|." | "<>" -> true + | _ -> false + + let isBinaryExpression expr = match expr.pexp_desc with + | Pexp_apply( + {pexp_desc = Pexp_ident {txt = Longident.Lident operator}}, + [(Nolabel, _operand1); (Nolabel, _operand2)] + ) when isBinaryOperator operator -> true + | _ -> false + + let isEqualityOperator operator = match operator with + | "=" | "==" | "<>" | "!=" -> true + | _ -> false + + let flattenableOperators parentOperator childOperator = + let precParent = operatorPrecedence parentOperator in + let precChild = operatorPrecedence childOperator in + if precParent == precChild then + not ( + isEqualityOperator parentOperator && + isEqualityOperator childOperator + ) + else + false + + let hasAttributes attrs = + List.exists (fun attr -> match attr with + | ({Location.txt = "bs" | "ns.ternary" | "ns.braces"}, _) -> false + | _ -> true + ) attrs + + let isArrayAccess expr = match expr.pexp_desc with + | Pexp_apply ( + {pexp_desc = Pexp_ident {txt = Longident.Ldot (Lident "Array", "get")}}, + [Nolabel, _parentExpr; Nolabel, _memberExpr] + ) -> true + | _ -> false + + let rec hasTernaryAttribute attrs = + match attrs with + | [] -> false + | ({Location.txt="ns.ternary"},_)::_ -> true + | _::attrs -> hasTernaryAttribute attrs + + let isTernaryExpr expr = match expr with + | { + pexp_attributes = attrs; + pexp_desc = Pexp_ifthenelse _ + } when hasTernaryAttribute attrs -> true + | _ -> false + + let collectTernaryParts expr = + let rec collect acc expr = match expr with + | { + pexp_attributes = attrs; + pexp_desc = Pexp_ifthenelse (condition, consequent, Some(alternate)) + } when hasTernaryAttribute attrs -> collect ((condition, consequent)::acc) alternate + | alternate -> (List.rev acc, alternate) + in + collect [] expr + + let parametersShouldHug parameters = match parameters with + | [Parameter { + attrs = []; + lbl = Asttypes.Nolabel; + defaultExpr = None; + pat = pat + }] when isHuggablePattern pat -> true + | _ -> false + + let filterTernaryAttributes attrs = + List.filter (fun attr -> match attr with + |({Location.txt="ns.ternary"},_) -> false + | _ -> true + ) attrs + + let isJsxExpression expr = + let rec loop attrs = + match attrs with + | [] -> false + | ({Location.txt = "JSX"}, _)::_ -> true + | _::attrs -> loop attrs + in + match expr.pexp_desc with + | Pexp_apply _ -> + loop expr.Parsetree.pexp_attributes + | _ -> false + + let hasJsxAttribute attributes = match attributes with + | ({Location.txt = "JSX"},_)::_ -> true + | _ -> false + + let shouldIndentBinaryExpr expr = + let samePrecedenceSubExpression operator subExpression = + match subExpression with + | {pexp_desc = Pexp_apply ( + {pexp_desc = Pexp_ident {txt = Longident.Lident subOperator}}, + [Nolabel, _lhs; Nolabel, _rhs] + )} when isBinaryOperator subOperator -> + flattenableOperators operator subOperator + | _ -> true + in + match expr with + | {pexp_desc = Pexp_apply ( + {pexp_desc = Pexp_ident {txt = Longident.Lident 
operator}}, + [Nolabel, lhs; Nolabel, _rhs] + )} when isBinaryOperator operator -> + isEqualityOperator operator || + not (samePrecedenceSubExpression operator lhs) || + operator = ":=" + | _ -> false + + let shouldInlineRhsBinaryExpr rhs = match rhs.pexp_desc with + | Parsetree.Pexp_constant _ + | Pexp_let _ + | Pexp_letmodule _ + | Pexp_letexception _ + | Pexp_sequence _ + | Pexp_open _ + | Pexp_ifthenelse _ + | Pexp_for _ + | Pexp_while _ + | Pexp_try _ + | Pexp_array _ + | Pexp_record _ -> true + | _ -> false + + let filterPrinteableAttributes attrs = + List.filter (fun attr -> match attr with + | ({Location.txt="bs" | "ns.ternary"}, _) -> false + | _ -> true + ) attrs + + let partitionPrinteableAttributes attrs = + List.partition (fun attr -> match attr with + | ({Location.txt="bs" | "ns.ternary"}, _) -> false + | _ -> true + ) attrs + + let requiresSpecialCallbackPrintingLastArg args = + let rec loop args = match args with + | [] -> false + | [(_, {pexp_desc = Pexp_fun _ | Pexp_newtype _})] -> true + | (_, {pexp_desc = Pexp_fun _ | Pexp_newtype _})::_ -> false + | _::rest -> loop rest + in + loop args + + let requiresSpecialCallbackPrintingFirstArg args = + let rec loop args = match args with + | [] -> true + | (_, {pexp_desc = Pexp_fun _ | Pexp_newtype _})::_ -> false + | _::rest -> loop rest + in + match args with + | [(_, {pexp_desc = Pexp_fun _ | Pexp_newtype _})] -> false + | (_, {pexp_desc = Pexp_fun _ | Pexp_newtype _})::rest -> loop rest + | _ -> false + + let modExprApply modExpr = + let rec loop acc modExpr = match modExpr with + | {pmod_desc = Pmod_apply (next, arg)} -> + loop (arg::acc) next + | _ -> (acc, modExpr) + in + loop [] modExpr + + let modExprFunctor modExpr = + let rec loop acc modExpr = match modExpr with + | {pmod_desc = Pmod_functor (lbl, modType, returnModExpr); pmod_attributes = attrs} -> + let param = (attrs, lbl, modType) in + loop (param::acc) returnModExpr + | returnModExpr -> + (List.rev acc, returnModExpr) + in + loop [] modExpr + + let splitGenTypeAttr attrs = + match attrs with + | ({Location.txt = "genType"}, _)::attrs -> (true, attrs) + | attrs -> (false, attrs) + + let rec collectPatternsFromListConstruct acc pattern = + let open Parsetree in + match pattern.ppat_desc with + | Ppat_construct( + {txt = Longident.Lident "::"}, + Some {ppat_desc=Ppat_tuple (pat::rest::[])} + ) -> + collectPatternsFromListConstruct (pat::acc) rest + | _ -> List.rev acc, pattern + + let rec isTemplateLiteral expr = + let isPexpConstantString expr = match expr.pexp_desc with + | Pexp_constant (Pconst_string (_, Some _)) -> true + | _ -> false + in + match expr.pexp_desc with + | Pexp_apply ( + {pexp_desc = Pexp_ident {txt = Longident.Lident "^"}}, + [Nolabel, arg1; Nolabel, arg2] + ) when not (isPexpConstantString arg1 && isPexpConstantString arg2) -> + isTemplateLiteral arg1 || isTemplateLiteral arg2 + | Pexp_constant (Pconst_string (_, Some _)) -> true + | _ -> false + + (* Blue | Red | Green -> [Blue; Red; Green] *) + let collectOrPatternChain pat = + let rec loop pattern chain = + match pattern.ppat_desc with + | Ppat_or (left, right) -> loop left (right::chain) + | _ -> pattern::chain + in + loop pat [] + + let isPipeExpr expr = match expr.pexp_desc with + | Pexp_apply( + {pexp_desc = Pexp_ident {txt = Longident.Lident ("|." 
| "|>") }}, + [(Nolabel, _operand1); (Nolabel, _operand2)] + ) -> true + | _ -> false + + let extractValueDescriptionFromModExpr modExpr = + let rec loop structure acc = + match structure with + | [] -> List.rev acc + | structureItem::structure -> + begin match structureItem.Parsetree.pstr_desc with + | Pstr_primitive vd -> loop structure (vd::acc) + | _ -> loop structure acc + end + in + match modExpr.pmod_desc with + | Pmod_structure structure -> loop structure [] + | _ -> [] + + type jsImportScope = + | JsGlobalImport (* nothing *) + | JsModuleImport of string (* from "path" *) + | JsScopedImport of string list (* window.location *) + + let classifyJsImport valueDescription = + let rec loop attrs = + let open Parsetree in + match attrs with + | [] -> JsGlobalImport + | ({Location.txt = "bs.scope"}, PStr [{pstr_desc = Pstr_eval ({pexp_desc = Pexp_constant (Pconst_string (s, _))}, _)}])::_ -> + JsScopedImport [s] + | ({Location.txt = "genType.import"}, PStr [{pstr_desc = Pstr_eval ({pexp_desc = Pexp_constant (Pconst_string (s, _))}, _)}])::_ -> + JsModuleImport s + | ({Location.txt = "bs.scope"}, PStr [{pstr_desc = Pstr_eval ({pexp_desc = Pexp_tuple exprs}, _)}])::_ -> + let scopes = List.fold_left (fun acc curr -> + match curr.Parsetree.pexp_desc with + | Pexp_constant (Pconst_string (s, _)) -> s::acc + | _ -> acc + ) [] exprs + in + JsScopedImport (List.rev scopes) + | _::attrs -> + loop attrs + in + loop valueDescription.pval_attributes + + let isUnderscoreApplySugar expr = + match expr.pexp_desc with + | Pexp_fun ( + Nolabel, + None, + {ppat_desc = Ppat_var {txt="__x"}}, + {pexp_desc = Pexp_apply _} + ) -> true + | _ -> false +end + +module Parens: sig + type kind = Parenthesized | Braced of Location.t | Nothing + + val expr: Parsetree.expression -> kind + val structureExpr: Parsetree.expression -> kind + + val unaryExprOperand: Parsetree.expression -> kind + + val binaryExprOperand: isLhs:bool -> Parsetree.expression -> kind + val subBinaryExprOperand: string -> string -> bool + val rhsBinaryExprOperand: string -> Parsetree.expression -> bool + val flattenOperandRhs: string -> Parsetree.expression -> bool + + val lazyOrAssertExprRhs: Parsetree.expression -> kind + + val fieldExpr: Parsetree.expression -> kind + + val setFieldExprRhs: Parsetree.expression -> kind + + val ternaryOperand: Parsetree.expression -> kind + + val jsxPropExpr: Parsetree.expression -> kind + val jsxChildExpr: Parsetree.expression -> kind + + val binaryExpr: Parsetree.expression -> kind + val modTypeFunctorReturn: Parsetree.module_type -> bool + val modTypeWithOperand: Parsetree.module_type -> bool + val modExprFunctorConstraint: Parsetree.module_type -> bool + + val bracedExpr: Parsetree.expression -> bool + val callExpr: Parsetree.expression -> kind + + val includeModExpr : Parsetree.module_expr -> bool +end = struct + type kind = Parenthesized | Braced of Location.t | Nothing + + let expr expr = + let optBraces, _ = ParsetreeViewer.processBracesAttr expr in + match optBraces with + | Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc) + | _ -> + begin match expr with + | {Parsetree.pexp_desc = Pexp_constraint ( + {pexp_desc = Pexp_pack _}, + {ptyp_desc = Ptyp_package _} + )} -> Nothing + | {pexp_desc = Pexp_constraint _ } -> Parenthesized + | _ -> Nothing + end + + let callExpr expr = + let optBraces, _ = ParsetreeViewer.processBracesAttr expr in + match optBraces with + | Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc) + | _ -> + begin match expr with + | {Parsetree.pexp_attributes = attrs} 
when + begin match ParsetreeViewer.filterParsingAttrs attrs with + | _::_ -> true + | [] -> false + end + -> Parenthesized + | _ when ParsetreeViewer.isUnaryExpression expr || ParsetreeViewer.isBinaryExpression expr -> Parenthesized + | {Parsetree.pexp_desc = Pexp_constraint ( + {pexp_desc = Pexp_pack _}, + {ptyp_desc = Ptyp_package _} + )} -> Nothing + | {pexp_desc = Pexp_fun _} + when ParsetreeViewer.isUnderscoreApplySugar expr -> Nothing + | {pexp_desc = + Pexp_lazy _ + | Pexp_assert _ + | Pexp_fun _ + | Pexp_newtype _ + | Pexp_function _ + | Pexp_constraint _ + | Pexp_setfield _ + | Pexp_match _ + | Pexp_try _ + | Pexp_while _ + | Pexp_for _ + | Pexp_ifthenelse _ + } -> Parenthesized + | _ -> Nothing + end + + let structureExpr expr = + let optBraces, _ = ParsetreeViewer.processBracesAttr expr in + match optBraces with + | Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc) + | None -> + begin match expr with + | _ when ParsetreeViewer.hasAttributes expr.pexp_attributes && + not (ParsetreeViewer.isJsxExpression expr) -> Parenthesized + | {Parsetree.pexp_desc = Pexp_constraint ( + {pexp_desc = Pexp_pack _}, + {ptyp_desc = Ptyp_package _} + )} -> Nothing + | {pexp_desc = Pexp_constraint _ } -> Parenthesized + | _ -> Nothing + end + + let unaryExprOperand expr = + let optBraces, _ = ParsetreeViewer.processBracesAttr expr in + match optBraces with + | Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc) + | None -> + begin match expr with + | {Parsetree.pexp_attributes = attrs} when + begin match ParsetreeViewer.filterParsingAttrs attrs with + | _::_ -> true + | [] -> false + end + -> Parenthesized + | expr when + ParsetreeViewer.isUnaryExpression expr || + ParsetreeViewer.isBinaryExpression expr + -> Parenthesized + | {pexp_desc = Pexp_constraint ( + {pexp_desc = Pexp_pack _}, + {ptyp_desc = Ptyp_package _} + )} -> Nothing + | {pexp_desc = Pexp_fun _} + when ParsetreeViewer.isUnderscoreApplySugar expr -> Nothing + | {pexp_desc = + Pexp_lazy _ + | Pexp_assert _ + | Pexp_fun _ + | Pexp_newtype _ + | Pexp_function _ + | Pexp_constraint _ + | Pexp_setfield _ + | Pexp_extension _ (* readability? 
maybe remove *) + | Pexp_match _ + | Pexp_try _ + | Pexp_while _ + | Pexp_for _ + | Pexp_ifthenelse _ + } -> Parenthesized + | _ -> Nothing + end + + let binaryExprOperand ~isLhs expr = + let optBraces, _ = ParsetreeViewer.processBracesAttr expr in + match optBraces with + | Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc) + | None -> + begin match expr with + | {Parsetree.pexp_desc = Pexp_constraint ( + {pexp_desc = Pexp_pack _}, + {ptyp_desc = Ptyp_package _} + )} -> Nothing + | {pexp_desc = Pexp_fun _} + when ParsetreeViewer.isUnderscoreApplySugar expr -> Nothing + | {pexp_desc = Pexp_constraint _ | Pexp_fun _ | Pexp_function _ | Pexp_newtype _} -> Parenthesized + | expr when ParsetreeViewer.isBinaryExpression expr -> Parenthesized + | expr when ParsetreeViewer.isTernaryExpr expr -> Parenthesized + | {pexp_desc = + Pexp_lazy _ + | Pexp_assert _ + } when isLhs -> Parenthesized + | _ -> Nothing + end + + let subBinaryExprOperand parentOperator childOperator = + let precParent = ParsetreeViewer.operatorPrecedence parentOperator in + let precChild = ParsetreeViewer.operatorPrecedence childOperator in + precParent > precChild || + (precParent == precChild && + not (ParsetreeViewer.flattenableOperators parentOperator childOperator)) || + (* a && b || c, add parens to (a && b) for readability, who knows the difference by heart… *) + (parentOperator = "||" && childOperator = "&&") + + let rhsBinaryExprOperand parentOperator rhs = + match rhs.Parsetree.pexp_desc with + | Parsetree.Pexp_apply( + {pexp_attributes = []; + pexp_desc = Pexp_ident {txt = Longident.Lident operator}}, + [_, _left; _, _right] + ) when ParsetreeViewer.isBinaryOperator operator -> + let precParent = ParsetreeViewer.operatorPrecedence parentOperator in + let precChild = ParsetreeViewer.operatorPrecedence operator in + precParent == precChild + | _ -> false + + let flattenOperandRhs parentOperator rhs = + match rhs.Parsetree.pexp_desc with + | Parsetree.Pexp_apply( + {pexp_desc = Pexp_ident {txt = Longident.Lident operator}}, + [_, _left; _, _right] + ) when ParsetreeViewer.isBinaryOperator operator -> + let precParent = ParsetreeViewer.operatorPrecedence parentOperator in + let precChild = ParsetreeViewer.operatorPrecedence operator in + precParent >= precChild || rhs.pexp_attributes <> [] + | Pexp_constraint ( + {pexp_desc = Pexp_pack _}, + {ptyp_desc = Ptyp_package _} + ) -> false + | Pexp_fun _ when ParsetreeViewer.isUnderscoreApplySugar rhs -> false + | Pexp_fun _ + | Pexp_newtype _ + | Pexp_setfield _ + | Pexp_constraint _ -> true + | _ when ParsetreeViewer.isTernaryExpr rhs -> true + | _ -> false + + let lazyOrAssertExprRhs expr = + let optBraces, _ = ParsetreeViewer.processBracesAttr expr in + match optBraces with + | Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc) + | None -> + begin match expr with + | {Parsetree.pexp_attributes = attrs} when + begin match ParsetreeViewer.filterParsingAttrs attrs with + | _::_ -> true + | [] -> false + end + -> Parenthesized + | expr when ParsetreeViewer.isBinaryExpression expr -> Parenthesized + | {pexp_desc = Pexp_constraint ( + {pexp_desc = Pexp_pack _}, + {ptyp_desc = Ptyp_package _} + )} -> Nothing + | {pexp_desc = Pexp_fun _} + when ParsetreeViewer.isUnderscoreApplySugar expr -> Nothing + | {pexp_desc = + Pexp_lazy _ + | Pexp_assert _ + | Pexp_fun _ + | Pexp_newtype _ + | Pexp_function _ + | Pexp_constraint _ + | Pexp_setfield _ + | Pexp_match _ + | Pexp_try _ + | Pexp_while _ + | Pexp_for _ + | Pexp_ifthenelse _ + } -> Parenthesized + | _ -> Nothing + end 
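(* Aside (illustration only, not part of the vendored file above): a
   simplified, standalone sketch of how the readability rule in
   subBinaryExprOperand plays out for `a && b || c`. The helper names
   `precedence` and `needs_parens` and the numeric levels are assumptions
   made for this sketch; it keeps only two of the three conditions the
   real function checks (the equal-precedence/flattenable case is omitted). *)
let precedence = function
  | "||" -> 2
  | "&&" -> 3
  | _ -> 0

(* Wrap a child operand when it binds looser than its parent operator, or
   when `&&` appears directly under `||`, purely for readability. *)
let needs_parens ~parent ~child =
  precedence parent > precedence child || (parent = "||" && child = "&&")

let () =
  (* `a && b || c`: the `&&` sub-expression is an operand of `||`,
     so the printer would emit `(a && b) || c`. *)
  assert (needs_parens ~parent:"||" ~child:"&&");
  (* `a || b || c`: same operator at both levels, no extra parentheses. *)
  assert (not (needs_parens ~parent:"||" ~child:"||"))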
+ + let isNegativeConstant constant = + let isNeg txt = + let len = String.length txt in + len > 0 && (String.get [@doesNotRaise]) txt 0 = '-' + in + match constant with + | Parsetree.Pconst_integer (i, _) | Pconst_float (i, _) when isNeg i -> true + | _ -> false + + let fieldExpr expr = + let optBraces, _ = ParsetreeViewer.processBracesAttr expr in + match optBraces with + | Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc) + | None -> + begin match expr with + | {Parsetree.pexp_attributes = attrs} when + begin match ParsetreeViewer.filterParsingAttrs attrs with + | _::_ -> true + | [] -> false + end + -> Parenthesized + | expr when + ParsetreeViewer.isBinaryExpression expr || + ParsetreeViewer.isUnaryExpression expr + -> Parenthesized + | {pexp_desc = Pexp_constraint ( + {pexp_desc = Pexp_pack _}, + {ptyp_desc = Ptyp_package _} + )} -> Nothing + | {pexp_desc = Pexp_constant c } when isNegativeConstant c -> Parenthesized + | {pexp_desc = Pexp_fun _} + when ParsetreeViewer.isUnderscoreApplySugar expr -> Nothing + | {pexp_desc = + Pexp_lazy _ + | Pexp_assert _ + | Pexp_extension _ (* %extension.x vs (%extension).x *) + | Pexp_fun _ + | Pexp_newtype _ + | Pexp_function _ + | Pexp_constraint _ + | Pexp_setfield _ + | Pexp_match _ + | Pexp_try _ + | Pexp_while _ + | Pexp_for _ + | Pexp_ifthenelse _ + } -> Parenthesized + | _ -> Nothing + end + + let setFieldExprRhs expr = + let optBraces, _ = ParsetreeViewer.processBracesAttr expr in + match optBraces with + | Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc) + | None -> + begin match expr with + | {Parsetree.pexp_desc = Pexp_constraint ( + {pexp_desc = Pexp_pack _}, + {ptyp_desc = Ptyp_package _} + )} -> Nothing + | {pexp_desc = Pexp_constraint _ } -> Parenthesized + | _ -> Nothing + end + + let ternaryOperand expr = + let optBraces, _ = ParsetreeViewer.processBracesAttr expr in + match optBraces with + | Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc) + | None -> + begin match expr with + | {Parsetree.pexp_desc = Pexp_constraint ( + {pexp_desc = Pexp_pack _}, + {ptyp_desc = Ptyp_package _} + )} -> Nothing + | {pexp_desc = Pexp_constraint _ } -> Parenthesized + | {pexp_desc = Pexp_fun _ | Pexp_newtype _} -> + let (_attrsOnArrow, _parameters, returnExpr) = ParsetreeViewer.funExpr expr in + begin match returnExpr.pexp_desc with + | Pexp_constraint _ -> Parenthesized + | _ -> Nothing + end + | _ -> Nothing + end + + let startsWithMinus txt = + let len = String.length txt in + if len == 0 then + false + else + let s = (String.get [@doesNotRaise]) txt 0 in + s = '-' + + let jsxPropExpr expr = + match expr.Parsetree.pexp_desc with + | Parsetree.Pexp_let _ + | Pexp_sequence _ + | Pexp_letexception _ + | Pexp_letmodule _ + | Pexp_open _ -> Nothing + | _ -> + let optBraces, _ = ParsetreeViewer.processBracesAttr expr in + begin match optBraces with + | Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc) + | None -> + begin match expr with + | {Parsetree.pexp_desc = + Pexp_constant (Pconst_integer (x, _) | Pconst_float (x, _)); + pexp_attributes = []} + when startsWithMinus x -> Parenthesized + | {Parsetree.pexp_desc = + Pexp_ident _ | Pexp_constant _ | Pexp_field _ | Pexp_construct _ | Pexp_variant _ | + Pexp_array _ | Pexp_pack _ | Pexp_record _ | Pexp_extension _ | + Pexp_letmodule _ | Pexp_letexception _ | Pexp_open _ | Pexp_sequence _ | + Pexp_let _ | Pexp_tuple _; + pexp_attributes = [] + } -> Nothing + | {Parsetree.pexp_desc = Pexp_constraint ( + {pexp_desc = Pexp_pack _}, + {ptyp_desc = Ptyp_package _} + ); 
pexp_attributes = []} -> Nothing + | _ -> Parenthesized + end + end + + let jsxChildExpr expr = + match expr.Parsetree.pexp_desc with + | Parsetree.Pexp_let _ + | Pexp_sequence _ + | Pexp_letexception _ + | Pexp_letmodule _ + | Pexp_open _ -> Nothing + | _ -> + let optBraces, _ = ParsetreeViewer.processBracesAttr expr in + begin match optBraces with + | Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc) + | _ -> + begin match expr with + | {Parsetree.pexp_desc = Pexp_constant (Pconst_integer (x, _) | Pconst_float (x, _)); + pexp_attributes = [] + } when startsWithMinus x -> Parenthesized + | {Parsetree.pexp_desc = + Pexp_ident _ | Pexp_constant _ | Pexp_field _ | Pexp_construct _ | Pexp_variant _ | + Pexp_array _ | Pexp_pack _ | Pexp_record _ | Pexp_extension _ | + Pexp_letmodule _ | Pexp_letexception _ | Pexp_open _ | Pexp_sequence _ | + Pexp_let _; + pexp_attributes = [] + } -> Nothing + | {Parsetree.pexp_desc = Pexp_constraint ( + {pexp_desc = Pexp_pack _}, + {ptyp_desc = Ptyp_package _} + ); pexp_attributes = []} -> Nothing + | expr when ParsetreeViewer.isJsxExpression expr -> Nothing + | _ -> Parenthesized + end + end + + let binaryExpr expr = + let optBraces, _ = ParsetreeViewer.processBracesAttr expr in + match optBraces with + | Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc) + | None -> + begin match expr with + | {Parsetree.pexp_attributes = _::_} as expr + when ParsetreeViewer.isBinaryExpression expr -> Parenthesized + | _ -> Nothing + end + + let modTypeFunctorReturn modType = match modType with + | {Parsetree.pmty_desc = Pmty_with _} -> true + | _ -> false + + (* Add parens for readability: + module type Functor = SetLike => Set with type t = A.t + This is actually: + module type Functor = (SetLike => Set) with type t = A.t + *) + let modTypeWithOperand modType = match modType with + | {Parsetree.pmty_desc = Pmty_functor _} -> true + | _ -> false + + let modExprFunctorConstraint modType = match modType with + | {Parsetree.pmty_desc = Pmty_functor _ | Pmty_with _} -> true + | _ -> false + + let bracedExpr expr = match expr.Parsetree.pexp_desc with + | Pexp_constraint ( + {pexp_desc = Pexp_pack _}, + {ptyp_desc = Ptyp_package _} + ) -> false + | Pexp_constraint _ -> true + | _ -> false + + let includeModExpr modExpr = match modExpr.Parsetree.pmod_desc with + | Parsetree.Pmod_constraint _ -> true + | _ -> false +end + +module CommentTable = struct + type t = { + leading: (Location.t, Comment.t list) Hashtbl.t; + inside: (Location.t, Comment.t list) Hashtbl.t; + trailing: (Location.t, Comment.t list) Hashtbl.t; + } + + let make () = { + leading = Hashtbl.create 100; + inside = Hashtbl.create 100; + trailing = Hashtbl.create 100; + } + + let empty = make () + + let log t = + let open Location in + let leadingStuff = Hashtbl.fold (fun (k : Location.t) (v : Comment.t list) acc -> + let loc = Doc.concat [ + Doc.lbracket; + Doc.text (string_of_int k.loc_start.pos_lnum); + Doc.text ":"; + Doc.text (string_of_int (k.loc_start.pos_cnum - k.loc_start.pos_bol)); + Doc.text "-"; + Doc.text (string_of_int k.loc_end.pos_lnum); + Doc.text ":"; + Doc.text (string_of_int (k.loc_end.pos_cnum - k.loc_end.pos_bol)); + Doc.rbracket; + ] in + let doc = Doc.breakableGroup ~forceBreak:true ( + Doc.concat [ + loc; + Doc.indent ( + Doc.concat [ + Doc.line; + Doc.join ~sep:Doc.comma (List.map (fun c -> Doc.text (Comment.txt c)) v) + ] + ); + Doc.line; + ] + ) in + doc::acc + ) t.leading [] + in + let trailingStuff = Hashtbl.fold (fun (k : Location.t) (v : Comment.t list) acc -> + let loc = 
Doc.concat [ + Doc.lbracket; + Doc.text (string_of_int k.loc_start.pos_lnum); + Doc.text ":"; + Doc.text (string_of_int (k.loc_start.pos_cnum - k.loc_start.pos_bol)); + Doc.text "-"; + Doc.text (string_of_int k.loc_end.pos_lnum); + Doc.text ":"; + Doc.text (string_of_int (k.loc_end.pos_cnum - k.loc_end.pos_bol)); + Doc.rbracket; + ] in + let doc = Doc.breakableGroup ~forceBreak:true ( + Doc.concat [ + loc; + Doc.indent ( + Doc.concat [ + Doc.line; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) (List.map (fun c -> Doc.text (Comment.txt c)) v) + ] + ); + Doc.line; + ] + ) in + doc::acc + ) t.trailing [] + in + Doc.breakableGroup ~forceBreak:true ( + Doc.concat [ + Doc.text "leading comments:"; + Doc.line; + Doc.indent (Doc.concat leadingStuff); + Doc.line; + Doc.line; + Doc.text "trailing comments:"; + Doc.indent (Doc.concat trailingStuff); + Doc.line; + Doc.line; + ] + ) |> Doc.toString ~width:80 |> print_endline + [@@live] + let attach tbl loc comments = + match comments with + | [] -> () + | comments -> Hashtbl.replace tbl loc comments + + let partitionByLoc comments loc = + let rec loop (leading, inside, trailing) comments = + let open Location in + match comments with + | comment::rest -> + let cmtLoc = Comment.loc comment in + if cmtLoc.loc_end.pos_cnum <= loc.loc_start.pos_cnum then + loop (comment::leading, inside, trailing) rest + else if cmtLoc.loc_start.pos_cnum >= loc.loc_end.pos_cnum then + loop (leading, inside, comment::trailing) rest + else + loop (leading, comment::inside, trailing) rest + | [] -> (List.rev leading, List.rev inside, List.rev trailing) + in + loop ([], [], []) comments + + let partitionLeadingTrailing comments loc = + let rec loop (leading, trailing) comments = + let open Location in + match comments with + | comment::rest -> + let cmtLoc = Comment.loc comment in + if cmtLoc.loc_end.pos_cnum <= loc.loc_start.pos_cnum then + loop (comment::leading, trailing) rest + else + loop (leading, comment::trailing) rest + | [] -> (List.rev leading, List.rev trailing) + in + loop ([], []) comments + + let partitionByOnSameLine loc comments = + let rec loop (onSameLine, onOtherLine) comments = + let open Location in + match comments with + | [] -> (List.rev onSameLine, List.rev onOtherLine) + | comment::rest -> + let cmtLoc = Comment.loc comment in + if cmtLoc.loc_start.pos_lnum == loc.loc_end.pos_lnum then + loop (comment::onSameLine, onOtherLine) rest + else + loop (onSameLine, comment::onOtherLine) rest + in + loop ([], []) comments + + let partitionAdjacentTrailing loc1 comments = + let open Location in + let open Lexing in + let rec loop ~prevEndPos afterLoc1 comments = + match comments with + | [] -> (List.rev afterLoc1, []) + | (comment::rest) as comments -> + let cmtPrevEndPos = Comment.prevTokEndPos comment in + if prevEndPos.Lexing.pos_cnum == cmtPrevEndPos.pos_cnum then + let commentEnd = (Comment.loc comment).loc_end in + loop ~prevEndPos:commentEnd (comment::afterLoc1) rest + else + (List.rev afterLoc1, comments) + in + loop ~prevEndPos:loc1.loc_end [] comments + + let rec collectListPatterns acc pattern = + let open Parsetree in + match pattern.ppat_desc with + | Ppat_construct( + {txt = Longident.Lident "::"}, + Some {ppat_desc=Ppat_tuple (pat::rest::[])} + ) -> + collectListPatterns (pat::acc) rest + | Ppat_construct ({txt = Longident.Lident "[]"}, None) -> + List.rev acc + | _ -> List.rev (pattern::acc) + + let rec collectListExprs acc expr = + let open Parsetree in + match expr.pexp_desc with + | Pexp_construct( + {txt = Longident.Lident "::"}, + 
Some {pexp_desc=Pexp_tuple (expr::rest::[])} + ) -> + collectListExprs (expr::acc) rest + | Pexp_construct ({txt = Longident.Lident "[]"}, _) -> + List.rev acc + | _ -> List.rev (expr::acc) + + (* TODO: use ParsetreeViewer *) + let arrowType ct = + let open Parsetree in + let rec process attrsBefore acc typ = match typ with + | {ptyp_desc = Ptyp_arrow (Nolabel as lbl, typ1, typ2); ptyp_attributes = []} -> + let arg = ([], lbl, typ1) in + process attrsBefore (arg::acc) typ2 + | {ptyp_desc = Ptyp_arrow (Nolabel as lbl, typ1, typ2); ptyp_attributes = [({txt ="bs"}, _) ] as attrs} -> + let arg = (attrs, lbl, typ1) in + process attrsBefore (arg::acc) typ2 + | {ptyp_desc = Ptyp_arrow (Nolabel, _typ1, _typ2); ptyp_attributes = _attrs} as returnType -> + let args = List.rev acc in + (attrsBefore, args, returnType) + | {ptyp_desc = Ptyp_arrow ((Labelled _ | Optional _) as lbl, typ1, typ2); ptyp_attributes = attrs} -> + let arg = (attrs, lbl, typ1) in + process attrsBefore (arg::acc) typ2 + | typ -> + (attrsBefore, List.rev acc, typ) + in + begin match ct with + | {ptyp_desc = Ptyp_arrow (Nolabel, _typ1, _typ2); ptyp_attributes = attrs} as typ -> + process attrs [] {typ with ptyp_attributes = []} + | typ -> process [] [] typ + end + + (* TODO: avoiding the dependency on ParsetreeViewer here, is this a good idea? *) + let modExprApply modExpr = + let rec loop acc modExpr = match modExpr with + | {Parsetree.pmod_desc = Pmod_apply (next, arg)} -> + loop (arg::acc) next + | _ -> (modExpr::acc) + in + loop [] modExpr + + (* TODO: avoiding the dependency on ParsetreeViewer here, is this a good idea? *) + let modExprFunctor modExpr = + let rec loop acc modExpr = match modExpr with + | {Parsetree.pmod_desc = Pmod_functor (lbl, modType, returnModExpr); pmod_attributes = attrs} -> + let param = (attrs, lbl, modType) in + loop (param::acc) returnModExpr + | returnModExpr -> + (List.rev acc, returnModExpr) + in + loop [] modExpr + + let functorType modtype = + let rec process acc modtype = match modtype with + | {Parsetree.pmty_desc = Pmty_functor (lbl, argType, returnType); pmty_attributes = attrs} -> + let arg = (attrs, lbl, argType) in + process (arg::acc) returnType + | modType -> + (List.rev acc, modType) + in + process [] modtype + + let funExpr expr = + let open Parsetree in + (* Turns (type t, type u, type z) into "type t u z" *) + let rec collectNewTypes acc returnExpr = + match returnExpr with + | {pexp_desc = Pexp_newtype (stringLoc, returnExpr); pexp_attributes = []} -> + collectNewTypes (stringLoc::acc) returnExpr + | returnExpr -> + let loc = match (acc, List.rev acc) with + | (_startLoc::_, endLoc::_) -> { endLoc.loc with loc_end = endLoc.loc.loc_end } + | _ -> Location.none + in + let txt = List.fold_right (fun curr acc -> acc ^ " " ^ curr.Location.txt) acc "type" in + (Location.mkloc txt loc, returnExpr) + in + (* For simplicity reason Pexp_newtype gets converted to a Nolabel parameter, + * otherwise this function would need to return a variant: + * | NormalParamater(...) + * | NewType(...) + * This complicates printing with an extra variant/boxing/allocation for a code-path + * that is not often used. 
Lets just keep it simple for now *) + let rec collect attrsBefore acc expr = match expr with + | {pexp_desc = Pexp_fun (lbl, defaultExpr, pattern, returnExpr); pexp_attributes = []} -> + let parameter = ([], lbl, defaultExpr, pattern) in + collect attrsBefore (parameter::acc) returnExpr + | {pexp_desc = Pexp_newtype (stringLoc, rest); pexp_attributes = attrs} -> + let (var, returnExpr) = collectNewTypes [stringLoc] rest in + let parameter = ( + attrs, + Asttypes.Nolabel, + None, + Ast_helper.Pat.var ~loc:stringLoc.loc var + ) in + collect attrsBefore (parameter::acc) returnExpr + | {pexp_desc = Pexp_fun (lbl, defaultExpr, pattern, returnExpr); pexp_attributes = [({txt = "bs"}, _)] as attrs} -> + let parameter = (attrs, lbl, defaultExpr, pattern) in + collect attrsBefore (parameter::acc) returnExpr + | { + pexp_desc = Pexp_fun ((Labelled _ | Optional _) as lbl, defaultExpr, pattern, returnExpr); + pexp_attributes = attrs + } -> + let parameter = (attrs, lbl, defaultExpr, pattern) in + collect attrsBefore (parameter::acc) returnExpr + | expr -> + (attrsBefore, List.rev acc, expr) + in + begin match expr with + | {pexp_desc = Pexp_fun (Nolabel, _defaultExpr, _pattern, _returnExpr); pexp_attributes = attrs} as expr -> + collect attrs [] {expr with pexp_attributes = []} + | expr -> collect [] [] expr + end + + let rec isBlockExpr expr = + let open Parsetree in + match expr.pexp_desc with + | Pexp_letmodule _ + | Pexp_letexception _ + | Pexp_let _ + | Pexp_open _ + | Pexp_sequence _ -> true + | Pexp_apply (callExpr, _) when isBlockExpr callExpr -> true + | Pexp_constraint (expr, _) when isBlockExpr expr -> true + | Pexp_field (expr, _) when isBlockExpr expr -> true + | Pexp_setfield (expr, _, _) when isBlockExpr expr -> true + | _ -> false + + let rec walkStructure s t comments = + match s with + | _ when comments = [] -> () + | [] -> attach t.inside Location.none comments + | s -> + walkList + ~getLoc:(fun n -> n.Parsetree.pstr_loc) + ~walkNode:walkStructureItem + s + t + comments + + and walkStructureItem si t comments = + match si.Parsetree.pstr_desc with + | _ when comments = [] -> () + | Pstr_primitive valueDescription -> + walkValueDescription valueDescription t comments + | Pstr_open openDescription -> + walkOpenDescription openDescription t comments + | Pstr_value (_, valueBindings) -> + walkValueBindings valueBindings t comments + | Pstr_type (_, typeDeclarations) -> + walkTypeDeclarations typeDeclarations t comments + | Pstr_eval (expr, _) -> + walkExpr expr t comments + | Pstr_module moduleBinding -> + walkModuleBinding moduleBinding t comments + | Pstr_recmodule moduleBindings -> + walkList + ~getLoc:(fun mb -> mb.Parsetree.pmb_loc) + ~walkNode:walkModuleBinding + moduleBindings + t + comments + | Pstr_modtype modTypDecl -> + walkModuleTypeDeclaration modTypDecl t comments + | Pstr_attribute attribute -> + walkAttribute attribute t comments + | Pstr_extension (extension, _) -> + walkExtension extension t comments + | Pstr_include includeDeclaration -> + walkIncludeDeclaration includeDeclaration t comments + | Pstr_exception extensionConstructor -> + walkExtConstr extensionConstructor t comments + | Pstr_typext typeExtension -> + walkTypeExtension typeExtension t comments + | Pstr_class_type _ | Pstr_class _ -> () + + and walkValueDescription vd t comments = + let (leading, trailing) = + partitionLeadingTrailing comments vd.pval_name.loc in + attach t.leading vd.pval_name.loc leading; + let (afterName, rest) = + partitionAdjacentTrailing vd.pval_name.loc trailing in + attach 
t.trailing vd.pval_name.loc afterName; + let (before, inside, after) = + partitionByLoc rest vd.pval_type.ptyp_loc + in + attach t.leading vd.pval_type.ptyp_loc before; + walkTypExpr vd.pval_type t inside; + attach t.trailing vd.pval_type.ptyp_loc after + + and walkTypeExtension te t comments = + let (leading, trailing) = + partitionLeadingTrailing comments te.ptyext_path.loc in + attach t.leading te.ptyext_path.loc leading; + let (afterPath, rest) = + partitionAdjacentTrailing te.ptyext_path.loc trailing in + attach t.trailing te.ptyext_path.loc afterPath; + + (* type params *) + let rest = match te.ptyext_params with + | [] -> rest + | typeParams -> + visitListButContinueWithRemainingComments + ~getLoc:(fun (typexpr, _variance) -> typexpr.Parsetree.ptyp_loc) + ~walkNode:walkTypeParam + ~newlineDelimited:false + typeParams + t + rest + in + walkList + ~getLoc:(fun n -> n.Parsetree.pext_loc) + ~walkNode:walkExtConstr + te.ptyext_constructors + t + rest + + and walkIncludeDeclaration inclDecl t comments = + let (before, inside, after) = + partitionByLoc comments inclDecl.pincl_mod.pmod_loc in + attach t.leading inclDecl.pincl_mod.pmod_loc before; + walkModExpr inclDecl.pincl_mod t inside; + attach t.trailing inclDecl.pincl_mod.pmod_loc after + + and walkModuleTypeDeclaration mtd t comments = + let (leading, trailing) = + partitionLeadingTrailing comments mtd.pmtd_name.loc in + attach t.leading mtd.pmtd_name.loc leading; + begin match mtd.pmtd_type with + | None -> + attach t.trailing mtd.pmtd_name.loc trailing + | Some modType -> + let (afterName, rest) = partitionAdjacentTrailing mtd.pmtd_name.loc trailing in + attach t.trailing mtd.pmtd_name.loc afterName; + let (before, inside, after) = partitionByLoc rest modType.pmty_loc in + attach t.leading modType.pmty_loc before; + walkModType modType t inside; + attach t.trailing modType.pmty_loc after + end + + and walkModuleBinding mb t comments = + let (leading, trailing) = partitionLeadingTrailing comments mb.pmb_name.loc in + attach t.leading mb.pmb_name.loc leading; + let (afterName, rest) = partitionAdjacentTrailing mb.pmb_name.loc trailing in + attach t.trailing mb.pmb_name.loc afterName; + let (leading, inside, trailing) = partitionByLoc rest mb.pmb_expr.pmod_loc in + begin match mb.pmb_expr.pmod_desc with + | Pmod_constraint _ -> + walkModExpr mb.pmb_expr t (List.concat [leading; inside]); + | _ -> + attach t.leading mb.pmb_expr.pmod_loc leading; + walkModExpr mb.pmb_expr t inside; + end; + attach t.trailing mb.pmb_expr.pmod_loc trailing + + and walkSignature signature t comments = + match signature with + | _ when comments = [] -> () + | [] -> attach t.inside Location.none comments + | _s -> + walkList + ~getLoc:(fun n -> n.Parsetree.psig_loc) + ~walkNode:walkSignatureItem + signature + t + comments + + and walkSignatureItem si t comments = + match si.psig_desc with + | _ when comments = [] -> () + | Psig_value valueDescription -> + walkValueDescription valueDescription t comments + | Psig_type (_, typeDeclarations) -> + walkTypeDeclarations typeDeclarations t comments + | Psig_typext typeExtension -> + walkTypeExtension typeExtension t comments + | Psig_exception extensionConstructor -> + walkExtConstr extensionConstructor t comments + | Psig_module moduleDeclaration -> + walkModuleDeclaration moduleDeclaration t comments + | Psig_recmodule moduleDeclarations -> + walkList + ~getLoc:(fun n -> n.Parsetree.pmd_loc) + ~walkNode:walkModuleDeclaration + moduleDeclarations + t + comments + | Psig_modtype moduleTypeDeclaration -> + 
walkModuleTypeDeclaration moduleTypeDeclaration t comments + | Psig_open openDescription -> + walkOpenDescription openDescription t comments + | Psig_include includeDescription -> + walkIncludeDescription includeDescription t comments + | Psig_attribute attribute -> + walkAttribute attribute t comments + | Psig_extension (extension, _) -> + walkExtension extension t comments + | Psig_class _ | Psig_class_type _ -> () + + and walkIncludeDescription id t comments = + let (before, inside, after) = + partitionByLoc comments id.pincl_mod.pmty_loc in + attach t.leading id.pincl_mod.pmty_loc before; + walkModType id.pincl_mod t inside; + attach t.trailing id.pincl_mod.pmty_loc after + + and walkModuleDeclaration md t comments = + let (leading, trailing) = partitionLeadingTrailing comments md.pmd_name.loc in + attach t.leading md.pmd_name.loc leading; + let (afterName, rest) = partitionAdjacentTrailing md.pmd_name.loc trailing in + attach t.trailing md.pmd_name.loc afterName; + let (leading, inside, trailing) = partitionByLoc rest md.pmd_type.pmty_loc in + attach t.leading md.pmd_type.pmty_loc leading; + walkModType md.pmd_type t inside; + attach t.trailing md.pmd_type.pmty_loc trailing + + and walkList: + 'node. + ?prevLoc:Location.t -> + getLoc:('node -> Location.t) -> + walkNode:('node -> t -> Comment.t list -> unit) -> + 'node list -> t -> Comment.t list -> unit + = fun ?prevLoc ~getLoc ~walkNode l t comments -> + let open Location in + match l with + | _ when comments = [] -> () + | [] -> + begin match prevLoc with + | Some loc -> + attach t.trailing loc comments + | None -> () + end + | node::rest -> + let currLoc = getLoc node in + let (leading, inside, trailing) = partitionByLoc comments currLoc in + begin match prevLoc with + | None -> (* first node, all leading comments attach here *) + attach t.leading currLoc leading + | Some prevLoc -> + (* Same line *) + if prevLoc.loc_end.pos_lnum == currLoc.loc_start.pos_lnum then + let (afterPrev, beforeCurr) = partitionAdjacentTrailing prevLoc leading in + let () = attach t.trailing prevLoc afterPrev in + attach t.leading currLoc beforeCurr + else + let (onSameLineAsPrev, afterPrev) = partitionByOnSameLine prevLoc leading in + let () = attach t.trailing prevLoc onSameLineAsPrev in + let (leading, _inside, _trailing) = partitionByLoc afterPrev currLoc in + attach t.leading currLoc leading + end; + walkNode node t inside; + walkList ~prevLoc:currLoc ~getLoc ~walkNode rest t trailing + + (* The parsetree doesn't always contain location info about the opening or + * closing token of a "list-of-things". This routine visits the whole list, + * but returns any remaining comments that likely fall after the whole list. *) + and visitListButContinueWithRemainingComments: + 'node. 
+ ?prevLoc:Location.t -> + newlineDelimited:bool -> + getLoc:('node -> Location.t) -> + walkNode:('node -> t -> Comment.t list -> unit) -> + 'node list -> t -> Comment.t list -> Comment.t list + = fun ?prevLoc ~newlineDelimited ~getLoc ~walkNode l t comments -> + let open Location in + match l with + | _ when comments = [] -> [] + | [] -> + begin match prevLoc with + | Some loc -> + let (afterPrev, rest) = + if newlineDelimited then + partitionByOnSameLine loc comments + else + partitionAdjacentTrailing loc comments + in + attach t.trailing loc afterPrev; + rest + | None -> comments + end + | node::rest -> + let currLoc = getLoc node in + let (leading, inside, trailing) = partitionByLoc comments currLoc in + let () = match prevLoc with + | None -> (* first node, all leading comments attach here *) + attach t.leading currLoc leading; + () + | Some prevLoc -> + (* Same line *) + if prevLoc.loc_end.pos_lnum == currLoc.loc_start.pos_lnum then + let (afterPrev, beforeCurr) = partitionAdjacentTrailing prevLoc leading in + let () = attach t.trailing prevLoc afterPrev in + let () = attach t.leading currLoc beforeCurr in + () + else + let (onSameLineAsPrev, afterPrev) = partitionByOnSameLine prevLoc leading in + let () = attach t.trailing prevLoc onSameLineAsPrev in + let (leading, _inside, _trailing) = partitionByLoc afterPrev currLoc in + let () = attach t.leading currLoc leading in + () + in + walkNode node t inside; + visitListButContinueWithRemainingComments + ~prevLoc:currLoc ~getLoc ~walkNode ~newlineDelimited + rest t trailing + + and walkValueBindings vbs t comments = + walkList + ~getLoc:(fun n -> n.Parsetree.pvb_loc) + ~walkNode:walkValueBinding + vbs + t + comments + + and walkOpenDescription openDescription t comments = + let loc = openDescription.popen_lid.loc in + let (leading, trailing) = partitionLeadingTrailing comments loc in + attach t.leading loc leading; + attach t.trailing loc trailing; + + and walkTypeDeclarations typeDeclarations t comments = + walkList + ~getLoc:(fun n -> n.Parsetree.ptype_loc) + ~walkNode:walkTypeDeclaration + typeDeclarations + t + comments + + and walkTypeParam (typexpr, _variance) t comments = + walkTypExpr typexpr t comments + + and walkTypeDeclaration td t comments = + let (beforeName, rest) = + partitionLeadingTrailing comments td.ptype_name.loc in + attach t.leading td.ptype_name.loc beforeName; + + let (afterName, rest) = + partitionAdjacentTrailing td.ptype_name.loc rest in + attach t.trailing td.ptype_name.loc afterName; + + (* type params *) + let rest = match td.ptype_params with + | [] -> rest + | typeParams -> + visitListButContinueWithRemainingComments + ~getLoc:(fun (typexpr, _variance) -> typexpr.Parsetree.ptyp_loc) + ~walkNode:walkTypeParam + ~newlineDelimited:false + typeParams + t + rest + in + + (* manifest: = typexpr *) + let rest = match td.ptype_manifest with + | Some typexpr -> + let (beforeTyp, insideTyp, afterTyp) = + partitionByLoc rest typexpr.ptyp_loc in + attach t.leading typexpr.ptyp_loc beforeTyp; + walkTypExpr typexpr t insideTyp; + let (afterTyp, rest) = + partitionAdjacentTrailing typexpr.Parsetree.ptyp_loc afterTyp in + attach t.trailing typexpr.ptyp_loc afterTyp; + rest + | None -> rest + in + + let rest = match td.ptype_kind with + | Ptype_abstract | Ptype_open -> rest + | Ptype_record labelDeclarations -> + let () = walkList + ~getLoc:(fun ld -> ld.Parsetree.pld_loc) + ~walkNode:walkLabelDeclaration + labelDeclarations + t + rest + in + [] + | Ptype_variant constructorDeclarations -> + walkConstructorDeclarations 
constructorDeclarations t rest + in + attach t.trailing td.ptype_loc rest + + and walkLabelDeclarations lds t comments = + visitListButContinueWithRemainingComments + ~getLoc:(fun ld -> ld.Parsetree.pld_loc) + ~walkNode:walkLabelDeclaration + ~newlineDelimited:false + lds + t + comments + + and walkLabelDeclaration ld t comments = + let (beforeName, rest) = + partitionLeadingTrailing comments ld.pld_name.loc in + attach t.leading ld.pld_name.loc beforeName; + let (afterName, rest) = partitionAdjacentTrailing ld.pld_name.loc rest in + attach t.trailing ld.pld_name.loc afterName; + let (beforeTyp, insideTyp, afterTyp) = + partitionByLoc rest ld.pld_type.ptyp_loc in + attach t.leading ld.pld_type.ptyp_loc beforeTyp; + walkTypExpr ld.pld_type t insideTyp; + attach t.trailing ld.pld_type.ptyp_loc afterTyp + + and walkConstructorDeclarations cds t comments = + visitListButContinueWithRemainingComments + ~getLoc:(fun cd -> cd.Parsetree.pcd_loc) + ~walkNode:walkConstructorDeclaration + ~newlineDelimited:false + cds + t + comments + + and walkConstructorDeclaration cd t comments = + let (beforeName, rest) = + partitionLeadingTrailing comments cd.pcd_name.loc in + attach t.leading cd.pcd_name.loc beforeName; + let (afterName, rest) = + partitionAdjacentTrailing cd.pcd_name.loc rest in + attach t.trailing cd.pcd_name.loc afterName; + let rest = walkConstructorArguments cd.pcd_args t rest in + + let rest = match cd.pcd_res with + | Some typexpr -> + let (beforeTyp, insideTyp, afterTyp) = + partitionByLoc rest typexpr.ptyp_loc in + attach t.leading typexpr.ptyp_loc beforeTyp; + walkTypExpr typexpr t insideTyp; + let (afterTyp, rest) = + partitionAdjacentTrailing typexpr.Parsetree.ptyp_loc afterTyp in + attach t.trailing typexpr.ptyp_loc afterTyp; + rest + | None -> rest + in + attach t.trailing cd.pcd_loc rest + + and walkConstructorArguments args t comments = + match args with + | Pcstr_tuple typexprs -> + visitListButContinueWithRemainingComments + ~getLoc:(fun n -> n.Parsetree.ptyp_loc) + ~walkNode:walkTypExpr + ~newlineDelimited:false + typexprs + t + comments + | Pcstr_record labelDeclarations -> + walkLabelDeclarations labelDeclarations t comments + + and walkValueBinding vb t comments = + let open Location in + + let vb = + let open Parsetree in + match (vb.pvb_pat, vb.pvb_expr) with + | {ppat_desc = Ppat_constraint (pat, {ptyp_desc = Ptyp_poly ([], t)})}, + {pexp_desc = Pexp_constraint (expr, _typ)} -> + {vb with + pvb_pat = Ast_helper.Pat.constraint_ + ~loc:{pat.ppat_loc with loc_end = t.Parsetree.ptyp_loc.loc_end} pat t; + pvb_expr = expr; + } + | {ppat_desc = Ppat_constraint (pat, {ptyp_desc = Ptyp_poly (_::_, t)})}, + {pexp_desc = Pexp_fun _} -> + {vb with + pvb_pat = {vb.pvb_pat with + ppat_loc = {pat.ppat_loc with loc_end = t.ptyp_loc.loc_end}}} + | _ -> vb + in + let patternLoc = vb.Parsetree.pvb_pat.ppat_loc in + let exprLoc = vb.Parsetree.pvb_expr.pexp_loc in + + let (leading, inside, trailing) = + partitionByLoc comments patternLoc in + + (* everything before start of pattern can only be leading on the pattern: + * let |* before *| a = 1 *) + attach t.leading patternLoc leading; + walkPattern vb.Parsetree.pvb_pat t inside; + (* let pattern = expr -> pattern and expr on the same line *) + (* if patternLoc.loc_end.pos_lnum == exprLoc.loc_start.pos_lnum then ( *) + let (afterPat, surroundingExpr) = + partitionAdjacentTrailing patternLoc trailing + in + attach t.trailing patternLoc afterPat; + let (beforeExpr, insideExpr, afterExpr) = + partitionByLoc surroundingExpr exprLoc in + if 
isBlockExpr vb.pvb_expr then ( + walkExpr vb.pvb_expr t (List.concat [beforeExpr; insideExpr; afterExpr]) + ) else ( + attach t.leading exprLoc beforeExpr; + walkExpr vb.Parsetree.pvb_expr t insideExpr; + attach t.trailing exprLoc afterExpr + ) + + and walkExpr expr t comments = + let open Location in + match expr.Parsetree.pexp_desc with + | _ when comments = [] -> () + | Pexp_constant _ -> + let (leading, trailing) = + partitionLeadingTrailing comments expr.pexp_loc in + attach t.leading expr.pexp_loc leading; + attach t.trailing expr.pexp_loc trailing; + | Pexp_ident longident -> + let (leading, trailing) = + partitionLeadingTrailing comments longident.loc in + attach t.leading longident.loc leading; + attach t.trailing longident.loc trailing; + | Pexp_let (_recFlag, valueBindings, expr2) -> + let comments = visitListButContinueWithRemainingComments + ~getLoc:(fun n -> + if n.Parsetree.pvb_pat.ppat_loc.loc_ghost then + n.pvb_expr.pexp_loc + else + n.Parsetree.pvb_loc + ) + ~walkNode:walkValueBinding + ~newlineDelimited:true + valueBindings + t + comments + in + if isBlockExpr expr2 then ( + walkExpr expr2 t comments; + ) else ( + let (leading, inside, trailing) = partitionByLoc comments expr2.pexp_loc in + attach t.leading expr2.pexp_loc leading; + walkExpr expr2 t inside; + attach t.trailing expr2.pexp_loc trailing + ) + | Pexp_sequence (expr1, expr2) -> + let (leading, inside, trailing) = partitionByLoc comments expr1.pexp_loc in + let comments = if isBlockExpr expr1 then ( + let (afterExpr, comments) = partitionByOnSameLine expr1.pexp_loc trailing in + walkExpr expr1 t (List.concat [leading; inside; afterExpr]); + comments + ) else ( + attach t.leading expr1.pexp_loc leading; + walkExpr expr1 t inside; + let (afterExpr, comments) = partitionByOnSameLine expr1.pexp_loc trailing in + attach t.trailing expr1.pexp_loc afterExpr; + comments + ) in + if isBlockExpr expr2 then ( + walkExpr expr2 t comments + ) else ( + let (leading, inside, trailing) = partitionByLoc comments expr2.pexp_loc in + attach t.leading expr2.pexp_loc leading; + walkExpr expr2 t inside; + attach t.trailing expr2.pexp_loc trailing + ) + | Pexp_open (_override, longident, expr2) -> + let (leading, comments) = + partitionLeadingTrailing comments expr.pexp_loc in + attach + t.leading + {expr.pexp_loc with loc_end = longident.loc.loc_end} + leading; + let (leading, trailing) = + partitionLeadingTrailing comments longident.loc in + attach t.leading longident.loc leading; + let (afterLongident, rest) = + partitionByOnSameLine longident.loc trailing in + attach t.trailing longident.loc afterLongident; + if isBlockExpr expr2 then ( + walkExpr expr2 t rest + ) else ( + let (leading, inside, trailing) = partitionByLoc rest expr2.pexp_loc in + attach t.leading expr2.pexp_loc leading; + walkExpr expr2 t inside; + attach t.trailing expr2.pexp_loc trailing + ) + | Pexp_extension ( + {txt = "bs.obj"}, + PStr [{ + pstr_desc = Pstr_eval({pexp_desc = Pexp_record (rows, _)}, []) + }] + ) -> + walkList + ~getLoc:(fun ( + (longident, expr): (Longident.t Asttypes.loc * Parsetree.expression) + ) -> { + longident.loc with loc_end = expr.pexp_loc.loc_end + }) + ~walkNode:walkExprRecordRow + rows + t + comments + | Pexp_extension extension -> + walkExtension extension t comments + | Pexp_letexception (extensionConstructor, expr2) -> + let (leading, comments) = + partitionLeadingTrailing comments expr.pexp_loc in + attach + t.leading + {expr.pexp_loc with loc_end = extensionConstructor.pext_loc.loc_end} + leading; + let (leading, inside, 
trailing) = + partitionByLoc comments extensionConstructor.pext_loc in + attach t.leading extensionConstructor.pext_loc leading; + walkExtConstr extensionConstructor t inside; + let (afterExtConstr, rest) = + partitionByOnSameLine extensionConstructor.pext_loc trailing in + attach t.trailing extensionConstructor.pext_loc afterExtConstr; + if isBlockExpr expr2 then ( + walkExpr expr2 t rest + ) else ( + let (leading, inside, trailing) = partitionByLoc rest expr2.pexp_loc in + attach t.leading expr2.pexp_loc leading; + walkExpr expr2 t inside; + attach t.trailing expr2.pexp_loc trailing + ) + | Pexp_letmodule (stringLoc, modExpr, expr2) -> + let (leading, comments) = + partitionLeadingTrailing comments expr.pexp_loc in + attach t.leading {expr.pexp_loc with loc_end = modExpr.pmod_loc.loc_end} leading; + let (leading, trailing) = partitionLeadingTrailing comments stringLoc.loc in + attach t.leading stringLoc.loc leading; + let (afterString, rest) = + partitionAdjacentTrailing stringLoc.loc trailing in + attach t.trailing stringLoc.loc afterString; + let (beforeModExpr, insideModExpr, afterModExpr) = + partitionByLoc rest modExpr.pmod_loc in + attach t.leading modExpr.pmod_loc beforeModExpr; + walkModExpr modExpr t insideModExpr; + let (afterModExpr, rest) = + partitionByOnSameLine modExpr.pmod_loc afterModExpr in + attach t.trailing modExpr.pmod_loc afterModExpr; + if isBlockExpr expr2 then ( + walkExpr expr2 t rest; + ) else ( + let (leading, inside, trailing) = partitionByLoc rest expr2.pexp_loc in + attach t.leading expr2.pexp_loc leading; + walkExpr expr2 t inside; + attach t.trailing expr2.pexp_loc trailing + ) + | Pexp_assert expr + | Pexp_lazy expr -> + if isBlockExpr expr then ( + walkExpr expr t comments + ) else ( + let (leading, inside, trailing) = partitionByLoc comments expr.pexp_loc in + attach t.leading expr.pexp_loc leading; + walkExpr expr t inside; + attach t.trailing expr.pexp_loc trailing + ) + | Pexp_coerce (expr, optTypexpr, typexpr) -> + let (leading, inside, trailing) = partitionByLoc comments expr.pexp_loc in + attach t.leading expr.pexp_loc leading; + walkExpr expr t inside; + let (afterExpr, rest) = + partitionAdjacentTrailing expr.pexp_loc trailing in + attach t.trailing expr.pexp_loc afterExpr; + let rest = match optTypexpr with + | Some typexpr -> + let (leading, inside, trailing) = partitionByLoc comments typexpr.ptyp_loc in + attach t.leading typexpr.ptyp_loc leading; + walkTypExpr typexpr t inside; + let (afterTyp, rest) = + partitionAdjacentTrailing typexpr.ptyp_loc trailing in + attach t.trailing typexpr.ptyp_loc afterTyp; + rest + | None -> rest + in + let (leading, inside, trailing) = partitionByLoc rest typexpr.ptyp_loc in + attach t.leading typexpr.ptyp_loc leading; + walkTypExpr typexpr t inside; + attach t.trailing typexpr.ptyp_loc trailing + | Pexp_constraint (expr, typexpr) -> + let (leading, inside, trailing) = partitionByLoc comments expr.pexp_loc in + attach t.leading expr.pexp_loc leading; + walkExpr expr t inside; + let (afterExpr, rest) = + partitionAdjacentTrailing expr.pexp_loc trailing in + attach t.trailing expr.pexp_loc afterExpr; + let (leading, inside, trailing) = partitionByLoc rest typexpr.ptyp_loc in + attach t.leading typexpr.ptyp_loc leading; + walkTypExpr typexpr t inside; + attach t.trailing typexpr.ptyp_loc trailing + | Pexp_tuple [] + | Pexp_array [] + | Pexp_construct({txt = Longident.Lident "[]"}, _) -> + attach t.inside expr.pexp_loc comments + | Pexp_construct({txt = Longident.Lident "::"}, _) -> + walkList + ~getLoc:(fun n 
-> n.Parsetree.pexp_loc) + ~walkNode:walkExpr + (collectListExprs [] expr) + t + comments + | Pexp_construct (longident, args) -> + let (leading, trailing) = + partitionLeadingTrailing comments longident.loc in + attach t.leading longident.loc leading; + begin match args with + | Some expr -> + let (afterLongident, rest) = + partitionAdjacentTrailing longident.loc trailing in + attach t.trailing longident.loc afterLongident; + walkExpr expr t rest + | None -> + attach t.trailing longident.loc trailing + end + | Pexp_variant (_label, None) -> + () + | Pexp_variant (_label, Some expr) -> + walkExpr expr t comments + | Pexp_array exprs | Pexp_tuple exprs -> + walkList + ~getLoc:(fun n -> n.Parsetree.pexp_loc) + ~walkNode:walkExpr + exprs + t + comments + | Pexp_record (rows, spreadExpr) -> + let comments = match spreadExpr with + | None -> comments + | Some expr -> + let (leading, inside, trailing) = partitionByLoc comments expr.pexp_loc in + attach t.leading expr.pexp_loc leading; + walkExpr expr t inside; + let (afterExpr, rest) = partitionAdjacentTrailing expr.pexp_loc trailing in + attach t.trailing expr.pexp_loc afterExpr; + rest + in + walkList + ~getLoc:(fun ( + (longident, expr): (Longident.t Asttypes.loc * Parsetree.expression) + ) -> { + longident.loc with loc_end = expr.pexp_loc.loc_end + }) + ~walkNode:walkExprRecordRow + rows + t + comments + | Pexp_field (expr, longident) -> + let (leading, inside, trailing) = partitionByLoc comments expr.pexp_loc in + let trailing = if isBlockExpr expr then ( + let (afterExpr, rest) = + partitionAdjacentTrailing expr.pexp_loc trailing in + walkExpr expr t (List.concat [leading; inside; afterExpr]); + rest + ) else ( + attach t.leading expr.pexp_loc leading; + walkExpr expr t inside; + trailing + ) in + let (afterExpr, rest) = partitionAdjacentTrailing expr.pexp_loc trailing in + attach t.trailing expr.pexp_loc afterExpr; + let (leading, trailing) = partitionLeadingTrailing rest longident.loc in + attach t.leading longident.loc leading; + attach t.trailing longident.loc trailing + | Pexp_setfield (expr1, longident, expr2) -> + let (leading, inside, trailing) = partitionByLoc comments expr1.pexp_loc in + let rest = if isBlockExpr expr1 then ( + let (afterExpr, rest) = + partitionAdjacentTrailing expr1.pexp_loc trailing in + walkExpr expr1 t (List.concat [leading; inside; afterExpr]); + rest + ) else ( + let (afterExpr, rest) = + partitionAdjacentTrailing expr1.pexp_loc trailing in + attach t.leading expr1.pexp_loc leading; + walkExpr expr1 t inside; + attach t.trailing expr1.pexp_loc afterExpr; + rest + ) in + let (beforeLongident, afterLongident) = partitionLeadingTrailing rest longident.loc in + attach t.leading longident.loc beforeLongident; + let (afterLongident, rest) = partitionAdjacentTrailing longident.loc afterLongident in + attach t.trailing longident.loc afterLongident; + if isBlockExpr expr2 then + walkExpr expr2 t rest + else ( + let (leading, inside, trailing) = partitionByLoc rest expr2.pexp_loc in + attach t.leading expr2.pexp_loc leading; + walkExpr expr2 t inside; + attach t.trailing expr2.pexp_loc trailing + ) + | Pexp_ifthenelse (ifExpr, thenExpr, elseExpr) -> + let (leading, inside, trailing) = partitionByLoc comments ifExpr.pexp_loc in + let comments = if isBlockExpr ifExpr then ( + let (afterExpr, comments) = partitionAdjacentTrailing ifExpr.pexp_loc trailing in + walkExpr ifExpr t (List.concat [leading; inside; afterExpr]); + comments + ) else ( + attach t.leading ifExpr.pexp_loc leading; + walkExpr ifExpr t inside; + let 
(afterExpr, comments) = partitionAdjacentTrailing ifExpr.pexp_loc trailing in + attach t.trailing ifExpr.pexp_loc afterExpr; + comments + ) in + let (leading, inside, trailing) = partitionByLoc comments thenExpr.pexp_loc in + let comments = if isBlockExpr thenExpr then ( + let (afterExpr, trailing) = partitionAdjacentTrailing thenExpr.pexp_loc trailing in + walkExpr thenExpr t (List.concat [leading; inside; afterExpr]); + trailing + ) else ( + attach t.leading thenExpr.pexp_loc leading; + walkExpr thenExpr t inside; + let (afterExpr, comments) = partitionAdjacentTrailing thenExpr.pexp_loc trailing in + attach t.trailing thenExpr.pexp_loc afterExpr; + comments + ) in + begin match elseExpr with + | None -> () + | Some expr -> + if isBlockExpr expr then + walkExpr expr t comments + else ( + let (leading, inside, trailing) = partitionByLoc comments expr.pexp_loc in + attach t.leading expr.pexp_loc leading; + walkExpr expr t inside; + attach t.trailing expr.pexp_loc trailing + ) + end + | Pexp_while (expr1, expr2) -> + let (leading, inside, trailing) = partitionByLoc comments expr1.pexp_loc in + let rest = if isBlockExpr expr1 then + let (afterExpr, rest) = partitionAdjacentTrailing expr1.pexp_loc trailing in + walkExpr expr1 t (List.concat [leading; inside; afterExpr]); + rest + else ( + attach t.leading expr1.pexp_loc leading; + walkExpr expr1 t inside; + let (afterExpr, rest) = partitionAdjacentTrailing expr1.pexp_loc trailing in + attach t.trailing expr1.pexp_loc afterExpr; + rest + ) in + if isBlockExpr expr2 then ( + walkExpr expr2 t rest + ) else ( + let (leading, inside, trailing) = partitionByLoc rest expr2.pexp_loc in + attach t.leading expr2.pexp_loc leading; + walkExpr expr2 t inside; + attach t.trailing expr2.pexp_loc trailing + ) + | Pexp_for (pat, expr1, expr2, _, expr3) -> + let (leading, inside, trailing) = partitionByLoc comments pat.ppat_loc in + attach t.leading pat.ppat_loc leading; + walkPattern pat t inside; + let (afterPat, rest) = partitionAdjacentTrailing pat.ppat_loc trailing in + attach t.trailing pat.ppat_loc afterPat; + let (leading, inside, trailing) = partitionByLoc rest expr1.pexp_loc in + attach t.leading expr1.pexp_loc leading; + walkExpr expr1 t inside; + let (afterExpr, rest) = partitionAdjacentTrailing expr1.pexp_loc trailing in + attach t.trailing expr1.pexp_loc afterExpr; + let (leading, inside, trailing) = partitionByLoc rest expr2.pexp_loc in + attach t.leading expr2.pexp_loc leading; + walkExpr expr2 t inside; + let (afterExpr, rest) = partitionAdjacentTrailing expr2.pexp_loc trailing in + attach t.trailing expr2.pexp_loc afterExpr; + if isBlockExpr expr3 then ( + walkExpr expr3 t rest + ) else ( + let (leading, inside, trailing) = partitionByLoc rest expr3.pexp_loc in + attach t.leading expr3.pexp_loc leading; + walkExpr expr3 t inside; + attach t.trailing expr3.pexp_loc trailing + ) + | Pexp_pack modExpr -> + let (before, inside, after) = partitionByLoc comments modExpr.pmod_loc in + attach t.leading modExpr.pmod_loc before; + walkModExpr modExpr t inside; + attach t.trailing modExpr.pmod_loc after + | Pexp_match (expr, cases) | Pexp_try (expr, cases) -> + let (before, inside, after) = partitionByLoc comments expr.pexp_loc in + let after = if isBlockExpr expr then ( + let (afterExpr, rest) = + partitionAdjacentTrailing expr.pexp_loc after in + walkExpr expr t (List.concat [before; inside; afterExpr]); + rest + ) else ( + attach t.leading expr.pexp_loc before; + walkExpr expr t inside; + after + ) in + let (afterExpr, rest) = 
partitionAdjacentTrailing expr.pexp_loc after in + attach t.trailing expr.pexp_loc afterExpr; + walkList + ~getLoc:(fun n -> {n.Parsetree.pc_lhs.ppat_loc with + loc_end = n.pc_rhs.pexp_loc.loc_end}) + ~walkNode:walkCase + cases + t + rest + (* unary expression: todo use parsetreeviewer *) + | Pexp_apply ( + {pexp_desc = Pexp_ident {txt = Longident.Lident + ("~+" | "~+." | "~-" | "~-." | "not" | "!") + }}, + [Nolabel, argExpr] + ) -> + let (before, inside, after) = partitionByLoc comments argExpr.pexp_loc in + attach t.leading argExpr.pexp_loc before; + walkExpr argExpr t inside; + attach t.trailing argExpr.pexp_loc after + (* binary expression *) + | Pexp_apply( + {pexp_desc = Pexp_ident {txt = Longident.Lident + (":=" | "||" | "&&" | "=" | "==" | "<" | ">" + | "!=" | "!==" | "<=" | ">=" | "|>" | "+" | "+." + | "-" | "-." | "++" | "^" | "*" | "*." | "/" + | "/." | "**" | "|." | "<>") }}, + [(Nolabel, operand1); (Nolabel, operand2)] + ) -> + let (before, inside, after) = partitionByLoc comments operand1.pexp_loc in + attach t.leading operand1.pexp_loc before; + walkExpr operand1 t inside; + let (afterOperand1, rest) = + partitionAdjacentTrailing operand1.pexp_loc after in + attach t.trailing operand1.pexp_loc afterOperand1; + let (before, inside, after) = partitionByLoc rest operand2.pexp_loc in + attach t.leading operand2.pexp_loc before; + walkExpr operand2 t inside; (* (List.concat [inside; after]); *) + attach t.trailing operand2.pexp_loc after; + | Pexp_apply (callExpr, arguments) -> + let (before, inside, after) = partitionByLoc comments callExpr.pexp_loc in + let after = if isBlockExpr callExpr then ( + let (afterExpr, rest) = + partitionAdjacentTrailing callExpr.pexp_loc after in + walkExpr callExpr t (List.concat [before; inside; afterExpr]); + rest + ) else ( + attach t.leading callExpr.pexp_loc before; + walkExpr callExpr t inside; + after + ) in + let (afterExpr, rest) = partitionAdjacentTrailing callExpr.pexp_loc after in + attach t.trailing callExpr.pexp_loc afterExpr; + walkList + ~getLoc:(fun (_argLabel, expr) -> + let loc = match expr.Parsetree.pexp_attributes with + | ({Location.txt = "ns.namedArgLoc"; loc}, _)::_attrs -> + {loc with loc_end = expr.pexp_loc.loc_end} + | _ -> + expr.pexp_loc + in + loc) + ~walkNode:walkExprArgument + arguments + t + rest + | Pexp_fun (_, _, _, _) | Pexp_newtype _ -> + let (_, parameters, returnExpr) = funExpr expr in + let comments = visitListButContinueWithRemainingComments + ~newlineDelimited:false + ~walkNode:walkExprPararameter + ~getLoc:(fun (_attrs, _argLbl, exprOpt, pattern) -> + let open Parsetree in + let startPos = match pattern.ppat_attributes with + | ({Location.txt = "ns.namedArgLoc"; loc}, _)::_attrs -> + loc.loc_start + | _ -> + pattern.ppat_loc.loc_start + in + match exprOpt with + | None -> {pattern.ppat_loc with loc_start = startPos} + | Some expr -> { + pattern.ppat_loc with + loc_start = startPos; + loc_end = expr.pexp_loc.loc_end + } + ) + parameters + t + comments + in + begin match returnExpr.pexp_desc with + | Pexp_constraint (expr, typ) + when expr.pexp_loc.loc_start.pos_cnum >= typ.ptyp_loc.loc_end.pos_cnum + -> + let (leading, inside, trailing) = partitionByLoc comments typ.ptyp_loc in + attach t.leading typ.ptyp_loc leading; + walkTypExpr typ t inside; + let (afterTyp, comments) = + partitionAdjacentTrailing typ.ptyp_loc trailing in + attach t.trailing typ.ptyp_loc afterTyp; + if isBlockExpr expr then + walkExpr expr t comments + else ( + let (leading, inside, trailing) = + partitionByLoc comments expr.pexp_loc 
in + attach t.leading expr.pexp_loc leading; + walkExpr expr t inside; + attach t.trailing expr.pexp_loc trailing + ) + | _ -> + if isBlockExpr returnExpr then + walkExpr returnExpr t comments + else ( + let (leading, inside, trailing) = + partitionByLoc comments returnExpr.pexp_loc in + attach t.leading returnExpr.pexp_loc leading; + walkExpr returnExpr t inside; + attach t.trailing returnExpr.pexp_loc trailing + ) + end + | _ -> () + + and walkExprPararameter (_attrs, _argLbl, exprOpt, pattern) t comments = + let (leading, inside, trailing) = partitionByLoc comments pattern.ppat_loc in + attach t.leading pattern.ppat_loc leading; + walkPattern pattern t inside; + begin match exprOpt with + | Some expr -> + let (_afterPat, rest) = + partitionAdjacentTrailing pattern.ppat_loc trailing in + attach t.trailing pattern.ppat_loc trailing; + if isBlockExpr expr then + walkExpr expr t rest + else ( + let (leading, inside, trailing) = partitionByLoc rest expr.pexp_loc in + attach t.leading expr.pexp_loc leading; + walkExpr expr t inside; + attach t.trailing expr.pexp_loc trailing + ) + | None -> + attach t.trailing pattern.ppat_loc trailing + end + + and walkExprArgument (_argLabel, expr) t comments = + match expr.Parsetree.pexp_attributes with + | ({Location.txt = "ns.namedArgLoc"; loc}, _)::_attrs -> + let (leading, trailing) = partitionLeadingTrailing comments loc in + attach t.leading loc leading; + let (afterLabel, rest) = partitionAdjacentTrailing loc trailing in + attach t.trailing loc afterLabel; + let (before, inside, after) = partitionByLoc rest expr.pexp_loc in + attach t.leading expr.pexp_loc before; + walkExpr expr t inside; + attach t.trailing expr.pexp_loc after + | _ -> + let (before, inside, after) = partitionByLoc comments expr.pexp_loc in + attach t.leading expr.pexp_loc before; + walkExpr expr t inside; + attach t.trailing expr.pexp_loc after + + and walkCase case t comments = + let (before, inside, after) = partitionByLoc comments case.pc_lhs.ppat_loc in + (* cases don't have a location on their own, leading comments should go + * after the bar on the pattern *) + walkPattern case.pc_lhs t (List.concat [before; inside]); + let (afterPat, rest) = partitionAdjacentTrailing case.pc_lhs.ppat_loc after in + attach t.trailing case.pc_lhs.ppat_loc afterPat; + let comments = match case.pc_guard with + | Some expr -> + let (before, inside, after) = partitionByLoc rest expr.pexp_loc in + let (afterExpr, rest) = partitionAdjacentTrailing expr.pexp_loc after in + if isBlockExpr expr then ( + walkExpr expr t (List.concat [before; inside; afterExpr]) + ) else ( + attach t.leading expr.pexp_loc before; + walkExpr expr t inside; + attach t.trailing expr.pexp_loc afterExpr; + ); + rest + | None -> rest + in + if isBlockExpr case.pc_rhs then ( + walkExpr case.pc_rhs t comments + ) else ( + let (before, inside, after) = partitionByLoc comments case.pc_rhs.pexp_loc in + attach t.leading case.pc_rhs.pexp_loc before; + walkExpr case.pc_rhs t inside; + attach t.trailing case.pc_rhs.pexp_loc after + ) + + and walkExprRecordRow (longident, expr) t comments = + let (beforeLongident, afterLongident) = + partitionLeadingTrailing comments longident.loc + in + attach t.leading longident.loc beforeLongident; + let (afterLongident, rest) = + partitionAdjacentTrailing longident.loc afterLongident in + attach t.trailing longident.loc afterLongident; + let (leading, inside, trailing) = partitionByLoc rest expr.pexp_loc in + attach t.leading expr.pexp_loc leading; + walkExpr expr t inside; + attach t.trailing 
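(* Match cases have no location of their own, so a comment written between the
   bar and the pattern of a hypothetical case like

     | /* impossible */ None => ()

   is folded into the pattern's leading comments by walkCase above instead of
   being attached to the case as a whole. *)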
expr.pexp_loc trailing + + and walkExtConstr extConstr t comments = + let (leading, trailing) = + partitionLeadingTrailing comments extConstr.pext_name.loc in + attach t.leading extConstr.pext_name.loc leading; + let (afterName, rest) = + partitionAdjacentTrailing extConstr.pext_name.loc trailing in + attach t.trailing extConstr.pext_name.loc afterName; + walkExtensionConstructorKind extConstr.pext_kind t rest + + and walkExtensionConstructorKind kind t comments = + match kind with + | Pext_rebind longident -> + let (leading, trailing) = + partitionLeadingTrailing comments longident.loc in + attach t.leading longident.loc leading; + attach t.trailing longident.loc trailing + | Pext_decl (constructorArguments, maybeTypExpr) -> + let rest = walkConstructorArguments constructorArguments t comments in + begin match maybeTypExpr with + | None -> () + | Some typexpr -> + let (before, inside, after) = partitionByLoc rest typexpr.ptyp_loc in + attach t.leading typexpr.ptyp_loc before; + walkTypExpr typexpr t inside; + attach t.trailing typexpr.ptyp_loc after + end + + and walkModExpr modExpr t comments = + match modExpr.pmod_desc with + | Pmod_ident longident -> + let (before, after) = partitionLeadingTrailing comments longident.loc in + attach t.leading longident.loc before; + attach t.trailing longident.loc after + | Pmod_structure structure -> + walkStructure structure t comments + | Pmod_extension extension -> + walkExtension extension t comments + | Pmod_unpack expr -> + let (before, inside, after) = partitionByLoc comments expr.pexp_loc in + attach t.leading expr.pexp_loc before; + walkExpr expr t inside; + attach t.trailing expr.pexp_loc after + | Pmod_constraint (modexpr, modtype) -> + if modtype.pmty_loc.loc_start >= modexpr.pmod_loc.loc_end then ( + let (before, inside, after) = partitionByLoc comments modexpr.pmod_loc in + attach t.leading modexpr.pmod_loc before; + walkModExpr modexpr t inside; + let (after, rest) = partitionAdjacentTrailing modexpr.pmod_loc after in + attach t.trailing modexpr.pmod_loc after; + let (before, inside, after) = partitionByLoc rest modtype.pmty_loc in + attach t.leading modtype.pmty_loc before; + walkModType modtype t inside; + attach t.trailing modtype.pmty_loc after + ) else ( + let (before, inside, after) = partitionByLoc comments modtype.pmty_loc in + attach t.leading modtype.pmty_loc before; + walkModType modtype t inside; + let (after, rest) = partitionAdjacentTrailing modtype.pmty_loc after in + attach t.trailing modtype.pmty_loc after; + let (before, inside, after) = partitionByLoc rest modexpr.pmod_loc in + attach t.leading modexpr.pmod_loc before; + walkModExpr modexpr t inside; + attach t.trailing modexpr.pmod_loc after; + ) + | Pmod_apply (_callModExpr, _argModExpr) -> + let modExprs = modExprApply modExpr in + walkList + ~getLoc:(fun n -> n.Parsetree.pmod_loc) + ~walkNode:walkModExpr + modExprs + t + comments + | Pmod_functor _ -> + let (parameters, returnModExpr) = modExprFunctor modExpr in + let comments = visitListButContinueWithRemainingComments + ~getLoc:(fun + (_, lbl, modTypeOption) -> match modTypeOption with + | None -> lbl.Asttypes.loc + | Some modType -> {lbl.loc with loc_end = modType.Parsetree.pmty_loc.loc_end} + ) + ~walkNode:walkModExprParameter + ~newlineDelimited:false + parameters + t + comments + in + begin match returnModExpr.pmod_desc with + | Pmod_constraint (modExpr, modType) + when modType.pmty_loc.loc_end.pos_cnum <= modExpr.pmod_loc.loc_start.pos_cnum -> + let (before, inside, after) = partitionByLoc comments 
modType.pmty_loc in + attach t.leading modType.pmty_loc before; + walkModType modType t inside; + let (after, rest) = partitionAdjacentTrailing modType.pmty_loc after in + attach t.trailing modType.pmty_loc after; + let (before, inside, after) = partitionByLoc rest modExpr.pmod_loc in + attach t.leading modExpr.pmod_loc before; + walkModExpr modExpr t inside; + attach t.trailing modExpr.pmod_loc after + | _ -> + let (before, inside, after) = partitionByLoc comments returnModExpr.pmod_loc in + attach t.leading returnModExpr.pmod_loc before; + walkModExpr returnModExpr t inside; + attach t.trailing returnModExpr.pmod_loc after + end + + and walkModExprParameter parameter t comments = + let (_attrs, lbl, modTypeOption) = parameter in + let (leading, trailing) = partitionLeadingTrailing comments lbl.loc in + attach t.leading lbl.loc leading; + begin match modTypeOption with + | None -> attach t.trailing lbl.loc trailing + | Some modType -> + let (afterLbl, rest) = partitionAdjacentTrailing lbl.loc trailing in + attach t.trailing lbl.loc afterLbl; + let (before, inside, after) = partitionByLoc rest modType.pmty_loc in + attach t.leading modType.pmty_loc before; + walkModType modType t inside; + attach t.trailing modType.pmty_loc after; + end + + and walkModType modType t comments = + match modType.pmty_desc with + | Pmty_ident longident | Pmty_alias longident -> + let (leading, trailing) = partitionLeadingTrailing comments longident.loc in + attach t.leading longident.loc leading; + attach t.trailing longident.loc trailing; + | Pmty_signature signature -> + walkSignature signature t comments + | Pmty_extension extension -> + walkExtension extension t comments + | Pmty_typeof modExpr -> + let (before, inside, after) = partitionByLoc comments modExpr.pmod_loc in + attach t.leading modExpr.pmod_loc before; + walkModExpr modExpr t inside; + attach t.trailing modExpr.pmod_loc after; + | Pmty_with (modType, _withConstraints) -> + let (before, inside, after) = partitionByLoc comments modType.pmty_loc in + attach t.leading modType.pmty_loc before; + walkModType modType t inside; + attach t.trailing modType.pmty_loc after + (* TODO: withConstraints*) + | Pmty_functor _ -> + let (parameters, returnModType) = functorType modType in + let comments = visitListButContinueWithRemainingComments + ~getLoc:(fun + (_, lbl, modTypeOption) -> match modTypeOption with + | None -> lbl.Asttypes.loc + | Some modType -> + if lbl.txt = "_" then modType.Parsetree.pmty_loc + else {lbl.loc with loc_end = modType.Parsetree.pmty_loc.loc_end} + ) + ~walkNode:walkModTypeParameter + ~newlineDelimited:false + parameters + t + comments + in + let (before, inside, after) = partitionByLoc comments returnModType.pmty_loc in + attach t.leading returnModType.pmty_loc before; + walkModType returnModType t inside; + attach t.trailing returnModType.pmty_loc after + + and walkModTypeParameter (_, lbl, modTypeOption) t comments = + let (leading, trailing) = partitionLeadingTrailing comments lbl.loc in + attach t.leading lbl.loc leading; + begin match modTypeOption with + | None -> attach t.trailing lbl.loc trailing + | Some modType -> + let (afterLbl, rest) = partitionAdjacentTrailing lbl.loc trailing in + attach t.trailing lbl.loc afterLbl; + let (before, inside, after) = partitionByLoc rest modType.pmty_loc in + attach t.leading modType.pmty_loc before; + walkModType modType t inside; + attach t.trailing modType.pmty_loc after; + end + + and walkPattern pat t comments = + let open Location in + match pat.Parsetree.ppat_desc with + | _ 
when comments = [] -> () + | Ppat_alias (pat, alias) -> + let (leading, inside, trailing) = partitionByLoc comments pat.ppat_loc in + attach t.leading pat.ppat_loc leading; + walkPattern pat t inside; + let (afterPat, rest) = partitionAdjacentTrailing pat.ppat_loc trailing in + attach t.leading pat.ppat_loc leading; + attach t.trailing pat.ppat_loc afterPat; + let (beforeAlias, afterAlias) = partitionLeadingTrailing rest alias.loc in + attach t.leading alias.loc beforeAlias; + attach t.trailing alias.loc afterAlias + | Ppat_tuple [] + | Ppat_array [] + | Ppat_construct({txt = Longident.Lident "()"}, _) + | Ppat_construct({txt = Longident.Lident "[]"}, _) -> + attach t.inside pat.ppat_loc comments; + | Ppat_array patterns -> + walkList + ~getLoc:(fun n -> n.Parsetree.ppat_loc) + ~walkNode:walkPattern + patterns + t + comments + | Ppat_tuple patterns -> + walkList + ~getLoc:(fun n -> n.Parsetree.ppat_loc) + ~walkNode:walkPattern + patterns + t + comments + | Ppat_construct({txt = Longident.Lident "::"}, _) -> + walkList + ~getLoc:(fun n -> n.Parsetree.ppat_loc) + ~walkNode:walkPattern + (collectListPatterns [] pat) + t + comments + | Ppat_construct (constr, None) -> + let (beforeConstr, afterConstr) = + partitionLeadingTrailing comments constr.loc + in + attach t.leading constr.loc beforeConstr; + attach t.trailing constr.loc afterConstr + | Ppat_construct (constr, Some pat) -> + let (leading, trailing) = partitionLeadingTrailing comments constr.loc in + attach t.leading constr.loc leading; + let (leading, inside, trailing) = partitionByLoc trailing pat.ppat_loc in + attach t.leading pat.ppat_loc leading; + walkPattern pat t inside; + attach t.trailing pat.ppat_loc trailing + | Ppat_variant (_label, None) -> + () + | Ppat_variant (_label, Some pat) -> + walkPattern pat t comments + | Ppat_type _ -> + () + | Ppat_record (recordRows, _) -> + walkList + ~getLoc:(fun ( + (longidentLoc, pattern): (Longident.t Asttypes.loc * Parsetree.pattern) + ) -> { + longidentLoc.loc with + loc_end = pattern.Parsetree.ppat_loc.loc_end + }) + ~walkNode:walkPatternRecordRow + recordRows + t + comments + | Ppat_or (pattern1, pattern2) -> + let (beforePattern1, insidePattern1, afterPattern1) = + partitionByLoc comments pattern1.ppat_loc + in + attach t.leading pattern1.ppat_loc beforePattern1; + walkPattern pattern1 t insidePattern1; + let (afterPattern1, rest) = + partitionAdjacentTrailing pattern1.ppat_loc afterPattern1 + in + attach t.trailing pattern1.ppat_loc afterPattern1; + let (beforePattern2, insidePattern2, afterPattern2) = + partitionByLoc rest pattern2.ppat_loc + in + attach t.leading pattern2.ppat_loc beforePattern2; + walkPattern pattern2 t insidePattern2; + attach t.trailing pattern2.ppat_loc afterPattern2 + | Ppat_constraint (pattern, typ) -> + let (beforePattern, insidePattern, afterPattern) = + partitionByLoc comments pattern.ppat_loc + in + attach t.leading pattern.ppat_loc beforePattern; + walkPattern pattern t insidePattern; + let (afterPattern, rest) = + partitionAdjacentTrailing pattern.ppat_loc afterPattern + in + attach t.trailing pattern.ppat_loc afterPattern; + let (beforeTyp, insideTyp, afterTyp) = + partitionByLoc rest typ.ptyp_loc + in + attach t.leading typ.ptyp_loc beforeTyp; + walkTypExpr typ t insideTyp; + attach t.trailing typ.ptyp_loc afterTyp + | Ppat_lazy pattern | Ppat_exception pattern -> + let (leading, inside, trailing) = partitionByLoc comments pattern.ppat_loc in + attach t.leading pattern.ppat_loc leading; + walkPattern pattern t inside; + attach t.trailing 
pattern.ppat_loc trailing + | Ppat_unpack stringLoc -> + let (leading, trailing) = partitionLeadingTrailing comments stringLoc.loc in + attach t.leading stringLoc.loc leading; + attach t.trailing stringLoc.loc trailing + | Ppat_extension extension -> + walkExtension extension t comments + | _ -> () + + (* name: firstName *) + and walkPatternRecordRow row t comments = + match row with + (* punned {x}*) + | ({Location.txt=Longident.Lident ident; loc = longidentLoc}, + {Parsetree.ppat_desc=Ppat_var {txt;_}}) when ident = txt -> + let (beforeLbl, afterLbl) = + partitionLeadingTrailing comments longidentLoc + in + attach t.leading longidentLoc beforeLbl; + attach t.trailing longidentLoc afterLbl + | (longident, pattern) -> + let (beforeLbl, afterLbl) = + partitionLeadingTrailing comments longident.loc + in + attach t.leading longident.loc beforeLbl; + let (afterLbl, rest) = partitionAdjacentTrailing longident.loc afterLbl in + attach t.trailing longident.loc afterLbl; + let (leading, inside, trailing) = partitionByLoc rest pattern.ppat_loc in + attach t.leading pattern.ppat_loc leading; + walkPattern pattern t inside; + attach t.trailing pattern.ppat_loc trailing + + and walkTypExpr typ t comments = + match typ.Parsetree.ptyp_desc with + | _ when comments = [] -> () + | Ptyp_tuple typexprs -> + walkList + ~getLoc:(fun n -> n.Parsetree.ptyp_loc) + ~walkNode:walkTypExpr + typexprs + t + comments + | Ptyp_extension extension -> + walkExtension extension t comments + | Ptyp_package packageType -> + walkPackageType packageType t comments + | Ptyp_alias (typexpr, _alias) -> + let (beforeTyp, insideTyp, afterTyp) = + partitionByLoc comments typexpr.ptyp_loc in + attach t.leading typexpr.ptyp_loc beforeTyp; + walkTypExpr typexpr t insideTyp; + attach t.trailing typexpr.ptyp_loc afterTyp; + | Ptyp_poly (strings, typexpr) -> + let comments = visitListButContinueWithRemainingComments + ~getLoc:(fun n -> n.Asttypes.loc) + ~walkNode:(fun longident t comments -> + let (beforeLongident, afterLongident) = + partitionLeadingTrailing comments longident.loc in + attach t.leading longident.loc beforeLongident; + attach t.trailing longident.loc afterLongident + ) + ~newlineDelimited:false + strings + t + comments + in + let (beforeTyp, insideTyp, afterTyp) = + partitionByLoc comments typexpr.ptyp_loc in + attach t.leading typexpr.ptyp_loc beforeTyp; + walkTypExpr typexpr t insideTyp; + attach t.trailing typexpr.ptyp_loc afterTyp + | Ptyp_constr (longident, typexprs) -> + let (beforeLongident, _afterLongident) = + partitionLeadingTrailing comments longident.loc in + let (afterLongident, rest) = + partitionAdjacentTrailing longident.loc comments in + attach t.leading longident.loc beforeLongident; + attach t.trailing longident.loc afterLongident; + walkList + ~getLoc:(fun n -> n.Parsetree.ptyp_loc) + ~walkNode:walkTypExpr + typexprs + t + rest + | Ptyp_arrow _ -> + let (_, parameters, typexpr) = arrowType typ in + let comments = walkTypeParameters parameters t comments in + let (beforeTyp, insideTyp, afterTyp) = + partitionByLoc comments typexpr.ptyp_loc in + attach t.leading typexpr.ptyp_loc beforeTyp; + walkTypExpr typexpr t insideTyp; + attach t.trailing typexpr.ptyp_loc afterTyp + | Ptyp_object (fields, _) -> + walkTypObjectFields fields t comments + | _ -> () + + and walkTypObjectFields fields t comments = + walkList + ~getLoc:(fun field -> + match field with + | Parsetree.Otag (lbl, _, typ) -> + {lbl.loc with loc_end = typ.ptyp_loc.loc_end} + | _ -> Location.none + ) + ~walkNode:walkTypObjectField + fields + t 
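(* A punned record row only carries comments on its label: for hypothetical
   patterns like

     {name}      // punned: label and variable coincide
     {name: n}   // not punned: the sub-pattern is walked as well

   the first row attaches everything to name, while the second also partitions
   the remaining comments against the sub-pattern n. *)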
+ comments + + and walkTypObjectField field t comments = + match field with + | Otag (lbl, _, typexpr) -> + let (beforeLbl, afterLbl) = partitionLeadingTrailing comments lbl.loc in + attach t.leading lbl.loc beforeLbl; + let (afterLbl, rest) = partitionAdjacentTrailing lbl.loc afterLbl in + attach t.trailing lbl.loc afterLbl; + let (beforeTyp, insideTyp, afterTyp) = + partitionByLoc rest typexpr.ptyp_loc in + attach t.leading typexpr.ptyp_loc beforeTyp; + walkTypExpr typexpr t insideTyp; + attach t.trailing typexpr.ptyp_loc afterTyp + | _ -> () + + and walkTypeParameters typeParameters t comments = + visitListButContinueWithRemainingComments + ~getLoc:(fun (_, _, typexpr) -> typexpr.Parsetree.ptyp_loc) + ~walkNode:walkTypeParameter + ~newlineDelimited:false + typeParameters + t + comments + + and walkTypeParameter (_attrs, _lbl, typexpr) t comments = + let (beforeTyp, insideTyp, afterTyp) = + partitionByLoc comments typexpr.ptyp_loc in + attach t.leading typexpr.ptyp_loc beforeTyp; + walkTypExpr typexpr t insideTyp; + attach t.trailing typexpr.ptyp_loc afterTyp + + and walkPackageType packageType t comments = + let (longident, packageConstraints) = packageType in + let (beforeLongident, afterLongident) = + partitionLeadingTrailing comments longident.loc in + attach t.leading longident.loc beforeLongident; + let (afterLongident, rest) = + partitionAdjacentTrailing longident.loc afterLongident in + attach t.trailing longident.loc afterLongident; + walkPackageConstraints packageConstraints t rest + + and walkPackageConstraints packageConstraints t comments = + walkList + ~getLoc:(fun (longident, typexpr) -> {longident.Asttypes.loc with + loc_end = typexpr.Parsetree.ptyp_loc.loc_end + }) + ~walkNode:walkPackageConstraint + packageConstraints + t + comments + + and walkPackageConstraint packageConstraint t comments = + let (longident, typexpr) = packageConstraint in + let (beforeLongident, afterLongident) = + partitionLeadingTrailing comments longident.loc in + attach t.leading longident.loc beforeLongident; + let (afterLongident, rest) = + partitionAdjacentTrailing longident.loc afterLongident in + attach t.trailing longident.loc afterLongident; + let (beforeTyp, insideTyp, afterTyp) = + partitionByLoc rest typexpr.ptyp_loc in + attach t.leading typexpr.ptyp_loc beforeTyp; + walkTypExpr typexpr t insideTyp; + attach t.trailing typexpr.ptyp_loc afterTyp; + + and walkExtension extension t comments = + let (id, payload) = extension in + let (beforeId, afterId) = partitionLeadingTrailing comments id.loc in + attach t.leading id.loc beforeId; + let (afterId, rest) = partitionAdjacentTrailing id.loc afterId in + attach t.trailing id.loc afterId; + walkPayload payload t rest + + and walkAttribute (id, payload) t comments = + let (beforeId, afterId) = partitionLeadingTrailing comments id.loc in + attach t.leading id.loc beforeId; + let (afterId, rest) = partitionAdjacentTrailing id.loc afterId in + attach t.trailing id.loc afterId; + walkPayload payload t rest + + and walkPayload payload t comments = + match payload with + | PStr s -> walkStructure s t comments + | _ -> () + +end + +module Printer = struct + let addParens doc = + Doc.group ( + Doc.concat [ + Doc.lparen; + Doc.indent ( + Doc.concat [ + Doc.softLine; + doc + ] + ); + Doc.softLine; + Doc.rparen; + ] + ) + + let addBraces doc = + Doc.group ( + Doc.concat [ + Doc.lbrace; + doc; + Doc.rbrace; + ] + ) + + let getFirstLeadingComment tbl loc = + match Hashtbl.find tbl.CommentTable.leading loc with + | comment::_ -> Some comment + | [] -> None 
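(* addParens and addBraces above build breakable groups: assuming the usual
   group/softLine semantics of the Doc module, a wrapped doc that fits on one
   line renders as (doc) or {doc}, and when it does not fit the soft lines in
   addParens become newlines and the indent applies, giving roughly

     (
       doc
     )

   addBraces contains no soft lines, so its braces always hug the doc. *)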
+ | exception Not_found -> None + + let printMultilineCommentContent txt = + (* Turns + * |* first line + * * second line + * * third line *| + * Into + * |* first line + * * second line + * * third line *| + * + * What makes a comment suitable for this kind of indentation? + * -> multiple lines + every line starts with a star + *) + let rec indentStars lines acc = + match lines with + | [] -> Doc.nil + | [lastLine] -> + let line = String.trim lastLine in + let doc = Doc.text (" " ^ line) in + let trailingSpace = if String.length line > 0 then Doc.space else Doc.nil in + List.rev (trailingSpace::doc::acc) |> Doc.concat + | line::lines -> + let line = String.trim line in + let len = String.length line in + if len > 0 && (String.get [@doesNotRaise]) line 0 == '*' then + let doc = Doc.text (" " ^ (String.trim line)) in + indentStars lines (Doc.hardLine::doc::acc) + else + let trailingSpace = + let len = String.length txt in + if len > 0 && (String.unsafe_get txt (len - 1) = ' ') then + Doc.space + else Doc.nil + in + let content = Comment.trimSpaces txt in + Doc.concat [Doc.text content; trailingSpace] + in + let lines = String.split_on_char '\n' txt in + match lines with + | [] -> Doc.text "/* */" + | [line] -> Doc.concat [ + Doc.text "/* "; + Doc.text (Comment.trimSpaces line); + Doc.text " */"; + ] + | first::rest -> + let firstLine = Comment.trimSpaces first in + Doc.concat [ + Doc.text "/*"; + if String.length firstLine > 0 && not (String.equal firstLine "*") then + Doc.space else Doc.nil; + indentStars rest [Doc.hardLine; Doc.text firstLine]; + Doc.text "*/"; + ] + + let printTrailingComment (nodeLoc : Location.t) comment = + let singleLine = Comment.isSingleLineComment comment in + let content = + let txt = Comment.txt comment in + if singleLine then + Doc.text ("// " ^ String.trim txt) + else + printMultilineCommentContent txt + in + let diff = + let cmtStart = (Comment.loc comment).loc_start in + let prevTokEndPos = Comment.prevTokEndPos comment in + cmtStart.pos_lnum - prevTokEndPos.pos_lnum + in + let isBelow = + (Comment.loc comment).loc_start.pos_lnum > nodeLoc.loc_end.pos_lnum in + if diff > 0 || isBelow then + Doc.concat [ + Doc.breakParent; + Doc.lineSuffix( + (Doc.concat [Doc.hardLine; if diff > 1 then Doc.hardLine else Doc.nil; content]) + ) + ] + else if not singleLine then + Doc.concat [Doc.space; content] + else + Doc.lineSuffix (Doc.concat [Doc.space; content]) + + let printLeadingComment ?nextComment comment = + let singleLine = Comment.isSingleLineComment comment in + let content = + let txt = Comment.txt comment in + if singleLine then + Doc.text ("// " ^ String.trim txt) + else + printMultilineCommentContent txt + in + let separator = Doc.concat [ + if singleLine then Doc.concat [ + Doc.hardLine; + Doc.breakParent; + ] else Doc.nil; + (match nextComment with + | Some next -> + let nextLoc = Comment.loc next in + let currLoc = Comment.loc comment in + let diff = + nextLoc.Location.loc_start.pos_lnum - + currLoc.Location.loc_end.pos_lnum + in + let nextSingleLine = Comment.isSingleLineComment next in + if singleLine && nextSingleLine then + if diff > 1 then Doc.hardLine else Doc.nil + else if singleLine && not nextSingleLine then + if diff > 1 then Doc.hardLine else Doc.nil + else + if diff > 1 then Doc.concat [Doc.hardLine; Doc.hardLine] + else if diff == 1 then Doc.hardLine + else + Doc.space + | None -> Doc.nil) + ] + in + Doc.concat [ + content; + separator; + ] + + let printCommentsInside cmtTbl loc = + let rec loop acc comments = + match comments with + | [] -> 
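(* Placement rules of printTrailingComment above, on hypothetical input:

     let x = 1 // same line: emitted as a line suffix right after the node
     let y = 2
     // on a later line: breakParent forces it onto its own line

   and when more than one source line separates the comment from the node,
   exactly one blank line is kept in the output. *)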
Doc.nil + | [comment] -> + let cmtDoc = printLeadingComment comment in + let doc = Doc.group ( + Doc.concat [ + Doc.concat (List.rev (cmtDoc::acc)); + ] + ) + in + doc + | comment::((nextComment::_comments) as rest) -> + let cmtDoc = printLeadingComment ~nextComment comment in + loop (cmtDoc::acc) rest + in + match Hashtbl.find cmtTbl.CommentTable.inside loc with + | exception Not_found -> Doc.nil + | comments -> + Hashtbl.remove cmtTbl.inside loc; + Doc.group ( + loop [] comments + ) + + let printLeadingComments node tbl loc = + let rec loop acc comments = + match comments with + | [] -> node + | [comment] -> + let cmtDoc = printLeadingComment comment in + let diff = + loc.Location.loc_start.pos_lnum - + (Comment.loc comment).Location.loc_end.pos_lnum + in + let separator = + if Comment.isSingleLineComment comment then + if diff > 1 then Doc.hardLine else Doc.nil + else if diff == 0 then + Doc.space + else if diff > 1 then Doc.concat [Doc.hardLine; Doc.hardLine] + else + Doc.hardLine + in + let doc = Doc.group ( + Doc.concat [ + Doc.concat (List.rev (cmtDoc::acc)); + separator; + node + ] + ) + in + doc + | comment::((nextComment::_comments) as rest) -> + let cmtDoc = printLeadingComment ~nextComment comment in + loop (cmtDoc::acc) rest + in + match Hashtbl.find tbl loc with + | exception Not_found -> node + | comments -> + (* Remove comments from tbl: Some ast nodes have the same location. + * We only want to print comments once *) + Hashtbl.remove tbl loc; + loop [] comments + + let printTrailingComments node tbl loc = + let rec loop acc comments = + match comments with + | [] -> Doc.concat (List.rev acc) + | comment::comments -> + let cmtDoc = printTrailingComment loc comment in + loop (cmtDoc::acc) comments + in + match Hashtbl.find tbl loc with + | exception Not_found -> node + | [] -> node + | (_first::_) as comments -> + (* Remove comments from tbl: Some ast nodes have the same location. 
+ * We only want to print comments once *) + Hashtbl.remove tbl loc; + let cmtsDoc = loop [] comments in + Doc.concat [ + node; + cmtsDoc; + ] + + let printComments doc (tbl: CommentTable.t) loc = + let docWithLeadingComments = printLeadingComments doc tbl.leading loc in + printTrailingComments docWithLeadingComments tbl.trailing loc + + let printList ~getLoc ~nodes ~print ?(forceBreak=false) t = + let rec loop (prevLoc: Location.t) acc nodes = + match nodes with + | [] -> (prevLoc, Doc.concat (List.rev acc)) + | node::nodes -> + let loc = getLoc node in + let startPos = match getFirstLeadingComment t loc with + | None -> loc.loc_start + | Some comment -> (Comment.loc comment).loc_start + in + let sep = if startPos.pos_lnum - prevLoc.loc_end.pos_lnum > 1 then + Doc.concat [Doc.hardLine; Doc.hardLine] + else + Doc.hardLine + in + let doc = printComments (print node t) t loc in + loop loc (doc::sep::acc) nodes + in + match nodes with + | [] -> Doc.nil + | node::nodes -> + let firstLoc = getLoc node in + let doc = printComments (print node t) t firstLoc in + let (lastLoc, docs) = loop firstLoc [doc] nodes in + let forceBreak = + forceBreak || + firstLoc.loc_start.pos_lnum != lastLoc.loc_end.pos_lnum + in + Doc.breakableGroup ~forceBreak docs + + let printListi ~getLoc ~nodes ~print ?(forceBreak=false) t = + let rec loop i (prevLoc: Location.t) acc nodes = + match nodes with + | [] -> (prevLoc, Doc.concat (List.rev acc)) + | node::nodes -> + let loc = getLoc node in + let startPos = match getFirstLeadingComment t loc with + | None -> loc.loc_start + | Some comment -> (Comment.loc comment).loc_start + in + let sep = if startPos.pos_lnum - prevLoc.loc_end.pos_lnum > 1 then + Doc.concat [Doc.hardLine; Doc.hardLine] + else + Doc.line + in + let doc = printComments (print node t i) t loc in + loop (i + 1) loc (doc::sep::acc) nodes + in + match nodes with + | [] -> Doc.nil + | node::nodes -> + let firstLoc = getLoc node in + let doc = printComments (print node t 0) t firstLoc in + let (lastLoc, docs) = loop 1 firstLoc [doc] nodes in + let forceBreak = + forceBreak || + firstLoc.loc_start.pos_lnum != lastLoc.loc_end.pos_lnum + in + Doc.breakableGroup ~forceBreak docs + + let rec printLongidentAux accu = function + | Longident.Lident s -> (Doc.text s) :: accu + | Ldot(lid, s) -> printLongidentAux ((Doc.text s) :: accu) lid + | Lapply(lid1, lid2) -> + let d1 = Doc.join ~sep:Doc.dot (printLongidentAux [] lid1) in + let d2 = Doc.join ~sep:Doc.dot (printLongidentAux [] lid2) in + (Doc.concat [d1; Doc.lparen; d2; Doc.rparen]) :: accu + + let printLongident = function + | Longident.Lident txt -> Doc.text txt + | lid -> Doc.join ~sep:Doc.dot (printLongidentAux [] lid) + + type identifierStyle = + | ExoticIdent + | NormalIdent + + let classifyIdentContent ?(allowUident=false) txt = + let len = String.length txt in + let rec go i = + if i == len then NormalIdent + else + let c = String.unsafe_get txt i in + if i == 0 && not ( + (allowUident && (c >= 'A' && c <= 'Z')) || + (c >= 'a' && c <= 'z') || c = '_' || (c >= '0' && c <= '9')) then + ExoticIdent + else if not ( + (c >= 'a' && c <= 'z') + || (c >= 'A' && c <= 'Z') + || c = '\'' + || c = '_' + || (c >= '0' && c <= '9')) + then + ExoticIdent + else + go (i + 1) + in + if Token.isKeywordTxt txt && txt <> "list" then + ExoticIdent + else + go 0 + + let printIdentLike ?allowUident txt = + match classifyIdentContent ?allowUident txt with + | ExoticIdent -> Doc.concat [ + Doc.text "\\\""; + Doc.text txt; + Doc.text"\"" + ] + | NormalIdent -> Doc.text txt + + let 
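(* classifyIdentContent above marks an identifier as exotic when it is a
   keyword other than list or when it contains characters outside letters,
   digits, underscore and prime; printIdentLike then escapes such names, so a
   hypothetical record field called type is printed as \"type" whereas name is
   printed unchanged. *)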
printLident l = match l with + | Longident.Lident txt -> printIdentLike txt + | Longident.Ldot (path, txt) -> + let txts = Longident.flatten path in + Doc.concat [ + Doc.join ~sep:Doc.dot (List.map Doc.text txts); + Doc.dot; + printIdentLike txt; + ] + | _ -> Doc.text("printLident: Longident.Lapply is not supported") + + let printLongidentLocation l cmtTbl = + let doc = printLongident l.Location.txt in + printComments doc cmtTbl l.loc + + (* Module.SubModule.x *) + let printLidentPath path cmtTbl = + let doc = printLident path.Location.txt in + printComments doc cmtTbl path.loc + + (* Module.SubModule.x or Module.SubModule.X *) + let printIdentPath path cmtTbl = + let doc = printLident path.Location.txt in + printComments doc cmtTbl path.loc + + let printStringLoc sloc cmtTbl = + let doc = printIdentLike sloc.Location.txt in + printComments doc cmtTbl sloc.loc + + let printConstant c = match c with + | Parsetree.Pconst_integer (s, suffix) -> + begin match suffix with + | Some c -> Doc.text (s ^ (Char.escaped c)) + | None -> Doc.text s + end + | Pconst_string (txt, None) -> + Doc.text ("\"" ^ txt ^ "\"") + | Pconst_string (txt, Some prefix) -> + Doc.concat [ + if prefix = "" then Doc.nil else Doc.text prefix; + Doc.text ("`" ^ txt ^ "`") + ] + | Pconst_float (s, _) -> Doc.text s + | Pconst_char c -> Doc.text ("'" ^ (Char.escaped c) ^ "'") + + let rec printStructure (s : Parsetree.structure) t = + match s with + | [] -> printCommentsInside t Location.none + | structure -> + printList + ~getLoc:(fun s -> s.Parsetree.pstr_loc) + ~nodes:structure + ~print:printStructureItem + t + + and printStructureItem (si: Parsetree.structure_item) cmtTbl = + match si.pstr_desc with + | Pstr_value(rec_flag, valueBindings) -> + let recFlag = match rec_flag with + | Asttypes.Nonrecursive -> Doc.nil + | Asttypes.Recursive -> Doc.text "rec " + in + printValueBindings ~recFlag valueBindings cmtTbl + | Pstr_type(recFlag, typeDeclarations) -> + let recFlag = match recFlag with + | Asttypes.Nonrecursive -> Doc.nil + | Asttypes.Recursive -> Doc.text "rec " + in + printTypeDeclarations ~recFlag typeDeclarations cmtTbl + | Pstr_primitive valueDescription -> + printValueDescription valueDescription cmtTbl + | Pstr_eval (expr, attrs) -> + let exprDoc = + let doc = printExpressionWithComments expr cmtTbl in + match Parens.structureExpr expr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr braces + | Nothing -> doc + in + Doc.concat [ + printAttributes attrs; + exprDoc; + ] + | Pstr_attribute attr -> Doc.concat [ + Doc.text "@"; + printAttributeWithComments attr cmtTbl + ] + | Pstr_extension (extension, attrs) -> Doc.concat [ + printAttributes attrs; + Doc.concat [printExtensionWithComments ~atModuleLvl:true extension cmtTbl]; + ] + | Pstr_include includeDeclaration -> + printIncludeDeclaration includeDeclaration cmtTbl + | Pstr_open openDescription -> + printOpenDescription openDescription cmtTbl + | Pstr_modtype modTypeDecl -> + printModuleTypeDeclaration modTypeDecl cmtTbl + | Pstr_module moduleBinding -> + printModuleBinding ~isRec:false moduleBinding cmtTbl 0 + | Pstr_recmodule moduleBindings -> + printListi + ~getLoc:(fun mb -> mb.Parsetree.pmb_loc) + ~nodes:moduleBindings + ~print:(printModuleBinding ~isRec:true) + cmtTbl + | Pstr_exception extensionConstructor -> + printExceptionDef extensionConstructor cmtTbl + | Pstr_typext typeExtension -> + printTypeExtension typeExtension cmtTbl + | Pstr_class _ | Pstr_class_type _ -> Doc.nil + + and printTypeExtension (te : 
Parsetree.type_extension) cmtTbl = + let prefix = Doc.text "type " in + let name = printLidentPath te.ptyext_path cmtTbl in + let typeParams = printTypeParams te.ptyext_params cmtTbl in + let extensionConstructors = + let ecs = te.ptyext_constructors in + let forceBreak = + match (ecs, List.rev ecs) with + | (first::_, last::_) -> + first.pext_loc.loc_start.pos_lnum > te.ptyext_path.loc.loc_end.pos_lnum || + first.pext_loc.loc_start.pos_lnum < last.pext_loc.loc_end.pos_lnum + | _ -> false + in + let privateFlag = match te.ptyext_private with + | Asttypes.Private -> Doc.concat [ + Doc.text "private"; + Doc.line; + ] + | Public -> Doc.nil + in + let rows = + printListi + ~getLoc:(fun n -> n.Parsetree.pext_loc) + ~print:printExtensionConstructor + ~nodes: ecs + ~forceBreak + cmtTbl + in + Doc.breakableGroup ~forceBreak ( + Doc.indent ( + Doc.concat [ + Doc.line; + privateFlag; + rows; + (* Doc.join ~sep:Doc.line ( *) + (* List.mapi printExtensionConstructor ecs *) + (* ) *) + ] + ) + ) + in + Doc.group ( + Doc.concat [ + printAttributes ~loc: te.ptyext_path.loc te.ptyext_attributes; + prefix; + name; + typeParams; + Doc.text " +="; + extensionConstructors; + ] + ) + + and printModuleBinding ~isRec moduleBinding cmtTbl i = + let prefix = if i = 0 then + Doc.concat [ + Doc.text "module "; + if isRec then Doc.text "rec " else Doc.nil; + ] + else + Doc.text "and " + in + let (modExprDoc, modConstraintDoc) = + match moduleBinding.pmb_expr with + | {pmod_desc = Pmod_constraint (modExpr, modType)} -> + ( + printModExpr modExpr cmtTbl, + Doc.concat [ + Doc.text ": "; + printModType modType cmtTbl + ] + ) + | modExpr -> + (printModExpr modExpr cmtTbl, Doc.nil) + in + let modName = + let doc = Doc.text moduleBinding.pmb_name.Location.txt in + printComments doc cmtTbl moduleBinding.pmb_name.loc + in + let doc = Doc.concat [ + printAttributes ~loc:moduleBinding.pmb_name.loc moduleBinding.pmb_attributes; + prefix; + modName; + modConstraintDoc; + Doc.text " = "; + modExprDoc; + ] in + printComments doc cmtTbl moduleBinding.pmb_loc + + and printModuleTypeDeclaration (modTypeDecl : Parsetree.module_type_declaration) cmtTbl = + let modName = + let doc = Doc.text modTypeDecl.pmtd_name.txt in + printComments doc cmtTbl modTypeDecl.pmtd_name.loc + in + Doc.concat [ + printAttributes modTypeDecl.pmtd_attributes; + Doc.text "module type "; + modName; + (match modTypeDecl.pmtd_type with + | None -> Doc.nil + | Some modType -> Doc.concat [ + Doc.text " = "; + printModType modType cmtTbl; + ]); + ] + + and printModType modType cmtTbl = + let modTypeDoc = match modType.pmty_desc with + | Parsetree.Pmty_ident longident -> + Doc.concat [ + printAttributes ~loc:longident.loc modType.pmty_attributes; + printLongidentLocation longident cmtTbl + ] + | Pmty_signature signature -> + let signatureDoc = Doc.breakableGroup ~forceBreak:true ( + Doc.concat [ + Doc.lbrace; + Doc.indent ( + Doc.concat [ + Doc.line; + printSignature signature cmtTbl; + ] + ); + Doc.line; + Doc.rbrace; + ] + ) in + Doc.concat [ + printAttributes modType.pmty_attributes; + signatureDoc + ] + | Pmty_functor _ -> + let (parameters, returnType) = ParsetreeViewer.functorType modType in + let parametersDoc = match parameters with + | [] -> Doc.nil + | [attrs, {Location.txt = "_"; loc}, Some modType] -> + let cmtLoc = + {loc with loc_end = modType.Parsetree.pmty_loc.loc_end} + in + let attrs = match attrs with + | [] -> Doc.nil + | attrs -> Doc.concat [ + Doc.join ~sep:Doc.line (List.map printAttribute attrs); + Doc.line; + ] in + let doc = Doc.concat [ + 
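(* The index-based prefix renders a hypothetical recursive group as

     module rec A: TA = ...
     and B: TB = ...

   only the first binding gets the module (and rec) keyword, the following
   ones get and, and a Pmod_constraint on the binding surfaces as the : TA
   annotation printed before the =. *)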
attrs; + printModType modType cmtTbl + ] in + printComments doc cmtTbl cmtLoc + | params -> + Doc.group ( + Doc.concat [ + Doc.lparen; + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) ( + List.map (fun (attrs, lbl, modType) -> + let cmtLoc = match modType with + | None -> lbl.Asttypes.loc + | Some modType -> + {lbl.Asttypes.loc with loc_end = modType.Parsetree.pmty_loc.loc_end} + in + let attrs = match attrs with + | [] -> Doc.nil + | attrs -> Doc.concat [ + Doc.join ~sep:Doc.line (List.map printAttribute attrs); + Doc.line; + ] in + let lblDoc = if lbl.Location.txt = "_" then Doc.nil + else + let doc = Doc.text lbl.txt in + printComments doc cmtTbl lbl.loc + in + let doc = Doc.concat [ + attrs; + lblDoc; + (match modType with + | None -> Doc.nil + | Some modType -> Doc.concat [ + if lbl.txt = "_" then Doc.nil else Doc.text ": "; + printModType modType cmtTbl; + ]); + ] in + printComments doc cmtTbl cmtLoc + ) params + ); + ] + ); + Doc.trailingComma; + Doc.softLine; + Doc.rparen; + ] + ) + in + let returnDoc = + let doc = printModType returnType cmtTbl in + if Parens.modTypeFunctorReturn returnType then addParens doc else doc + in + Doc.group ( + Doc.concat [ + parametersDoc; + Doc.group ( + Doc.concat [ + Doc.text " =>"; + Doc.line; + returnDoc; + ] + ) + ] + ) + | Pmty_typeof modExpr -> Doc.concat [ + Doc.text "module type of "; + printModExpr modExpr cmtTbl + ] + | Pmty_extension extension -> printExtensionWithComments ~atModuleLvl:false extension cmtTbl + | Pmty_alias longident -> Doc.concat [ + Doc.text "module "; + printLongidentLocation longident cmtTbl; + ] + | Pmty_with (modType, withConstraints) -> + let operand = + let doc = printModType modType cmtTbl in + if Parens.modTypeWithOperand modType then addParens doc else doc + in + Doc.group ( + Doc.concat [ + operand; + Doc.indent ( + Doc.concat [ + Doc.line; + printWithConstraints withConstraints cmtTbl; + ] + ) + ] + ) + in + let attrsAlreadyPrinted = match modType.pmty_desc with + | Pmty_functor _ | Pmty_signature _ | Pmty_ident _ -> true + | _ -> false + in + let doc =Doc.concat [ + if attrsAlreadyPrinted then Doc.nil else printAttributes modType.pmty_attributes; + modTypeDoc; + ] in + printComments doc cmtTbl modType.pmty_loc + + and printWithConstraints withConstraints cmtTbl = + let rows = List.mapi (fun i withConstraint -> + Doc.group ( + Doc.concat [ + if i == 0 then Doc.text "with " else Doc.text "and "; + printWithConstraint withConstraint cmtTbl; + ] + ) + ) withConstraints + in + Doc.join ~sep:Doc.line rows + + and printWithConstraint (withConstraint : Parsetree.with_constraint) cmtTbl = + match withConstraint with + (* with type X.t = ... 
*) + | Pwith_type (longident, typeDeclaration) -> + Doc.group (printTypeDeclaration + ~name:(printLidentPath longident cmtTbl) + ~equalSign:"=" + ~recFlag:Doc.nil + 0 + typeDeclaration + CommentTable.empty) + (* with module X.Y = Z *) + | Pwith_module ({txt = longident1}, {txt = longident2}) -> + Doc.concat [ + Doc.text "module "; + printLongident longident1; + Doc.text " ="; + Doc.indent ( + Doc.concat [ + Doc.line; + printLongident longident2; + ] + ) + ] + (* with type X.t := ..., same format as [Pwith_type] *) + | Pwith_typesubst (longident, typeDeclaration) -> + Doc.group(printTypeDeclaration + ~name:(printLidentPath longident cmtTbl) + ~equalSign:":=" + ~recFlag:Doc.nil + 0 + typeDeclaration + CommentTable.empty) + | Pwith_modsubst ({txt = longident1}, {txt = longident2}) -> + Doc.concat [ + Doc.text "module "; + printLongident longident1; + Doc.text " :="; + Doc.indent ( + Doc.concat [ + Doc.line; + printLongident longident2; + ] + ) + ] + + and printSignature signature cmtTbl = + match signature with + | [] -> printCommentsInside cmtTbl Location.none + | signature -> + printList + ~getLoc:(fun s -> s.Parsetree.psig_loc) + ~nodes:signature + ~print:printSignatureItem + cmtTbl + + and printSignatureItem (si : Parsetree.signature_item) cmtTbl = + match si.psig_desc with + | Parsetree.Psig_value valueDescription -> + printValueDescription valueDescription cmtTbl + | Psig_type (recFlag, typeDeclarations) -> + let recFlag = match recFlag with + | Asttypes.Nonrecursive -> Doc.nil + | Asttypes.Recursive -> Doc.text "rec " + in + printTypeDeclarations ~recFlag typeDeclarations cmtTbl + | Psig_typext typeExtension -> + printTypeExtension typeExtension cmtTbl + | Psig_exception extensionConstructor -> + printExceptionDef extensionConstructor cmtTbl + | Psig_module moduleDeclaration -> + printModuleDeclaration moduleDeclaration cmtTbl + | Psig_recmodule moduleDeclarations -> + printRecModuleDeclarations moduleDeclarations cmtTbl + | Psig_modtype modTypeDecl -> + printModuleTypeDeclaration modTypeDecl cmtTbl + | Psig_open openDescription -> + printOpenDescription openDescription cmtTbl + | Psig_include includeDescription -> + printIncludeDescription includeDescription cmtTbl + | Psig_attribute attr -> Doc.concat [ + Doc.text "@"; + printAttributeWithComments attr cmtTbl + ] + | Psig_extension (extension, attrs) -> Doc.concat [ + printAttributes attrs; + Doc.concat [printExtensionWithComments ~atModuleLvl:true extension cmtTbl]; + ] + | Psig_class _ | Psig_class_type _ -> Doc.nil + + and printRecModuleDeclarations moduleDeclarations cmtTbl = + printListi + ~getLoc:(fun n -> n.Parsetree.pmd_loc) + ~nodes:moduleDeclarations + ~print:printRecModuleDeclaration + cmtTbl + + and printRecModuleDeclaration md cmtTbl i = + let body = match md.pmd_type.pmty_desc with + | Parsetree.Pmty_alias longident -> + Doc.concat [Doc.text " = "; printLongidentLocation longident cmtTbl] + | _ -> + let needsParens = match md.pmd_type.pmty_desc with + | Pmty_with _ -> true + | _ -> false + in + let modTypeDoc = + let doc = printModType md.pmd_type cmtTbl in + if needsParens then addParens doc else doc + in + Doc.concat [Doc.text ": "; modTypeDoc] + in + let prefix = if i < 1 then "module rec " else "and " in + Doc.concat [ + printAttributes ~loc:md.pmd_name.loc md.pmd_attributes; + Doc.text prefix; + printComments (Doc.text md.pmd_name.txt) cmtTbl md.pmd_name.loc; + body + ] + + and printModuleDeclaration (md: Parsetree.module_declaration) cmtTbl = + let body = match md.pmd_type.pmty_desc with + | Parsetree.Pmty_alias 
longident -> + Doc.concat [Doc.text " = "; printLongidentLocation longident cmtTbl] + | _ -> Doc.concat [Doc.text ": "; printModType md.pmd_type cmtTbl] + in + Doc.concat [ + printAttributes ~loc:md.pmd_name.loc md.pmd_attributes; + Doc.text "module "; + printComments (Doc.text md.pmd_name.txt) cmtTbl md.pmd_name.loc; + body + ] + + and printOpenDescription (openDescription : Parsetree.open_description) p = + Doc.concat [ + printAttributes openDescription.popen_attributes; + Doc.text "open"; + (match openDescription.popen_override with + | Asttypes.Fresh -> Doc.space + | Asttypes.Override -> Doc.text "! "); + printLongidentLocation openDescription.popen_lid p + ] + + and printIncludeDescription (includeDescription: Parsetree.include_description) cmtTbl = + Doc.concat [ + printAttributes includeDescription.pincl_attributes; + Doc.text "include "; + printModType includeDescription.pincl_mod cmtTbl; + ] + + and printIncludeDeclaration (includeDeclaration : Parsetree.include_declaration) cmtTbl = + let isJsFfiImport = List.exists (fun attr -> + match attr with + | ({Location.txt = "ns.jsFfi"}, _) -> true + | _ -> false + ) includeDeclaration.pincl_attributes + in + if isJsFfiImport then + printJsFfiImportDeclaration includeDeclaration cmtTbl + else + Doc.concat [ + printAttributes includeDeclaration.pincl_attributes; + Doc.text "include "; + let includeDoc = + printModExpr includeDeclaration.pincl_mod cmtTbl + in + if Parens.includeModExpr includeDeclaration.pincl_mod then + addParens includeDoc + else includeDoc; + ] + + and printJsFfiImport (valueDescription: Parsetree.value_description) cmtTbl = + let attrs = List.filter (fun attr -> + match attr with + | ({Location.txt = "bs.val" | "genType.import" | "bs.scope" }, _) -> false + | _ -> true + ) valueDescription.pval_attributes in + let (ident, alias) = match valueDescription.pval_prim with + | primitive::_ -> + if primitive <> valueDescription.pval_name.txt then + ( + printIdentLike primitive, + Doc.concat [ + Doc.text " as "; + printIdentLike valueDescription.pval_name.txt; + ] + ) + else + (printIdentLike primitive, Doc.nil) + | _ -> + (printIdentLike valueDescription.pval_name.txt, Doc.nil) + in + Doc.concat [ + printAttributes ~loc:valueDescription.pval_name.loc attrs; + ident; + alias; + Doc.text ": "; + printTypExpr valueDescription.pval_type cmtTbl; + ] + + and printJsFfiImportScope (scope: ParsetreeViewer.jsImportScope) = + match scope with + | JsGlobalImport -> Doc.nil + | JsModuleImport modName -> + Doc.concat [ + Doc.text " from "; + Doc.doubleQuote; + Doc.text modName; + Doc.doubleQuote; + ] + | JsScopedImport idents -> + Doc.concat [ + Doc.text " from "; + Doc.join ~sep:Doc.dot (List.map Doc.text idents) + ] + + and printJsFfiImportDeclaration (includeDeclaration: Parsetree.include_declaration) cmtTbl = + let attrs = List.filter (fun attr -> + match attr with + | ({Location.txt = "ns.jsFfi"}, _) -> false + | _ -> true + ) includeDeclaration.pincl_attributes + in + let imports = ParsetreeViewer.extractValueDescriptionFromModExpr includeDeclaration.pincl_mod in + let scope = match imports with + | vd::_ -> ParsetreeViewer.classifyJsImport vd + | [] -> ParsetreeViewer.JsGlobalImport + in + let scopeDoc = printJsFfiImportScope scope in + Doc.group ( + Doc.concat [ + printAttributes attrs; + Doc.text "import "; + Doc.group ( + Doc.concat [ + Doc.lbrace; + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) ( + List.map (fun vd -> printJsFfiImport vd cmtTbl) imports + ) + ] + ); + 
Doc.trailingComma; + Doc.softLine; + Doc.rbrace; + ] + ); + scopeDoc; + ] + ) + + and printValueBindings ~recFlag (vbs: Parsetree.value_binding list) cmtTbl = + printListi + ~getLoc:(fun vb -> vb.Parsetree.pvb_loc) + ~nodes:vbs + ~print:(printValueBinding ~recFlag) + cmtTbl + + and printValueDescription valueDescription cmtTbl = + let isExternal = + match valueDescription.pval_prim with | [] -> false | _ -> true + in + Doc.group ( + Doc.concat [ + printAttributes valueDescription.pval_attributes; + Doc.text (if isExternal then "external " else "let "); + printComments + (printIdentLike valueDescription.pval_name.txt) + cmtTbl + valueDescription.pval_name.loc; + Doc.text ": "; + printTypExpr valueDescription.pval_type cmtTbl; + if isExternal then + Doc.group ( + Doc.concat [ + Doc.text " ="; + Doc.indent( + Doc.concat [ + Doc.line; + Doc.join ~sep:Doc.line ( + List.map(fun s -> Doc.concat [ + Doc.text "\""; + Doc.text s; + Doc.text "\""; + ]) + valueDescription.pval_prim + ); + ] + ) + ] + ) + else Doc.nil + ] + ) + + and printTypeDeclarations ~recFlag typeDeclarations cmtTbl = + printListi + ~getLoc:(fun n -> n.Parsetree.ptype_loc) + ~nodes:typeDeclarations + ~print:(printTypeDeclaration2 ~recFlag) + cmtTbl + + (* + * type_declaration = { + * ptype_name: string loc; + * ptype_params: (core_type * variance) list; + * (* ('a1,...'an) t; None represents _*) + * ptype_cstrs: (core_type * core_type * Location.t) list; + * (* ... constraint T1=T1' ... constraint Tn=Tn' *) + * ptype_kind: type_kind; + * ptype_private: private_flag; (* = private ... *) + * ptype_manifest: core_type option; (* = T *) + * ptype_attributes: attributes; (* ... [@@id1] [@@id2] *) + * ptype_loc: Location.t; + * } + * + * + * type t (abstract, no manifest) + * type t = T0 (abstract, manifest=T0) + * type t = C of T | ... (variant, no manifest) + * type t = T0 = C of T | ... (variant, manifest=T0) + * type t = {l: T; ...} (record, no manifest) + * type t = T0 = {l : T; ...} (record, manifest=T0) + * type t = .. 
(open, no manifest) + * + * + * and type_kind = + * | Ptype_abstract + * | Ptype_variant of constructor_declaration list + * (* Invariant: non-empty list *) + * | Ptype_record of label_declaration list + * (* Invariant: non-empty list *) + * | Ptype_open + *) + and printTypeDeclaration ~name ~equalSign ~recFlag i (td: Parsetree.type_declaration) cmtTbl = + let (hasGenType, attrs) = ParsetreeViewer.splitGenTypeAttr td.ptype_attributes in + let attrs = printAttributes ~loc:td.ptype_loc attrs in + let prefix = if i > 0 then + Doc.concat [ + Doc.text "and "; + if hasGenType then Doc.text "export " else Doc.nil + ] + else + Doc.concat [ + Doc.text (if hasGenType then "export type " else "type "); + recFlag + ] + in + let typeName = name in + let typeParams = printTypeParams td.ptype_params cmtTbl in + let manifestAndKind = match td.ptype_kind with + | Ptype_abstract -> + begin match td.ptype_manifest with + | None -> Doc.nil + | Some(typ) -> + Doc.concat [ + Doc.concat [Doc.space; Doc.text equalSign; Doc.space]; + printPrivateFlag td.ptype_private; + printTypExpr typ cmtTbl; + ] + end + | Ptype_open -> Doc.concat [ + Doc.concat [Doc.space; Doc.text equalSign; Doc.space]; + printPrivateFlag td.ptype_private; + Doc.text ".."; + ] + | Ptype_record(lds) -> + let manifest = match td.ptype_manifest with + | None -> Doc.nil + | Some(typ) -> Doc.concat [ + Doc.concat [Doc.space; Doc.text equalSign; Doc.space]; + printTypExpr typ cmtTbl; + ] + in + Doc.concat [ + manifest; + Doc.concat [Doc.space; Doc.text equalSign; Doc.space]; + printPrivateFlag td.ptype_private; + printRecordDeclaration lds cmtTbl; + ] + | Ptype_variant(cds) -> + let manifest = match td.ptype_manifest with + | None -> Doc.nil + | Some(typ) -> Doc.concat [ + Doc.concat [Doc.space; Doc.text equalSign; Doc.space]; + printTypExpr typ cmtTbl; + ] + in + Doc.concat [ + manifest; + Doc.concat [Doc.space; Doc.text equalSign]; + printConstructorDeclarations ~privateFlag:td.ptype_private cds cmtTbl; + ] + in + let constraints = printTypeDefinitionConstraints td.ptype_cstrs in + Doc.group ( + Doc.concat [ + attrs; + prefix; + typeName; + typeParams; + manifestAndKind; + constraints; + ] + ) + + and printTypeDeclaration2 ~recFlag (td: Parsetree.type_declaration) cmtTbl i = + let name = + let doc = printIdentLike td.Parsetree.ptype_name.txt in + printComments doc cmtTbl td.ptype_name.loc + in + let equalSign = "=" in + let (hasGenType, attrs) = ParsetreeViewer.splitGenTypeAttr td.ptype_attributes in + let attrs = printAttributes ~loc:td.ptype_loc attrs in + let prefix = if i > 0 then + Doc.concat [ + Doc.text "and "; + if hasGenType then Doc.text "export " else Doc.nil + ] + else + Doc.concat [ + Doc.text (if hasGenType then "export type " else "type "); + recFlag + ] + in + let typeName = name in + let typeParams = printTypeParams td.ptype_params cmtTbl in + let manifestAndKind = match td.ptype_kind with + | Ptype_abstract -> + begin match td.ptype_manifest with + | None -> Doc.nil + | Some(typ) -> + Doc.concat [ + Doc.concat [Doc.space; Doc.text equalSign; Doc.space]; + printPrivateFlag td.ptype_private; + printTypExpr typ cmtTbl; + ] + end + | Ptype_open -> Doc.concat [ + Doc.concat [Doc.space; Doc.text equalSign; Doc.space]; + printPrivateFlag td.ptype_private; + Doc.text ".."; + ] + | Ptype_record(lds) -> + let manifest = match td.ptype_manifest with + | None -> Doc.nil + | Some(typ) -> Doc.concat [ + Doc.concat [Doc.space; Doc.text equalSign; Doc.space]; + printTypExpr typ cmtTbl; + ] + in + Doc.concat [ + manifest; + Doc.concat [Doc.space; 
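(* Combining manifest and kind, a hypothetical declaration that has both, such
   as

     type t = T0 = {l: string}

   prints the manifest after the first = and the record body after the second;
   the same printer is reused with := as the equal sign when printing
   with type ... := constraints. *)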
Doc.text equalSign; Doc.space]; + printPrivateFlag td.ptype_private; + printRecordDeclaration lds cmtTbl; + ] + | Ptype_variant(cds) -> + let manifest = match td.ptype_manifest with + | None -> Doc.nil + | Some(typ) -> Doc.concat [ + Doc.concat [Doc.space; Doc.text equalSign; Doc.space]; + printTypExpr typ cmtTbl; + ] + in + Doc.concat [ + manifest; + Doc.concat [Doc.space; Doc.text equalSign]; + printConstructorDeclarations ~privateFlag:td.ptype_private cds cmtTbl; + ] + in + let constraints = printTypeDefinitionConstraints td.ptype_cstrs in + Doc.group ( + Doc.concat [ + attrs; + prefix; + typeName; + typeParams; + manifestAndKind; + constraints; + ] + ) + + and printTypeDefinitionConstraints cstrs = + match cstrs with + | [] -> Doc.nil + | cstrs -> Doc.indent ( + Doc.group ( + Doc.concat [ + Doc.line; + Doc.group( + Doc.join ~sep:Doc.line ( + List.map printTypeDefinitionConstraint cstrs + ) + ) + ] + ) + ) + + and printTypeDefinitionConstraint ((typ1, typ2, _loc ): Parsetree.core_type * Parsetree.core_type * Location.t) = + Doc.concat [ + Doc.text "constraint "; + printTypExpr typ1 CommentTable.empty; + Doc.text " = "; + printTypExpr typ2 CommentTable.empty; + ] + + and printPrivateFlag (flag : Asttypes.private_flag) = match flag with + | Private -> Doc.text "private " + | Public -> Doc.nil + + and printTypeParams typeParams cmtTbl = + match typeParams with + | [] -> Doc.nil + | typeParams -> + Doc.group ( + Doc.concat [ + Doc.lessThan; + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) ( + List.map (fun typeParam -> + let doc = printTypeParam typeParam cmtTbl in + printComments doc cmtTbl (fst typeParam).Parsetree.ptyp_loc + ) typeParams + ) + ] + ); + Doc.trailingComma; + Doc.softLine; + Doc.greaterThan; + ] + ) + + and printTypeParam (param : (Parsetree.core_type * Asttypes.variance)) cmtTbl = + let (typ, variance) = param in + let printedVariance = match variance with + | Covariant -> Doc.text "+" + | Contravariant -> Doc.text "-" + | Invariant -> Doc.nil + in + Doc.concat [ + printedVariance; + printTypExpr typ cmtTbl + ] + + and printRecordDeclaration (lds: Parsetree.label_declaration list) cmtTbl = + let forceBreak = match (lds, List.rev lds) with + | (first::_, last::_) -> + first.pld_loc.loc_start.pos_lnum < last.pld_loc.loc_end.pos_lnum + | _ -> false + in + Doc.breakableGroup ~forceBreak ( + Doc.concat [ + Doc.lbrace; + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) + (List.map (fun ld -> + let doc = printLabelDeclaration ld cmtTbl in + printComments doc cmtTbl ld.Parsetree.pld_loc + ) lds) + ] + ); + Doc.trailingComma; + Doc.softLine; + Doc.rbrace; + ] + ) + + and printConstructorDeclarations + ~privateFlag (cds: Parsetree.constructor_declaration list) cmtTbl + = + let forceBreak = match (cds, List.rev cds) with + | (first::_, last::_) -> + first.pcd_loc.loc_start.pos_lnum < last.pcd_loc.loc_end.pos_lnum + | _ -> false + in + let privateFlag = match privateFlag with + | Asttypes.Private -> Doc.concat [ + Doc.text "private"; + Doc.line; + ] + | Public -> Doc.nil + in + let rows = + printListi + ~getLoc:(fun cd -> cd.Parsetree.pcd_loc) + ~nodes:cds + ~print:(fun cd cmtTbl i -> + let doc = printConstructorDeclaration2 i cd cmtTbl in + printComments doc cmtTbl cd.Parsetree.pcd_loc + ) + ~forceBreak + cmtTbl + in + Doc.breakableGroup ~forceBreak ( + Doc.indent ( + Doc.concat [ + Doc.line; + privateFlag; + rows; + ] + ) + ) + + and printConstructorDeclaration2 i (cd : 
Parsetree.constructor_declaration) cmtTbl = + let attrs = printAttributes cd.pcd_attributes in + let bar = if i > 0 then Doc.text "| " + else Doc.ifBreaks (Doc.text "| ") Doc.nil + in + let constrName = + let doc = Doc.text cd.pcd_name.txt in + printComments doc cmtTbl cd.pcd_name.loc + in + let constrArgs = printConstructorArguments ~indent:true cd.pcd_args cmtTbl in + let gadt = match cd.pcd_res with + | None -> Doc.nil + | Some(typ) -> Doc.indent ( + Doc.concat [ + Doc.text ": "; + printTypExpr typ cmtTbl; + ] + ) + in + Doc.concat [ + bar; + Doc.group ( + Doc.concat [ + attrs; (* TODO: fix parsing of attributes, so when can print them above the bar? *) + constrName; + constrArgs; + gadt; + ] + ) + ] + + and printConstructorArguments ~indent (cdArgs : Parsetree.constructor_arguments) cmtTbl = + match cdArgs with + | Pcstr_tuple [] -> Doc.nil + | Pcstr_tuple types -> + let args = Doc.concat [ + Doc.lparen; + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) ( + List.map (fun typexpr -> + printTypExpr typexpr cmtTbl + ) types + ) + ] + ); + Doc.trailingComma; + Doc.softLine; + Doc.rparen; + ] in + Doc.group ( + if indent then Doc.indent args else args + ) + | Pcstr_record lds -> + let args = Doc.concat [ + Doc.lparen; + (* manually inline the printRecordDeclaration, gives better layout *) + Doc.lbrace; + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) + (List.map (fun ld -> + let doc = printLabelDeclaration ld cmtTbl in + printComments doc cmtTbl ld.Parsetree.pld_loc + ) lds) + ] + ); + Doc.trailingComma; + Doc.softLine; + Doc.rbrace; + Doc.rparen; + ] in + if indent then Doc.indent args else args + + and printLabelDeclaration (ld : Parsetree.label_declaration) cmtTbl = + let attrs = printAttributes ~loc:ld.pld_name.loc ld.pld_attributes in + let mutableFlag = match ld.pld_mutable with + | Mutable -> Doc.text "mutable " + | Immutable -> Doc.nil + in + let name = + let doc = printIdentLike ld.pld_name.txt in + printComments doc cmtTbl ld.pld_name.loc + in + Doc.group ( + Doc.concat [ + attrs; + mutableFlag; + name; + Doc.text ": "; + printTypExpr ld.pld_type cmtTbl; + ] + ) + + and printTypExpr (typExpr : Parsetree.core_type) cmtTbl = + let renderedType = match typExpr.ptyp_desc with + | Ptyp_any -> Doc.text "_" + | Ptyp_var var -> Doc.concat [ + Doc.text "'"; + printIdentLike var; + ] + | Ptyp_extension(extension) -> + printExtensionWithComments ~atModuleLvl:false extension cmtTbl + | Ptyp_alias(typ, alias) -> + let typ = + (* Technically type t = (string, float) => unit as 'x, doesn't require + * parens around the arrow expression. This is very confusing though. + * Is the "as" part of "unit" or "(string, float) => unit". 
By printing + * parens we guide the user towards its meaning.*) + let needsParens = match typ.ptyp_desc with + | Ptyp_arrow _ -> true + | _ -> false + in + let doc = printTypExpr typ cmtTbl in + if needsParens then + Doc.concat [Doc.lparen; doc; Doc.rparen] + else + doc + in + Doc.concat [typ; Doc.text " as "; Doc.concat [Doc.text "'"; printIdentLike alias]] + | Ptyp_constr({txt = Longident.Ldot(Longident.Lident "Js", "t")}, [{ptyp_desc = Ptyp_object (_fields, _openFlag)} as typ]) -> + let bsObject = printTypExpr typ cmtTbl in + begin match typExpr.ptyp_attributes with + | [] -> bsObject + | attrs -> + Doc.concat [ + Doc.group ( + Doc.join ~sep:Doc.line (List.map printAttribute attrs) + ); + Doc.space; + printTypExpr typ cmtTbl; + ] + end + | Ptyp_constr(longidentLoc, [{ ptyp_desc = Parsetree.Ptyp_tuple tuple }]) -> + let constrName = printLidentPath longidentLoc cmtTbl in + Doc.group( + Doc.concat([ + constrName; + Doc.lessThan; + printTupleType ~inline:true tuple cmtTbl; + Doc.greaterThan; + ]) + ) + | Ptyp_constr(longidentLoc, constrArgs) -> + let constrName = printLidentPath longidentLoc cmtTbl in + begin match constrArgs with + | [] -> constrName + | [{ + Parsetree.ptyp_desc = + Ptyp_constr({txt = Longident.Ldot(Longident.Lident "Js", "t")}, + [{ptyp_desc = Ptyp_object (fields, openFlag)}]) + }] -> + Doc.concat([ + constrName; + Doc.lessThan; + printBsObjectSugar ~inline:true fields openFlag cmtTbl; + Doc.greaterThan; + ]) + | _args -> Doc.group( + Doc.concat([ + constrName; + Doc.lessThan; + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) ( + List.map + (fun typexpr -> printTypExpr typexpr cmtTbl) + constrArgs + ) + ] + ); + Doc.trailingComma; + Doc.softLine; + Doc.greaterThan; + ]) + ) + end + | Ptyp_arrow _ -> + let (attrsBefore, args, returnType) = ParsetreeViewer.arrowType typExpr in + let returnTypeNeedsParens = match returnType.ptyp_desc with + | Ptyp_alias _ -> true + | _ -> false + in + let returnDoc = + let doc = printTypExpr returnType cmtTbl in + if returnTypeNeedsParens then + Doc.concat [Doc.lparen; doc; Doc.rparen] + else doc + in + let (isUncurried, attrs) = + ParsetreeViewer.processUncurriedAttribute attrsBefore + in + begin match args with + | [] -> Doc.nil + | [([], Nolabel, n)] when not isUncurried -> + let hasAttrsBefore = not (attrs = []) in + let attrs = if hasAttrsBefore then + Doc.concat [ + Doc.join ~sep:Doc.line (List.map printAttribute attrsBefore); + Doc.space; + ] + else Doc.nil + in + let typDoc = + let doc = printTypExpr n cmtTbl in + match n.ptyp_desc with + | Ptyp_arrow _ | Ptyp_tuple _ -> addParens doc + | _ -> doc + in + Doc.group ( + Doc.concat [ + Doc.group attrs; + Doc.group ( + if hasAttrsBefore then + Doc.concat [ + Doc.lparen; + Doc.indent ( + Doc.concat [ + Doc.softLine; + typDoc; + Doc.text " => "; + returnDoc; + ] + ); + Doc.softLine; + Doc.rparen + ] + else + Doc.concat [ + typDoc; + Doc.text " => "; + returnDoc; + ] + ) + ] + ) + | args -> + let attrs = match attrs with + | [] -> Doc.nil + | attrs -> Doc.concat [ + Doc.join ~sep:Doc.line (List.map printAttribute attrs); + Doc.space; + ] + in + let renderedArgs = Doc.concat [ + attrs; + Doc.text "("; + Doc.indent ( + Doc.concat [ + Doc.softLine; + if isUncurried then Doc.concat [Doc.dot; Doc.space] else Doc.nil; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) ( + List.map (fun tp -> printTypeParameter tp cmtTbl) args + ) + ] + ); + Doc.trailingComma; + Doc.softLine; + Doc.text ")"; + ] in + Doc.group ( + Doc.concat [ + renderedArgs; + 
Doc.text " => "; + returnDoc; + ] + ) + end + | Ptyp_tuple types -> printTupleType ~inline:false types cmtTbl + | Ptyp_object (fields, openFlag) -> + printBsObjectSugar ~inline:false fields openFlag cmtTbl + | Ptyp_poly([], typ) -> + printTypExpr typ cmtTbl + | Ptyp_poly(stringLocs, typ) -> + Doc.concat [ + Doc.join ~sep:Doc.space (List.map (fun {Location.txt; loc} -> + let doc = Doc.concat [Doc.text "'"; Doc.text txt] in + printComments doc cmtTbl loc + ) stringLocs); + Doc.dot; + Doc.space; + printTypExpr typ cmtTbl + ] + | Ptyp_package packageType -> + printPackageType ~printModuleKeywordAndParens:true packageType cmtTbl + | Ptyp_class _ -> + Doc.text "classes are not supported in types" + | Ptyp_variant (rowFields, closedFlag, labelsOpt) -> + let printRowField = function + | Parsetree.Rtag ({txt}, attrs, true, []) -> + Doc.concat [ + printAttributes attrs; + Doc.concat [Doc.text "#"; printIdentLike ~allowUident:true txt] + ] + | Rtag ({txt}, attrs, truth, types) -> + let doType t = match t.Parsetree.ptyp_desc with + | Ptyp_tuple _ -> printTypExpr t cmtTbl + | _ -> Doc.concat [ Doc.lparen; printTypExpr t cmtTbl; Doc.rparen ] + in + let printedTypes = List.map doType types in + let cases = Doc.join ~sep:(Doc.concat [Doc.line; Doc.text "& "]) printedTypes in + let cases = if truth then Doc.concat [Doc.line; Doc.text "& "; cases] else cases in + Doc.group (Doc.concat [ + printAttributes attrs; + Doc.concat [Doc.text "#"; printIdentLike ~allowUident:true txt]; + cases + ]) + | Rinherit coreType -> + printTypExpr coreType cmtTbl + in + let docs = List.map printRowField rowFields in + let cases = Doc.join ~sep:(Doc.concat [Doc.line; Doc.text "| "]) docs in + let cases = if docs = [] then cases else Doc.concat [Doc.text "| "; cases] in + let openingSymbol = + if closedFlag = Open + then Doc.greaterThan + else if labelsOpt = None + then Doc.nil + else Doc.lessThan in + let hasLabels = labelsOpt <> None && labelsOpt <> Some [] in + let labels = match labelsOpt with + | None + | Some([]) -> + Doc.nil + | Some(labels) -> + Doc.concat (List.map (fun label -> Doc.concat [Doc.line; Doc.text "#" ; printIdentLike ~allowUident:true label] ) labels) + in + let closingSymbol = if hasLabels then Doc.text " >" else Doc.nil in + Doc.group (Doc.concat [Doc.lbracket; openingSymbol; Doc.line; cases; closingSymbol; labels; Doc.line; Doc.rbracket]) + in + let shouldPrintItsOwnAttributes = match typExpr.ptyp_desc with + | Ptyp_arrow _ (* es6 arrow types print their own attributes *) + | Ptyp_constr({txt = Longident.Ldot(Longident.Lident "Js", "t")}, _) -> true + | _ -> false + in + let doc = begin match typExpr.ptyp_attributes with + | _::_ as attrs when not shouldPrintItsOwnAttributes -> + Doc.group ( + Doc.concat [ + printAttributes attrs; + renderedType; + ] + ) + | _ -> renderedType + end + in + printComments doc cmtTbl typExpr.ptyp_loc + + and printBsObjectSugar ~inline fields openFlag cmtTbl = + let doc = match fields with + | [] -> Doc.concat [ + Doc.lbrace; + (match openFlag with + | Asttypes.Closed -> Doc.dot + | Open -> Doc.dotdot); + Doc.rbrace + ] + | fields -> + Doc.concat [ + Doc.lbrace; + (match openFlag with + | Asttypes.Closed -> Doc.nil + | Open -> Doc.dotdot); + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) ( + List.map (fun field -> printObjectField field cmtTbl) fields + ) + ] + ); + Doc.trailingComma; + Doc.softLine; + Doc.rbrace; + ] + in + if inline then doc else Doc.group doc + + and printTupleType ~inline (types: Parsetree.core_type list) 
cmtTbl = + let tuple = Doc.concat([ + Doc.lparen; + Doc.indent ( + Doc.concat([ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) ( + List.map (fun typexpr -> printTypExpr typexpr cmtTbl) types + ) + ]) + ); + Doc.trailingComma; + Doc.softLine; + Doc.rparen; + ]) + in + if inline == false then Doc.group(tuple) else tuple + + and printObjectField (field : Parsetree.object_field) cmtTbl = + match field with + | Otag (labelLoc, attrs, typ) -> + let lbl = + let doc = Doc.text ("\"" ^ labelLoc.txt ^ "\"") in + printComments doc cmtTbl labelLoc.loc + in + let doc = Doc.concat [ + printAttributes ~loc:labelLoc.loc attrs; + lbl; + Doc.text ": "; + printTypExpr typ cmtTbl; + ] in + let cmtLoc = {labelLoc.loc with loc_end = typ.ptyp_loc.loc_end} in + printComments doc cmtTbl cmtLoc + | _ -> Doc.nil + + (* es6 arrow type arg + * type t = (~foo: string, ~bar: float=?, unit) => unit + * i.e. ~foo: string, ~bar: float *) + and printTypeParameter (attrs, lbl, typ) cmtTbl = + let (isUncurried, attrs) = ParsetreeViewer.processUncurriedAttribute attrs in + let uncurried = if isUncurried then Doc.concat [Doc.dot; Doc.space] else Doc.nil in + let attrs = match attrs with + | [] -> Doc.nil + | attrs -> Doc.concat [ + Doc.join ~sep:Doc.line (List.map printAttribute attrs); + Doc.line; + ] in + let label = match lbl with + | Asttypes.Nolabel -> Doc.nil + | Labelled lbl -> Doc.concat [ + Doc.text "~"; + printIdentLike lbl; + Doc.text ": "; + ] + | Optional lbl -> Doc.concat [ + Doc.text "~"; + printIdentLike lbl; + Doc.text ": "; + ] + in + let optionalIndicator = match lbl with + | Asttypes.Nolabel + | Labelled _ -> Doc.nil + | Optional _lbl -> Doc.text "=?" + in + let doc = Doc.group ( + Doc.concat [ + uncurried; + attrs; + label; + printTypExpr typ cmtTbl; + optionalIndicator; + ] + ) in + printComments doc cmtTbl typ.ptyp_loc + + and printValueBinding ~recFlag vb cmtTbl i = + let (hasGenType, attrs) = ParsetreeViewer.splitGenTypeAttr vb.pvb_attributes in + let attrs = printAttributes ~loc:vb.pvb_pat.ppat_loc attrs in + let header = + if i == 0 then + Doc.concat [ + if hasGenType then Doc.text "export " else Doc.text "let "; + recFlag + ] else + Doc.concat [ + Doc.text "and "; + if hasGenType then Doc.text "export " else Doc.nil + ] + in + match vb with + | {pvb_pat = + {ppat_desc = Ppat_constraint (pattern, {ptyp_desc = Ptyp_poly _})}; + pvb_expr = + {pexp_desc = Pexp_newtype _} as expr + } -> + let (_attrs, parameters, returnExpr) = ParsetreeViewer.funExpr expr in + let abstractType = match parameters with + | [NewTypes {locs = vars}] -> + Doc.concat [ + Doc.text "type "; + Doc.join ~sep:Doc.space (List.map (fun var -> Doc.text var.Asttypes.txt) vars); + Doc.dot; + ] + | _ -> Doc.nil + in + begin match returnExpr.pexp_desc with + | Pexp_constraint (expr, typ) -> + Doc.group ( + Doc.concat [ + attrs; + header; + printPattern pattern cmtTbl; + Doc.text ":"; + Doc.indent ( + Doc.concat [ + Doc.line; + abstractType; + Doc.space; + printTypExpr typ cmtTbl; + Doc.text " ="; + Doc.concat [ + Doc.line; + printExpressionWithComments expr cmtTbl; + ] + ] + ) + ] + ) + | _ -> Doc.nil + end + | _ -> + let (optBraces, expr) = ParsetreeViewer.processBracesAttr vb.pvb_expr in + let printedExpr = + let doc = printExpressionWithComments vb.pvb_expr cmtTbl in + match Parens.expr vb.pvb_expr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr braces + | Nothing -> doc + in + if ParsetreeViewer.isPipeExpr vb.pvb_expr then + Doc.customLayout [ + Doc.group ( + Doc.concat [ + 
attrs; + header; + printPattern vb.pvb_pat cmtTbl; + Doc.text " ="; + Doc.space; + printedExpr; + ] + ); + Doc.group ( + Doc.concat [ + attrs; + header; + printPattern vb.pvb_pat cmtTbl; + Doc.text " ="; + Doc.indent ( + Doc.concat [ + Doc.line; + printedExpr; + ] + ) + ] + ); + ] + else + let shouldIndent = + match optBraces with + | Some _ -> false + | _ -> + ParsetreeViewer.isBinaryExpression expr || + (match vb.pvb_expr with + | { + pexp_attributes = [({Location.txt="ns.ternary"}, _)]; + pexp_desc = Pexp_ifthenelse (ifExpr, _, _) + } -> + ParsetreeViewer.isBinaryExpression ifExpr || ParsetreeViewer.hasAttributes ifExpr.pexp_attributes + | { pexp_desc = Pexp_newtype _} -> false + | e -> + ParsetreeViewer.hasAttributes e.pexp_attributes || + ParsetreeViewer.isArrayAccess e + ) + in + Doc.group ( + Doc.concat [ + attrs; + header; + printPattern vb.pvb_pat cmtTbl; + Doc.text " ="; + if shouldIndent then + Doc.indent ( + Doc.concat [ + Doc.line; + printedExpr; + ] + ) + else + Doc.concat [ + Doc.space; + printedExpr; + ] + ] + ) + + and printPackageType ~printModuleKeywordAndParens (packageType: Parsetree.package_type) cmtTbl = + let doc = match packageType with + | (longidentLoc, []) -> Doc.group( + Doc.concat [ + printLongidentLocation longidentLoc cmtTbl; + ] + ) + | (longidentLoc, packageConstraints) -> Doc.group( + Doc.concat [ + printLongidentLocation longidentLoc cmtTbl; + printPackageConstraints packageConstraints cmtTbl; + Doc.softLine; + ] + ) + in + if printModuleKeywordAndParens then + Doc.concat[ + Doc.text "module("; + doc; + Doc.rparen + ] + else + doc + + and printPackageConstraints packageConstraints cmtTbl = + Doc.concat [ + Doc.text " with"; + Doc.indent ( + Doc.concat [ + Doc.line; + Doc.join ~sep:Doc.line ( + List.mapi (fun i pc -> + let (longident, typexpr) = pc in + let cmtLoc = {longident.Asttypes.loc with + loc_end = typexpr.Parsetree.ptyp_loc.loc_end + } in + let doc = printPackageConstraint i cmtTbl pc in + printComments doc cmtTbl cmtLoc + ) packageConstraints + ) + ] + ) + ] + + and printPackageConstraint i cmtTbl (longidentLoc, typ) = + let prefix = if i == 0 then Doc.text "type " else Doc.text "and type " in + Doc.concat [ + prefix; + printLongidentLocation longidentLoc cmtTbl; + Doc.text " = "; + printTypExpr typ cmtTbl; + ] + + and printExtensionWithComments ~atModuleLvl (stringLoc, payload) cmtTbl = + let extName = + let doc = Doc.concat [ + Doc.text "%"; + if atModuleLvl then Doc.text "%" else Doc.nil; + Doc.text stringLoc.Location.txt; + ] in + printComments doc cmtTbl stringLoc.Location.loc + in + match payload with + | Parsetree.PStr [{pstr_desc = Pstr_eval (expr, attrs)}] -> + let exprDoc = printExpressionWithComments expr cmtTbl in + let needsParens = match attrs with | [] -> false | _ -> true in + Doc.group ( + Doc.concat [ + extName; + addParens ( + Doc.concat [ + printAttributes attrs; + if needsParens then addParens exprDoc else exprDoc; + ] + ) + ] + ) + | _ -> extName + + and printPattern (p : Parsetree.pattern) cmtTbl = + let patternWithoutAttributes = match p.ppat_desc with + | Ppat_any -> Doc.text "_" + | Ppat_var var -> printIdentLike var.txt + | Ppat_constant c -> printConstant c + | Ppat_tuple patterns -> + Doc.group( + Doc.concat([ + Doc.lparen; + Doc.indent ( + Doc.concat([ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.text ","; Doc.line]) + (List.map (fun pat -> + printPattern pat cmtTbl) patterns) + ]) + ); + Doc.trailingComma; + Doc.softLine; + Doc.rparen + ]) + ) + | Ppat_array [] -> + Doc.concat [ + Doc.lbracket; + 
printCommentsInside cmtTbl p.ppat_loc; + Doc.rbracket; + ] + | Ppat_array patterns -> + Doc.group( + Doc.concat([ + Doc.text "["; + Doc.indent ( + Doc.concat([ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.text ","; Doc.line]) + (List.map (fun pat -> + printPattern pat cmtTbl) patterns) + ]) + ); + Doc.trailingComma; + Doc.softLine; + Doc.text "]"; + ]) + ) + | Ppat_construct({txt = Longident.Lident "()"}, _) -> + Doc.concat [ + Doc.lparen; + printCommentsInside cmtTbl p.ppat_loc; + Doc.rparen; + ] + | Ppat_construct({txt = Longident.Lident "[]"}, _) -> + Doc.concat [ + Doc.text "list["; + printCommentsInside cmtTbl p.ppat_loc; + Doc.rbracket; + ] + | Ppat_construct({txt = Longident.Lident "::"}, _) -> + let (patterns, tail) = ParsetreeViewer.collectPatternsFromListConstruct [] p in + let shouldHug = match (patterns, tail) with + | ([pat], + {ppat_desc = Ppat_construct({txt = Longident.Lident "[]"}, _)}) when ParsetreeViewer.isHuggablePattern pat -> true + | _ -> false + in + let children = Doc.concat([ + if shouldHug then Doc.nil else Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.text ","; Doc.line]) + (List.map (fun pat -> + printPattern pat cmtTbl) patterns); + begin match tail.Parsetree.ppat_desc with + | Ppat_construct({txt = Longident.Lident "[]"}, _) -> Doc.nil + | _ -> + let doc = Doc.concat [Doc.text "..."; printPattern tail cmtTbl] in + let tail = printComments doc cmtTbl tail.ppat_loc in + Doc.concat([Doc.text ","; Doc.line; tail]) + end; + ]) in + Doc.group( + Doc.concat([ + Doc.text "list["; + if shouldHug then children else Doc.concat [ + Doc.indent children; + Doc.ifBreaks (Doc.text ",") Doc.nil; + Doc.softLine; + ]; + Doc.rbracket; + ]) + ) + | Ppat_construct(constrName, constructorArgs) -> + let constrName = printLongident constrName.txt in + let argsDoc = match constructorArgs with + | None -> Doc.nil + | Some({ppat_loc; ppat_desc = Ppat_construct ({txt = Longident.Lident "()"}, _)}) -> + Doc.concat [ + Doc.lparen; + printCommentsInside cmtTbl ppat_loc; + Doc.rparen; + ] + | Some({ppat_desc = Ppat_tuple []; ppat_loc = loc}) -> + Doc.concat [ + Doc.lparen; + Doc.softLine; + printCommentsInside cmtTbl loc; + Doc.rparen; + ] + (* Some((1, 2) *) + | Some({ppat_desc = Ppat_tuple [{ppat_desc = Ppat_tuple _} as arg]}) -> + Doc.concat [ + Doc.lparen; + printPattern arg cmtTbl; + Doc.rparen; + ] + | Some({ppat_desc = Ppat_tuple patterns}) -> + Doc.concat [ + Doc.lparen; + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) ( + List.map (fun pat -> printPattern pat cmtTbl) patterns + ); + ] + ); + Doc.trailingComma; + Doc.softLine; + Doc.rparen; + ] + | Some(arg) -> + let argDoc = printPattern arg cmtTbl in + let shouldHug = ParsetreeViewer.isHuggablePattern arg in + Doc.concat [ + Doc.lparen; + if shouldHug then argDoc + else Doc.concat [ + Doc.indent ( + Doc.concat [ + Doc.softLine; + argDoc; + ] + ); + Doc.trailingComma; + Doc.softLine; + ]; + Doc.rparen; + + ] + in + Doc.group(Doc.concat [constrName; argsDoc]) + | Ppat_variant (label, None) -> + Doc.concat [Doc.text "#"; printIdentLike ~allowUident:true label] + | Ppat_variant (label, variantArgs) -> + let variantName = + Doc.concat [Doc.text "#"; printIdentLike ~allowUident:true label] in + let argsDoc = match variantArgs with + | None -> Doc.nil + | Some({ppat_desc = Ppat_construct ({txt = Longident.Lident "()"}, _)}) -> + Doc.text "()" + | Some({ppat_desc = Ppat_tuple []; ppat_loc = loc}) -> + Doc.concat [ + Doc.lparen; + Doc.softLine; + printCommentsInside cmtTbl loc; + 
Doc.rparen; + ] + (* Some((1, 2) *) + | Some({ppat_desc = Ppat_tuple [{ppat_desc = Ppat_tuple _} as arg]}) -> + Doc.concat [ + Doc.lparen; + printPattern arg cmtTbl; + Doc.rparen; + ] + | Some({ppat_desc = Ppat_tuple patterns}) -> + Doc.concat [ + Doc.lparen; + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) ( + List.map (fun pat -> printPattern pat cmtTbl) patterns + ); + ] + ); + Doc.trailingComma; + Doc.softLine; + Doc.rparen; + ] + | Some(arg) -> + let argDoc = printPattern arg cmtTbl in + let shouldHug = ParsetreeViewer.isHuggablePattern arg in + Doc.concat [ + Doc.lparen; + if shouldHug then argDoc + else Doc.concat [ + Doc.indent ( + Doc.concat [ + Doc.softLine; + argDoc; + ] + ); + Doc.trailingComma; + Doc.softLine; + ]; + Doc.rparen; + + ] + in + Doc.group(Doc.concat [variantName; argsDoc]) + | Ppat_type ident -> + Doc.concat [Doc.text "##"; printIdentPath ident cmtTbl] + | Ppat_record(rows, openFlag) -> + Doc.group( + Doc.concat([ + Doc.lbrace; + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.text ","; Doc.line]) + (List.map (fun row -> printPatternRecordRow row cmtTbl) rows); + begin match openFlag with + | Open -> Doc.concat [Doc.text ","; Doc.line; Doc.text "_"] + | Closed -> Doc.nil + end; + ] + ); + Doc.ifBreaks (Doc.text ",") Doc.nil; + Doc.softLine; + Doc.rbrace; + ]) + ) + + | Ppat_exception p -> + let needsParens = match p.ppat_desc with + | Ppat_or (_, _) | Ppat_alias (_, _) -> true + | _ -> false + in + let pat = + let p = printPattern p cmtTbl in + if needsParens then + Doc.concat [Doc.text "("; p; Doc.text ")"] + else + p + in + Doc.group ( + Doc.concat [Doc.text "exception"; Doc.line; pat] + ) + | Ppat_or _ -> + (* Blue | Red | Green -> [Blue; Red; Green] *) + let orChain = ParsetreeViewer.collectOrPatternChain p in + let docs = List.mapi (fun i pat -> + let patternDoc = printPattern pat cmtTbl in + Doc.concat [ + if i == 0 then Doc.nil else Doc.concat [Doc.line; Doc.text "| "]; + match pat.ppat_desc with + (* (Blue | Red) | (Green | Black) | White *) + | Ppat_or _ -> addParens patternDoc + | _ -> patternDoc + ] + ) orChain in + Doc.group (Doc.concat docs) + | Ppat_extension ext -> + printExtensionWithComments ~atModuleLvl:false ext cmtTbl + | Ppat_lazy p -> + let needsParens = match p.ppat_desc with + | Ppat_or (_, _) | Ppat_alias (_, _) -> true + | _ -> false + in + let pat = + let p = printPattern p cmtTbl in + if needsParens then + Doc.concat [Doc.text "("; p; Doc.text ")"] + else + p + in + Doc.concat [Doc.text "lazy "; pat] + | Ppat_alias (p, aliasLoc) -> + let needsParens = match p.ppat_desc with + | Ppat_or (_, _) | Ppat_alias (_, _) -> true + | _ -> false + in + let renderedPattern = + let p = printPattern p cmtTbl in + if needsParens then + Doc.concat [Doc.text "("; p; Doc.text ")"] + else + p + in + Doc.concat([ + renderedPattern; + Doc.text " as "; + printStringLoc aliasLoc cmtTbl; + ]) + + (* Note: module(P : S) is represented as *) + (* Ppat_constraint(Ppat_unpack, Ptyp_package) *) + | Ppat_constraint ({ppat_desc = Ppat_unpack stringLoc}, {ptyp_desc = Ptyp_package packageType; ptyp_loc}) -> + Doc.concat [ + Doc.text "module("; + printComments (Doc.text stringLoc.txt) cmtTbl stringLoc.loc; + Doc.text ": "; + printComments + (printPackageType ~printModuleKeywordAndParens:false packageType cmtTbl) + cmtTbl + ptyp_loc; + Doc.rparen; + ] + | Ppat_constraint (pattern, typ) -> + Doc.concat [ + printPattern pattern cmtTbl; + Doc.text ": "; + printTypExpr typ cmtTbl; + ] + + (* Note: 
module(P : S) is represented as *) + (* Ppat_constraint(Ppat_unpack, Ptyp_package) *) + | Ppat_unpack stringLoc -> + Doc.concat [ + Doc.text "module("; + printComments (Doc.text stringLoc.txt) cmtTbl stringLoc.loc; + Doc.rparen; + ] + | Ppat_interval (a, b) -> + Doc.concat [ + printConstant a; + Doc.text " .. "; + printConstant b; + ] + | Ppat_open _ -> Doc.nil + in + let doc = match p.ppat_attributes with + | [] -> patternWithoutAttributes + | attrs -> + Doc.group ( + Doc.concat [ + printAttributes attrs; + patternWithoutAttributes; + ] + ) + in + printComments doc cmtTbl p.ppat_loc + + and printPatternRecordRow row cmtTbl = + match row with + (* punned {x}*) + | ({Location.txt=Longident.Lident ident} as longident, + {Parsetree.ppat_desc=Ppat_var {txt;_}}) when ident = txt -> + printLidentPath longident cmtTbl + | (longident, pattern) -> + let locForComments = { + longident.loc with + loc_end = pattern.Parsetree.ppat_loc.loc_end + } in + let doc = Doc.group ( + Doc.concat([ + printLidentPath longident cmtTbl; + Doc.text ": "; + Doc.indent( + Doc.concat [ + Doc.softLine; + printPattern pattern cmtTbl; + ] + ) + ]) + ) in + printComments doc cmtTbl locForComments + + and printExpressionWithComments expr cmtTbl = + let doc = printExpression expr cmtTbl in + printComments doc cmtTbl expr.Parsetree.pexp_loc + + and printExpression (e : Parsetree.expression) cmtTbl = + let printedExpression = match e.pexp_desc with + | Parsetree.Pexp_constant c -> printConstant c + | Pexp_construct _ when ParsetreeViewer.hasJsxAttribute e.pexp_attributes -> + printJsxFragment e cmtTbl + | Pexp_construct ({txt = Longident.Lident "()"}, _) -> Doc.text "()" + | Pexp_construct ({txt = Longident.Lident "[]"}, _) -> + Doc.concat [ + Doc.text "list["; + printCommentsInside cmtTbl e.pexp_loc; + Doc.rbracket; + ] + | Pexp_construct ({txt = Longident.Lident "::"}, _) -> + let (expressions, spread) = ParsetreeViewer.collectListExpressions e in + let spreadDoc = match spread with + | Some(expr) -> Doc.concat [ + Doc.text ","; + Doc.line; + Doc.dotdotdot; + let doc = printExpressionWithComments expr cmtTbl in + match Parens.expr expr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr braces + | Nothing -> doc + ] + | None -> Doc.nil + in + Doc.group( + Doc.concat([ + Doc.text "list["; + Doc.indent ( + Doc.concat([ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.text ","; Doc.line]) + (List.map + (fun expr -> + let doc = printExpressionWithComments expr cmtTbl in + match Parens.expr expr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr braces + | Nothing -> doc + ) + expressions); + spreadDoc; + ]) + ); + Doc.trailingComma; + Doc.softLine; + Doc.rbracket; + ]) + ) + | Pexp_construct (longidentLoc, args) -> + let constr = printLongidentLocation longidentLoc cmtTbl in + let args = match args with + | None -> Doc.nil + | Some({pexp_desc = Pexp_construct ({txt = Longident.Lident "()"}, _)}) -> + Doc.text "()" + (* Some((1, 2)) *) + | Some({pexp_desc = Pexp_tuple [{pexp_desc = Pexp_tuple _} as arg]}) -> + Doc.concat [ + Doc.lparen; + (let doc = printExpressionWithComments arg cmtTbl in + match Parens.expr arg with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc arg braces + | Nothing -> doc); + Doc.rparen; + ] + | Some({pexp_desc = Pexp_tuple args }) -> + Doc.concat [ + Doc.lparen; + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) ( + List.map + (fun expr -> + let doc = 
printExpressionWithComments expr cmtTbl in + match Parens.expr expr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr braces + | Nothing -> doc) + args + ) + ] + ); + Doc.trailingComma; + Doc.softLine; + Doc.rparen; + ] + | Some(arg) -> + let argDoc = + let doc = printExpressionWithComments arg cmtTbl in + match Parens.expr arg with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc arg braces + | Nothing -> doc + in + let shouldHug = ParsetreeViewer.isHuggableExpression arg in + Doc.concat [ + Doc.lparen; + if shouldHug then argDoc + else Doc.concat [ + Doc.indent ( + Doc.concat [ + Doc.softLine; + argDoc; + ] + ); + Doc.trailingComma; + Doc.softLine; + ]; + Doc.rparen; + ] + in + Doc.group(Doc.concat [constr; args]) + | Pexp_ident path -> + printLidentPath path cmtTbl + | Pexp_tuple exprs -> + Doc.group( + Doc.concat([ + Doc.lparen; + Doc.indent ( + Doc.concat([ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.text ","; Doc.line]) + (List.map (fun expr -> + let doc = printExpressionWithComments expr cmtTbl in + match Parens.expr expr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr braces + | Nothing -> doc) + exprs) + ]) + ); + Doc.ifBreaks (Doc.text ",") Doc.nil; + Doc.softLine; + Doc.rparen; + ]) + ) + | Pexp_array [] -> + Doc.concat [ + Doc.lbracket; + printCommentsInside cmtTbl e.pexp_loc; + Doc.rbracket; + ] + | Pexp_array exprs -> + Doc.group( + Doc.concat([ + Doc.lbracket; + Doc.indent ( + Doc.concat([ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.text ","; Doc.line]) + (List.map (fun expr -> + let doc = printExpressionWithComments expr cmtTbl in + match Parens.expr expr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr braces + | Nothing -> doc + ) exprs) + ]) + ); + Doc.trailingComma; + Doc.softLine; + Doc.rbracket; + ]) + ) + | Pexp_variant (label, args) -> + let variantName = + Doc.concat [Doc.text "#"; printIdentLike ~allowUident:true label] in + let args = match args with + | None -> Doc.nil + | Some({pexp_desc = Pexp_construct ({txt = Longident.Lident "()"}, _)}) -> + Doc.text "()" + (* #poly((1, 2) *) + | Some({pexp_desc = Pexp_tuple [{pexp_desc = Pexp_tuple _} as arg]}) -> + Doc.concat [ + Doc.lparen; + (let doc = printExpressionWithComments arg cmtTbl in + match Parens.expr arg with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc arg braces + | Nothing -> doc); + Doc.rparen; + ] + | Some({pexp_desc = Pexp_tuple args }) -> + Doc.concat [ + Doc.lparen; + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) ( + List.map + (fun expr -> + let doc = printExpressionWithComments expr cmtTbl in + match Parens.expr expr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr braces + | Nothing -> doc) + args + ) + ] + ); + Doc.trailingComma; + Doc.softLine; + Doc.rparen; + ] + | Some(arg) -> + let argDoc = + let doc = printExpressionWithComments arg cmtTbl in + match Parens.expr arg with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc arg braces + | Nothing -> doc + in + let shouldHug = ParsetreeViewer.isHuggableExpression arg in + Doc.concat [ + Doc.lparen; + if shouldHug then argDoc + else Doc.concat [ + Doc.indent ( + Doc.concat [ + Doc.softLine; + argDoc; + ] + ); + Doc.trailingComma; + Doc.softLine; + ]; + Doc.rparen; + ] + in + Doc.group(Doc.concat [variantName; args]) + | Pexp_record (rows, 
spreadExpr) -> + let spread = match spreadExpr with + | None -> Doc.nil + | Some expr -> Doc.concat [ + Doc.dotdotdot; + (let doc = printExpressionWithComments expr cmtTbl in + match Parens.expr expr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr braces + | Nothing -> doc); + Doc.comma; + Doc.line; + ] + in + (* If the record is written over multiple lines, break automatically + * `let x = {a: 1, b: 3}` -> same line, break when line-width exceeded + * `let x = { + * a: 1, + * b: 2, + * }` -> record is written on multiple lines, break the group *) + let forceBreak = + e.pexp_loc.loc_start.pos_lnum < e.pexp_loc.loc_end.pos_lnum + in + Doc.breakableGroup ~forceBreak ( + Doc.concat([ + Doc.lbrace; + Doc.indent ( + Doc.concat [ + Doc.softLine; + spread; + Doc.join ~sep:(Doc.concat [Doc.text ","; Doc.line]) + (List.map (fun row -> printRecordRow row cmtTbl) rows) + ] + ); + Doc.trailingComma; + Doc.softLine; + Doc.rbrace; + ]) + ) + | Pexp_extension extension -> + begin match extension with + | ( + {txt = "bs.obj"}, + PStr [{ + pstr_loc = loc; + pstr_desc = Pstr_eval({pexp_desc = Pexp_record (rows, _)}, []) + }] + ) -> + (* If the object is written over multiple lines, break automatically + * `let x = {"a": 1, "b": 3}` -> same line, break when line-width exceeded + * `let x = { + * "a": 1, + * "b": 2, + * }` -> object is written on multiple lines, break the group *) + let forceBreak = + loc.loc_start.pos_lnum < loc.loc_end.pos_lnum + in + Doc.breakableGroup ~forceBreak ( + Doc.concat([ + Doc.lbrace; + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.text ","; Doc.line]) + (List.map (fun row -> printBsObjectRow row cmtTbl) rows) + ] + ); + Doc.trailingComma; + Doc.softLine; + Doc.rbrace; + ]) + ) + | extension -> + printExtensionWithComments ~atModuleLvl:false extension cmtTbl + end + | Pexp_apply _ -> + if ParsetreeViewer.isUnaryExpression e then + printUnaryExpression e cmtTbl + else if ParsetreeViewer.isTemplateLiteral e then + printTemplateLiteral e cmtTbl + else if ParsetreeViewer.isBinaryExpression e then + printBinaryExpression e cmtTbl + else + printPexpApply e cmtTbl + | Pexp_unreachable -> Doc.dot + | Pexp_field (expr, longidentLoc) -> + let lhs = + let doc = printExpressionWithComments expr cmtTbl in + match Parens.fieldExpr expr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr braces + | Nothing -> doc + in + Doc.concat [ + lhs; + Doc.dot; + printLidentPath longidentLoc cmtTbl; + ] + | Pexp_setfield (expr1, longidentLoc, expr2) -> + printSetFieldExpr e.pexp_attributes expr1 longidentLoc expr2 e.pexp_loc cmtTbl + | Pexp_ifthenelse (_ifExpr, _thenExpr, _elseExpr) -> + if ParsetreeViewer.isTernaryExpr e then + let (parts, alternate) = ParsetreeViewer.collectTernaryParts e in + let ternaryDoc = match parts with + | (condition1, consequent1)::rest -> + Doc.group (Doc.concat [ + printTernaryOperand condition1 cmtTbl; + Doc.indent ( + Doc.concat [ + Doc.line; + Doc.indent ( + Doc.concat [ + Doc.text "? "; + printTernaryOperand consequent1 cmtTbl + ] + ); + Doc.concat ( + List.map (fun (condition, consequent) -> + Doc.concat [ + Doc.line; + Doc.text ": "; + printTernaryOperand condition cmtTbl; + Doc.line; + Doc.text "? 
"; + printTernaryOperand consequent cmtTbl; + ] + ) rest + ); + Doc.line; + Doc.text ": "; + Doc.indent (printTernaryOperand alternate cmtTbl); + ] + ) + ]) + | _ -> Doc.nil + in + let attrs = ParsetreeViewer.filterTernaryAttributes e.pexp_attributes in + let needsParens = match ParsetreeViewer.filterParsingAttrs attrs with + | [] -> false | _ -> true + in + Doc.concat [ + printAttributes attrs; + if needsParens then addParens ternaryDoc else ternaryDoc; + ] + else + let (ifs, elseExpr) = ParsetreeViewer.collectIfExpressions e in + let ifDocs = Doc.join ~sep:Doc.space ( + List.mapi (fun i (ifExpr, thenExpr) -> + let ifTxt = if i > 0 then Doc.text "else if " else Doc.text "if " in + let condition = + if ParsetreeViewer.isBlockExpr ifExpr then + printExpressionBlock ~braces:true ifExpr cmtTbl + else + let doc = printExpressionWithComments ifExpr cmtTbl in + match Parens.expr ifExpr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc ifExpr braces + | Nothing -> Doc.ifBreaks (addParens doc) doc + in + Doc.concat [ + ifTxt; + Doc.group (condition); + Doc.space; + let thenExpr = match ParsetreeViewer.processBracesAttr thenExpr with + (* This case only happens when coming from Reason, we strip braces *) + | (Some _, expr) -> expr + | _ -> thenExpr + in + printExpressionBlock ~braces:true thenExpr cmtTbl; + ] + ) ifs + ) in + let elseDoc = match elseExpr with + | None -> Doc.nil + | Some expr -> Doc.concat [ + Doc.text " else "; + printExpressionBlock ~braces:true expr cmtTbl; + ] + in + Doc.concat [ + printAttributes e.pexp_attributes; + ifDocs; + elseDoc; + ] + | Pexp_while (expr1, expr2) -> + let condition = + let doc = printExpressionWithComments expr1 cmtTbl in + match Parens.expr expr1 with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr1 braces + | Nothing -> doc + in + Doc.breakableGroup ~forceBreak:true ( + Doc.concat [ + Doc.text "while "; + if ParsetreeViewer.isBlockExpr expr1 then + condition + else + Doc.group ( + Doc.ifBreaks (addParens condition) condition + ); + Doc.space; + printExpressionBlock ~braces:true expr2 cmtTbl; + ] + ) + | Pexp_for (pattern, fromExpr, toExpr, directionFlag, body) -> + Doc.breakableGroup ~forceBreak:true ( + Doc.concat [ + Doc.text "for "; + printPattern pattern cmtTbl; + Doc.text " in "; + (let doc = printExpressionWithComments fromExpr cmtTbl in + match Parens.expr fromExpr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc fromExpr braces + | Nothing -> doc); + printDirectionFlag directionFlag; + (let doc = printExpressionWithComments toExpr cmtTbl in + match Parens.expr toExpr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc toExpr braces + | Nothing -> doc); + Doc.space; + printExpressionBlock ~braces:true body cmtTbl; + ] + ) + | Pexp_constraint( + {pexp_desc = Pexp_pack modExpr}, + {ptyp_desc = Ptyp_package packageType; ptyp_loc} + ) -> + Doc.group ( + Doc.concat [ + Doc.text "module("; + Doc.indent ( + Doc.concat [ + Doc.softLine; + printModExpr modExpr cmtTbl; + Doc.text ": "; + printComments + (printPackageType ~printModuleKeywordAndParens:false packageType cmtTbl) + cmtTbl + ptyp_loc + ] + ); + Doc.softLine; + Doc.rparen; + ] + ) + + | Pexp_constraint (expr, typ) -> + let exprDoc = + let doc = printExpressionWithComments expr cmtTbl in + match Parens.expr expr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr braces + | Nothing -> doc + in + Doc.concat [ + exprDoc; + 
Doc.text ": "; + printTypExpr typ cmtTbl; + ] + | Pexp_letmodule ({txt = _modName}, _modExpr, _expr) -> + printExpressionBlock ~braces:true e cmtTbl + | Pexp_letexception (_extensionConstructor, _expr) -> + printExpressionBlock ~braces:true e cmtTbl + | Pexp_assert expr -> + let rhs = + let doc = printExpressionWithComments expr cmtTbl in + match Parens.lazyOrAssertExprRhs expr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr braces + | Nothing -> doc + in + Doc.concat [ + Doc.text "assert "; + rhs; + ] + | Pexp_lazy expr -> + let rhs = + let doc = printExpressionWithComments expr cmtTbl in + match Parens.lazyOrAssertExprRhs expr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr braces + | Nothing -> doc + in + Doc.group ( + Doc.concat [ + Doc.text "lazy "; + rhs; + ] + ) + | Pexp_open (_overrideFlag, _longidentLoc, _expr) -> + printExpressionBlock ~braces:true e cmtTbl + | Pexp_pack (modExpr) -> + Doc.group (Doc.concat [ + Doc.text "module("; + Doc.indent ( + Doc.concat [ + Doc.softLine; + printModExpr modExpr cmtTbl; + ] + ); + Doc.softLine; + Doc.rparen; + ]) + | Pexp_sequence _ -> + printExpressionBlock ~braces:true e cmtTbl + | Pexp_let _ -> + printExpressionBlock ~braces:true e cmtTbl + | Pexp_fun (Nolabel, None, {ppat_desc = Ppat_var {txt="__x"}}, ({pexp_desc = Pexp_apply _})) -> + + (* (__x) => f(a, __x, c) -----> f(a, _, c) *) + printExpressionWithComments (ParsetreeViewer.rewriteUnderscoreApply e) cmtTbl + | Pexp_fun _ | Pexp_newtype _ -> + let (attrsOnArrow, parameters, returnExpr) = ParsetreeViewer.funExpr e in + let (uncurried, attrs) = + ParsetreeViewer.processUncurriedAttribute attrsOnArrow + in + let (returnExpr, typConstraint) = match returnExpr.pexp_desc with + | Pexp_constraint (expr, typ) -> ( + {expr with pexp_attributes = List.concat [ + expr.pexp_attributes; + returnExpr.pexp_attributes; + ]}, + Some typ + ) + | _ -> (returnExpr, None) + in + let hasConstraint = match typConstraint with | Some _ -> true | None -> false in + let parametersDoc = printExprFunParameters + ~inCallback:false + ~uncurried + ~hasConstraint + parameters + cmtTbl + in + let returnExprDoc = + let (optBraces, _) = ParsetreeViewer.processBracesAttr returnExpr in + let shouldInline = match (returnExpr.pexp_desc, optBraces) with + | (_, Some _ ) -> true + | ((Pexp_array _ + | Pexp_tuple _ + | Pexp_construct (_, Some _) + | Pexp_record _), _) -> true + | _ -> false + in + let shouldIndent = match returnExpr.pexp_desc with + | Pexp_sequence _ + | Pexp_let _ + | Pexp_letmodule _ + | Pexp_letexception _ + | Pexp_open _ -> false + | _ -> true + in + let returnDoc = + let doc = printExpressionWithComments returnExpr cmtTbl in + match Parens.expr returnExpr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc returnExpr braces + | Nothing -> doc + in + if shouldInline then Doc.concat [ + Doc.space; + returnDoc; + ] else + Doc.group ( + if shouldIndent then + Doc.indent ( + Doc.concat [ + Doc.line; + returnDoc; + ] + ) + else + Doc.concat [ + Doc.space; + returnDoc + ] + ) + in + let typConstraintDoc = match typConstraint with + | Some(typ) -> Doc.concat [Doc.text ": "; printTypExpr typ cmtTbl] + | _ -> Doc.nil + in + let attrs = printAttributes attrs in + Doc.group ( + Doc.concat [ + attrs; + parametersDoc; + typConstraintDoc; + Doc.text " =>"; + returnExprDoc; + ] + ) + | Pexp_try (expr, cases) -> + let exprDoc = + let doc = printExpressionWithComments expr cmtTbl in + match Parens.expr expr with 
+ | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr braces + | Nothing -> doc + in + Doc.concat [ + Doc.text "try "; + exprDoc; + Doc.text " catch "; + printCases cases cmtTbl; + ] + | Pexp_match (expr, cases) -> + let exprDoc = + let doc = printExpressionWithComments expr cmtTbl in + match Parens.expr expr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr braces + | Nothing -> doc + in + Doc.concat [ + Doc.text "switch "; + exprDoc; + Doc.space; + printCases cases cmtTbl; + ] + | Pexp_function cases -> + Doc.concat [ + Doc.text "x => switch x "; + printCases cases cmtTbl; + ] + | Pexp_coerce (expr, typOpt, typ) -> + let docExpr = printExpressionWithComments expr cmtTbl in + let docTyp = printTypExpr typ cmtTbl in + let ofType = match typOpt with + | None -> Doc.nil + | Some(typ1) -> + Doc.concat [Doc.text ": "; printTypExpr typ1 cmtTbl] + in + Doc.concat [Doc.lparen; docExpr; ofType; Doc.text " :> "; docTyp; Doc.rparen] + | Pexp_send _ -> + Doc.text "Pexp_send not implemented in printer" + | Pexp_new _ -> + Doc.text "Pexp_new not implemented in printer" + | Pexp_setinstvar _ -> + Doc.text "Pexp_setinstvar not implemented in printer" + | Pexp_override _ -> + Doc.text "Pexp_override not implemented in printer" + | Pexp_poly _ -> + Doc.text "Pexp_poly not implemented in printer" + | Pexp_object _ -> + Doc.text "Pexp_object not implemented in printer" + in + let shouldPrintItsOwnAttributes = match e.pexp_desc with + | Pexp_apply _ + | Pexp_fun _ + | Pexp_newtype _ + | Pexp_setfield _ + | Pexp_ifthenelse _ -> true + | Pexp_construct _ when ParsetreeViewer.hasJsxAttribute e.pexp_attributes -> true + | _ -> false + in + match e.pexp_attributes with + | [] -> printedExpression + | attrs when not shouldPrintItsOwnAttributes -> + Doc.group ( + Doc.concat [ + printAttributes attrs; + printedExpression; + ] + ) + | _ -> printedExpression + + and printPexpFun ~inCallback e cmtTbl = + let (attrsOnArrow, parameters, returnExpr) = ParsetreeViewer.funExpr e in + let (uncurried, attrs) = + ParsetreeViewer.processUncurriedAttribute attrsOnArrow + in + let (returnExpr, typConstraint) = match returnExpr.pexp_desc with + | Pexp_constraint (expr, typ) -> ( + {expr with pexp_attributes = List.concat [ + expr.pexp_attributes; + returnExpr.pexp_attributes; + ]}, + Some typ + ) + | _ -> (returnExpr, None) + in + let parametersDoc = printExprFunParameters + ~inCallback + ~uncurried + ~hasConstraint:(match typConstraint with | Some _ -> true | None -> false) + parameters cmtTbl in + let returnShouldIndent = match returnExpr.pexp_desc with + | Pexp_sequence _ | Pexp_let _ | Pexp_letmodule _ | Pexp_letexception _ | Pexp_open _ -> false + | _ -> true + in + let returnExprDoc = + let (optBraces, _) = ParsetreeViewer.processBracesAttr returnExpr in + let shouldInline = match (returnExpr.pexp_desc, optBraces) with + | (_, Some _) -> true + | ((Pexp_array _ + | Pexp_tuple _ + | Pexp_construct (_, Some _) + | Pexp_record _), _) -> true + | _ -> false + in + let returnDoc = + let doc = printExpressionWithComments returnExpr cmtTbl in + match Parens.expr returnExpr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc returnExpr braces + | Nothing -> doc + in + if shouldInline then Doc.concat [ + Doc.space; + returnDoc; + ] else + Doc.group ( + if returnShouldIndent then + Doc.concat [ + Doc.indent ( + Doc.concat [ + Doc.line; + returnDoc; + ] + ); + if inCallback then Doc.softLine else Doc.nil; + ] + else + Doc.concat [ + Doc.space;
+ returnDoc; + ] + ) + in + let typConstraintDoc = match typConstraint with + | Some(typ) -> Doc.concat [ + Doc.text ": "; + printTypExpr typ cmtTbl + ] + | _ -> Doc.nil + in + Doc.group ( + Doc.concat [ + printAttributes attrs; + parametersDoc; + typConstraintDoc; + Doc.text " =>"; + returnExprDoc; + ] + ) + + and printTernaryOperand expr cmtTbl = + let doc = printExpressionWithComments expr cmtTbl in + match Parens.ternaryOperand expr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr braces + | Nothing -> doc + + and printSetFieldExpr attrs lhs longidentLoc rhs loc cmtTbl = + let rhsDoc = + let doc = printExpressionWithComments rhs cmtTbl in + match Parens.setFieldExprRhs rhs with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc rhs braces + | Nothing -> doc + in + let lhsDoc = + let doc = printExpressionWithComments lhs cmtTbl in + match Parens.fieldExpr lhs with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc lhs braces + | Nothing -> doc + in + let shouldIndent = ParsetreeViewer.isBinaryExpression rhs in + let doc = Doc.group (Doc.concat [ + lhsDoc; + Doc.dot; + printLidentPath longidentLoc cmtTbl; + Doc.text " ="; + if shouldIndent then Doc.group ( + Doc.indent ( + (Doc.concat [Doc.line; rhsDoc]) + ) + ) else + Doc.concat [Doc.space; rhsDoc] + ]) in + let doc = match attrs with + | [] -> doc + | attrs -> + Doc.group ( + Doc.concat [ + printAttributes attrs; + doc + ] + ) + in + printComments doc cmtTbl loc + + and printTemplateLiteral expr cmtTbl = + let tag = ref "j" in + let rec walkExpr expr = + let open Parsetree in + match expr.pexp_desc with + | Pexp_apply ( + {pexp_desc = Pexp_ident {txt = Longident.Lident "^"}}, + [Nolabel, arg1; Nolabel, arg2] + ) -> + let lhs = walkExpr arg1 in + let rhs = walkExpr arg2 in + Doc.concat [lhs; rhs] + | Pexp_constant (Pconst_string (txt, Some prefix)) -> + tag := prefix; + Doc.text txt + | _ -> + let doc = printExpressionWithComments expr cmtTbl in + Doc.concat [Doc.text "${"; doc; Doc.rbrace] + in + let content = walkExpr expr in + Doc.concat [ + if !tag = "j" then Doc.nil else Doc.text !tag; + Doc.text "`"; + content; + Doc.text "`" + ] + + and printUnaryExpression expr cmtTbl = + let printUnaryOperator op = Doc.text ( + match op with + | "~+" -> "+" + | "~+." -> "+." + | "~-" -> "-" + | "~-." -> "-." + | "not" -> "!" + | _ -> assert false + ) in + match expr.pexp_desc with + | Pexp_apply ( + {pexp_desc = Pexp_ident {txt = Longident.Lident operator}}, + [Nolabel, operand] + ) -> + let printedOperand = + let doc = printExpressionWithComments operand cmtTbl in + match Parens.unaryExprOperand operand with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc operand braces + | Nothing -> doc + in + let doc = Doc.concat [ + printUnaryOperator operator; + printedOperand; + ] in + printComments doc cmtTbl expr.pexp_loc + | _ -> assert false + + and printBinaryExpression (expr : Parsetree.expression) cmtTbl = + let printBinaryOperator ~inlineRhs operator = + let operatorTxt = match operator with + | "|." -> "->" + | "^" -> "++" + | "=" -> "==" + | "==" -> "===" + | "<>" -> "!=" + | "!=" -> "!==" + | txt -> txt + in + let spacingBeforeOperator = + if operator = "|." then Doc.softLine + else if operator = "|>" then Doc.line + else Doc.space; + in + let spacingAfterOperator = + if operator = "|." 
then Doc.nil + else if operator = "|>" then Doc.space + else if inlineRhs then Doc.space else Doc.line + in + Doc.concat [ + spacingBeforeOperator; + Doc.text operatorTxt; + spacingAfterOperator; + ] + in + let printOperand ~isLhs expr parentOperator = + let rec flatten ~isLhs expr parentOperator = + if ParsetreeViewer.isBinaryExpression expr then + begin match expr with + | {pexp_desc = Pexp_apply ( + {pexp_desc = Pexp_ident {txt = Longident.Lident operator}}, + [_, left; _, right] + )} -> + if ParsetreeViewer.flattenableOperators parentOperator operator && + not (ParsetreeViewer.hasAttributes expr.pexp_attributes) + then + let leftPrinted = flatten ~isLhs:true left operator in + let rightPrinted = + let (_, rightAttrs) = + ParsetreeViewer.partitionPrinteableAttributes right.pexp_attributes + in + let doc = + printExpressionWithComments + {right with pexp_attributes = rightAttrs} + cmtTbl + in + let doc = if Parens.flattenOperandRhs parentOperator right then + Doc.concat [Doc.lparen; doc; Doc.rparen] + else + doc + in + let printeableAttrs = + ParsetreeViewer.filterPrinteableAttributes right.pexp_attributes + in + Doc.concat [printAttributes printeableAttrs; doc] + in + let doc = Doc.concat [ + leftPrinted; + printBinaryOperator ~inlineRhs:false operator; + rightPrinted; + ] in + let doc = + if not isLhs && (Parens.rhsBinaryExprOperand operator expr) then + Doc.concat [Doc.lparen; doc; Doc.rparen] + else doc + in + printComments doc cmtTbl expr.pexp_loc + else ( + let doc = printExpressionWithComments {expr with pexp_attributes = []} cmtTbl in + let doc = if Parens.subBinaryExprOperand parentOperator operator || + (expr.pexp_attributes <> [] && + (ParsetreeViewer.isBinaryExpression expr || + ParsetreeViewer.isTernaryExpr expr)) + then + Doc.concat [Doc.lparen; doc; Doc.rparen] + else doc + in Doc.concat [ + printAttributes expr.pexp_attributes; + doc + ] + ) + | _ -> assert false + end + else + begin match expr.pexp_desc with + | Pexp_setfield (lhs, field, rhs) -> + let doc = printSetFieldExpr expr.pexp_attributes lhs field rhs expr.pexp_loc cmtTbl in + if isLhs then addParens doc else doc + | Pexp_apply( + {pexp_desc = Pexp_ident {txt = Longident.Lident "#="}}, + [(Nolabel, lhs); (Nolabel, rhs)] + ) -> + let rhsDoc = printExpressionWithComments rhs cmtTbl in + let lhsDoc = printExpressionWithComments lhs cmtTbl in + (* TODO: unify indentation of "=" *) + let shouldIndent = ParsetreeViewer.isBinaryExpression rhs in + let doc = Doc.group ( + Doc.concat [ + lhsDoc; + Doc.text " ="; + if shouldIndent then Doc.group ( + Doc.indent (Doc.concat [Doc.line; rhsDoc]) + ) else + Doc.concat [Doc.space; rhsDoc] + ] + ) in + let doc = match expr.pexp_attributes with + | [] -> doc + | attrs -> + Doc.group ( + Doc.concat [ + printAttributes attrs; + doc + ] + ) + in + if isLhs then addParens doc else doc + | _ -> + let doc = printExpressionWithComments expr cmtTbl in + begin match Parens.binaryExprOperand ~isLhs expr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr braces + | Nothing -> doc + end + end + in + flatten ~isLhs expr parentOperator + in + match expr.pexp_desc with + | Pexp_apply ( + {pexp_desc = Pexp_ident {txt = Longident.Lident (("|." | "|>") as op)}}, + [Nolabel, lhs; Nolabel, rhs] + ) when not ( + ParsetreeViewer.isBinaryExpression lhs || + ParsetreeViewer.isBinaryExpression rhs + ) -> + let lhsDoc = printOperand ~isLhs:true lhs op in + let rhsDoc = printOperand ~isLhs:false rhs op in + Doc.group ( + Doc.concat [ + lhsDoc; + (match op with + | "|." 
-> Doc.text "->" + | "|>" -> Doc.text " |> " + | _ -> assert false); + rhsDoc; + ] + ) + | Pexp_apply ( + {pexp_desc = Pexp_ident {txt = Longident.Lident operator}}, + [Nolabel, lhs; Nolabel, rhs] + ) -> + let right = + let operatorWithRhs = + let rhsDoc = printOperand ~isLhs:false rhs operator in + Doc.concat [ + printBinaryOperator + ~inlineRhs:(ParsetreeViewer.shouldInlineRhsBinaryExpr rhs) operator; + rhsDoc; + ] in + if ParsetreeViewer.shouldIndentBinaryExpr expr then + Doc.group (Doc.indent operatorWithRhs) + else operatorWithRhs + in + let doc = Doc.group ( + Doc.concat [ + printOperand ~isLhs:true lhs operator; + right + ] + ) in + Doc.group ( + Doc.concat [ + printAttributes expr.pexp_attributes; + match Parens.binaryExpr {expr with + pexp_attributes = List.filter (fun attr -> + match attr with + | ({Location.txt = ("ns.braces")}, _) -> false + | _ -> true + ) expr.pexp_attributes + } with + | Braced(bracesLoc) -> printBraces doc expr bracesLoc + | Parenthesized -> addParens doc + | Nothing -> doc; + ] + ) + | _ -> Doc.nil + + (* callExpr(arg1, arg2) *) + and printPexpApply expr cmtTbl = + match expr.pexp_desc with + | Pexp_apply ( + {pexp_desc = Pexp_ident {txt = Longident.Lident "##"}}, + [Nolabel, parentExpr; Nolabel, memberExpr] + ) -> + let parentDoc = + let doc = printExpressionWithComments parentExpr cmtTbl in + match Parens.unaryExprOperand parentExpr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc parentExpr braces + | Nothing -> doc + in + let member = + let memberDoc = match memberExpr.pexp_desc with + | Pexp_ident lident -> + printComments (printLongident lident.txt) cmtTbl memberExpr.pexp_loc + | _ -> printExpressionWithComments memberExpr cmtTbl + in + Doc.concat [Doc.text "\""; memberDoc; Doc.text "\""] + in + Doc.group (Doc.concat [ + printAttributes expr.pexp_attributes; + parentDoc; + Doc.lbracket; + member; + Doc.rbracket; + ]) + | Pexp_apply ( + {pexp_desc = Pexp_ident {txt = Longident.Lident "#="}}, + [Nolabel, lhs; Nolabel, rhs] + ) -> + let rhsDoc = + let doc = printExpressionWithComments rhs cmtTbl in + match Parens.expr rhs with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc rhs braces + | Nothing -> doc + in + (* TODO: unify indentation of "=" *) + let shouldIndent = not (ParsetreeViewer.isBracedExpr rhs) && ParsetreeViewer.isBinaryExpression rhs in + let doc = Doc.group( + Doc.concat [ + printExpressionWithComments lhs cmtTbl; + Doc.text " ="; + if shouldIndent then Doc.group ( + Doc.indent ( + (Doc.concat [Doc.line; rhsDoc]) + ) + ) else + Doc.concat [Doc.space; rhsDoc] + ] + ) in + begin match expr.pexp_attributes with + | [] -> doc + | attrs -> + Doc.group ( + Doc.concat [ + printAttributes attrs; + doc + ] + ) + end + | Pexp_apply ( + {pexp_desc = Pexp_ident {txt = Longident.Ldot (Lident "Array", "get")}}, + [Nolabel, parentExpr; Nolabel, memberExpr] + ) -> + let member = + let memberDoc = + let doc = printExpressionWithComments memberExpr cmtTbl in + match Parens.expr memberExpr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc memberExpr braces + | Nothing -> doc + in + let shouldInline = match memberExpr.pexp_desc with + | Pexp_constant _ | Pexp_ident _ -> true + | _ -> false + in + if shouldInline then memberDoc else ( + Doc.concat [ + Doc.indent ( + Doc.concat [ + Doc.softLine; + memberDoc; + ] + ); + Doc.softLine + ] + ) + in + let parentDoc = + let doc = printExpressionWithComments parentExpr cmtTbl in + match Parens.unaryExprOperand 
parentExpr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc parentExpr braces + | Nothing -> doc + in + Doc.group (Doc.concat [ + printAttributes expr.pexp_attributes; + parentDoc; + Doc.lbracket; + member; + Doc.rbracket; + ]) + | Pexp_apply ( + {pexp_desc = Pexp_ident {txt = Longident.Ldot (Lident "Array", "set")}}, + [Nolabel, parentExpr; Nolabel, memberExpr; Nolabel, targetExpr] + ) -> + let member = + let memberDoc = + let doc = printExpressionWithComments memberExpr cmtTbl in + match Parens.expr memberExpr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc memberExpr braces + | Nothing -> doc + in + let shouldInline = match memberExpr.pexp_desc with + | Pexp_constant _ | Pexp_ident _ -> true + | _ -> false + in + if shouldInline then memberDoc else ( + Doc.concat [ + Doc.indent ( + Doc.concat [ + Doc.softLine; + memberDoc; + ] + ); + Doc.softLine + ] + ) + in + let shouldIndentTargetExpr = + if ParsetreeViewer.isBracedExpr targetExpr then + false + else + ParsetreeViewer.isBinaryExpression targetExpr || + (match targetExpr with + | { + pexp_attributes = [({Location.txt="ns.ternary"}, _)]; + pexp_desc = Pexp_ifthenelse (ifExpr, _, _) + } -> + ParsetreeViewer.isBinaryExpression ifExpr || ParsetreeViewer.hasAttributes ifExpr.pexp_attributes + | { pexp_desc = Pexp_newtype _} -> false + | e -> + ParsetreeViewer.hasAttributes e.pexp_attributes || + ParsetreeViewer.isArrayAccess e + ) + in + let targetExpr = + let doc = printExpressionWithComments targetExpr cmtTbl in + match Parens.expr targetExpr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc targetExpr braces + | Nothing -> doc + in + let parentDoc = + let doc = printExpressionWithComments parentExpr cmtTbl in + match Parens.unaryExprOperand parentExpr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc parentExpr braces + | Nothing -> doc + in + Doc.group ( + Doc.concat [ + printAttributes expr.pexp_attributes; + parentDoc; + Doc.lbracket; + member; + Doc.rbracket; + Doc.text " ="; + if shouldIndentTargetExpr then + Doc.indent ( + Doc.concat [ + Doc.line; + targetExpr; + ] + ) + else + Doc.concat [ + Doc.space; + targetExpr; + ] + ] + ) + (* TODO: cleanup, are those branches even remotely performant? 
*) + | Pexp_apply ( + {pexp_desc = Pexp_ident lident}, + args + ) when ParsetreeViewer.isJsxExpression expr -> + printJsxExpression lident args cmtTbl + | Pexp_apply (callExpr, args) -> + let args = List.map (fun (lbl, arg) -> + (lbl, ParsetreeViewer.rewriteUnderscoreApply arg) + ) args + in + let (uncurried, attrs) = + ParsetreeViewer.processUncurriedAttribute expr.pexp_attributes + in + let callExprDoc = + let doc = printExpressionWithComments callExpr cmtTbl in + match Parens.callExpr callExpr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc callExpr braces + | Nothing -> doc + in + if ParsetreeViewer.requiresSpecialCallbackPrintingFirstArg args then + let argsDoc = + printArgumentsWithCallbackInFirstPosition ~uncurried args cmtTbl + in + Doc.concat [ + printAttributes attrs; + callExprDoc; + argsDoc; + ] + else if ParsetreeViewer.requiresSpecialCallbackPrintingLastArg args then + let argsDoc = + printArgumentsWithCallbackInLastPosition ~uncurried args cmtTbl + in + Doc.concat [ + printAttributes attrs; + callExprDoc; + argsDoc; + ] + else + let argsDoc = printArguments ~uncurried args cmtTbl in + Doc.concat [ + printAttributes attrs; + callExprDoc; + argsDoc; + ] + | _ -> assert false + + and printJsxExpression lident args cmtTbl = + let name = printJsxName lident in + let (formattedProps, children) = printJsxProps args cmtTbl in + (*
 *) + let isSelfClosing = match children with | [] -> true | _ -> false in + Doc.group ( + Doc.concat [ + Doc.group ( + Doc.concat [ + printComments (Doc.concat [Doc.lessThan; name]) cmtTbl lident.Asttypes.loc; + formattedProps; + if isSelfClosing then Doc.concat [Doc.line; Doc.text "/>"] else Doc.nil + ] + ); + if isSelfClosing then Doc.nil + else + Doc.concat [ + Doc.greaterThan; + Doc.indent ( + Doc.concat [ + Doc.line; + printJsxChildren children cmtTbl; + ] + ); + Doc.line; + Doc.text "</"; + name; + Doc.greaterThan; + ] + ] + ) + + and printJsxFragment expr cmtTbl = + let opening = Doc.text "<>" in + let closing = Doc.text "</>" in + let (children, _) = ParsetreeViewer.collectListExpressions expr in + Doc.group ( + Doc.concat [ + opening; + begin match children with + | [] -> Doc.nil + | children -> + Doc.indent ( + Doc.concat [ + Doc.line; + printJsxChildren children cmtTbl; + ] + ) + end; + Doc.line; + closing; + ] + ) + + and printJsxChildren (children: Parsetree.expression list) cmtTbl = + Doc.group ( + Doc.join ~sep:Doc.line ( + List.map (fun expr -> + let exprDoc = printExpressionWithComments expr cmtTbl in + match Parens.jsxChildExpr expr with + | Parenthesized | Braced _ -> + (* {(20: int)} make sure that we also protect the expression inside *) + addBraces (if Parens.bracedExpr expr then addParens exprDoc else exprDoc) + | Nothing -> exprDoc + ) children + ) + ) + + and printJsxProps args cmtTbl = + let rec loop props args = + match args with + | [] -> (Doc.nil, []) + | [ + (Asttypes.Labelled "children", children); + ( + Asttypes.Nolabel, + {Parsetree.pexp_desc = Pexp_construct ({txt = Longident.Lident "()"}, None)} + ) + ] -> + let formattedProps = Doc.indent ( + match props with + | [] -> Doc.nil + | props -> + Doc.concat [ + Doc.line; + Doc.group ( + Doc.join ~sep:Doc.line (props |> List.rev) + ) + ] + ) in + let (children, _) = ParsetreeViewer.collectListExpressions children in + (formattedProps, children) + | arg::args -> + let propDoc = printJsxProp arg cmtTbl in + loop (propDoc::props) args + in + loop [] args + + and printJsxProp arg cmtTbl = + match arg with + | ( + (Asttypes.Labelled lblTxt | Optional lblTxt) as lbl, + { + Parsetree.pexp_attributes = [({Location.txt = "ns.namedArgLoc"; loc = argLoc}, _)]; + pexp_desc = Pexp_ident {txt = Longident.Lident ident} + } + ) when lblTxt = ident (* jsx punning *) -> + begin match lbl with + | Nolabel -> Doc.nil + | Labelled _lbl -> + printComments (printIdentLike ident) cmtTbl argLoc + | Optional _lbl -> + let doc = Doc.concat [ + Doc.question; + printIdentLike ident; + ] in + printComments doc cmtTbl argLoc + end + | ( + (Asttypes.Labelled lblTxt | Optional lblTxt) as lbl, + { + Parsetree.pexp_attributes = []; + pexp_desc = Pexp_ident {txt = Longident.Lident ident} + } + ) when lblTxt = ident (* jsx punning when printing from Reason *) -> + begin match lbl with + | Nolabel -> Doc.nil + | Labelled _lbl -> printIdentLike ident + | Optional _lbl -> Doc.concat [ + Doc.question; + printIdentLike ident; + ] + end + | (lbl, expr) -> + let (argLoc, expr) = match expr.pexp_attributes with + | ({Location.txt = "ns.namedArgLoc"; loc}, _)::attrs -> + (loc, {expr with pexp_attributes = attrs}) + | _ -> + Location.none, expr + in + let lblDoc = match lbl with + | Asttypes.Labelled lbl -> + let lbl = printComments (printIdentLike lbl) cmtTbl argLoc in + Doc.concat [lbl; Doc.equal] + | Asttypes.Optional lbl -> + let lbl = printComments (printIdentLike lbl) cmtTbl argLoc in + Doc.concat [lbl; Doc.equal; Doc.question] + | Nolabel -> Doc.nil + in + let exprDoc = + let doc = printExpression expr cmtTbl in + match Parens.jsxPropExpr expr with + |
Parenthesized | Braced(_) -> + (* {(20: int)} make sure that we also protect the expression inside *) + addBraces (if Parens.bracedExpr expr then addParens doc else doc) + | _ -> doc + in + let fullLoc = {argLoc with loc_end = expr.pexp_loc.loc_end} in + printComments + (Doc.concat [ + lblDoc; + exprDoc; + ]) + cmtTbl + fullLoc + + (* div -> div. + * Navabar.createElement -> Navbar + * Staff.Users.createElement -> Staff.Users *) + and printJsxName {txt = lident} = + let rec flatten acc lident = match lident with + | Longident.Lident txt -> txt::acc + | Ldot (lident, txt) -> + let acc = if txt = "createElement" then acc else txt::acc in + flatten acc lident + | _ -> acc + in + match lident with + | Longident.Lident txt -> Doc.text txt + | _ as lident -> + let segments = flatten [] lident in + Doc.join ~sep:Doc.dot (List.map Doc.text segments) + + and printArgumentsWithCallbackInFirstPosition ~uncurried args cmtTbl = + let (callback, printedArgs) = match args with + | (lbl, expr)::args -> + let lblDoc = match lbl with + | Asttypes.Nolabel -> Doc.nil + | Asttypes.Labelled txt -> + Doc.concat [ + Doc.tilde; printIdentLike txt; Doc.equal; + ] + | Asttypes.Optional txt -> + Doc.concat [ + Doc.tilde; printIdentLike txt; Doc.equal; Doc.question; + ] + in + let callback = Doc.concat [ + lblDoc; + printPexpFun ~inCallback:true expr cmtTbl + ] in + let printedArgs = List.map (fun arg -> + printArgument arg cmtTbl + ) args |> Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) + in + (callback, printedArgs) + | _ -> assert false + in + (* Thing.map((arg1, arg2) => MyModuleBlah.toList(argument), foo) *) + (* Thing.map((arg1, arg2) => { + * MyModuleBlah.toList(argument) + * }, longArgumet, veryLooooongArgument) + *) + let fitsOnOneLine = Doc.concat [ + if uncurried then Doc.text "(. " else Doc.lparen; + callback; + Doc.comma; + Doc.line; + printedArgs; + Doc.rparen; + ] in + + (* Thing.map( + * (param1, parm2) => doStuff(param1, parm2), + * arg1, + * arg2, + * arg3, + * ) + *) + let breakAllArgs = printArguments ~uncurried args cmtTbl in + Doc.customLayout [ + fitsOnOneLine; + breakAllArgs; + ] + + and printArgumentsWithCallbackInLastPosition ~uncurried args cmtTbl = + let rec loop acc args = match args with + | [] -> (Doc.nil, Doc.nil) + | [lbl, expr] -> + let lblDoc = match lbl with + | Asttypes.Nolabel -> Doc.nil + | Asttypes.Labelled txt -> + Doc.concat [ + Doc.tilde; printIdentLike txt; Doc.equal; + ] + | Asttypes.Optional txt -> + Doc.concat [ + Doc.tilde; printIdentLike txt; Doc.equal; Doc.question; + ] + in + let callback = printPexpFun ~inCallback:true expr cmtTbl in + (Doc.concat (List.rev acc), Doc.concat [lblDoc; callback]) + | arg::args -> + let argDoc = printArgument arg cmtTbl in + loop (Doc.line::Doc.comma::argDoc::acc) args + in + let (printedArgs, callback) = loop [] args in + + (* Thing.map(foo, (arg1, arg2) => MyModuleBlah.toList(argument)) *) + let fitsOnOneLine = Doc.concat [ + if uncurried then Doc.text "(." else Doc.lparen; + printedArgs; + callback; + Doc.rparen; + ] in + + (* Thing.map(longArgumet, veryLooooongArgument, (arg1, arg2) => + * MyModuleBlah.toList(argument) + * ) + *) + let arugmentsFitOnOneLine = + Doc.concat [ + if uncurried then Doc.text "(." 
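(* Illustrative aside, not part of the original res_syntax sources: Doc.customLayout is fed
 * the candidate layouts built above in order of preference; the renderer is assumed to pick
 * the first candidate that fits the requested width and to fall back to the last otherwise.
 * The call expression below is made up. *)
let _customLayoutSketch =
  let fitsOnOneLine = Doc.text "Thing.map(foo, (x, y) => x + y)" in
  let breakAllArgs =
    Doc.concat [
      Doc.text "Thing.map(";
      Doc.indent (Doc.concat [
        Doc.hardLine; Doc.text "foo,";
        Doc.hardLine; Doc.text "(x, y) => x + y,";
      ]);
      Doc.hardLine; Doc.text ")";
    ]
  in
  let doc = Doc.customLayout [fitsOnOneLine; breakAllArgs] in
  (Doc.toString ~width:80 doc, (* wide enough: the one-line candidate *)
   Doc.toString ~width:20 doc) (* too narrow: the fully broken layout *)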
else Doc.lparen; + Doc.softLine; + printedArgs; + Doc.breakableGroup ~forceBreak:true callback; + Doc.softLine; + Doc.rparen; + ] + in + + (* Thing.map( + * arg1, + * arg2, + * arg3, + * (param1, parm2) => doStuff(param1, parm2) + * ) + *) + let breakAllArgs = printArguments ~uncurried args cmtTbl in + Doc.customLayout [ + fitsOnOneLine; + arugmentsFitOnOneLine; + breakAllArgs; + ] + + and printArguments ~uncurried (args : (Asttypes.arg_label * Parsetree.expression) list) cmtTbl = + match args with + | [Nolabel, {pexp_desc = Pexp_construct ({txt = Longident.Lident "()"}, _)}] -> + if uncurried then Doc.text "(.)" else Doc.text "()" + | [(Nolabel, arg)] when ParsetreeViewer.isHuggableExpression arg -> + let argDoc = + let doc = printExpressionWithComments arg cmtTbl in + match Parens.expr arg with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc arg braces + | Nothing -> doc + in + Doc.concat [ + if uncurried then Doc.text "(." else Doc.lparen; + argDoc; + Doc.rparen; + ] + | args -> Doc.group ( + Doc.concat [ + if uncurried then Doc.text "(." else Doc.lparen; + Doc.indent ( + Doc.concat [ + if uncurried then Doc.line else Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) ( + List.map (fun arg -> printArgument arg cmtTbl) args + ) + ] + ); + Doc.trailingComma; + Doc.softLine; + Doc.rparen; + ] + ) + +(* + * argument ::= + * | _ (* syntax sugar *) + * | expr + * | expr : type + * | ~ label-name + * | ~ label-name + * | ~ label-name ? + * | ~ label-name = expr + * | ~ label-name = _ (* syntax sugar *) + * | ~ label-name = expr : type + * | ~ label-name = ? expr + * | ~ label-name = ? _ (* syntax sugar *) + * | ~ label-name = ? expr : type *) + and printArgument (argLbl, arg) cmtTbl = + match (argLbl, arg) with + (* ~a (punned)*) + | ( + (Asttypes.Labelled lbl), + ({pexp_desc=Pexp_ident {txt = Longident.Lident name}; + pexp_attributes = ([] | [({Location.txt = "ns.namedArgLoc";}, _)]) + } as argExpr) + ) when lbl = name && not (ParsetreeViewer.isBracedExpr argExpr) -> + let loc = match arg.pexp_attributes with + | ({Location.txt = "ns.namedArgLoc"; loc}, _)::_ -> loc + | _ -> arg.pexp_loc + in + let doc = Doc.concat [ + Doc.tilde; + printIdentLike lbl + ] in + printComments doc cmtTbl loc + + (* ~a: int (punned)*) + | ( + (Asttypes.Labelled lbl), + {pexp_desc = Pexp_constraint ( + {pexp_desc = Pexp_ident {txt = Longident.Lident name}} as argExpr, + typ + ); + pexp_loc; + pexp_attributes = ([] | [({Location.txt = "ns.namedArgLoc";}, _)]) as attrs + } + ) when lbl = name && not (ParsetreeViewer.isBracedExpr argExpr) -> + let loc = match attrs with + | ({Location.txt = "ns.namedArgLoc"; loc}, _)::_ -> + {loc with loc_end = pexp_loc.loc_end} + | _ -> arg.pexp_loc + in + let doc = Doc.concat [ + Doc.tilde; + printIdentLike lbl; + Doc.text ": "; + printTypExpr typ cmtTbl; + ] in + printComments doc cmtTbl loc + (* ~a? 
(optional lbl punned)*) + | ( + (Asttypes.Optional lbl), + {pexp_desc=Pexp_ident {txt = Longident.Lident name}; + pexp_attributes = ([] | [({Location.txt = "ns.namedArgLoc";}, _)]) + } + ) when lbl = name -> + let loc = match arg.pexp_attributes with + | ({Location.txt = "ns.namedArgLoc"; loc}, _)::_ -> loc + | _ -> arg.pexp_loc + in + let doc = Doc.concat [ + Doc.tilde; + printIdentLike lbl; + Doc.question; + ] in + printComments doc cmtTbl loc + | (_lbl, expr) -> + let (argLoc, expr) = match expr.pexp_attributes with + | ({Location.txt = "ns.namedArgLoc"; loc}, _)::attrs -> + (loc, {expr with pexp_attributes = attrs}) + | _ -> + expr.pexp_loc, expr + in + let printedLbl = match argLbl with + | Asttypes.Nolabel -> Doc.nil + | Asttypes.Labelled lbl -> + let doc = Doc.concat [Doc.tilde; printIdentLike lbl; Doc.equal] in + printComments doc cmtTbl argLoc + | Asttypes.Optional lbl -> + let doc = Doc.concat [Doc.tilde; printIdentLike lbl; Doc.equal; Doc.question] in + printComments doc cmtTbl argLoc + in + let printedExpr = + let doc = printExpressionWithComments expr cmtTbl in + match Parens.expr expr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr braces + | Nothing -> doc + in + let loc = {argLoc with loc_end = expr.pexp_loc.loc_end} in + let doc = Doc.concat [ + printedLbl; + printedExpr; + ] in + printComments doc cmtTbl loc + + and printCases (cases: Parsetree.case list) cmtTbl = + Doc.breakableGroup ~forceBreak:true ( + Doc.concat [ + Doc.lbrace; + Doc.concat [ + Doc.line; + printList + ~getLoc:(fun n -> {n.Parsetree.pc_lhs.ppat_loc with + loc_end = + match ParsetreeViewer.processBracesAttr n.Parsetree.pc_rhs with + | (None, _) -> n.pc_rhs.pexp_loc.loc_end + | (Some ({loc}, _), _) -> loc.Location.loc_end + }) + ~print:printCase + ~nodes:cases + cmtTbl + ]; + Doc.line; + Doc.rbrace; + ] + ) + + and printCase (case: Parsetree.case) cmtTbl = + let rhs = match case.pc_rhs.pexp_desc with + | Pexp_let _ + | Pexp_letmodule _ + | Pexp_letexception _ + | Pexp_open _ + | Pexp_sequence _ -> + printExpressionBlock ~braces:(ParsetreeViewer.isBracedExpr case.pc_rhs) case.pc_rhs cmtTbl + | _ -> + let doc = printExpressionWithComments case.pc_rhs cmtTbl in + begin match Parens.expr case.pc_rhs with + | Parenthesized -> addParens doc + | _ -> doc + end + + in + let guard = match case.pc_guard with + | None -> Doc.nil + | Some expr -> Doc.group ( + Doc.concat [ + Doc.line; + Doc.text "when "; + printExpressionWithComments expr cmtTbl; + ] + ) + in + let shouldInlineRhs = match case.pc_rhs.pexp_desc with + | Pexp_construct ({txt = Longident.Lident ("()" | "true" | "false")}, _) + | Pexp_constant _ + | Pexp_ident _ -> true + | _ when ParsetreeViewer.isHuggableRhs case.pc_rhs -> true + | _ -> false + in + let shouldIndentPattern = match case.pc_lhs.ppat_desc with + | Ppat_or _ -> false + | _ -> true + in + let patternDoc = + let doc = printPattern case.pc_lhs cmtTbl in + match case.pc_lhs.ppat_desc with + | Ppat_constraint _ -> addParens doc + | _ -> doc + in + let content = Doc.concat [ + if shouldIndentPattern then Doc.indent patternDoc else patternDoc; + Doc.indent guard; + Doc.text " =>"; + Doc.indent ( + Doc.concat [ + if shouldInlineRhs then Doc.space else Doc.line; + rhs; + ] + ) + ] in + Doc.group ( + Doc.concat [ + Doc.text "| "; + content; + ] + ) + + and printExprFunParameters ~inCallback ~uncurried ~hasConstraint parameters cmtTbl = + match parameters with + (* let f = _ => () *) + | [ParsetreeViewer.Parameter { + attrs = []; + lbl = Asttypes.Nolabel; + 
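(* Illustrative aside, not part of the original res_syntax sources: the labelled-argument
 * sugar recognised above, spelled out with the same Doc pieces the printer uses.
 * "a", "int" and "expr" are placeholders. *)
let _argumentSugarSketch =
  [
    Doc.concat [Doc.tilde; Doc.text "a"];                                           (* ~a        punned *)
    Doc.concat [Doc.tilde; Doc.text "a"; Doc.text ": "; Doc.text "int"];            (* ~a: int   punned with type *)
    Doc.concat [Doc.tilde; Doc.text "a"; Doc.question];                             (* ~a?       punned optional *)
    Doc.concat [Doc.tilde; Doc.text "a"; Doc.equal; Doc.question; Doc.text "expr"]; (* ~a=?expr *)
  ]
  |> List.map (Doc.toString ~width:80)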
defaultExpr = None; + pat = {Parsetree.ppat_desc = Ppat_any} + }] when not uncurried -> + if hasConstraint then Doc.text "(_)" else Doc.text "_" + (* let f = a => () *) + | [ParsetreeViewer.Parameter { + attrs = []; + lbl = Asttypes.Nolabel; + defaultExpr = None; + pat = {Parsetree.ppat_desc = Ppat_var stringLoc} + }] when not uncurried -> + let txtDoc = + let var = printIdentLike stringLoc.txt in + if hasConstraint then addParens var else var + in + printComments txtDoc cmtTbl stringLoc.loc + (* let f = () => () *) + | [ParsetreeViewer.Parameter { + attrs = []; + lbl = Asttypes.Nolabel; + defaultExpr = None; + pat = {ppat_desc = Ppat_construct({txt = Longident.Lident "()"}, None)} + }] when not uncurried -> + Doc.text "()" + (* let f = (~greeting, ~from as hometown, ~x=?) => () *) + | parameters -> + let lparen = if uncurried then Doc.text "(. " else Doc.lparen in + let shouldHug = ParsetreeViewer.parametersShouldHug parameters in + let printedParamaters = Doc.concat [ + if shouldHug || inCallback then Doc.nil else Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; if inCallback then Doc.space else Doc.line]) + (List.map (fun p -> printExpFunParameter p cmtTbl) parameters) + ] in + Doc.group ( + Doc.concat [ + lparen; + if shouldHug || inCallback then + printedParamaters + else Doc.indent printedParamaters; + if shouldHug || inCallback then + Doc.nil + else + Doc.concat [Doc.trailingComma; Doc.softLine]; + Doc.rparen; + ] + ) + + and printExpFunParameter parameter cmtTbl = + match parameter with + | ParsetreeViewer.NewTypes {attrs; locs = lbls} -> + Doc.group ( + Doc.concat [ + printAttributes attrs; + Doc.text "type "; + Doc.join ~sep:Doc.space (List.map (fun lbl -> + printComments (printIdentLike lbl.Asttypes.txt) cmtTbl lbl.Asttypes.loc + ) lbls) + ] + ) + | Parameter {attrs; lbl; defaultExpr; pat = pattern} -> + let (isUncurried, attrs) = ParsetreeViewer.processUncurriedAttribute attrs in + let uncurried = if isUncurried then Doc.concat [Doc.dot; Doc.space] else Doc.nil in + let attrs = printAttributes attrs in + (* =defaultValue *) + let defaultExprDoc = match defaultExpr with + | Some expr -> Doc.concat [ + Doc.text "="; + printExpressionWithComments expr cmtTbl + ] + | None -> Doc.nil + in + (* ~from as hometown + * ~from -> punning *) + let labelWithPattern = match (lbl, pattern) with + | (Asttypes.Nolabel, pattern) -> printPattern pattern cmtTbl + | ( + (Asttypes.Labelled lbl | Optional lbl), + {ppat_desc = Ppat_var stringLoc; + ppat_attributes = ([] | [({Location.txt = "ns.namedArgLoc";}, _)]) + } + ) when lbl = stringLoc.txt -> + (* ~d *) + Doc.concat [ + Doc.text "~"; + printIdentLike lbl; + ] + | ( + (Asttypes.Labelled lbl | Optional lbl), + ({ppat_desc = Ppat_constraint ({ ppat_desc = Ppat_var { txt } }, typ); + ppat_attributes = ([] | [({Location.txt = "ns.namedArgLoc";}, _)]) + }) + ) when lbl = txt -> + (* ~d: e *) + Doc.concat [ + Doc.text "~"; + printIdentLike lbl; + Doc.text ": "; + printTypExpr typ cmtTbl; + ] + | ((Asttypes.Labelled lbl | Optional lbl), pattern) -> + (* ~b as c *) + Doc.concat [ + Doc.text "~"; + printIdentLike lbl; + Doc.text " as "; + printPattern pattern cmtTbl + ] + in + let optionalLabelSuffix = match (lbl, defaultExpr) with + | (Asttypes.Optional _, None) -> Doc.text "=?" 
+ | _ -> Doc.nil + in + let doc = Doc.group ( + Doc.concat [ + uncurried; + attrs; + labelWithPattern; + defaultExprDoc; + optionalLabelSuffix; + ] + ) in + let cmtLoc = match defaultExpr with + | None -> + begin match pattern.ppat_attributes with + | ({Location.txt = "ns.namedArgLoc"; loc}, _)::_ -> + {loc with loc_end = pattern.ppat_loc.loc_end} + | _ -> pattern.ppat_loc + end + | Some expr -> + let startPos = match pattern.ppat_attributes with + | ({Location.txt = "ns.namedArgLoc"; loc}, _)::_ -> + loc.loc_start + | _ -> pattern.ppat_loc.loc_start + in { + pattern.ppat_loc with + loc_start = startPos; + loc_end = expr.pexp_loc.loc_end + } + in + printComments doc cmtTbl cmtLoc + + and printExpressionBlock ~braces expr cmtTbl = + let rec collectRows acc expr = match expr.Parsetree.pexp_desc with + | Parsetree.Pexp_letmodule (modName, modExpr, expr2) -> + let name = + let doc = Doc.text modName.txt in + printComments doc cmtTbl modName.loc + in + let letModuleDoc = Doc.concat [ + Doc.text "module "; + name; + Doc.text " = "; + printModExpr modExpr cmtTbl; + ] in + let loc = {expr.pexp_loc with loc_end = modExpr.pmod_loc.loc_end} in + collectRows ((loc, letModuleDoc)::acc) expr2 + | Pexp_letexception (extensionConstructor, expr2) -> + let loc = + let loc = {expr.pexp_loc with loc_end = extensionConstructor.pext_loc.loc_end} in + match getFirstLeadingComment cmtTbl loc with + | None -> loc + | Some comment -> + let cmtLoc = Comment.loc comment in + {cmtLoc with loc_end = loc.loc_end} + in + let letExceptionDoc = printExceptionDef extensionConstructor cmtTbl in + collectRows ((loc, letExceptionDoc)::acc) expr2 + | Pexp_open (overrideFlag, longidentLoc, expr2) -> + let openDoc = Doc.concat [ + Doc.text "open"; + printOverrideFlag overrideFlag; + Doc.space; + printLongidentLocation longidentLoc cmtTbl; + ] in + let loc = {expr.pexp_loc with loc_end = longidentLoc.loc.loc_end} in + collectRows ((loc, openDoc)::acc) expr2 + | Pexp_sequence (expr1, expr2) -> + let exprDoc = + let doc = printExpression expr1 cmtTbl in + match Parens.expr expr1 with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr1 braces + | Nothing -> doc + in + let loc = expr1.pexp_loc in + collectRows ((loc, exprDoc)::acc) expr2 + | Pexp_let (recFlag, valueBindings, expr2) -> + let loc = + let loc = match (valueBindings, List.rev valueBindings) with + | (vb::_, lastVb::_) -> {vb.pvb_loc with loc_end = lastVb.pvb_loc.loc_end} + | _ -> Location.none + in + match getFirstLeadingComment cmtTbl loc with + | None -> loc + | Some comment -> + let cmtLoc = Comment.loc comment in + {cmtLoc with loc_end = loc.loc_end} + in + let recFlag = match recFlag with + | Asttypes.Nonrecursive -> Doc.nil + | Asttypes.Recursive -> Doc.text "rec " + in + let letDoc = printValueBindings ~recFlag valueBindings cmtTbl in + (* let () = { + * let () = foo() + * () + * } + * We don't need to print the () on the last line of the block + *) + begin match expr2.pexp_desc with + | Pexp_construct ({txt = Longident.Lident "()"}, _) -> + List.rev ((loc, letDoc)::acc) + | _ -> + collectRows ((loc, letDoc)::acc) expr2 + end + | _ -> + let exprDoc = + let doc = printExpression expr cmtTbl in + match Parens.expr expr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr braces + | Nothing -> doc + in + List.rev ((expr.pexp_loc, exprDoc)::acc) + in + let rows = collectRows [] expr in + let block = + printList + ~getLoc:fst + ~nodes:rows + ~print:(fun (_, doc) _ -> doc) + ~forceBreak:true + cmtTbl + 
in + Doc.breakableGroup ~forceBreak:true ( + if braces then + Doc.concat [ + Doc.lbrace; + Doc.indent ( + Doc.concat [ + Doc.line; + block; + ] + ); + Doc.line; + Doc.rbrace; + ] + else block + ) + + (* + * // user types: + * let f = (a, b) => { a + b } + * + * // printer: everything is on one line + * let f = (a, b) => { a + b } + * + * // user types: over multiple lines + * let f = (a, b) => { + * a + b + * } + * + * // printer: over multiple lines + * let f = (a, b) => { + * a + b + * } + *) + and printBraces doc expr bracesLoc = + let overMultipleLines = + let open Location in + bracesLoc.loc_end.pos_lnum > bracesLoc.loc_start.pos_lnum + in + match expr.Parsetree.pexp_desc with + | Pexp_letmodule _ + | Pexp_letexception _ + | Pexp_let _ + | Pexp_open _ + | Pexp_sequence _ -> + (* already has braces *) + doc + | _ -> + Doc.breakableGroup ~forceBreak:overMultipleLines ( + Doc.concat [ + Doc.lbrace; + Doc.indent ( + Doc.concat [ + Doc.softLine; + if Parens.bracedExpr expr then addParens doc else doc; + ] + ); + Doc.softLine; + Doc.rbrace; + ] + ) + + and printOverrideFlag overrideFlag = match overrideFlag with + | Asttypes.Override -> Doc.text "!" + | Fresh -> Doc.nil + + and printDirectionFlag flag = match flag with + | Asttypes.Downto -> Doc.text " downto " + | Asttypes.Upto -> Doc.text " to " + + and printRecordRow (lbl, expr) cmtTbl = + let cmtLoc = {lbl.loc with loc_end = expr.pexp_loc.loc_end} in + let doc = Doc.group (Doc.concat [ + printLidentPath lbl cmtTbl; + Doc.text ": "; + (let doc = printExpressionWithComments expr cmtTbl in + match Parens.expr expr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr braces + | Nothing -> doc); + ]) in + printComments doc cmtTbl cmtLoc + + and printBsObjectRow (lbl, expr) cmtTbl = + let cmtLoc = {lbl.loc with loc_end = expr.pexp_loc.loc_end} in + let lblDoc = + let doc = Doc.concat [ + Doc.text "\""; + printLongident lbl.txt; + Doc.text "\""; + ] in + printComments doc cmtTbl lbl.loc + in + let doc = Doc.concat [ + lblDoc; + Doc.text ": "; + printExpressionWithComments expr cmtTbl + ] in + printComments doc cmtTbl cmtLoc + + (* The optional loc indicates whether we need to print the attributes in + * relation to some location. 
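(* Illustrative aside, not part of the original res_syntax sources: how printBraces keeps
 * braces the user wrote over multiple lines broken. Doc.breakableGroup ~forceBreak:true is
 * assumed to render its Doc.softLine separators as newlines even when the content would
 * fit, while ~forceBreak:false lets the group collapse. *)
let _bracesSketch =
  let body =
    Doc.concat [
      Doc.lbrace;
      Doc.indent (Doc.concat [Doc.softLine; Doc.text "a + b"]);
      Doc.softLine;
      Doc.rbrace;
    ]
  in
  (Doc.toString ~width:80 (Doc.breakableGroup ~forceBreak:false body),  (* {a + b} *)
   Doc.toString ~width:80 (Doc.breakableGroup ~forceBreak:true body))   (* braces kept on their own lines *)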
In practise this means the following: + * `@attr type t = string` -> on the same line, print on the same line + * `@attr + * type t = string` -> attr is on prev line, print the attributes + * with a line break between, we respect the users' original layout *) + and printAttributes ?loc (attrs: Parsetree.attributes) = + match ParsetreeViewer.filterParsingAttrs attrs with + | [] -> Doc.nil + | attrs -> + let lineBreak = match loc with + | None -> Doc.line + | Some loc -> begin match List.rev attrs with + | ({loc = firstLoc}, _)::_ when loc.loc_start.pos_lnum > firstLoc.loc_end.pos_lnum -> + Doc.hardLine; + | _ -> Doc.line + end + in + Doc.concat [ + Doc.group (Doc.join ~sep:Doc.line (List.map printAttribute attrs)); + lineBreak; + ] + + and printAttribute ((id, payload) : Parsetree.attribute) = + let attrName = Doc.concat [ + Doc.text "@"; + Doc.text id.txt + ] in + match payload with + | PStr [{pstr_desc = Pstr_eval (expr, attrs)}] -> + let exprDoc = printExpression expr CommentTable.empty in + let needsParens = match attrs with | [] -> false | _ -> true in + Doc.group ( + Doc.concat [ + attrName; + addParens ( + Doc.concat [ + printAttributes attrs; + if needsParens then addParens exprDoc else exprDoc; + ] + ) + ] + ) + | PTyp typ -> + Doc.group ( + Doc.concat [ + attrName; + Doc.lparen; + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.text ": "; + printTypExpr typ CommentTable.empty; + ] + ); + Doc.softLine; + Doc.rparen; + ] + ) + | _ -> attrName + + and printAttributeWithComments ((id, payload) : Parsetree.attribute) cmtTbl = + let attrName = Doc.text ("@" ^ id.txt) in + match payload with + | PStr [{pstr_desc = Pstr_eval (expr, attrs)}] -> + let exprDoc = printExpressionWithComments expr cmtTbl in + let needsParens = match attrs with | [] -> false | _ -> true in + Doc.group ( + Doc.concat [ + attrName; + addParens ( + Doc.concat [ + printAttributes attrs; + if needsParens then addParens exprDoc else exprDoc; + ] + ) + ] + ) + | _ -> attrName + + and printModExpr modExpr cmtTbl = + let doc = match modExpr.pmod_desc with + | Pmod_ident longidentLoc -> + printLongidentLocation longidentLoc cmtTbl + | Pmod_structure structure -> + Doc.breakableGroup ~forceBreak:true ( + Doc.concat [ + Doc.lbrace; + Doc.indent ( + Doc.concat [ + Doc.softLine; + printStructure structure cmtTbl; + ]; + ); + Doc.softLine; + Doc.rbrace; + ] + ) + | Pmod_unpack expr -> + let shouldHug = match expr.pexp_desc with + | Pexp_let _ -> true + | Pexp_constraint ( + {pexp_desc = Pexp_let _ }, + {ptyp_desc = Ptyp_package _packageType} + ) -> true + | _ -> false + in + let (expr, moduleConstraint) = match expr.pexp_desc with + | Pexp_constraint ( + expr, + {ptyp_desc = Ptyp_package packageType; ptyp_loc} + ) -> + let packageDoc = + let doc = printPackageType ~printModuleKeywordAndParens:false packageType cmtTbl in + printComments doc cmtTbl ptyp_loc + in + let typeDoc = Doc.group (Doc.concat [ + Doc.text ":"; + Doc.indent ( + Doc.concat [ + Doc.line; + packageDoc + ] + ) + ]) in + (expr, typeDoc) + | _ -> (expr, Doc.nil) + in + let unpackDoc = Doc.group(Doc.concat [ + printExpressionWithComments expr cmtTbl; + moduleConstraint; + ]) in + Doc.group ( + Doc.concat [ + Doc.text "unpack("; + if shouldHug then unpackDoc + else + Doc.concat [ + Doc.indent ( + Doc.concat [ + Doc.softLine; + unpackDoc; + ] + ); + Doc.softLine; + ]; + Doc.rparen; + ] + ) + | Pmod_extension extension -> + printExtensionWithComments ~atModuleLvl:false extension cmtTbl + | Pmod_apply _ -> + let (args, callExpr) = ParsetreeViewer.modExprApply 
modExpr in + let isUnitSugar = match args with + | [{pmod_desc = Pmod_structure []}] -> true + | _ -> false + in + let shouldHug = match args with + | [{pmod_desc = Pmod_structure _}] -> true + | _ -> false + in + Doc.group ( + Doc.concat [ + printModExpr callExpr cmtTbl; + if isUnitSugar then + printModApplyArg (List.hd args [@doesNotRaise]) cmtTbl + else + Doc.concat [ + Doc.lparen; + if shouldHug then + printModApplyArg (List.hd args [@doesNotRaise]) cmtTbl + else + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) ( + List.map (fun modArg -> printModApplyArg modArg cmtTbl) args + ) + ] + ); + if not shouldHug then + Doc.concat [ + Doc.trailingComma; + Doc.softLine; + ] + else Doc.nil; + Doc.rparen; + ] + ] + ) + | Pmod_constraint (modExpr, modType) -> + Doc.concat [ + printModExpr modExpr cmtTbl; + Doc.text ": "; + printModType modType cmtTbl; + ] + | Pmod_functor _ -> + printModFunctor modExpr cmtTbl + in + printComments doc cmtTbl modExpr.pmod_loc + + and printModFunctor modExpr cmtTbl = + let (parameters, returnModExpr) = ParsetreeViewer.modExprFunctor modExpr in + (* let shouldInline = match returnModExpr.pmod_desc with *) + (* | Pmod_structure _ | Pmod_ident _ -> true *) + (* | Pmod_constraint ({pmod_desc = Pmod_structure _}, _) -> true *) + (* | _ -> false *) + (* in *) + let (returnConstraint, returnModExpr) = match returnModExpr.pmod_desc with + | Pmod_constraint (modExpr, modType) -> + let constraintDoc = + let doc = printModType modType cmtTbl in + if Parens.modExprFunctorConstraint modType then addParens doc else doc + in + let modConstraint = Doc.concat [ + Doc.text ": "; + constraintDoc; + ] in + (modConstraint, printModExpr modExpr cmtTbl) + | _ -> (Doc.nil, printModExpr returnModExpr cmtTbl) + in + let parametersDoc = match parameters with + | [(attrs, {txt = "*"}, None)] -> + let attrs = match attrs with + | [] -> Doc.nil + | attrs -> Doc.concat [ + Doc.join ~sep:Doc.line (List.map printAttribute attrs); + Doc.line; + ] in + Doc.group (Doc.concat [ + attrs; + Doc.text "()" + ]) + | [([], {txt = lbl}, None)] -> Doc.text lbl + | parameters -> + Doc.group ( + Doc.concat [ + Doc.lparen; + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) ( + List.map (fun param -> printModFunctorParam param cmtTbl) parameters + ) + ] + ); + Doc.trailingComma; + Doc.softLine; + Doc.rparen; + ] + ) + in + Doc.group ( + Doc.concat [ + parametersDoc; + returnConstraint; + Doc.text " => "; + returnModExpr + ] + ) + + and printModFunctorParam (attrs, lbl, optModType) cmtTbl = + let cmtLoc = match optModType with + | None -> lbl.Asttypes.loc + | Some modType -> {lbl.loc with loc_end = + modType.Parsetree.pmty_loc.loc_end + } + in + let attrs = match attrs with + | [] -> Doc.nil + | attrs -> Doc.concat [ + Doc.join ~sep:Doc.line (List.map printAttribute attrs); + Doc.line; + ] in + let lblDoc = + let doc = Doc.text lbl.txt in + printComments doc cmtTbl lbl.loc + in + let doc = Doc.group ( + Doc.concat [ + attrs; + lblDoc; + (match optModType with + | None -> Doc.nil + | Some modType -> + Doc.concat [ + Doc.text ": "; + printModType modType cmtTbl + ]); + ] + ) in + printComments doc cmtTbl cmtLoc + + and printModApplyArg modExpr cmtTbl = + match modExpr.pmod_desc with + | Pmod_structure [] -> Doc.text "()" + | _ -> printModExpr modExpr cmtTbl + + + and printExceptionDef (constr : Parsetree.extension_constructor) cmtTbl = + let kind = match constr.pext_kind with + | Pext_rebind longident -> Doc.indent ( + 
Doc.concat [ + Doc.text " ="; + Doc.line; + printLongidentLocation longident cmtTbl; + ] + ) + | Pext_decl (Pcstr_tuple [], None) -> Doc.nil + | Pext_decl (args, gadt) -> + let gadtDoc = match gadt with + | Some typ -> Doc.concat [ + Doc.text ": "; + printTypExpr typ cmtTbl + ] + | None -> Doc.nil + in + Doc.concat [ + printConstructorArguments ~indent:false args cmtTbl; + gadtDoc + ] + in + let name = + printComments + (Doc.text constr.pext_name.txt) + cmtTbl + constr.pext_name.loc + in + let doc = Doc.group ( + Doc.concat [ + printAttributes constr.pext_attributes; + Doc.text "exception "; + name; + kind + ] + ) in + printComments doc cmtTbl constr.pext_loc + + and printExtensionConstructor (constr : Parsetree.extension_constructor) cmtTbl i = + let attrs = printAttributes constr.pext_attributes in + let bar = if i > 0 then Doc.text "| " + else Doc.ifBreaks (Doc.text "| ") Doc.nil + in + let kind = match constr.pext_kind with + | Pext_rebind longident -> Doc.indent ( + Doc.concat [ + Doc.text " ="; + Doc.line; + printLongidentLocation longident cmtTbl; + ] + ) + | Pext_decl (Pcstr_tuple [], None) -> Doc.nil + | Pext_decl (args, gadt) -> + let gadtDoc = match gadt with + | Some typ -> Doc.concat [ + Doc.text ": "; + printTypExpr typ cmtTbl; + ] + | None -> Doc.nil + in + Doc.concat [ + printConstructorArguments ~indent:false args cmtTbl; + gadtDoc + ] + in + let name = + printComments (Doc.text constr.pext_name.txt) cmtTbl constr.pext_name.loc + in + Doc.concat [ + bar; + Doc.group ( + Doc.concat [ + attrs; + name; + kind; + ] + ) + ] + + let printImplementation ~width (s: Parsetree.structure) comments = + let cmtTbl = CommentTable.make () in + CommentTable.walkStructure s cmtTbl comments; + (* CommentTable.log cmtTbl; *) + let doc = printStructure s cmtTbl in + (* Doc.debug doc; *) + let stringDoc = Doc.toString ~width doc in + print_string stringDoc + + let printInterface ~width (s: Parsetree.signature) comments = + let cmtTbl = CommentTable.make () in + CommentTable.walkSignature s cmtTbl comments; + let stringDoc = Doc.toString ~width (printSignature s cmtTbl) in + print_string stringDoc + +end + +module Scanner = struct + type mode = Template | Jsx | Diamond + + type t = { + filename: string; + src: bytes; + mutable err: + startPos: Lexing.position + -> endPos: Lexing.position + -> Diagnostics.category + -> unit; + mutable ch: int; (* current character *) + mutable offset: int; (* character offset *) + mutable rdOffset: int; (* reading offset (position after current character) *) + mutable lineOffset: int; (* current line offset *) + mutable lnum: int; (* current line number *) + mutable mode: mode list; + } + + let setDiamondMode scanner = + scanner.mode <- Diamond::scanner.mode + + let setTemplateMode scanner = + scanner.mode <- Template::scanner.mode + + let setJsxMode scanner = + scanner.mode <- Jsx::scanner.mode + + let popMode scanner mode = + match scanner.mode with + | m::ms when m = mode -> + scanner.mode <- ms + | _ -> () + + let inDiamondMode scanner = match scanner.mode with + | Diamond::_ -> true + | _ -> false + + let inJsxMode scanner = match scanner.mode with + | Jsx::_ -> true + | _ -> false + + let inTemplateMode scanner = match scanner.mode with + | Template::_ -> true + | _ -> false + + let position scanner = Lexing.{ + pos_fname = scanner.filename; + (* line number *) + pos_lnum = scanner.lnum; + (* offset of the beginning of the line (number + of characters between the beginning of the scanner and the beginning + of the line) *) + pos_bol = scanner.lineOffset; + 
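(* Illustrative aside, not part of the original res_syntax sources: a tiny driver for the
 * position bookkeeping described in these comments, using make, next, skipWhitespace and
 * position from this Scanner module. The source text is made up. *)
let _positionSketch () =
  let scanner = make (Bytes.of_string "a\nb") "demo.res" in
  next scanner;            (* move past 'a' onto the newline *)
  skipWhitespace scanner;  (* consumes the newline, bumping lnum and lineOffset *)
  let pos = position scanner in
  (* expected: line 2, with pos_bol = pos_cnum = 2, i.e. we sit on 'b' at the
     start of the second line *)
  (pos.Lexing.pos_lnum, pos.Lexing.pos_bol, pos.Lexing.pos_cnum)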
(* [pos_cnum] is the offset of the position (number of + characters between the beginning of the scanner and the position). *) + pos_cnum = scanner.offset; + } + + let next scanner = + if scanner.rdOffset < (Bytes.length scanner.src) then ( + scanner.offset <- scanner.rdOffset; + let ch = (Bytes.get [@doesNotRaise]) scanner.src scanner.rdOffset in + scanner.rdOffset <- scanner.rdOffset + 1; + scanner.ch <- int_of_char ch + ) else ( + scanner.offset <- Bytes.length scanner.src; + scanner.ch <- -1 + ) + + let peek scanner = + if scanner.rdOffset < (Bytes.length scanner.src) then + int_of_char (Bytes.unsafe_get scanner.src scanner.rdOffset) + else + -1 + + let make b filename = + let scanner = { + filename; + src = b; + err = (fun ~startPos:_ ~endPos:_ _ -> ()); + ch = CharacterCodes.space; + offset = 0; + rdOffset = 0; + lineOffset = 0; + lnum = 1; + mode = []; + } in + next scanner; + scanner + + let skipWhitespace scanner = + let rec scan () = + if scanner.ch == CharacterCodes.space || scanner.ch == CharacterCodes.tab then ( + next scanner; + scan() + ) else if CharacterCodes.isLineBreak scanner.ch then ( + scanner.lineOffset <- scanner.offset + 1; + scanner.lnum <- scanner.lnum + 1; + next scanner; + scan() + ) else ( + () + ) + in + scan() + + let scanIdentifier scanner = + let startOff = scanner.offset in + while ( + CharacterCodes.isLetter scanner.ch || + CharacterCodes.isDigit scanner.ch || + CharacterCodes.underscore == scanner.ch || + CharacterCodes.singleQuote == scanner.ch + ) do + next scanner + done; + let str = Bytes.sub_string scanner.src startOff (scanner.offset - startOff) in + Token.lookupKeyword str + + let scanDigits scanner ~base = + if base <= 10 then ( + while CharacterCodes.isDigit scanner.ch || scanner.ch == CharacterCodes.underscore do + next scanner + done; + ) else ( + while CharacterCodes.isHex scanner.ch || scanner.ch == CharacterCodes.underscore do + next scanner + done; + ) + + (* float: (0…9) { 0…9∣ _ } [. 
{ 0…9∣ _ }] [(e∣ E) [+∣ -] (0…9) { 0…9∣ _ }] *) + let scanNumber scanner = + let startOff = scanner.offset in + + (* integer part *) + let base, _prefix = if scanner.ch != CharacterCodes.dot then ( + if scanner.ch == CharacterCodes._0 then ( + next scanner; + let ch = CharacterCodes.lower scanner.ch in + if ch == CharacterCodes.Lower.x then ( + next scanner; + 16, 'x' + ) else if ch == CharacterCodes.Lower.o then ( + next scanner; + 8, 'o' + ) else if ch == CharacterCodes.Lower.b then ( + next scanner; + 2, 'b' + ) else ( + 8, '0' + ) + ) else ( + 10, ' ' + ) + ) else (10, ' ') + in + scanDigits scanner ~base; + + (* *) + let isFloat = if CharacterCodes.dot == scanner.ch then ( + next scanner; + scanDigits scanner ~base; + true + ) else ( + false + ) in + + (* exponent part *) + let isFloat = + if let exp = CharacterCodes.lower scanner.ch in + exp == CharacterCodes.Lower.e || exp == CharacterCodes.Lower.p + then ( + next scanner; + if scanner.ch == CharacterCodes.plus || scanner.ch == CharacterCodes.minus then + next scanner; + scanDigits scanner ~base; + true + ) else + isFloat + in + let literal = + Bytes.sub_string scanner.src startOff (scanner.offset - startOff) + in + + (* suffix *) + let suffix = + if scanner.ch >= CharacterCodes.Lower.g && scanner.ch <= CharacterCodes.Lower.z + || scanner.ch >= CharacterCodes.Upper.g && scanner.ch <= CharacterCodes.Upper.z + then ( + let ch = scanner.ch in + next scanner; + Some (Char.unsafe_chr ch) + ) else + None + in + if isFloat then + Token.Float {f = literal; suffix} + else + Token.Int {i = literal; suffix} + + let scanExoticIdentifier scanner = + next scanner; + let buffer = Buffer.create 20 in + let startPos = position scanner in + + let rec scan () = + if scanner.ch == CharacterCodes.eof then + let endPos = position scanner in + scanner.err ~startPos ~endPos (Diagnostics.message "Did you forget a \" here?") + else if scanner.ch == CharacterCodes.doubleQuote then ( + next scanner + ) else if CharacterCodes.isLineBreak scanner.ch then ( + scanner.lineOffset <- scanner.offset + 1; + scanner.lnum <- scanner.lnum + 1; + let endPos = position scanner in + scanner.err ~startPos ~endPos (Diagnostics.message "Did you forget a \" here?"); + next scanner + ) else ( + Buffer.add_char buffer ((Char.chr [@doesNotRaise]) scanner.ch); + next scanner; + scan() + ) + in + scan(); + Token.Lident (Buffer.contents buffer) + + let scanStringEscapeSequence ~startPos scanner = + (* \ already consumed *) + if CharacterCodes.Lower.n == scanner.ch + || CharacterCodes.Lower.t == scanner.ch + || CharacterCodes.Lower.b == scanner.ch + || CharacterCodes.Lower.r == scanner.ch + || CharacterCodes.backslash == scanner.ch + || CharacterCodes.space == scanner.ch + || CharacterCodes.singleQuote == scanner.ch + || CharacterCodes.doubleQuote == scanner.ch + then + next scanner + else + let (n, base, max) = + if CharacterCodes.isDigit scanner.ch then + (* decimal *) + (3, 10, 255) + else if scanner.ch == CharacterCodes.Lower.o then + (* octal *) + let () = next scanner in + (3, 8, 255) + else if scanner.ch == CharacterCodes.Lower.x then + (* hex *) + let () = next scanner in + (2, 16, 255) + else + (* unknown escape sequence + * TODO: we should warn the user here. 
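(* Illustrative aside, not part of the original res_syntax sources: what scanNumber is
 * expected to produce for a few made-up literals. A fresh scanner starts out positioned on
 * the first character, so scanNumber can be called directly. *)
let _scanNumberSketch () =
  let tokenOf src = scanNumber (make (Bytes.of_string src) "demo.res") in
  [
    tokenOf "0x2a";   (* expected: Token.Int {i = "0x2a"; suffix = None} *)
    tokenOf "1_000";  (* expected: Token.Int {i = "1_000"; suffix = None} *)
    tokenOf "1.5e3";  (* expected: Token.Float {f = "1.5e3"; suffix = None} *)
  ]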
Let's not make it a hard error for now, for reason compat *) + (* let pos = position scanner in *) + (* let () = *) + (* let msg = if scanner.ch == -1 then *) + (* "unclosed escape sequence" *) + (* else "unknown escape sequence" *) + (* in *) + (* scanner.err ~startPos ~endPos:pos (Diagnostics.message msg) *) + (* in *) + (-1, -1, -1) + in + if n < 0 then () + else + let rec while_ n x = + if n == 0 then x + else + let d = CharacterCodes.digitValue scanner.ch in + if d >= base then + let pos = position scanner in + let msg = if scanner.ch == -1 then + "unclosed escape sequence" + else "unknown escape sequence" + in + scanner.err ~startPos ~endPos:pos (Diagnostics.message msg); + -1 + else + let () = next scanner in + while_ (n - 1) (x * base + d) + in + let x = while_ n 0 in + if x > max then + let pos = position scanner in + let msg = "invalid escape sequence (value too high)" in + scanner.err ~startPos ~endPos:pos (Diagnostics.message msg); + () + + let scanString scanner = + let offs = scanner.offset in + + let startPos = position scanner in + let rec scan () = + if scanner.ch == CharacterCodes.eof then + let endPos = position scanner in + scanner.err ~startPos ~endPos Diagnostics.unclosedString + else if scanner.ch == CharacterCodes.doubleQuote then ( + next scanner; + ) else if scanner.ch == CharacterCodes.backslash then ( + let startPos = position scanner in + next scanner; + scanStringEscapeSequence ~startPos scanner; + scan () + ) else if CharacterCodes.isLineBreak scanner.ch then ( + scanner.lineOffset <- scanner.offset + 1; + scanner.lnum <- scanner.lnum + 1; + next scanner; + scan () + ) else ( + next scanner; + scan () + ) + in + scan (); + Token.String (Bytes.sub_string scanner.src offs (scanner.offset - offs - 1)) + + (* I wonder if this gets inlined *) + let convertNumber scanner ~n ~base = + let x = ref 0 in + for _ = n downto 1 do + let d = CharacterCodes.digitValue scanner.ch in + x := (!x * base) + d; + next scanner + done; + !x + + let scanEscape scanner = + (* let offset = scanner.offset in *) + let c = match scanner.ch with + | 98 (* b *) -> next scanner; '\008' + | 110 (* n *) -> next scanner; '\010' + | 114 (* r *) -> next scanner; '\013' + | 116 (* t *) -> next scanner; '\009' + | ch when CharacterCodes.isDigit ch -> + let x = convertNumber scanner ~n:3 ~base:10 in + (Char.chr [@doesNotRaise]) x + | ch when ch == CharacterCodes.Lower.x -> + next scanner; + let x = convertNumber scanner ~n:2 ~base:16 in + (Char.chr [@doesNotRaise]) x + | ch when ch == CharacterCodes.Lower.o -> + next scanner; + let x = convertNumber scanner ~n:3 ~base:8 in + (Char.chr [@doesNotRaise]) x + | ch -> + next scanner; + (Char.chr [@doesNotRaise]) ch + in + next scanner; (* Consume \' *) + Token.Character c + + let scanSingleLineComment scanner = + let startOff = scanner.offset in + let startPos = position scanner in + while not (CharacterCodes.isLineBreak scanner.ch) && scanner.ch >= 0 do + next scanner + done; + let endPos = position scanner in + Token.Comment ( + Comment.makeSingleLineComment + ~loc:(Location.{loc_start = startPos; loc_end = endPos; loc_ghost = false}) + (Bytes.sub_string scanner.src startOff (scanner.offset - startOff)) + ) + + let scanMultiLineComment scanner = + let startOff = scanner.offset in + let startPos = position scanner in + let rec scan ~depth () = + if scanner.ch == CharacterCodes.asterisk && + peek scanner == CharacterCodes.forwardslash then ( + next scanner; + next scanner; + if depth > 0 then scan ~depth:(depth - 1) () else () + ) else if scanner.ch == 
CharacterCodes.eof then ( + let endPos = position scanner in + scanner.err ~startPos ~endPos Diagnostics.unclosedComment + ) else if scanner.ch == CharacterCodes.forwardslash + && peek scanner == CharacterCodes. asterisk then ( + next scanner; + next scanner; + scan ~depth:(depth + 1) () + ) else ( + if CharacterCodes.isLineBreak scanner.ch then ( + scanner.lineOffset <- scanner.offset + 1; + scanner.lnum <- scanner.lnum + 1; + ); + next scanner; + scan ~depth () + ) + in + scan ~depth:0 (); + Token.Comment ( + Comment.makeMultiLineComment + ~loc:(Location.{loc_start = startPos; loc_end = (position scanner); loc_ghost = false}) + (Bytes.sub_string scanner.src startOff (scanner.offset - 2 - startOff)) + ) + + let scanTemplate scanner = + let startOff = scanner.offset in + let startPos = position scanner in + + let rec scan () = + if scanner.ch == CharacterCodes.eof then ( + let endPos = position scanner in + scanner.err ~startPos ~endPos Diagnostics.unclosedTemplate; + popMode scanner Template; + Token.TemplateTail( + Bytes.sub_string scanner.src startOff (scanner.offset - 2 - startOff) + ) + ) + else if scanner.ch == CharacterCodes.backslash then ( + next scanner; + if scanner.ch == CharacterCodes.backtick + || scanner.ch == CharacterCodes.backslash + || scanner.ch == CharacterCodes.dollar + then next scanner; + scan() + ) else if scanner.ch == CharacterCodes.backtick then ( + next scanner; + let contents = + Bytes.sub_string scanner.src startOff (scanner.offset - 1 - startOff) + in + popMode scanner Template; + Token.TemplateTail contents + ) else if scanner.ch == CharacterCodes.dollar && + peek scanner == CharacterCodes.lbrace + then ( + next scanner; (* consume $ *) + next scanner; (* consume { *) + let contents = + Bytes.sub_string scanner.src startOff (scanner.offset - 2 - startOff) + in + popMode scanner Template; + Token.TemplatePart contents + ) else ( + if CharacterCodes.isLineBreak scanner.ch then ( + scanner.lineOffset <- scanner.offset + 1; + scanner.lnum <- scanner.lnum + 1; + ); + next scanner; + scan() + ) + in + scan() + + let rec scan scanner = + if not (inTemplateMode scanner) then skipWhitespace scanner; + let startPos = position scanner in + let ch = scanner.ch in + let token = if inTemplateMode scanner then + scanTemplate scanner + else if ch == CharacterCodes.underscore then ( + let nextCh = peek scanner in + if nextCh == CharacterCodes.underscore || CharacterCodes.isDigit nextCh || CharacterCodes.isLetter nextCh then + scanIdentifier scanner + else ( + next scanner; + Token.Underscore + ) + ) else if CharacterCodes.isLetter ch then + scanIdentifier scanner + else if CharacterCodes.isDigit ch then + scanNumber scanner + else begin + next scanner; + if ch == CharacterCodes.dot then + if scanner.ch == CharacterCodes.dot then ( + next scanner; + if scanner.ch == CharacterCodes.dot then ( + next scanner; + Token.DotDotDot + ) else ( + Token.DotDot + ) + ) else ( + Token.Dot + ) + else if ch == CharacterCodes.doubleQuote then + scanString scanner + else if ch == CharacterCodes.singleQuote then ( + if scanner.ch == CharacterCodes.backslash + && not ((peek scanner) == CharacterCodes.doubleQuote) (* start of exotic ident *) + then ( + next scanner; + scanEscape scanner + ) else if (peek scanner) == CharacterCodes.singleQuote then ( + let ch = scanner.ch in + next scanner; + next scanner; + Token.Character ((Char.chr [@doesNotRaise]) ch) + ) else ( + SingleQuote + ) + ) else if ch == CharacterCodes.bang then + if scanner.ch == CharacterCodes.equal then ( + next scanner; + if 
scanner.ch == CharacterCodes.equal then ( + next scanner; + Token.BangEqualEqual + ) else ( + Token.BangEqual + ) + ) else ( + Token.Bang + ) + else if ch == CharacterCodes.semicolon then + Token.Semicolon + else if ch == CharacterCodes.equal then ( + if scanner.ch == CharacterCodes.greaterThan then ( + next scanner; + Token.EqualGreater + ) else if scanner.ch == CharacterCodes.equal then ( + next scanner; + if scanner.ch == CharacterCodes.equal then ( + next scanner; + Token.EqualEqualEqual + ) else ( + Token.EqualEqual + ) + ) else ( + Token.Equal + ) + ) else if ch == CharacterCodes.bar then + if scanner.ch == CharacterCodes.bar then ( + next scanner; + Token.Lor + ) else if scanner.ch == CharacterCodes.greaterThan then ( + next scanner; + Token.BarGreater + ) else ( + Token.Bar + ) + else if ch == CharacterCodes.ampersand then + if scanner.ch == CharacterCodes.ampersand then ( + next scanner; + Token.Land + ) else ( + Token.Band + ) + else if ch == CharacterCodes.lparen then + Token.Lparen + else if ch == CharacterCodes.rparen then + Token.Rparen + else if ch == CharacterCodes.lbracket then + Token.Lbracket + else if ch == CharacterCodes.rbracket then + Token.Rbracket + else if ch == CharacterCodes.lbrace then + Token.Lbrace + else if ch == CharacterCodes.rbrace then + Token.Rbrace + else if ch == CharacterCodes.comma then + Token.Comma + else if ch == CharacterCodes.colon then + if scanner.ch == CharacterCodes.equal then( + next scanner; + Token.ColonEqual + ) else if (scanner.ch == CharacterCodes.greaterThan) then ( + next scanner; + Token.ColonGreaterThan + ) else ( + Token.Colon + ) + else if ch == CharacterCodes.backslash then + scanExoticIdentifier scanner + else if ch == CharacterCodes.forwardslash then + if scanner.ch == CharacterCodes.forwardslash then ( + next scanner; + scanSingleLineComment scanner + ) else if (scanner.ch == CharacterCodes.asterisk) then ( + next scanner; + scanMultiLineComment scanner + ) else if scanner.ch == CharacterCodes.dot then ( + next scanner; + Token.ForwardslashDot + ) else ( + Token.Forwardslash + ) + else if ch == CharacterCodes.minus then + if scanner.ch == CharacterCodes.dot then ( + next scanner; + Token.MinusDot + ) else if scanner.ch == CharacterCodes.greaterThan then ( + next scanner; + Token.MinusGreater; + ) else ( + Token.Minus + ) + else if ch == CharacterCodes.plus then + if scanner.ch == CharacterCodes.dot then ( + next scanner; + Token.PlusDot + ) else if scanner.ch == CharacterCodes.plus then ( + next scanner; + Token.PlusPlus + ) else if scanner.ch == CharacterCodes.equal then ( + next scanner; + Token.PlusEqual + ) else ( + Token.Plus + ) + else if ch == CharacterCodes.greaterThan then + if scanner.ch == CharacterCodes.equal && not (inDiamondMode scanner) then ( + next scanner; + Token.GreaterEqual + ) else ( + Token.GreaterThan + ) + else if ch == CharacterCodes.lessThan then + (* Imagine the following:
<div>< + * < indicates the start of a new jsx-element, the parser expects + * the name of a new element after the < + * Example: <div> <div + * But what if we have a / here: example </ in <div></div>
+ * This signals a closing element. To simulate the two-token lookahead, + * the </ is emitted as a single new token LessThanSlash *) + (* Imagine: <div> <Navbar /> < + * is `<` the start of a jsx-child? <div … + * or is it the start of a closing tag? </div>
+ * reconsiderLessThan peeks at the next token and + * determines the correct token to disambiguate *) + let reconsiderLessThan scanner = + (* < consumed *) + skipWhitespace scanner; + if scanner.ch == CharacterCodes.forwardslash then + let () = next scanner in + Token.LessThanSlash + else + Token.LessThan + + (* If an operator has whitespace around both sides, it's a binary operator *) + let isBinaryOp src startCnum endCnum = + if startCnum == 0 then false + else + let leftOk = + let c = + (startCnum - 1) + |> (Bytes.get [@doesNotRaise]) src + |> Char.code + in + c == CharacterCodes.space || + c == CharacterCodes.tab || + CharacterCodes.isLineBreak c + in + let rightOk = + let c = + if endCnum == Bytes.length src then -1 + else endCnum |> (Bytes.get [@doesNotRaise]) src |> Char.code + in + c == CharacterCodes.space || + c == CharacterCodes.tab || + CharacterCodes.isLineBreak c || + c == CharacterCodes.eof + in + leftOk && rightOk +end + +(* AST for js externals *) +module JsFfi = struct + type scope = + | Global + | Module of string (* bs.module("path") *) + | Scope of Longident.t (* bs.scope(/"window", "location"/) *) + + type label_declaration = { + jld_attributes: Parsetree.attributes; [@live] + jld_name: string; + jld_alias: string; + jld_type: Parsetree.core_type; + jld_loc: Location.t + } + + type importSpec = + | Default of label_declaration + | Spec of label_declaration list + + type import_description = { + jid_loc: Location.t; + jid_spec: importSpec; + jid_scope: scope; + jid_attributes: Parsetree.attributes; + } + + let decl ~attrs ~loc ~name ~alias ~typ = { + jld_loc = loc; + jld_attributes = attrs; + jld_name = name; + jld_alias = alias; + jld_type = typ + } + + let importDescr ~attrs ~scope ~importSpec ~loc = { + jid_loc = loc; + jid_spec = importSpec; + jid_scope = scope; + jid_attributes = attrs; + } + + let toParsetree importDescr = + let bsVal = (Location.mknoloc "bs.val", Parsetree.PStr []) in + let attrs = match importDescr.jid_scope with + | Global -> [bsVal] + (* @genType.import("./MyMath"), + * @genType.import(/"./MyMath", "default"/) *) + | Module s -> + let structure = [ + Parsetree.Pconst_string (s, None) + |> Ast_helper.Exp.constant + |> Ast_helper.Str.eval + ] in + let genType = (Location.mknoloc "genType.import", Parsetree.PStr structure) in + [genType] + | Scope longident -> + let structureItem = + let expr = match Longident.flatten longident |> List.map (fun s -> + Ast_helper.Exp.constant (Parsetree.Pconst_string (s, None)) + ) with + | [expr] -> expr + | [] as exprs | (_ as exprs) -> exprs |> Ast_helper.Exp.tuple + in + Ast_helper.Str.eval expr + in + let bsScope = ( + Location.mknoloc "bs.scope", + Parsetree. 
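(* Illustrative aside, not part of the original res_syntax sources: the whitespace heuristic
 * of Scanner.isBinaryOp defined above, applied to made-up source strings. In both strings
 * the minus sign spans character offsets 2..3. *)
let _isBinaryOpSketch =
  (Scanner.isBinaryOp (Bytes.of_string "a - b") 2 3,  (* expected: true,  treated as binary minus *)
   Scanner.isBinaryOp (Bytes.of_string "a -b") 2 3)   (* expected: false, no whitespace on the right *)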
PStr [structureItem] + ) in + [bsVal; bsScope] + in + let valueDescrs = match importDescr.jid_spec with + | Default decl -> + let prim = [decl.jld_name] in + let allAttrs = + List.concat [attrs; importDescr.jid_attributes] + |> List.map (fun attr -> match attr with + | ( + {Location.txt = "genType.import"} as id, + Parsetree.PStr [{pstr_desc = Parsetree.Pstr_eval (moduleName, _) }] + ) -> + let default = + Parsetree.Pconst_string ("default", None) |> Ast_helper.Exp.constant + in + let structureItem = + [moduleName; default] + |> Ast_helper.Exp.tuple + |> Ast_helper.Str.eval + in + (id, Parsetree.PStr [structureItem]) + | attr -> attr + ) + in + [Ast_helper.Val.mk + ~loc:importDescr.jid_loc + ~prim + ~attrs:allAttrs + (Location.mknoloc decl.jld_alias) + decl.jld_type + |> Ast_helper.Str.primitive] + | Spec decls -> + List.map (fun decl -> + let prim = [decl.jld_name] in + let allAttrs = List.concat [attrs; decl.jld_attributes] in + Ast_helper.Val.mk + ~loc:importDescr.jid_loc + ~prim + ~attrs:allAttrs + (Location.mknoloc decl.jld_alias) + decl.jld_type + |> Ast_helper.Str.primitive ~loc:decl.jld_loc + ) decls + in + let jsFfiAttr = (Location.mknoloc "ns.jsFfi", Parsetree.PStr []) in + Ast_helper.Mod.structure ~loc:importDescr.jid_loc valueDescrs + |> Ast_helper.Incl.mk ~attrs:[jsFfiAttr] ~loc:importDescr.jid_loc + |> Ast_helper.Str.include_ ~loc:importDescr.jid_loc +end + +module ParsetreeCompatibility = struct + let concatLongidents l1 l2 = + let parts1 = Longident.flatten l1 in + let parts2 = Longident.flatten l2 in + match List.concat [parts1; parts2] |> Longident.unflatten with + | Some longident -> longident + | None -> l2 + + (* TODO: support nested open's ? *) + let rec rewritePpatOpen longidentOpen pat = + let open Parsetree in + match pat.ppat_desc with + | Ppat_array (first::rest) -> + (* Color.[Red, Blue, Green] -> [Color.Red, Blue, Green] *) + {pat with ppat_desc = Ppat_array ((rewritePpatOpen longidentOpen first)::rest)} + | Ppat_tuple (first::rest) -> + (* Color.(Red, Blue, Green) -> (Color.Red, Blue, Green) *) + {pat with ppat_desc = Ppat_tuple ((rewritePpatOpen longidentOpen first)::rest)} + | Ppat_construct( + {txt = Longident.Lident "::"} as listConstructor, + Some ({ppat_desc=Ppat_tuple (pat::rest)} as element) + ) -> + (* Color.(list[Red, Blue, Green]) -> list[Color.Red, Blue, Green] *) + {pat with ppat_desc = + Ppat_construct ( + listConstructor, + Some {element with ppat_desc = Ppat_tuple ((rewritePpatOpen longidentOpen pat)::rest)} + ) + } + | Ppat_construct ({txt = constructor} as longidentLoc, optPattern) -> + (* Foo.(Bar(a)) -> Foo.Bar(a) *) + {pat with ppat_desc = + Ppat_construct ( + {longidentLoc with txt = concatLongidents longidentOpen constructor}, + optPattern + ) + } + | Ppat_record (({txt = lbl} as longidentLoc, firstPat)::rest, flag) -> + (* Foo.{x} -> {Foo.x: x} *) + let firstRow = ( + {longidentLoc with txt = concatLongidents longidentOpen lbl}, + firstPat + ) in + {pat with ppat_desc = Ppat_record (firstRow::rest, flag)} + | Ppat_or (pat1, pat2) -> + {pat with ppat_desc = Ppat_or ( + rewritePpatOpen longidentOpen pat1, + rewritePpatOpen longidentOpen pat2 + )} + | Ppat_constraint (pattern, typ) -> + {pat with ppat_desc = Ppat_constraint ( + rewritePpatOpen longidentOpen pattern, + typ + )} + | Ppat_type ({txt = constructor} as longidentLoc) -> + {pat with ppat_desc = Ppat_type ( + {longidentLoc with txt = concatLongidents longidentOpen constructor} + )} + | Ppat_lazy p -> + {pat with ppat_desc = Ppat_lazy (rewritePpatOpen longidentOpen p)} + | 
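(* Illustrative aside, not part of the original res_syntax sources: feeding a made-up JS
 * import through JsFfi.toParsetree above. It models a default import bound under the alias
 * "leftPad"; the module path and the Typ.any placeholder type are fabricated. *)
let _jsFfiSketch =
  let decl =
    JsFfi.decl ~attrs:[] ~loc:Location.none ~name:"default" ~alias:"leftPad"
      ~typ:(Ast_helper.Typ.any ())
  in
  JsFfi.toParsetree
    (JsFfi.importDescr ~attrs:[] ~loc:Location.none
       ~scope:(JsFfi.Module "leftpad") ~importSpec:(JsFfi.Default decl))
  (* expected: an `include` structure item wrapping one external declaration that carries
     the genType.import attribute constructed above *)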
Ppat_exception p -> + {pat with ppat_desc = Ppat_exception (rewritePpatOpen longidentOpen p)} + | _ -> pat + + let rec rewriteReasonFastPipe expr = + let open Parsetree in + match expr.pexp_desc with + | Pexp_apply ( + {pexp_desc = Pexp_apply ( + {pexp_desc = Pexp_ident {txt = Longident.Lident "|."}} as op, + [Asttypes.Nolabel, lhs; Nolabel, rhs] + ); pexp_attributes = subAttrs}, + args + ) -> + let rhsLoc = {rhs.pexp_loc with loc_end = expr.pexp_loc.loc_end} in + let newLhs = + let expr = rewriteReasonFastPipe lhs in + {expr with pexp_attributes = subAttrs} + in + let allArgs = + (Asttypes.Nolabel, newLhs)::[ + Asttypes.Nolabel, Ast_helper.Exp.apply ~loc:rhsLoc rhs args + ] + in + Ast_helper.Exp.apply ~attrs:expr.pexp_attributes ~loc:expr.pexp_loc op allArgs + | _ -> expr + + let makeReasonArityMapper ~forPrinter = + let open Ast_mapper in + { default_mapper with + expr = begin fun mapper expr -> + match expr with + (* Don't mind this case, Reason doesn't handle this. *) + (* | {pexp_desc = Pexp_variant (lbl, args); pexp_loc; pexp_attributes} -> *) + (* let newArgs = match args with *) + (* | (Some {pexp_desc = Pexp_tuple [{pexp_desc = Pexp_tuple _ } as sp]}) as args-> *) + (* if forPrinter then args else Some sp *) + (* | Some {pexp_desc = Pexp_tuple [sp]} -> Some sp *) + (* | _ -> args *) + (* in *) + (* default_mapper.expr mapper {pexp_desc=Pexp_variant(lbl, newArgs); pexp_loc; pexp_attributes} *) + | {pexp_desc=Pexp_construct(lid, args); pexp_loc; pexp_attributes} -> + let newArgs = match args with + | (Some {pexp_desc = Pexp_tuple [{pexp_desc = Pexp_tuple _ } as sp]}) as args -> + if forPrinter then args else Some sp + | Some {pexp_desc = Pexp_tuple [sp]} -> Some sp + | _ -> args + in + default_mapper.expr mapper { pexp_desc=Pexp_construct(lid, newArgs); pexp_loc; pexp_attributes} + | expr -> + default_mapper.expr mapper (rewriteReasonFastPipe expr) + end; + pat = begin fun mapper pattern -> + match pattern with + (* Don't mind this case, Reason doesn't handle this. 
*) + (* | {ppat_desc = Ppat_variant (lbl, args); ppat_loc; ppat_attributes} -> *) + (* let newArgs = match args with *) + (* | (Some {ppat_desc = Ppat_tuple [{ppat_desc = Ppat_tuple _} as sp]}) as args -> *) + (* if forPrinter then args else Some sp *) + (* | Some {ppat_desc = Ppat_tuple [sp]} -> Some sp *) + (* | _ -> args *) + (* in *) + (* default_mapper.pat mapper {ppat_desc = Ppat_variant (lbl, newArgs); ppat_loc; ppat_attributes;} *) + | {ppat_desc=Ppat_construct(lid, args); + ppat_loc; + ppat_attributes} -> + let new_args = match args with + | (Some {ppat_desc = Ppat_tuple [{ppat_desc = Ppat_tuple _} as sp]}) as args -> + if forPrinter then args else Some sp + | Some {ppat_desc = Ppat_tuple [sp]} -> Some sp + | _ -> args in + default_mapper.pat mapper { ppat_desc=Ppat_construct(lid, new_args); ppat_loc; ppat_attributes;} + | x -> default_mapper.pat mapper x + end; + } + + let escapeTemplateLiteral s = + let len = String.length s in + let b = Buffer.create len in + let i = ref 0 in + while !i < len do + let c = (String.get [@doesNotRaise]) s !i in + if c = '`' then ( + Buffer.add_char b '\\'; + Buffer.add_char b '`'; + incr i; + ) else if c = '$' then ( + if !i + 1 < len then ( + let c2 = (String.get [@doesNotRaise]) s (!i + 1) in + if c2 = '{' then ( + Buffer.add_char b '\\'; + Buffer.add_char b '$'; + Buffer.add_char b '{'; + ) else ( + Buffer.add_char b c; + Buffer.add_char b c2; + ); + i := !i + 2; + ) else ( + Buffer.add_char b c; + incr i + ) + ) else if c = '\\' then ( + Buffer.add_char b '\\'; + Buffer.add_char b '\\'; + incr i; + ) else ( + Buffer.add_char b c; + incr i + ) + done; + Buffer.contents b + + let escapeStringContents s = + let len = String.length s in + let b = Buffer.create len in + + let i = ref 0 in + + while !i < len do + let c = String.unsafe_get s !i in + if c = '\\' then ( + incr i; + Buffer.add_char b c; + let c = String.unsafe_get s !i in + if !i < len then + let () = Buffer.add_char b c in + incr i + else + () + ) else if c = '"' then ( + Buffer.add_char b '\\'; + Buffer.add_char b c; + incr i; + ) else ( + Buffer.add_char b c; + incr i; + ) + done; + Buffer.contents b + + let looksLikeRecursiveTypeDeclaration typeDeclaration = + let open Parsetree in + let name = typeDeclaration.ptype_name.txt in + let rec checkKind kind = + match kind with + | Ptype_abstract | Ptype_open -> false + | Ptype_variant constructorDeclarations -> + List.exists checkConstructorDeclaration constructorDeclarations + | Ptype_record labelDeclarations -> + List.exists checkLabelDeclaration labelDeclarations + + and checkConstructorDeclaration constrDecl = + checkConstructorArguments constrDecl.pcd_args + || (match constrDecl.pcd_res with + | Some typexpr -> + checkTypExpr typexpr + | None -> false + ) + + and checkLabelDeclaration labelDeclaration = + checkTypExpr labelDeclaration.pld_type + + and checkConstructorArguments constrArg = + match constrArg with + | Pcstr_tuple types -> + List.exists checkTypExpr types + | Pcstr_record labelDeclarations -> + List.exists checkLabelDeclaration labelDeclarations + + and checkTypExpr typ = + match typ.ptyp_desc with + | Ptyp_any -> false + | Ptyp_var _ -> false + | Ptyp_object _ -> false + | Ptyp_class _ -> false + | Ptyp_package _ -> false + | Ptyp_extension _ -> false + | Ptyp_arrow (_lbl, typ1, typ2) -> + checkTypExpr typ1 || checkTypExpr typ2 + | Ptyp_tuple types -> + List.exists checkTypExpr types + | Ptyp_constr ({txt = longident}, types) -> + (match longident with + | Lident ident -> ident = name + | _ -> false + ) || + 
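(* Illustrative aside, not part of the original res_syntax sources: what the two escaping
 * helpers above do to made-up Reason string contents before they are re-emitted as ReScript
 * template and string literals. *)
let _escapeSketch =
  (escapeTemplateLiteral {|`code` costs ${price}|},
   (* expected: {|\`code\` costs \${price}|} *)
   escapeStringContents {|say "hi"|})
   (* expected: {|say \"hi\"|} *)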
List.exists checkTypExpr types + | Ptyp_alias (typ, _) -> checkTypExpr typ + | Ptyp_variant (rowFields, _, _) -> + List.exists checkRowFields rowFields + | Ptyp_poly (_, typ) -> + checkTypExpr typ + + and checkRowFields rowField = + match rowField with + | Rtag (_, _, _, types) -> + List.exists checkTypExpr types + | Rinherit typexpr -> + checkTypExpr typexpr + in + checkKind typeDeclaration.ptype_kind + + + let filterReasonRawLiteral attrs = + List.filter (fun attr -> + match attr with + | ({Location.txt = ("reason.raw_literal")}, _) -> false + | _ -> true + ) attrs + + let stringLiteralMapper stringData = + let isSameLocation l1 l2 = + let open Location in + l1.loc_start.pos_cnum == l2.loc_start.pos_cnum + in + let remainingStringData = stringData in + let open Ast_mapper in + { default_mapper with + expr = (fun mapper expr -> + match expr.pexp_desc with + | Pexp_constant (Pconst_string (_txt, None)) -> + begin match + List.find_opt (fun (_stringData, stringLoc) -> + isSameLocation stringLoc expr.pexp_loc + ) remainingStringData + with + | Some(stringData, _) -> + let stringData = + let attr = List.find_opt (fun attr -> match attr with + | ({Location.txt = ("reason.raw_literal")}, _) -> true + | _ -> false + ) expr.pexp_attributes in + match attr with + | Some (_, PStr [{pstr_desc = Pstr_eval ({pexp_desc = Pexp_constant (Pconst_string (raw, _))}, _)}]) -> + raw + | _ -> (String.sub [@doesNotRaise]) stringData 1 (String.length stringData - 2) + in + {expr with + pexp_attributes = filterReasonRawLiteral expr.pexp_attributes; + pexp_desc = Pexp_constant (Pconst_string (stringData, None)) + } + | None -> + default_mapper.expr mapper expr + end + | _ -> default_mapper.expr mapper expr + ) + } + + let normalize = + let open Ast_mapper in + { default_mapper with + attributes = (fun mapper attrs -> + attrs + |> List.filter (fun attr -> + match attr with + | ({Location.txt = ( + "reason.preserve_braces" + | "explicit_arity" + | "implicity_arity" + )}, _) -> false + | _ ->true + ) + |> default_mapper.attributes mapper + ); + pat = begin fun mapper p -> + match p.ppat_desc with + | Ppat_open ({txt = longidentOpen}, pattern) -> + let p = rewritePpatOpen longidentOpen pattern in + default_mapper.pat mapper p + | _ -> + default_mapper.pat mapper p + end; + expr = (fun mapper expr -> + match expr.pexp_desc with + | Pexp_constant (Pconst_string (txt, None)) -> + let raw = escapeStringContents txt in + let s = Parsetree.Pconst_string (raw, None) in + let expr = Ast_helper.Exp.constant + ~attrs:expr.pexp_attributes + ~loc:expr.pexp_loc s + in + expr + | Pexp_constant (Pconst_string (txt, tag)) -> + let s = Parsetree.Pconst_string ((escapeTemplateLiteral txt), tag) in + Ast_helper.Exp.constant + ~attrs:(mapper.attributes mapper expr.pexp_attributes) + ~loc:expr.pexp_loc + s + | Pexp_function cases -> + let loc = match (cases, List.rev cases) with + | (first::_), (last::_) -> + {first.pc_lhs.ppat_loc with loc_end = last.pc_rhs.pexp_loc.loc_end} + | _ -> Location.none + in + Ast_helper.Exp.fun_ ~loc + Asttypes.Nolabel None (Ast_helper.Pat.var (Location.mknoloc "x")) + (Ast_helper.Exp.match_ ~loc + (Ast_helper.Exp.ident (Location.mknoloc (Longident.Lident "x"))) + (default_mapper.cases mapper cases) + ) + | Pexp_apply ( + {pexp_desc = Pexp_ident {txt = Longident.Lident "!"}}, + [Asttypes.Nolabel, operand] + ) -> + (* turn `!foo` into `foo.contents` *) + Ast_helper.Exp.field ~loc:expr.pexp_loc ~attrs:expr.pexp_attributes + operand + (Location.mknoloc (Longident.Lident "contents")) + | Pexp_apply ( + {pexp_desc 
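(* Illustrative aside, not part of the original res_syntax sources: the shape of the `!foo`
 * rewrite handled above, written out with Ast_helper. `foo` is a placeholder identifier. *)
let _derefRewriteSketch =
  let foo = Ast_helper.Exp.ident (Location.mknoloc (Longident.Lident "foo")) in
  (* Reason input `!foo`, i.e. an application of the "!" identifier *)
  let before =
    Ast_helper.Exp.apply
      (Ast_helper.Exp.ident (Location.mknoloc (Longident.Lident "!")))
      [Asttypes.Nolabel, foo]
  in
  (* what the mapper above is expected to produce: the field access foo.contents *)
  let after =
    Ast_helper.Exp.field foo (Location.mknoloc (Longident.Lident "contents"))
  in
  (before, after)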
= Pexp_ident {txt = Longident.Lident "##"}} as op, + [Asttypes.Nolabel, lhs; Nolabel, ({pexp_desc = Pexp_constant (Pconst_string (txt, None))} as stringExpr)] + ) -> + let ident = Ast_helper.Exp.ident ~loc:stringExpr.pexp_loc + (Location.mkloc (Longident.Lident txt) stringExpr.pexp_loc) + in + Ast_helper.Exp.apply ~loc:expr.pexp_loc ~attrs:expr.pexp_attributes + op [Asttypes.Nolabel, lhs; Nolabel, ident] + | Pexp_apply ( + {pexp_desc = Pexp_ident {txt = Longident.Lident "@@"}}, + [Asttypes.Nolabel, callExpr; Nolabel, argExpr] + ) -> + Ast_helper.Exp.apply (mapper.expr mapper callExpr) [ + Asttypes.Nolabel, mapper.expr mapper argExpr + ] + | Pexp_apply ( + {pexp_desc = Pexp_ident {txt = Longident.Lident "@"}}, + [Nolabel, arg1; Nolabel, arg2] + ) -> + let listConcat = Longident.Ldot (Longident.Lident "List", "append") in + Ast_helper.Exp.apply + (Ast_helper.Exp.ident (Location.mknoloc listConcat)) + [Nolabel, mapper.expr mapper arg1; Nolabel, mapper.expr mapper arg2] + | Pexp_match ( + condition, + [ + {pc_lhs = {ppat_desc = Ppat_construct ({txt = Longident.Lident "true"}, None)}; pc_rhs = thenExpr }; + {pc_lhs = {ppat_desc = Ppat_construct ({txt = Longident.Lident "false"}, None)}; pc_rhs = elseExpr }; + ] + ) -> + let ternaryMarker = (Location.mknoloc "ns.ternary", Parsetree.PStr []) in + Ast_helper.Exp.ifthenelse + ~loc:expr.pexp_loc + ~attrs:(ternaryMarker::expr.pexp_attributes) + (default_mapper.expr mapper condition) + (default_mapper.expr mapper thenExpr) + (Some (default_mapper.expr mapper elseExpr)) + | _ -> default_mapper.expr mapper expr + ); + structure_item = begin fun mapper structureItem -> + match structureItem.pstr_desc with + (* heuristic: if we have multiple type declarations, mark them recursive *) + | Pstr_type (recFlag, typeDeclarations) -> + let flag = match typeDeclarations with + | [td] -> + if looksLikeRecursiveTypeDeclaration td then Asttypes.Recursive + else Asttypes.Nonrecursive + | _ -> recFlag + in + {structureItem with pstr_desc = Pstr_type ( + flag, + List.map (fun typeDeclaration -> + default_mapper.type_declaration mapper typeDeclaration + ) typeDeclarations + )} + | _ -> default_mapper.structure_item mapper structureItem + end; + signature_item = begin fun mapper signatureItem -> + match signatureItem.psig_desc with + (* heuristic: if we have multiple type declarations, mark them recursive *) + | Psig_type (recFlag, typeDeclarations) -> + let flag = match typeDeclarations with + | [td] -> + if looksLikeRecursiveTypeDeclaration td then Asttypes.Recursive + else Asttypes.Nonrecursive + | _ -> recFlag + in + {signatureItem with psig_desc = Psig_type ( + flag, + List.map (fun typeDeclaration -> + default_mapper.type_declaration mapper typeDeclaration + ) typeDeclarations + )} + | _ -> default_mapper.signature_item mapper signatureItem + end; + value_binding = begin fun mapper vb -> + match vb with + | { + pvb_pat = {ppat_desc = Ppat_var _} as pat; + pvb_expr = {pexp_loc = expr_loc; pexp_desc = Pexp_constraint (expr, typ) } + } when expr_loc.loc_ghost -> + (* let t: t = (expr : t) -> let t: t = expr *) + let typ = default_mapper.typ mapper typ in + let pat = default_mapper.pat mapper pat in + let expr = mapper.expr mapper expr in + let newPattern = Ast_helper.Pat.constraint_ + ~loc:{pat.ppat_loc with loc_end = typ.ptyp_loc.loc_end} + pat typ in + {vb with + pvb_pat = newPattern; + pvb_expr = expr; + pvb_attributes = default_mapper.attributes mapper vb.pvb_attributes} + | { + pvb_pat = {ppat_desc = Ppat_constraint (pat, {ptyp_desc = Ptyp_poly ([], _)})} ; + 
pvb_expr = {pexp_loc = expr_loc; pexp_desc = Pexp_constraint (expr, typ) } + } when expr_loc.loc_ghost -> + (* let t: . t = (expr : t) -> let t: t = expr *) + let typ = default_mapper.typ mapper typ in + let pat = default_mapper.pat mapper pat in + let expr = mapper.expr mapper expr in + let newPattern = Ast_helper.Pat.constraint_ + ~loc:{pat.ppat_loc with loc_end = typ.ptyp_loc.loc_end} + pat typ in + {vb with + pvb_pat = newPattern; + pvb_expr = expr; + pvb_attributes = default_mapper.attributes mapper vb.pvb_attributes} + | _ -> default_mapper.value_binding mapper vb + end; + } + + let normalizeReasonArityStructure ~forPrinter s = + let mapper = makeReasonArityMapper ~forPrinter in + mapper.Ast_mapper.structure mapper s + + let normalizeReasonAritySignature ~forPrinter s = + let mapper = makeReasonArityMapper ~forPrinter in + mapper.Ast_mapper.signature mapper s + + let structure s = normalize.Ast_mapper.structure normalize s + let signature s = normalize.Ast_mapper.signature normalize s + + let replaceStringLiteralStructure stringData structure = + let mapper = stringLiteralMapper stringData in + mapper.Ast_mapper.structure mapper structure + + let replaceStringLiteralSignature stringData signature = + let mapper = stringLiteralMapper stringData in + mapper.Ast_mapper.signature mapper signature +end + +module OcamlParser = Parser + +module Parser = struct + type mode = ParseForTypeChecker | Default + + type regionStatus = Report | Silent + + type t = { + mode: mode; + mutable scanner: Scanner.t; + mutable token: Token.t; + mutable startPos: Lexing.position; + mutable endPos: Lexing.position; + mutable prevEndPos: Lexing.position; + mutable breadcrumbs: (Grammar.t * Lexing.position) list; + mutable errors: Reporting.parseError list; + mutable diagnostics: Diagnostics.t list; + mutable comments: Comment.t list; + mutable regions: regionStatus ref list; + } + + let err ?startPos ?endPos p error = + let d = Diagnostics.make + ~filename:p.scanner.filename + ~startPos:(match startPos with | Some pos -> pos | None -> p.startPos) + ~endPos:(match endPos with | Some pos -> pos | None -> p.endPos) + error + in + try + if (!(List.hd p.regions) = Report) then ( + p.diagnostics <- d::p.diagnostics; + List.hd p.regions := Silent + ) + with Failure _ -> () + + let beginRegion p = + p.regions <- ref Report :: p.regions + let endRegion p = + try p.regions <- List.tl p.regions with Failure _ -> () + + (* Advance to the next non-comment token and store any encountered comment + * in the parser's state. 
Every comment contains the end position of it's + * previous token to facilite comment interleaving *) + let rec next ?prevEndPos p = + let prevEndPos = match prevEndPos with Some pos -> pos | None -> p.endPos in + let (startPos, endPos, token) = Scanner.scan p.scanner in + match token with + | Comment c -> + Comment.setPrevTokEndPos c p.endPos; + p.comments <- c::p.comments; + p.prevEndPos <- p.endPos; + p.endPos <- endPos; + next ~prevEndPos p + | _ -> + p.token <- token; + (* p.prevEndPos <- prevEndPos; *) + p.prevEndPos <- prevEndPos; + p.startPos <- startPos; + p.endPos <- endPos + + let checkProgress ~prevEndPos ~result p = + if p.endPos == prevEndPos + then None + else Some result + + let make ?(mode=ParseForTypeChecker) src filename = + let scanner = Scanner.make (Bytes.of_string src) filename in + let parserState = { + mode; + scanner; + token = Token.Eof; + startPos = Lexing.dummy_pos; + prevEndPos = Lexing.dummy_pos; + endPos = Lexing.dummy_pos; + breadcrumbs = []; + errors = []; + diagnostics = []; + comments = []; + regions = [ref Report]; + } in + parserState.scanner.err <- (fun ~startPos ~endPos error -> + let diagnostic = Diagnostics.make + ~filename + ~startPos + ~endPos + error + in + parserState.diagnostics <- diagnostic::parserState.diagnostics + ); + next parserState; + parserState + + let leaveBreadcrumb p circumstance = + let crumb = (circumstance, p.startPos) in + p.breadcrumbs <- crumb::p.breadcrumbs + + let eatBreadcrumb p = + match p.breadcrumbs with + | [] -> () + | _::crumbs -> p.breadcrumbs <- crumbs + + let optional p token = + if p.token = token then + let () = next p in true + else + false + + let expect ?grammar token p = + if p.token = token then + next p + else + let error = Diagnostics.expected ?grammar p.prevEndPos token in + err ~startPos:p.prevEndPos p error + + (* Don't use immutable copies here, it trashes certain heuristics + * in the ocaml compiler, resulting in massive slowdowns of the parser *) + let lookahead p callback = + let err = p.scanner.err in + let ch = p.scanner.ch in + let offset = p.scanner.offset in + let rdOffset = p.scanner.rdOffset in + let lineOffset = p.scanner.lineOffset in + let lnum = p.scanner.lnum in + let mode = p.scanner.mode in + let token = p.token in + let startPos = p.startPos in + let endPos = p.endPos in + let prevEndPos = p.prevEndPos in + let breadcrumbs = p.breadcrumbs in + let errors = p.errors in + let diagnostics = p.diagnostics in + let comments = p.comments in + + let res = callback p in + + p.scanner.err <- err; + p.scanner.ch <- ch; + p.scanner.offset <- offset; + p.scanner.rdOffset <- rdOffset; + p.scanner.lineOffset <- lineOffset; + p.scanner.lnum <- lnum; + p.scanner.mode <- mode; + p.token <- token; + p.startPos <- startPos; + p.endPos <- endPos; + p.prevEndPos <- prevEndPos; + p.breadcrumbs <- breadcrumbs; + p.errors <- errors; + p.diagnostics <- diagnostics; + p.comments <- comments; + + res +end + +module NapkinScript = struct + let mkLoc startLoc endLoc = Location.{ + loc_start = startLoc; + loc_end = endLoc; + loc_ghost = false; + } + + + module Recover = struct + type action = unit option (* None is abort, Some () is retry *) + + let defaultExpr () = + let id = Location.mknoloc "napkinscript.exprhole" in + Ast_helper.Exp.mk (Pexp_extension (id, PStr [])) + + let defaultType () = + let id = Location.mknoloc "napkinscript.typehole" in + Ast_helper.Typ.extension (id, PStr []) + + let defaultPattern () = + let id = Location.mknoloc "napkinscript.patternhole" in + Ast_helper.Pat.extension (id, PStr 
[]) + (* Ast_helper.Pat.any () *) + + let defaultModuleExpr () = Ast_helper.Mod.structure [] + let defaultModuleType () = Ast_helper.Mty.signature [] + + let recoverEqualGreater p = + Parser.expect EqualGreater p; + match p.Parser.token with + | MinusGreater -> Parser.next p + | _ -> () + + let shouldAbortListParse p = + let rec check breadcrumbs = + match breadcrumbs with + | [] -> false + | (grammar, _)::rest -> + if Grammar.isPartOfList grammar p.Parser.token then + true + else + check rest + in + check p.breadcrumbs + end + + module ErrorMessages = struct + let listPatternSpread = "List pattern matches only supports one `...` spread, at the end. +Explanation: a list spread at the tail is efficient, but a spread in the middle would create new list[s]; out of performance concern, our pattern matching currently guarantees to never create new intermediate data." + + let recordPatternSpread = "Record's `...` spread is not supported in pattern matches. +Explanation: you can't collect a subset of a record's field into its own record, since a record needs an explicit declaration and that subset wouldn't have one. +Solution: you need to pull out each field you want explicitly." + + let recordPatternUnderscore = "Record patterns only support one `_`, at the end." + [@@live] + + let arrayPatternSpread = "Array's `...` spread is not supported in pattern matches. +Explanation: such spread would create a subarray; out of performance concern, our pattern matching currently guarantees to never create new intermediate data. +Solution: if it's to validate the first few elements, use a `when` clause + Array size check + `get` checks on the current pattern. If it's to obtain a subarray, use `Array.sub` or `Belt.Array.slice`." + + let arrayExprSpread = "Arrays can't use the `...` spread currently. Please use `concat` or other Array helpers." + + let recordExprSpread = "Records can only have one `...` spread, at the beginning. +Explanation: since records have a known, fixed shape, a spread like `{a, ...b}` wouldn't make sense, as `b` would override every field of `a` anyway." + + let listExprSpread = "Lists can only have one `...` spread, and at the end. +Explanation: lists are singly-linked list, where a node contains a value and points to the next node. `list[a, ...bc]` efficiently creates a new item and links `bc` as its next nodes. `[...bc, a]` would be expensive, as it'd need to traverse `bc` and prepend each item to `a` one by one. We therefore disallow such syntax sugar. +Solution: directly use `concat`." + + let variantIdent = "A polymorphic variant (e.g. #id) must start with an alphabetical letter." 
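+    (* Each of these messages is attached to a Diagnostics entry at the offending
+     * location via Parser.err. E.g. a pattern such as `list[...rest, x]` reports
+     * listPatternSpread, and a spread inside an array pattern reports
+     * arrayPatternSpread. *)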
+end + + + let jsxAttr = (Location.mknoloc "JSX", Parsetree.PStr []) + let uncurryAttr = (Location.mknoloc "bs", Parsetree.PStr []) + let ternaryAttr = (Location.mknoloc "ns.ternary", Parsetree.PStr []) + let makeBracesAttr loc = (Location.mkloc "ns.braces" loc, Parsetree.PStr []) + + type typDefOrExt = + | TypeDef of {recFlag: Asttypes.rec_flag; types: Parsetree.type_declaration list} + | TypeExt of Parsetree.type_extension + + type labelledParameter = + | TermParameter of + {uncurried: bool; attrs: Parsetree.attributes; label: Asttypes.arg_label; expr: Parsetree.expression option; + pat: Parsetree.pattern; pos: Lexing.position} + | TypeParameter of {uncurried: bool; attrs: Parsetree.attributes; locs: string Location.loc list; pos: Lexing.position} + + type recordPatternItem = + | PatUnderscore + | PatField of (Ast_helper.lid * Parsetree.pattern) + + type context = + | OrdinaryExpr + | TernaryTrueBranchExpr + | WhenExpr + + let getClosingToken = function + | Token.Lparen -> Token.Rparen + | Lbrace -> Rbrace + | Lbracket -> Rbracket + | _ -> assert false + + let rec goToClosing closingToken state = + match (state.Parser.token, closingToken) with + | (Rparen, Token.Rparen) | (Rbrace, Rbrace) | (Rbracket, Rbracket) -> + Parser.next state; + () + | (Token.Lbracket | Lparen | Lbrace) as t, _ -> + Parser.next state; + goToClosing (getClosingToken t) state; + goToClosing closingToken state + | ((Rparen | Token.Rbrace | Rbracket | Eof), _) -> + () (* TODO: how do report errors here? *) + | _ -> + Parser.next state; + goToClosing closingToken state + + (* Madness *) + let isEs6ArrowExpression ~inTernary p = + Parser.lookahead p (fun state -> + match state.Parser.token with + | Lident _ | List | Underscore -> + Parser.next state; + begin match state.Parser.token with + (* Don't think that this valid + * Imagine: let x = (a: int) + * This is a parenthesized expression with a type constraint, wait for + * the arrow *) + (* | Colon when not inTernary -> true *) + | EqualGreater -> true + | _ -> false + end + | Lparen -> + let prevEndPos = state.prevEndPos in + Parser.next state; + begin match state.token with + | Rparen -> + Parser.next state; + begin match state.Parser.token with + | Colon when not inTernary -> true + | EqualGreater -> true + | _ -> false + end + | Dot (* uncurried *) -> true + | Tilde -> true + | Backtick -> false (* (` always indicates the start of an expr, can't be es6 parameter *) + | _ -> + goToClosing Rparen state; + begin match state.Parser.token with + | EqualGreater -> true + (* | Lbrace TODO: detect missing =>, is this possible? *) + | Colon when not inTernary -> true + | Rparen -> + (* imagine having something as : + * switch colour { + * | Red + * when l == l' + * || (&Clflags.classic && (l == Nolabel && !is_optional(l'))) => (t1, t2) + * We'll arrive at the outer rparen just before the =>. + * This is not an es6 arrow. 
+ * *) + false + | _ -> + Parser.next state; + (* error recovery, peek at the next token, + * (elements, providerId] => { + * in the example above, we have an unbalanced ] here + *) + begin match state.Parser.token with + | EqualGreater when state.startPos.pos_lnum == prevEndPos.pos_lnum -> true + | _ -> false + end + end + end + | _ -> false) + + + let isEs6ArrowFunctor p = + Parser.lookahead p (fun state -> + match state.Parser.token with + (* | Uident _ | Underscore -> *) + (* Parser.next state; *) + (* begin match state.Parser.token with *) + (* | EqualGreater -> true *) + (* | _ -> false *) + (* end *) + | Lparen -> + Parser.next state; + begin match state.token with + | Rparen -> + Parser.next state; + begin match state.token with + | Colon | EqualGreater -> true + | _ -> false + end + | _ -> + goToClosing Rparen state; + begin match state.Parser.token with + | EqualGreater | Lbrace -> true + | Colon -> true + | _ -> false + end + end + | _ -> false + ) + + let isEs6ArrowType p = + Parser.lookahead p (fun state -> + match state.Parser.token with + | Lparen -> + Parser.next state; + begin match state.Parser.token with + | Rparen -> + Parser.next state; + begin match state.Parser.token with + | EqualGreater -> true + | _ -> false + end + | Tilde | Dot -> true + | _ -> + goToClosing Rparen state; + begin match state.Parser.token with + | EqualGreater -> true + | _ -> false + end + end + | Tilde -> true + | _ -> false + ) + + let buildLongident words = match List.rev words with + | [] -> assert false + | hd::tl -> List.fold_left (fun p s -> Longident.Ldot (p, s)) (Lident hd) tl + + let makeInfixOperator p token startPos endPos = + let stringifiedToken = + if token = Token.MinusGreater then "|." + else if token = Token.PlusPlus then "^" + else if token = Token.BangEqual then "<>" + else if token = Token.BangEqualEqual then "!=" + else if token = Token.Equal then ( + (* TODO: could have a totally different meaning like x->fooSet(y)*) + Parser.err ~startPos ~endPos p ( + Diagnostics.message "Did you mean `==` here?" 
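+      (* A lone `=` is not a valid infix operator here; we still return "=" below so
+       * parsing can continue, but the diagnostic above nudges the user towards `==`.
+       * The surrounding chain maps surface tokens to their OCaml counterparts,
+       * e.g. `->` to `|.`, `++` to `^`, `!=` to `<>` and `!==` to `!=`. *)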
+ ); + "=" + ) else if token = Token.EqualEqual then "=" + else if token = Token.EqualEqualEqual then "==" + else Token.toString token + in + let loc = mkLoc startPos endPos in + let operator = Location.mkloc + (Longident.Lident stringifiedToken) loc + in + Ast_helper.Exp.ident ~loc operator + + let negateString s = + if String.length s > 0 && (s.[0] [@doesNotRaise]) = '-' + then (String.sub [@doesNotRaise]) s 1 (String.length s - 1) + else "-" ^ s + + let makeUnaryExpr startPos tokenEnd token operand = + match token, operand.Parsetree.pexp_desc with + | (Token.Plus | PlusDot), Pexp_constant((Pconst_integer _ | Pconst_float _)) -> + operand + | Minus, Pexp_constant(Pconst_integer (n,m)) -> + {operand with pexp_desc = Pexp_constant(Pconst_integer (negateString n,m))} + | (Minus | MinusDot), Pexp_constant(Pconst_float (n,m)) -> + {operand with pexp_desc = Pexp_constant(Pconst_float (negateString n,m))} + | (Token.Plus | PlusDot | Minus | MinusDot ), _ -> + let tokenLoc = mkLoc startPos tokenEnd in + let operator = "~" ^ Token.toString token in + Ast_helper.Exp.apply + ~loc:(mkLoc startPos operand.Parsetree.pexp_loc.loc_end) + (Ast_helper.Exp.ident ~loc:tokenLoc + (Location.mkloc (Longident.Lident operator) tokenLoc)) + [Nolabel, operand] + | Token.Bang, _ -> + let tokenLoc = mkLoc startPos tokenEnd in + Ast_helper.Exp.apply + ~loc:(mkLoc startPos operand.Parsetree.pexp_loc.loc_end) + (Ast_helper.Exp.ident ~loc:tokenLoc + (Location.mkloc (Longident.Lident "not") tokenLoc)) + [Nolabel, operand] + | _ -> + operand + + let makeListExpression loc seq extOpt = + let rec handleSeq = function + | [] -> + begin match extOpt with + | Some ext -> ext + | None -> + let loc = {loc with Location.loc_ghost = true} in + let nil = Location.mkloc (Longident.Lident "[]") loc in + Ast_helper.Exp.construct ~loc nil None + end + | e1 :: el -> + let exp_el = handleSeq el in + let loc = mkLoc + e1.Parsetree.pexp_loc.Location.loc_start + exp_el.pexp_loc.loc_end + in + let arg = Ast_helper.Exp.tuple ~loc [e1; exp_el] in + Ast_helper.Exp.construct ~loc + (Location.mkloc (Longident.Lident "::") loc) + (Some arg) + in + let expr = handleSeq seq in + {expr with pexp_loc = loc} + + let makeListPattern loc seq ext_opt = + let rec handle_seq = function + [] -> + let base_case = match ext_opt with + | Some ext -> + ext + | None -> + let loc = { loc with Location.loc_ghost = true} in + let nil = { Location.txt = Longident.Lident "[]"; loc } in + Ast_helper.Pat.construct ~loc nil None + in + base_case + | p1 :: pl -> + let pat_pl = handle_seq pl in + let loc = + mkLoc p1.Parsetree.ppat_loc.loc_start pat_pl.ppat_loc.loc_end in + let arg = Ast_helper.Pat.mk ~loc (Ppat_tuple [p1; pat_pl]) in + Ast_helper.Pat.mk ~loc (Ppat_construct(Location.mkloc (Longident.Lident "::") loc, Some arg)) + in + handle_seq seq + + + (* {"foo": bar} -> Js.t({. foo: bar}) + * {.. "foo": bar} -> Js.t({.. 
foo: bar}) + * {..} -> Js.t({..}) *) + let makeBsObjType ~attrs ~loc ~closed rows = + let obj = Ast_helper.Typ.object_ ~loc rows closed in + let jsDotTCtor = + Location.mkloc (Longident.Ldot (Longident.Lident "Js", "t")) loc + in + Ast_helper.Typ.constr ~loc ~attrs jsDotTCtor [obj] + + (* TODO: diagnostic reporting *) + let lidentOfPath longident = + match Longident.flatten longident |> List.rev with + | [] -> "" + | ident::_ -> ident + + let makeNewtypes ~attrs ~loc newtypes exp = + let expr = List.fold_right (fun newtype exp -> + Ast_helper.Exp.mk ~loc (Pexp_newtype (newtype, exp)) + ) newtypes exp + in {expr with pexp_attributes = attrs} + + (* locally abstract types syntax sugar + * Transforms + * let f: type t u v. = (foo : list) => ... + * into + * let f = (type t u v. foo : list) => ... + *) + let wrapTypeAnnotation ~loc newtypes core_type body = + let exp = makeNewtypes ~attrs:[] ~loc newtypes + (Ast_helper.Exp.constraint_ ~loc body core_type) + in + let typ = Ast_helper.Typ.poly ~loc newtypes + (Ast_helper.Typ.varify_constructors newtypes core_type) + in + (exp, typ) + + (** + * process the occurrence of _ in the arguments of a function application + * replace _ with a new variable, currently __x, in the arguments + * return a wrapping function that wraps ((__x) => ...) around an expression + * e.g. foo(_, 3) becomes (__x) => foo(__x, 3) + *) + let processUnderscoreApplication args = + let open Parsetree in + let exp_question = ref None in + let hidden_var = "__x" in + let check_arg ((lab, exp) as arg) = + match exp.pexp_desc with + | Pexp_ident ({ txt = Lident "_"} as id) -> + let new_id = Location.mkloc (Longident.Lident hidden_var) id.loc in + let new_exp = Ast_helper.Exp.mk (Pexp_ident new_id) ~loc:exp.pexp_loc in + exp_question := Some new_exp; + (lab, new_exp) + | _ -> + arg + in + let args = List.map check_arg args in + let wrap exp_apply = + match !exp_question with + | Some {pexp_loc=loc} -> + let pattern = Ast_helper.Pat.mk (Ppat_var (Location.mkloc hidden_var loc)) ~loc in + Ast_helper.Exp.mk (Pexp_fun (Nolabel, None, pattern, exp_apply)) ~loc + | None -> + exp_apply + in + (args, wrap) + + let rec parseLident p = + let recoverLident p = + if ( + Token.isKeyword p.Parser.token && + p.Parser.prevEndPos.pos_lnum == p.startPos.pos_lnum + ) + then ( + Parser.err p (Diagnostics.lident p.Parser.token); + Parser.next p; + None + ) else ( + let rec loop p = + if not (Recover.shouldAbortListParse p) + then begin + Parser.next p; + loop p + end + in + Parser.next p; + loop p; + match p.Parser.token with + | Lident _ -> Some () + | _ -> None + ) + in + let startPos = p.Parser.startPos in + match p.Parser.token with + | Lident ident -> + Parser.next p; + let loc = mkLoc startPos p.prevEndPos in + (ident, loc) + | List -> + Parser.next p; + let loc = mkLoc startPos p.prevEndPos in + ("list", loc) + | _ -> + begin match recoverLident p with + | Some () -> + parseLident p + | None -> + ("_", mkLoc startPos p.prevEndPos) + end + + let parseIdent ~msg ~startPos p = + match p.Parser.token with + | Lident ident + | Uident ident -> + Parser.next p; + let loc = mkLoc startPos p.prevEndPos in + (ident, loc) + | List -> + Parser.next p; + let loc = mkLoc startPos p.prevEndPos in + ("list", loc) + | _token -> + Parser.err p (Diagnostics.message msg); + Parser.next p; + ("_", mkLoc startPos p.prevEndPos) + + let parseHashIdent ~startPos p = + Parser.expect Hash p; + parseIdent ~startPos ~msg:ErrorMessages.variantIdent p + + (* Ldot (Ldot (Lident "Foo", "Bar"), "baz") *) + let parseValuePath p = 
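+    (* A value path is zero or more module prefixes ending in a lowercase ident and is
+     * turned into the Longident shown above. Every Uident segment must be followed by
+     * a `.`; the `list` token is accepted and treated as the identifier "list". *)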
+ let startPos = p.Parser.startPos in + let rec aux p path = + match p.Parser.token with + | List -> Longident.Ldot(path, "list") + | Lident ident -> Longident.Ldot(path, ident) + | Uident uident -> + Parser.next p; + Parser.expect Dot p; + aux p (Ldot (path, uident)) + | token -> + Parser.err p (Diagnostics.unexpected token p.breadcrumbs); + Longident.Lident "_" + in + let ident = match p.Parser.token with + | List -> Longident.Lident "list" + | Lident ident -> Longident.Lident ident + | Uident ident -> + Parser.next p; + Parser.expect Dot p; + aux p (Lident ident) + | token -> + Parser.err p (Diagnostics.unexpected token p.breadcrumbs); + Longident.Lident "_" + in + Parser.next p; + Location.mkloc ident (mkLoc startPos p.prevEndPos) + + let parseValuePathTail p startPos ident = + let rec loop p path = + match p.Parser.token with + | Lident ident -> + Parser.next p; + Location.mkloc (Longident.Ldot(path, ident)) (mkLoc startPos p.prevEndPos) + | List -> + Parser.next p; + Location.mkloc (Longident.Ldot(path, "list")) (mkLoc startPos p.prevEndPos) + | Uident ident -> + Parser.next p; + Parser.expect Dot p; + loop p (Longident.Ldot (path, ident)) + | token -> + Parser.err p (Diagnostics.unexpected token p.breadcrumbs); + Location.mknoloc path + in + loop p ident + + let parseModuleLongIdentTail ~lowercase p startPos ident = + let rec loop p acc = + match p.Parser.token with + | List when lowercase -> + Parser.next p; + let lident = (Longident.Ldot (acc, "list")) in + Location.mkloc lident (mkLoc startPos p.prevEndPos) + | Lident ident when lowercase -> + Parser.next p; + let lident = (Longident.Ldot (acc, ident)) in + Location.mkloc lident (mkLoc startPos p.prevEndPos) + | Uident ident -> + Parser.next p; + let endPos = p.prevEndPos in + let lident = (Longident.Ldot (acc, ident)) in + begin match p.Parser.token with + | Dot -> + Parser.next p; + loop p lident + | _ -> Location.mkloc lident (mkLoc startPos endPos) + end + | t -> + Parser.err p (Diagnostics.uident t); + Location.mkloc acc (mkLoc startPos p.prevEndPos) + in + loop p ident + + (* Parses module identifiers: + Foo + Foo.Bar *) + let parseModuleLongIdent ~lowercase p = + (* Parser.leaveBreadcrumb p Reporting.ModuleLongIdent; *) + let startPos = p.Parser.startPos in + let moduleIdent = match p.Parser.token with + | List when lowercase -> + let loc = mkLoc startPos p.endPos in + Parser.next p; + Location.mkloc (Longident.Lident "list") loc + | Lident ident when lowercase -> + let loc = mkLoc startPos p.endPos in + let lident = Longident.Lident ident in + Parser.next p; + Location.mkloc lident loc + | Uident ident -> + let lident = Longident.Lident ident in + let endPos = p.endPos in + Parser.next p; + begin match p.Parser.token with + | Dot -> + Parser.next p; + parseModuleLongIdentTail ~lowercase p startPos lident + | _ -> Location.mkloc lident (mkLoc startPos endPos) + end + | t -> + Parser.err p (Diagnostics.uident t); + Location.mkloc (Longident.Lident "_") (mkLoc startPos p.prevEndPos) + in + (* Parser.eatBreadcrumb p; *) + moduleIdent + + (* `window.location` or `Math` or `Foo.Bar` *) + let parseIdentPath p = + let rec loop p acc = + match p.Parser.token with + | Uident ident | Lident ident -> + Parser.next p; + let lident = (Longident.Ldot (acc, ident)) in + begin match p.Parser.token with + | Dot -> + Parser.next p; + loop p lident + | _ -> lident + end + | _t -> acc + in + match p.Parser.token with + | Lident ident | Uident ident -> + Parser.next p; + begin match p.Parser.token with + | Dot -> + Parser.next p; + loop p 
(Longident.Lident ident) + | _ -> Longident.Lident ident + end + | _ -> + Longident.Lident "_" + + let verifyJsxOpeningClosingName p nameExpr = + let closing = match p.Parser.token with + | Lident lident -> Parser.next p; Longident.Lident lident + | Uident _ -> + (parseModuleLongIdent ~lowercase:false p).txt + | _ -> Longident.Lident "" + in + match nameExpr.Parsetree.pexp_desc with + | Pexp_ident openingIdent -> + let opening = + let withoutCreateElement = + Longident.flatten openingIdent.txt + |> List.filter (fun s -> s <> "createElement") + in + match (Longident.unflatten withoutCreateElement) with + | Some li -> li + | None -> Longident.Lident "" + in + opening = closing + | _ -> assert false + + let string_of_pexp_ident nameExpr = + match nameExpr.Parsetree.pexp_desc with + | Pexp_ident openingIdent -> + Longident.flatten openingIdent.txt + |> List.filter (fun s -> s <> "createElement") + |> String.concat "." + | _ -> "" + + (* open-def ::= + * | open module-path + * | open! module-path *) + let parseOpenDescription ~attrs p = + Parser.leaveBreadcrumb p Grammar.OpenDescription; + let startPos = p.Parser.startPos in + Parser.expect Open p; + let override = if Parser.optional p Token.Bang then + Asttypes.Override + else + Asttypes.Fresh + in + let modident = parseModuleLongIdent ~lowercase:false p in + let loc = mkLoc startPos p.prevEndPos in + Parser.eatBreadcrumb p; + Ast_helper.Opn.mk ~loc ~attrs ~override modident + + let hexValue x = + match x with + | '0' .. '9' -> + (Char.code x) - 48 + | 'A' .. 'Z' -> + (Char.code x) - 55 + | 'a' .. 'z' -> + (Char.code x) - 97 + | _ -> 16 + + let parseStringLiteral s = + let len = String.length s in + let b = Buffer.create (String.length s) in + + let rec loop i = + if i = len then + () + else + let c = String.unsafe_get s i in + match c with + | '\\' as c -> + let nextIx = i + 1 in + if nextIx < len then + let nextChar = String.unsafe_get s nextIx in + begin match nextChar with + | 'n' -> + Buffer.add_char b '\010'; + loop (nextIx + 1) + | 'r' -> + Buffer.add_char b '\013'; + loop (nextIx + 1) + | 'b' -> + Buffer.add_char b '\008'; + loop (nextIx + 1) + | 't' -> + Buffer.add_char b '\009'; + loop (nextIx + 1) + | '\\' as c -> + Buffer.add_char b c; + loop (nextIx + 1) + | ' ' as c -> + Buffer.add_char b c; + loop (nextIx + 1) + | '\'' as c -> + Buffer.add_char b c; + loop (nextIx + 1) + | '\"' as c -> + Buffer.add_char b c; + loop (nextIx + 1) + | '0' .. 
'9' -> + if nextIx + 2 < len then + let c0 = nextChar in + let c1 = (String.unsafe_get s (nextIx + 1)) in + let c2 = (String.unsafe_get s (nextIx + 2)) in + let c = + 100 * (Char.code c0 - 48) + + 10 * (Char.code c1 - 48) + + (Char.code c2 - 48) + in + if (c < 0 || c > 255) then ( + Buffer.add_char b '\\'; + Buffer.add_char b c0; + Buffer.add_char b c1; + Buffer.add_char b c2; + loop (nextIx + 3) + ) else ( + Buffer.add_char b (Char.unsafe_chr c); + loop (nextIx + 3) + ) + else ( + Buffer.add_char b '\\'; + Buffer.add_char b nextChar; + loop (nextIx + 1) + ) + | 'o' -> + if nextIx + 3 < len then + let c0 = (String.unsafe_get s (nextIx + 1)) in + let c1 = (String.unsafe_get s (nextIx + 2)) in + let c2 = (String.unsafe_get s (nextIx + 3)) in + let c = + 64 * (Char.code c0 - 48) + + 8 * (Char.code c1 - 48) + + (Char.code c2 - 48) + in + if (c < 0 || c > 255) then ( + Buffer.add_char b '\\'; + Buffer.add_char b '0'; + Buffer.add_char b c0; + Buffer.add_char b c1; + Buffer.add_char b c2; + loop (nextIx + 4) + ) else ( + Buffer.add_char b (Char.unsafe_chr c); + loop (nextIx + 4) + ) + else ( + Buffer.add_char b '\\'; + Buffer.add_char b nextChar; + loop (nextIx + 1) + ) + | 'x' as c -> + if nextIx + 2 < len then + let c0 = (String.unsafe_get s (nextIx + 1)) in + let c1 = (String.unsafe_get s (nextIx + 2)) in + let c = (16 * (hexValue c0)) + (hexValue c1) in + if (c < 0 || c > 255) then ( + Buffer.add_char b '\\'; + Buffer.add_char b 'x'; + Buffer.add_char b c0; + Buffer.add_char b c1; + loop (nextIx + 3) + ) else ( + Buffer.add_char b (Char.unsafe_chr c); + loop (nextIx + 3) + ) + else ( + Buffer.add_char b '\\'; + Buffer.add_char b c; + loop (nextIx + 2) + ) + | _ -> + Buffer.add_char b c; + Buffer.add_char b nextChar; + loop (nextIx + 1) + end + else ( + Buffer.add_char b c; + () + ) + | c -> + Buffer.add_char b c; + loop (i + 1) + in + loop 0; + Buffer.contents b + + let parseTemplateStringLiteral s = + let len = String.length s in + let b = Buffer.create len in + + let rec loop i = + if i < len then + let c = String.unsafe_get s i in + match c with + | '\\' as c -> + if i + 1 < len then + let nextChar = String.unsafe_get s (i + 1) in + begin match nextChar with + | '\\' as c -> + Buffer.add_char b c; + loop (i + 2) + | '$' as c -> + Buffer.add_char b c; + loop (i + 2) + | '`' as c -> + Buffer.add_char b c; + loop (i + 2) + | c -> + Buffer.add_char b '\\'; + Buffer.add_char b c; + loop (i + 2) + end + else ( + Buffer.add_char b c + ) + + | c -> + Buffer.add_char b c; + loop (i + 1) + + else + () + in + loop 0; + Buffer.contents b + + (* constant ::= integer-literal *) + (* ∣ float-literal *) + (* ∣ string-literal *) + let parseConstant p = + let isNegative = match p.Parser.token with + | Token.Minus -> Parser.next p; true + | Plus -> Parser.next p; false + | _ -> false + in + let constant = match p.Parser.token with + | Int {i; suffix} -> + let intTxt = if isNegative then "-" ^ i else i in + Parsetree.Pconst_integer (intTxt, suffix) + | Float {f; suffix} -> + let floatTxt = if isNegative then "-" ^ f else f in + Parsetree.Pconst_float (floatTxt, suffix) + | String s -> + let txt = if p.mode = ParseForTypeChecker then + parseStringLiteral s + else + s + in + Pconst_string(txt, None) + | Character c -> Pconst_char c + | token -> + Parser.err p (Diagnostics.unexpected token p.breadcrumbs); + Pconst_string("", None) + in + Parser.next p; + constant + + let parseCommaDelimitedRegion p ~grammar ~closing ~f = + Parser.leaveBreadcrumb p grammar; + let rec loop nodes = + match f p with + | Some node 
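+        (* an item was parsed: `,` continues the region, the closing token or Eof ends
+         * it, and anything else reports a missing comma (unless we're at a recovery
+         * point) and keeps scanning *)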
-> + begin match p.Parser.token with + | Comma -> + Parser.next p; + loop (node::nodes) + | token when token = closing || token = Eof -> + List.rev (node::nodes) + | _ -> + if not (p.token = Eof || p.token = closing || Recover.shouldAbortListParse p) then + Parser.expect Comma p; + if p.token = Semicolon then Parser.next p; + loop (node::nodes) + end + | None -> + if p.token = Eof || p.token = closing || Recover.shouldAbortListParse p then + List.rev nodes + else ( + Parser.err p (Diagnostics.unexpected p.token p.breadcrumbs); + Parser.next p; + loop nodes + ); + in + let nodes = loop [] in + Parser.eatBreadcrumb p; + nodes + + let parseCommaDelimitedReversedList p ~grammar ~closing ~f = + Parser.leaveBreadcrumb p grammar; + let rec loop nodes = + match f p with + | Some node -> + begin match p.Parser.token with + | Comma -> + Parser.next p; + loop (node::nodes) + | token when token = closing || token = Eof -> + (node::nodes) + | _ -> + if not (p.token = Eof || p.token = closing || Recover.shouldAbortListParse p) then + Parser.expect Comma p; + if p.token = Semicolon then Parser.next p; + loop (node::nodes) + end + | None -> + if p.token = Eof || p.token = closing || Recover.shouldAbortListParse p then + nodes + else ( + Parser.err p (Diagnostics.unexpected p.token p.breadcrumbs); + Parser.next p; + loop nodes + ); + in + let nodes = loop [] in + Parser.eatBreadcrumb p; + nodes + + let parseDelimitedRegion p ~grammar ~closing ~f = + Parser.leaveBreadcrumb p grammar; + let rec loop nodes = + match f p with + | Some node -> + loop (node::nodes) + | None -> + if ( + p.Parser.token = Token.Eof || + p.token = closing || + Recover.shouldAbortListParse p + ) then + List.rev nodes + else ( + Parser.err p (Diagnostics.unexpected p.token p.breadcrumbs); + Parser.next p; + loop nodes + ) + in + let nodes = loop [] in + Parser.eatBreadcrumb p; + nodes + + let parseRegion p ~grammar ~f = + Parser.leaveBreadcrumb p grammar; + let rec loop nodes = + match f p with + | Some node -> + loop (node::nodes) + | None -> + if p.Parser.token = Token.Eof || Recover.shouldAbortListParse p then + List.rev nodes + else ( + Parser.err p (Diagnostics.unexpected p.token p.breadcrumbs); + Parser.next p; + loop nodes + ) + in + let nodes = loop [] in + Parser.eatBreadcrumb p; + nodes + + (* let-binding ::= pattern = expr *) + (* ∣ value-name { parameter } [: typexpr] [:> typexpr] = expr *) + (* ∣ value-name : poly-typexpr = expr *) + + (* pattern ::= value-name *) + (* ∣ _ *) + (* ∣ constant *) + (* ∣ pattern as value-name *) + (* ∣ ( pattern ) *) + (* ∣ ( pattern : typexpr ) *) + (* ∣ pattern | pattern *) + (* ∣ constr pattern *) + (* ∣ #variant variant-pattern *) + (* ∣ ##type *) + (* ∣ / pattern { , pattern }+ / *) + (* ∣ { field [: typexpr] [= pattern] { ; field [: typexpr] [= pattern] } [; _ ] [ ; ] } *) + (* ∣ [ pattern { ; pattern } [ ; ] ] *) + (* ∣ pattern :: pattern *) + (* ∣ [| pattern { ; pattern } [ ; ] |] *) + (* ∣ char-literal .. 
char-literal *) + (* ∣ exception pattern *) + let rec parsePattern ?(alias=true) ?(or_=true) p = + let startPos = p.Parser.startPos in + let attrs = parseAttributes p in + let pat = match p.Parser.token with + | (True | False) as token -> + let endPos = p.endPos in + Parser.next p; + let loc = mkLoc startPos endPos in + Ast_helper.Pat.construct ~loc + (Location.mkloc (Longident.Lident (Token.toString token)) loc) None + | Int _ | String _ | Float _ | Character _ | Minus | Plus -> + let c = parseConstant p in + begin match p.token with + | DotDot -> + Parser.next p; + let c2 = parseConstant p in + Ast_helper.Pat.interval ~loc:(mkLoc startPos p.prevEndPos) c c2 + | _ -> + Ast_helper.Pat.constant ~loc:(mkLoc startPos p.prevEndPos) c + end + | Lparen -> + Parser.next p; + begin match p.token with + | Rparen -> + Parser.next p; + let loc = mkLoc startPos p.prevEndPos in + let lid = Location.mkloc (Longident.Lident "()") loc in + Ast_helper.Pat.construct ~loc lid None + | _ -> + let pat = parseConstrainedPattern p in + begin match p.token with + | Comma -> + Parser.next p; + parseTuplePattern ~attrs ~first:pat ~startPos p + | _ -> + Parser.expect Rparen p; + let loc = mkLoc startPos p.prevEndPos in + {pat with ppat_loc = loc} + end + end + | Lbracket -> + parseArrayPattern ~attrs p + | Lbrace -> + parseRecordPattern ~attrs p + | Underscore -> + let endPos = p.endPos in + let loc = mkLoc startPos endPos in + Parser.next p; + Ast_helper.Pat.any ~loc ~attrs () + | Lident ident -> + let endPos = p.endPos in + let loc = mkLoc startPos endPos in + Parser.next p; + Ast_helper.Pat.var ~loc ~attrs (Location.mkloc ident loc) + | Uident _ -> + let constr = parseModuleLongIdent ~lowercase:false p in + begin match p.Parser.token with + | Lparen -> + parseConstructorPatternArgs p constr startPos attrs + | _ -> + Ast_helper.Pat.construct ~loc:constr.loc ~attrs constr None + end + | Hash -> + let (ident, loc) = parseHashIdent ~startPos p in + begin match p.Parser.token with + | Lparen -> + parseVariantPatternArgs p ident startPos attrs + | _ -> + Ast_helper.Pat.variant ~loc ~attrs ident None + end + | HashHash -> + Parser.next p; + let ident = parseValuePath p in + let loc = mkLoc startPos ident.loc.loc_end in + Ast_helper.Pat.type_ ~loc ~attrs ident + | Exception -> + Parser.next p; + let pat = parsePattern ~alias:false ~or_:false p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Pat.exception_ ~loc ~attrs pat + | Lazy -> + Parser.next p; + let pat = parsePattern ~alias:false ~or_:false p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Pat.lazy_ ~loc ~attrs pat + | List -> + Parser.next p; + begin match p.token with + | Lbracket -> + parseListPattern ~startPos ~attrs p + | _ -> + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Pat.var ~loc ~attrs (Location.mkloc "list" loc) + end + | Module -> + parseModulePattern ~attrs p + | Percent -> + let extension = parseExtension p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Pat.extension ~loc ~attrs extension + | token -> + Parser.err p (Diagnostics.unexpected token p.breadcrumbs); + begin match skipTokensAndMaybeRetry p ~isStartOfGrammar:Grammar.isAtomicPatternStart with + | None -> + Recover.defaultPattern() + | Some () -> + parsePattern p + end + in + let pat = if alias then parseAliasPattern ~attrs pat p else pat in + if or_ then parseOrPattern pat p else pat + + and skipTokensAndMaybeRetry p ~isStartOfGrammar = + if Token.isKeyword p.Parser.token + && p.Parser.prevEndPos.pos_lnum == p.startPos.pos_lnum + then ( + 
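+      (* a keyword on the same line as the previous token most likely starts a new
+       * construct: consume it and signal that retrying the current rule isn't
+       * worthwhile (None aborts, Some () retries, see Recover.action) *)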
Parser.next p; + None + ) else ( + if Recover.shouldAbortListParse p then + begin + if isStartOfGrammar p.Parser.token then + begin + Parser.next p; + Some () + end + else + None + end + else + begin + Parser.next p; + let rec loop p = + if not (Recover.shouldAbortListParse p) + then begin + Parser.next p; + loop p + end in + loop p; + if isStartOfGrammar p.Parser.token then + Some () + else + None + end + ) + + (* alias ::= pattern as lident *) + and parseAliasPattern ~attrs pattern p = + match p.Parser.token with + | As -> + Parser.next p; + let (name, loc) = parseLident p in + let name = Location.mkloc name loc in + Ast_helper.Pat.alias + ~loc:({pattern.ppat_loc with loc_end = p.prevEndPos}) + ~attrs + pattern + name + | _ -> pattern + + (* or ::= pattern | pattern + * precedence: Red | Blue | Green is interpreted as (Red | Blue) | Green *) + and parseOrPattern pattern1 p = + let rec loop pattern1 = + match p.Parser.token with + | Bar -> + Parser.next p; + let pattern2 = parsePattern ~or_:false p in + let loc = { pattern1.Parsetree.ppat_loc with + loc_end = pattern2.ppat_loc.loc_end + } in + loop (Ast_helper.Pat.or_ ~loc pattern1 pattern2) + | _ -> pattern1 + in + loop pattern1 + + and parseNonSpreadPattern ~msg p = + let () = match p.Parser.token with + | DotDotDot -> + Parser.err p (Diagnostics.message msg); + Parser.next p; + | _ -> () + in + match p.Parser.token with + | token when Grammar.isPatternStart token -> + let pat = parsePattern p in + begin match p.Parser.token with + | Colon -> + Parser.next p; + let typ = parseTypExpr p in + let loc = mkLoc pat.ppat_loc.loc_start typ.Parsetree.ptyp_loc.loc_end in + Some (Ast_helper.Pat.constraint_ ~loc pat typ) + | _ -> Some pat + end + | _ -> None + + and parseConstrainedPattern p = + let pat = parsePattern p in + match p.Parser.token with + | Colon -> + Parser.next p; + let typ = parseTypExpr p in + let loc = mkLoc pat.ppat_loc.loc_start typ.Parsetree.ptyp_loc.loc_end in + Ast_helper.Pat.constraint_ ~loc pat typ + | _ -> pat + + and parseConstrainedPatternRegion p = + match p.Parser.token with + | token when Grammar.isPatternStart token -> + Some (parseConstrainedPattern p) + | _ -> None + + (* field ::= + * | longident + * | longident : pattern + * | longident as lident + * + * row ::= + * | field , + * | field , _ + * | field , _, + *) + and parseRecordPatternField p = + let startPos = p.Parser.startPos in + let label = parseValuePath p in + let pattern = match p.Parser.token with + | Colon -> + Parser.next p; + parsePattern p + | _ -> + Ast_helper.Pat.var + ~loc:label.loc + (Location.mkloc (Longident.last label.txt) label.loc) + in + match p.token with + | As -> + Parser.next p; + let (name, loc) = parseLident p in + let name = Location.mkloc name loc in + let aliasPattern = Ast_helper.Pat.alias + ~loc:(mkLoc startPos p.prevEndPos) + pattern + name + in + (Location.mkloc label.txt (mkLoc startPos aliasPattern.ppat_loc.loc_end), aliasPattern) + | _ -> + (label, pattern) + + (* TODO: there are better representations than PatField|Underscore ? 
*) + and parseRecordPatternItem p = + match p.Parser.token with + | DotDotDot -> + Parser.next p; + Some (true, PatField (parseRecordPatternField p)) + | Uident _ | Lident _ -> + Some (false, PatField (parseRecordPatternField p)) + | Underscore -> + Parser.next p; + Some (false, PatUnderscore) + | _ -> + None + + and parseRecordPattern ~attrs p = + let startPos = p.startPos in + Parser.expect Lbrace p; + let rawFields = + parseCommaDelimitedReversedList p + ~grammar:PatternRecord + ~closing:Rbrace + ~f:parseRecordPatternItem + in + Parser.expect Rbrace p; + let (fields, closedFlag) = + let (rawFields, flag) = match rawFields with + | (_hasSpread, PatUnderscore)::rest -> + (rest, Asttypes.Open) + | rawFields -> + (rawFields, Asttypes.Closed) + in + List.fold_left (fun (fields, flag) curr -> + let (hasSpread, field) = curr in + match field with + | PatField field -> + if hasSpread then ( + let (_, pattern) = field in + Parser.err ~startPos:pattern.Parsetree.ppat_loc.loc_start p (Diagnostics.message ErrorMessages.recordPatternSpread) + ); + (field::fields, flag) + | PatUnderscore -> + (fields, flag) + ) ([], flag) rawFields + in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Pat.record ~loc ~attrs fields closedFlag + + and parseTuplePattern ~attrs ~first ~startPos p = + let patterns = + parseCommaDelimitedRegion p + ~grammar:Grammar.PatternList + ~closing:Rparen + ~f:parseConstrainedPatternRegion + in + Parser.expect Rparen p; + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Pat.tuple ~loc ~attrs (first::patterns) + + and parsePatternRegion p = + match p.Parser.token with + | DotDotDot -> + Parser.next p; + Some (true, parseConstrainedPattern p) + | token when Grammar.isPatternStart token -> + Some (false, parseConstrainedPattern p) + | _ -> None + + and parseModulePattern ~attrs p = + let startPos = p.Parser.startPos in + Parser.expect Module p; + Parser.expect Lparen p; + let uident = match p.token with + | Uident uident -> + let loc = mkLoc p.startPos p.endPos in + Parser.next p; + Location.mkloc uident loc + | _ -> (* TODO: error recovery *) + Location.mknoloc "_" + in + begin match p.token with + | Colon -> + let colonStart = p.Parser.startPos in + Parser.next p; + let packageTypAttrs = parseAttributes p in + let packageType = parsePackageType ~startPos:colonStart ~attrs:packageTypAttrs p in + Parser.expect Rparen p; + let loc = mkLoc startPos p.prevEndPos in + let unpack = Ast_helper.Pat.unpack ~loc:uident.loc uident in + Ast_helper.Pat.constraint_ + ~loc + ~attrs + unpack + packageType + | _ -> + Parser.expect Rparen p; + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Pat.unpack ~loc ~attrs uident + end + + and parseListPattern ~startPos ~attrs p = + Parser.expect Lbracket p; + let listPatterns = + parseCommaDelimitedReversedList p + ~grammar:Grammar.PatternOcamlList + ~closing:Rbracket + ~f:parsePatternRegion + in + Parser.expect Rbracket p; + let loc = mkLoc startPos p.prevEndPos in + let filterSpread (hasSpread, pattern) = + if hasSpread then ( + Parser.err + ~startPos:pattern.Parsetree.ppat_loc.loc_start + p + (Diagnostics.message ErrorMessages.listPatternSpread); + pattern + ) else + pattern + in + match listPatterns with + | (true, pattern)::patterns -> + let patterns = patterns |> List.map filterSpread |> List.rev in + let pat = makeListPattern loc patterns (Some pattern) in + {pat with ppat_loc = loc; ppat_attributes = attrs;} + | patterns -> + let patterns = patterns |> List.map filterSpread |> List.rev in + let pat = makeListPattern loc patterns None in 
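+      (* no spread: the elements are folded into nested `::` constructs ending in `[]`,
+       * e.g. the pattern `list[1, 2]` becomes roughly `1 :: 2 :: []` *)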
+ {pat with ppat_loc = loc; ppat_attributes = attrs;} + + and parseArrayPattern ~attrs p = + let startPos = p.startPos in + Parser.expect Lbracket p; + let patterns = + parseCommaDelimitedRegion + p + ~grammar:Grammar.PatternList + ~closing:Rbracket + ~f:(parseNonSpreadPattern ~msg:ErrorMessages.arrayPatternSpread) + in + Parser.expect Rbracket p; + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Pat.array ~loc ~attrs patterns + + and parseConstructorPatternArgs p constr startPos attrs = + let lparen = p.startPos in + Parser.expect Lparen p; + let args = parseCommaDelimitedRegion + p ~grammar:Grammar.PatternList ~closing:Rparen ~f:parseConstrainedPatternRegion + in + Parser.expect Rparen p; + let args = match args with + | [] -> + let loc = mkLoc lparen p.prevEndPos in + Some ( + Ast_helper.Pat.construct ~loc (Location.mkloc (Longident.Lident "()") loc) None + ) + | [{ppat_desc = Ppat_tuple _} as pat] as patterns -> + if p.mode = ParseForTypeChecker then + (* Some(1, 2) for type-checker *) + Some pat + else + (* Some((1, 2)) for printer *) + Some (Ast_helper.Pat.tuple ~loc:(mkLoc lparen p.endPos) patterns) + | [pattern] -> Some pattern + | patterns -> + Some (Ast_helper.Pat.tuple ~loc:(mkLoc lparen p.endPos) patterns) + in + Ast_helper.Pat.construct ~loc:(mkLoc startPos p.prevEndPos) ~attrs constr args + + and parseVariantPatternArgs p ident startPos attrs = + let lparen = p.startPos in + Parser.expect Lparen p; + let patterns = + parseCommaDelimitedRegion + p ~grammar:Grammar.PatternList ~closing:Rparen ~f:parseConstrainedPatternRegion in + let args = + match patterns with + | [{ppat_desc = Ppat_tuple _} as pat] as patterns -> + if p.mode = ParseForTypeChecker then + (* #ident(1, 2) for type-checker *) + Some pat + else + (* #ident((1, 2)) for printer *) + Some (Ast_helper.Pat.tuple ~loc:(mkLoc lparen p.endPos) patterns) + | [pattern] -> Some pattern + | patterns -> + Some (Ast_helper.Pat.tuple ~loc:(mkLoc lparen p.endPos) patterns) + in + Parser.expect Rparen p; + Ast_helper.Pat.variant ~loc:(mkLoc startPos p.prevEndPos) ~attrs ident args + + and parseExpr ?(context=OrdinaryExpr) p = + let expr = parseOperandExpr ~context p in + let expr = parseBinaryExpr ~context ~a:expr p 1 in + parseTernaryExpr expr p + + (* expr ? 
expr : expr *) + and parseTernaryExpr leftOperand p = + match p.Parser.token with + | Question -> + Parser.leaveBreadcrumb p Grammar.Ternary; + Parser.next p; + let trueBranch = parseExpr ~context:TernaryTrueBranchExpr p in + Parser.expect Colon p; + let falseBranch = parseExpr p in + Parser.eatBreadcrumb p; + let loc = {leftOperand.Parsetree.pexp_loc with + loc_start = leftOperand.pexp_loc.loc_start; + loc_end = falseBranch.Parsetree.pexp_loc.loc_end; + } in + Ast_helper.Exp.ifthenelse + ~attrs:[ternaryAttr] ~loc + leftOperand trueBranch (Some falseBranch) + | _ -> + leftOperand + + and parseEs6ArrowExpression ?parameters p = + let startPos = p.Parser.startPos in + Parser.leaveBreadcrumb p Grammar.Es6ArrowExpr; + let parameters = match parameters with + | Some params -> params + | None -> parseParameters p + in + let returnType = match p.Parser.token with + | Colon -> + Parser.next p; + Some (parseTypExpr ~es6Arrow:false p) + | _ -> + None + in + Parser.expect EqualGreater p; + let body = + let expr = parseExpr p in + match returnType with + | Some typ -> + Ast_helper.Exp.constraint_ + ~loc:(mkLoc expr.pexp_loc.loc_start typ.Parsetree.ptyp_loc.loc_end) expr typ + | None -> expr + in + Parser.eatBreadcrumb p; + let endPos = p.prevEndPos in + let arrowExpr = + List.fold_right (fun parameter expr -> + match parameter with + | TermParameter {uncurried; attrs; label = lbl; expr = defaultExpr; pat; pos = startPos} -> + let attrs = if uncurried then uncurryAttr::attrs else attrs in + Ast_helper.Exp.fun_ ~loc:(mkLoc startPos endPos) ~attrs lbl defaultExpr pat expr + | TypeParameter {uncurried; attrs; locs = newtypes; pos = startPos} -> + let attrs = if uncurried then uncurryAttr::attrs else attrs in + makeNewtypes ~attrs ~loc:(mkLoc startPos endPos) newtypes expr + ) parameters body + in + {arrowExpr with pexp_loc = {arrowExpr.pexp_loc with loc_start = startPos}} + + (* + * uncurried_parameter ::= + * | . parameter + * + * parameter ::= + * | pattern + * | pattern : type + * | ~ labelName + * | ~ labelName as pattern + * | ~ labelName as pattern : type + * | ~ labelName = expr + * | ~ labelName as pattern = expr + * | ~ labelName as pattern : type = expr + * | ~ labelName = ? + * | ~ labelName as pattern = ? + * | ~ labelName as pattern : type = ? + * + * labelName ::= lident + *) + and parseParameter p = + if ( + p.Parser.token = Token.Typ || + p.token = Tilde || + p.token = Dot || + Grammar.isPatternStart p.token + ) then ( + let startPos = p.Parser.startPos in + let uncurried = Parser.optional p Token.Dot in + (* two scenarios: + * attrs ~lbl ... 
+ * attrs pattern + * Attributes before a labelled arg, indicate that it's on the whole arrow expr + * Otherwise it's part of the pattern + * *) + let attrs = parseAttributes p in + if p.Parser.token = Typ then ( + Parser.next p; + let lidents = parseLidentList p in + Some (TypeParameter {uncurried; attrs; locs = lidents; pos = startPos}) + ) else ( + let (attrs, lbl, pat) = match p.Parser.token with + | Tilde -> + Parser.next p; + let (lblName, loc) = parseLident p in + let propLocAttr = (Location.mkloc "ns.namedArgLoc" loc, Parsetree.PStr []) in + begin match p.Parser.token with + | Comma | Equal | Rparen -> + let loc = mkLoc startPos p.prevEndPos in + ( + attrs, + Asttypes.Labelled lblName, + Ast_helper.Pat.var ~attrs:[propLocAttr] ~loc (Location.mkloc lblName loc) + ) + | Colon -> + let lblEnd = p.prevEndPos in + Parser.next p; + let typ = parseTypExpr p in + let loc = mkLoc startPos lblEnd in + let pat = + let pat = Ast_helper.Pat.var ~loc (Location.mkloc lblName loc) in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Pat.constraint_ ~attrs:[propLocAttr] ~loc pat typ in + (attrs, Asttypes.Labelled lblName, pat) + | As -> + Parser.next p; + let pat = + let pat = parseConstrainedPattern p in + {pat with ppat_attributes = propLocAttr::pat.ppat_attributes} + in + (attrs, Asttypes.Labelled lblName, pat) + | t -> + Parser.err p (Diagnostics.unexpected t p.breadcrumbs); + let loc = mkLoc startPos p.prevEndPos in + ( + attrs, + Asttypes.Labelled lblName, + Ast_helper.Pat.var ~loc (Location.mkloc lblName loc) + ) + end + | _ -> + let pattern = parseConstrainedPattern p in + let attrs = List.concat [attrs; pattern.ppat_attributes] in + ([], Asttypes.Nolabel, {pattern with ppat_attributes = attrs}) + in + match p.Parser.token with + | Equal -> + Parser.next p; + let lbl = match lbl with + | Asttypes.Labelled lblName -> Asttypes.Optional lblName + | Asttypes.Optional _ as lbl -> lbl + | Asttypes.Nolabel -> Asttypes.Nolabel + in + begin match p.Parser.token with + | Question -> + Parser.next p; + Some (TermParameter {uncurried; attrs; label = lbl; expr = None; pat; pos = startPos}) + | _ -> + let expr = parseConstrainedOrCoercedExpr p in + Some (TermParameter {uncurried; attrs; label = lbl; expr = Some expr; pat; pos = startPos}) + end + | _ -> + Some (TermParameter {uncurried; attrs; label = lbl; expr = None; pat; pos = startPos}) + ) + ) else None + + and parseParameterList p = + let parameters = + parseCommaDelimitedRegion + ~grammar:Grammar.ParameterList + ~f:parseParameter + ~closing:Rparen + p + in + Parser.expect Rparen p; + parameters + + (* parameters ::= + * | _ + * | lident + * | () + * | (.) 
+ * | ( parameter {, parameter} [,] ) + *) + and parseParameters p = + let startPos = p.Parser.startPos in + match p.Parser.token with + | Lident ident -> + Parser.next p; + let loc = mkLoc startPos p.Parser.prevEndPos in + [TermParameter { + uncurried = false; + attrs = []; + label = Asttypes.Nolabel; + expr = None; + pat = Ast_helper.Pat.var ~loc (Location.mkloc ident loc); + pos = startPos; + }] + | List -> + Parser.next p; + let loc = mkLoc startPos p.Parser.prevEndPos in + [TermParameter { + uncurried = false; + attrs = []; + label = Asttypes.Nolabel; + expr = None; + pat = Ast_helper.Pat.var ~loc (Location.mkloc "list" loc); + pos = startPos; + }] + | Underscore -> + Parser.next p; + let loc = mkLoc startPos p.Parser.prevEndPos in + [TermParameter {uncurried = false; attrs = []; label = Asttypes.Nolabel; expr = None; pat = Ast_helper.Pat.any ~loc (); pos = startPos}] + | Lparen -> + Parser.next p; + begin match p.Parser.token with + | Rparen -> + Parser.next p; + let loc = mkLoc startPos p.Parser.prevEndPos in + let unitPattern = Ast_helper.Pat.construct + ~loc (Location.mkloc (Longident.Lident "()") loc) None + in + [TermParameter {uncurried = false; attrs = []; label = Asttypes.Nolabel; expr = None; pat = unitPattern; pos = startPos}] + | Dot -> + Parser.next p; + begin match p.token with + | Rparen -> + Parser.next p; + let loc = mkLoc startPos p.Parser.prevEndPos in + let unitPattern = Ast_helper.Pat.construct + ~loc (Location.mkloc (Longident.Lident "()") loc) None + in + [TermParameter {uncurried = true; attrs = []; label = Asttypes.Nolabel; expr = None; pat = unitPattern; pos = startPos}] + | _ -> + begin match parseParameterList p with + | (TermParameter {attrs; label = lbl; expr = defaultExpr; pat = pattern; pos = startPos})::rest -> + (TermParameter {uncurried = true; attrs; label = lbl; expr = defaultExpr; pat = pattern; pos = startPos})::rest + | parameters -> parameters + end + end + | _ -> parseParameterList p + end + | token -> + Parser.err p (Diagnostics.unexpected token p.breadcrumbs); + [] + + and parseCoercedExpr ~(expr: Parsetree.expression) p = + Parser.expect ColonGreaterThan p; + let typ = parseTypExpr p in + let loc = mkLoc expr.pexp_loc.loc_start p.prevEndPos in + Ast_helper.Exp.coerce ~loc expr None typ + + and parseConstrainedOrCoercedExpr p = + let expr = parseExpr p in + match p.Parser.token with + | ColonGreaterThan -> + parseCoercedExpr ~expr p + | Colon -> + Parser.next p; + begin match p.token with + | _ -> + let typ = parseTypExpr p in + let loc = mkLoc expr.pexp_loc.loc_start typ.ptyp_loc.loc_end in + let expr = Ast_helper.Exp.constraint_ ~loc expr typ in + begin match p.token with + | ColonGreaterThan -> + parseCoercedExpr ~expr p + | _ -> + expr + end + end + | _ -> expr + + + and parseConstrainedExprRegion p = + match p.Parser.token with + | token when Grammar.isExprStart token -> + let expr = parseExpr p in + begin match p.Parser.token with + | Colon -> + Parser.next p; + let typ = parseTypExpr p in + let loc = mkLoc expr.pexp_loc.loc_start typ.ptyp_loc.loc_end in + Some (Ast_helper.Exp.constraint_ ~loc expr typ) + | _ -> Some expr + end + | _ -> None + + (* Atomic expressions represent unambiguous expressions. + * This means that regardless of the context, these expressions + * are always interpreted correctly. 
*) + and parseAtomicExpr p = + Parser.leaveBreadcrumb p Grammar.ExprOperand; + let startPos = p.Parser.startPos in + let expr = match p.Parser.token with + | (True | False) as token -> + Parser.next p; + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.construct ~loc + (Location.mkloc (Longident.Lident (Token.toString token)) loc) None + | Int _ | String _ | Float _ | Character _ -> + let c = parseConstant p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.constant ~loc c + | Backtick -> + let expr = parseTemplateExpr p in + {expr with pexp_loc = mkLoc startPos p.prevEndPos} + | Uident _ | Lident _ -> + parseValueOrConstructor p + | Hash -> + parsePolyVariantExpr p + | Lparen -> + Parser.next p; + begin match p.Parser.token with + | Rparen -> + Parser.next p; + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.construct + ~loc (Location.mkloc (Longident.Lident "()") loc) None + | _t -> + let expr = parseConstrainedOrCoercedExpr p in + begin match p.token with + | Comma -> + Parser.next p; + parseTupleExpr ~startPos ~first:expr p + | _ -> + Parser.expect Rparen p; + expr + (* {expr with pexp_loc = mkLoc startPos p.prevEndPos} + * What does this location mean here? It means that when there's + * a parenthesized we keep the location here for whitespace interleaving. + * Without the closing paren in the location there will always be an extra + * line. For now we don't include it, because it does weird things + * with for comments. *) + end + end + | List -> + Parser.next p; + begin match p.token with + | Lbracket -> + parseListExpr ~startPos p + | _ -> + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.ident ~loc (Location.mkloc (Longident.Lident "list") loc) + end + | Module -> + Parser.next p; + parseFirstClassModuleExpr ~startPos p + | Lbracket -> + parseArrayExp p + | Lbrace -> + parseBracedOrRecordExpr p + | LessThan -> + parseJsx p + | Percent -> + let extension = parseExtension p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.extension ~loc extension + | Underscore as token -> + (* This case is for error recovery. 
Not sure if it's the correct place *) + Parser.err p (Diagnostics.lident token); + Parser.next p; + Recover.defaultExpr () + | token -> + let errPos = p.prevEndPos in + begin match skipTokensAndMaybeRetry p ~isStartOfGrammar:Grammar.isAtomicExprStart with + | None -> + Parser.err ~startPos:errPos p (Diagnostics.unexpected token p.breadcrumbs); + Recover.defaultExpr () + | Some () -> parseAtomicExpr p + end + in + Parser.eatBreadcrumb p; + expr + + (* module(module-expr) + * module(module-expr : package-type) *) + and parseFirstClassModuleExpr ~startPos p = + Parser.expect Lparen p; + + let modExpr = parseModuleExpr p in + let modEndLoc = p.prevEndPos in + begin match p.Parser.token with + | Colon -> + let colonStart = p.Parser.startPos in + Parser.next p; + let attrs = parseAttributes p in + let packageType = parsePackageType ~startPos:colonStart ~attrs p in + Parser.expect Rparen p; + let loc = mkLoc startPos modEndLoc in + let firstClassModule = Ast_helper.Exp.pack ~loc modExpr in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.constraint_ ~loc firstClassModule packageType + | _ -> + Parser.expect Rparen p; + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.pack ~loc modExpr + end + + and parseBracketAccess p expr startPos = + Parser.leaveBreadcrumb p Grammar.ExprArrayAccess; + let lbracket = p.startPos in + Parser.next p; + let stringStart = p.startPos in + match p.Parser.token with + | String s -> + Parser.next p; + let stringEnd = p.prevEndPos in + Parser.expect Rbracket p; + let rbracket = p.prevEndPos in + let e = + let identLoc = mkLoc stringStart stringEnd in + let loc = mkLoc lbracket rbracket in + Ast_helper.Exp.apply ~loc + (Ast_helper.Exp.ident ~loc (Location.mkloc (Longident.Lident "##") loc)) + [Nolabel, expr; Nolabel, (Ast_helper.Exp.ident ~loc:identLoc (Location.mkloc (Longident.Lident s) identLoc))] + in + let e = parsePrimaryExpr ~operand:e p in + let equalStart = p.startPos in + begin match p.token with + | Equal -> + Parser.next p; + let equalEnd = p.prevEndPos in + let rhsExpr = parseExpr p in + let loc = mkLoc startPos rhsExpr.pexp_loc.loc_end in + let operatorLoc = mkLoc equalStart equalEnd in + Ast_helper.Exp.apply ~loc + (Ast_helper.Exp.ident ~loc:operatorLoc (Location.mkloc (Longident.Lident "#=") operatorLoc)) + [Nolabel, e; Nolabel, rhsExpr] + | _ -> e + end + | _ -> + let accessExpr = parseConstrainedOrCoercedExpr p in + Parser.expect Rbracket p; + let rbracket = p.prevEndPos in + let arrayLoc = mkLoc lbracket rbracket in + begin match p.token with + | Equal -> + Parser.leaveBreadcrumb p ExprArrayMutation; + Parser.next p; + let rhsExpr = parseExpr p in + let arraySet = Location.mkloc + (Longident.Ldot(Lident "Array", "set")) + arrayLoc + in + let endPos = p.prevEndPos in + let arraySet = Ast_helper.Exp.apply + ~loc:(mkLoc startPos endPos) + (Ast_helper.Exp.ident ~loc:arrayLoc arraySet) + [Nolabel, expr; Nolabel, accessExpr; Nolabel, rhsExpr] + in + Parser.eatBreadcrumb p; + arraySet + | _ -> + let endPos = p.prevEndPos in + let e = + Ast_helper.Exp.apply + ~loc:(mkLoc startPos endPos) + (Ast_helper.Exp.ident + ~loc:arrayLoc + (Location.mkloc (Longident.Ldot(Lident "Array", "get")) arrayLoc) + ) + [Nolabel, expr; Nolabel, accessExpr] + in + Parser.eatBreadcrumb p; + parsePrimaryExpr ~operand:e p + end + + (* * A primary expression represents + * - atomic-expr + * - john.age + * - array[0] + * - applyFunctionTo(arg1, arg2) + * + * The "operand" represents the expression that is operated on + *) + and parsePrimaryExpr ~operand 
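+    (* ~noCall:true switches off the postfix parses below (call `(...)`,
+     * bracket access `[...]` and tagged templates); parseJsxChildren passes
+     * it so that sibling jsx children such as `{foo} (bar)` stay separate
+     * expressions instead of being parsed as a call. *)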
?(noCall=false) p = + let startPos = operand.pexp_loc.loc_start in + let rec loop p expr = + match p.Parser.token with + | Dot -> + Parser.next p; + let lident = parseValuePath p in + begin match p.Parser.token with + | Equal when noCall = false -> + Parser.leaveBreadcrumb p Grammar.ExprSetField; + Parser.next p; + let targetExpr = parseExpr p in + let loc = mkLoc startPos p.prevEndPos in + let setfield = Ast_helper.Exp.setfield ~loc expr lident targetExpr in + Parser.eatBreadcrumb p; + setfield + | _ -> + let endPos = p.prevEndPos in + let loc = mkLoc startPos endPos in + loop p (Ast_helper.Exp.field ~loc expr lident) + end + | Lbracket when noCall = false && p.prevEndPos.pos_lnum == p.startPos.pos_lnum -> + parseBracketAccess p expr startPos + | Lparen when noCall = false && p.prevEndPos.pos_lnum == p.startPos.pos_lnum -> + loop p (parseCallExpr p expr) + | Backtick when noCall = false && p.prevEndPos.pos_lnum == p.startPos.pos_lnum -> + begin match expr.pexp_desc with + | Pexp_ident {txt = Longident.Lident ident} -> + parseTemplateExpr ~prefix:ident p + | _ -> + Parser.err + ~startPos:expr.pexp_loc.loc_start + ~endPos:expr.pexp_loc.loc_end + p + (Diagnostics.message "Tagged template literals are currently restricted to identifiers like: json`null`."); + parseTemplateExpr p + end + | _ -> expr + in + loop p operand + + (* a unary expression is an expression with only one operand and + * unary operator. Examples: + * -1 + * !condition + * -. 1.6 + *) + and parseUnaryExpr p = + let startPos = p.Parser.startPos in + match p.Parser.token with + | (Minus | MinusDot | Plus | PlusDot | Bang) as token -> + Parser.leaveBreadcrumb p Grammar.ExprUnary; + let tokenEnd = p.endPos in + Parser.next p; + let operand = parseUnaryExpr p in + let unaryExpr = makeUnaryExpr startPos tokenEnd token operand in + Parser.eatBreadcrumb p; + unaryExpr + | _ -> + parsePrimaryExpr ~operand:(parseAtomicExpr p) p + + (* Represents an "operand" in a binary expression. + * If you have `a + b`, `a` and `b` both represent + * the operands of the binary expression with opeartor `+` *) + and parseOperandExpr ~context p = + let startPos = p.Parser.startPos in + let attrs = parseAttributes p in + let expr = match p.Parser.token with + | Assert -> + Parser.next p; + let expr = parseUnaryExpr p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.assert_ ~loc expr + | Lazy -> + Parser.next p; + let expr = parseUnaryExpr p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.lazy_ ~loc expr + | Try -> + parseTryExpression p + | If -> + parseIfExpression p + | For -> + parseForExpression p + | While -> + parseWhileExpression p + | Switch -> + parseSwitchExpression p + | _ -> + if (context != WhenExpr) && + isEs6ArrowExpression ~inTernary:(context=TernaryTrueBranchExpr) p + then + parseEs6ArrowExpression p + else + parseUnaryExpr p + in + (* let endPos = p.Parser.prevEndPos in *) + {expr with + pexp_attributes = List.concat[expr.Parsetree.pexp_attributes; attrs]; + (* pexp_loc = mkLoc startPos endPos *) + } + + (* a binary expression is an expression that combines two expressions with an + * operator. Examples: + * a + b + * f(x) |> g(y) + *) + and parseBinaryExpr ?(context=OrdinaryExpr) ?a p prec = + let a = match a with + | Some e -> e + | None -> parseOperandExpr ~context p + in + let rec loop a = + let token = p.Parser.token in + let tokenPrec = + match token with + (* Can the minus be interpreted as a binary operator? Or is it a unary? 
+ * let w = { + * x + * -10 + * } + * vs + * let w = { + * width + * - gap + * } + * + * First case is unary, second is a binary operator. + * See Scanner.isBinaryOp *) + | Minus | MinusDot | LessThan when not ( + Scanner.isBinaryOp p.scanner.src p.startPos.pos_cnum p.endPos.pos_cnum + ) && p.startPos.pos_lnum > p.prevEndPos.pos_lnum -> -1 + | token -> Token.precedence token + in + if tokenPrec < prec then a + else begin + Parser.leaveBreadcrumb p (Grammar.ExprBinaryAfterOp token); + let startPos = p.startPos in + Parser.next p; + let endPos = p.prevEndPos in + let b = parseBinaryExpr ~context p (tokenPrec + 1) in + let loc = mkLoc a.Parsetree.pexp_loc.loc_start b.pexp_loc.loc_end in + let expr = Ast_helper.Exp.apply + ~loc + (makeInfixOperator p token startPos endPos) + [Nolabel, a; Nolabel, b] + in + loop expr + end + in + loop a + + (* If we even need this, determines if < might be the start of jsx. Not 100% complete *) + (* and isStartOfJsx p = *) + (* Parser.lookahead p (fun p -> *) + (* match p.Parser.token with *) + (* | LessThan -> *) + (* Parser.next p; *) + (* begin match p.token with *) + (* | GreaterThan (* <> *) -> true *) + (* | Lident _ | Uident _ | List -> *) + (* ignore (parseJsxName p); *) + (* begin match p.token with *) + (* | GreaterThan (*
*) -> true *) + (* | Question (* true *) + (* | Lident _ | List -> *) + (* Parser.next p; *) + (* begin match p.token with *) + (* | Equal (* true *) + (* | _ -> false (* TODO *) *) + (* end *) + (* | Forwardslash (* *) + (* Parser.next p; *) + (* begin match p.token with *) + (* | GreaterThan (* *) -> true *) + (* | _ -> false *) + (* end *) + (* | _ -> *) + (* false *) + (* end *) + (* | _ -> false *) + (* end *) + (* | _ -> false *) + (* ) *) + + and parseTemplateExpr ?(prefix="") p = + let hiddenOperator = + let op = Location.mknoloc (Longident.Lident "^") in + Ast_helper.Exp.ident op + in + let rec loop acc p = + let startPos = p.Parser.startPos in + match p.Parser.token with + | TemplateTail txt -> + Parser.next p; + let loc = mkLoc startPos p.prevEndPos in + if String.length txt > 0 then + let txt = if p.mode = ParseForTypeChecker then parseTemplateStringLiteral txt else txt in + let str = Ast_helper.Exp.constant ~loc (Pconst_string(txt, Some prefix)) in + Ast_helper.Exp.apply ~loc hiddenOperator + [Nolabel, acc; Nolabel, str] + else + acc + | TemplatePart txt -> + Parser.next p; + let loc = mkLoc startPos p.prevEndPos in + let expr = parseExprBlock p in + let fullLoc = mkLoc startPos p.prevEndPos in + Scanner.setTemplateMode p.scanner; + Parser.expect Rbrace p; + let txt = if p.mode = ParseForTypeChecker then parseTemplateStringLiteral txt else txt in + let str = Ast_helper.Exp.constant ~loc (Pconst_string(txt, Some prefix)) in + let next = + let a = if String.length txt > 0 then + Ast_helper.Exp.apply ~loc:fullLoc hiddenOperator [Nolabel, acc; Nolabel, str] + else acc + in + Ast_helper.Exp.apply ~loc:fullLoc hiddenOperator + [Nolabel, a; Nolabel, expr] + in + loop next p + | token -> + Parser.err p (Diagnostics.unexpected token p.breadcrumbs); + acc + in + Scanner.setTemplateMode p.scanner; + Parser.expect Backtick p; + let startPos = p.Parser.startPos in + match p.Parser.token with + | TemplateTail txt -> + let loc = mkLoc startPos p.endPos in + Parser.next p; + let txt = if p.mode = ParseForTypeChecker then parseTemplateStringLiteral txt else txt in + Ast_helper.Exp.constant ~loc (Pconst_string(txt, Some prefix)) + | TemplatePart txt -> + let constantLoc = mkLoc startPos p.endPos in + Parser.next p; + let expr = parseExprBlock p in + let fullLoc = mkLoc startPos p.prevEndPos in + Scanner.setTemplateMode p.scanner; + Parser.expect Rbrace p; + let txt = if p.mode = ParseForTypeChecker then parseTemplateStringLiteral txt else txt in + let str = Ast_helper.Exp.constant ~loc:constantLoc (Pconst_string(txt, Some prefix)) in + let next = + if String.length txt > 0 then + Ast_helper.Exp.apply ~loc:fullLoc hiddenOperator [Nolabel, str; Nolabel, expr] + else + expr + in + loop next p + | token -> + Parser.err p (Diagnostics.unexpected token p.breadcrumbs); + Ast_helper.Exp.constant (Pconst_string("", None)) + + (* Overparse: let f = a : int => a + 1, is it (a : int) => or (a): int => + * Also overparse constraints: + * let x = { + * let a = 1 + * a + pi: int + * } + * + * We want to give a nice error message in these cases + * *) + and overParseConstrainedOrCoercedOrArrowExpression p expr = + match p.Parser.token with + | ColonGreaterThan -> + parseCoercedExpr ~expr p + | Colon -> + Parser.next p; + let typ = parseTypExpr ~es6Arrow:false p in + begin match p.Parser.token with + | EqualGreater -> + Parser.next p; + let body = parseExpr p in + let pat = match expr.pexp_desc with + | Pexp_ident longident -> + Ast_helper.Pat.var ~loc:expr.pexp_loc + (Location.mkloc + (Longident.flatten 
longident.txt |> String.concat ".") + longident.loc) + (* TODO: can we convert more expressions to patterns?*) + | _ -> + Ast_helper.Pat.var ~loc:expr.pexp_loc (Location.mkloc "pattern" expr.pexp_loc) + in + let arrow1 = Ast_helper.Exp.fun_ + ~loc:(mkLoc expr.pexp_loc.loc_start body.pexp_loc.loc_end) + Asttypes.Nolabel + None + pat + (Ast_helper.Exp.constraint_ body typ) + in + let arrow2 = Ast_helper.Exp.fun_ + ~loc:(mkLoc expr.pexp_loc.loc_start body.pexp_loc.loc_end) + Asttypes.Nolabel + None + (Ast_helper.Pat.constraint_ pat typ) + body + in + let msg = + Doc.breakableGroup ~forceBreak:true ( + Doc.concat [ + Doc.text "Did you mean to annotate the parameter type or the return type?"; + Doc.indent ( + Doc.concat [ + Doc.line; + Doc.text "1) "; + Printer.printExpression arrow1 CommentTable.empty; + Doc.line; + Doc.text "2) "; + Printer.printExpression arrow2 CommentTable.empty; + ] + ) + ] + ) |> Doc.toString ~width:80 + in + Parser.err + ~startPos:expr.pexp_loc.loc_start + ~endPos:body.pexp_loc.loc_end + p + (Diagnostics.message msg); + arrow1 + | _ -> + let open Parsetree in + let loc = mkLoc expr.pexp_loc.loc_start typ.ptyp_loc.loc_end in + let expr = Ast_helper.Exp.constraint_ ~loc expr typ in + let () = Parser.err + ~startPos:expr.pexp_loc.loc_start + ~endPos:typ.ptyp_loc.loc_end + p + (Diagnostics.message + (Doc.breakableGroup ~forceBreak:true (Doc.concat [ + Doc.text "Expressions with type constraints need to be wrapped in parens:"; + Doc.indent ( + Doc.concat [ + Doc.line; + Printer.addParens (Printer.printExpression expr CommentTable.empty); + ] + ) + ]) |> Doc.toString ~width:80 + )) + in + expr + end + | _ -> expr + + and parseLetBindingBody ~startPos ~attrs p = + Parser.beginRegion p; + Parser.leaveBreadcrumb p Grammar.LetBinding; + let pat, exp = + let pat = parsePattern p in + match p.Parser.token with + | Colon -> + Parser.next p; + begin match p.token with + | Typ -> (* locally abstract types *) + Parser.next p; + let newtypes = parseLidentList p in + Parser.expect Dot p; + let typ = parseTypExpr p in + Parser.expect Equal p; + let expr = parseExpr p in + let loc = mkLoc startPos p.prevEndPos in + let exp, poly = wrapTypeAnnotation ~loc newtypes typ expr in + let pat = Ast_helper.Pat.constraint_ ~loc pat poly in + (pat, exp) + | _ -> + let polyType = parsePolyTypeExpr p in + let loc = {pat.ppat_loc with loc_end = polyType.Parsetree.ptyp_loc.loc_end} in + let pat = Ast_helper.Pat.constraint_ ~loc pat polyType in + Parser.expect Token.Equal p; + let exp = parseExpr p in + let exp = overParseConstrainedOrCoercedOrArrowExpression p exp in + (pat, exp) + end + | _ -> + Parser.expect Token.Equal p; + let exp = overParseConstrainedOrCoercedOrArrowExpression p (parseExpr p) in + (pat, exp) + in + let loc = mkLoc startPos p.prevEndPos in + let vb = Ast_helper.Vb.mk ~loc ~attrs pat exp in + Parser.eatBreadcrumb p; + Parser.endRegion p; + vb + + (* TODO: find a better way? Is it possible? 
+ * let a = 1 + * @attr + * and b = 2 + * + * The problem is that without semi we need a lookahead to determine + * if the attr is on the letbinding or the start of a new thing + * + * let a = 1 + * @attr + * let b = 1 + * + * Here @attr should attach to something "new": `let b = 1` + * The parser state is forked, which is quite expensive… + *) + and parseAttributesAndBinding (p : Parser.t) = + let err = p.scanner.err in + let ch = p.scanner.ch in + let offset = p.scanner.offset in + let rdOffset = p.scanner.rdOffset in + let lineOffset = p.scanner.lineOffset in + let lnum = p.scanner.lnum in + let mode = p.scanner.mode in + let token = p.token in + let startPos = p.startPos in + let endPos = p.endPos in + let prevEndPos = p.prevEndPos in + let breadcrumbs = p.breadcrumbs in + let errors = p.errors in + let diagnostics = p.diagnostics in + let comments = p.comments in + + match p.Parser.token with + | At -> + let attrs = parseAttributes p in + begin match p.Parser.token with + | And -> + attrs + | _ -> + p.scanner.err <- err; + p.scanner.ch <- ch; + p.scanner.offset <- offset; + p.scanner.rdOffset <- rdOffset; + p.scanner.lineOffset <- lineOffset; + p.scanner.lnum <- lnum; + p.scanner.mode <- mode; + p.token <- token; + p.startPos <- startPos; + p.endPos <- endPos; + p.prevEndPos <- prevEndPos; + p.breadcrumbs <- breadcrumbs; + p.errors <- errors; + p.diagnostics <- diagnostics; + p.comments <- comments; + [] + end + | _ -> [] + + (* definition ::= let [rec] let-binding { and let-binding } *) + and parseLetBindings ~attrs p = + let startPos = p.Parser.startPos in + Parser.optional p Let |> ignore; + let recFlag = if Parser.optional p Token.Rec then + Asttypes.Recursive + else + Asttypes.Nonrecursive + in + let first = parseLetBindingBody ~startPos ~attrs p in + + let rec loop p bindings = + let startPos = p.Parser.startPos in + let attrs = parseAttributesAndBinding p in + match p.Parser.token with + | And -> + Parser.next p; + let attrs = match p.token with + | Export -> + let exportLoc = mkLoc p.startPos p.endPos in + Parser.next p; + let genTypeAttr = (Location.mkloc "genType" exportLoc, Parsetree.PStr []) in + genTypeAttr::attrs + | _ -> attrs + in + ignore(Parser.optional p Let); (* overparse for fault tolerance *) + let letBinding = parseLetBindingBody ~startPos ~attrs p in + loop p (letBinding::bindings) + | _ -> + List.rev bindings + in + (recFlag, loop p [first]) + + (* + * div -> div + * Foo -> Foo.createElement + * Foo.Bar -> Foo.Bar.createElement + *) + and parseJsxName p = + let longident = match p.Parser.token with + | Lident ident -> + let identStart = p.startPos in + let identEnd = p.endPos in + Parser.next p; + let loc = mkLoc identStart identEnd in + Location.mkloc (Longident.Lident ident) loc + | Uident _ -> + let longident = parseModuleLongIdent ~lowercase:false p in + Location.mkloc (Longident.Ldot (longident.txt, "createElement")) longident.loc + | _ -> + let msg = "A jsx name should start with a lowercase or uppercase identifier, like: div in
or Navbar in " + in + Parser.err p (Diagnostics.message msg); + Location.mknoloc (Longident.Lident "_") + in + Ast_helper.Exp.ident ~loc:longident.loc longident + + and parseJsxOpeningOrSelfClosingElement ~startPos p = + let jsxStartPos = p.Parser.startPos in + let name = parseJsxName p in + let jsxProps = parseJsxProps p in + let children = match p.Parser.token with + | Forwardslash -> (* *) + let childrenStartPos = p.Parser.startPos in + Parser.next p; + let childrenEndPos = p.Parser.startPos in + Parser.expect GreaterThan p; + let loc = mkLoc childrenStartPos childrenEndPos in + makeListExpression loc [] None (* no children *) + | GreaterThan -> (* bar *) + let childrenStartPos = p.Parser.startPos in + Scanner.setJsxMode p.scanner; + Parser.next p; + let (spread, children) = parseJsxChildren p in + let childrenEndPos = p.Parser.startPos in + let () = match p.token with + | LessThanSlash -> Parser.next p + | LessThan -> Parser.next p; Parser.expect Forwardslash p + | token when Grammar.isStructureItemStart token -> () + | _ -> Parser.expect LessThanSlash p + in + begin match p.Parser.token with + | Lident _ | Uident _ when verifyJsxOpeningClosingName p name -> + Parser.expect GreaterThan p; + let loc = mkLoc childrenStartPos childrenEndPos in + ( match spread, children with + | true, child :: _ -> + child + | _ -> + makeListExpression loc children None + ) + | token -> + let () = if Grammar.isStructureItemStart token then ( + let closing = "" in + let msg = Diagnostics.message ("Missing " ^ closing) in + Parser.err ~startPos ~endPos:p.prevEndPos p msg; + ) else ( + let opening = "" in + let msg = "Closing jsx name should be the same as the opening name. Did you mean " ^ opening ^ " ?" in + Parser.err ~startPos ~endPos:p.prevEndPos p (Diagnostics.message msg); + Parser.expect GreaterThan p + ) + in + let loc = mkLoc childrenStartPos childrenEndPos in + ( match spread, children with + | true, child :: _ -> + child + | _ -> + makeListExpression loc children None + ) + end + | token -> + Parser.err p (Diagnostics.unexpected token p.breadcrumbs); + makeListExpression Location.none [] None + in + let jsxEndPos = p.prevEndPos in + let loc = mkLoc jsxStartPos jsxEndPos in + Ast_helper.Exp.apply + ~loc + name + (List.concat [jsxProps; [ + (Asttypes.Labelled "children", children); + (Asttypes.Nolabel, Ast_helper.Exp.construct (Location.mknoloc (Longident.Lident "()")) None) + ]]) + + (* + * jsx ::= + * | <> jsx-children + * | + * | jsx-children + * + * jsx-children ::= primary-expr* * => 0 or more + *) + and parseJsx p = + Parser.leaveBreadcrumb p Grammar.Jsx; + let startPos = p.Parser.startPos in + Parser.expect LessThan p; + let jsxExpr = match p.Parser.token with + | Lident _ | Uident _ -> + parseJsxOpeningOrSelfClosingElement ~startPos p + | GreaterThan -> (* fragment: <> foo *) + parseJsxFragment p + | _ -> + parseJsxName p + in + {jsxExpr with pexp_attributes = [jsxAttr]} + + (* + * jsx-fragment ::= + * | <> + * | <> jsx-children + *) + and parseJsxFragment p = + let childrenStartPos = p.Parser.startPos in + Scanner.setJsxMode p.scanner; + Parser.expect GreaterThan p; + let (_spread, children) = parseJsxChildren p in + let childrenEndPos = p.Parser.startPos in + Parser.expect LessThanSlash p; + Parser.expect GreaterThan p; + let loc = mkLoc childrenStartPos childrenEndPos in + makeListExpression loc children None + + + (* + * jsx-prop ::= + * | lident + * | ?lident + * | lident = jsx_expr + * | lident = ?jsx_expr + *) + and parseJsxProp p = + Parser.leaveBreadcrumb p Grammar.JsxAttribute; + 
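+    (* Illustrative forms, mirroring the grammar above:
+     *   <Foo checked />          punned prop      -> Labelled "checked"
+     *   <Foo ?checked />         punned optional  -> Optional "checked"
+     *   <Foo name="x" />         value prop       -> Labelled "name"
+     *   <Foo name=?maybeName />  optional value   -> Optional "name" *)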
match p.Parser.token with + | Question | Lident _ -> + let optional = Parser.optional p Question in + let (name, loc) = parseLident p in + let propLocAttr = (Location.mkloc "ns.namedArgLoc" loc, Parsetree.PStr []) in + (* optional punning: *) + if optional then + Some ( + Asttypes.Optional name, + Ast_helper.Exp.ident ~attrs:[propLocAttr] + ~loc (Location.mkloc (Longident.Lident name) loc) + ) + else begin + match p.Parser.token with + | Equal -> + Parser.next p; + (* no punning *) + let optional = Parser.optional p Question in + let attrExpr = + let e = parsePrimaryExpr ~operand:(parseAtomicExpr p) p in + {e with pexp_attributes = propLocAttr::e.pexp_attributes} + in + let label = + if optional then Asttypes.Optional name else Asttypes.Labelled name + in + Some (label, attrExpr) + | _ -> + let attrExpr = + Ast_helper.Exp.ident ~loc ~attrs:[propLocAttr] + (Location.mknoloc (Longident.Lident name)) in + let label = + if optional then Asttypes.Optional name else Asttypes.Labelled name + in + Some (label, attrExpr) + end + | _ -> + None + + and parseJsxProps p = + parseRegion + ~grammar:Grammar.JsxAttribute + ~f:parseJsxProp + p + + and parseJsxChildren p = + let rec loop p children = + match p.Parser.token with + | Token.Eof | LessThanSlash -> + Scanner.popMode p.scanner Jsx; + List.rev children + | LessThan -> + (* Imagine:
+ *   <div> <span /> </div>
+ * is `<` the start of a jsx-child, e.g. <span />,
+ * or the start of the closing tag </div>?
+ * reconsiderLessThan peeks at the next token and + * determines the correct token to disambiguate *) + let token = Scanner.reconsiderLessThan p.scanner in + if token = LessThan then + let child = parsePrimaryExpr ~operand:(parseAtomicExpr p) ~noCall:true p in + loop p (child::children) + else (* LessThanSlash *) + let () = p.token <- token in + let () = Scanner.popMode p.scanner Jsx in + List.rev children + | token when Grammar.isJsxChildStart token -> + let () = Scanner.popMode p.scanner Jsx in + let child = parsePrimaryExpr ~operand:(parseAtomicExpr p) ~noCall:true p in + loop p (child::children) + | _ -> + Scanner.popMode p.scanner Jsx; + List.rev children + in + match p.Parser.token with + | DotDotDot -> + Parser.next p; + (true, [parsePrimaryExpr ~operand:(parseAtomicExpr p) ~noCall:true p]) + | _ -> (false, loop p []) + + and parseBracedOrRecordExpr p = + let startPos = p.Parser.startPos in + Parser.expect Lbrace p; + match p.Parser.token with + | Rbrace -> + Parser.err p (Diagnostics.unexpected Rbrace p.breadcrumbs); + Parser.next p; + let loc = mkLoc startPos p.prevEndPos in + let braces = makeBracesAttr loc in + Ast_helper.Exp.construct ~attrs:[braces] ~loc + (Location.mkloc (Longident.Lident "()") loc) None + | DotDotDot -> + (* beginning of record spread, parse record *) + Parser.next p; + let spreadExpr = parseConstrainedOrCoercedExpr p in + Parser.expect Comma p; + let expr = parseRecordExpr ~startPos ~spread:(Some spreadExpr) [] p in + Parser.expect Rbrace p; + expr + | String s -> + let field = + let loc = mkLoc p.startPos p.endPos in + Parser.next p; + Location.mkloc (Longident.Lident s) loc + in + begin match p.Parser.token with + | Colon -> + Parser.next p; + let fieldExpr = parseExpr p in + Parser.optional p Comma |> ignore; + let expr = parseRecordExprWithStringKeys ~startPos (field, fieldExpr) p in + Parser.expect Rbrace p; + expr + | _ -> + let constant = Ast_helper.Exp.constant ~loc:field.loc (Parsetree.Pconst_string(s, None)) in + let a = parsePrimaryExpr ~operand:constant p in + let e = parseBinaryExpr ~a p 1 in + let e = parseTernaryExpr e p in + begin match p.Parser.token with + | Semicolon -> + Parser.next p; + let expr = parseExprBlock ~first:e p in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + let braces = makeBracesAttr loc in + {expr with pexp_attributes = braces::expr.pexp_attributes} + | Rbrace -> + Parser.next p; + let loc = mkLoc startPos p.prevEndPos in + let braces = makeBracesAttr loc in + {e with pexp_attributes = braces::e.pexp_attributes} + | _ -> + let expr = parseExprBlock ~first:e p in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + let braces = makeBracesAttr loc in + {expr with pexp_attributes = braces::expr.pexp_attributes} + end + end + | Uident _ | Lident _ -> + let valueOrConstructor = parseValueOrConstructor p in + begin match valueOrConstructor.pexp_desc with + | Pexp_ident pathIdent -> + let identEndPos = p.prevEndPos in + begin match p.Parser.token with + | Comma -> + Parser.next p; + let expr = parseRecordExpr ~startPos [(pathIdent, valueOrConstructor)] p in + Parser.expect Rbrace p; + expr + | Colon -> + Parser.next p; + let fieldExpr = parseExpr p in + begin match p.token with + | Rbrace -> + Parser.next p; + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.record ~loc [(pathIdent, fieldExpr)] None + | _ -> + Parser.expect Comma p; + let expr = parseRecordExpr ~startPos [(pathIdent, fieldExpr)] p in + Parser.expect Rbrace p; + expr + end + (* error case *) + | Lident _ -> + 
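+        (* error recovery: `{a b}` on a single line is most likely a missing
+         * `:` in a record row, while a line break between the two idents is
+         * treated as a missing `,` between rows. *)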
if p.prevEndPos.pos_lnum < p.startPos.pos_lnum then ( + Parser.expect Comma p; + let expr = parseRecordExpr ~startPos [(pathIdent, valueOrConstructor)] p in + Parser.expect Rbrace p; + expr + ) else ( + Parser.expect Colon p; + let expr = parseRecordExpr ~startPos [(pathIdent, valueOrConstructor)] p in + Parser.expect Rbrace p; + expr + ) + | Semicolon -> + Parser.next p; + let expr = parseExprBlock ~first:(Ast_helper.Exp.ident pathIdent) p in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + let braces = makeBracesAttr loc in + {expr with pexp_attributes = braces::expr.pexp_attributes} + | Rbrace -> + Parser.next p; + let expr = Ast_helper.Exp.ident ~loc:pathIdent.loc pathIdent in + let loc = mkLoc startPos p.prevEndPos in + let braces = makeBracesAttr loc in + {expr with pexp_attributes = braces::expr.pexp_attributes} + | EqualGreater -> + let loc = mkLoc startPos identEndPos in + let ident = Location.mkloc (Longident.last pathIdent.txt) loc in + let a = parseEs6ArrowExpression + ~parameters:[TermParameter { + uncurried = false; + attrs = []; + label = Asttypes.Nolabel; + expr = None; + pat = Ast_helper.Pat.var ident; + pos = startPos; + }] + p + in + let e = parseBinaryExpr ~a p 1 in + let e = parseTernaryExpr e p in + begin match p.Parser.token with + | Semicolon -> + Parser.next p; + let expr = parseExprBlock ~first:e p in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + let braces = makeBracesAttr loc in + {expr with pexp_attributes = braces::expr.pexp_attributes} + | Rbrace -> + Parser.next p; + let loc = mkLoc startPos p.prevEndPos in + let braces = makeBracesAttr loc in + {e with pexp_attributes = braces::e.pexp_attributes} + | _ -> + let expr = parseExprBlock ~first:e p in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + let braces = makeBracesAttr loc in + {expr with pexp_attributes = braces::expr.pexp_attributes} + end + | _ -> + Parser.leaveBreadcrumb p Grammar.ExprBlock; + let a = parsePrimaryExpr ~operand:(Ast_helper.Exp.ident ~loc:pathIdent.loc pathIdent) p in + let e = parseBinaryExpr ~a p 1 in + let e = parseTernaryExpr e p in + Parser.eatBreadcrumb p; + begin match p.Parser.token with + | Semicolon -> + Parser.next p; + let expr = parseExprBlock ~first:e p in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + let braces = makeBracesAttr loc in + {expr with pexp_attributes = braces::expr.pexp_attributes} + | Rbrace -> + Parser.next p; + let loc = mkLoc startPos p.prevEndPos in + let braces = makeBracesAttr loc in + {e with pexp_attributes = braces::e.pexp_attributes} + | _ -> + let expr = parseExprBlock ~first:e p in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + let braces = makeBracesAttr loc in + {expr with pexp_attributes = braces::expr.pexp_attributes} + end + end + | _ -> + Parser.leaveBreadcrumb p Grammar.ExprBlock; + let a = parsePrimaryExpr ~operand:valueOrConstructor p in + let e = parseBinaryExpr ~a p 1 in + let e = parseTernaryExpr e p in + Parser.eatBreadcrumb p; + begin match p.Parser.token with + | Semicolon -> + Parser.next p; + let expr = parseExprBlock ~first:e p in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + let braces = makeBracesAttr loc in + {expr with pexp_attributes = braces::expr.pexp_attributes} + | Rbrace -> + Parser.next p; + let loc = mkLoc startPos p.prevEndPos in + let braces = makeBracesAttr loc in + {e with pexp_attributes = braces::e.pexp_attributes} + | _ -> + let expr = parseExprBlock ~first:e p in + 
Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + let braces = makeBracesAttr loc in + {expr with pexp_attributes = braces::expr.pexp_attributes} + end + end + | _ -> + let expr = parseExprBlock p in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + let braces = makeBracesAttr loc in + {expr with pexp_attributes = braces::expr.pexp_attributes} + + and parseRecordRowWithStringKey p = + match p.Parser.token with + | String s -> + let loc = mkLoc p.startPos p.endPos in + Parser.next p; + let field = Location.mkloc (Longident.Lident s) loc in + begin match p.Parser.token with + | Colon -> + Parser.next p; + let fieldExpr = parseExpr p in + Some (field, fieldExpr) + | _ -> + Some (field, Ast_helper.Exp.ident ~loc:field.loc field) + end + | _ -> None + + and parseRecordRow p = + let () = match p.Parser.token with + | Token.DotDotDot -> + Parser.err p (Diagnostics.message ErrorMessages.recordExprSpread); + Parser.next p; + | _ -> () + in + match p.Parser.token with + | Lident _ | Uident _ | List -> + let field = parseValuePath p in + begin match p.Parser.token with + | Colon -> + Parser.next p; + let fieldExpr = parseExpr p in + Some (field, fieldExpr) + | _ -> + Some (field, Ast_helper.Exp.ident ~loc:field.loc field) + end + | _ -> None + + and parseRecordExprWithStringKeys ~startPos firstRow p = + let rows = firstRow::( + parseCommaDelimitedRegion ~grammar:Grammar.RecordRowsStringKey ~closing:Rbrace ~f:parseRecordRowWithStringKey p + ) in + let loc = mkLoc startPos p.endPos in + let recordStrExpr = Ast_helper.Str.eval ~loc ( + Ast_helper.Exp.record ~loc rows None + ) in + Ast_helper.Exp.extension ~loc + (Location.mkloc "bs.obj" loc, Parsetree.PStr [recordStrExpr]) + + and parseRecordExpr ~startPos ?(spread=None) rows p = + let exprs = + parseCommaDelimitedRegion + ~grammar:Grammar.RecordRows + ~closing:Rbrace + ~f:parseRecordRow p + in + let rows = List.concat [rows; exprs] in + let () = match rows with + | [] -> + let msg = "Record spread needs at least one field that's updated" in + Parser.err p (Diagnostics.message msg); + | _rows -> () + in + let loc = mkLoc startPos p.endPos in + Ast_helper.Exp.record ~loc rows spread + + and parseExprBlockItem p = + let startPos = p.Parser.startPos in + let attrs = parseAttributes p in + match p.Parser.token with + | Module -> + Parser.next p; + begin match p.token with + | Lparen -> + parseFirstClassModuleExpr ~startPos p + | _ -> + let name = match p.Parser.token with + | Uident ident -> + let loc = mkLoc p.startPos p.endPos in + Parser.next p; + Location.mkloc ident loc + | t -> + Parser.err p (Diagnostics.uident t); + Location.mknoloc "_" + in + let body = parseModuleBindingBody p in + Parser.optional p Semicolon |> ignore; + let expr = parseExprBlock p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.letmodule ~loc name body expr + end + | Exception -> + let extensionConstructor = parseExceptionDef ~attrs p in + Parser.optional p Semicolon |> ignore; + let blockExpr = parseExprBlock p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.letexception ~loc extensionConstructor blockExpr + | Open -> + let od = parseOpenDescription ~attrs p in + Parser.optional p Semicolon |> ignore; + let blockExpr = parseExprBlock p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.open_ ~loc od.popen_override od.popen_lid blockExpr + | Let -> + let (recFlag, letBindings) = parseLetBindings ~attrs p in + let next = match p.Parser.token with + | Semicolon -> + Parser.next p; + if 
Grammar.isBlockExprStart p.Parser.token then + parseExprBlock p + else + let loc = mkLoc p.startPos p.endPos in + Ast_helper.Exp.construct ~loc + (Location.mkloc (Longident.Lident "()") loc) None + | token when Grammar.isBlockExprStart token -> + parseExprBlock p + | _ -> + let loc = mkLoc p.startPos p.endPos in + Ast_helper.Exp.construct ~loc (Location.mkloc (Longident.Lident "()") loc) None + in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.let_ ~loc recFlag letBindings next + | _ -> + let e1 = + let expr = parseExpr p in + {expr with pexp_attributes = List.concat [attrs; expr.pexp_attributes]} + in + ignore (Parser.optional p Semicolon); + if Grammar.isBlockExprStart p.Parser.token then + let e2 = parseExprBlock p in + let loc = {e1.pexp_loc with loc_end = e2.pexp_loc.loc_end} in + Ast_helper.Exp.sequence ~loc e1 e2 + else e1 + + (* blockExpr ::= expr + * | expr ; + * | expr ; blockExpr + * | module ... ; blockExpr + * | open ... ; blockExpr + * | exception ... ; blockExpr + * | let ... + * | let ... ; + * | let ... ; blockExpr + * + * note: semi should be made optional + * a block of expression is always + *) + and parseExprBlock ?first p = + Parser.leaveBreadcrumb p Grammar.ExprBlock; + let item = match first with + | Some e -> e + | None -> parseExprBlockItem p + in + let blockExpr = match p.Parser.token with + | Semicolon -> + Parser.next p; + if Grammar.isBlockExprStart p.Parser.token then + let next = parseExprBlockItem p in + ignore(Parser.optional p Semicolon); + let loc = {item.pexp_loc with loc_end = next.pexp_loc.loc_end} in + Ast_helper.Exp.sequence ~loc item next + else + item + | token when Grammar.isBlockExprStart token -> + let next = parseExprBlockItem p in + ignore(Parser.optional p Semicolon); + let loc = {item.pexp_loc with loc_end = next.pexp_loc.loc_end} in + Ast_helper.Exp.sequence ~loc item next + | _ -> + item + in + Parser.eatBreadcrumb p; + overParseConstrainedOrCoercedOrArrowExpression p blockExpr + + and parseTryExpression p = + let startPos = p.Parser.startPos in + Parser.expect Try p; + let expr = parseExpr ~context:WhenExpr p in + Parser.expect Catch p; + Parser.expect Lbrace p; + let cases = parsePatternMatching p in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.try_ ~loc expr cases + + and parseIfExpression p = + Parser.beginRegion p; + Parser.leaveBreadcrumb p Grammar.ExprIf; + let startPos = p.Parser.startPos in + Parser.expect If p; + Parser.leaveBreadcrumb p Grammar.IfCondition; + (* doesn't make sense to try es6 arrow here? 
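+     * (the condition is parsed with ~context:WhenExpr below, and
+     * parseOperandExpr skips the es6-arrow check for that context)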
*) + let conditionExpr = parseExpr ~context:WhenExpr p in + Parser.eatBreadcrumb p; + Parser.leaveBreadcrumb p IfBranch; + Parser.expect Lbrace p; + let thenExpr = parseExprBlock p in + Parser.expect Rbrace p; + Parser.eatBreadcrumb p; + let elseExpr = match p.Parser.token with + | Else -> + Parser.endRegion p; + Parser.leaveBreadcrumb p Grammar.ElseBranch; + Parser.next p; + Parser.beginRegion p; + let elseExpr = match p.token with + | If -> + parseIfExpression p + | _ -> + Parser.expect Lbrace p; + let blockExpr = parseExprBlock p in + Parser.expect Rbrace p; + blockExpr + in + Parser.eatBreadcrumb p; + Parser.endRegion p; + Some elseExpr + | _ -> + Parser.endRegion p; + None + in + let loc = mkLoc startPos p.prevEndPos in + Parser.eatBreadcrumb p; + Ast_helper.Exp.ifthenelse ~loc conditionExpr thenExpr elseExpr + + and parseForRest hasOpeningParen pattern startPos p = + Parser.expect In p; + let e1 = parseExpr p in + let direction = match p.Parser.token with + | To -> Asttypes.Upto + | Downto -> Asttypes.Downto + | token -> + Parser.err p (Diagnostics.unexpected token p.breadcrumbs); + Asttypes.Upto + in + Parser.next p; + let e2 = parseExpr ~context:WhenExpr p in + if hasOpeningParen then Parser.expect Rparen p; + Parser.expect Lbrace p; + let bodyExpr = parseExprBlock p in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.for_ ~loc pattern e1 e2 direction bodyExpr + + and parseForExpression p = + let startPos = p.Parser.startPos in + Parser.expect For p; + match p.token with + | Lparen -> + let lparen = p.startPos in + Parser.next p; + begin match p.token with + | Rparen -> + Parser.next p; + let unitPattern = + let loc = mkLoc lparen p.prevEndPos in + let lid = Location.mkloc (Longident.Lident "()") loc in + Ast_helper.Pat.construct lid None + in + parseForRest false (parseAliasPattern ~attrs:[] unitPattern p) startPos p + | _ -> + let pat = parsePattern p in + begin match p.token with + | Comma -> + Parser.next p; + let tuplePattern = + parseTuplePattern ~attrs:[] ~startPos:lparen ~first:pat p + in + let pattern = parseAliasPattern ~attrs:[] tuplePattern p in + parseForRest false pattern startPos p + | _ -> + parseForRest true pat startPos p + end + end + | _ -> + parseForRest false (parsePattern p) startPos p + + and parseWhileExpression p = + let startPos = p.Parser.startPos in + Parser.expect While p; + let expr1 = parseExpr ~context:WhenExpr p in + Parser.expect Lbrace p; + let expr2 = parseExprBlock p in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.while_ ~loc expr1 expr2 + + and parsePatternMatchCase p = + Parser.beginRegion p; + Parser.leaveBreadcrumb p Grammar.PatternMatchCase; + match p.Parser.token with + | Token.Bar -> + Parser.next p; + let lhs = parsePattern p in + let guard = match p.Parser.token with + | When -> + Parser.next p; + Some (parseExpr ~context:WhenExpr p) + | _ -> + None + in + let () = match p.token with + | EqualGreater -> Parser.next p + | _ -> Recover.recoverEqualGreater p + in + let rhs = parseExprBlock p in + Parser.endRegion p; + Parser.eatBreadcrumb p; + Some (Ast_helper.Exp.case lhs ?guard rhs) + | _ -> + Parser.endRegion p; + None + + and parsePatternMatching p = + Parser.leaveBreadcrumb p Grammar.PatternMatching; + let cases = + parseDelimitedRegion + ~grammar:Grammar.PatternMatching + ~closing:Rbrace + ~f:parsePatternMatchCase + p + in + let () = match cases with + | [] -> Parser.err ~startPos:p.prevEndPos p ( + Diagnostics.message "Pattern matching needs at least one 
case" + ) + | _ -> () + in + cases + + and parseSwitchExpression p = + let startPos = p.Parser.startPos in + Parser.expect Switch p; + let switchExpr = parseExpr ~context:WhenExpr p in + Parser.expect Lbrace p; + let cases = parsePatternMatching p in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.match_ ~loc switchExpr cases + + (* + * argument ::= + * | _ (* syntax sugar *) + * | expr + * | expr : type + * | ~ label-name + * | ~ label-name + * | ~ label-name ? + * | ~ label-name = expr + * | ~ label-name = _ (* syntax sugar *) + * | ~ label-name = expr : type + * | ~ label-name = ? expr + * | ~ label-name = ? _ (* syntax sugar *) + * | ~ label-name = ? expr : type + * + * uncurried_argument ::= + * | . argument + *) + and parseArgument p = + if ( + p.Parser.token = Token.Tilde || + p.token = Dot || + p.token = Underscore || + Grammar.isExprStart p.token + ) then ( + match p.Parser.token with + | Dot -> + let uncurried = true in + let startPos = p.Parser.startPos in + Parser.next(p); + begin match p.token with + (* apply(.) *) + | Rparen -> + let loc = mkLoc startPos p.prevEndPos in + let unitExpr = Ast_helper.Exp.construct ~loc + (Location.mkloc (Longident.Lident "()") loc) None + in + Some (uncurried, Asttypes.Nolabel, unitExpr) + | _ -> + parseArgument2 p ~uncurried + end + | _ -> + parseArgument2 p ~uncurried:false + ) else + None + + and parseArgument2 p ~uncurried = + match p.Parser.token with + (* foo(_), do not confuse with foo(_ => x), TODO: performance *) + | Underscore when not (isEs6ArrowExpression ~inTernary:false p) -> + let loc = mkLoc p.startPos p.endPos in + Parser.next p; + let exp = Ast_helper.Exp.ident ~loc ( + Location.mkloc (Longident.Lident "_") loc + ) in + Some (uncurried, Asttypes.Nolabel, exp) + | Tilde -> + Parser.next p; + (* TODO: nesting of pattern matches not intuitive for error recovery *) + begin match p.Parser.token with + | Lident ident -> + let startPos = p.startPos in + Parser.next p; + let endPos = p.prevEndPos in + let loc = mkLoc startPos endPos in + let propLocAttr = (Location.mkloc "ns.namedArgLoc" loc, Parsetree.PStr []) in + let identExpr = Ast_helper.Exp.ident ~attrs:[propLocAttr] ~loc ( + Location.mkloc (Longident.Lident ident) loc + ) in + begin match p.Parser.token with + | Question -> + Parser.next p; + Some (uncurried, Asttypes.Optional ident, identExpr) + | Equal -> + Parser.next p; + let label = match p.Parser.token with + | Question -> + Parser.next p; + Asttypes.Optional ident + | _ -> + Labelled ident + in + let expr = match p.Parser.token with + | Underscore when not (isEs6ArrowExpression ~inTernary:false p) -> + let loc = mkLoc p.startPos p.endPos in + Parser.next p; + Ast_helper.Exp.ident ~loc ( + Location.mkloc (Longident.Lident "_") loc + ) + | _ -> + let expr = parseConstrainedOrCoercedExpr p in + {expr with pexp_attributes = propLocAttr::expr.pexp_attributes} + in + Some (uncurried, label, expr) + | Colon -> + Parser.next p; + let typ = parseTypExpr p in + let loc = mkLoc startPos p.prevEndPos in + let expr = Ast_helper.Exp.constraint_ ~attrs:[propLocAttr] ~loc identExpr typ in + Some (uncurried, Labelled ident, expr) + | _ -> + Some (uncurried, Labelled ident, identExpr) + end + | t -> + Parser.err p (Diagnostics.lident t); + Some (uncurried, Nolabel, Recover.defaultExpr ()) + end + | _ -> Some (uncurried, Nolabel, parseConstrainedOrCoercedExpr p) + + and parseCallExpr p funExpr = + Parser.expect Lparen p; + let startPos = p.Parser.startPos in + Parser.leaveBreadcrumb p Grammar.ExprCall; 
+ let args = + parseCommaDelimitedRegion + ~grammar:Grammar.ArgumentList + ~closing:Rparen + ~f:parseArgument p + in + Parser.expect Rparen p; + let args = match args with + | [] -> + let loc = mkLoc startPos p.prevEndPos in + (* No args -> unit sugar: `foo()` *) + [ false, + Asttypes.Nolabel, + Ast_helper.Exp.construct + ~loc (Location.mkloc (Longident.Lident "()") loc) None + ] + | args -> args + in + let loc = {funExpr.pexp_loc with loc_end = p.prevEndPos} in + let args = match args with + | (u, lbl, expr)::args -> + let group (grp, acc) (uncurried, lbl, expr) = + let (_u, grp) = grp in + if uncurried == true then + ((true, [lbl, expr]), ((_u, (List.rev grp))::acc)) + else + ((_u, ((lbl, expr)::grp)), acc) + in + let ((_u, grp), acc) = List.fold_left group((u, [lbl, expr]), []) args in + List.rev ((_u, (List.rev grp))::acc) + | [] -> [] + in + let apply = List.fold_left (fun callBody group -> + let (uncurried, args) = group in + let (args, wrap) = processUnderscoreApplication args in + let exp = if uncurried then + let attrs = [uncurryAttr] in + Ast_helper.Exp.apply ~loc ~attrs callBody args + else + Ast_helper.Exp.apply ~loc callBody args + in + wrap exp + ) funExpr args + in + Parser.eatBreadcrumb p; + apply + + and parseValueOrConstructor p = + let startPos = p.Parser.startPos in + let rec aux p acc = + match p.Parser.token with + | Uident ident -> + let endPosLident = p.endPos in + Parser.next p; + begin match p.Parser.token with + | Dot -> + Parser.next p; + aux p (ident::acc) + | Lparen when p.prevEndPos.pos_lnum == p.startPos.pos_lnum -> + let lparen = p.startPos in + let args = parseConstructorArgs p in + let rparen = p.prevEndPos in + let lident = buildLongident (ident::acc) in + let tail = match args with + | [] -> None + | [{Parsetree.pexp_desc = Pexp_tuple _} as arg] as args -> + let loc = mkLoc lparen rparen in + if p.mode = ParseForTypeChecker then + (* Some(1, 2) for type-checker *) + Some arg + else + (* Some((1, 2)) for printer *) + Some (Ast_helper.Exp.tuple ~loc args) + | [arg] -> + Some arg + | args -> + let loc = mkLoc lparen rparen in + Some (Ast_helper.Exp.tuple ~loc args) + in + let loc = mkLoc startPos p.prevEndPos in + let identLoc = mkLoc startPos endPosLident in + Ast_helper.Exp.construct ~loc (Location.mkloc lident identLoc) tail + | _ -> + let loc = mkLoc startPos p.prevEndPos in + let lident = buildLongident (ident::acc) in + Ast_helper.Exp.construct ~loc (Location.mkloc lident loc) None + end + | Lident ident -> + Parser.next p; + let loc = mkLoc startPos p.prevEndPos in + let lident = buildLongident (ident::acc) in + Ast_helper.Exp.ident ~loc (Location.mkloc lident loc) + | List -> + Parser.next p; + let loc = mkLoc startPos p.prevEndPos in + let lident = buildLongident ("list"::acc) in + Ast_helper.Exp.ident ~loc (Location.mkloc lident loc) + | token -> + Parser.next p; + Parser.err p (Diagnostics.unexpected token p.breadcrumbs); + Recover.defaultExpr() + in + aux p [] + + and parsePolyVariantExpr p = + let startPos = p.startPos in + let (ident, _loc) = parseHashIdent ~startPos p in + begin match p.Parser.token with + | Lparen when p.prevEndPos.pos_lnum == p.startPos.pos_lnum -> + let lparen = p.startPos in + let args = parseConstructorArgs p in + let rparen = p.prevEndPos in + let loc_paren = mkLoc lparen rparen in + let tail = match args with + | [] -> None + | [{Parsetree.pexp_desc = Pexp_tuple _} as expr ] as args -> + if p.mode = ParseForTypeChecker then + (* #a(1, 2) for type-checker *) + Some expr + else + (* #a((1, 2)) for type-checker *) + Some 
(Ast_helper.Exp.tuple ~loc:loc_paren args) + | [arg] -> Some arg + | args -> + (* #a((1, 2)) for printer *) + Some (Ast_helper.Exp.tuple ~loc:loc_paren args) + in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.variant ~loc ident tail + | _ -> + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.variant ~loc ident None + end + + and parseConstructorArgs p = + let lparen = p.Parser.startPos in + Parser.expect Lparen p; + let args = + parseCommaDelimitedRegion + ~grammar:Grammar.ExprList ~f:parseConstrainedExprRegion ~closing:Rparen p + in + Parser.expect Rparen p; + match args with + | [] -> + let loc = mkLoc lparen p.prevEndPos in + [Ast_helper.Exp.construct + ~loc (Location.mkloc (Longident.Lident "()") loc) None] + | args -> args + + and parseTupleExpr ~first ~startPos p = + let exprs = + parseCommaDelimitedRegion + p ~grammar:Grammar.ExprList ~closing:Rparen ~f:parseConstrainedExprRegion + in + Parser.expect Rparen p; + Ast_helper.Exp.tuple ~loc:(mkLoc startPos p.prevEndPos) (first::exprs) + + and parseSpreadExprRegion p = + match p.Parser.token with + | DotDotDot -> + Parser.next p; + let expr = parseConstrainedOrCoercedExpr p in + Some (true, expr) + | token when Grammar.isExprStart token -> + Some (false, parseConstrainedOrCoercedExpr p) + | _ -> None + + and parseListExpr ~startPos p = + Parser.expect Lbracket p; + let listExprs = + parseCommaDelimitedReversedList + p ~grammar:Grammar.ListExpr ~closing:Rbracket ~f:parseSpreadExprRegion + in + Parser.expect Rbracket p; + let loc = mkLoc startPos p.prevEndPos in + match listExprs with + | (true, expr)::exprs -> + let exprs = exprs |> List.map snd |> List.rev in + makeListExpression loc exprs (Some expr) + | exprs -> + let exprs = + exprs + |> List.map (fun (spread, expr) -> + if spread then + Parser.err p (Diagnostics.message ErrorMessages.listExprSpread); + expr) + |> List.rev + in + makeListExpression loc exprs None + + (* Overparse ... 
and give a nice error message *) + and parseNonSpreadExp ~msg p = + let () = match p.Parser.token with + | DotDotDot -> + Parser.err p (Diagnostics.message msg); + Parser.next p; + | _ -> () + in + match p.Parser.token with + | token when Grammar.isExprStart token -> + let expr = parseExpr p in + begin match p.Parser.token with + | Colon -> + Parser.next p; + let typ = parseTypExpr p in + let loc = mkLoc expr.pexp_loc.loc_start typ.ptyp_loc.loc_end in + Some (Ast_helper.Exp.constraint_ ~loc expr typ) + | _ -> Some expr + end + | _ -> None + + and parseArrayExp p = + let startPos = p.Parser.startPos in + Parser.expect Lbracket p; + let exprs = + parseCommaDelimitedRegion + p + ~grammar:Grammar.ExprList + ~closing:Rbracket + ~f:(parseNonSpreadExp ~msg:ErrorMessages.arrayExprSpread) + in + Parser.expect Rbracket p; + Ast_helper.Exp.array ~loc:(mkLoc startPos p.prevEndPos) exprs + + (* TODO: check attributes in the case of poly type vars, + * might be context dependend: parseFieldDeclaration (see ocaml) *) + and parsePolyTypeExpr p = + let startPos = p.Parser.startPos in + match p.Parser.token with + | SingleQuote -> + let vars = parseTypeVarList p in + begin match vars with + | _v1::_v2::_ -> + Parser.expect Dot p; + let typ = parseTypExpr p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Typ.poly ~loc vars typ + | [var] -> + begin match p.Parser.token with + | Dot -> + Parser.next p; + let typ = parseTypExpr p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Typ.poly ~loc vars typ + | EqualGreater -> + Parser.next p; + let typ = Ast_helper.Typ.var ~loc:var.loc var.txt in + let returnType = parseTypExpr ~alias:false p in + let loc = mkLoc typ.Parsetree.ptyp_loc.loc_start p.prevEndPos in + Ast_helper.Typ.arrow ~loc Asttypes.Nolabel typ returnType + | _ -> + Ast_helper.Typ.var ~loc:var.loc var.txt + end + | _ -> assert false + end + | _ -> + parseTypExpr p + + (* 'a 'b 'c *) + and parseTypeVarList p = + let rec loop p vars = + match p.Parser.token with + | SingleQuote -> + Parser.next p; + let (lident, loc) = parseLident p in + let var = Location.mkloc lident loc in + loop p (var::vars) + | _ -> + List.rev vars + in + loop p [] + + and parseLidentList p = + let rec loop p ls = + match p.Parser.token with + | Lident lident -> + let loc = mkLoc p.startPos p.endPos in + Parser.next p; + loop p ((Location.mkloc lident loc)::ls) + | _ -> + List.rev ls + in + loop p [] + + and parseAtomicTypExpr ~attrs p = + Parser.leaveBreadcrumb p Grammar.AtomicTypExpr; + let startPos = p.Parser.startPos in + let typ = match p.Parser.token with + | SingleQuote -> + Parser.next p; + let (ident, loc) = parseLident p in + Ast_helper.Typ.var ~loc ~attrs ident + | Underscore -> + let endPos = p.endPos in + Parser.next p; + Ast_helper.Typ.any ~loc:(mkLoc startPos endPos) ~attrs () + | Lparen -> + Parser.next p; + begin match p.Parser.token with + | Rparen -> + Parser.next p; + let loc = mkLoc startPos p.prevEndPos in + let unitConstr = Location.mkloc (Longident.Lident "unit") loc in + Ast_helper.Typ.constr ~attrs unitConstr [] + | _ -> + let t = parseTypExpr p in + begin match p.token with + | Comma -> + Parser.next p; + parseTupleType ~attrs ~first:t ~startPos p + | _ -> + Parser.expect Rparen p; + {t with + ptyp_loc = mkLoc startPos p.prevEndPos; + ptyp_attributes = List.concat [attrs; t.ptyp_attributes]} + end + end + | Lbracket -> + parsePolymorphicVariantType ~attrs p + | Uident _ | Lident _ | List -> + let constr = parseValuePath p in + let args = parseTypeConstructorArgs ~constrName:constr p 
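+      (* e.g. in `array<int>`, `array` is the constructor path and `<int>`
+       * its argument list *)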
in + Ast_helper.Typ.constr ~loc:(mkLoc startPos p.prevEndPos) ~attrs constr args + | Module -> + Parser.next p; + Parser.expect Lparen p; + let packageType = parsePackageType ~startPos ~attrs p in + Parser.expect Rparen p; + {packageType with ptyp_loc = mkLoc startPos p.prevEndPos} + | Percent -> + let extension = parseExtension p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Typ.extension ~attrs ~loc extension + | Lbrace -> + parseBsObjectType ~attrs p + | token -> + begin match skipTokensAndMaybeRetry p ~isStartOfGrammar:Grammar.isAtomicTypExprStart with + | Some () -> + parseAtomicTypExpr ~attrs p + | None -> + Parser.err ~startPos:p.prevEndPos p (Diagnostics.unexpected token p.breadcrumbs); + Recover.defaultType() + end + in + Parser.eatBreadcrumb p; + typ + + (* package-type ::= + | modtype-path + ∣ modtype-path with package-constraint { and package-constraint } + *) + and parsePackageType ~startPos ~attrs p = + let modTypePath = parseModuleLongIdent ~lowercase:true p in + begin match p.Parser.token with + | With -> + Parser.next p; + let constraints = parsePackageConstraints p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Typ.package ~loc ~attrs modTypePath constraints + | _ -> + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Typ.package ~loc ~attrs modTypePath [] + end + + (* package-constraint { and package-constraint } *) + and parsePackageConstraints p = + let first = + Parser.expect Typ p; + let typeConstr = parseValuePath p in + Parser.expect Equal p; + let typ = parseTypExpr p in + (typeConstr, typ) + in + let rest = parseRegion + ~grammar:Grammar.PackageConstraint + ~f:parsePackageConstraint + p + in + first::rest + + (* and type typeconstr = typexpr *) + and parsePackageConstraint p = + match p.Parser.token with + | And -> + Parser.next p; + Parser.expect Typ p; + let typeConstr = parseValuePath p in + Parser.expect Equal p; + let typ = parseTypExpr p in + Some (typeConstr, typ) + | _ -> None + + and parseBsObjectType ~attrs p = + let startPos = p.Parser.startPos in + Parser.expect Lbrace p; + let closedFlag = match p.token with + | DotDot -> Parser.next p; Asttypes.Open + | Dot -> Parser.next p; Asttypes.Closed + | _ -> Asttypes.Closed + in + let fields = + parseCommaDelimitedRegion + ~grammar:Grammar.StringFieldDeclarations + ~closing:Rbrace + ~f:parseStringFieldDeclaration + p + in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + makeBsObjType ~attrs ~loc ~closed:closedFlag fields + + (* TODO: check associativity in combination with attributes *) + and parseTypeAlias p typ = + match p.Parser.token with + | As -> + Parser.next p; + Parser.expect SingleQuote p; + let (ident, _loc) = parseLident p in + (* TODO: how do we parse attributes here? *) + Ast_helper.Typ.alias ~loc:(mkLoc typ.Parsetree.ptyp_loc.loc_start p.prevEndPos) typ ident + | _ -> typ + + + (* type_parameter ::= + * | type_expr + * | ~ident: type_expr + * | ~ident: type_expr=? + * + * note: + * | attrs ~ident: type_expr -> attrs are on the arrow + * | attrs type_expr -> attrs are here part of the type_expr + * + * uncurried_type_parameter ::= + * | . 
type_parameter + *) + and parseTypeParameter p = + if ( + p.Parser.token = Token.Tilde || + p.token = Dot || + Grammar.isTypExprStart p.token + ) then ( + let startPos = p.Parser.startPos in + let uncurried = Parser.optional p Dot in + let attrs = parseAttributes p in + match p.Parser.token with + | Tilde -> + Parser.next p; + let (name, _loc) = parseLident p in + Parser.expect ~grammar:Grammar.TypeExpression Colon p; + let typ = parseTypExpr p in + begin match p.Parser.token with + | Equal -> + Parser.next p; + Parser.expect Question p; + Some (uncurried, attrs, Asttypes.Optional name, typ, startPos) + | _ -> + Some (uncurried, attrs, Asttypes.Labelled name, typ, startPos) + end + | Lident _ | List -> + let (name, loc) = parseLident p in + begin match p.token with + | Colon -> + let () = + let error = Diagnostics.message + ("Parameter names start with a `~`, like: ~" ^ name) + in + Parser.err ~startPos:loc.loc_start ~endPos:loc.loc_end p error + in + Parser.next p; + let typ = parseTypExpr p in + begin match p.Parser.token with + | Equal -> + Parser.next p; + Parser.expect Question p; + Some (uncurried, attrs, Asttypes.Optional name, typ, startPos) + | _ -> + Some (uncurried, attrs, Asttypes.Labelled name, typ, startPos) + end + | _ -> + let constr = Location.mkloc (Longident.Lident name) loc in + let args = parseTypeConstructorArgs ~constrName:constr p in + let typ = Ast_helper.Typ.constr ~loc:(mkLoc startPos p.prevEndPos) ~attrs constr args + in + + let typ = parseArrowTypeRest ~es6Arrow:true ~startPos typ p in + let typ = parseTypeAlias p typ in + Some (uncurried, [], Asttypes.Nolabel, typ, startPos) + end + | _ -> + let typ = parseTypExpr p in + let typWithAttributes = {typ with ptyp_attributes = List.concat[attrs; typ.ptyp_attributes]} in + Some (uncurried, [], Asttypes.Nolabel, typWithAttributes, startPos) + ) else + None + + (* (int, ~x:string, float) *) + and parseTypeParameters p = + let startPos = p.Parser.startPos in + Parser.expect Lparen p; + match p.Parser.token with + | Rparen -> + Parser.next p; + let loc = mkLoc startPos p.prevEndPos in + let unitConstr = Location.mkloc (Longident.Lident "unit") loc in + let typ = Ast_helper.Typ.constr unitConstr [] in + [(false, [], Asttypes.Nolabel, typ, startPos)] + | _ -> + let params = + parseCommaDelimitedRegion ~grammar:Grammar.TypeParameters ~closing:Rparen ~f:parseTypeParameter p + in + Parser.expect Rparen p; + params + + and parseEs6ArrowType ~attrs p = + let startPos = p.Parser.startPos in + match p.Parser.token with + | Tilde -> + Parser.next p; + let (name, _loc) = parseLident p in + Parser.expect ~grammar:Grammar.TypeExpression Colon p; + let typ = parseTypExpr ~alias:false ~es6Arrow:false p in + let arg = match p.Parser.token with + | Equal -> + Parser.next p; + Parser.expect Question p; + Asttypes.Optional name + | _ -> + Asttypes.Labelled name + in + Parser.expect EqualGreater p; + let returnType = parseTypExpr ~alias:false p in + Ast_helper.Typ.arrow ~attrs arg typ returnType + | _ -> + let parameters = parseTypeParameters p in + Parser.expect EqualGreater p; + let returnType = parseTypExpr ~alias:false p in + let endPos = p.prevEndPos in + let typ = List.fold_right (fun (uncurried, attrs, argLbl, typ, startPos) t -> + let attrs = if uncurried then uncurryAttr::attrs else attrs in + Ast_helper.Typ.arrow ~loc:(mkLoc startPos endPos) ~attrs argLbl typ t + ) parameters returnType + in + {typ with + ptyp_attributes = List.concat [typ.ptyp_attributes; attrs]; + ptyp_loc = mkLoc startPos p.prevEndPos} + + (* + * typexpr ::= + 
* | 'ident + * | _ + * | (typexpr) + * | typexpr => typexpr --> es6 arrow + * | (typexpr, typexpr) => typexpr --> es6 arrow + * | /typexpr, typexpr, typexpr/ --> tuple + * | typeconstr + * | typeconstr + * | typeconstr + * | typexpr as 'ident + * | %attr-id --> extension + * | %attr-id(payload) --> extension + * + * typeconstr ::= + * | lident + * | uident.lident + * | uident.uident.lident --> long module path + *) + and parseTypExpr ?attrs ?(es6Arrow=true) ?(alias=true) p = + (* Parser.leaveBreadcrumb p Grammar.TypeExpression; *) + let startPos = p.Parser.startPos in + let attrs = match attrs with + | Some attrs -> + attrs + | None -> + parseAttributes p in + let typ = if es6Arrow && isEs6ArrowType p then + parseEs6ArrowType ~attrs p + else + let typ = parseAtomicTypExpr ~attrs p in + parseArrowTypeRest ~es6Arrow ~startPos typ p + in + let typ = if alias then parseTypeAlias p typ else typ in + (* Parser.eatBreadcrumb p; *) + typ + + and parseArrowTypeRest ~es6Arrow ~startPos typ p = + match p.Parser.token with + | (EqualGreater | MinusGreater) as token when es6Arrow == true -> + (* error recovery *) + if token = MinusGreater then ( + Parser.expect EqualGreater p; + ); + Parser.next p; + let returnType = parseTypExpr ~alias:false p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Typ.arrow ~loc Asttypes.Nolabel typ returnType + | _ -> typ + + and parseTypExprRegion p = + if Grammar.isTypExprStart p.Parser.token then + Some (parseTypExpr p) + else + None + + and parseTupleType ~attrs ~first ~startPos p = + let typexprs = + parseCommaDelimitedRegion + ~grammar:Grammar.TypExprList + ~closing:Rparen + ~f:parseTypExprRegion + p + in + Parser.expect Rparen p; + let tupleLoc = mkLoc startPos p.prevEndPos in + Ast_helper.Typ.tuple ~attrs ~loc:tupleLoc (first::typexprs) + + and parseTypeConstructorArgRegion p = + if Grammar.isTypExprStart p.Parser.token then + Some (parseTypExpr p) + else if p.token = LessThan then ( + Parser.next p; + parseTypeConstructorArgRegion p + ) else + None + + (* Js.Nullable.value<'a> *) + and parseTypeConstructorArgs ~constrName p = + let opening = p.Parser.token in + let openingStartPos = p.startPos in + match opening with + | LessThan | Lparen -> + Scanner.setDiamondMode p.scanner; + Parser.next p; + let typeArgs = + (* TODO: change Grammar.TypExprList to TypArgList!!! Why did I wrote this? 
*) + parseCommaDelimitedRegion + ~grammar:Grammar.TypExprList + ~closing:GreaterThan + ~f:parseTypeConstructorArgRegion + p + in + let () = match p.token with + | Rparen when opening = Token.Lparen -> + let typ = Ast_helper.Typ.constr constrName typeArgs in + let msg = + Doc.breakableGroup ~forceBreak:true ( + Doc.concat [ + Doc.text "Type parameters require angle brackets:"; + Doc.indent ( + Doc.concat [ + Doc.line; + Printer.printTypExpr typ CommentTable.empty; + ] + ) + ] + ) |> Doc.toString ~width:80 + in + Parser.err ~startPos:openingStartPos p (Diagnostics.message msg); + Parser.next p + | _ -> + Parser.expect GreaterThan p + in + Scanner.popMode p.scanner Diamond; + typeArgs + | _ -> [] + + (* string-field-decl ::= + * | string: poly-typexpr + * | attributes string-field-decl *) + and parseStringFieldDeclaration p = + let attrs = parseAttributes p in + match p.Parser.token with + | String name -> + let nameStartPos = p.startPos in + let nameEndPos = p.endPos in + Parser.next p; + let fieldName = Location.mkloc name (mkLoc nameStartPos nameEndPos) in + Parser.expect ~grammar:Grammar.TypeExpression Colon p; + let typ = parsePolyTypeExpr p in + Some(Parsetree.Otag (fieldName, attrs, typ)) + | _token -> + None + + (* field-decl ::= + * | [mutable] field-name : poly-typexpr + * | attributes field-decl *) + and parseFieldDeclaration p = + let startPos = p.Parser.startPos in + let attrs = parseAttributes p in + let mut = if Parser.optional p Token.Mutable then + Asttypes.Mutable + else + Asttypes.Immutable + in + let (lident, loc) = match p.token with + | List -> + let loc = mkLoc p.startPos p.endPos in + Parser.next p; + ("list", loc) + | _ -> parseLident p + in + let name = Location.mkloc lident loc in + let typ = match p.Parser.token with + | Colon -> + Parser.next p; + parsePolyTypeExpr p + | _ -> + Ast_helper.Typ.constr ~loc:name.loc {name with txt = Lident name.txt} [] + in + let loc = mkLoc startPos typ.ptyp_loc.loc_end in + Ast_helper.Type.field ~attrs ~loc ~mut name typ + + + and parseFieldDeclarationRegion p = + let startPos = p.Parser.startPos in + let attrs = parseAttributes p in + let mut = if Parser.optional p Token.Mutable then + Asttypes.Mutable + else + Asttypes.Immutable + in + match p.token with + | Lident _ | List -> + let (lident, loc) = match p.token with + | List -> + let loc = mkLoc p.startPos p.endPos in + Parser.next p; + ("list", loc) + | _ -> parseLident p + in + let name = Location.mkloc lident loc in + let typ = match p.Parser.token with + | Colon -> + Parser.next p; + parsePolyTypeExpr p + | _ -> + Ast_helper.Typ.constr ~loc:name.loc {name with txt = Lident name.txt} [] + in + let loc = mkLoc startPos typ.ptyp_loc.loc_end in + Some(Ast_helper.Type.field ~attrs ~loc ~mut name typ) + | _ -> + None + + (* record-decl ::= + * | { field-decl } + * | { field-decl, field-decl } + * | { field-decl, field-decl, field-decl, } + *) + and parseRecordDeclaration p = + Parser.leaveBreadcrumb p Grammar.RecordDecl; + Parser.expect Lbrace p; + let rows = + parseCommaDelimitedRegion + ~grammar:Grammar.RecordDecl + ~closing:Rbrace + ~f:parseFieldDeclarationRegion + p + in + Parser.expect Rbrace p; + Parser.eatBreadcrumb p; + rows + + (* constr-args ::= + * | (typexpr) + * | (typexpr, typexpr) + * | (typexpr, typexpr, typexpr,) + * | (record-decl) + * + * TODO: should we overparse inline-records in every position? + * Give a good error message afterwards? 
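+ * Illustrative payloads this accepts (assumed examples):
+ * Circle(float)
+ * Rect(float, float)
+ * Point({x: float, y: float}) inline record
+ * Box({"width": int, "height": int}) object payload, handled first below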
+ *) + and parseConstrDeclArgs p = + let constrArgs = match p.Parser.token with + | Lparen -> + Parser.next p; + (* TODO: this could use some cleanup/stratification *) + begin match p.Parser.token with + | Lbrace -> + let lbrace = p.startPos in + Parser.next p; + let startPos = p.Parser.startPos in + begin match p.Parser.token with + | DotDot | Dot -> + let closedFlag = match p.token with + | DotDot -> Parser.next p; Asttypes.Open + | Dot -> Parser.next p; Asttypes.Closed + | _ -> Asttypes.Closed + in + let fields = + parseCommaDelimitedRegion + ~grammar:Grammar.StringFieldDeclarations + ~closing:Rbrace + ~f:parseStringFieldDeclaration + p + in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + let typ = makeBsObjType ~attrs:[] ~loc ~closed:closedFlag fields in + Parser.optional p Comma |> ignore; + let moreArgs = + parseCommaDelimitedRegion + ~grammar:Grammar.TypExprList + ~closing:Rparen + ~f:parseTypExprRegion + p + in + Parser.expect Rparen p; + Parsetree.Pcstr_tuple (typ::moreArgs) + | _ -> + let attrs = parseAttributes p in + begin match p.Parser.token with + | String _ -> + let closedFlag = Asttypes.Closed in + let fields = match attrs with + | [] -> + parseCommaDelimitedRegion + ~grammar:Grammar.StringFieldDeclarations + ~closing:Rbrace + ~f:parseStringFieldDeclaration + p + | attrs -> + let first = + Parser.leaveBreadcrumb p Grammar.StringFieldDeclarations; + let field = match parseStringFieldDeclaration p with + | Some field -> field + | None -> assert false + in + (* parse comma after first *) + let () = match p.Parser.token with + | Rbrace | Eof -> () + | Comma -> Parser.next p + | _ -> Parser.expect Comma p + in + Parser.eatBreadcrumb p; + begin match field with + | Parsetree.Otag (label, _, ct) -> Parsetree.Otag (label, attrs, ct) + | Oinherit ct -> Oinherit ct + end + in + first::( + parseCommaDelimitedRegion + ~grammar:Grammar.StringFieldDeclarations + ~closing:Rbrace + ~f:parseStringFieldDeclaration + p + ) in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + let typ = makeBsObjType ~attrs:[] ~loc ~closed:closedFlag fields in + Parser.optional p Comma |> ignore; + let moreArgs = + parseCommaDelimitedRegion + ~grammar:Grammar.TypExprList + ~closing:Rparen + ~f:parseTypExprRegion p + in + Parser.expect Rparen p; + Parsetree.Pcstr_tuple (typ::moreArgs) + | _ -> + let fields = match attrs with + | [] -> + parseCommaDelimitedRegion + ~grammar:Grammar.FieldDeclarations + ~closing:Rbrace + ~f:parseFieldDeclarationRegion + p + | attrs -> + let first = + let field = parseFieldDeclaration p in + Parser.expect Comma p; + {field with Parsetree.pld_attributes = attrs} + in + first::( + parseCommaDelimitedRegion + ~grammar:Grammar.FieldDeclarations + ~closing:Rbrace + ~f:parseFieldDeclarationRegion + p + ) + in + let () = match fields with + | [] -> Parser.err ~startPos:lbrace p ( + Diagnostics.message "An inline record declaration needs at least one field" + ) + | _ -> () + in + Parser.expect Rbrace p; + Parser.optional p Comma |> ignore; + Parser.expect Rparen p; + Parsetree.Pcstr_record fields + end + end + | _ -> + let args = + parseCommaDelimitedRegion + ~grammar:Grammar.TypExprList + ~closing:Rparen + ~f:parseTypExprRegion + p + in + Parser.expect Rparen p; + Parsetree.Pcstr_tuple args + end + | _ -> Pcstr_tuple [] + in + let res = match p.Parser.token with + | Colon -> + Parser.next p; + Some (parseTypExpr p) + | _ -> None + in + (constrArgs, res) + + (* constr-decl ::= + * | constr-name + * | attrs constr-name + * | constr-name const-args 
+ * | attrs constr-name const-args *) + and parseTypeConstructorDeclarationWithBar p = + match p.Parser.token with + | Bar -> + let startPos = p.Parser.startPos in + Parser.next p; + Some (parseTypeConstructorDeclaration ~startPos p) + | _ -> None + + and parseTypeConstructorDeclaration ~startPos p = + Parser.leaveBreadcrumb p Grammar.ConstructorDeclaration; + let attrs = parseAttributes p in + match p.Parser.token with + | Uident uident -> + let uidentLoc = mkLoc p.startPos p.endPos in + Parser.next p; + let (args, res) = parseConstrDeclArgs p in + Parser.eatBreadcrumb p; + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Type.constructor ~loc ~attrs ?res ~args (Location.mkloc uident uidentLoc) + | t -> + Parser.err p (Diagnostics.uident t); + Ast_helper.Type.constructor (Location.mknoloc "_") + + (* [|] constr-decl { | constr-decl } *) + and parseTypeConstructorDeclarations ?first p = + let firstConstrDecl = match first with + | None -> + let startPos = p.Parser.startPos in + ignore (Parser.optional p Token.Bar); + parseTypeConstructorDeclaration ~startPos p + | Some firstConstrDecl -> + firstConstrDecl + in + firstConstrDecl::( + parseRegion + ~grammar:Grammar.ConstructorDeclaration + ~f:parseTypeConstructorDeclarationWithBar + p + ) + + (* + * type-representation ::= + * ∣ = [ | ] constr-decl { | constr-decl } + * ∣ = private [ | ] constr-decl { | constr-decl } + * | = | + * ∣ = private | + * ∣ = record-decl + * ∣ = private record-decl + * | = .. + *) + and parseTypeRepresentation p = + Parser.leaveBreadcrumb p Grammar.TypeRepresentation; + (* = consumed *) + let privateFlag = + if Parser.optional p Token.Private + then Asttypes.Private + else Asttypes.Public + in + let kind = match p.Parser.token with + | Bar | Uident _ -> + Parsetree.Ptype_variant (parseTypeConstructorDeclarations p) + | Lbrace -> + Parsetree.Ptype_record (parseRecordDeclaration p) + | DotDot -> + Parser.next p; + Ptype_open + | token -> + Parser.err p (Diagnostics.unexpected token p.breadcrumbs); + (* TODO: I have no idea if this is even remotely a good idea *) + Parsetree.Ptype_variant [] + in + Parser.eatBreadcrumb p; + (privateFlag, kind) + + (* type-param ::= + * | variance 'lident + * | variance _ + * + * variance ::= + * | + + * | - + * | (* empty *) + *) + and parseTypeParam p = + let variance = match p.Parser.token with + | Plus -> Parser.next p; Asttypes.Covariant + | Minus -> Parser.next p; Contravariant + | _ -> Invariant + in + match p.Parser.token with + | SingleQuote -> + Parser.next p; + let (ident, loc) = parseLident p in + Some (Ast_helper.Typ.var ~loc ident, variance) + | Underscore -> + let loc = mkLoc p.startPos p.endPos in + Parser.next p; + Some (Ast_helper.Typ.any ~loc (), variance) + (* TODO: should we try parsing lident as 'ident ? *) + | _token -> + None + + (* type-params ::= + * | + * ∣ + * ∣ + * ∣ + * + * TODO: when we have pretty-printer show an error + * with the actual code corrected. 
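+ * Illustrative forms (assumed examples): type t<'a>, type result<'good, 'bad>,
+ * type t<+'a> or type t<-'a> with explicit variance, and a trailing comma as
+ * in type t<'a, 'b,>.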
*) + and parseTypeParams ~parent p = + let opening = p.Parser.token in + match opening with + | LessThan | Lparen when p.startPos.pos_lnum == p.prevEndPos.pos_lnum -> + Scanner.setDiamondMode p.scanner; + let openingStartPos = p.startPos in + Parser.leaveBreadcrumb p Grammar.TypeParams; + Parser.next p; + let params = + parseCommaDelimitedRegion + ~grammar:Grammar.TypeParams + ~closing:GreaterThan + ~f:parseTypeParam + p + in + let () = match p.token with + | Rparen when opening = Token.Lparen -> + let msg = + Doc.breakableGroup ~forceBreak:true ( + Doc.concat [ + Doc.text "Type parameters require angle brackets:"; + Doc.indent ( + Doc.concat [ + Doc.line; + Doc.concat [ + Printer.printLongident parent.Location.txt; + Printer.printTypeParams params CommentTable.empty; + ] + ] + ) + ] + ) |> Doc.toString ~width:80 + in + Parser.err ~startPos:openingStartPos p (Diagnostics.message msg); + Parser.next p + | _ -> + Parser.expect GreaterThan p + in + Scanner.popMode p.scanner Diamond; + Parser.eatBreadcrumb p; + params + | _ -> [] + + (* type-constraint ::= constraint ' ident = typexpr *) + and parseTypeConstraint p = + let startPos = p.Parser.startPos in + match p.Parser.token with + | Token.Constraint -> + Parser.next p; + Parser.expect SingleQuote p; + begin match p.Parser.token with + | Lident ident -> + let identLoc = mkLoc startPos p.endPos in + Parser.next p; + Parser.expect Equal p; + let typ = parseTypExpr p in + let loc = mkLoc startPos p.prevEndPos in + Some (Ast_helper.Typ.var ~loc:identLoc ident, typ, loc) + | t -> + Parser.err p (Diagnostics.lident t); + let loc = mkLoc startPos p.prevEndPos in + Some (Ast_helper.Typ.any (), parseTypExpr p, loc) + end + | _ -> None + + (* type-constraints ::= + * | (* empty *) + * | type-constraint + * | type-constraint type-constraint + * | type-constraint type-constraint type-constraint (* 0 or more *) + *) + and parseTypeConstraints p = + parseRegion + ~grammar:Grammar.TypeConstraint + ~f:parseTypeConstraint + p + + and parseTypeEquationOrConstrDecl p = + let uidentStartPos = p.Parser.startPos in + match p.Parser.token with + | Uident uident -> + Parser.next p; + begin match p.Parser.token with + | Dot -> + Parser.next p; + let typeConstr = + parseValuePathTail p uidentStartPos (Longident.Lident uident) + in + let loc = mkLoc uidentStartPos p.prevEndPos in + let typ = parseTypeAlias p ( + Ast_helper.Typ.constr ~loc typeConstr (parseTypeConstructorArgs ~constrName:typeConstr p) + ) in + begin match p.token with + | Equal -> + Parser.next p; + let (priv, kind) = parseTypeRepresentation p in + (Some typ, priv, kind) + | EqualGreater -> + Parser.next p; + let returnType = parseTypExpr ~alias:false p in + let loc = mkLoc uidentStartPos p.prevEndPos in + let arrowType = Ast_helper.Typ.arrow ~loc Asttypes.Nolabel typ returnType in + let typ = parseTypeAlias p arrowType in + (Some typ, Asttypes.Public, Parsetree.Ptype_abstract) + | _ -> (Some typ, Asttypes.Public, Parsetree.Ptype_abstract) + end + | _ -> + let uidentEndPos = p.endPos in + let (args, res) = parseConstrDeclArgs p in + let first = Some ( + let uidentLoc = mkLoc uidentStartPos uidentEndPos in + Ast_helper.Type.constructor + ~loc:(mkLoc uidentStartPos p.prevEndPos) + ?res + ~args + (Location.mkloc uident uidentLoc) + ) in + (None, Asttypes.Public, Parsetree.Ptype_variant (parseTypeConstructorDeclarations p ?first)) + end + | t -> + Parser.err p (Diagnostics.uident t); + (* TODO: is this a good idea? 
*) + (None, Asttypes.Public, Parsetree.Ptype_abstract) + + and parseRecordOrBsObjectDecl p = + let startPos = p.Parser.startPos in + Parser.expect Lbrace p; + match p.Parser.token with + | DotDot | Dot -> + let closedFlag = match p.token with + | DotDot -> Parser.next p; Asttypes.Open + | Dot -> Parser.next p; Asttypes.Closed + | _ -> Asttypes.Closed + in + let fields = + parseCommaDelimitedRegion + ~grammar:Grammar.StringFieldDeclarations + ~closing:Rbrace + ~f:parseStringFieldDeclaration + p + in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + let typ = + makeBsObjType ~attrs:[] ~loc ~closed:closedFlag fields + |> parseTypeAlias p + in + let typ = parseArrowTypeRest ~es6Arrow:true ~startPos typ p in + (Some typ, Asttypes.Public, Parsetree.Ptype_abstract) + | _ -> + let attrs = parseAttributes p in + begin match p.Parser.token with + | String _ -> + let closedFlag = Asttypes.Closed in + let fields = match attrs with + | [] -> + parseCommaDelimitedRegion + ~grammar:Grammar.StringFieldDeclarations + ~closing:Rbrace + ~f:parseStringFieldDeclaration + p + | attrs -> + let first = + Parser.leaveBreadcrumb p Grammar.StringFieldDeclarations; + let field = match parseStringFieldDeclaration p with + | Some field -> field + | None -> assert false + in + (* parse comma after first *) + let () = match p.Parser.token with + | Rbrace | Eof -> () + | Comma -> Parser.next p + | _ -> Parser.expect Comma p + in + Parser.eatBreadcrumb p; + begin match field with + | Parsetree.Otag (label, _, ct) -> Parsetree.Otag (label, attrs, ct) + | Oinherit ct -> Oinherit ct + end + in + first::( + parseCommaDelimitedRegion + ~grammar:Grammar.StringFieldDeclarations + ~closing:Rbrace + ~f:parseStringFieldDeclaration + p + ) + in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + let typ = + makeBsObjType ~attrs:[] ~loc ~closed:closedFlag fields |> parseTypeAlias p + in + let typ = parseArrowTypeRest ~es6Arrow:true ~startPos typ p in + (Some typ, Asttypes.Public, Parsetree.Ptype_abstract) + | _ -> + Parser.leaveBreadcrumb p Grammar.RecordDecl; + let fields = match attrs with + | [] -> + parseCommaDelimitedRegion + ~grammar:Grammar.FieldDeclarations + ~closing:Rbrace + ~f:parseFieldDeclarationRegion + p + | attr::_ as attrs -> + let first = + let field = parseFieldDeclaration p in + Parser.optional p Comma |> ignore; + {field with + Parsetree.pld_attributes = attrs; + pld_loc = { + field.Parsetree.pld_loc with loc_start = + (attr |> fst).loc.loc_start + } + } + in + first::( + parseCommaDelimitedRegion + ~grammar:Grammar.FieldDeclarations + ~closing:Rbrace + ~f:parseFieldDeclarationRegion + p + ) + in + let () = match fields with + | [] -> Parser.err ~startPos p ( + Diagnostics.message "A record needs at least one field" + ) + | _ -> () + in + Parser.expect Rbrace p; + Parser.eatBreadcrumb p; + (None, Asttypes.Public, Parsetree.Ptype_record fields) + end + + and parsePrivateEqOrRepr p = + Parser.expect Private p; + match p.Parser.token with + | Lbrace -> + let (manifest, _ ,kind) = parseRecordOrBsObjectDecl p in + (manifest, Asttypes.Private, kind) + | Uident _ -> + let (manifest, _, kind) = parseTypeEquationOrConstrDecl p in + (manifest, Asttypes.Private, kind) + | Bar | DotDot -> + let (_, kind) = parseTypeRepresentation p in + (None, Asttypes.Private, kind) + | t when Grammar.isTypExprStart t -> + (Some (parseTypExpr p), Asttypes.Private, Parsetree.Ptype_abstract) + | _ -> + let (_, kind) = parseTypeRepresentation p in + (None, Asttypes.Private, kind) + + (* + 
polymorphic-variant-type ::= + | [ tag-spec-first { | tag-spec } ] + | [> [ tag-spec ] { | tag-spec } ] + | [< [|] tag-spec-full { | tag-spec-full } [ > { `tag-name }+ ] ] + + tag-spec-first ::= `tag-name [ of typexpr ] + | [ typexpr ] | tag-spec + + tag-spec ::= `tag-name [ of typexpr ] + | typexpr + + tag-spec-full ::= `tag-name [ of [&] typexpr { & typexpr } ] + | typexpr + *) + and parsePolymorphicVariantType ~attrs p = + let startPos = p.Parser.startPos in + Parser.expect Lbracket p; + match p.token with + | GreaterThan -> + Parser.next p; + let rowFields = + begin match p.token with + | Rbracket -> + [] + | Bar -> + parseTagSpecs p + | _ -> + let rowField = parseTagSpec p in + rowField :: parseTagSpecs p + end + in + let variant = + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Typ.variant ~attrs ~loc rowFields Open None in + Parser.expect Rbracket p; + variant + | LessThan -> + Parser.next p; + Parser.optional p Bar |> ignore; + let rowField = parseTagSpecFull p in + let rowFields = parseTagSpecFulls p in + let tagNames = + if p.token == GreaterThan + then begin + Parser.next p; + let rec loop p = match p.Parser.token with + | Rbracket -> [] + | _ -> + let (ident, _loc) = parseHashIdent ~startPos:p.startPos p in + ident :: loop p + in + loop p + end + else [] in + let variant = + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Typ.variant ~attrs ~loc (rowField :: rowFields) Closed (Some tagNames) in + Parser.expect Rbracket p; + variant + | _ -> + let rowFields1 = parseTagSpecFirst p in + let rowFields2 = parseTagSpecs p in + let variant = + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Typ.variant ~attrs ~loc (rowFields1 @ rowFields2) Closed None in + Parser.expect Rbracket p; + variant + + and parseTagSpecFulls p = + match p.Parser.token with + | Rbracket -> + [] + | GreaterThan -> + [] + | Bar -> + Parser.next p; + let rowField = parseTagSpecFull p in + rowField ::parseTagSpecFulls p + | _ -> + [] + + and parseTagSpecFull p = + let attrs = parseAttributes p in + match p.Parser.token with + | Hash -> + parsePolymorphicVariantTypeSpecHash ~attrs ~full:true p + | _ -> + let typ = parseTypExpr ~attrs p in + Parsetree.Rinherit typ + + and parseTagSpecs p = + match p.Parser.token with + | Bar -> + Parser.next p; + let rowField = parseTagSpec p in + rowField :: parseTagSpecs p + | _ -> + [] + + and parseTagSpec p = + let attrs = parseAttributes p in + match p.Parser.token with + | Hash -> + parsePolymorphicVariantTypeSpecHash ~attrs ~full:false p + | _ -> + let typ = parseTypExpr ~attrs p in + Parsetree.Rinherit typ + + and parseTagSpecFirst p = + let attrs = parseAttributes p in + match p.Parser.token with + | Bar -> + Parser.next p; + [parseTagSpec p] + | Hash -> + [parsePolymorphicVariantTypeSpecHash ~attrs ~full:false p] + | _ -> + let typ = parseTypExpr ~attrs p in + Parser.expect Bar p; + [Parsetree.Rinherit typ; parseTagSpec p] + + and parsePolymorphicVariantTypeSpecHash ~attrs ~full p : Parsetree.row_field = + let startPos = p.Parser.startPos in + let (ident, loc) = parseHashIdent ~startPos p in + let rec loop p = + match p.Parser.token with + | Band when full -> + Parser.next p; + let rowField = parsePolymorphicVariantTypeArgs p in + rowField :: loop p + | _ -> + [] + in + let firstTuple, tagContainsAConstantEmptyConstructor = + match p.Parser.token with + | Band when full -> + Parser.next p; + [parsePolymorphicVariantTypeArgs p], true + | Lparen -> + [parsePolymorphicVariantTypeArgs p], false + | _ -> + [], true + in + let tuples = firstTuple @ loop p in 
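+ (* A single tag spec has been consumed at this point. Roughly, the surface
+ * forms look like #Yes or #Text(string), plus, when `full` is set (the
+ * [< ...] case), further argument groups joined by &. The
+ * tagContainsAConstantEmptyConstructor flag records whether the tag may
+ * also occur without a payload. *)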
+ Parsetree.Rtag ( + Location.mkloc ident loc, + attrs, + tagContainsAConstantEmptyConstructor, + tuples + ) + + and parsePolymorphicVariantTypeArgs p = + let startPos = p.Parser.startPos in + Parser.expect Lparen p; + let args = parseCommaDelimitedRegion + ~grammar:Grammar.TypExprList + ~closing:Rparen + ~f:parseTypExprRegion + p + in + Parser.expect Rparen p; + let attrs = [] in + let loc = mkLoc startPos p.prevEndPos in + match args with + | [{ptyp_desc = Ptyp_tuple _} as typ] as types -> + if p.mode = ParseForTypeChecker then + typ + else + Ast_helper.Typ.tuple ~loc ~attrs types + | [typ] -> typ + | types -> Ast_helper.Typ.tuple ~loc ~attrs types + + and parseTypeEquationAndRepresentation p = + match p.Parser.token with + | Equal | Bar as token -> + if token = Bar then Parser.expect Equal p; + Parser.next p; + begin match p.Parser.token with + | Uident _ -> + parseTypeEquationOrConstrDecl p + | Lbrace -> + parseRecordOrBsObjectDecl p + | Private -> + parsePrivateEqOrRepr p + | Bar | DotDot -> + let (priv, kind) = parseTypeRepresentation p in + (None, priv, kind) + | _ -> + let manifest = Some (parseTypExpr p) in + begin match p.Parser.token with + | Equal -> + Parser.next p; + let (priv, kind) = parseTypeRepresentation p in + (manifest, priv, kind) + | _ -> + (manifest, Public, Parsetree.Ptype_abstract) + end + end + | _ -> (None, Public, Parsetree.Ptype_abstract) + + (* type-definition ::= type [rec] typedef { and typedef } + * typedef ::= typeconstr-name [type-params] type-information + * type-information ::= [type-equation] [type-representation] { type-constraint } + * type-equation ::= = typexpr *) + and parseTypeDef ~attrs ~startPos p = + Parser.leaveBreadcrumb p Grammar.TypeDef; + (* let attrs = match attrs with | Some attrs -> attrs | None -> parseAttributes p in *) + Parser.leaveBreadcrumb p Grammar.TypeConstrName; + let (name, loc) = parseLident p in + let typeConstrName = Location.mkloc name loc in + Parser.eatBreadcrumb p; + let params = + let constrName = Location.mkloc (Longident.Lident name) loc in + parseTypeParams ~parent:constrName p in + let typeDef = + let (manifest, priv, kind) = parseTypeEquationAndRepresentation p in + let cstrs = parseTypeConstraints p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Type.mk + ~loc ~attrs ~priv ~kind ~params ~cstrs ?manifest typeConstrName + in + Parser.eatBreadcrumb p; + typeDef + + and parseTypeExtension ~params ~attrs ~name p = + Parser.expect PlusEqual p; + let priv = + if Parser.optional p Token.Private + then Asttypes.Private + else Asttypes.Public + in + let constrStart = p.Parser.startPos in + Parser.optional p Bar |> ignore; + let first = + let (attrs, name, kind) = match p.Parser.token with + | Bar -> + Parser.next p; + parseConstrDef ~parseAttrs:true p + | _ -> + parseConstrDef ~parseAttrs:true p + in + let loc = mkLoc constrStart p.prevEndPos in + Ast_helper.Te.constructor ~loc ~attrs name kind + in + let rec loop p cs = + match p.Parser.token with + | Bar -> + let startPos = p.Parser.startPos in + Parser.next p; + let (attrs, name, kind) = parseConstrDef ~parseAttrs:true p in + let extConstr = + Ast_helper.Te.constructor ~attrs ~loc:(mkLoc startPos p.prevEndPos) name kind + in + loop p (extConstr::cs) + | _ -> + List.rev cs + in + let constructors = loop p [first] in + Ast_helper.Te.mk ~attrs ~params ~priv name constructors + + and parseTypeDefinitions ~attrs ~name ~params ~startPos p = + let typeDef = + let (manifest, priv, kind) = parseTypeEquationAndRepresentation p in + let cstrs = parseTypeConstraints 
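+ (* Constraints attached to the definition, for instance (assumed example):
+ * type t<'a> = array<'a> constraint 'a = int *)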
p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Type.mk + ~loc ~attrs ~priv ~kind ~params ~cstrs ?manifest + {name with txt = lidentOfPath name.Location.txt} + in + let rec loop p defs = + let startPos = p.Parser.startPos in + let attrs = parseAttributesAndBinding p in + match p.Parser.token with + | And -> + Parser.next p; + let attrs = match p.token with + | Export -> + let exportLoc = mkLoc p.startPos p.endPos in + Parser.next p; + let genTypeAttr = (Location.mkloc "genType" exportLoc, Parsetree.PStr []) in + genTypeAttr::attrs + | _ -> attrs + in + let typeDef = parseTypeDef ~attrs ~startPos p in + loop p (typeDef::defs) + | _ -> + List.rev defs + in + loop p [typeDef] + + (* TODO: decide if we really want type extensions (eg. type x += Blue) + * It adds quite a bit of complexity that can be avoided, + * implemented for now. Needed to get a feel for the complexities of + * this territory of the grammar *) + and parseTypeDefinitionOrExtension ~attrs p = + let startPos = p.Parser.startPos in + Parser.expect Token.Typ p; + let recFlag = match p.token with + | Rec -> Parser.next p; Asttypes.Recursive + | Lident "nonrec" -> + Parser.next p; + Asttypes.Nonrecursive + | _ -> Asttypes.Nonrecursive + in + let name = parseValuePath p in + let params = parseTypeParams ~parent:name p in + match p.Parser.token with + | PlusEqual -> + TypeExt(parseTypeExtension ~params ~attrs ~name p) + | _ -> + let typeDefs = parseTypeDefinitions ~attrs ~name ~params ~startPos p in + TypeDef {recFlag; types = typeDefs} + + and parsePrimitive p = + match p.Parser.token with + | String s -> Parser.next p; Some s + | _ -> None + + and parsePrimitives p = + match (parseRegion ~grammar:Grammar.Primitive ~f:parsePrimitive p) with + | [] -> + let msg = "An external definition should have at least one primitive. 
Example: \"setTimeout\"" in + Parser.err p (Diagnostics.message msg); + [] + | primitives -> primitives + + (* external value-name : typexp = external-declaration *) + and parseExternalDef ~attrs p = + let startPos = p.Parser.startPos in + Parser.leaveBreadcrumb p Grammar.External; + Parser.expect Token.External p; + let (name, loc) = parseLident p in + let name = Location.mkloc name loc in + Parser.expect ~grammar:(Grammar.TypeExpression) Colon p; + let typExpr = parseTypExpr p in + Parser.expect Equal p; + let prim = parsePrimitives p in + let loc = mkLoc startPos p.prevEndPos in + let vb = Ast_helper.Val.mk ~loc ~attrs ~prim name typExpr in + Parser.eatBreadcrumb p; + vb + + (* constr-def ::= + * | constr-decl + * | constr-name = constr + * + * constr-decl ::= constr-name constr-args + * constr-name ::= uident + * constr ::= path-uident *) + and parseConstrDef ~parseAttrs p = + let attrs = if parseAttrs then parseAttributes p else [] in + let name = match p.Parser.token with + | Uident name -> + let loc = mkLoc p.startPos p.endPos in + Parser.next p; + Location.mkloc name loc + | t -> + Parser.err p (Diagnostics.uident t); + Location.mknoloc "_" + in + let kind = match p.Parser.token with + | Lparen -> + let (args, res) = parseConstrDeclArgs p in + Parsetree.Pext_decl (args, res) + | Equal -> + Parser.next p; + let longident = parseModuleLongIdent ~lowercase:false p in + Parsetree.Pext_rebind longident + | _ -> + Parsetree.Pext_decl (Pcstr_tuple [], None) + in + (attrs, name, kind) + + (* + * exception-definition ::= + * | exception constr-decl + * ∣ exception constr-name = constr + * + * constr-name ::= uident + * constr ::= long_uident *) + and parseExceptionDef ~attrs p = + let startPos = p.Parser.startPos in + Parser.expect Token.Exception p; + let (_, name, kind) = parseConstrDef ~parseAttrs:false p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Te.constructor ~loc ~attrs name kind + + (* module structure on the file level *) + and parseImplementation p : Parsetree.structure = + parseRegion p ~grammar:Grammar.Implementation ~f:parseStructureItemRegion + [@@progress (Parser.next, Parser.expect, Parser.checkProgress)] + + and parseStructureItemRegion p = + let startPos = p.Parser.startPos in + let attrs = parseAttributes p in + match p.Parser.token with + | Open -> + let openDescription = parseOpenDescription ~attrs p in + Parser.optional p Semicolon |> ignore; + let loc = mkLoc startPos p.prevEndPos in + Some (Ast_helper.Str.open_ ~loc openDescription) + | Let -> + let (recFlag, letBindings) = parseLetBindings ~attrs p in + Parser.optional p Semicolon |> ignore; + let loc = mkLoc startPos p.prevEndPos in + Some (Ast_helper.Str.value ~loc recFlag letBindings) + | Typ -> + Parser.beginRegion p; + begin match parseTypeDefinitionOrExtension ~attrs p with + | TypeDef {recFlag; types} -> + Parser.optional p Semicolon |> ignore; + let loc = mkLoc startPos p.prevEndPos in + Parser.endRegion p; + Some (Ast_helper.Str.type_ ~loc recFlag types) + | TypeExt(ext) -> + Parser.optional p Semicolon |> ignore; + let loc = mkLoc startPos p.prevEndPos in + Parser.endRegion p; + Some (Ast_helper.Str.type_extension ~loc ext) + end + | External -> + let externalDef = parseExternalDef ~attrs p in + Parser.optional p Semicolon |> ignore; + let loc = mkLoc startPos p.prevEndPos in + Some (Ast_helper.Str.primitive ~loc externalDef) + | Import -> + let importDescr = parseJsImport ~startPos ~attrs p in + Parser.optional p Semicolon |> ignore; + let loc = mkLoc startPos p.prevEndPos in + let 
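+ (* The experimental import item handled in this branch looks roughly like
+ * import sqrt: float => float from "./math"
+ * and is turned into a regular parsetree item by JsFfi right below. Other
+ * branches of this match accept items such as (illustrative)
+ * external setTimeout: (unit => unit, int) => float = "setTimeout"
+ * exception InputClosed(string) *)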
structureItem = JsFfi.toParsetree importDescr in + Some {structureItem with pstr_loc = loc} + | Exception -> + let exceptionDef = parseExceptionDef ~attrs p in + Parser.optional p Semicolon |> ignore; + let loc = mkLoc startPos p.prevEndPos in + Some (Ast_helper.Str.exception_ ~loc exceptionDef) + | Include -> + let includeStatement = parseIncludeStatement ~attrs p in + Parser.optional p Semicolon |> ignore; + let loc = mkLoc startPos p.prevEndPos in + Some (Ast_helper.Str.include_ ~loc includeStatement) + | Export -> + let structureItem = parseJsExport ~attrs p in + Parser.optional p Semicolon |> ignore; + let loc = mkLoc startPos p.prevEndPos in + Some {structureItem with pstr_loc = loc} + | Module -> + let structureItem = parseModuleOrModuleTypeImplOrPackExpr ~attrs p in + Parser.optional p Semicolon |> ignore; + let loc = mkLoc startPos p.prevEndPos in + Some {structureItem with pstr_loc = loc} + | AtAt -> + let attr = parseStandaloneAttribute p in + Parser.optional p Semicolon |> ignore; + let loc = mkLoc startPos p.prevEndPos in + Some (Ast_helper.Str.attribute ~loc attr) + | PercentPercent -> + let extension = parseExtension ~moduleLanguage:true p in + Parser.optional p Semicolon |> ignore; + let loc = mkLoc startPos p.prevEndPos in + Some (Ast_helper.Str.extension ~attrs ~loc extension) + | token when Grammar.isExprStart token -> + let prevEndPos = p.Parser.endPos in + let exp = parseExpr p in + Parser.optional p Semicolon |> ignore; + let loc = mkLoc startPos p.prevEndPos in + Parser.checkProgress ~prevEndPos ~result:(Ast_helper.Str.eval ~loc ~attrs exp) p + | _ -> None + + and parseJsImport ~startPos ~attrs p = + Parser.expect Token.Import p; + let importSpec = match p.Parser.token with + | Token.Lident _ | Token.At -> + let decl = match parseJsFfiDeclaration p with + | Some decl -> decl + | None -> assert false + in + JsFfi.Default decl + | _ -> JsFfi.Spec(parseJsFfiDeclarations p) + in + let scope = parseJsFfiScope p in + let loc = mkLoc startPos p.prevEndPos in + JsFfi.importDescr ~attrs ~importSpec ~scope ~loc + + and parseJsExport ~attrs p = + let exportStart = p.Parser.startPos in + Parser.expect Token.Export p; + let exportLoc = mkLoc exportStart p.prevEndPos in + let genTypeAttr = (Location.mkloc "genType" exportLoc, Parsetree.PStr []) in + let attrs = genTypeAttr::attrs in + match p.Parser.token with + | Typ -> + begin match parseTypeDefinitionOrExtension ~attrs p with + | TypeDef {recFlag; types} -> + Ast_helper.Str.type_ recFlag types + | TypeExt(ext) -> + Ast_helper.Str.type_extension ext + end + | (* Let *) _ -> + let (recFlag, letBindings) = parseLetBindings ~attrs p in + Ast_helper.Str.value recFlag letBindings + + and parseJsFfiScope p = + match p.Parser.token with + | Token.Lident "from" -> + Parser.next p; + begin match p.token with + | String s -> Parser.next p; JsFfi.Module s + | Uident _ | Lident _ -> + let value = parseIdentPath p in + JsFfi.Scope value + | _ -> JsFfi.Global + end + | _ -> JsFfi.Global + + and parseJsFfiDeclarations p = + Parser.expect Token.Lbrace p; + let decls = parseCommaDelimitedRegion + ~grammar:Grammar.JsFfiImport + ~closing:Rbrace + ~f:parseJsFfiDeclaration + p + in + Parser.expect Rbrace p; + decls + + and parseJsFfiDeclaration p = + let startPos = p.Parser.startPos in + let attrs = parseAttributes p in + match p.Parser.token with + | Lident _ -> + let (ident, _) = parseLident p in + let alias = match p.token with + | As -> + Parser.next p; + let (ident, _) = parseLident p in + ident + | _ -> + ident + in + Parser.expect Token.Colon 
p; + let typ = parseTypExpr p in + let loc = mkLoc startPos p.prevEndPos in + Some (JsFfi.decl ~loc ~alias ~attrs ~name:ident ~typ) + | _ -> None + + (* include-statement ::= include module-expr *) + and parseIncludeStatement ~attrs p = + let startPos = p.Parser.startPos in + Parser.expect Token.Include p; + let modExpr = parseModuleExpr p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Incl.mk ~loc ~attrs modExpr + + and parseAtomicModuleExpr p = + let startPos = p.Parser.startPos in + match p.Parser.token with + | Uident _ident -> + let longident = parseModuleLongIdent ~lowercase:false p in + Ast_helper.Mod.ident ~loc:longident.loc longident + | Lbrace -> + Parser.next p; + let structure = Ast_helper.Mod.structure ( + parseDelimitedRegion + ~grammar:Grammar.Structure + ~closing:Rbrace + ~f:parseStructureItemRegion + p + ) in + Parser.expect Rbrace p; + let endPos = p.prevEndPos in + {structure with pmod_loc = mkLoc startPos endPos} + | Lparen -> + Parser.next p; + let modExpr = match p.token with + | Rparen -> + Ast_helper.Mod.structure ~loc:(mkLoc startPos p.prevEndPos) [] + | _ -> + parseConstrainedModExpr p + in + Parser.expect Rparen p; + modExpr + | Lident "unpack" -> (* TODO: should this be made a keyword?? *) + Parser.next p; + Parser.expect Lparen p; + let expr = parseExpr p in + begin match p.Parser.token with + | Colon -> + let colonStart = p.Parser.startPos in + Parser.next p; + let attrs = parseAttributes p in + let packageType = parsePackageType ~startPos:colonStart ~attrs p in + Parser.expect Rparen p; + let loc = mkLoc startPos p.prevEndPos in + let constraintExpr = Ast_helper.Exp.constraint_ + ~loc + expr packageType + in + Ast_helper.Mod.unpack ~loc constraintExpr + | _ -> + Parser.expect Rparen p; + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Mod.unpack ~loc expr + end + | Percent -> + let extension = parseExtension p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Mod.extension ~loc extension + | token -> + Parser.err p (Diagnostics.unexpected token p.breadcrumbs); + Recover.defaultModuleExpr() + + and parsePrimaryModExpr p = + let startPos = p.Parser.startPos in + let modExpr = parseAtomicModuleExpr p in + let rec loop p modExpr = + match p.Parser.token with + | Lparen when p.prevEndPos.pos_lnum == p.startPos.pos_lnum -> + loop p (parseModuleApplication p modExpr) + | _ -> modExpr + in + let modExpr = loop p modExpr in + {modExpr with pmod_loc = mkLoc startPos p.prevEndPos} + + (* + * functor-arg ::= + * | uident : modtype + * | _ : modtype + * | modtype --> "punning" for _ : modtype + * | attributes functor-arg + *) + and parseFunctorArg p = + let startPos = p.Parser.startPos in + let attrs = parseAttributes p in + match p.Parser.token with + | Uident ident -> + Parser.next p; + let uidentEndPos = p.prevEndPos in + begin match p.Parser.token with + | Colon -> + Parser.next p; + let moduleType = parseModuleType p in + let loc = mkLoc startPos uidentEndPos in + let argName = Location.mkloc ident loc in + Some (attrs, argName, Some moduleType, startPos) + | Dot -> + Parser.next p; + let moduleType = + let moduleLongIdent = + parseModuleLongIdentTail ~lowercase:false p startPos (Longident.Lident ident) in + Ast_helper.Mty.ident ~loc:moduleLongIdent.loc moduleLongIdent + in + let argName = Location.mknoloc "_" in + Some (attrs, argName, Some moduleType, startPos) + | _ -> + let loc = mkLoc startPos uidentEndPos in + let modIdent = Location.mkloc (Longident.Lident ident) loc in + let moduleType = Ast_helper.Mty.ident ~loc modIdent in + let 
argName = Location.mknoloc "_" in + Some (attrs, argName, Some moduleType, startPos) + end + | Underscore -> + Parser.next p; + let argName = Location.mkloc "_" (mkLoc startPos p.prevEndPos) in + Parser.expect Colon p; + let moduleType = parseModuleType p in + Some (attrs, argName, Some moduleType, startPos) + | _ -> + None + + and parseFunctorArgs p = + let startPos = p.Parser.startPos in + Parser.expect Lparen p; + let args = + parseCommaDelimitedRegion + ~grammar:Grammar.FunctorArgs + ~closing:Rparen + ~f:parseFunctorArg + p + in + Parser.expect Rparen p; + match args with + | [] -> + [[], Location.mkloc "*" (mkLoc startPos p.prevEndPos), None, startPos] + | args -> args + + and parseFunctorModuleExpr p = + let startPos = p.Parser.startPos in + let args = parseFunctorArgs p in + let returnType = match p.Parser.token with + | Colon -> + Parser.next p; + Some (parseModuleType ~es6Arrow:false p) + | _ -> None + in + Parser.expect EqualGreater p; + let rhsModuleExpr = + let modExpr = parseModuleExpr p in + match returnType with + | Some modType -> + Ast_helper.Mod.constraint_ + ~loc:(mkLoc modExpr.pmod_loc.loc_start modType.Parsetree.pmty_loc.loc_end) + modExpr modType + | None -> modExpr + in + let endPos = p.prevEndPos in + let modExpr = List.fold_right (fun (attrs, name, moduleType, startPos) acc -> + Ast_helper.Mod.functor_ + ~loc:(mkLoc startPos endPos) + ~attrs + name moduleType acc + ) args rhsModuleExpr + in + {modExpr with pmod_loc = mkLoc startPos endPos} + + (* module-expr ::= + * | module-path + * ∣ { structure-items } + * ∣ functorArgs => module-expr + * ∣ module-expr(module-expr) + * ∣ ( module-expr ) + * ∣ ( module-expr : module-type ) + * | extension + * | attributes module-expr *) + and parseModuleExpr p = + let attrs = parseAttributes p in + let modExpr = if isEs6ArrowFunctor p then + parseFunctorModuleExpr p + else + parsePrimaryModExpr p + in + {modExpr with pmod_attributes = List.concat [modExpr.pmod_attributes; attrs]} + + and parseConstrainedModExpr p = + let modExpr = parseModuleExpr p in + match p.Parser.token with + | Colon -> + Parser.next p; + let modType = parseModuleType p in + let loc = mkLoc modExpr.pmod_loc.loc_start modType.pmty_loc.loc_end in + Ast_helper.Mod.constraint_ ~loc modExpr modType + | _ -> modExpr + + and parseConstrainedModExprRegion p = + if Grammar.isModExprStart p.Parser.token then + Some (parseConstrainedModExpr p) + else + None + + and parseModuleApplication p modExpr = + let startPos = p.Parser.startPos in + Parser.expect Lparen p; + let args = + parseCommaDelimitedRegion + ~grammar:Grammar.ModExprList + ~closing:Rparen + ~f:parseConstrainedModExprRegion + p + in + Parser.expect Rparen p; + let args = match args with + | [] -> + let loc = mkLoc startPos p.prevEndPos in + [Ast_helper.Mod.structure ~loc []] + | args -> args + in + List.fold_left (fun modExpr arg -> + Ast_helper.Mod.apply + ~loc:(mkLoc modExpr.Parsetree.pmod_loc.loc_start arg.Parsetree.pmod_loc.loc_end) + modExpr arg + ) modExpr args + + and parseModuleOrModuleTypeImplOrPackExpr ~attrs p = + let startPos = p.Parser.startPos in + Parser.expect Module p; + match p.Parser.token with + | Typ -> parseModuleTypeImpl ~attrs startPos p + | Lparen -> + let expr = parseFirstClassModuleExpr ~startPos p in + Ast_helper.Str.eval ~attrs expr + | _ -> parseMaybeRecModuleBinding ~attrs ~startPos p + + and parseModuleTypeImpl ~attrs startPos p = + Parser.expect Typ p; + let nameStart = p.Parser.startPos in + let name = match p.Parser.token with + | List -> + Parser.next p; + let loc = mkLoc 
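+ (* Module type definitions on the structure level, e.g. (assumed example):
+ * module type Comparable = {
+ * type t
+ * let compare: (t, t) => int
+ * } *)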
nameStart p.prevEndPos in + Location.mkloc "list" loc + | Lident ident -> + Parser.next p; + let loc = mkLoc nameStart p.prevEndPos in + Location.mkloc ident loc + | Uident ident -> + Parser.next p; + let loc = mkLoc nameStart p.prevEndPos in + Location.mkloc ident loc + | t -> + Parser.err p (Diagnostics.uident t); + Location.mknoloc "_" + in + Parser.expect Equal p; + let moduleType = parseModuleType p in + let moduleTypeDeclaration = + Ast_helper.Mtd.mk + ~attrs + ~loc:(mkLoc nameStart p.prevEndPos) + ~typ:moduleType + name + in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Str.modtype ~loc moduleTypeDeclaration + + (* definition ::= + ∣ module rec module-name : module-type = module-expr { and module-name + : module-type = module-expr } *) + and parseMaybeRecModuleBinding ~attrs ~startPos p = + match p.Parser.token with + | Token.Rec -> + Parser.next p; + Ast_helper.Str.rec_module (parseModuleBindings ~startPos ~attrs p) + | _ -> + Ast_helper.Str.module_ (parseModuleBinding ~attrs ~startPos:p.Parser.startPos p) + + and parseModuleBinding ~attrs ~startPos p = + let name = match p.Parser.token with + | Uident ident -> + let startPos = p.Parser.startPos in + Parser.next p; + let loc = mkLoc startPos p.prevEndPos in + Location.mkloc ident loc + | t -> + Parser.err p (Diagnostics.uident t); + Location.mknoloc "_" + in + let body = parseModuleBindingBody p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Mb.mk ~attrs ~loc name body + + and parseModuleBindingBody p = + (* TODO: make required with good error message when rec module binding *) + let returnModType = match p.Parser.token with + | Colon -> + Parser.next p; + Some (parseModuleType p) + | _ -> None + in + Parser.expect Equal p; + let modExpr = parseModuleExpr p in + match returnModType with + | Some modType -> + Ast_helper.Mod.constraint_ + ~loc:(mkLoc modType.pmty_loc.loc_start modExpr.pmod_loc.loc_end) + modExpr modType + | None -> modExpr + + + (* module-name : module-type = module-expr + * { and module-name : module-type = module-expr } *) + and parseModuleBindings ~attrs ~startPos p = + let rec loop p acc = + let startPos = p.Parser.startPos in + let attrs = parseAttributesAndBinding p in + match p.Parser.token with + | And -> + Parser.next p; + ignore(Parser.optional p Module); (* over-parse for fault-tolerance *) + let modBinding = parseModuleBinding ~attrs ~startPos p in + loop p (modBinding::acc) + | _ -> List.rev acc + in + let first = parseModuleBinding ~attrs ~startPos p in + loop p [first] + + and parseAtomicModuleType p = + let startPos = p.Parser.startPos in + let moduleType = match p.Parser.token with + | Uident _ | Lident _ | List -> + (* Ocaml allows module types to end with lowercase: module Foo : bar = { ... 
} + * lets go with uppercase terminal for now *) + let moduleLongIdent = parseModuleLongIdent ~lowercase:true p in + Ast_helper.Mty.ident ~loc:moduleLongIdent.loc moduleLongIdent + | Lparen -> + Parser.next p; + let mty = parseModuleType p in + Parser.expect Rparen p; + {mty with pmty_loc = mkLoc startPos p.prevEndPos} + | Lbrace -> + Parser.next p; + let spec = + parseDelimitedRegion + ~grammar:Grammar.Signature + ~closing:Rbrace + ~f:parseSignatureItemRegion + p + in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Mty.signature ~loc spec + | Module -> (* TODO: check if this is still atomic when implementing first class modules*) + parseModuleTypeOf p + | Percent -> + let extension = parseExtension p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Mty.extension ~loc extension + | token -> + Parser.err p (Diagnostics.unexpected token p.breadcrumbs); + Recover.defaultModuleType() + in + let moduleTypeLoc = mkLoc startPos p.prevEndPos in + {moduleType with pmty_loc = moduleTypeLoc} + + and parseFunctorModuleType p = + let startPos = p.Parser.startPos in + let args = parseFunctorArgs p in + Parser.expect EqualGreater p; + let rhs = parseModuleType p in + let endPos = p.prevEndPos in + let modType = List.fold_right (fun (attrs, name, moduleType, startPos) acc -> + Ast_helper.Mty.functor_ + ~loc:(mkLoc startPos endPos) + ~attrs + name moduleType acc + ) args rhs + in + {modType with pmty_loc = mkLoc startPos endPos} + + (* Module types are the module-level equivalent of type expressions: they + * specify the general shape and type properties of modules. + * + * module-type ::= + * | modtype-path + * | { signature } + * | ( module-type ) --> parenthesized module-type + * | functor-args => module-type --> functor + * | module-type => module-type --> functor + * | module type of module-expr + * | attributes module-type + * | module-type with-mod-constraints + * | extension + *) + and parseModuleType ?(es6Arrow=true) ?(with_=true) p = + let attrs = parseAttributes p in + let modty = if es6Arrow && isEs6ArrowFunctor p then + parseFunctorModuleType p + else + let modty = parseAtomicModuleType p in + match p.Parser.token with + | EqualGreater when es6Arrow == true -> + Parser.next p; + let rhs = parseModuleType ~with_:false p in + let str = Location.mknoloc "_" in + let loc = mkLoc modty.pmty_loc.loc_start p.prevEndPos in + Ast_helper.Mty.functor_ ~loc str (Some modty) rhs + | _ -> modty + in + let moduleType = { modty with + pmty_attributes = List.concat [modty.pmty_attributes; attrs] + } in + if with_ then + parseWithConstraints moduleType p + else moduleType + + + and parseWithConstraints moduleType p = + match p.Parser.token with + | With -> + Parser.next p; + let first = parseWithConstraint p in + let rec loop p acc = + match p.Parser.token with + | And -> + Parser.next p; + loop p ((parseWithConstraint p)::acc) + | _ -> + List.rev acc + in + let constraints = loop p [first] in + let loc = mkLoc moduleType.pmty_loc.loc_start p.prevEndPos in + Ast_helper.Mty.with_ ~loc moduleType constraints + | _ -> + moduleType + + (* mod-constraint ::= + * | type typeconstr type-equation type-constraints? 
+ * ∣ type typeconstr-name := typexpr + * ∣ module module-path = extended-module-path + * ∣ module module-path := extended-module-path + * + * TODO: split this up into multiple functions, better errors *) + and parseWithConstraint p = + match p.Parser.token with + | Module -> + Parser.next p; + let modulePath = parseModuleLongIdent ~lowercase:false p in + begin match p.Parser.token with + | ColonEqual -> + Parser.next p; + let lident = parseModuleLongIdent ~lowercase:false p in + Parsetree.Pwith_modsubst (modulePath, lident) + | Equal -> + Parser.next p; + let lident = parseModuleLongIdent ~lowercase:false p in + Parsetree.Pwith_module (modulePath, lident) + | token -> + (* TODO: revisit *) + Parser.err p (Diagnostics.unexpected token p.breadcrumbs); + let lident = parseModuleLongIdent ~lowercase:false p in + Parsetree.Pwith_modsubst (modulePath, lident) + end + | Typ -> + Parser.next p; + let typeConstr = parseValuePath p in + let params = parseTypeParams ~parent:typeConstr p in + begin match p.Parser.token with + | ColonEqual -> + Parser.next p; + let typExpr = parseTypExpr p in + Parsetree.Pwith_typesubst ( + typeConstr, + Ast_helper.Type.mk + ~loc:typeConstr.loc + ~params + ~manifest:typExpr + (Location.mkloc (Longident.last typeConstr.txt) typeConstr.loc) + ) + | Equal -> + Parser.next p; + let typExpr = parseTypExpr p in + let typeConstraints = parseTypeConstraints p in + Parsetree.Pwith_type ( + typeConstr, + Ast_helper.Type.mk + ~loc:typeConstr.loc + ~params + ~manifest:typExpr + ~cstrs:typeConstraints + (Location.mkloc (Longident.last typeConstr.txt) typeConstr.loc) + ) + | token -> + (* TODO: revisit *) + Parser.err p (Diagnostics.unexpected token p.breadcrumbs); + let typExpr = parseTypExpr p in + let typeConstraints = parseTypeConstraints p in + Parsetree.Pwith_type ( + typeConstr, + Ast_helper.Type.mk + ~loc:typeConstr.loc + ~params + ~manifest:typExpr + ~cstrs:typeConstraints + (Location.mkloc (Longident.last typeConstr.txt) typeConstr.loc) + ) + end + | token -> + Parser.err p (Diagnostics.unexpected token p.breadcrumbs); + exit (-1) (* TODO: handle this case *) + + and parseModuleTypeOf p = + let startPos = p.Parser.startPos in + Parser.expect Module p; + Parser.expect Typ p; + Parser.expect Of p; + let moduleExpr = parseModuleExpr p in + Ast_helper.Mty.typeof_ ~loc:(mkLoc startPos p.prevEndPos) moduleExpr + + (* module signature on the file level *) + and parseSpecification p = + parseRegion ~grammar:Grammar.Specification ~f:parseSignatureItemRegion p + [@@progress (Parser.next, Parser.expect, Parser.checkProgress)] + + and parseSignatureItemRegion p = + let startPos = p.Parser.startPos in + let attrs = parseAttributes p in + match p.Parser.token with + | Let -> + Parser.beginRegion p; + let valueDesc = parseSignLetDesc ~attrs p in + Parser.optional p Semicolon |> ignore; + let loc = mkLoc startPos p.prevEndPos in + Parser.endRegion p; + Some (Ast_helper.Sig.value ~loc valueDesc) + | Typ -> + Parser.beginRegion p; + begin match parseTypeDefinitionOrExtension ~attrs p with + | TypeDef {recFlag; types} -> + Parser.optional p Semicolon |> ignore; + let loc = mkLoc startPos p.prevEndPos in + Parser.endRegion p; + Some (Ast_helper.Sig.type_ ~loc recFlag types) + | TypeExt(ext) -> + Parser.optional p Semicolon |> ignore; + let loc = mkLoc startPos p.prevEndPos in + Parser.endRegion p; + Some (Ast_helper.Sig.type_extension ~loc ext) + end + | External -> + let externalDef = parseExternalDef ~attrs p in + Parser.optional p Semicolon |> ignore; + let loc = mkLoc startPos 
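+ (* Signature items mirror the structure items above; illustrative examples:
+ * let length: string => int
+ * external now: unit => float = "Date.now"
+ * module Nested: {let x: int}
+ * include Comparable with type t = int *)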
p.prevEndPos in + Some (Ast_helper.Sig.value ~loc externalDef) + | Exception -> + let exceptionDef = parseExceptionDef ~attrs p in + Parser.optional p Semicolon |> ignore; + let loc = mkLoc startPos p.prevEndPos in + Some (Ast_helper.Sig.exception_ ~loc exceptionDef) + | Open -> + let openDescription = parseOpenDescription ~attrs p in + Parser.optional p Semicolon |> ignore; + let loc = mkLoc startPos p.prevEndPos in + Some (Ast_helper.Sig.open_ ~loc openDescription) + | Include -> + Parser.next p; + let moduleType = parseModuleType p in + let includeDescription = Ast_helper.Incl.mk + ~loc:(mkLoc startPos p.prevEndPos) + ~attrs + moduleType + in + Parser.optional p Semicolon |> ignore; + let loc = mkLoc startPos p.prevEndPos in + Some (Ast_helper.Sig.include_ ~loc includeDescription) + | Module -> + Parser.next p; + begin match p.Parser.token with + | Uident _ -> + let modDecl = parseModuleDeclarationOrAlias ~attrs p in + Parser.optional p Semicolon |> ignore; + let loc = mkLoc startPos p.prevEndPos in + Some (Ast_helper.Sig.module_ ~loc modDecl) + | Rec -> + let recModule = parseRecModuleSpec ~attrs ~startPos p in + Parser.optional p Semicolon |> ignore; + let loc = mkLoc startPos p.prevEndPos in + Some (Ast_helper.Sig.rec_module ~loc recModule) + | Typ -> + Some (parseModuleTypeDeclaration ~attrs ~startPos p) + | _t -> + let modDecl = parseModuleDeclarationOrAlias ~attrs p in + Parser.optional p Semicolon |> ignore; + let loc = mkLoc startPos p.prevEndPos in + Some (Ast_helper.Sig.module_ ~loc modDecl) + end + | AtAt -> + let attr = parseStandaloneAttribute p in + Parser.optional p Semicolon |> ignore; + let loc = mkLoc startPos p.prevEndPos in + Some (Ast_helper.Sig.attribute ~loc attr) + | PercentPercent -> + let extension = parseExtension ~moduleLanguage:true p in + Parser.optional p Semicolon |> ignore; + let loc = mkLoc startPos p.prevEndPos in + Some (Ast_helper.Sig.extension ~attrs ~loc extension) + | Import -> + Parser.next p; + parseSignatureItemRegion p + | _ -> + None + + (* module rec module-name : module-type { and module-name: module-type } *) + and parseRecModuleSpec ~attrs ~startPos p = + Parser.expect Rec p; + let rec loop p spec = + let startPos = p.Parser.startPos in + let attrs = parseAttributesAndBinding p in + match p.Parser.token with + | And -> + (* TODO: give a good error message when with constraint, no parens + * and ASet: (Set.S with type elt = A.t) + * and BTree: (Btree.S with type elt = A.t) + * Without parens, the `and` signals the start of another + * `with-constraint` + *) + Parser.expect And p; + let decl = parseRecModuleDeclaration ~attrs ~startPos p in + loop p (decl::spec) + | _ -> + List.rev spec + in + let first = parseRecModuleDeclaration ~attrs ~startPos p in + loop p [first] + + (* module-name : module-type *) + and parseRecModuleDeclaration ~attrs ~startPos p = + let name = match p.Parser.token with + | Uident modName -> + let loc = mkLoc p.startPos p.endPos in + Parser.next p; + Location.mkloc modName loc + | t -> + Parser.err p (Diagnostics.uident t); + Location.mknoloc "_" + in + Parser.expect Colon p; + let modType = parseModuleType p in + Ast_helper.Md.mk ~loc:(mkLoc startPos p.prevEndPos) ~attrs name modType + + and parseModuleDeclarationOrAlias ~attrs p = + let startPos = p.Parser.startPos in + let moduleName = match p.Parser.token with + | Uident ident -> + let loc = mkLoc p.Parser.startPos p.endPos in + Parser.next p; + Location.mkloc ident loc + | t -> + Parser.err p (Diagnostics.uident t); + Location.mknoloc "_" + in + let body = 
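+ (* Either a declaration against a module type or an alias, e.g. (assumed
+ * examples): module Codec: JsonCodec versus module C = Codec. *)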
match p.Parser.token with + | Colon -> + Parser.next p; + parseModuleType p + | Equal -> + Parser.next p; + let lident = parseModuleLongIdent ~lowercase:false p in + Ast_helper.Mty.alias lident + | token -> + Parser.err p (Diagnostics.unexpected token p.breadcrumbs); + Recover.defaultModuleType() + in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Md.mk ~loc ~attrs moduleName body + + and parseModuleTypeDeclaration ~attrs ~startPos p = + Parser.expect Typ p; + let moduleName = match p.Parser.token with + | Uident ident -> + let loc = mkLoc p.startPos p.endPos in + Parser.next p; + Location.mkloc ident loc + | Lident ident -> + let loc = mkLoc p.startPos p.endPos in + Parser.next p; + Location.mkloc ident loc + | t -> + Parser.err p (Diagnostics.uident t); + Location.mknoloc "_" + in + let typ = match p.Parser.token with + | Equal -> + Parser.next p; + Some (parseModuleType p) + | _ -> None + in + let moduleDecl = Ast_helper.Mtd.mk ~attrs ?typ moduleName in + Ast_helper.Sig.modtype ~loc:(mkLoc startPos p.prevEndPos) moduleDecl + + and parseSignLetDesc ~attrs p = + let startPos = p.Parser.startPos in + Parser.expect Let p; + let (name, loc) = parseLident p in + let name = Location.mkloc name loc in + Parser.expect Colon p; + let typExpr = parsePolyTypeExpr p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Val.mk ~loc ~attrs name typExpr + +(* attr-id ::= lowercase-ident + ∣ capitalized-ident + ∣ attr-id . attr-id *) + and parseAttributeId p = + let startPos = p.Parser.startPos in + let rec loop p acc = + match p.Parser.token with + | Lident ident | Uident ident -> + Parser.next p; + let id = acc ^ ident in + begin match p.Parser.token with + | Dot -> Parser.next p; loop p (id ^ ".") + | _ -> id + end + | token when Token.isKeyword token -> + Parser.next p; + let id = acc ^ (Token.toString token) in + begin match p.Parser.token with + | Dot -> Parser.next p; loop p (id ^ ".") + | _ -> id + end + | token -> + Parser.err p (Diagnostics.unexpected token p.breadcrumbs); + acc + in + let id = loop p "" in + let endPos = p.prevEndPos in + Location.mkloc id (mkLoc startPos endPos) + + (* + * payload ::= empty + * | ( structure-item ) + * + * TODO: what about multiple structure items? + * @attr({let x = 1; let x = 2}) + * + * Also what about type-expressions and specifications? + * @attr(:myType) ??? + *) + and parsePayload p = + match p.Parser.token with + | Lparen when p.startPos.pos_cnum = p.prevEndPos.pos_cnum -> + Parser.next p; + begin match p.token with + | Colon -> + Parser.next p; + let typ = parseTypExpr p in + Parser.expect Rparen p; + Parsetree.PTyp typ + | _ -> + let items = parseDelimitedRegion + ~grammar:Grammar.Structure + ~closing:Rparen + ~f:parseStructureItemRegion + p + in + Parser.expect Rparen p; + Parsetree.PStr items + end + | _ -> Parsetree.PStr [] + + (* type attribute = string loc * payload *) + and parseAttribute p = + match p.Parser.token with + | At -> + Parser.next p; + let attrId = parseAttributeId p in + let payload = parsePayload p in + Some(attrId, payload) + | _ -> None + + and parseAttributes p = + parseRegion p + ~grammar:Grammar.Attribute + ~f:parseAttribute + + (* + * standalone-attribute ::= + * | @@ atribute-id + * | @@ attribute-id ( structure-item ) + *) + and parseStandaloneAttribute p = + Parser.expect AtAt p; + let attrId = parseAttributeId p in + let payload = parsePayload p in + (attrId, payload) + + (* extension ::= % attr-id attr-payload + * | %% attr-id( + * expr ::= ... + * ∣ extension + * + * typexpr ::= ... 
+ * ∣ extension + * + * pattern ::= ... + * ∣ extension + * + * module-expr ::= ... + * ∣ extension + * + * module-type ::= ... + * ∣ extension + * + * class-expr ::= ... + * ∣ extension + * + * class-type ::= ... + * ∣ extension + * + * + * item extension nodes usable in structures and signature + * + * item-extension ::= %% attr-id + * | %% attr-id(structure-item) + * + * attr-payload ::= structure-item + * + * ~moduleLanguage represents whether we're on the module level or not + *) + and parseExtension ?(moduleLanguage=false) p = + if moduleLanguage then + Parser.expect PercentPercent p + else + Parser.expect Percent p; + let attrId = parseAttributeId p in + let payload = parsePayload p in + (attrId, payload) +end + +module OutcomePrinter: sig + open Format + open Outcometree + + val out_value : (formatter -> out_value -> unit) ref [@@live] + val out_type : (formatter -> out_type -> unit) ref [@@live] + val out_class_type : (formatter -> out_class_type -> unit) ref [@@live] + val out_module_type : (formatter -> out_module_type -> unit) ref [@@live] + val out_sig_item : (formatter -> out_sig_item -> unit) ref [@@live] + val out_signature : (formatter -> out_sig_item list -> unit) ref [@@live] + val out_type_extension : (formatter -> out_type_extension -> unit) ref [@@live] + val out_phrase : (formatter -> out_phrase -> unit) ref [@@live] + + val parenthesized_ident : string -> bool [@@live] +end = struct + (* Napkin doesn't have parenthesized identifiers. + * We don't support custom operators. *) + let parenthesized_ident _name = true + + (* TODO: better allocation strategy for the buffer *) + let escapeStringContents s = + let len = String.length s in + let b = Buffer.create len in + for i = 0 to len - 1 do + let c = (String.get [@doesNotRaise]) s i in + if c = '\008' then ( + Buffer.add_char b '\\'; + Buffer.add_char b 'b'; + ) else if c = '\009' then ( + Buffer.add_char b '\\'; + Buffer.add_char b 't'; + ) else if c = '\010' then ( + Buffer.add_char b '\\'; + Buffer.add_char b 'n'; + ) else if c = '\013' then ( + Buffer.add_char b '\\'; + Buffer.add_char b 'r'; + ) else if c = '\034' then ( + Buffer.add_char b '\\'; + Buffer.add_char b '"'; + ) else if c = '\092' then ( + Buffer.add_char b '\\'; + Buffer.add_char b '\\'; + ) else ( + Buffer.add_char b c; + ); + done; + Buffer.contents b + + (* let rec print_ident fmt ident = match ident with + | Outcometree.Oide_ident s -> Format.pp_print_string fmt s + | Oide_dot (id, s) -> + print_ident fmt id; + Format.pp_print_char fmt '.'; + Format.pp_print_string fmt s + | Oide_apply (id1, id2) -> + print_ident fmt id1; + Format.pp_print_char fmt '('; + print_ident fmt id2; + Format.pp_print_char fmt ')' *) + + let rec printOutIdentDoc (ident : Outcometree.out_ident) = + match ident with + | Oide_ident s -> Doc.text s + | Oide_dot (ident, s) -> Doc.concat [ + printOutIdentDoc ident; + Doc.dot; + Doc.text s; + ] + | Oide_apply (call, arg) ->Doc.concat [ + printOutIdentDoc call; + Doc.lparen; + printOutIdentDoc arg; + Doc.rparen; + ] + + let printOutAttributeDoc (outAttribute: Outcometree.out_attribute) = + Doc.concat [ + Doc.text "@"; + Doc.text outAttribute.oattr_name; + ] + + let printOutAttributesDoc (attrs: Outcometree.out_attribute list) = + match attrs with + | [] -> Doc.nil + | attrs -> + Doc.concat [ + Doc.group ( + Doc.join ~sep:Doc.line (List.map printOutAttributeDoc attrs) + ); + Doc.line; + ] + + let rec collectArrowArgs (outType: Outcometree.out_type) args = + match outType with + | Otyp_arrow (label, argType, returnType) -> + let 
arg = (label, argType) in + collectArrowArgs returnType (arg::args) + | _ as returnType -> + (List.rev args, returnType) + + let rec collectFunctorArgs (outModuleType: Outcometree.out_module_type) args = + match outModuleType with + | Omty_functor (lbl, optModType, returnModType) -> + let arg = (lbl, optModType) in + collectFunctorArgs returnModType (arg::args) + | _ -> + (List.rev args, outModuleType) + + let rec printOutTypeDoc (outType: Outcometree.out_type) = + match outType with + | Otyp_abstract | Otyp_variant _ (* don't support poly-variants atm *) | Otyp_open -> Doc.nil + | Otyp_alias (typ, aliasTxt) -> + Doc.concat [ + printOutTypeDoc typ; + Doc.text " as '"; + Doc.text aliasTxt + ] + | Otyp_constr (outIdent, []) -> + printOutIdentDoc outIdent + | Otyp_manifest (typ1, typ2) -> + Doc.concat [ + printOutTypeDoc typ1; + Doc.text " = "; + printOutTypeDoc typ2; + ] + | Otyp_record record -> + printRecordDeclarationDoc ~inline:true record + | Otyp_stuff txt -> Doc.text txt + | Otyp_var (ng, s) -> Doc.concat [ + Doc.text ("'" ^ (if ng then "_" else "")); + Doc.text s + ] + | Otyp_object (fields, rest) -> printObjectFields fields rest + | Otyp_class _ -> Doc.nil + | Otyp_attribute (typ, attribute) -> + Doc.group ( + Doc.concat [ + printOutAttributeDoc attribute; + Doc.line; + printOutTypeDoc typ; + ] + ) + (* example: Red | Blue | Green | CustomColour(float, float, float) *) + | Otyp_sum constructors -> + printOutConstructorsDoc constructors + + (* example: {"name": string, "age": int} *) + | Otyp_constr ( + (Oide_dot ((Oide_ident "Js"), "t")), + [Otyp_object (fields, rest)] + ) -> printObjectFields fields rest + + (* example: node *) + | Otyp_constr (outIdent, args) -> + let argsDoc = match args with + | [] -> Doc.nil + | args -> + Doc.concat [ + Doc.lessThan; + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) ( + List.map printOutTypeDoc args + ) + ] + ); + Doc.trailingComma; + Doc.softLine; + Doc.greaterThan; + ] + in + Doc.group ( + Doc.concat [ + printOutIdentDoc outIdent; + argsDoc; + ] + ) + | Otyp_tuple tupleArgs -> + Doc.group ( + Doc.concat [ + Doc.lparen; + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) ( + List.map printOutTypeDoc tupleArgs + ) + ] + ); + Doc.trailingComma; + Doc.softLine; + Doc.rparen; + ] + ) + | Otyp_poly (vars, outType) -> + Doc.group ( + Doc.concat [ + Doc.join ~sep:Doc.space ( + List.map (fun var -> Doc.text ("'" ^ var)) vars + ); + printOutTypeDoc outType; + ] + ) + | Otyp_arrow _ as typ -> + let (typArgs, typ) = collectArrowArgs typ [] in + let args = Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) ( + List.map (fun (lbl, typ) -> + if lbl = "" then + printOutTypeDoc typ + else + Doc.group ( + Doc.concat [ + Doc.text ("~" ^ lbl ^ ": "); + printOutTypeDoc typ + ] + ) + ) typArgs + ) in + let argsDoc = + let needsParens = match typArgs with + | [_, (Otyp_tuple _ | Otyp_arrow _)] -> true + (* single argument should not be wrapped *) + | ["", _] -> false + | _ -> true + in + if needsParens then + Doc.group ( + Doc.concat [ + Doc.lparen; + Doc.indent ( + Doc.concat [ + Doc.softLine; + args; + ] + ); + Doc.trailingComma; + Doc.softLine; + Doc.rparen; + ] + ) + else args + in + Doc.concat [ + argsDoc; + Doc.text " => "; + printOutTypeDoc typ; + ] + | Otyp_module (_modName, _stringList, _outTypes) -> + Doc.nil + + and printObjectFields fields rest = + let dots = match rest with + | Some non_gen -> Doc.text ((if non_gen then "_" else "") ^ "..") + | None -> Doc.nil + 
in + Doc.group ( + Doc.concat [ + Doc.lbrace; + dots; + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) ( + List.map (fun (lbl, outType) -> Doc.group ( + Doc.concat [ + Doc.text ("\"" ^ lbl ^ "\": "); + printOutTypeDoc outType; + ] + )) fields + ) + ] + ); + Doc.softLine; + Doc.trailingComma; + Doc.rbrace; + ] + ) + + + and printOutConstructorsDoc constructors = + Doc.group ( + Doc.indent ( + Doc.concat [ + Doc.line; + Doc.join ~sep:Doc.line ( + List.mapi (fun i constructor -> + Doc.concat [ + if i > 0 then Doc.text "| " else Doc.ifBreaks (Doc.text "| ") Doc.nil; + printOutConstructorDoc constructor; + ] + ) constructors + ) + ] + ) + ) + + and printOutConstructorDoc (name, args, gadt) = + let gadtDoc = match gadt with + | Some outType -> + Doc.concat [ + Doc.text ": "; + printOutTypeDoc outType + ] + | None -> Doc.nil + in + let argsDoc = match args with + | [] -> Doc.nil + | [Otyp_record record] -> + (* inline records + * | Root({ + * mutable value: 'value, + * mutable updatedTime: float, + * }) + *) + Doc.concat [ + Doc.lparen; + Doc.indent ( + printRecordDeclarationDoc ~inline:true record; + ); + Doc.rparen; + ] + | _types -> + Doc.indent ( + Doc.concat [ + Doc.lparen; + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) ( + List.map printOutTypeDoc args + ) + ] + ); + Doc.trailingComma; + Doc.softLine; + Doc.rparen; + ] + ) + in + Doc.group ( + Doc.concat [ + Doc.text name; + argsDoc; + gadtDoc + ] + ) + + and printRecordDeclRowDoc (name, mut, arg) = + Doc.group ( + Doc.concat [ + if mut then Doc.text "mutable " else Doc.nil; + Doc.text name; + Doc.text ": "; + printOutTypeDoc arg; + ] + ) + + and printRecordDeclarationDoc ~inline rows = + let content = Doc.concat [ + Doc.lbrace; + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) ( + List.map printRecordDeclRowDoc rows + ) + ] + ); + Doc.trailingComma; + Doc.softLine; + Doc.rbrace; + ] in + if not inline then + Doc.group content + else content + + let printOutType fmt outType = + Format.pp_print_string fmt + (Doc.toString ~width:80 (printOutTypeDoc outType)) + + let printTypeParameterDoc (typ, (co, cn)) = + Doc.concat [ + if not cn then Doc.text "+" else if not co then Doc.text "-" else Doc.nil; + if typ = "_" then Doc.text "_" else Doc.text ("'" ^ typ) + ] + + + let rec printOutSigItemDoc (outSigItem : Outcometree.out_sig_item) = + match outSigItem with + | Osig_class _ | Osig_class_type _ -> Doc.nil + | Osig_ellipsis -> Doc.dotdotdot + | Osig_value valueDecl -> + Doc.group ( + Doc.concat [ + printOutAttributesDoc valueDecl.oval_attributes; + Doc.text ( + match valueDecl.oval_prims with | [] -> "let " | _ -> "external " + ); + Doc.text valueDecl.oval_name; + Doc.text ":"; + Doc.space; + printOutTypeDoc valueDecl.oval_type; + match valueDecl.oval_prims with + | [] -> Doc.nil + | primitives -> Doc.indent ( + Doc.concat [ + Doc.text " ="; + Doc.line; + Doc.group ( + Doc.join ~sep:Doc.line (List.map (fun prim -> Doc.text ("\"" ^ prim ^ "\"")) primitives) + ) + ] + ) + ] + ) + | Osig_typext (outExtensionConstructor, _outExtStatus) -> + printOutExtensionConstructorDoc outExtensionConstructor + | Osig_modtype (modName, Omty_signature []) -> + Doc.concat [ + Doc.text "module type "; + Doc.text modName; + ] + | Osig_modtype (modName, outModuleType) -> + Doc.group ( + Doc.concat [ + Doc.text "module type "; + Doc.text modName; + Doc.text " = "; + printOutModuleTypeDoc outModuleType; + ] + ) + | Osig_module 
(modName, Omty_alias ident, _) -> + Doc.group ( + Doc.concat [ + Doc.text "module "; + Doc.text modName; + Doc.text " ="; + Doc.line; + printOutIdentDoc ident; + ] + ) + | Osig_module (modName, outModType, outRecStatus) -> + Doc.group ( + Doc.concat [ + Doc.text ( + match outRecStatus with + | Orec_not -> "module " + | Orec_first -> "module rec " + | Orec_next -> "and" + ); + Doc.text modName; + Doc.text " = "; + printOutModuleTypeDoc outModType; + ] + ) + | Osig_type (outTypeDecl, outRecStatus) -> + (* TODO: manifest ? *) + let attrs = match outTypeDecl.otype_immediate, outTypeDecl.otype_unboxed with + | false, false -> Doc.nil + | true, false -> + Doc.concat [Doc.text "@immediate"; Doc.line] + | false, true -> + Doc.concat [Doc.text "@unboxed"; Doc.line] + | true, true -> + Doc.concat [Doc.text "@immediate @unboxed"; Doc.line] + in + let kw = Doc.text ( + match outRecStatus with + | Orec_not -> "type " + | Orec_first -> "type rec " + | Orec_next -> "and " + ) in + let typeParams = match outTypeDecl.otype_params with + | [] -> Doc.nil + | _params -> Doc.group ( + Doc.concat [ + Doc.lessThan; + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) ( + List.map printTypeParameterDoc outTypeDecl.otype_params + ) + ] + ); + Doc.trailingComma; + Doc.softLine; + Doc.greaterThan; + ] + ) + in + let privateDoc = match outTypeDecl.otype_private with + | Asttypes.Private -> Doc.text "private " + | Public -> Doc.nil + in + let kind = match outTypeDecl.otype_type with + | Otyp_open -> Doc.concat [ + Doc.text " = "; + privateDoc; + Doc.text ".."; + ] + | Otyp_abstract -> Doc.nil + | Otyp_record record -> Doc.concat [ + Doc.text " = "; + privateDoc; + printRecordDeclarationDoc ~inline:false record; + ] + | typ -> Doc.concat [ + Doc.text " = "; + printOutTypeDoc typ + ] + in + let constraints = match outTypeDecl.otype_cstrs with + | [] -> Doc.nil + | _ -> Doc.group ( + Doc.concat [ + Doc.line; + Doc.indent ( + Doc.concat [ + Doc.hardLine; + Doc.join ~sep:Doc.line (List.map (fun (typ1, typ2) -> + Doc.group ( + Doc.concat [ + Doc.text "constraint "; + printOutTypeDoc typ1; + Doc.text " ="; + Doc.indent ( + Doc.concat [ + Doc.line; + printOutTypeDoc typ2; + ] + ) + ] + ) + ) outTypeDecl.otype_cstrs) + ] + ) + ] + ) in + Doc.group ( + Doc.concat [ + attrs; + Doc.group ( + Doc.concat [ + attrs; + kw; + Doc.text outTypeDecl.otype_name; + typeParams; + kind + ] + ); + constraints + ] + ) + + and printOutModuleTypeDoc (outModType : Outcometree.out_module_type) = + match outModType with + | Omty_abstract -> Doc.nil + | Omty_ident ident -> printOutIdentDoc ident + (* example: module Increment = (M: X_int) => X_int *) + | Omty_functor _ -> + let (args, returnModType) = collectFunctorArgs outModType [] in + let argsDoc = match args with + | [_, None] -> Doc.text "()" + | args -> + Doc.group ( + Doc.concat [ + Doc.lparen; + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) ( + List.map (fun (lbl, optModType) -> Doc.group ( + Doc.concat [ + Doc.text lbl; + match optModType with + | None -> Doc.nil + | Some modType -> Doc.concat [ + Doc.text ": "; + printOutModuleTypeDoc modType; + ] + ] + )) args + ) + ] + ); + Doc.trailingComma; + Doc.softLine; + Doc.rparen; + ] + ) + in + Doc.group ( + Doc.concat [ + argsDoc; + Doc.text " => "; + printOutModuleTypeDoc returnModType + ] + ) + | Omty_signature [] -> Doc.nil + | Omty_signature signature -> + Doc.breakableGroup ~forceBreak:true ( + Doc.concat [ + Doc.lbrace; + Doc.indent ( + 
Doc.concat [ + Doc.line; + printOutSignatureDoc signature; + ] + ); + Doc.softLine; + Doc.rbrace; + ] + ) + | Omty_alias _ident -> Doc.nil + + and printOutSignatureDoc (signature : Outcometree.out_sig_item list) = + let rec loop signature acc = + match signature with + | [] -> List.rev acc + | Outcometree.Osig_typext(ext, Oext_first) :: items -> + (* Gather together the extension constructors *) + let rec gather_extensions acc items = + match items with + Outcometree.Osig_typext(ext, Oext_next) :: items -> + gather_extensions + ((ext.oext_name, ext.oext_args, ext.oext_ret_type) :: acc) + items + | _ -> (List.rev acc, items) + in + let exts, items = + gather_extensions + [(ext.oext_name, ext.oext_args, ext.oext_ret_type)] + items + in + let te = + { Outcometree.otyext_name = ext.oext_type_name; + otyext_params = ext.oext_type_params; + otyext_constructors = exts; + otyext_private = ext.oext_private } + in + let doc = printOutTypeExtensionDoc te in + loop items (doc::acc) + | item::items -> + let doc = printOutSigItemDoc item in + loop items (doc::acc) + in + match loop signature [] with + | [doc] -> doc + | docs -> + Doc.breakableGroup ~forceBreak:true ( + Doc.join ~sep:Doc.line docs + ) + + and printOutExtensionConstructorDoc (outExt : Outcometree.out_extension_constructor) = + let typeParams = match outExt.oext_type_params with + | [] -> Doc.nil + | params -> + Doc.group( + Doc.concat [ + Doc.lessThan; + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) (List.map + (fun ty -> Doc.text (if ty = "_" then ty else "'" ^ ty)) + params + + ) + ] + ); + Doc.softLine; + Doc.greaterThan; + ] + ) + + in + Doc.group ( + Doc.concat [ + Doc.text "type "; + Doc.text outExt.oext_type_name; + typeParams; + Doc.text " +="; + Doc.line; + if outExt.oext_private = Asttypes.Private then + Doc.text "private " + else + Doc.nil; + printOutConstructorDoc + (outExt.oext_name, outExt.oext_args, outExt.oext_ret_type) + ] + ) + + and printOutTypeExtensionDoc (typeExtension : Outcometree.out_type_extension) = + let typeParams = match typeExtension.otyext_params with + | [] -> Doc.nil + | params -> + Doc.group( + Doc.concat [ + Doc.lessThan; + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) (List.map + (fun ty -> Doc.text (if ty = "_" then ty else "'" ^ ty)) + params + + ) + ] + ); + Doc.softLine; + Doc.greaterThan; + ] + ) + + in + Doc.group ( + Doc.concat [ + Doc.text "type "; + Doc.text typeExtension.otyext_name; + typeParams; + Doc.text " +="; + if typeExtension.otyext_private = Asttypes.Private then + Doc.text "private " + else + Doc.nil; + printOutConstructorsDoc typeExtension.otyext_constructors; + ] + ) + + let printOutSigItem fmt outSigItem = + Format.pp_print_string fmt + (Doc.toString ~width:80 (printOutSigItemDoc outSigItem)) + + let printOutSignature fmt signature = + Format.pp_print_string fmt + (Doc.toString ~width:80 (printOutSignatureDoc signature)) + + let validFloatLexeme s = + let l = String.length s in + let rec loop i = + if i >= l then s ^ "." else + match (s.[i] [@doesNotRaise]) with + | '0' .. 
'9' | '-' -> loop (i+1) + | _ -> s + in loop 0 + + let floatRepres f = + match classify_float f with + | FP_nan -> "nan" + | FP_infinite -> + if f < 0.0 then "neg_infinity" else "infinity" + | _ -> + let float_val = + let s1 = Printf.sprintf "%.12g" f in + if f = (float_of_string [@doesNotRaise]) s1 then s1 else + let s2 = Printf.sprintf "%.15g" f in + if f = (float_of_string [@doesNotRaise]) s2 then s2 else + Printf.sprintf "%.18g" f + in validFloatLexeme float_val + + let rec printOutValueDoc (outValue : Outcometree.out_value) = + match outValue with + | Oval_array outValues -> + Doc.group ( + Doc.concat [ + Doc.lbracket; + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) ( + List.map printOutValueDoc outValues + ) + ] + ); + Doc.trailingComma; + Doc.softLine; + Doc.rbracket; + ] + ) + | Oval_char c -> Doc.text ("'" ^ (Char.escaped c) ^ "'") + | Oval_constr (outIdent, outValues) -> + Doc.group ( + Doc.concat [ + printOutIdentDoc outIdent; + Doc.lparen; + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) ( + List.map printOutValueDoc outValues + ) + ] + ); + Doc.trailingComma; + Doc.softLine; + Doc.rparen; + ] + ) + | Oval_ellipsis -> Doc.text "..." + | Oval_int i -> Doc.text (Format.sprintf "%i" i) + | Oval_int32 i -> Doc.text (Format.sprintf "%lil" i) + | Oval_int64 i -> Doc.text (Format.sprintf "%LiL" i) + | Oval_nativeint i -> Doc.text (Format.sprintf "%nin" i) + | Oval_float f -> Doc.text (floatRepres f) + | Oval_list outValues -> + Doc.group ( + Doc.concat [ + Doc.text "list["; + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) ( + List.map printOutValueDoc outValues + ) + ] + ); + Doc.trailingComma; + Doc.softLine; + Doc.rbracket; + ] + ) + | Oval_printer fn -> + let fmt = Format.str_formatter in + fn fmt; + let str = Format.flush_str_formatter () in + Doc.text str + | Oval_record rows -> + Doc.group ( + Doc.concat [ + Doc.lparen; + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) ( + List.map (fun (outIdent, outValue) -> Doc.group ( + Doc.concat [ + printOutIdentDoc outIdent; + Doc.text ": "; + printOutValueDoc outValue; + ] + ) + ) rows + ); + ] + ); + Doc.trailingComma; + Doc.softLine; + Doc.rparen; + ] + ) + | Oval_string (txt, _sizeToPrint, _kind) -> + Doc.text (escapeStringContents txt) + | Oval_stuff txt -> Doc.text txt + | Oval_tuple outValues -> + Doc.group ( + Doc.concat [ + Doc.lparen; + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) ( + List.map printOutValueDoc outValues + ) + ] + ); + Doc.trailingComma; + Doc.softLine; + Doc.rparen; + ] + ) + (* Not supported by NapkinScript *) + | Oval_variant _ -> Doc.nil + + let printOutExceptionDoc exc outValue = + match exc with + | Sys.Break -> Doc.text "Interrupted." + | Out_of_memory -> Doc.text "Out of memory during evaluation." + | Stack_overflow -> + Doc.text "Stack overflow during evaluation (looping recursion?)." 
+ | _ -> + Doc.group ( + Doc.indent( + Doc.concat [ + Doc.text "Exception:"; + Doc.line; + printOutValueDoc outValue; + ] + ) + ) + + let printOutPhraseSignature signature = + let rec loop signature acc = + match signature with + | [] -> List.rev acc + | (Outcometree.Osig_typext(ext, Oext_first), None)::signature -> + (* Gather together extension constructors *) + let rec gather_extensions acc items = + match items with + | (Outcometree.Osig_typext(ext, Oext_next), None)::items -> + gather_extensions + ((ext.oext_name, ext.oext_args, ext.oext_ret_type)::acc) + items + | _ -> (List.rev acc, items) + in + let exts, signature = + gather_extensions + [(ext.oext_name, ext.oext_args, ext.oext_ret_type)] + signature + in + let te = + { Outcometree.otyext_name = ext.oext_type_name; + otyext_params = ext.oext_type_params; + otyext_constructors = exts; + otyext_private = ext.oext_private } + in + let doc = printOutTypeExtensionDoc te in + loop signature (doc::acc) + | (sigItem, optOutValue)::signature -> + let doc = match optOutValue with + | None -> + printOutSigItemDoc sigItem + | Some outValue -> + Doc.group ( + Doc.concat [ + printOutSigItemDoc sigItem; + Doc.text " = "; + printOutValueDoc outValue; + ] + ) + in + loop signature (doc::acc) + in + Doc.breakableGroup ~forceBreak:true ( + Doc.join ~sep:Doc.line (loop signature []) + ) + + let printOutPhraseDoc (outPhrase : Outcometree.out_phrase) = + match outPhrase with + | Ophr_eval (outValue, outType) -> + Doc.group ( + Doc.concat [ + Doc.text "- : "; + printOutTypeDoc outType; + Doc.text " ="; + Doc.indent ( + Doc.concat [ + Doc.line; + printOutValueDoc outValue; + ] + ) + ] + ) + | Ophr_signature [] -> Doc.nil + | Ophr_signature signature -> printOutPhraseSignature signature + | Ophr_exception (exc, outValue) -> + printOutExceptionDoc exc outValue + + let printOutPhase fmt outPhrase = + Format.pp_print_string fmt + (Doc.toString ~width:80 (printOutPhraseDoc outPhrase)) + + let printOutModuleType fmt outModuleType = + Format.pp_print_string fmt + (Doc.toString ~width:80 (printOutModuleTypeDoc outModuleType)) + + let printOutTypeExtension fmt typeExtension = + Format.pp_print_string fmt + (Doc.toString ~width:80 (printOutTypeExtensionDoc typeExtension)) + + let printOutValue fmt outValue = + Format.pp_print_string fmt + (Doc.toString ~width:80 (printOutValueDoc outValue)) + + (* Not supported in Napkin *) + let printOutClassType _fmt _ = () + + let out_value = ref printOutValue + let out_type = ref printOutType + let out_module_type = ref printOutModuleType + let out_sig_item = ref printOutSigItem + let out_signature = ref printOutSignature + let out_type_extension = ref printOutTypeExtension + let out_phrase = ref printOutPhase [@live] + let out_class_type = ref printOutClassType +end + +module Repl = struct + let parseToplevelPhrase filename = + let src = IO.readFile filename in + let p = Parser.make src filename in + Parsetree.Ptop_def (NapkinScript.parseImplementation p) + + let typeAndPrintOutcome filename = + Compmisc.init_path false; + let env = Compmisc.initial_env () in + try + let sstr = match parseToplevelPhrase filename with + | Parsetree.Ptop_def sstr -> sstr + | _ -> assert false + in + let (_str, signature, _newenv) = Typemod.type_toplevel_phrase env sstr in + let outSigItems = Printtyp.tree_of_signature signature in + let fmt = Format.str_formatter in + !OutcomePrinter.out_signature fmt outSigItems; + let result = Format.flush_str_formatter () in + print_string result + with + | Typetexp.Error (_, _, err) -> + let fmt = 
Format.str_formatter in + Typetexp.report_error env fmt err; + let result = Format.flush_str_formatter () in + let () = print_endline result in + () + | _ -> print_endline "catch all" +end + +(* command line flags *) +module Clflags: sig + val recover: bool ref + val print: string ref + val width: int ref + val origin: string ref + val files: string list ref + val interface: bool ref + val report: string ref + + val parse: unit -> unit + val outcome: bool ref +end = struct + let recover = ref false + let width = ref 100 + + let files = ref [] + let addFilename filename = files := filename::(!files) + + let print = ref "" + let outcome = ref false + let origin = ref "" + let interface = ref false + let report = ref "pretty" + + let usage = "Usage: napkinscript \nOptions are:" + + let spec = [ + ("-recover", Arg.Unit (fun () -> recover := true), "Emit partial ast"); + ("-print", Arg.String (fun txt -> print := txt), "Print either binary, ocaml or ast"); + ("-parse", Arg.String (fun txt -> origin := txt), "Parse ocaml or napkinscript"); + ("-outcome", Arg.Bool (fun printOutcomeTree -> outcome := printOutcomeTree), "print outcometree"); + ("-width", Arg.Int (fun w -> width := w), "Specify the line length that the printer will wrap on" ); + ("-interface", Arg.Unit (fun () -> interface := true), "Parse as interface"); + ("-report", Arg.String (fun txt -> report := txt), "Stylize errors and messages using color and context. Accepts `Pretty` and `Plain`. Default `Plain`") + ] + + let parse () = Arg.parse spec addFilename usage +end + +module Driver: sig + val processFile: + isInterface: bool + -> width: int + -> recover: bool + -> origin:string + -> target:string + -> report:string + -> string + -> unit +end = struct + type 'a file_kind = + | Structure: Parsetree.structure file_kind + | Signature: Parsetree.signature file_kind + + let parseNapkin (type a) (kind : a file_kind) p : a = + match kind with + | Structure -> NapkinScript.parseImplementation p + | Signature -> NapkinScript.parseSpecification p + + let extractOcamlStringData filename = + let lexbuf = if String.length filename > 0 then + IO.readFile filename |> Lexing.from_string + else + Lexing.from_channel stdin + in + let stringLocs = ref [] in + let rec next () = + let token = Lexer.token_with_comments lexbuf in + match token with + | OcamlParser.STRING (_txt, None) -> + let open Location in + let loc = { + loc_start = lexbuf.lex_start_p; + loc_end = lexbuf.Lexing.lex_curr_p; + loc_ghost = false; + } in + let len = loc.loc_end.pos_cnum - loc.loc_start.pos_cnum in + let txt = Bytes.to_string ( + (Bytes.sub [@doesNotRaise]) lexbuf.Lexing.lex_buffer loc.loc_start.pos_cnum len + ) in + stringLocs := (txt, loc)::(!stringLocs); + next(); + | OcamlParser.EOF -> () + | _ -> next() + in + next(); + List.rev !stringLocs + + let parseOcaml (type a) (kind : a file_kind) filename : a = + let lexbuf = if String.length filename > 0 then + IO.readFile filename |> Lexing.from_string + else + Lexing.from_channel stdin + in + let stringData = extractOcamlStringData filename in + match kind with + | Structure -> + Parse.implementation lexbuf + |> ParsetreeCompatibility.replaceStringLiteralStructure stringData + |> ParsetreeCompatibility.structure + | Signature -> + Parse.interface lexbuf + |> ParsetreeCompatibility.replaceStringLiteralSignature stringData + |> ParsetreeCompatibility.signature + + let parseNapkinFile ~destination kind filename = + let src = if String.length filename > 0 then + IO.readFile filename + else + IO.readStdin () + in + let p = + 
let mode = match destination with + | "napkinscript" | "ns" | "sexp" -> Parser.Default + | _ -> Parser.ParseForTypeChecker + in + Parser.make ~mode src filename in + let ast = parseNapkin kind p in + let report = match p.diagnostics with + | [] -> None + | diagnostics -> Some(diagnostics) + in + (ast, report, p) + + let parseOcamlFile kind filename = + let ast = parseOcaml kind filename in + let lexbuf2 = if String.length filename > 0 then + IO.readFile filename |> Lexing.from_string + else + Lexing.from_channel stdin + in + let comments = + let rec next (prevTokEndPos : Lexing.position) comments lb = + let token = Lexer.token_with_comments lb in + match token with + | OcamlParser.EOF -> comments + | OcamlParser.COMMENT (txt, loc) -> + let comment = Comment.fromOcamlComment + ~loc + ~prevTokEndPos + ~txt + in + next loc.Location.loc_end (comment::comments) lb + | _ -> + next lb.Lexing.lex_curr_p comments lb + in + let cmts = next lexbuf2.Lexing.lex_start_p [] lexbuf2 in + cmts + in + let p = Parser.make "" filename in + p.comments <- comments; + (ast, None, p) + + let reasonFilename = ref "" + let commentData = ref [] + let stringData = ref [] + + let parseReasonBinaryFromStdin (type a) (kind : a file_kind) filename :a = + let chan, close = + match String.length filename == 0 with + | true -> stdin, (fun _ -> ()) + | false -> + let file_chan = open_in_bin filename in + seek_in file_chan 0; + file_chan, close_in_noerr + in + let ic = chan in + let magic = match kind with + | Structure -> Config.ast_impl_magic_number + | Signature -> Config.ast_intf_magic_number + in + let buffer = (really_input_string [@doesNotRaise]) ic (String.length magic) in + assert(buffer = magic); + let filename = input_value ic in + reasonFilename := filename; + let ast = input_value ic in + close chan; + + let src = + if String.length filename > 0 then IO.readFile filename + else IO.readStdin () + in + + let scanner = Scanner.make (Bytes.of_string src) filename in + + let rec next prevEndPos scanner = + let (startPos, endPos, token) = Scanner.scan scanner in + match token with + | Eof -> () + | Comment c -> + Comment.setPrevTokEndPos c prevEndPos; + commentData := c::(!commentData); + next endPos scanner + | String _ -> + let loc = {Location.loc_start = startPos; loc_end = endPos; loc_ghost = false} in + let len = endPos.pos_cnum - startPos.pos_cnum in + let txt = (String.sub [@doesNotRaise]) src startPos.pos_cnum len in + stringData := (txt, loc)::(!stringData); + next endPos scanner + | _ -> + next endPos scanner + in + + next Lexing.dummy_pos scanner; + + match kind with + | Structure -> + ast + |> ParsetreeCompatibility.replaceStringLiteralStructure !stringData + |> ParsetreeCompatibility.normalizeReasonArityStructure ~forPrinter:true + |> ParsetreeCompatibility.structure + | Signature -> + ast + |> ParsetreeCompatibility.replaceStringLiteralSignature !stringData + |> ParsetreeCompatibility.normalizeReasonAritySignature ~forPrinter:true + |> ParsetreeCompatibility.signature + + let isReasonDocComment (comment: Comment.t) = + let content = Comment.txt comment in + let len = String.length content in + if len = 0 then true + else if len >= 2 && (String.unsafe_get content 0 = '*' && String.unsafe_get content 1 = '*') then false + else if len >= 1 && (String.unsafe_get content 0 = '*') then true + else false + + + let parseReasonBinary kind filename = + let ast = parseReasonBinaryFromStdin kind filename in + let p = Parser.make "" !reasonFilename in + p.comments <- List.filter (fun c -> not (isReasonDocComment c)) 
!commentData; + (ast, None, p) + + let parseImplementation ~origin ~destination filename = + match origin with + | "ml" | "ocaml" -> + parseOcamlFile Structure filename + | "reasonBinary" -> + parseReasonBinary Structure filename + | _ -> + parseNapkinFile ~destination Structure filename + + let parseInterface ~destination ~origin filename = + match origin with + | "ml" | "ocaml" -> + parseOcamlFile Signature filename + | "reasonBinary" -> + parseReasonBinary Signature filename + | _ -> + parseNapkinFile ~destination Signature filename + + let process ~reportStyle parseFn printFn recover filename = + let (ast, report, parserState) = parseFn filename in + match report with + | Some report when recover = true -> + printFn ast parserState; + prerr_string ( + Diagnostics.stringOfReport + ~style:(Diagnostics.parseReportStyle reportStyle) + report (Bytes.to_string parserState.Parser.scanner.src) + ); + | Some report -> + prerr_string ( + Diagnostics.stringOfReport + ~style:(Diagnostics.parseReportStyle reportStyle) + report (Bytes.to_string parserState.Parser.scanner.src) + ); + exit 1 + | None -> + printFn ast parserState + + type action = + | ProcessImplementation + | ProcessInterface + + let printImplementation ~target ~width filename ast _parserState = + match target with + | "ml" | "ocaml" -> + Pprintast.structure Format.std_formatter ast + | "ns" | "napkinscript" -> + Printer.printImplementation ~width ast (List.rev _parserState.Parser.comments) + | "ast" -> + Printast.implementation Format.std_formatter ast + | "sexp" -> + ast |> SexpAst.implementation |> Sexp.toString |> print_string + | _ -> (* default binary *) + output_string stdout Config.ast_impl_magic_number; + output_value stdout filename; + output_value stdout ast + + let printInterface ~target ~width filename ast _parserState = + match target with + | "ml" | "ocaml" -> Pprintast.signature Format.std_formatter ast + | "ns" | "napkinscript" -> + Printer.printInterface ~width ast (List.rev _parserState.Parser.comments) + | "ast" -> Printast.interface Format.std_formatter ast + | "sexp" -> + ast |> SexpAst.interface |> Sexp.toString |> print_string + | _ -> (* default binary *) + output_string stdout Config.ast_intf_magic_number; + output_value stdout filename; + output_value stdout ast + + let processFile ~isInterface ~width ~recover ~origin ~target ~report filename = + try + let len = String.length filename in + let action = + if isInterface || len > 0 && (String.get [@doesNotRaise]) filename (len - 1) = 'i' then + ProcessInterface + else ProcessImplementation + in + match action with + | ProcessImplementation -> + process + ~reportStyle:report + (parseImplementation ~origin ~destination:target) + (printImplementation ~target ~width filename) recover filename + | ProcessInterface -> + process + ~reportStyle:report + (parseInterface ~origin ~destination:target) + (printInterface ~target ~width filename) recover filename + with + | Failure txt -> + prerr_string txt; + prerr_newline(); + exit 1 + | _ -> exit 1 +end + +let () = + Clflags.parse (); + if !Clflags.outcome then ( + Repl.typeAndPrintOutcome (List.hd !Clflags.files) + ) else ( + let () = match !Clflags.files with + | (_file::_) as files -> + List.iter (fun filename -> + Driver.processFile + ~isInterface:!Clflags.interface + ~width:!Clflags.width + ~recover:!Clflags.recover + ~target:!Clflags.print + ~origin:!Clflags.origin + ~report:!Clflags.report + filename + ) files; + | [] -> + Driver.processFile + ~isInterface:!Clflags.interface + ~width:!Clflags.width + 
~recover:!Clflags.recover + ~target:!Clflags.print + ~origin:!Clflags.origin + ~report:!Clflags.report + "" + in + exit 0 + ) \ No newline at end of file diff --git a/res_syntax/benchmarks/data/Napkinscript.res b/res_syntax/benchmarks/data/Napkinscript.res new file mode 100644 index 0000000000..f1f9658043 --- /dev/null +++ b/res_syntax/benchmarks/data/Napkinscript.res @@ -0,0 +1,20680 @@ +module MiniBuffer: { + type t + let add_char: (t, char) => unit + let add_string: (t, string) => unit + let contents: t => string + let create: int => t + let flush_newline: t => unit + let length: t => int + let unsafe_get: (t, int) => char +} = { + type t = { + mutable buffer: bytes, + mutable position: int, + mutable length: int, + } + + let create = n => { + let n = if n < 1 { + 1 + } else { + n + } + let s = (@doesNotRaise Bytes.create)(n) + {buffer: s, position: 0, length: n} + } + + let contents = b => Bytes.sub_string(b.buffer, 0, b.position) + + let unsafe_get = (b, ofs) => Bytes.unsafe_get(b.buffer, ofs) + + let length = b => b.position + + /* Can't be called directly, don't add to the interface */ + let resize_internal = (b, more) => { + let len = b.length + let new_len = ref(len) + while b.position + more > new_len.contents { + new_len := 2 * new_len.contents + } + if new_len.contents > Sys.max_string_length { + if b.position + more <= Sys.max_string_length { + new_len := Sys.max_string_length + } + } + let new_buffer = (@doesNotRaise Bytes.create)(new_len.contents) + /* PR#6148: let's keep using [blit] rather than [unsafe_blit] in + this tricky function that is slow anyway. */ + @doesNotRaise + Bytes.blit(b.buffer, 0, new_buffer, 0, b.position) + b.buffer = new_buffer + b.length = new_len.contents + } + + let add_char = (b, c) => { + let pos = b.position + if pos >= b.length { + resize_internal(b, 1) + } + Bytes.unsafe_set(b.buffer, pos, c) + b.position = pos + 1 + } + + let add_string = (b, s) => { + let len = String.length(s) + let new_position = b.position + len + if new_position > b.length { + resize_internal(b, len) + } + @doesNotRaise + Bytes.blit_string(s, 0, b.buffer, b.position, len) + b.position = new_position + } + + /* adds newline and trims all preceding whitespace */ + let flush_newline = b => { + let position = ref(b.position) + while Bytes.unsafe_get(b.buffer, position.contents - 1) == ' ' && position.contents >= 0 { + position := position.contents - 1 + } + b.position = position.contents + add_char(b, '\n') + } +} + +module Doc = { + type mode = Break | Flat + + type lineStyle = + | Classic /* fits? -> replace with space */ + | Soft /* fits? 
-> replaced with nothing */ + | Hard /* always included, forces breaks in parents */ + + type rec t = + | Nil + | Text(string) + | Concat(list) + | Indent(t) + | IfBreaks({yes: t, no: t}) + | LineSuffix(t) + | LineBreak(lineStyle) + | Group({shouldBreak: bool, doc: t}) + | CustomLayout(list) + | BreakParent + /* | Cursor */ + + let nil = Nil + let line = LineBreak(Classic) + let hardLine = LineBreak(Hard) + let softLine = LineBreak(Soft) + let text = s => Text(s) + let concat = l => Concat(l) + let indent = d => Indent(d) + let ifBreaks = (t, f) => IfBreaks({yes: t, no: f}) + let lineSuffix = d => LineSuffix(d) + let group = d => Group({shouldBreak: false, doc: d}) + let breakableGroup = (~forceBreak, d) => Group({shouldBreak: forceBreak, doc: d}) + let customLayout = gs => CustomLayout(gs) + let breakParent = BreakParent + /* let cursor = Cursor */ + + let space = Text(" ") + let comma = Text(",") + let dot = Text(".") + let dotdot = Text("..") + let dotdotdot = Text("...") + let lessThan = Text("<") + let greaterThan = Text(">") + let lbrace = Text("{") + let rbrace = Text("}") + let lparen = Text("(") + let rparen = Text(")") + let lbracket = Text("[") + let rbracket = Text("]") + let question = Text("?") + let tilde = Text("~") + let equal = Text("=") + let trailingComma = IfBreaks({yes: comma, no: nil}) + let doubleQuote = Text("\"") + + let propagateForcedBreaks = doc => { + let rec walk = doc => + switch doc { + | Text(_) | Nil | LineSuffix(_) => (false, doc) + | BreakParent => (true, Nil) + | LineBreak(Hard) => (true, doc) + | LineBreak(Classic | Soft) => (false, doc) + | Indent(children) => + let (childForcesBreak, newChildren) = walk(children) + (childForcesBreak, Indent(newChildren)) + | IfBreaks({yes: trueDoc, no: falseDoc}) => + let (falseForceBreak, falseDoc) = walk(falseDoc) + if falseForceBreak { + let (_, trueDoc) = walk(trueDoc) + (true, trueDoc) + } else { + let (forceBreak, trueDoc) = walk(trueDoc) + (forceBreak, IfBreaks({yes: trueDoc, no: falseDoc})) + } + | Group({shouldBreak: forceBreak, doc: children}) => + let (childForcesBreak, newChildren) = walk(children) + let shouldBreak = forceBreak || childForcesBreak + (shouldBreak, Group({shouldBreak: shouldBreak, doc: newChildren})) + | Concat(children) => + let (forceBreak, newChildren) = List.fold_left(((forceBreak, newChildren), child) => { + let (childForcesBreak, newChild) = walk(child) + (forceBreak || childForcesBreak, list{newChild, ...newChildren}) + }, (false, list{}), children) + + (forceBreak, Concat(List.rev(newChildren))) + | CustomLayout(children) => + /* When using CustomLayout, we don't want to propagate forced breaks + * from the children up. By definition it picks the first layout that fits + * otherwise it takes the last of the list. + * However we do want to propagate forced breaks in the sublayouts. They + * might need to be broken. 
We just don't propagate them any higher here */ + let children = switch walk(Concat(children)) { + | (_, Concat(children)) => children + | _ => assert false + } + + (false, CustomLayout(children)) + } + + let (_, processedDoc) = walk(doc) + processedDoc + } + + let join = (~sep, docs) => { + let rec loop = (acc, sep, docs) => + switch docs { + | list{} => List.rev(acc) + | list{x} => List.rev(list{x, ...acc}) + | list{x, ...xs} => loop(list{sep, x, ...acc}, sep, xs) + } + + Concat(loop(list{}, sep, docs)) + } + + let rec fits = (w, doc) => + switch doc { + | _ when w < 0 => false + | list{} => true + | list{(_ind, _mode, Text(txt)), ...rest} => fits(w - String.length(txt), rest) + | list{(ind, mode, Indent(doc)), ...rest} => fits(w, list{(ind + 2, mode, doc), ...rest}) + | list{(_ind, Flat, LineBreak(break)), ...rest} => + if break == Hard { + true + } else { + let w = if break == Classic { + w - 1 + } else { + w + } + fits(w, rest) + } + | list{(_ind, _mode, Nil), ...rest} => fits(w, rest) + | list{(_ind, Break, LineBreak(_break)), ..._rest} => true + | list{(ind, mode, Group({shouldBreak: forceBreak, doc})), ...rest} => + let mode = if forceBreak { + Break + } else { + mode + } + fits(w, list{(ind, mode, doc), ...rest}) + | list{(ind, mode, IfBreaks({yes: breakDoc, no: flatDoc})), ...rest} => + if mode == Break { + fits(w, list{(ind, mode, breakDoc), ...rest}) + } else { + fits(w, list{(ind, mode, flatDoc), ...rest}) + } + | list{(ind, mode, Concat(docs)), ...rest} => + let ops = List.map(doc => (ind, mode, doc), docs) + fits(w, List.append(ops, rest)) + /* | (_ind, _mode, Cursor)::rest -> fits w rest */ + | list{(_ind, _mode, LineSuffix(_)), ...rest} => fits(w, rest) + | list{(_ind, _mode, BreakParent), ...rest} => fits(w, rest) + | list{(ind, mode, CustomLayout(list{hd, ..._})), ...rest} => + /* TODO: if we have nested custom layouts, what we should do here? 
*/ + fits(w, list{(ind, mode, hd), ...rest}) + | list{(_ind, _mode, CustomLayout(_)), ...rest} => fits(w, rest) + } + + let toString = (~width, doc) => { + let doc = propagateForcedBreaks(doc) + let buffer = MiniBuffer.create(1000) + + let rec process = (~pos, lineSuffices, stack) => + switch stack { + | list{(ind, mode, doc) as cmd, ...rest} => + switch doc { + | Nil | BreakParent => process(~pos, lineSuffices, rest) + | Text(txt) => + MiniBuffer.add_string(buffer, txt) + process(~pos=String.length(txt) + pos, lineSuffices, rest) + | LineSuffix(doc) => process(~pos, list{(ind, mode, doc), ...lineSuffices}, rest) + | Concat(docs) => + let ops = List.map(doc => (ind, mode, doc), docs) + process(~pos, lineSuffices, List.append(ops, rest)) + | Indent(doc) => process(~pos, lineSuffices, list{(ind + 2, mode, doc), ...rest}) + | IfBreaks({yes: breakDoc, no: flatDoc}) => + if mode == Break { + process(~pos, lineSuffices, list{(ind, mode, breakDoc), ...rest}) + } else { + process(~pos, lineSuffices, list{(ind, mode, flatDoc), ...rest}) + } + | LineBreak(lineStyle) => + if mode == Break { + switch lineSuffices { + | list{} => + MiniBuffer.flush_newline(buffer) + MiniBuffer.add_string(buffer, @doesNotRaise String.make(ind, ' ')) + process(~pos=ind, list{}, rest) + | _docs => + process( + ~pos=ind, + list{}, + List.concat(list{List.rev(lineSuffices), list{cmd, ...rest}}), + ) + } + } else { + /* mode = Flat */ + let pos = switch lineStyle { + | Classic => + MiniBuffer.add_string(buffer, " ") + pos + 1 + | Hard => + MiniBuffer.flush_newline(buffer) + 0 + | Soft => pos + } + + process(~pos, lineSuffices, rest) + } + | Group({shouldBreak, doc}) => + if shouldBreak || !fits(width - pos, list{(ind, Flat, doc), ...rest}) { + process(~pos, lineSuffices, list{(ind, Break, doc), ...rest}) + } else { + process(~pos, lineSuffices, list{(ind, Flat, doc), ...rest}) + } + | CustomLayout(docs) => + let rec findGroupThatFits = groups => + switch groups { + | list{} => Nil + | list{lastGroup} => lastGroup + | list{doc, ...docs} => + if fits(width - pos, list{(ind, Flat, doc), ...rest}) { + doc + } else { + findGroupThatFits(docs) + } + } + + let doc = findGroupThatFits(docs) + process(~pos, lineSuffices, list{(ind, Flat, doc), ...rest}) + } + | list{} => + switch lineSuffices { + | list{} => () + | suffices => process(~pos=0, list{}, List.rev(suffices)) + } + } + + process(~pos=0, list{}, list{(0, Flat, doc)}) + + let len = MiniBuffer.length(buffer) + if len > 0 && MiniBuffer.unsafe_get(buffer, len - 1) !== '\n' { + MiniBuffer.add_char(buffer, '\n') + } + MiniBuffer.contents(buffer) + } + + @live + let debug = t => { + let rec toDoc = x => + switch x { + | Nil => text("nil") + | BreakParent => text("breakparent") + | Text(txt) => text("text(" ++ (txt ++ ")")) + | LineSuffix(doc) => + group( + concat(list{ + text("linesuffix("), + indent(concat(list{line, toDoc(doc)})), + line, + text(")"), + }), + ) + | Concat(docs) => + group( + concat(list{ + text("concat("), + indent( + concat(list{line, join(~sep=concat(list{text(","), line}), List.map(toDoc, docs))}), + ), + line, + text(")"), + }), + ) + | CustomLayout(docs) => + group( + concat(list{ + text("customLayout("), + indent( + concat(list{line, join(~sep=concat(list{text(","), line}), List.map(toDoc, docs))}), + ), + line, + text(")"), + }), + ) + | Indent(doc) => concat(list{text("indent("), softLine, toDoc(doc), softLine, text(")")}) + | IfBreaks({yes: trueDoc, no: falseDoc}) => + group( + concat(list{ + text("ifBreaks("), + indent( + concat(list{line, 
toDoc(trueDoc), concat(list{text(","), line}), toDoc(falseDoc)}), + ), + line, + text(")"), + }), + ) + | LineBreak(break) => + let breakTxt = switch break { + | Classic => "Classic" + | Soft => "Soft" + | Hard => "Hard" + } + + text("LineBreak(" ++ (breakTxt ++ ")")) + | Group({shouldBreak, doc}) => + group( + concat(list{ + text("Group("), + indent( + concat(list{ + line, + text("shouldBreak: " ++ string_of_bool(shouldBreak)), + concat(list{text(","), line}), + toDoc(doc), + }), + ), + line, + text(")"), + }), + ) + } + + let doc = toDoc(t) + toString(~width=10, doc) |> print_endline + } +} + +module Sexp: { + type t + + let atom: string => t + let list: list => t + let toString: t => string +} = { + type rec t = + | Atom(string) + | List(list) + + let atom = s => Atom(s) + let list = l => List(l) + + let rec toDoc = t => + switch t { + | Atom(s) => Doc.text(s) + | List(list{}) => Doc.text("()") + | List(list{sexpr}) => Doc.concat(list{Doc.lparen, toDoc(sexpr), Doc.rparen}) + | List(list{hd, ...tail}) => + Doc.group( + Doc.concat(list{ + Doc.lparen, + toDoc(hd), + Doc.indent(Doc.concat(list{Doc.line, Doc.join(~sep=Doc.line, List.map(toDoc, tail))})), + Doc.rparen, + }), + ) + } + + let toString = sexpr => { + let doc = toDoc(sexpr) + Doc.toString(~width=80, doc) + } +} + +module SexpAst: { + let implementation: Parsetree.structure => Sexp.t + let interface: Parsetree.signature => Sexp.t +} = { + open Parsetree + + let mapEmpty = (~f, items) => + switch items { + | list{} => list{Sexp.list(list{})} + | items => List.map(f, items) + } + + let string = txt => Sexp.atom("\"" ++ (txt ++ "\"")) + + let char = c => Sexp.atom("'" ++ (Char.escaped(c) ++ "'")) + + let optChar = oc => + switch oc { + | None => Sexp.atom("None") + | Some(c) => Sexp.list(list{Sexp.atom("Some"), char(c)}) + } + + let longident = l => { + let rec loop = l => + switch l { + | Longident.Lident(ident) => Sexp.list(list{Sexp.atom("Lident"), string(ident)}) + | Longident.Ldot(lident, txt) => Sexp.list(list{Sexp.atom("Ldot"), loop(lident), string(txt)}) + | Longident.Lapply(l1, l2) => Sexp.list(list{Sexp.atom("Lapply"), loop(l1), loop(l2)}) + } + + Sexp.list(list{Sexp.atom("longident"), loop(l)}) + } + + let closedFlag = flag => + switch flag { + | Asttypes.Closed => Sexp.atom("Closed") + | Open => Sexp.atom("Open") + } + + let directionFlag = flag => + switch flag { + | Asttypes.Upto => Sexp.atom("Upto") + | Downto => Sexp.atom("Downto") + } + + let recFlag = flag => + switch flag { + | Asttypes.Recursive => Sexp.atom("Recursive") + | Nonrecursive => Sexp.atom("Nonrecursive") + } + + let overrideFlag = flag => + switch flag { + | Asttypes.Override => Sexp.atom("Override") + | Fresh => Sexp.atom("Fresh") + } + + let privateFlag = flag => + switch flag { + | Asttypes.Public => Sexp.atom("Public") + | Private => Sexp.atom("Private") + } + + let mutableFlag = flag => + switch flag { + | Asttypes.Immutable => Sexp.atom("Immutable") + | Mutable => Sexp.atom("Mutable") + } + + let variance = v => + switch v { + | Asttypes.Covariant => Sexp.atom("Covariant") + | Contravariant => Sexp.atom("Contravariant") + | Invariant => Sexp.atom("Invariant") + } + + let argLabel = lbl => + switch lbl { + | Asttypes.Nolabel => Sexp.atom("Nolabel") + | Labelled(txt) => Sexp.list(list{Sexp.atom("Labelled"), string(txt)}) + | Optional(txt) => Sexp.list(list{Sexp.atom("Optional"), string(txt)}) + } + + let constant = c => { + let sexpr = switch c { + | Pconst_integer(txt, tag) => + Sexp.list(list{Sexp.atom("Pconst_integer"), string(txt), 
optChar(tag)}) + | Pconst_char(c) => Sexp.list(list{Sexp.atom("Pconst_char"), Sexp.atom(Char.escaped(c))}) + | Pconst_string(txt, tag) => + Sexp.list(list{ + Sexp.atom("Pconst_string"), + string(txt), + switch tag { + | Some(txt) => Sexp.list(list{Sexp.atom("Some"), string(txt)}) + | None => Sexp.atom("None") + }, + }) + | Pconst_float(txt, tag) => + Sexp.list(list{Sexp.atom("Pconst_float"), string(txt), optChar(tag)}) + } + + Sexp.list(list{Sexp.atom("constant"), sexpr}) + } + + let rec structure = s => Sexp.list(list{Sexp.atom("structure"), ...List.map(structureItem, s)}) + + and structureItem = si => { + let desc = switch si.pstr_desc { + | Pstr_eval(expr, attrs) => + Sexp.list(list{Sexp.atom("Pstr_eval"), expression(expr), attributes(attrs)}) + | Pstr_value(flag, vbs) => + Sexp.list(list{ + Sexp.atom("Pstr_value"), + recFlag(flag), + Sexp.list(mapEmpty(~f=valueBinding, vbs)), + }) + | Pstr_primitive(vd) => Sexp.list(list{Sexp.atom("Pstr_primitive"), valueDescription(vd)}) + | Pstr_type(flag, tds) => + Sexp.list(list{ + Sexp.atom("Pstr_type"), + recFlag(flag), + Sexp.list(mapEmpty(~f=typeDeclaration, tds)), + }) + | Pstr_typext(typext) => Sexp.list(list{Sexp.atom("Pstr_type"), typeExtension(typext)}) + | Pstr_exception(ec) => Sexp.list(list{Sexp.atom("Pstr_exception"), extensionConstructor(ec)}) + | Pstr_module(mb) => Sexp.list(list{Sexp.atom("Pstr_module"), moduleBinding(mb)}) + | Pstr_recmodule(mbs) => + Sexp.list(list{Sexp.atom("Pstr_recmodule"), Sexp.list(mapEmpty(~f=moduleBinding, mbs))}) + | Pstr_modtype(modTypDecl) => + Sexp.list(list{Sexp.atom("Pstr_modtype"), moduleTypeDeclaration(modTypDecl)}) + | Pstr_open(openDesc) => Sexp.list(list{Sexp.atom("Pstr_open"), openDescription(openDesc)}) + | Pstr_class(_) => Sexp.atom("Pstr_class") + | Pstr_class_type(_) => Sexp.atom("Pstr_class_type") + | Pstr_include(id) => Sexp.list(list{Sexp.atom("Pstr_include"), includeDeclaration(id)}) + | Pstr_attribute(attr) => Sexp.list(list{Sexp.atom("Pstr_attribute"), attribute(attr)}) + | Pstr_extension(ext, attrs) => + Sexp.list(list{Sexp.atom("Pstr_extension"), extension(ext), attributes(attrs)}) + } + + Sexp.list(list{Sexp.atom("structure_item"), desc}) + } + + and includeDeclaration = id => + Sexp.list(list{ + Sexp.atom("include_declaration"), + moduleExpression(id.pincl_mod), + attributes(id.pincl_attributes), + }) + + and openDescription = od => + Sexp.list(list{ + Sexp.atom("open_description"), + longident(od.popen_lid.Asttypes.txt), + attributes(od.popen_attributes), + }) + + and moduleTypeDeclaration = mtd => + Sexp.list(list{ + Sexp.atom("module_type_declaration"), + string(mtd.pmtd_name.Asttypes.txt), + switch mtd.pmtd_type { + | None => Sexp.atom("None") + | Some(modType) => Sexp.list(list{Sexp.atom("Some"), moduleType(modType)}) + }, + attributes(mtd.pmtd_attributes), + }) + + and moduleBinding = mb => + Sexp.list(list{ + Sexp.atom("module_binding"), + string(mb.pmb_name.Asttypes.txt), + moduleExpression(mb.pmb_expr), + attributes(mb.pmb_attributes), + }) + + and moduleExpression = me => { + let desc = switch me.pmod_desc { + | Pmod_ident(modName) => + Sexp.list(list{Sexp.atom("Pmod_ident"), longident(modName.Asttypes.txt)}) + | Pmod_structure(s) => Sexp.list(list{Sexp.atom("Pmod_structure"), structure(s)}) + | Pmod_functor(lbl, optModType, modExpr) => + Sexp.list(list{ + Sexp.atom("Pmod_functor"), + string(lbl.Asttypes.txt), + switch optModType { + | None => Sexp.atom("None") + | Some(modType) => Sexp.list(list{Sexp.atom("Some"), moduleType(modType)}) + }, + moduleExpression(modExpr), + 
}) + | Pmod_apply(callModExpr, modExprArg) => + Sexp.list(list{ + Sexp.atom("Pmod_apply"), + moduleExpression(callModExpr), + moduleExpression(modExprArg), + }) + | Pmod_constraint(modExpr, modType) => + Sexp.list(list{Sexp.atom("Pmod_constraint"), moduleExpression(modExpr), moduleType(modType)}) + | Pmod_unpack(expr) => Sexp.list(list{Sexp.atom("Pmod_unpack"), expression(expr)}) + | Pmod_extension(ext) => Sexp.list(list{Sexp.atom("Pmod_extension"), extension(ext)}) + } + + Sexp.list(list{Sexp.atom("module_expr"), desc, attributes(me.pmod_attributes)}) + } + + and moduleType = mt => { + let desc = switch mt.pmty_desc { + | Pmty_ident(longidentLoc) => + Sexp.list(list{Sexp.atom("Pmty_ident"), longident(longidentLoc.Asttypes.txt)}) + | Pmty_signature(s) => Sexp.list(list{Sexp.atom("Pmty_signature"), signature(s)}) + | Pmty_functor(lbl, optModType, modType) => + Sexp.list(list{ + Sexp.atom("Pmty_functor"), + string(lbl.Asttypes.txt), + switch optModType { + | None => Sexp.atom("None") + | Some(modType) => Sexp.list(list{Sexp.atom("Some"), moduleType(modType)}) + }, + moduleType(modType), + }) + | Pmty_alias(longidentLoc) => + Sexp.list(list{Sexp.atom("Pmty_alias"), longident(longidentLoc.Asttypes.txt)}) + | Pmty_extension(ext) => Sexp.list(list{Sexp.atom("Pmty_extension"), extension(ext)}) + | Pmty_typeof(modExpr) => Sexp.list(list{Sexp.atom("Pmty_typeof"), moduleExpression(modExpr)}) + | Pmty_with(modType, withConstraints) => + Sexp.list(list{ + Sexp.atom("Pmty_with"), + moduleType(modType), + Sexp.list(mapEmpty(~f=withConstraint, withConstraints)), + }) + } + + Sexp.list(list{Sexp.atom("module_type"), desc, attributes(mt.pmty_attributes)}) + } + + and withConstraint = wc => + switch wc { + | Pwith_type(longidentLoc, td) => + Sexp.list(list{ + Sexp.atom("Pmty_with"), + longident(longidentLoc.Asttypes.txt), + typeDeclaration(td), + }) + | Pwith_module(l1, l2) => + Sexp.list(list{ + Sexp.atom("Pwith_module"), + longident(l1.Asttypes.txt), + longident(l2.Asttypes.txt), + }) + | Pwith_typesubst(longidentLoc, td) => + Sexp.list(list{ + Sexp.atom("Pwith_typesubst"), + longident(longidentLoc.Asttypes.txt), + typeDeclaration(td), + }) + | Pwith_modsubst(l1, l2) => + Sexp.list(list{ + Sexp.atom("Pwith_modsubst"), + longident(l1.Asttypes.txt), + longident(l2.Asttypes.txt), + }) + } + + and signature = s => Sexp.list(list{Sexp.atom("signature"), ...List.map(signatureItem, s)}) + + and signatureItem = si => { + let descr = switch si.psig_desc { + | Psig_value(vd) => Sexp.list(list{Sexp.atom("Psig_value"), valueDescription(vd)}) + | Psig_type(flag, typeDeclarations) => + Sexp.list(list{ + Sexp.atom("Psig_type"), + recFlag(flag), + Sexp.list(mapEmpty(~f=typeDeclaration, typeDeclarations)), + }) + | Psig_typext(typExt) => Sexp.list(list{Sexp.atom("Psig_typext"), typeExtension(typExt)}) + | Psig_exception(extConstr) => + Sexp.list(list{Sexp.atom("Psig_exception"), extensionConstructor(extConstr)}) + | Psig_module(modDecl) => Sexp.list(list{Sexp.atom("Psig_module"), moduleDeclaration(modDecl)}) + | Psig_recmodule(modDecls) => + Sexp.list(list{ + Sexp.atom("Psig_recmodule"), + Sexp.list(mapEmpty(~f=moduleDeclaration, modDecls)), + }) + | Psig_modtype(modTypDecl) => + Sexp.list(list{Sexp.atom("Psig_modtype"), moduleTypeDeclaration(modTypDecl)}) + | Psig_open(openDesc) => Sexp.list(list{Sexp.atom("Psig_open"), openDescription(openDesc)}) + | Psig_include(inclDecl) => + Sexp.list(list{Sexp.atom("Psig_include"), includeDescription(inclDecl)}) + | Psig_class(_) => Sexp.list(list{Sexp.atom("Psig_class")}) + | 
Psig_class_type(_) => Sexp.list(list{Sexp.atom("Psig_class_type")}) + | Psig_attribute(attr) => Sexp.list(list{Sexp.atom("Psig_attribute"), attribute(attr)}) + | Psig_extension(ext, attrs) => + Sexp.list(list{Sexp.atom("Psig_extension"), extension(ext), attributes(attrs)}) + } + + Sexp.list(list{Sexp.atom("signature_item"), descr}) + } + + and includeDescription = id => + Sexp.list(list{ + Sexp.atom("include_description"), + moduleType(id.pincl_mod), + attributes(id.pincl_attributes), + }) + + and moduleDeclaration = md => + Sexp.list(list{ + Sexp.atom("module_declaration"), + string(md.pmd_name.Asttypes.txt), + moduleType(md.pmd_type), + attributes(md.pmd_attributes), + }) + + and valueBinding = vb => + Sexp.list(list{ + Sexp.atom("value_binding"), + pattern(vb.pvb_pat), + expression(vb.pvb_expr), + attributes(vb.pvb_attributes), + }) + + and valueDescription = vd => + Sexp.list(list{ + Sexp.atom("value_description"), + string(vd.pval_name.Asttypes.txt), + coreType(vd.pval_type), + Sexp.list(mapEmpty(~f=string, vd.pval_prim)), + attributes(vd.pval_attributes), + }) + + and typeDeclaration = td => + Sexp.list(list{ + Sexp.atom("type_declaration"), + string(td.ptype_name.Asttypes.txt), + Sexp.list(list{ + Sexp.atom("ptype_params"), + Sexp.list( + mapEmpty( + ~f=((typexpr, var)) => Sexp.list(list{coreType(typexpr), variance(var)}), + td.ptype_params, + ), + ), + }), + Sexp.list(list{ + Sexp.atom("ptype_cstrs"), + Sexp.list( + mapEmpty( + ~f=((typ1, typ2, _loc)) => Sexp.list(list{coreType(typ1), coreType(typ2)}), + td.ptype_cstrs, + ), + ), + }), + Sexp.list(list{Sexp.atom("ptype_kind"), typeKind(td.ptype_kind)}), + Sexp.list(list{ + Sexp.atom("ptype_manifest"), + switch td.ptype_manifest { + | None => Sexp.atom("None") + | Some(typ) => Sexp.list(list{Sexp.atom("Some"), coreType(typ)}) + }, + }), + Sexp.list(list{Sexp.atom("ptype_private"), privateFlag(td.ptype_private)}), + attributes(td.ptype_attributes), + }) + + and extensionConstructor = ec => + Sexp.list(list{ + Sexp.atom("extension_constructor"), + string(ec.pext_name.Asttypes.txt), + extensionConstructorKind(ec.pext_kind), + attributes(ec.pext_attributes), + }) + + and extensionConstructorKind = kind => + switch kind { + | Pext_decl(args, optTypExpr) => + Sexp.list(list{ + Sexp.atom("Pext_decl"), + constructorArguments(args), + switch optTypExpr { + | None => Sexp.atom("None") + | Some(typ) => Sexp.list(list{Sexp.atom("Some"), coreType(typ)}) + }, + }) + | Pext_rebind(longidentLoc) => + Sexp.list(list{Sexp.atom("Pext_rebind"), longident(longidentLoc.Asttypes.txt)}) + } + + and typeExtension = te => + Sexp.list(list{ + Sexp.atom("type_extension"), + Sexp.list(list{Sexp.atom("ptyext_path"), longident(te.ptyext_path.Asttypes.txt)}), + Sexp.list(list{ + Sexp.atom("ptyext_parms"), + Sexp.list( + mapEmpty( + ~f=((typexpr, var)) => Sexp.list(list{coreType(typexpr), variance(var)}), + te.ptyext_params, + ), + ), + }), + Sexp.list(list{ + Sexp.atom("ptyext_constructors"), + Sexp.list(mapEmpty(~f=extensionConstructor, te.ptyext_constructors)), + }), + Sexp.list(list{Sexp.atom("ptyext_private"), privateFlag(te.ptyext_private)}), + attributes(te.ptyext_attributes), + }) + + and typeKind = kind => + switch kind { + | Ptype_abstract => Sexp.atom("Ptype_abstract") + | Ptype_variant(constrDecls) => + Sexp.list(list{ + Sexp.atom("Ptype_variant"), + Sexp.list(mapEmpty(~f=constructorDeclaration, constrDecls)), + }) + | Ptype_record(lblDecls) => + Sexp.list(list{Sexp.atom("Ptype_record"), Sexp.list(mapEmpty(~f=labelDeclaration, lblDecls))}) + | 
Ptype_open => Sexp.atom("Ptype_open") + } + + and constructorDeclaration = cd => + Sexp.list(list{ + Sexp.atom("constructor_declaration"), + string(cd.pcd_name.Asttypes.txt), + Sexp.list(list{Sexp.atom("pcd_args"), constructorArguments(cd.pcd_args)}), + Sexp.list(list{ + Sexp.atom("pcd_res"), + switch cd.pcd_res { + | None => Sexp.atom("None") + | Some(typ) => Sexp.list(list{Sexp.atom("Some"), coreType(typ)}) + }, + }), + attributes(cd.pcd_attributes), + }) + + and constructorArguments = args => + switch args { + | Pcstr_tuple(types) => + Sexp.list(list{Sexp.atom("Pcstr_tuple"), Sexp.list(mapEmpty(~f=coreType, types))}) + | Pcstr_record(lds) => + Sexp.list(list{Sexp.atom("Pcstr_record"), Sexp.list(mapEmpty(~f=labelDeclaration, lds))}) + } + + and labelDeclaration = ld => + Sexp.list(list{ + Sexp.atom("label_declaration"), + string(ld.pld_name.Asttypes.txt), + mutableFlag(ld.pld_mutable), + coreType(ld.pld_type), + attributes(ld.pld_attributes), + }) + + and expression = expr => { + let desc = switch expr.pexp_desc { + | Pexp_ident(longidentLoc) => + Sexp.list(list{Sexp.atom("Pexp_ident"), longident(longidentLoc.Asttypes.txt)}) + | Pexp_constant(c) => Sexp.list(list{Sexp.atom("Pexp_constant"), constant(c)}) + | Pexp_let(flag, vbs, expr) => + Sexp.list(list{ + Sexp.atom("Pexp_let"), + recFlag(flag), + Sexp.list(mapEmpty(~f=valueBinding, vbs)), + expression(expr), + }) + | Pexp_function(cases) => + Sexp.list(list{Sexp.atom("Pexp_function"), Sexp.list(mapEmpty(~f=case, cases))}) + | Pexp_fun(argLbl, exprOpt, pat, expr) => + Sexp.list(list{ + Sexp.atom("Pexp_fun"), + argLabel(argLbl), + switch exprOpt { + | None => Sexp.atom("None") + | Some(expr) => Sexp.list(list{Sexp.atom("Some"), expression(expr)}) + }, + pattern(pat), + expression(expr), + }) + | Pexp_apply(expr, args) => + Sexp.list(list{ + Sexp.atom("Pexp_apply"), + expression(expr), + Sexp.list( + mapEmpty( + ~f=((argLbl, expr)) => Sexp.list(list{argLabel(argLbl), expression(expr)}), + args, + ), + ), + }) + | Pexp_match(expr, cases) => + Sexp.list(list{ + Sexp.atom("Pexp_match"), + expression(expr), + Sexp.list(mapEmpty(~f=case, cases)), + }) + | Pexp_try(expr, cases) => + Sexp.list(list{Sexp.atom("Pexp_try"), expression(expr), Sexp.list(mapEmpty(~f=case, cases))}) + | Pexp_tuple(exprs) => + Sexp.list(list{Sexp.atom("Pexp_tuple"), Sexp.list(mapEmpty(~f=expression, exprs))}) + | Pexp_construct(longidentLoc, exprOpt) => + Sexp.list(list{ + Sexp.atom("Pexp_construct"), + longident(longidentLoc.Asttypes.txt), + switch exprOpt { + | None => Sexp.atom("None") + | Some(expr) => Sexp.list(list{Sexp.atom("Some"), expression(expr)}) + }, + }) + | Pexp_variant(lbl, exprOpt) => + Sexp.list(list{ + Sexp.atom("Pexp_variant"), + string(lbl), + switch exprOpt { + | None => Sexp.atom("None") + | Some(expr) => Sexp.list(list{Sexp.atom("Some"), expression(expr)}) + }, + }) + | Pexp_record(rows, optExpr) => + Sexp.list(list{ + Sexp.atom("Pexp_record"), + Sexp.list( + mapEmpty( + ~f=((longidentLoc, expr)) => + Sexp.list(list{longident(longidentLoc.Asttypes.txt), expression(expr)}), + rows, + ), + ), + switch optExpr { + | None => Sexp.atom("None") + | Some(expr) => Sexp.list(list{Sexp.atom("Some"), expression(expr)}) + }, + }) + | Pexp_field(expr, longidentLoc) => + Sexp.list(list{ + Sexp.atom("Pexp_field"), + expression(expr), + longident(longidentLoc.Asttypes.txt), + }) + | Pexp_setfield(expr1, longidentLoc, expr2) => + Sexp.list(list{ + Sexp.atom("Pexp_setfield"), + expression(expr1), + longident(longidentLoc.Asttypes.txt), + expression(expr2), + }) + | 
Pexp_array(exprs) => + Sexp.list(list{Sexp.atom("Pexp_array"), Sexp.list(mapEmpty(~f=expression, exprs))}) + | Pexp_ifthenelse(expr1, expr2, optExpr) => + Sexp.list(list{ + Sexp.atom("Pexp_ifthenelse"), + expression(expr1), + expression(expr2), + switch optExpr { + | None => Sexp.atom("None") + | Some(expr) => Sexp.list(list{Sexp.atom("Some"), expression(expr)}) + }, + }) + | Pexp_sequence(expr1, expr2) => + Sexp.list(list{Sexp.atom("Pexp_sequence"), expression(expr1), expression(expr2)}) + | Pexp_while(expr1, expr2) => + Sexp.list(list{Sexp.atom("Pexp_while"), expression(expr1), expression(expr2)}) + | Pexp_for(pat, e1, e2, flag, e3) => + Sexp.list(list{ + Sexp.atom("Pexp_for"), + pattern(pat), + expression(e1), + expression(e2), + directionFlag(flag), + expression(e3), + }) + | Pexp_constraint(expr, typexpr) => + Sexp.list(list{Sexp.atom("Pexp_constraint"), expression(expr), coreType(typexpr)}) + | Pexp_coerce(expr, optTyp, typexpr) => + Sexp.list(list{ + Sexp.atom("Pexp_coerce"), + expression(expr), + switch optTyp { + | None => Sexp.atom("None") + | Some(typ) => Sexp.list(list{Sexp.atom("Some"), coreType(typ)}) + }, + coreType(typexpr), + }) + | Pexp_send(_) => Sexp.list(list{Sexp.atom("Pexp_send")}) + | Pexp_new(_) => Sexp.list(list{Sexp.atom("Pexp_new")}) + | Pexp_setinstvar(_) => Sexp.list(list{Sexp.atom("Pexp_setinstvar")}) + | Pexp_override(_) => Sexp.list(list{Sexp.atom("Pexp_override")}) + | Pexp_letmodule(modName, modExpr, expr) => + Sexp.list(list{ + Sexp.atom("Pexp_letmodule"), + string(modName.Asttypes.txt), + moduleExpression(modExpr), + expression(expr), + }) + | Pexp_letexception(extConstr, expr) => + Sexp.list(list{ + Sexp.atom("Pexp_letexception"), + extensionConstructor(extConstr), + expression(expr), + }) + | Pexp_assert(expr) => Sexp.list(list{Sexp.atom("Pexp_assert"), expression(expr)}) + | Pexp_lazy(expr) => Sexp.list(list{Sexp.atom("Pexp_lazy"), expression(expr)}) + | Pexp_poly(_) => Sexp.list(list{Sexp.atom("Pexp_poly")}) + | Pexp_object(_) => Sexp.list(list{Sexp.atom("Pexp_object")}) + | Pexp_newtype(lbl, expr) => + Sexp.list(list{Sexp.atom("Pexp_newtype"), string(lbl.Asttypes.txt), expression(expr)}) + | Pexp_pack(modExpr) => Sexp.list(list{Sexp.atom("Pexp_pack"), moduleExpression(modExpr)}) + | Pexp_open(flag, longidentLoc, expr) => + Sexp.list(list{ + Sexp.atom("Pexp_open"), + overrideFlag(flag), + longident(longidentLoc.Asttypes.txt), + expression(expr), + }) + | Pexp_extension(ext) => Sexp.list(list{Sexp.atom("Pexp_extension"), extension(ext)}) + | Pexp_unreachable => Sexp.atom("Pexp_unreachable") + } + + Sexp.list(list{Sexp.atom("expression"), desc}) + } + + and case = c => + Sexp.list(list{ + Sexp.atom("case"), + Sexp.list(list{Sexp.atom("pc_lhs"), pattern(c.pc_lhs)}), + Sexp.list(list{ + Sexp.atom("pc_guard"), + switch c.pc_guard { + | None => Sexp.atom("None") + | Some(expr) => Sexp.list(list{Sexp.atom("Some"), expression(expr)}) + }, + }), + Sexp.list(list{Sexp.atom("pc_rhs"), expression(c.pc_rhs)}), + }) + + and pattern = p => { + let descr = switch p.ppat_desc { + | Ppat_any => Sexp.atom("Ppat_any") + | Ppat_var(var) => Sexp.list(list{Sexp.atom("Ppat_var"), string(var.Location.txt)}) + | Ppat_alias(p, alias) => + Sexp.list(list{Sexp.atom("Ppat_alias"), pattern(p), string(alias.txt)}) + | Ppat_constant(c) => Sexp.list(list{Sexp.atom("Ppat_constant"), constant(c)}) + | Ppat_interval(lo, hi) => + Sexp.list(list{Sexp.atom("Ppat_interval"), constant(lo), constant(hi)}) + | Ppat_tuple(patterns) => + Sexp.list(list{Sexp.atom("Ppat_tuple"), 
Sexp.list(mapEmpty(~f=pattern, patterns))}) + | Ppat_construct(longidentLoc, optPattern) => + Sexp.list(list{ + Sexp.atom("Ppat_construct"), + longident(longidentLoc.Location.txt), + switch optPattern { + | None => Sexp.atom("None") + | Some(p) => Sexp.list(list{Sexp.atom("some"), pattern(p)}) + }, + }) + | Ppat_variant(lbl, optPattern) => + Sexp.list(list{ + Sexp.atom("Ppat_variant"), + string(lbl), + switch optPattern { + | None => Sexp.atom("None") + | Some(p) => Sexp.list(list{Sexp.atom("Some"), pattern(p)}) + }, + }) + | Ppat_record(rows, flag) => + Sexp.list(list{ + Sexp.atom("Ppat_record"), + closedFlag(flag), + Sexp.list( + mapEmpty( + ~f=((longidentLoc, p)) => + Sexp.list(list{longident(longidentLoc.Location.txt), pattern(p)}), + rows, + ), + ), + }) + | Ppat_array(patterns) => + Sexp.list(list{Sexp.atom("Ppat_array"), Sexp.list(mapEmpty(~f=pattern, patterns))}) + | Ppat_or(p1, p2) => Sexp.list(list{Sexp.atom("Ppat_or"), pattern(p1), pattern(p2)}) + | Ppat_constraint(p, typexpr) => + Sexp.list(list{Sexp.atom("Ppat_constraint"), pattern(p), coreType(typexpr)}) + | Ppat_type(longidentLoc) => + Sexp.list(list{Sexp.atom("Ppat_type"), longident(longidentLoc.Location.txt)}) + | Ppat_lazy(p) => Sexp.list(list{Sexp.atom("Ppat_lazy"), pattern(p)}) + | Ppat_unpack(stringLoc) => + Sexp.list(list{Sexp.atom("Ppat_unpack"), string(stringLoc.Location.txt)}) + | Ppat_exception(p) => Sexp.list(list{Sexp.atom("Ppat_exception"), pattern(p)}) + | Ppat_extension(ext) => Sexp.list(list{Sexp.atom("Ppat_extension"), extension(ext)}) + | Ppat_open(longidentLoc, p) => + Sexp.list(list{Sexp.atom("Ppat_open"), longident(longidentLoc.Location.txt), pattern(p)}) + } + + Sexp.list(list{Sexp.atom("pattern"), descr}) + } + + and objectField = field => + switch field { + | Otag(lblLoc, attrs, typexpr) => + Sexp.list(list{Sexp.atom("Otag"), string(lblLoc.txt), attributes(attrs), coreType(typexpr)}) + | Oinherit(typexpr) => Sexp.list(list{Sexp.atom("Oinherit"), coreType(typexpr)}) + } + + and rowField = field => + switch field { + | Rtag(labelLoc, attrs, truth, types) => + Sexp.list(list{ + Sexp.atom("Rtag"), + string(labelLoc.txt), + attributes(attrs), + Sexp.atom( + if truth { + "true" + } else { + "false" + }, + ), + Sexp.list(mapEmpty(~f=coreType, types)), + }) + | Rinherit(typexpr) => Sexp.list(list{Sexp.atom("Rinherit"), coreType(typexpr)}) + } + + and packageType = ((modNameLoc, packageConstraints)) => + Sexp.list(list{ + Sexp.atom("package_type"), + longident(modNameLoc.Asttypes.txt), + Sexp.list( + mapEmpty( + ~f=((modNameLoc, typexpr)) => + Sexp.list(list{longident(modNameLoc.Asttypes.txt), coreType(typexpr)}), + packageConstraints, + ), + ), + }) + + and coreType = typexpr => { + let desc = switch typexpr.ptyp_desc { + | Ptyp_any => Sexp.atom("Ptyp_any") + | Ptyp_var(var) => Sexp.list(list{Sexp.atom("Ptyp_var"), string(var)}) + | Ptyp_arrow(argLbl, typ1, typ2) => + Sexp.list(list{Sexp.atom("Ptyp_arrow"), argLabel(argLbl), coreType(typ1), coreType(typ2)}) + | Ptyp_tuple(types) => + Sexp.list(list{Sexp.atom("Ptyp_tuple"), Sexp.list(mapEmpty(~f=coreType, types))}) + | Ptyp_constr(longidentLoc, types) => + Sexp.list(list{ + Sexp.atom("Ptyp_constr"), + longident(longidentLoc.txt), + Sexp.list(mapEmpty(~f=coreType, types)), + }) + | Ptyp_alias(typexpr, alias) => + Sexp.list(list{Sexp.atom("Ptyp_alias"), coreType(typexpr), string(alias)}) + | Ptyp_object(fields, flag) => + Sexp.list(list{ + Sexp.atom("Ptyp_object"), + closedFlag(flag), + Sexp.list(mapEmpty(~f=objectField, fields)), + }) + | 
Ptyp_class(longidentLoc, types) => + Sexp.list(list{ + Sexp.atom("Ptyp_class"), + longident(longidentLoc.Location.txt), + Sexp.list(mapEmpty(~f=coreType, types)), + }) + | Ptyp_variant(fields, flag, optLabels) => + Sexp.list(list{ + Sexp.atom("Ptyp_variant"), + Sexp.list(mapEmpty(~f=rowField, fields)), + closedFlag(flag), + switch optLabels { + | None => Sexp.atom("None") + | Some(lbls) => Sexp.list(mapEmpty(~f=string, lbls)) + }, + }) + | Ptyp_poly(lbls, typexpr) => + Sexp.list(list{ + Sexp.atom("Ptyp_poly"), + Sexp.list(mapEmpty(~f=lbl => string(lbl.Asttypes.txt), lbls)), + coreType(typexpr), + }) + | Ptyp_package(package) => Sexp.list(list{Sexp.atom("Ptyp_package"), packageType(package)}) + | Ptyp_extension(ext) => Sexp.list(list{Sexp.atom("Ptyp_extension"), extension(ext)}) + } + + Sexp.list(list{Sexp.atom("core_type"), desc}) + } + + and payload = p => + switch p { + | PStr(s) => Sexp.list(list{Sexp.atom("PStr"), ...mapEmpty(~f=structureItem, s)}) + | PSig(s) => Sexp.list(list{Sexp.atom("PSig"), signature(s)}) + | PTyp(ct) => Sexp.list(list{Sexp.atom("PTyp"), coreType(ct)}) + | PPat(pat, optExpr) => + Sexp.list(list{ + Sexp.atom("PPat"), + pattern(pat), + switch optExpr { + | Some(expr) => Sexp.list(list{Sexp.atom("Some"), expression(expr)}) + | None => Sexp.atom("None") + }, + }) + } + + and attribute = ((stringLoc, p)) => + Sexp.list(list{Sexp.atom("attribute"), Sexp.atom(stringLoc.Asttypes.txt), payload(p)}) + + and extension = ((stringLoc, p)) => + Sexp.list(list{Sexp.atom("extension"), Sexp.atom(stringLoc.Asttypes.txt), payload(p)}) + + and attributes = attrs => { + let sexprs = mapEmpty(~f=attribute, attrs) + Sexp.list(list{Sexp.atom("attributes"), ...sexprs}) + } + + let implementation = structure + let interface = signature +} + +module IO: { + let readFile: string => string + let readStdin: unit => string +} = { + /* random chunk size: 2^15, TODO: why do we guess randomly? 
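/* Editor note: readFile below pulls a whole file into a Buffer in fixed 32 KiB
   (2^15-byte) chunks, looping until input reports 0 bytes read; readStdin does the
   same over stdin. Typical use, with a hypothetical filename:
   let src = IO.readFile("MyModule.res") */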
*/ + let chunkSize = 32768 + + let readFile = filename => { + let chan = open_in(filename) + let buffer = Buffer.create(chunkSize) + let chunk = (@doesNotRaise Bytes.create)(chunkSize) + let rec loop = () => { + let len = try input(chan, chunk, 0, chunkSize) catch { + | Invalid_argument(_) => 0 + } + if len === 0 { + close_in_noerr(chan) + Buffer.contents(buffer) + } else { + Buffer.add_subbytes(buffer, chunk, 0, len) + loop() + } + } + + loop() + } + + let readStdin = () => { + let buffer = Buffer.create(chunkSize) + let chunk = (@doesNotRaise Bytes.create)(chunkSize) + let rec loop = () => { + let len = try input(stdin, chunk, 0, chunkSize) catch { + | Invalid_argument(_) => 0 + } + if len === 0 { + close_in_noerr(stdin) + Buffer.contents(buffer) + } else { + Buffer.add_subbytes(buffer, chunk, 0, len) + loop() + } + } + + loop() + } +} + +module CharacterCodes = { + let eof = -1 + + let space = 0x0020 + @live let newline = 0x0A /* \n */ + let lineFeed = 0x0A /* \n */ + let carriageReturn = 0x0D /* \r */ + let lineSeparator = 0x2028 + let paragraphSeparator = 0x2029 + + let tab = 0x09 + + let bang = 0x21 + let dot = 0x2E + let colon = 0x3A + let comma = 0x2C + let backtick = 0x60 + /* let question = 0x3F */ + let semicolon = 0x3B + let underscore = 0x5F + let singleQuote = 0x27 + let doubleQuote = 0x22 + let equal = 0x3D + let bar = 0x7C + let tilde = 0x7E + let question = 0x3F + let ampersand = 0x26 + let at = 0x40 + let dollar = 0x24 + let percent = 0x25 + + let lparen = 0x28 + let rparen = 0x29 + let lbracket = 0x5B + let rbracket = 0x5D + let lbrace = 0x7B + let rbrace = 0x7D + + let forwardslash = 0x2F /* / */ + let backslash = 0x5C /* \ */ + + let greaterThan = 0x3E + let hash = 0x23 + let lessThan = 0x3C + + let minus = 0x2D + let plus = 0x2B + let asterisk = 0x2A + + let _0 = 0x30 + @live let _1 = 0x31 + @live let _2 = 0x32 + @live let _3 = 0x33 + @live let _4 = 0x34 + @live let _5 = 0x35 + @live let _6 = 0x36 + @live let _7 = 0x37 + @live let _8 = 0x38 + let _9 = 0x39 + + module Lower = { + let a = 0x61 + let b = 0x62 + @live let c = 0x63 + @live let d = 0x64 + let e = 0x65 + let f = 0x66 + let g = 0x67 + @live let h = 0x68 + @live let i = 0x69 + @live let j = 0x6A + @live let k = 0x6B + @live let l = 0x6C + @live let m = 0x6D + let n = 0x6E + let o = 0x6F + let p = 0x70 + @live let q = 0x71 + let r = 0x72 + @live let s = 0x73 + let t = 0x74 + @live let u = 0x75 + @live let v = 0x76 + @live let w = 0x77 + let x = 0x78 + @live let y = 0x79 + let z = 0x7A + } + + module Upper = { + let a = 0x41 + /* let b = 0x42 */ + @live let c = 0x43 + @live let d = 0x44 + @live let e = 0x45 + @live let f = 0x46 + let g = 0x47 + @live let h = 0x48 + @live let i = 0x49 + @live let j = 0x4A + @live let k = 0x4B + @live let l = 0x4C + @live let m = 0x4D + @live let b = 0x4E + @live let o = 0x4F + @live let p = 0x50 + @live let q = 0x51 + @live let r = 0x52 + @live let s = 0x53 + @live let t = 0x54 + @live let u = 0x55 + @live let v = 0x56 + @live let w = 0x57 + @live let x = 0x58 + @live let y = 0x59 + let z = 0x5a + } + + /* returns lower-case ch, ch should be ascii */ + let lower = ch => + /* if ch >= Lower.a && ch <= Lower.z then ch else ch + 32 */ + lor(32, ch) + + let isLetter = ch => (Lower.a <= ch && ch <= Lower.z) || (Upper.a <= ch && ch <= Upper.z) + + let isUpperCase = ch => Upper.a <= ch && ch <= Upper.z + + let isDigit = ch => _0 <= ch && ch <= _9 + + let isHex = ch => (_0 <= ch && ch <= _9) || (Lower.a <= lower(ch) && lower(ch) <= Lower.f) + + /* + // ES5 7.3: + // The ECMAScript line 
terminator characters are listed in Table 3. + // Table 3: Line Terminator Characters + // Code Unit Value Name Formal Name + // \u000A Line Feed + // \u000D Carriage Return + // \u2028 Line separator + // \u2029 Paragraph separator + // Only the characters in Table 3 are treated as line terminators. Other new line or line + // breaking characters are treated as white space but not as line terminators. + */ + let isLineBreak = ch => + ch === lineFeed || + (ch === carriageReturn || + (ch === lineSeparator || ch === paragraphSeparator)) + + let digitValue = ch => + if _0 <= ch && ch <= _9 { + ch - 48 + } else if Lower.a <= lower(ch) && lower(ch) <= Lower.f { + lower(ch) - Lower.a + 10 + } else { + 16 + } /* larger than any legal value */ +} + +module Comment: { + type t + + let toString: t => string + + let loc: t => Location.t + let txt: t => string + let prevTokEndPos: t => Lexing.position + + let setPrevTokEndPos: (t, Lexing.position) => unit + + let isSingleLineComment: t => bool + + let makeSingleLineComment: (~loc: Location.t, string) => t + let makeMultiLineComment: (~loc: Location.t, string) => t + let fromOcamlComment: (~loc: Location.t, ~txt: string, ~prevTokEndPos: Lexing.position) => t + let trimSpaces: string => string +} = { + type style = + | SingleLine + | MultiLine + + let styleToString = s => + switch s { + | SingleLine => "SingleLine" + | MultiLine => "MultiLine" + } + + type t = { + txt: string, + style: style, + loc: Location.t, + mutable prevTokEndPos: Lexing.position, + } + + let loc = t => t.loc + let txt = t => t.txt + let prevTokEndPos = t => t.prevTokEndPos + + let setPrevTokEndPos = (t, pos) => t.prevTokEndPos = pos + + let isSingleLineComment = t => + switch t.style { + | SingleLine => true + | MultiLine => false + } + + let toString = t => + Format.sprintf( + "(txt: %s\nstyle: %s\nlines: %d-%d)", + t.txt, + styleToString(t.style), + t.loc.loc_start.pos_lnum, + t.loc.loc_end.pos_lnum, + ) + + let makeSingleLineComment = (~loc, txt) => { + txt: txt, + loc: loc, + style: SingleLine, + prevTokEndPos: Lexing.dummy_pos, + } + + let makeMultiLineComment = (~loc, txt) => { + txt: txt, + loc: loc, + style: MultiLine, + prevTokEndPos: Lexing.dummy_pos, + } + + let fromOcamlComment = (~loc, ~txt, ~prevTokEndPos) => { + txt: txt, + loc: loc, + style: MultiLine, + prevTokEndPos: prevTokEndPos, + } + + let trimSpaces = s => { + let len = String.length(s) + if len == 0 { + s + } else if String.unsafe_get(s, 0) == ' ' || String.unsafe_get(s, len - 1) == ' ' { + let b = Bytes.of_string(s) + let i = ref(0) + while i.contents < len && Bytes.unsafe_get(b, i.contents) == ' ' { + incr(i) + } + let j = ref(len - 1) + while j.contents >= i.contents && Bytes.unsafe_get(b, j.contents) == ' ' { + decr(j) + } + if j.contents >= i.contents { + (@doesNotRaise Bytes.sub)(b, i.contents, j.contents - i.contents + 1) |> Bytes.to_string + } else { + "" + } + } else { + s + } + } +} + +module Token = { + type t = + | Open + | True + | False + | Character(char) + | Int({i: string, suffix: option}) + | Float({f: string, suffix: option}) + | String(string) + | Lident(string) + | Uident(string) + | As + | Dot + | DotDot + | DotDotDot + | Bang + | Semicolon + | Let + | And + | Rec + | Underscore + | SingleQuote + | Equal + | EqualEqual + | EqualEqualEqual + | Bar + | Lparen + | Rparen + | Lbracket + | Rbracket + | Lbrace + | Rbrace + | Colon + | Comma + | Eof + | Exception + | @live Backslash + | Forwardslash + | ForwardslashDot + | Asterisk + | AsteriskDot + | Exponentiation + | Minus + | MinusDot + | 
Plus + | PlusDot + | PlusPlus + | PlusEqual + | ColonGreaterThan + | GreaterThan + | LessThan + | LessThanSlash + | Hash + | HashEqual + | HashHash + | Assert + | Lazy + | Tilde + | Question + | If + | Else + | For + | In + | To + | Downto + | While + | Switch + | When + | EqualGreater + | MinusGreater + | External + | Typ + | Private + | Mutable + | Constraint + | Include + | Module + | Of + | With + | Land + | Lor + | Band /* Bitwise and: & */ + | BangEqual + | BangEqualEqual + | LessEqual + | GreaterEqual + | ColonEqual + | At + | AtAt + | Percent + | PercentPercent + | Comment(Comment.t) + | List + | TemplateTail(string) + | TemplatePart(string) + | Backtick + | BarGreater + | Try + | Catch + | Import + | Export + + let precedence = x => + switch x { + | HashEqual | ColonEqual => 1 + | Lor => 2 + | Land => 3 + | Equal + | EqualEqual + | EqualEqualEqual + | LessThan + | GreaterThan + | BangEqual + | BangEqualEqual + | LessEqual + | GreaterEqual + | BarGreater => 4 + | Plus | PlusDot | Minus | MinusDot | PlusPlus => 5 + | Asterisk | AsteriskDot | Forwardslash | ForwardslashDot => 6 + | Exponentiation => 7 + | MinusGreater => 8 + | Dot => 9 + | _ => 0 + } + + let toString = x => + switch x { + | Open => "open" + | True => "true" + | False => "false" + | Character(c) => "'" ++ (Char.escaped(c) ++ "'") + | String(s) => s + | Lident(str) => str + | Uident(str) => str + | Dot => "." + | DotDot => ".." + | DotDotDot => "..." + | Int({i}) => "int " ++ i + | Float({f}) => "Float: " ++ f + | Bang => "!" + | Semicolon => ";" + | Let => "let" + | And => "and" + | Rec => "rec" + | Underscore => "_" + | SingleQuote => "'" + | Equal => "=" + | EqualEqual => "==" + | EqualEqualEqual => "===" + | Eof => "eof" + | Bar => "|" + | As => "as" + | Lparen => "(" + | Rparen => ")" + | Lbracket => "[" + | Rbracket => "]" + | Lbrace => "{" + | Rbrace => "}" + | ColonGreaterThan => ":>" + | Colon => ":" + | Comma => "," + | Minus => "-" + | MinusDot => "-." + | Plus => "+" + | PlusDot => "+." + | PlusPlus => "++" + | PlusEqual => "+=" + | Backslash => "\\" + | Forwardslash => "/" + | ForwardslashDot => "/." + | Exception => "exception" + | Hash => "#" + | HashHash => "##" + | HashEqual => "#=" + | GreaterThan => ">" + | LessThan => "<" + | LessThanSlash => " "*" + | AsteriskDot => "*." + | Exponentiation => "**" + | Assert => "assert" + | Lazy => "lazy" + | Tilde => "tilde" + | Question => "?" 
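/* Editor note (illustrative): precedence above gives the binding strength used when
   parsing infix operators. For example, 1 + 2 * 3 groups as 1 + (2 * 3) because
   precedence(Asterisk) = 6 is higher than precedence(Plus) = 5, and a := b || c
   groups as a := (b || c) because ColonEqual = 1 binds loosest of all. */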
+ | If => "if" + | Else => "else" + | For => "for" + | In => "in" + | To => "to" + | Downto => "downto" + | While => "while" + | Switch => "switch" + | When => "when" + | EqualGreater => "=>" + | MinusGreater => "->" + | External => "external" + | Typ => "type" + | Private => "private" + | Constraint => "constraint" + | Mutable => "mutable" + | Include => "include" + | Module => "module" + | Of => "of" + | With => "with" + | Lor => "||" + | Band => "&" + | Land => "&&" + | BangEqual => "!=" + | BangEqualEqual => "!==" + | GreaterEqual => ">=" + | LessEqual => "<=" + | ColonEqual => ":=" + | At => "@" + | AtAt => "@@" + | Percent => "%" + | PercentPercent => "%%" + | Comment(c) => "Comment(" ++ (Comment.toString(c) ++ ")") + | List => "list" + | TemplatePart(text) => text ++ "${" + | TemplateTail(text) => "TemplateTail(" ++ (text ++ ")") + | Backtick => "`" + | BarGreater => "|>" + | Try => "try" + | Catch => "catch" + | Import => "import" + | Export => "export" + } + + @raises(Not_found) + let keywordTable = x => + switch x { + | "true" => True + | "false" => False + | "open" => Open + | "let" => Let + | "rec" => Rec + | "and" => And + | "as" => As + | "exception" => Exception + | "assert" => Assert + | "lazy" => Lazy + | "if" => If + | "else" => Else + | "for" => For + | "in" => In + | "to" => To + | "downto" => Downto + | "while" => While + | "switch" => Switch + | "when" => When + | "external" => External + | "type" => Typ + | "private" => Private + | "mutable" => Mutable + | "constraint" => Constraint + | "include" => Include + | "module" => Module + | "of" => Of + | "list" => List + | "with" => With + | "try" => Try + | "catch" => Catch + | "import" => Import + | "export" => Export + | _ => raise(Not_found) + } + + let isKeyword = x => + switch x { + | True + | False + | Open + | Let + | Rec + | And + | As + | Exception + | Assert + | Lazy + | If + | Else + | For + | In + | To + | Downto + | While + | Switch + | When + | External + | Typ + | Private + | Mutable + | Constraint + | Include + | Module + | Of + | Land + | Lor + | List + | With + | Try + | Catch + | Import + | Export => true + | _ => false + } + + let lookupKeyword = str => + try keywordTable(str) catch { + | Not_found => + if CharacterCodes.isUpperCase(int_of_char(@doesNotRaise String.get(str, 0))) { + Uident(str) + } else { + Lident(str) + } + } + + let isKeywordTxt = str => + try { + let _ = keywordTable(str) + true + } catch { + | Not_found => false + } +} + +module Grammar = { + type t = + | OpenDescription /* open Belt */ + | @live ModuleLongIdent /* Foo or Foo.Bar */ + | Ternary /* condExpr ? 
trueExpr : falseExpr */ + | Es6ArrowExpr + | Jsx + | JsxAttribute + | @live JsxChild + | ExprOperand + | ExprUnary + | ExprSetField + | ExprBinaryAfterOp(Token.t) + | ExprBlock + | ExprCall + | ExprList + | ExprArrayAccess + | ExprArrayMutation + | ExprIf + | IfCondition + | IfBranch + | ElseBranch + | TypeExpression + | External + | PatternMatching + | PatternMatchCase + | LetBinding + | PatternList + | PatternOcamlList + | PatternRecord + + | TypeDef + | TypeConstrName + | TypeParams + | @live TypeParam + | PackageConstraint + + | TypeRepresentation + + | RecordDecl + | ConstructorDeclaration + | ParameterList + | StringFieldDeclarations + | FieldDeclarations + | TypExprList + | FunctorArgs + | ModExprList + | TypeParameters + | RecordRows + | RecordRowsStringKey + | ArgumentList + | Signature + | Specification + | Structure + | Implementation + | Attribute + | TypeConstraint + | Primitive + | AtomicTypExpr + | ListExpr + | JsFfiImport + + let toString = x => + switch x { + | OpenDescription => "an open description" + | ModuleLongIdent => "a module identifier" + | Ternary => "a ternary expression" + | Es6ArrowExpr => "an es6 arrow function" + | Jsx => "a jsx expression" + | JsxAttribute => "a jsx attribute" + | ExprOperand => "a basic expression" + | ExprUnary => "a unary expression" + | ExprBinaryAfterOp(op) => "an expression after the operator \"" ++ (Token.toString(op) ++ "\"") + | ExprIf => "an if expression" + | IfCondition => "the condition of an if expression" + | IfBranch => "the true-branch of an if expression" + | ElseBranch => "the else-branch of an if expression" + | TypeExpression => "a type" + | External => "an external" + | PatternMatching => "the cases of a pattern match" + | ExprBlock => "a block with expressions" + | ExprSetField => "a record field mutation" + | ExprCall => "a function application" + | ExprArrayAccess => "an array access expression" + | ExprArrayMutation => "an array mutation" + | LetBinding => "a let binding" + | TypeDef => "a type definition" + | TypeParams => "type parameters" + | TypeParam => "a type parameter" + | TypeConstrName => "a type-constructor name" + | TypeRepresentation => "a type representation" + | RecordDecl => "a record declaration" + | PatternMatchCase => "a pattern match case" + | ConstructorDeclaration => "a constructor declaration" + | ExprList => "multiple expressions" + | PatternList => "multiple patterns" + | PatternOcamlList => "a list pattern" + | PatternRecord => "a record pattern" + | ParameterList => "parameters" + | StringFieldDeclarations => "string field declarations" + | FieldDeclarations => "field declarations" + | TypExprList => "list of types" + | FunctorArgs => "functor arguments" + | ModExprList => "list of module expressions" + | TypeParameters => "list of type parameters" + | RecordRows => "rows of a record" + | RecordRowsStringKey => "rows of a record with string keys" + | ArgumentList => "arguments" + | Signature => "signature" + | Specification => "specification" + | Structure => "structure" + | Implementation => "implementation" + | Attribute => "an attribute" + | TypeConstraint => "constraints on a type" + | Primitive => "an external primitive" + | AtomicTypExpr => "a type" + | ListExpr => "an ocaml list expr" + | PackageConstraint => "a package constraint" + | JsFfiImport => "js ffi import" + | JsxChild => "jsx child" + } + + let isSignatureItemStart = x => + switch x { + | Token.At + | Let + | Typ + | External + | Exception + | Open + | Include + | Module + | AtAt + | PercentPercent => true + | _ => false 
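/* Editor note: the is*Start predicates in this module classify the upcoming token,
   answering "can this token begin an X?"; they are combined further down in
   isListElement, isListTerminator and isPartOfList to decide whether another item
   of the current grammar can be read or the list has ended. */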
+ } + + let isAtomicPatternStart = x => + switch x { + | Token.Int(_) + | String(_) + | Character(_) + | Lparen + | Lbracket + | Lbrace + | Underscore + | Lident(_) + | Uident(_) + | List + | Exception + | Lazy + | Percent => true + | _ => false + } + + let isAtomicExprStart = x => + switch x { + | Token.True + | False + | Int(_) + | String(_) + | Float(_) + | Character(_) + | Backtick + | Uident(_) + | Lident(_) + | Hash + | Lparen + | List + | Lbracket + | Lbrace + | LessThan + | Module + | Percent => true + | _ => false + } + + let isAtomicTypExprStart = x => + switch x { + | Token.SingleQuote + | Underscore + | Lparen + | Lbrace + | Uident(_) + | Lident(_) + | List + | Percent => true + | _ => false + } + + let isExprStart = x => + switch x { + | Token.True + | False + | Int(_) + | String(_) + | Float(_) + | Character(_) + | Backtick + | Underscore + | Uident(_) + | Lident(_) + | Hash + | Lparen + | List + | Module + | Lbracket + | Lbrace + | LessThan + | Minus + | MinusDot + | Plus + | PlusDot + | Bang + | Percent + | At + | If + | Switch + | While + | For + | Assert + | Lazy + | Try => true + | _ => false + } + + let isJsxAttributeStart = x => + switch x { + | Token.Lident(_) | Question => true + | _ => false + } + + let isStructureItemStart = x => + switch x { + | Token.Open + | Let + | Typ + | External + | Import + | Export + | Exception + | Include + | Module + | AtAt + | PercentPercent + | At => true + | t when isExprStart(t) => true + | _ => false + } + + let isPatternStart = x => + switch x { + | Token.Int(_) + | Float(_) + | String(_) + | Character(_) + | True + | False + | Minus + | Plus + | Lparen + | Lbracket + | Lbrace + | List + | Underscore + | Lident(_) + | Uident(_) + | Hash + | HashHash + | Exception + | Lazy + | Percent + | Module + | At => true + | _ => false + } + + let isParameterStart = x => + switch x { + | Token.Typ | Tilde | Dot => true + | token when isPatternStart(token) => true + | _ => false + } + + /* TODO: overparse Uident ? */ + let isStringFieldDeclStart = x => + switch x { + | Token.String(_) | At => true + | _ => false + } + + /* TODO: overparse Uident ? 
*/ + let isFieldDeclStart = x => + switch x { + | Token.At | Mutable | Lident(_) | List => true + /* recovery, TODO: this is not ideal… */ + | Uident(_) => true + | t when Token.isKeyword(t) => true + | _ => false + } + + let isRecordDeclStart = x => + switch x { + | Token.At | Mutable | Lident(_) | List => true + | _ => false + } + + let isTypExprStart = x => + switch x { + | Token.At + | SingleQuote + | Underscore + | Lparen + | Lbracket + | Uident(_) + | Lident(_) + | List + | Module + | Percent + | Lbrace => true + | _ => false + } + + let isTypeParameterStart = x => + switch x { + | Token.Tilde | Dot => true + | token when isTypExprStart(token) => true + | _ => false + } + + let isTypeParamStart = x => + switch x { + | Token.Plus | Minus | SingleQuote | Underscore => true + | _ => false + } + + let isFunctorArgStart = x => + switch x { + | Token.At | Uident(_) | Underscore | Percent | Lbrace | Lparen => true + | _ => false + } + + let isModExprStart = x => + switch x { + | Token.At | Percent | Uident(_) | Lbrace | Lparen => true + | _ => false + } + + let isRecordRowStart = x => + switch x { + | Token.DotDotDot => true + | Token.Uident(_) | Lident(_) | List => true + /* TODO */ + | t when Token.isKeyword(t) => true + | _ => false + } + + let isRecordRowStringKeyStart = x => + switch x { + | Token.String(_) => true + | _ => false + } + + let isArgumentStart = x => + switch x { + | Token.Tilde | Dot | Underscore => true + | t when isExprStart(t) => true + | _ => false + } + + let isPatternMatchStart = x => + switch x { + | Token.Bar => true + | t when isPatternStart(t) => true + | _ => false + } + + let isPatternOcamlListStart = x => + switch x { + | Token.DotDotDot => true + | t when isPatternStart(t) => true + | _ => false + } + + let isPatternRecordItemStart = x => + switch x { + | Token.DotDotDot | Uident(_) | Lident(_) | List | Underscore => true + | _ => false + } + + let isAttributeStart = x => + switch x { + | Token.At => true + | _ => false + } + + let isJsFfiImportStart = x => + switch x { + | Token.Lident(_) | At => true + | _ => false + } + + let isJsxChildStart = isAtomicExprStart + + let isBlockExprStart = x => + switch x { + | Token.At + | Hash + | Percent + | Minus + | MinusDot + | Plus + | PlusDot + | Bang + | True + | False + | Int(_) + | String(_) + | Character(_) + | Lident(_) + | Uident(_) + | Lparen + | List + | Lbracket + | Lbrace + | Forwardslash + | Assert + | Lazy + | If + | For + | While + | Switch + | Open + | Module + | Exception + | Let + | LessThan + | Backtick + | Try + | Underscore => true + | _ => false + } + + let isListElement = (grammar, token) => + switch grammar { + | ExprList => token == Token.DotDotDot || isExprStart(token) + | ListExpr => token == DotDotDot || isExprStart(token) + | PatternList => token == DotDotDot || isPatternStart(token) + | ParameterList => isParameterStart(token) + | StringFieldDeclarations => isStringFieldDeclStart(token) + | FieldDeclarations => isFieldDeclStart(token) + | RecordDecl => isRecordDeclStart(token) + | TypExprList => isTypExprStart(token) || token == Token.LessThan + | TypeParams => isTypeParamStart(token) + | FunctorArgs => isFunctorArgStart(token) + | ModExprList => isModExprStart(token) + | TypeParameters => isTypeParameterStart(token) + | RecordRows => isRecordRowStart(token) + | RecordRowsStringKey => isRecordRowStringKeyStart(token) + | ArgumentList => isArgumentStart(token) + | Signature | Specification => isSignatureItemStart(token) + | Structure | Implementation => isStructureItemStart(token) + | 
PatternMatching => isPatternMatchStart(token) + | PatternOcamlList => isPatternOcamlListStart(token) + | PatternRecord => isPatternRecordItemStart(token) + | Attribute => isAttributeStart(token) + | TypeConstraint => token == Constraint + | PackageConstraint => token == And + | ConstructorDeclaration => token == Bar + | Primitive => + switch token { + | Token.String(_) => true + | _ => false + } + | JsxAttribute => isJsxAttributeStart(token) + | JsFfiImport => isJsFfiImportStart(token) + | _ => false + } + + let isListTerminator = (grammar, token) => + switch (grammar, token) { + | (_, Token.Eof) + | (ExprList, Rparen | Forwardslash | Rbracket) + | (ListExpr, Rparen) + | (ArgumentList, Rparen) + | (TypExprList, Rparen | Forwardslash | GreaterThan | Equal) + | (ModExprList, Rparen) + | ( + PatternList | PatternOcamlList | PatternRecord, + Forwardslash | Rbracket | Rparen | EqualGreater | In | Equal /* let {x} = foo */, + ) + | (ExprBlock, Rbrace) + | (Structure | Signature, Rbrace) + | (TypeParams, Rparen) + | (ParameterList, EqualGreater | Lbrace) + | (JsxAttribute, Forwardslash | GreaterThan) + | (JsFfiImport, Rbrace) + | (StringFieldDeclarations, Rbrace) => true + + | (Attribute, token) when token != At => true + | (TypeConstraint, token) when token != Constraint => true + | (PackageConstraint, token) when token != And => true + | (ConstructorDeclaration, token) when token != Bar => true + | (Primitive, Semicolon) => true + | (Primitive, token) when isStructureItemStart(token) => true + + | _ => false + } + + let isPartOfList = (grammar, token) => + isListElement(grammar, token) || isListTerminator(grammar, token) +} + +module Reporting = { + module TerminalDoc = { + type break = + | Never + | Always + + type rec document = + | Nil + | Group({break: break, doc: document}) + | Text(string) + | Indent({amount: int, doc: document}) + | Append({doc1: document, doc2: document}) + + let group = (~break, doc) => Group({break: break, doc: doc}) + let text = txt => Text(txt) + let indent = (i, d) => Indent({amount: i, doc: d}) + let append = (d1, d2) => Append({doc1: d1, doc2: d2}) + let nil = Nil + + type rec stack = + | Empty + | Cons({doc: document, stack: stack}) + + let push = (stack, doc) => Cons({doc: doc, stack: stack}) + + type mode = + | Flat + | Break + + let toString /* ~width */ = (doc: document) => { + let buffer = Buffer.create(100) + let rec loop = (stack, mode, offset) => + switch stack { + | Empty => () + | Cons({doc, stack: rest}) => + switch doc { + | Nil => loop(rest, mode, offset) + | Text(txt) => + Buffer.add_string(buffer, txt) + loop(rest, mode, offset + String.length(txt)) + | Indent({amount: i, doc}) => + let indentation = (@doesNotRaise String.make)(i, ' ') + Buffer.add_string(buffer, indentation) + loop(push(rest, doc), mode, offset + i) + | Append({doc1, doc2}) => + let rest = push(rest, doc2) + let rest = push(rest, mode == Flat ? 
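/* Editor note: in Break mode a newline document is pushed between doc1 and doc2, so
   the two halves of an Append render on separate lines; in Flat mode Nil is pushed
   instead and they stay on one line. The surrounding Group (break: Always | Never)
   selects which mode is used. */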
Nil : text("\n")) + + let rest = push(rest, doc1) + loop(rest, mode, offset) + | Group({break, doc}) => + let rest = push(rest, doc) + switch break { + | Always => loop(rest, Break, offset) + | Never => loop(rest, Flat, offset) + } + } + } + + loop(push(Empty, doc), Flat, 0) + Buffer.contents(buffer) + } + } + + type color = + | @live NoColor + | @live Red + + type style = { + @live underline: bool, + @live color: color, + } + + let highlight = (~from, ~len, txt) => + if from < 0 || (String.length(txt) === 0 || from >= String.length(txt)) { + txt + } else { + let before = try String.sub(txt, 0, from) catch { + | Invalid_argument(_) => "" + } + let content = + "\027[31m" ++ + (try String.sub(txt, from, len) catch { + | Invalid_argument(_) => "" + } ++ + "\027[0m") + + let after = try String.sub(txt, from + len, String.length(txt) - (from + len)) catch { + | Invalid_argument(_) => "" + } + before ++ (content ++ after) + } + + let underline = (~from, ~len, txt) => { + open TerminalDoc + let indent = (@doesNotRaise String.make)(from, ' ') + let underline = (@doesNotRaise String.make)(len, '^') + let line = highlight(~from=0, ~len, underline) + group(~break=Always, append(text(txt), text(indent ++ line))) + } + + let rec drop = (n, l) => + if n === 1 { + l + } else { + drop( + n - 1, + switch l { + | list{_x, ...xs} => xs + | _ => l + }, + ) + } + + let rec take = (n, l) => + switch l { + | _ when n === 0 => list{} + | list{} => list{} + | list{x, ...xs} => list{x, ...take(n - 1, xs)} + } + + /* TODO: cleanup */ + let renderCodeContext = (~missing, src: string, startPos, endPos) => { + open Lexing + let startCol = startPos.pos_cnum - startPos.pos_bol + let endCol = endPos.pos_cnum - startPos.pos_cnum + startCol + let startLine = max(1, startPos.pos_lnum - 2) /* 2 lines before */ + let lines = String.split_on_char('\n', src) + let endLine = { + let len = List.length(lines) + min(len, startPos.pos_lnum + 3) + } /* 2 lines after */ + + let lines = lines |> drop(startLine) |> take(endLine - startLine) |> Array.of_list + + let renderLine = (x, ix) => { + let x = if ix == startPos.pos_lnum { + switch missing { + | Some(_len) => x ++ @doesNotRaise String.make(10, ' ') + | None => x + } + } else { + x + } + + open TerminalDoc + let rowNr = { + let txt = string_of_int(ix) + let len = String.length(txt) + if ix == startPos.pos_lnum { + highlight(~from=0, ~len, txt) + } else { + txt + } + } + + let len = { + let len = if endCol >= 0 { + endCol - startCol + } else { + 1 + } + + if startCol + len > String.length(x) { + String.length(x) - startCol - 1 + } else { + len + } + } + + let line = if ix == startPos.pos_lnum { + switch missing { + | Some(len) => + underline( + ~from=startCol + String.length(String.length(string_of_int(ix)) |> string_of_int) + 5, + ~len, + x, + ) + | None => + let len = if startCol + len > String.length(x) { + String.length(x) - startCol + } else { + len + } + + text(highlight(~from=startCol, ~len, x)) + } + } else { + text(x) + } + + group(~break=Never, append(append(text(rowNr), text(" │")), indent(2, line))) + } + + let reportDoc = ref(TerminalDoc.nil) + + let linesLen = Array.length(lines) + for i in 0 to linesLen - 1 { + let line = try lines[i] catch { + | Invalid_argument(_) => "" + } + reportDoc := { + open TerminalDoc + let ix = startLine + i + group(~break=Always, append(reportDoc.contents, renderLine(line, ix))) + } + } + + TerminalDoc.toString(reportDoc.contents) + } + + type problem = + | @live Unexpected(Token.t) + | @live Expected({token: Token.t, pos: Lexing.position, 
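/* Editor note: renderCodeContext above prints a window of source around the error
   (starting up to two lines before the error line), prefixes each line with its
   number and a vertical bar, and relies on highlight's ANSI escapes (\027[31m ...
   \027[0m) to colour the offending columns red; underline draws a ^^^ marker under
   them. */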
context: option}) + | @live Message(string) + | @live Uident + | @live Lident + | @live Unbalanced(Token.t) + + type parseError = (Lexing.position, problem) +} + +module Diagnostics: { + type t + type category + type report + + type reportStyle + let parseReportStyle: string => reportStyle + + let unexpected: (Token.t, list<(Grammar.t, Lexing.position)>) => category + let expected: (~grammar: Grammar.t=?, Lexing.position, Token.t) => category + let uident: Token.t => category + let lident: Token.t => category + let unclosedString: category + let unclosedTemplate: category + let unclosedComment: category + let unknownUchar: int => category + let message: string => category + + let make: (~filename: string, ~startPos: Lexing.position, ~endPos: Lexing.position, category) => t + + let stringOfReport: (~style: reportStyle, list, string) => string +} = { + type category = + | Unexpected({token: Token.t, context: list<(Grammar.t, Lexing.position)>}) + | Expected({ + context: option, + pos: Lexing.position /* prev token end */, + token: Token.t, + }) + | Message(string) + | Uident(Token.t) + | Lident(Token.t) + | UnclosedString + | UnclosedTemplate + | UnclosedComment + | UnknownUchar(int) + + type t = { + filename: string, + startPos: Lexing.position, + endPos: Lexing.position, + category: category, + } + + type report = list + + /* TODO: add json here */ + type reportStyle = + | Pretty + | Plain + + let parseReportStyle = txt => + switch String.lowercase_ascii(txt) { + | "plain" => Plain + | _ => Pretty + } + + let defaultUnexpected = token => + "I'm not sure what to parse here when looking at \"" ++ (Token.toString(token) ++ "\".") + + let explain = t => + switch t.category { + | Uident(currentToken) => + switch currentToken { + | Lident(lident) => + let guess = String.capitalize_ascii(lident) + "Did you mean `" ++ (guess ++ ("` instead of `" ++ (lident ++ "`?"))) + | t when Token.isKeyword(t) => + let token = Token.toString(t) + "`" ++ (token ++ "` is a reserved keyword.") + | _ => "At this point, I'm looking for an uppercased identifier like `Belt` or `Array`" + } + | Lident(currentToken) => + switch currentToken { + | Uident(uident) => + let guess = String.uncapitalize_ascii(uident) + "Did you mean `" ++ (guess ++ ("` instead of `" ++ (uident ++ "`?"))) + | t when Token.isKeyword(t) => + let token = Token.toString(t) + "`" ++ + (token ++ + ("` is a reserved keyword. Keywords need to be escaped: \\\"" ++ (token ++ "\""))) + | Underscore => "`_` isn't a valid name." + | _ => "I'm expecting an lowercased identifier like `name` or `age`" + } + | Message(txt) => txt + | UnclosedString => "This string is missing a double quote at the end" + | UnclosedTemplate => "Did you forget to close this template expression with a backtick?" + | UnclosedComment => "This comment seems to be missing a closing `*/`" + | UnknownUchar(uchar) => + switch uchar { + | 94 /* ^ */ => "Hmm, not sure what I should do here with this character.\nIf you're trying to deref an expression, use `foo.contents` instead." + | _ => "Hmm, I have no idea what this character means…" + } + | Expected({context, token: t}) => + let hint = switch context { + | Some(grammar) => "It signals the start of " ++ Grammar.toString(grammar) + | None => "" + } + + "Did you forget a `" ++ (Token.toString(t) ++ ("` here? 
" ++ hint)) + | Unexpected({token: t, context: breadcrumbs}) => + let name = Token.toString(t) + switch breadcrumbs { + | list{(AtomicTypExpr, _), ...breadcrumbs} => + switch (breadcrumbs, t) { + | ( + list{(StringFieldDeclarations | FieldDeclarations, _), ..._}, + String(_) | At | Rbrace | Comma | Eof, + ) => "I'm missing a type here" + | (_, t) when Grammar.isStructureItemStart(t) || t == Eof => "Missing a type here" + | _ => defaultUnexpected(t) + } + | list{(ExprOperand, _), ...breadcrumbs} => + switch (breadcrumbs, t) { + | (list{(ExprBlock, _), ..._}, Rbrace) => "It seems that this expression block is empty" + | (list{(ExprBlock, _), ..._}, Bar) => /* Pattern matching */ + "Looks like there might be an expression missing here" + | ( + list{(ExprSetField, _), ..._}, + _, + ) => "It seems that this record field mutation misses an expression" + | ( + list{(ExprArrayMutation, _), ..._}, + _, + ) => "Seems that an expression is missing, with what do I mutate the array?" + | ( + list{(ExprBinaryAfterOp(_) | ExprUnary, _), ..._}, + _, + ) => "Did you forget to write an expression here?" + | (list{(Grammar.LetBinding, _), ..._}, _) => "This let-binding misses an expression" + | (list{_, ..._}, Rbracket | Rbrace) => "Missing expression" + | _ => "I'm not sure what to parse here when looking at \"" ++ (name ++ "\".") + } + | list{(TypeParam, _), ..._} => + switch t { + | Lident(ident) => "Did you mean '" ++ (ident ++ "? A Type parameter starts with a quote.") + | _ => "I'm not sure what to parse here when looking at \"" ++ (name ++ "\".") + } + | _ => + /* TODO: match on circumstance to verify Lident needed ? */ + if Token.isKeyword(t) { + "`" ++ + (name ++ + ("` is a reserved keyword. Keywords need to be escaped: \\\"" ++ + (Token.toString(t) ++ + "\""))) + } else { + "I'm not sure what to parse here when looking at \"" ++ (name ++ "\".") + } + } + } + + let toPlainString = (t, buffer) => { + Buffer.add_string(buffer, t.filename) + Buffer.add_char(buffer, '(') + Buffer.add_string(buffer, string_of_int(t.startPos.pos_cnum)) + Buffer.add_char(buffer, ',') + Buffer.add_string(buffer, string_of_int(t.endPos.pos_cnum)) + Buffer.add_char(buffer, ')') + Buffer.add_char(buffer, ':') + Buffer.add_string(buffer, explain(t)) + } + + let toString = (t, src) => { + open Lexing + let startchar = t.startPos.pos_cnum - t.startPos.pos_bol + let endchar = t.endPos.pos_cnum - t.startPos.pos_cnum + startchar + let locationInfo = Printf.sprintf /* ReasonLanguageServer requires the following format */( + "File \"%s\", line %d, characters %d-%d:", + t.filename, + t.startPos.pos_lnum, + startchar, + endchar, + ) + + let code = { + let missing = switch t.category { + | Expected({token: t}) => Some(String.length(Token.toString(t))) + | _ => None + } + + Reporting.renderCodeContext(~missing, src, t.startPos, t.endPos) + } + + let explanation = explain(t) + Printf.sprintf("%s\n\n%s\n\n%s\n\n", locationInfo, code, explanation) + } + + let make = (~filename, ~startPos, ~endPos, category) => { + filename: filename, + startPos: startPos, + endPos: endPos, + category: category, + } + + let stringOfReport = (~style, diagnostics, src) => + switch style { + | Pretty => + List.fold_left( + (report, diagnostic) => report ++ (toString(diagnostic, src) ++ "\n"), + "\n", + List.rev(diagnostics), + ) + | Plain => + let buffer = Buffer.create(100) + List.iter(diagnostic => { + toPlainString(diagnostic, buffer) + Buffer.add_char(buffer, '\n') + }, diagnostics) + Buffer.contents(buffer) + } + + let unexpected = (token, context) => 
Unexpected({token: token, context: context}) + + let expected = (~grammar=?, pos, token) => Expected({context: grammar, pos: pos, token: token}) + + let uident = currentToken => Uident(currentToken) + let lident = currentToken => Lident(currentToken) + let unclosedString = UnclosedString + let unclosedComment = UnclosedComment + let unclosedTemplate = UnclosedTemplate + let unknownUchar = code => UnknownUchar(code) + let message = txt => Message(txt) +} + +/* Collection of utilities to view the ast in a more a convenient form, + * allowing for easier processing. + * Example: given a ptyp_arrow type, what are its arguments and what is the + * returnType? */ +module ParsetreeViewer: { + /* Restructures a nested tree of arrow types into its args & returnType + * The parsetree contains: a => b => c => d, for printing purposes + * we restructure the tree into (a, b, c) and its returnType d */ + let arrowType: Parsetree.core_type => ( + Parsetree.attributes, + list<(Parsetree.attributes, Asttypes.arg_label, Parsetree.core_type)>, + Parsetree.core_type, + ) + + let functorType: Parsetree.module_type => ( + list<(Parsetree.attributes, Asttypes.loc, option)>, + Parsetree.module_type, + ) + + /* filters @bs out of the provided attributes */ + let processUncurriedAttribute: Parsetree.attributes => (bool, Parsetree.attributes) + + /* if ... else if ... else ... is represented as nested expressions: if ... else { if ... } + * The purpose of this function is to flatten nested ifs into one sequence. + * Basically compute: ([if, else if, else if, else if], else) */ + let collectIfExpressions: Parsetree.expression => ( + list<(Parsetree.expression, Parsetree.expression)>, + option, + ) + + let collectListExpressions: Parsetree.expression => ( + list, + option, + ) + + type funParamKind = + | Parameter({ + attrs: Parsetree.attributes, + lbl: Asttypes.arg_label, + defaultExpr: option, + pat: Parsetree.pattern, + }) + | NewTypes({attrs: Parsetree.attributes, locs: list>}) + + let funExpr: Parsetree.expression => ( + Parsetree.attributes, + list, + Parsetree.expression, + ) + + /* example: + * `makeCoordinate({ + * x: 1, + * y: 2, + * })` + * Notice howe `({` and `})` "hug" or stick to each other */ + let isHuggableExpression: Parsetree.expression => bool + + let isHuggablePattern: Parsetree.pattern => bool + + let isHuggableRhs: Parsetree.expression => bool + + let operatorPrecedence: string => int + + let isUnaryExpression: Parsetree.expression => bool + let isBinaryOperator: string => bool + let isBinaryExpression: Parsetree.expression => bool + + let flattenableOperators: (string, string) => bool + + let hasAttributes: Parsetree.attributes => bool + + let isArrayAccess: Parsetree.expression => bool + let isTernaryExpr: Parsetree.expression => bool + + let collectTernaryParts: Parsetree.expression => ( + list<(Parsetree.expression, Parsetree.expression)>, + Parsetree.expression, + ) + + let parametersShouldHug: list => bool + + let filterTernaryAttributes: Parsetree.attributes => Parsetree.attributes + + let isJsxExpression: Parsetree.expression => bool + let hasJsxAttribute: Parsetree.attributes => bool + + let shouldIndentBinaryExpr: Parsetree.expression => bool + let shouldInlineRhsBinaryExpr: Parsetree.expression => bool + let filterPrinteableAttributes: Parsetree.attributes => Parsetree.attributes + let partitionPrinteableAttributes: Parsetree.attributes => ( + Parsetree.attributes, + Parsetree.attributes, + ) + + let requiresSpecialCallbackPrintingLastArg: list<( + Asttypes.arg_label, + 
Parsetree.expression, + )> => bool + let requiresSpecialCallbackPrintingFirstArg: list<( + Asttypes.arg_label, + Parsetree.expression, + )> => bool + + let modExprApply: Parsetree.module_expr => (list, Parsetree.module_expr) + + let modExprFunctor: Parsetree.module_expr => ( + list<(Parsetree.attributes, Asttypes.loc, option)>, + Parsetree.module_expr, + ) + + let splitGenTypeAttr: Parsetree.attributes => (bool, Parsetree.attributes) + + let collectPatternsFromListConstruct: ( + list, + Parsetree.pattern, + ) => (list, Parsetree.pattern) + + let isBlockExpr: Parsetree.expression => bool + + let isTemplateLiteral: Parsetree.expression => bool + + let collectOrPatternChain: Parsetree.pattern => list + + let processBracesAttr: Parsetree.expression => (option, Parsetree.expression) + + let filterParsingAttrs: Parsetree.attributes => Parsetree.attributes + + let isBracedExpr: Parsetree.expression => bool + + let isPipeExpr: Parsetree.expression => bool + + let extractValueDescriptionFromModExpr: Parsetree.module_expr => list + + type jsImportScope = + | JsGlobalImport /* nothing */ + | JsModuleImport(string) /* from "path" */ + | JsScopedImport(list) /* window.location */ + + let classifyJsImport: Parsetree.value_description => jsImportScope + + /* (__x) => f(a, __x, c) -----> f(a, _, c) */ + let rewriteUnderscoreApply: Parsetree.expression => Parsetree.expression + + /* (__x) => f(a, __x, c) -----> f(a, _, c) */ + let isUnderscoreApplySugar: Parsetree.expression => bool +} = { + open Parsetree + + let arrowType = ct => { + let rec process = (attrsBefore, acc, typ) => + switch typ { + | {ptyp_desc: Ptyp_arrow(Nolabel as lbl, typ1, typ2), ptyp_attributes: list{}} => + let arg = (list{}, lbl, typ1) + process(attrsBefore, list{arg, ...acc}, typ2) + | { + ptyp_desc: Ptyp_arrow(Nolabel as lbl, typ1, typ2), + ptyp_attributes: list{({txt: "bs"}, _)} as attrs, + } => + let arg = (attrs, lbl, typ1) + process(attrsBefore, list{arg, ...acc}, typ2) + | {ptyp_desc: Ptyp_arrow(Nolabel, _typ1, _typ2), ptyp_attributes: _attrs} as returnType => + let args = List.rev(acc) + (attrsBefore, args, returnType) + | { + ptyp_desc: Ptyp_arrow((Labelled(_) | Optional(_)) as lbl, typ1, typ2), + ptyp_attributes: attrs, + } => + let arg = (attrs, lbl, typ1) + process(attrsBefore, list{arg, ...acc}, typ2) + | typ => (attrsBefore, List.rev(acc), typ) + } + + switch ct { + | {ptyp_desc: Ptyp_arrow(Nolabel, _typ1, _typ2), ptyp_attributes: attrs} as typ => + process(attrs, list{}, {...typ, ptyp_attributes: list{}}) + | typ => process(list{}, list{}, typ) + } + } + + let functorType = modtype => { + let rec process = (acc, modtype) => + switch modtype { + | {pmty_desc: Pmty_functor(lbl, argType, returnType), pmty_attributes: attrs} => + let arg = (attrs, lbl, argType) + process(list{arg, ...acc}, returnType) + | modType => (List.rev(acc), modType) + } + + process(list{}, modtype) + } + + let processUncurriedAttribute = attrs => { + let rec process = (uncurriedSpotted, acc, attrs) => + switch attrs { + | list{} => (uncurriedSpotted, List.rev(acc)) + | list{({Location.txt: "bs"}, _), ...rest} => process(true, acc, rest) + | list{attr, ...rest} => process(uncurriedSpotted, list{attr, ...acc}, rest) + } + + process(false, list{}, attrs) + } + + let collectIfExpressions = expr => { + let rec collect = (acc, expr) => + switch expr.pexp_desc { + | Pexp_ifthenelse(ifExpr, thenExpr, Some(elseExpr)) => + collect(list{(ifExpr, thenExpr), ...acc}, elseExpr) + | Pexp_ifthenelse(ifExpr, thenExpr, None as elseExpr) => + let ifs = 
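/* Editor note (illustrative): for a chain like if a {1} else if b {2} else {3} this
   returns ([(a, 1), (b, 2)], Some(3)), and for if a {1} else if b {2} with no final
   else it returns ([(a, 1), (b, 2)], None), letting the printer lay the whole chain
   out flat instead of as nested if/else expressions. */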
List.rev(list{(ifExpr, thenExpr), ...acc}) + (ifs, elseExpr) + | _ => (List.rev(acc), Some(expr)) + } + + collect(list{}, expr) + } + + let collectListExpressions = expr => { + let rec collect = (acc, expr) => + switch expr.pexp_desc { + | Pexp_construct({txt: Longident.Lident("[]")}, _) => (List.rev(acc), None) + | Pexp_construct( + {txt: Longident.Lident("::")}, + Some({pexp_desc: Pexp_tuple(list{hd, tail})}), + ) => + collect(list{hd, ...acc}, tail) + | _ => (List.rev(acc), Some(expr)) + } + + collect(list{}, expr) + } + + /* (__x) => f(a, __x, c) -----> f(a, _, c) */ + let rewriteUnderscoreApply = expr => + switch expr.pexp_desc { + | Pexp_fun( + Nolabel, + None, + {ppat_desc: Ppat_var({txt: "__x"})}, + {pexp_desc: Pexp_apply(callExpr, args)} as e, + ) => + let newArgs = List.map(arg => + switch arg { + | (lbl, {pexp_desc: Pexp_ident({txt: Longident.Lident("__x")} as lid)} as argExpr) => ( + lbl, + {...argExpr, pexp_desc: Pexp_ident({...lid, txt: Longident.Lident("_")})}, + ) + | arg => arg + } + , args) + {...e, pexp_desc: Pexp_apply(callExpr, newArgs)} + | _ => expr + } + + type funParamKind = + | Parameter({ + attrs: Parsetree.attributes, + lbl: Asttypes.arg_label, + defaultExpr: option, + pat: Parsetree.pattern, + }) + | NewTypes({attrs: Parsetree.attributes, locs: list>}) + + let funExpr = expr => { + /* Turns (type t, type u, type z) into "type t u z" */ + let rec collectNewTypes = (acc, returnExpr) => + switch returnExpr { + | {pexp_desc: Pexp_newtype(stringLoc, returnExpr), pexp_attributes: list{}} => + collectNewTypes(list{stringLoc, ...acc}, returnExpr) + | returnExpr => (List.rev(acc), returnExpr) + } + + let rec collect = (attrsBefore, acc, expr) => + switch expr { + | { + pexp_desc: + Pexp_fun( + Nolabel, + None, + {ppat_desc: Ppat_var({txt: "__x"})}, + {pexp_desc: Pexp_apply(_)}, + ), + } => (attrsBefore, List.rev(acc), rewriteUnderscoreApply(expr)) + | {pexp_desc: Pexp_fun(lbl, defaultExpr, pattern, returnExpr), pexp_attributes: list{}} => + let parameter = Parameter({ + attrs: list{}, + lbl: lbl, + defaultExpr: defaultExpr, + pat: pattern, + }) + collect(attrsBefore, list{parameter, ...acc}, returnExpr) + | {pexp_desc: Pexp_newtype(stringLoc, rest), pexp_attributes: attrs} => + let (stringLocs, returnExpr) = collectNewTypes(list{stringLoc}, rest) + let param = NewTypes({attrs: attrs, locs: stringLocs}) + collect(attrsBefore, list{param, ...acc}, returnExpr) + | { + pexp_desc: Pexp_fun(lbl, defaultExpr, pattern, returnExpr), + pexp_attributes: list{({txt: "bs"}, _)} as attrs, + } => + let parameter = Parameter({ + attrs: attrs, + lbl: lbl, + defaultExpr: defaultExpr, + pat: pattern, + }) + collect(attrsBefore, list{parameter, ...acc}, returnExpr) + | { + pexp_desc: Pexp_fun((Labelled(_) | Optional(_)) as lbl, defaultExpr, pattern, returnExpr), + pexp_attributes: attrs, + } => + let parameter = Parameter({ + attrs: attrs, + lbl: lbl, + defaultExpr: defaultExpr, + pat: pattern, + }) + collect(attrsBefore, list{parameter, ...acc}, returnExpr) + | expr => (attrsBefore, List.rev(acc), expr) + } + + switch expr { + | { + pexp_desc: Pexp_fun(Nolabel, _defaultExpr, _pattern, _returnExpr), + pexp_attributes: attrs, + } as expr => + collect(attrs, list{}, {...expr, pexp_attributes: list{}}) + | expr => collect(list{}, list{}, expr) + } + } + + let processBracesAttr = expr => + switch expr.pexp_attributes { + | list{({txt: "ns.braces"}, _) as attr, ...attrs} => ( + Some(attr), + {...expr, pexp_attributes: attrs}, + ) + | _ => (None, expr) + } + + let filterParsingAttrs = attrs => 
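/* Example for collectListExpressions above: `list{1, 2, ...rest}` is stored as
 * nested "::" constructors; collecting it yields ([1, 2], Some(rest)), while a
 * closed list such as `list{1, 2}` ends in "[]" and yields ([1, 2], None). */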
List.filter(attr => + switch attr { + | ({Location.txt: "ns.ternary" | "ns.braces" | "bs" | "ns.namedArgLoc"}, _) => false + | _ => true + } + , attrs) + + let isBlockExpr = expr => + switch expr.pexp_desc { + | Pexp_letmodule(_) + | Pexp_letexception(_) + | Pexp_let(_) + | Pexp_open(_) + | Pexp_sequence(_) => true + | _ => false + } + + let isBracedExpr = expr => + switch processBracesAttr(expr) { + | (Some(_), _) => true + | _ => false + } + + let isHuggableExpression = expr => + switch expr.pexp_desc { + | Pexp_array(_) + | Pexp_tuple(_) + | Pexp_construct({txt: Longident.Lident("::" | "[]")}, _) + | Pexp_extension({txt: "bs.obj"}, _) + | Pexp_record(_) => true + | _ when isBlockExpr(expr) => true + | _ when isBracedExpr(expr) => true + | _ => false + } + + let isHuggableRhs = expr => + switch expr.pexp_desc { + | Pexp_array(_) + | Pexp_tuple(_) + | Pexp_construct({txt: Longident.Lident("::" | "[]")}, _) + | Pexp_extension({txt: "bs.obj"}, _) + | Pexp_record(_) => true + | _ when isBracedExpr(expr) => true + | _ => false + } + + let isHuggablePattern = pattern => + switch pattern.ppat_desc { + | Ppat_array(_) | Ppat_tuple(_) | Ppat_record(_) | Ppat_construct(_) => true + | _ => false + } + + let operatorPrecedence = operator => + switch operator { + | ":=" => 1 + | "||" => 2 + | "&&" => 3 + | "=" | "==" | "<" | ">" | "!=" | "<>" | "!==" | "<=" | ">=" | "|>" => 4 + | "+" | "+." | "-" | "-." | "^" => 5 + | "*" | "*." | "/" | "/." => 6 + | "**" => 7 + | "#" | "##" | "|." => 8 + | _ => 0 + } + + let isUnaryOperator = operator => + switch operator { + | "~+" | "~+." | "~-" | "~-." | "not" => true + | _ => false + } + + let isUnaryExpression = expr => + switch expr.pexp_desc { + | Pexp_apply({pexp_desc: Pexp_ident({txt: Longident.Lident(operator)})}, list{(Nolabel, _arg)}) + when isUnaryOperator(operator) => true + | _ => false + } + + let isBinaryOperator = operator => + switch operator { + | ":=" + | "||" + | "&&" + | "=" + | "==" + | "<" + | ">" + | "!=" + | "!==" + | "<=" + | ">=" + | "|>" + | "+" + | "+." + | "-" + | "-." + | "^" + | "*" + | "*." + | "/" + | "/." + | "**" + | "|." 
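/* Worked example for operatorPrecedence above, using the values from the
 * table: in `a + b * c`, "*" maps to 6 and "+" to 5, so `b * c` binds tighter
 * and can be printed without parentheses inside the addition. */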
+ | "<>" => true + | _ => false + } + + let isBinaryExpression = expr => + switch expr.pexp_desc { + | Pexp_apply( + {pexp_desc: Pexp_ident({txt: Longident.Lident(operator)})}, + list{(Nolabel, _operand1), (Nolabel, _operand2)}, + ) when isBinaryOperator(operator) => true + | _ => false + } + + let isEqualityOperator = operator => + switch operator { + | "=" | "==" | "<>" | "!=" => true + | _ => false + } + + let flattenableOperators = (parentOperator, childOperator) => { + let precParent = operatorPrecedence(parentOperator) + let precChild = operatorPrecedence(childOperator) + if precParent === precChild { + !(isEqualityOperator(parentOperator) && isEqualityOperator(childOperator)) + } else { + false + } + } + + let hasAttributes = attrs => List.exists(attr => + switch attr { + | ({Location.txt: "bs" | "ns.ternary" | "ns.braces"}, _) => false + | _ => true + } + , attrs) + + let isArrayAccess = expr => + switch expr.pexp_desc { + | Pexp_apply( + {pexp_desc: Pexp_ident({txt: Longident.Ldot(Lident("Array"), "get")})}, + list{(Nolabel, _parentExpr), (Nolabel, _memberExpr)}, + ) => true + | _ => false + } + + let rec hasTernaryAttribute = attrs => + switch attrs { + | list{} => false + | list{({Location.txt: "ns.ternary"}, _), ..._} => true + | list{_, ...attrs} => hasTernaryAttribute(attrs) + } + + let isTernaryExpr = expr => + switch expr { + | {pexp_attributes: attrs, pexp_desc: Pexp_ifthenelse(_)} + when hasTernaryAttribute(attrs) => true + | _ => false + } + + let collectTernaryParts = expr => { + let rec collect = (acc, expr) => + switch expr { + | {pexp_attributes: attrs, pexp_desc: Pexp_ifthenelse(condition, consequent, Some(alternate))} + when hasTernaryAttribute(attrs) => + collect(list{(condition, consequent), ...acc}, alternate) + | alternate => (List.rev(acc), alternate) + } + + collect(list{}, expr) + } + + let parametersShouldHug = parameters => + switch parameters { + | list{Parameter({attrs: list{}, lbl: Asttypes.Nolabel, defaultExpr: None, pat})} + when isHuggablePattern(pat) => true + | _ => false + } + + let filterTernaryAttributes = attrs => List.filter(attr => + switch attr { + | ({Location.txt: "ns.ternary"}, _) => false + | _ => true + } + , attrs) + + let isJsxExpression = expr => { + let rec loop = attrs => + switch attrs { + | list{} => false + | list{({Location.txt: "JSX"}, _), ..._} => true + | list{_, ...attrs} => loop(attrs) + } + + switch expr.pexp_desc { + | Pexp_apply(_) => loop(expr.Parsetree.pexp_attributes) + | _ => false + } + } + + let hasJsxAttribute = attributes => + switch attributes { + | list{({Location.txt: "JSX"}, _), ..._} => true + | _ => false + } + + let shouldIndentBinaryExpr = expr => { + let samePrecedenceSubExpression = (operator, subExpression) => + switch subExpression { + | { + pexp_desc: + Pexp_apply( + {pexp_desc: Pexp_ident({txt: Longident.Lident(subOperator)})}, + list{(Nolabel, _lhs), (Nolabel, _rhs)}, + ), + } when isBinaryOperator(subOperator) => + flattenableOperators(operator, subOperator) + | _ => true + } + + switch expr { + | { + pexp_desc: + Pexp_apply( + {pexp_desc: Pexp_ident({txt: Longident.Lident(operator)})}, + list{(Nolabel, lhs), (Nolabel, _rhs)}, + ), + } when isBinaryOperator(operator) => + isEqualityOperator(operator) || + (!samePrecedenceSubExpression(operator, lhs) || + operator == ":=") + | _ => false + } + } + + let shouldInlineRhsBinaryExpr = rhs => + switch rhs.pexp_desc { + | Parsetree.Pexp_constant(_) + | Pexp_let(_) + | Pexp_letmodule(_) + | Pexp_letexception(_) + | Pexp_sequence(_) + | Pexp_open(_) + | 
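/* Illustrative cases for flattenableOperators above: "&&" chained with "&&"
 * shares one precedence level and neither is an equality operator, so
 * `a && b && c` can be printed as one flat chain; "==" chained with "==" also
 * shares a level, but both are equality operators, so the grouping is kept. */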
Pexp_ifthenelse(_) + | Pexp_for(_) + | Pexp_while(_) + | Pexp_try(_) + | Pexp_array(_) + | Pexp_record(_) => true + | _ => false + } + + let filterPrinteableAttributes = attrs => List.filter(attr => + switch attr { + | ({Location.txt: "bs" | "ns.ternary"}, _) => false + | _ => true + } + , attrs) + + let partitionPrinteableAttributes = attrs => List.partition(attr => + switch attr { + | ({Location.txt: "bs" | "ns.ternary"}, _) => false + | _ => true + } + , attrs) + + let requiresSpecialCallbackPrintingLastArg = args => { + let rec loop = args => + switch args { + | list{} => false + | list{(_, {pexp_desc: Pexp_fun(_) | Pexp_newtype(_)})} => true + | list{(_, {pexp_desc: Pexp_fun(_) | Pexp_newtype(_)}), ..._} => false + | list{_, ...rest} => loop(rest) + } + + loop(args) + } + + let requiresSpecialCallbackPrintingFirstArg = args => { + let rec loop = args => + switch args { + | list{} => true + | list{(_, {pexp_desc: Pexp_fun(_) | Pexp_newtype(_)}), ..._} => false + | list{_, ...rest} => loop(rest) + } + + switch args { + | list{(_, {pexp_desc: Pexp_fun(_) | Pexp_newtype(_)})} => false + | list{(_, {pexp_desc: Pexp_fun(_) | Pexp_newtype(_)}), ...rest} => loop(rest) + | _ => false + } + } + + let modExprApply = modExpr => { + let rec loop = (acc, modExpr) => + switch modExpr { + | {pmod_desc: Pmod_apply(next, arg)} => loop(list{arg, ...acc}, next) + | _ => (acc, modExpr) + } + + loop(list{}, modExpr) + } + + let modExprFunctor = modExpr => { + let rec loop = (acc, modExpr) => + switch modExpr { + | {pmod_desc: Pmod_functor(lbl, modType, returnModExpr), pmod_attributes: attrs} => + let param = (attrs, lbl, modType) + loop(list{param, ...acc}, returnModExpr) + | returnModExpr => (List.rev(acc), returnModExpr) + } + + loop(list{}, modExpr) + } + + let splitGenTypeAttr = attrs => + switch attrs { + | list{({Location.txt: "genType"}, _), ...attrs} => (true, attrs) + | attrs => (false, attrs) + } + + let rec collectPatternsFromListConstruct = (acc, pattern) => { + open Parsetree + switch pattern.ppat_desc { + | Ppat_construct( + {txt: Longident.Lident("::")}, + Some({ppat_desc: Ppat_tuple(list{pat, rest})}), + ) => + collectPatternsFromListConstruct(list{pat, ...acc}, rest) + | _ => (List.rev(acc), pattern) + } + } + + let rec isTemplateLiteral = expr => { + let isPexpConstantString = expr => + switch expr.pexp_desc { + | Pexp_constant(Pconst_string(_, Some(_))) => true + | _ => false + } + + switch expr.pexp_desc { + | Pexp_apply( + {pexp_desc: Pexp_ident({txt: Longident.Lident("^")})}, + list{(Nolabel, arg1), (Nolabel, arg2)}, + ) when !(isPexpConstantString(arg1) && isPexpConstantString(arg2)) => + isTemplateLiteral(arg1) || isTemplateLiteral(arg2) + | Pexp_constant(Pconst_string(_, Some(_))) => true + | _ => false + } + } + + /* Blue | Red | Green -> [Blue; Red; Green] */ + let collectOrPatternChain = pat => { + let rec loop = (pattern, chain) => + switch pattern.ppat_desc { + | Ppat_or(left, right) => loop(left, list{right, ...chain}) + | _ => list{pattern, ...chain} + } + + loop(pat, list{}) + } + + let isPipeExpr = expr => + switch expr.pexp_desc { + | Pexp_apply( + {pexp_desc: Pexp_ident({txt: Longident.Lident("|." 
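/* Example for collectOrPatternChain above: `Blue | Red | Green` is parsed as
 * nested Ppat_or nodes ((Blue | Red) | Green); walking the left spine yields
 * the flat chain list{Blue, Red, Green} used for printing. */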
| "|>")})}, + list{(Nolabel, _operand1), (Nolabel, _operand2)}, + ) => true + | _ => false + } + + let extractValueDescriptionFromModExpr = modExpr => { + let rec loop = (structure, acc) => + switch structure { + | list{} => List.rev(acc) + | list{structureItem, ...structure} => + switch structureItem.Parsetree.pstr_desc { + | Pstr_primitive(vd) => loop(structure, list{vd, ...acc}) + | _ => loop(structure, acc) + } + } + + switch modExpr.pmod_desc { + | Pmod_structure(structure) => loop(structure, list{}) + | _ => list{} + } + } + + type jsImportScope = + | JsGlobalImport /* nothing */ + | JsModuleImport(string) /* from "path" */ + | JsScopedImport(list) /* window.location */ + + let classifyJsImport = valueDescription => { + let rec loop = attrs => { + open Parsetree + switch attrs { + | list{} => JsGlobalImport + | list{ + ( + {Location.txt: "bs.scope"}, + PStr(list{{pstr_desc: Pstr_eval({pexp_desc: Pexp_constant(Pconst_string(s, _))}, _)}}), + ), + ..._, + } => + JsScopedImport(list{s}) + | list{ + ( + {Location.txt: "genType.import"}, + PStr(list{{pstr_desc: Pstr_eval({pexp_desc: Pexp_constant(Pconst_string(s, _))}, _)}}), + ), + ..._, + } => + JsModuleImport(s) + | list{ + ( + {Location.txt: "bs.scope"}, + PStr(list{{pstr_desc: Pstr_eval({pexp_desc: Pexp_tuple(exprs)}, _)}}), + ), + ..._, + } => + let scopes = List.fold_left((acc, curr) => + switch curr.Parsetree.pexp_desc { + | Pexp_constant(Pconst_string(s, _)) => list{s, ...acc} + | _ => acc + } + , list{}, exprs) + + JsScopedImport(List.rev(scopes)) + | list{_, ...attrs} => loop(attrs) + } + } + + loop(valueDescription.pval_attributes) + } + + let isUnderscoreApplySugar = expr => + switch expr.pexp_desc { + | Pexp_fun( + Nolabel, + None, + {ppat_desc: Ppat_var({txt: "__x"})}, + {pexp_desc: Pexp_apply(_)}, + ) => true + | _ => false + } +} + +module Parens: { + type kind = Parenthesized | Braced(Location.t) | Nothing + + let expr: Parsetree.expression => kind + let structureExpr: Parsetree.expression => kind + + let unaryExprOperand: Parsetree.expression => kind + + let binaryExprOperand: (~isLhs: bool, Parsetree.expression) => kind + let subBinaryExprOperand: (string, string) => bool + let rhsBinaryExprOperand: (string, Parsetree.expression) => bool + let flattenOperandRhs: (string, Parsetree.expression) => bool + + let lazyOrAssertExprRhs: Parsetree.expression => kind + + let fieldExpr: Parsetree.expression => kind + + let setFieldExprRhs: Parsetree.expression => kind + + let ternaryOperand: Parsetree.expression => kind + + let jsxPropExpr: Parsetree.expression => kind + let jsxChildExpr: Parsetree.expression => kind + + let binaryExpr: Parsetree.expression => kind + let modTypeFunctorReturn: Parsetree.module_type => bool + let modTypeWithOperand: Parsetree.module_type => bool + let modExprFunctorConstraint: Parsetree.module_type => bool + + let bracedExpr: Parsetree.expression => bool + let callExpr: Parsetree.expression => kind + + let includeModExpr: Parsetree.module_expr => bool +} = { + type kind = Parenthesized | Braced(Location.t) | Nothing + + let expr = expr => { + let (optBraces, _) = ParsetreeViewer.processBracesAttr(expr) + switch optBraces { + | Some({Location.loc: bracesLoc}, _) => Braced(bracesLoc) + | _ => + switch expr { + | { + Parsetree.pexp_desc: + Pexp_constraint({pexp_desc: Pexp_pack(_)}, {ptyp_desc: Ptyp_package(_)}), + } => + Nothing + | {pexp_desc: Pexp_constraint(_)} => Parenthesized + | _ => Nothing + } + } + } + + let callExpr = expr => { + let (optBraces, _) = ParsetreeViewer.processBracesAttr(expr) + 
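/* Illustrative mapping for classifyJsImport above (attribute payloads are
 * examples, not exhaustive): no scope/import attribute -> JsGlobalImport;
 * `@genType.import("./MyMath")` -> JsModuleImport("./MyMath");
 * `@bs.scope(("window", "location"))` -> JsScopedImport(list{"window", "location"}). */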
switch optBraces { + | Some({Location.loc: bracesLoc}, _) => Braced(bracesLoc) + | _ => + switch expr { + | {Parsetree.pexp_attributes: attrs} + when switch ParsetreeViewer.filterParsingAttrs(attrs) { + | list{_, ..._} => true + | list{} => false + } => + Parenthesized + | _ + when ParsetreeViewer.isUnaryExpression(expr) || ParsetreeViewer.isBinaryExpression(expr) => + Parenthesized + | { + Parsetree.pexp_desc: + Pexp_constraint({pexp_desc: Pexp_pack(_)}, {ptyp_desc: Ptyp_package(_)}), + } => + Nothing + | {pexp_desc: Pexp_fun(_)} when ParsetreeViewer.isUnderscoreApplySugar(expr) => Nothing + | { + pexp_desc: + Pexp_lazy(_) + | Pexp_assert(_) + | Pexp_fun(_) + | Pexp_newtype(_) + | Pexp_function(_) + | Pexp_constraint(_) + | Pexp_setfield(_) + | Pexp_match(_) + | Pexp_try(_) + | Pexp_while(_) + | Pexp_for(_) + | Pexp_ifthenelse(_), + } => + Parenthesized + | _ => Nothing + } + } + } + + let structureExpr = expr => { + let (optBraces, _) = ParsetreeViewer.processBracesAttr(expr) + switch optBraces { + | Some({Location.loc: bracesLoc}, _) => Braced(bracesLoc) + | None => + switch expr { + | _ + when ParsetreeViewer.hasAttributes(expr.pexp_attributes) && + !ParsetreeViewer.isJsxExpression(expr) => + Parenthesized + | { + Parsetree.pexp_desc: + Pexp_constraint({pexp_desc: Pexp_pack(_)}, {ptyp_desc: Ptyp_package(_)}), + } => + Nothing + | {pexp_desc: Pexp_constraint(_)} => Parenthesized + | _ => Nothing + } + } + } + + let unaryExprOperand = expr => { + let (optBraces, _) = ParsetreeViewer.processBracesAttr(expr) + switch optBraces { + | Some({Location.loc: bracesLoc}, _) => Braced(bracesLoc) + | None => + switch expr { + | {Parsetree.pexp_attributes: attrs} + when switch ParsetreeViewer.filterParsingAttrs(attrs) { + | list{_, ..._} => true + | list{} => false + } => + Parenthesized + | expr + when ParsetreeViewer.isUnaryExpression(expr) || ParsetreeViewer.isBinaryExpression(expr) => + Parenthesized + | {pexp_desc: Pexp_constraint({pexp_desc: Pexp_pack(_)}, {ptyp_desc: Ptyp_package(_)})} => + Nothing + | {pexp_desc: Pexp_fun(_)} when ParsetreeViewer.isUnderscoreApplySugar(expr) => Nothing + | { + pexp_desc: + Pexp_lazy(_) + | Pexp_assert(_) + | Pexp_fun(_) + | Pexp_newtype(_) + | Pexp_function(_) + | Pexp_constraint(_) + | Pexp_setfield(_) + | Pexp_extension(_) + | Pexp_match(_) + | Pexp_try(_) + | Pexp_while(_) + | Pexp_for(_) + | Pexp_ifthenelse(_), + } => + Parenthesized + | _ => Nothing + } + } + } + + let binaryExprOperand = (~isLhs, expr) => { + let (optBraces, _) = ParsetreeViewer.processBracesAttr(expr) + switch optBraces { + | Some({Location.loc: bracesLoc}, _) => Braced(bracesLoc) + | None => + switch expr { + | { + Parsetree.pexp_desc: + Pexp_constraint({pexp_desc: Pexp_pack(_)}, {ptyp_desc: Ptyp_package(_)}), + } => + Nothing + | {pexp_desc: Pexp_fun(_)} when ParsetreeViewer.isUnderscoreApplySugar(expr) => Nothing + | {pexp_desc: Pexp_constraint(_) | Pexp_fun(_) | Pexp_function(_) | Pexp_newtype(_)} => + Parenthesized + | expr when ParsetreeViewer.isBinaryExpression(expr) => Parenthesized + | expr when ParsetreeViewer.isTernaryExpr(expr) => Parenthesized + | {pexp_desc: Pexp_lazy(_) | Pexp_assert(_)} when isLhs => Parenthesized + | _ => Nothing + } + } + } + + let subBinaryExprOperand = (parentOperator, childOperator) => { + let precParent = ParsetreeViewer.operatorPrecedence(parentOperator) + let precChild = ParsetreeViewer.operatorPrecedence(childOperator) + precParent > precChild || + ((precParent === precChild && + !ParsetreeViewer.flattenableOperators(parentOperator, 
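/* Rough example for unaryExprOperand above: in `-(a + b)` the operand `a + b`
 * is a binary expression, so the table yields Parenthesized and the printed
 * output keeps the parentheses; a plain operand such as `-x` falls through to
 * Nothing. */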
childOperator)) || + /* a && b || c, add parens to (a && b) for readability, who knows the difference by heart… */ + parentOperator == "||" && childOperator == "&&") + } + + let rhsBinaryExprOperand = (parentOperator, rhs) => + switch rhs.Parsetree.pexp_desc { + | Parsetree.Pexp_apply( + {pexp_attributes: list{}, pexp_desc: Pexp_ident({txt: Longident.Lident(operator)})}, + list{(_, _left), (_, _right)}, + ) when ParsetreeViewer.isBinaryOperator(operator) => + let precParent = ParsetreeViewer.operatorPrecedence(parentOperator) + let precChild = ParsetreeViewer.operatorPrecedence(operator) + precParent === precChild + | _ => false + } + + let flattenOperandRhs = (parentOperator, rhs) => + switch rhs.Parsetree.pexp_desc { + | Parsetree.Pexp_apply( + {pexp_desc: Pexp_ident({txt: Longident.Lident(operator)})}, + list{(_, _left), (_, _right)}, + ) when ParsetreeViewer.isBinaryOperator(operator) => + let precParent = ParsetreeViewer.operatorPrecedence(parentOperator) + let precChild = ParsetreeViewer.operatorPrecedence(operator) + precParent >= precChild || rhs.pexp_attributes != list{} + | Pexp_constraint({pexp_desc: Pexp_pack(_)}, {ptyp_desc: Ptyp_package(_)}) => false + | Pexp_fun(_) when ParsetreeViewer.isUnderscoreApplySugar(rhs) => false + | Pexp_fun(_) | Pexp_newtype(_) | Pexp_setfield(_) | Pexp_constraint(_) => true + | _ when ParsetreeViewer.isTernaryExpr(rhs) => true + | _ => false + } + + let lazyOrAssertExprRhs = expr => { + let (optBraces, _) = ParsetreeViewer.processBracesAttr(expr) + switch optBraces { + | Some({Location.loc: bracesLoc}, _) => Braced(bracesLoc) + | None => + switch expr { + | {Parsetree.pexp_attributes: attrs} + when switch ParsetreeViewer.filterParsingAttrs(attrs) { + | list{_, ..._} => true + | list{} => false + } => + Parenthesized + | expr when ParsetreeViewer.isBinaryExpression(expr) => Parenthesized + | {pexp_desc: Pexp_constraint({pexp_desc: Pexp_pack(_)}, {ptyp_desc: Ptyp_package(_)})} => + Nothing + | {pexp_desc: Pexp_fun(_)} when ParsetreeViewer.isUnderscoreApplySugar(expr) => Nothing + | { + pexp_desc: + Pexp_lazy(_) + | Pexp_assert(_) + | Pexp_fun(_) + | Pexp_newtype(_) + | Pexp_function(_) + | Pexp_constraint(_) + | Pexp_setfield(_) + | Pexp_match(_) + | Pexp_try(_) + | Pexp_while(_) + | Pexp_for(_) + | Pexp_ifthenelse(_), + } => + Parenthesized + | _ => Nothing + } + } + } + + let isNegativeConstant = constant => { + let isNeg = txt => { + let len = String.length(txt) + len > 0 && (@doesNotRaise String.get)(txt, 0) == '-' + } + + switch constant { + | Parsetree.Pconst_integer(i, _) | Pconst_float(i, _) when isNeg(i) => true + | _ => false + } + } + + let fieldExpr = expr => { + let (optBraces, _) = ParsetreeViewer.processBracesAttr(expr) + switch optBraces { + | Some({Location.loc: bracesLoc}, _) => Braced(bracesLoc) + | None => + switch expr { + | {Parsetree.pexp_attributes: attrs} + when switch ParsetreeViewer.filterParsingAttrs(attrs) { + | list{_, ..._} => true + | list{} => false + } => + Parenthesized + | expr + when ParsetreeViewer.isBinaryExpression(expr) || ParsetreeViewer.isUnaryExpression(expr) => + Parenthesized + | {pexp_desc: Pexp_constraint({pexp_desc: Pexp_pack(_)}, {ptyp_desc: Ptyp_package(_)})} => + Nothing + | {pexp_desc: Pexp_constant(c)} when isNegativeConstant(c) => Parenthesized + | {pexp_desc: Pexp_fun(_)} when ParsetreeViewer.isUnderscoreApplySugar(expr) => Nothing + | { + pexp_desc: + Pexp_lazy(_) + | Pexp_assert(_) + | Pexp_extension(_) + | Pexp_fun(_) + | Pexp_newtype(_) + | Pexp_function(_) + | Pexp_constraint(_) + | 
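/* Worked instance of subBinaryExprOperand above, matching the readability note
 * in its body: for `a && b || c` the parent operator is "||" and the child is
 * "&&"; the explicit "||"/"&&" clause returns true, so the printer emits
 * `(a && b) || c`. */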
Pexp_setfield(_) + | Pexp_match(_) + | Pexp_try(_) + | Pexp_while(_) + | Pexp_for(_) + | Pexp_ifthenelse(_), + } => + Parenthesized + | _ => Nothing + } + } + } + + let setFieldExprRhs = expr => { + let (optBraces, _) = ParsetreeViewer.processBracesAttr(expr) + switch optBraces { + | Some({Location.loc: bracesLoc}, _) => Braced(bracesLoc) + | None => + switch expr { + | { + Parsetree.pexp_desc: + Pexp_constraint({pexp_desc: Pexp_pack(_)}, {ptyp_desc: Ptyp_package(_)}), + } => + Nothing + | {pexp_desc: Pexp_constraint(_)} => Parenthesized + | _ => Nothing + } + } + } + + let ternaryOperand = expr => { + let (optBraces, _) = ParsetreeViewer.processBracesAttr(expr) + switch optBraces { + | Some({Location.loc: bracesLoc}, _) => Braced(bracesLoc) + | None => + switch expr { + | { + Parsetree.pexp_desc: + Pexp_constraint({pexp_desc: Pexp_pack(_)}, {ptyp_desc: Ptyp_package(_)}), + } => + Nothing + | {pexp_desc: Pexp_constraint(_)} => Parenthesized + | {pexp_desc: Pexp_fun(_) | Pexp_newtype(_)} => + let (_attrsOnArrow, _parameters, returnExpr) = ParsetreeViewer.funExpr(expr) + switch returnExpr.pexp_desc { + | Pexp_constraint(_) => Parenthesized + | _ => Nothing + } + | _ => Nothing + } + } + } + + let startsWithMinus = txt => { + let len = String.length(txt) + if len === 0 { + false + } else { + let s = (@doesNotRaise String.get)(txt, 0) + s == '-' + } + } + + let jsxPropExpr = expr => + switch expr.Parsetree.pexp_desc { + | Parsetree.Pexp_let(_) + | Pexp_sequence(_) + | Pexp_letexception(_) + | Pexp_letmodule(_) + | Pexp_open(_) => + Nothing + | _ => + let (optBraces, _) = ParsetreeViewer.processBracesAttr(expr) + switch optBraces { + | Some({Location.loc: bracesLoc}, _) => Braced(bracesLoc) + | None => + switch expr { + | { + Parsetree.pexp_desc: Pexp_constant(Pconst_integer(x, _) | Pconst_float(x, _)), + pexp_attributes: list{}, + } when startsWithMinus(x) => + Parenthesized + | { + Parsetree.pexp_desc: + Pexp_ident(_) + | Pexp_constant(_) + | Pexp_field(_) + | Pexp_construct(_) + | Pexp_variant(_) + | Pexp_array(_) + | Pexp_pack(_) + | Pexp_record(_) + | Pexp_extension(_) + | Pexp_letmodule(_) + | Pexp_letexception(_) + | Pexp_open(_) + | Pexp_sequence(_) + | Pexp_let(_) + | Pexp_tuple(_), + pexp_attributes: list{}, + } => + Nothing + | { + Parsetree.pexp_desc: + Pexp_constraint({pexp_desc: Pexp_pack(_)}, {ptyp_desc: Ptyp_package(_)}), + pexp_attributes: list{}, + } => + Nothing + | _ => Parenthesized + } + } + } + + let jsxChildExpr = expr => + switch expr.Parsetree.pexp_desc { + | Parsetree.Pexp_let(_) + | Pexp_sequence(_) + | Pexp_letexception(_) + | Pexp_letmodule(_) + | Pexp_open(_) => + Nothing + | _ => + let (optBraces, _) = ParsetreeViewer.processBracesAttr(expr) + switch optBraces { + | Some({Location.loc: bracesLoc}, _) => Braced(bracesLoc) + | _ => + switch expr { + | { + Parsetree.pexp_desc: Pexp_constant(Pconst_integer(x, _) | Pconst_float(x, _)), + pexp_attributes: list{}, + } when startsWithMinus(x) => + Parenthesized + | { + Parsetree.pexp_desc: + Pexp_ident(_) + | Pexp_constant(_) + | Pexp_field(_) + | Pexp_construct(_) + | Pexp_variant(_) + | Pexp_array(_) + | Pexp_pack(_) + | Pexp_record(_) + | Pexp_extension(_) + | Pexp_letmodule(_) + | Pexp_letexception(_) + | Pexp_open(_) + | Pexp_sequence(_) + | Pexp_let(_), + pexp_attributes: list{}, + } => + Nothing + | { + Parsetree.pexp_desc: + Pexp_constraint({pexp_desc: Pexp_pack(_)}, {ptyp_desc: Ptyp_package(_)}), + pexp_attributes: list{}, + } => + Nothing + | expr when ParsetreeViewer.isJsxExpression(expr) => Nothing + | _ => 
Parenthesized + } + } + } + + let binaryExpr = expr => { + let (optBraces, _) = ParsetreeViewer.processBracesAttr(expr) + switch optBraces { + | Some({Location.loc: bracesLoc}, _) => Braced(bracesLoc) + | None => + switch expr { + | {Parsetree.pexp_attributes: list{_, ..._}} as expr + when ParsetreeViewer.isBinaryExpression(expr) => + Parenthesized + | _ => Nothing + } + } + } + + let modTypeFunctorReturn = modType => + switch modType { + | {Parsetree.pmty_desc: Pmty_with(_)} => true + | _ => false + } + + /* Add parens for readability: + module type Functor = SetLike => Set with type t = A.t + This is actually: + module type Functor = (SetLike => Set) with type t = A.t + */ + let modTypeWithOperand = modType => + switch modType { + | {Parsetree.pmty_desc: Pmty_functor(_)} => true + | _ => false + } + + let modExprFunctorConstraint = modType => + switch modType { + | {Parsetree.pmty_desc: Pmty_functor(_) | Pmty_with(_)} => true + | _ => false + } + + let bracedExpr = expr => + switch expr.Parsetree.pexp_desc { + | Pexp_constraint({pexp_desc: Pexp_pack(_)}, {ptyp_desc: Ptyp_package(_)}) => false + | Pexp_constraint(_) => true + | _ => false + } + + let includeModExpr = modExpr => + switch modExpr.Parsetree.pmod_desc { + | Parsetree.Pmod_constraint(_) => true + | _ => false + } +} + +module CommentTable = { + type t = { + leading: Hashtbl.t>, + inside: Hashtbl.t>, + trailing: Hashtbl.t>, + } + + let make = () => { + leading: Hashtbl.create(100), + inside: Hashtbl.create(100), + trailing: Hashtbl.create(100), + } + + let empty = make() + + @live + let log = t => { + open Location + let leadingStuff = Hashtbl.fold((k: Location.t, v: list, acc) => { + let loc = Doc.concat(list{ + Doc.lbracket, + Doc.text(string_of_int(k.loc_start.pos_lnum)), + Doc.text(":"), + Doc.text(string_of_int(k.loc_start.pos_cnum - k.loc_start.pos_bol)), + Doc.text("-"), + Doc.text(string_of_int(k.loc_end.pos_lnum)), + Doc.text(":"), + Doc.text(string_of_int(k.loc_end.pos_cnum - k.loc_end.pos_bol)), + Doc.rbracket, + }) + let doc = Doc.breakableGroup( + ~forceBreak=true, + Doc.concat(list{ + loc, + Doc.indent( + Doc.concat(list{ + Doc.line, + Doc.join(~sep=Doc.comma, List.map(c => Doc.text(Comment.txt(c)), v)), + }), + ), + Doc.line, + }), + ) + list{doc, ...acc} + }, t.leading, list{}) + + let trailingStuff = Hashtbl.fold((k: Location.t, v: list, acc) => { + let loc = Doc.concat(list{ + Doc.lbracket, + Doc.text(string_of_int(k.loc_start.pos_lnum)), + Doc.text(":"), + Doc.text(string_of_int(k.loc_start.pos_cnum - k.loc_start.pos_bol)), + Doc.text("-"), + Doc.text(string_of_int(k.loc_end.pos_lnum)), + Doc.text(":"), + Doc.text(string_of_int(k.loc_end.pos_cnum - k.loc_end.pos_bol)), + Doc.rbracket, + }) + let doc = Doc.breakableGroup( + ~forceBreak=true, + Doc.concat(list{ + loc, + Doc.indent( + Doc.concat(list{ + Doc.line, + Doc.join( + ~sep=Doc.concat(list{Doc.comma, Doc.line}), + List.map(c => Doc.text(Comment.txt(c)), v), + ), + }), + ), + Doc.line, + }), + ) + list{doc, ...acc} + }, t.trailing, list{}) + + Doc.breakableGroup( + ~forceBreak=true, + Doc.concat(list{ + Doc.text("leading comments:"), + Doc.line, + Doc.indent(Doc.concat(leadingStuff)), + Doc.line, + Doc.line, + Doc.text("trailing comments:"), + Doc.indent(Doc.concat(trailingStuff)), + Doc.line, + Doc.line, + }), + ) + |> Doc.toString(~width=80) + |> print_endline + } + let attach = (tbl, loc, comments) => + switch comments { + | list{} => () + | comments => Hashtbl.replace(tbl, loc, comments) + } + + let partitionByLoc = (comments, loc) => { + let rec loop 
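/* Sketch of how partitionByLoc is expected to split comments (positions are
 * hypothetical character offsets): for a node spanning offsets 10-20, a
 * comment ending at offset 8 is leading, one starting at offset 22 is
 * trailing, and one contained within 10-20 is inside; attach then stores each
 * non-empty bucket in the matching hashtable, keyed by the node's location. */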
= ((leading, inside, trailing), comments) => { + open Location + switch comments { + | list{comment, ...rest} => + let cmtLoc = Comment.loc(comment) + if cmtLoc.loc_end.pos_cnum <= loc.loc_start.pos_cnum { + loop((list{comment, ...leading}, inside, trailing), rest) + } else if cmtLoc.loc_start.pos_cnum >= loc.loc_end.pos_cnum { + loop((leading, inside, list{comment, ...trailing}), rest) + } else { + loop((leading, list{comment, ...inside}, trailing), rest) + } + | list{} => (List.rev(leading), List.rev(inside), List.rev(trailing)) + } + } + + loop((list{}, list{}, list{}), comments) + } + + let partitionLeadingTrailing = (comments, loc) => { + let rec loop = ((leading, trailing), comments) => { + open Location + switch comments { + | list{comment, ...rest} => + let cmtLoc = Comment.loc(comment) + if cmtLoc.loc_end.pos_cnum <= loc.loc_start.pos_cnum { + loop((list{comment, ...leading}, trailing), rest) + } else { + loop((leading, list{comment, ...trailing}), rest) + } + | list{} => (List.rev(leading), List.rev(trailing)) + } + } + + loop((list{}, list{}), comments) + } + + let partitionByOnSameLine = (loc, comments) => { + let rec loop = ((onSameLine, onOtherLine), comments) => { + open Location + switch comments { + | list{} => (List.rev(onSameLine), List.rev(onOtherLine)) + | list{comment, ...rest} => + let cmtLoc = Comment.loc(comment) + if cmtLoc.loc_start.pos_lnum === loc.loc_end.pos_lnum { + loop((list{comment, ...onSameLine}, onOtherLine), rest) + } else { + loop((onSameLine, list{comment, ...onOtherLine}), rest) + } + } + } + + loop((list{}, list{}), comments) + } + + let partitionAdjacentTrailing = (loc1, comments) => { + open Location + open Lexing + let rec loop = (~prevEndPos, afterLoc1, comments) => + switch comments { + | list{} => (List.rev(afterLoc1), list{}) + | list{comment, ...rest} as comments => + let cmtPrevEndPos = Comment.prevTokEndPos(comment) + if prevEndPos.Lexing.pos_cnum === cmtPrevEndPos.pos_cnum { + let commentEnd = Comment.loc(comment).loc_end + loop(~prevEndPos=commentEnd, list{comment, ...afterLoc1}, rest) + } else { + (List.rev(afterLoc1), comments) + } + } + + loop(~prevEndPos=loc1.loc_end, list{}, comments) + } + + let rec collectListPatterns = (acc, pattern) => { + open Parsetree + switch pattern.ppat_desc { + | Ppat_construct( + {txt: Longident.Lident("::")}, + Some({ppat_desc: Ppat_tuple(list{pat, rest})}), + ) => + collectListPatterns(list{pat, ...acc}, rest) + | Ppat_construct({txt: Longident.Lident("[]")}, None) => List.rev(acc) + | _ => List.rev(list{pattern, ...acc}) + } + } + + let rec collectListExprs = (acc, expr) => { + open Parsetree + switch expr.pexp_desc { + | Pexp_construct( + {txt: Longident.Lident("::")}, + Some({pexp_desc: Pexp_tuple(list{expr, rest})}), + ) => + collectListExprs(list{expr, ...acc}, rest) + | Pexp_construct({txt: Longident.Lident("[]")}, _) => List.rev(acc) + | _ => List.rev(list{expr, ...acc}) + } + } + + /* TODO: use ParsetreeViewer */ + let arrowType = ct => { + open Parsetree + let rec process = (attrsBefore, acc, typ) => + switch typ { + | {ptyp_desc: Ptyp_arrow(Nolabel as lbl, typ1, typ2), ptyp_attributes: list{}} => + let arg = (list{}, lbl, typ1) + process(attrsBefore, list{arg, ...acc}, typ2) + | { + ptyp_desc: Ptyp_arrow(Nolabel as lbl, typ1, typ2), + ptyp_attributes: list{({txt: "bs"}, _)} as attrs, + } => + let arg = (attrs, lbl, typ1) + process(attrsBefore, list{arg, ...acc}, typ2) + | {ptyp_desc: Ptyp_arrow(Nolabel, _typ1, _typ2), ptyp_attributes: _attrs} as returnType => + let args = List.rev(acc) + 
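/* Illustrative behaviour of partitionAdjacentTrailing above: a comment whose
 * previous token ends exactly where loc1 ends (nothing in between) is grouped
 * as directly trailing loc1, and the scan keeps extending through further
 * adjacent comments; the first comment separated from them by another token
 * stops the scan and is returned together with the remaining comments. */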
(attrsBefore, args, returnType) + | { + ptyp_desc: Ptyp_arrow((Labelled(_) | Optional(_)) as lbl, typ1, typ2), + ptyp_attributes: attrs, + } => + let arg = (attrs, lbl, typ1) + process(attrsBefore, list{arg, ...acc}, typ2) + | typ => (attrsBefore, List.rev(acc), typ) + } + + switch ct { + | {ptyp_desc: Ptyp_arrow(Nolabel, _typ1, _typ2), ptyp_attributes: attrs} as typ => + process(attrs, list{}, {...typ, ptyp_attributes: list{}}) + | typ => process(list{}, list{}, typ) + } + } + + /* TODO: avoiding the dependency on ParsetreeViewer here, is this a good idea? */ + let modExprApply = modExpr => { + let rec loop = (acc, modExpr) => + switch modExpr { + | {Parsetree.pmod_desc: Pmod_apply(next, arg)} => loop(list{arg, ...acc}, next) + | _ => list{modExpr, ...acc} + } + + loop(list{}, modExpr) + } + + /* TODO: avoiding the dependency on ParsetreeViewer here, is this a good idea? */ + let modExprFunctor = modExpr => { + let rec loop = (acc, modExpr) => + switch modExpr { + | {Parsetree.pmod_desc: Pmod_functor(lbl, modType, returnModExpr), pmod_attributes: attrs} => + let param = (attrs, lbl, modType) + loop(list{param, ...acc}, returnModExpr) + | returnModExpr => (List.rev(acc), returnModExpr) + } + + loop(list{}, modExpr) + } + + let functorType = modtype => { + let rec process = (acc, modtype) => + switch modtype { + | {Parsetree.pmty_desc: Pmty_functor(lbl, argType, returnType), pmty_attributes: attrs} => + let arg = (attrs, lbl, argType) + process(list{arg, ...acc}, returnType) + | modType => (List.rev(acc), modType) + } + + process(list{}, modtype) + } + + let funExpr = expr => { + open Parsetree + /* Turns (type t, type u, type z) into "type t u z" */ + let rec collectNewTypes = (acc, returnExpr) => + switch returnExpr { + | {pexp_desc: Pexp_newtype(stringLoc, returnExpr), pexp_attributes: list{}} => + collectNewTypes(list{stringLoc, ...acc}, returnExpr) + | returnExpr => + let loc = switch (acc, List.rev(acc)) { + | (list{_startLoc, ..._}, list{endLoc, ..._}) => { + ...endLoc.loc, + loc_end: endLoc.loc.loc_end, + } + | _ => Location.none + } + + let txt = List.fold_right((curr, acc) => acc ++ (" " ++ curr.Location.txt), acc, "type") + (Location.mkloc(txt, loc), returnExpr) + } + + /* For simplicity reason Pexp_newtype gets converted to a Nolabel parameter, + * otherwise this function would need to return a variant: + * | NormalParamater(...) + * | NewType(...) + * This complicates printing with an extra variant/boxing/allocation for a code-path + * that is not often used. 
Lets just keep it simple for now */ + let rec collect = (attrsBefore, acc, expr) => + switch expr { + | {pexp_desc: Pexp_fun(lbl, defaultExpr, pattern, returnExpr), pexp_attributes: list{}} => + let parameter = (list{}, lbl, defaultExpr, pattern) + collect(attrsBefore, list{parameter, ...acc}, returnExpr) + | {pexp_desc: Pexp_newtype(stringLoc, rest), pexp_attributes: attrs} => + let (var, returnExpr) = collectNewTypes(list{stringLoc}, rest) + let parameter = (attrs, Asttypes.Nolabel, None, Ast_helper.Pat.var(~loc=stringLoc.loc, var)) + collect(attrsBefore, list{parameter, ...acc}, returnExpr) + | { + pexp_desc: Pexp_fun(lbl, defaultExpr, pattern, returnExpr), + pexp_attributes: list{({txt: "bs"}, _)} as attrs, + } => + let parameter = (attrs, lbl, defaultExpr, pattern) + collect(attrsBefore, list{parameter, ...acc}, returnExpr) + | { + pexp_desc: Pexp_fun((Labelled(_) | Optional(_)) as lbl, defaultExpr, pattern, returnExpr), + pexp_attributes: attrs, + } => + let parameter = (attrs, lbl, defaultExpr, pattern) + collect(attrsBefore, list{parameter, ...acc}, returnExpr) + | expr => (attrsBefore, List.rev(acc), expr) + } + + switch expr { + | { + pexp_desc: Pexp_fun(Nolabel, _defaultExpr, _pattern, _returnExpr), + pexp_attributes: attrs, + } as expr => + collect(attrs, list{}, {...expr, pexp_attributes: list{}}) + | expr => collect(list{}, list{}, expr) + } + } + + let rec isBlockExpr = expr => { + open Parsetree + switch expr.pexp_desc { + | Pexp_letmodule(_) + | Pexp_letexception(_) + | Pexp_let(_) + | Pexp_open(_) + | Pexp_sequence(_) => true + | Pexp_apply(callExpr, _) when isBlockExpr(callExpr) => true + | Pexp_constraint(expr, _) when isBlockExpr(expr) => true + | Pexp_field(expr, _) when isBlockExpr(expr) => true + | Pexp_setfield(expr, _, _) when isBlockExpr(expr) => true + | _ => false + } + } + + let rec walkStructure = (s, t, comments) => + switch s { + | _ when comments == list{} => () + | list{} => attach(t.inside, Location.none, comments) + | s => walkList(~getLoc=n => n.Parsetree.pstr_loc, ~walkNode=walkStructureItem, s, t, comments) + } + + and walkStructureItem = (si, t, comments) => + switch si.Parsetree.pstr_desc { + | _ when comments == list{} => () + | Pstr_primitive(valueDescription) => walkValueDescription(valueDescription, t, comments) + | Pstr_open(openDescription) => walkOpenDescription(openDescription, t, comments) + | Pstr_value(_, valueBindings) => walkValueBindings(valueBindings, t, comments) + | Pstr_type(_, typeDeclarations) => walkTypeDeclarations(typeDeclarations, t, comments) + | Pstr_eval(expr, _) => walkExpr(expr, t, comments) + | Pstr_module(moduleBinding) => walkModuleBinding(moduleBinding, t, comments) + | Pstr_recmodule(moduleBindings) => + walkList( + ~getLoc=mb => mb.Parsetree.pmb_loc, + ~walkNode=walkModuleBinding, + moduleBindings, + t, + comments, + ) + | Pstr_modtype(modTypDecl) => walkModuleTypeDeclaration(modTypDecl, t, comments) + | Pstr_attribute(attribute) => walkAttribute(attribute, t, comments) + | Pstr_extension(extension, _) => walkExtension(extension, t, comments) + | Pstr_include(includeDeclaration) => walkIncludeDeclaration(includeDeclaration, t, comments) + | Pstr_exception(extensionConstructor) => walkExtConstr(extensionConstructor, t, comments) + | Pstr_typext(typeExtension) => walkTypeExtension(typeExtension, t, comments) + | Pstr_class_type(_) | Pstr_class(_) => () + } + + and walkValueDescription = (vd, t, comments) => { + let (leading, trailing) = partitionLeadingTrailing(comments, vd.pval_name.loc) + attach(t.leading, 
vd.pval_name.loc, leading) + let (afterName, rest) = partitionAdjacentTrailing(vd.pval_name.loc, trailing) + attach(t.trailing, vd.pval_name.loc, afterName) + let (before, inside, after) = partitionByLoc(rest, vd.pval_type.ptyp_loc) + + attach(t.leading, vd.pval_type.ptyp_loc, before) + walkTypExpr(vd.pval_type, t, inside) + attach(t.trailing, vd.pval_type.ptyp_loc, after) + } + + and walkTypeExtension = (te, t, comments) => { + let (leading, trailing) = partitionLeadingTrailing(comments, te.ptyext_path.loc) + attach(t.leading, te.ptyext_path.loc, leading) + let (afterPath, rest) = partitionAdjacentTrailing(te.ptyext_path.loc, trailing) + attach(t.trailing, te.ptyext_path.loc, afterPath) + + /* type params */ + let rest = switch te.ptyext_params { + | list{} => rest + | typeParams => + visitListButContinueWithRemainingComments( + ~getLoc=((typexpr, _variance)) => typexpr.Parsetree.ptyp_loc, + ~walkNode=walkTypeParam, + ~newlineDelimited=false, + typeParams, + t, + rest, + ) + } + + walkList( + ~getLoc=n => n.Parsetree.pext_loc, + ~walkNode=walkExtConstr, + te.ptyext_constructors, + t, + rest, + ) + } + + and walkIncludeDeclaration = (inclDecl, t, comments) => { + let (before, inside, after) = partitionByLoc(comments, inclDecl.pincl_mod.pmod_loc) + attach(t.leading, inclDecl.pincl_mod.pmod_loc, before) + walkModExpr(inclDecl.pincl_mod, t, inside) + attach(t.trailing, inclDecl.pincl_mod.pmod_loc, after) + } + + and walkModuleTypeDeclaration = (mtd, t, comments) => { + let (leading, trailing) = partitionLeadingTrailing(comments, mtd.pmtd_name.loc) + attach(t.leading, mtd.pmtd_name.loc, leading) + switch mtd.pmtd_type { + | None => attach(t.trailing, mtd.pmtd_name.loc, trailing) + | Some(modType) => + let (afterName, rest) = partitionAdjacentTrailing(mtd.pmtd_name.loc, trailing) + attach(t.trailing, mtd.pmtd_name.loc, afterName) + let (before, inside, after) = partitionByLoc(rest, modType.pmty_loc) + attach(t.leading, modType.pmty_loc, before) + walkModType(modType, t, inside) + attach(t.trailing, modType.pmty_loc, after) + } + } + + and walkModuleBinding = (mb, t, comments) => { + let (leading, trailing) = partitionLeadingTrailing(comments, mb.pmb_name.loc) + attach(t.leading, mb.pmb_name.loc, leading) + let (afterName, rest) = partitionAdjacentTrailing(mb.pmb_name.loc, trailing) + attach(t.trailing, mb.pmb_name.loc, afterName) + let (leading, inside, trailing) = partitionByLoc(rest, mb.pmb_expr.pmod_loc) + switch mb.pmb_expr.pmod_desc { + | Pmod_constraint(_) => walkModExpr(mb.pmb_expr, t, List.concat(list{leading, inside})) + | _ => + attach(t.leading, mb.pmb_expr.pmod_loc, leading) + walkModExpr(mb.pmb_expr, t, inside) + } + attach(t.trailing, mb.pmb_expr.pmod_loc, trailing) + } + + and walkSignature = (signature, t, comments) => + switch signature { + | _ when comments == list{} => () + | list{} => attach(t.inside, Location.none, comments) + | _s => + walkList( + ~getLoc=n => n.Parsetree.psig_loc, + ~walkNode=walkSignatureItem, + signature, + t, + comments, + ) + } + + and walkSignatureItem = (si, t, comments) => + switch si.psig_desc { + | _ when comments == list{} => () + | Psig_value(valueDescription) => walkValueDescription(valueDescription, t, comments) + | Psig_type(_, typeDeclarations) => walkTypeDeclarations(typeDeclarations, t, comments) + | Psig_typext(typeExtension) => walkTypeExtension(typeExtension, t, comments) + | Psig_exception(extensionConstructor) => walkExtConstr(extensionConstructor, t, comments) + | Psig_module(moduleDeclaration) => 
walkModuleDeclaration(moduleDeclaration, t, comments) + | Psig_recmodule(moduleDeclarations) => + walkList( + ~getLoc=n => n.Parsetree.pmd_loc, + ~walkNode=walkModuleDeclaration, + moduleDeclarations, + t, + comments, + ) + | Psig_modtype(moduleTypeDeclaration) => + walkModuleTypeDeclaration(moduleTypeDeclaration, t, comments) + | Psig_open(openDescription) => walkOpenDescription(openDescription, t, comments) + | Psig_include(includeDescription) => walkIncludeDescription(includeDescription, t, comments) + | Psig_attribute(attribute) => walkAttribute(attribute, t, comments) + | Psig_extension(extension, _) => walkExtension(extension, t, comments) + | Psig_class(_) | Psig_class_type(_) => () + } + + and walkIncludeDescription = (id, t, comments) => { + let (before, inside, after) = partitionByLoc(comments, id.pincl_mod.pmty_loc) + attach(t.leading, id.pincl_mod.pmty_loc, before) + walkModType(id.pincl_mod, t, inside) + attach(t.trailing, id.pincl_mod.pmty_loc, after) + } + + and walkModuleDeclaration = (md, t, comments) => { + let (leading, trailing) = partitionLeadingTrailing(comments, md.pmd_name.loc) + attach(t.leading, md.pmd_name.loc, leading) + let (afterName, rest) = partitionAdjacentTrailing(md.pmd_name.loc, trailing) + attach(t.trailing, md.pmd_name.loc, afterName) + let (leading, inside, trailing) = partitionByLoc(rest, md.pmd_type.pmty_loc) + attach(t.leading, md.pmd_type.pmty_loc, leading) + walkModType(md.pmd_type, t, inside) + attach(t.trailing, md.pmd_type.pmty_loc, trailing) + } + + and walkList: 'node. ( + ~prevLoc: Location.t=?, + ~getLoc: 'node => Location.t, + ~walkNode: ('node, t, list) => unit, + list<'node>, + t, + list, + ) => unit = (~prevLoc=?, ~getLoc, ~walkNode, l, t, comments) => { + open Location + switch l { + | _ when comments == list{} => () + | list{} => + switch prevLoc { + | Some(loc) => attach(t.trailing, loc, comments) + | None => () + } + | list{node, ...rest} => + let currLoc = getLoc(node) + let (leading, inside, trailing) = partitionByLoc(comments, currLoc) + switch prevLoc { + | None => + /* first node, all leading comments attach here */ + attach(t.leading, currLoc, leading) + | Some(prevLoc) => + /* Same line */ + if prevLoc.loc_end.pos_lnum === currLoc.loc_start.pos_lnum { + let (afterPrev, beforeCurr) = partitionAdjacentTrailing(prevLoc, leading) + let () = attach(t.trailing, prevLoc, afterPrev) + attach(t.leading, currLoc, beforeCurr) + } else { + let (onSameLineAsPrev, afterPrev) = partitionByOnSameLine(prevLoc, leading) + let () = attach(t.trailing, prevLoc, onSameLineAsPrev) + let (leading, _inside, _trailing) = partitionByLoc(afterPrev, currLoc) + attach(t.leading, currLoc, leading) + } + } + walkNode(node, t, inside) + walkList(~prevLoc=currLoc, ~getLoc, ~walkNode, rest, t, trailing) + } + } + + /* The parsetree doesn't always contain location info about the opening or + * closing token of a "list-of-things". This routine visits the whole list, + * but returns any remaining comments that likely fall after the whole list. */ + and visitListButContinueWithRemainingComments: 'node. 
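/* Illustrative behaviour of walkList above (source shapes are hypothetical):
 * given two items `let a = 1 // one` and `let b = 2` on separate lines, the
 * comment ends on the same line as the first item and becomes its trailing
 * comment; a comment sitting on its own line before `let b = 2` would instead
 * be attached as leading of that second item. */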
( + ~prevLoc: Location.t=?, + ~newlineDelimited: bool, + ~getLoc: 'node => Location.t, + ~walkNode: ('node, t, list) => unit, + list<'node>, + t, + list, + ) => list = (~prevLoc=?, ~newlineDelimited, ~getLoc, ~walkNode, l, t, comments) => { + open Location + switch l { + | _ when comments == list{} => list{} + | list{} => + switch prevLoc { + | Some(loc) => + let (afterPrev, rest) = if newlineDelimited { + partitionByOnSameLine(loc, comments) + } else { + partitionAdjacentTrailing(loc, comments) + } + + attach(t.trailing, loc, afterPrev) + rest + | None => comments + } + | list{node, ...rest} => + let currLoc = getLoc(node) + let (leading, inside, trailing) = partitionByLoc(comments, currLoc) + let () = switch prevLoc { + | None => + /* first node, all leading comments attach here */ + attach(t.leading, currLoc, leading) + () + | Some(prevLoc) => + /* Same line */ + if prevLoc.loc_end.pos_lnum === currLoc.loc_start.pos_lnum { + let (afterPrev, beforeCurr) = partitionAdjacentTrailing(prevLoc, leading) + let () = attach(t.trailing, prevLoc, afterPrev) + let () = attach(t.leading, currLoc, beforeCurr) + } else { + let (onSameLineAsPrev, afterPrev) = partitionByOnSameLine(prevLoc, leading) + let () = attach(t.trailing, prevLoc, onSameLineAsPrev) + let (leading, _inside, _trailing) = partitionByLoc(afterPrev, currLoc) + let () = attach(t.leading, currLoc, leading) + } + } + + walkNode(node, t, inside) + visitListButContinueWithRemainingComments( + ~prevLoc=currLoc, + ~getLoc, + ~walkNode, + ~newlineDelimited, + rest, + t, + trailing, + ) + } + } + + and walkValueBindings = (vbs, t, comments) => + walkList(~getLoc=n => n.Parsetree.pvb_loc, ~walkNode=walkValueBinding, vbs, t, comments) + + and walkOpenDescription = (openDescription, t, comments) => { + let loc = openDescription.popen_lid.loc + let (leading, trailing) = partitionLeadingTrailing(comments, loc) + attach(t.leading, loc, leading) + attach(t.trailing, loc, trailing) + } + + and walkTypeDeclarations = (typeDeclarations, t, comments) => + walkList( + ~getLoc=n => n.Parsetree.ptype_loc, + ~walkNode=walkTypeDeclaration, + typeDeclarations, + t, + comments, + ) + + and walkTypeParam = ((typexpr, _variance), t, comments) => walkTypExpr(typexpr, t, comments) + + and walkTypeDeclaration = (td, t, comments) => { + let (beforeName, rest) = partitionLeadingTrailing(comments, td.ptype_name.loc) + attach(t.leading, td.ptype_name.loc, beforeName) + + let (afterName, rest) = partitionAdjacentTrailing(td.ptype_name.loc, rest) + attach(t.trailing, td.ptype_name.loc, afterName) + + /* type params */ + let rest = switch td.ptype_params { + | list{} => rest + | typeParams => + visitListButContinueWithRemainingComments( + ~getLoc=((typexpr, _variance)) => typexpr.Parsetree.ptyp_loc, + ~walkNode=walkTypeParam, + ~newlineDelimited=false, + typeParams, + t, + rest, + ) + } + + /* manifest: = typexpr */ + let rest = switch td.ptype_manifest { + | Some(typexpr) => + let (beforeTyp, insideTyp, afterTyp) = partitionByLoc(rest, typexpr.ptyp_loc) + attach(t.leading, typexpr.ptyp_loc, beforeTyp) + walkTypExpr(typexpr, t, insideTyp) + let (afterTyp, rest) = partitionAdjacentTrailing(typexpr.Parsetree.ptyp_loc, afterTyp) + attach(t.trailing, typexpr.ptyp_loc, afterTyp) + rest + | None => rest + } + + let rest = switch td.ptype_kind { + | Ptype_abstract | Ptype_open => rest + | Ptype_record(labelDeclarations) => + let () = walkList( + ~getLoc=ld => ld.Parsetree.pld_loc, + ~walkNode=walkLabelDeclaration, + labelDeclarations, + t, + rest, + ) + + list{} + | 
Ptype_variant(constructorDeclarations) => + walkConstructorDeclarations(constructorDeclarations, t, rest) + } + + attach(t.trailing, td.ptype_loc, rest) + } + + and walkLabelDeclarations = (lds, t, comments) => + visitListButContinueWithRemainingComments( + ~getLoc=ld => ld.Parsetree.pld_loc, + ~walkNode=walkLabelDeclaration, + ~newlineDelimited=false, + lds, + t, + comments, + ) + + and walkLabelDeclaration = (ld, t, comments) => { + let (beforeName, rest) = partitionLeadingTrailing(comments, ld.pld_name.loc) + attach(t.leading, ld.pld_name.loc, beforeName) + let (afterName, rest) = partitionAdjacentTrailing(ld.pld_name.loc, rest) + attach(t.trailing, ld.pld_name.loc, afterName) + let (beforeTyp, insideTyp, afterTyp) = partitionByLoc(rest, ld.pld_type.ptyp_loc) + attach(t.leading, ld.pld_type.ptyp_loc, beforeTyp) + walkTypExpr(ld.pld_type, t, insideTyp) + attach(t.trailing, ld.pld_type.ptyp_loc, afterTyp) + } + + and walkConstructorDeclarations = (cds, t, comments) => + visitListButContinueWithRemainingComments( + ~getLoc=cd => cd.Parsetree.pcd_loc, + ~walkNode=walkConstructorDeclaration, + ~newlineDelimited=false, + cds, + t, + comments, + ) + + and walkConstructorDeclaration = (cd, t, comments) => { + let (beforeName, rest) = partitionLeadingTrailing(comments, cd.pcd_name.loc) + attach(t.leading, cd.pcd_name.loc, beforeName) + let (afterName, rest) = partitionAdjacentTrailing(cd.pcd_name.loc, rest) + attach(t.trailing, cd.pcd_name.loc, afterName) + let rest = walkConstructorArguments(cd.pcd_args, t, rest) + + let rest = switch cd.pcd_res { + | Some(typexpr) => + let (beforeTyp, insideTyp, afterTyp) = partitionByLoc(rest, typexpr.ptyp_loc) + attach(t.leading, typexpr.ptyp_loc, beforeTyp) + walkTypExpr(typexpr, t, insideTyp) + let (afterTyp, rest) = partitionAdjacentTrailing(typexpr.Parsetree.ptyp_loc, afterTyp) + attach(t.trailing, typexpr.ptyp_loc, afterTyp) + rest + | None => rest + } + + attach(t.trailing, cd.pcd_loc, rest) + } + + and walkConstructorArguments = (args, t, comments) => + switch args { + | Pcstr_tuple(typexprs) => + visitListButContinueWithRemainingComments( + ~getLoc=n => n.Parsetree.ptyp_loc, + ~walkNode=walkTypExpr, + ~newlineDelimited=false, + typexprs, + t, + comments, + ) + | Pcstr_record(labelDeclarations) => walkLabelDeclarations(labelDeclarations, t, comments) + } + + and walkValueBinding = (vb, t, comments) => { + open Location + + let vb = { + open Parsetree + switch (vb.pvb_pat, vb.pvb_expr) { + | ( + {ppat_desc: Ppat_constraint(pat, {ptyp_desc: Ptyp_poly(list{}, t)})}, + {pexp_desc: Pexp_constraint(expr, _typ)}, + ) => { + ...vb, + pvb_pat: Ast_helper.Pat.constraint_( + ~loc={...pat.ppat_loc, loc_end: t.Parsetree.ptyp_loc.loc_end}, + pat, + t, + ), + pvb_expr: expr, + } + | ( + {ppat_desc: Ppat_constraint(pat, {ptyp_desc: Ptyp_poly(list{_, ..._}, t)})}, + {pexp_desc: Pexp_fun(_)}, + ) => { + ...vb, + pvb_pat: { + ...vb.pvb_pat, + ppat_loc: {...pat.ppat_loc, loc_end: t.ptyp_loc.loc_end}, + }, + } + | _ => vb + } + } + + let patternLoc = vb.Parsetree.pvb_pat.ppat_loc + let exprLoc = vb.Parsetree.pvb_expr.pexp_loc + + let (leading, inside, trailing) = partitionByLoc(comments, patternLoc) + + /* everything before start of pattern can only be leading on the pattern: + * let |* before *| a = 1 */ + attach(t.leading, patternLoc, leading) + walkPattern(vb.Parsetree.pvb_pat, t, inside) + /* let pattern = expr -> pattern and expr on the same line */ + /* if patternLoc.loc_end.pos_lnum == exprLoc.loc_start.pos_lnum then ( */ + let (afterPat, surroundingExpr) = 
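/* Concrete case for the comment placement described above (using the |* ... *|
 * notation from that comment): in `let |* before *| a |* after *| = 1`, the
 * first comment lies before the pattern and is attached as leading of `a`; the
 * second directly follows the pattern with no token in between, so
 * partitionAdjacentTrailing marks it as the pattern's trailing comment, and
 * everything to the right of `=` is partitioned against the expression. */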
partitionAdjacentTrailing(patternLoc, trailing) + + attach(t.trailing, patternLoc, afterPat) + let (beforeExpr, insideExpr, afterExpr) = partitionByLoc(surroundingExpr, exprLoc) + if isBlockExpr(vb.pvb_expr) { + walkExpr(vb.pvb_expr, t, List.concat(list{beforeExpr, insideExpr, afterExpr})) + } else { + attach(t.leading, exprLoc, beforeExpr) + walkExpr(vb.Parsetree.pvb_expr, t, insideExpr) + attach(t.trailing, exprLoc, afterExpr) + } + } + + and walkExpr = (expr, t, comments) => { + open Location + switch expr.Parsetree.pexp_desc { + | _ when comments == list{} => () + | Pexp_constant(_) => + let (leading, trailing) = partitionLeadingTrailing(comments, expr.pexp_loc) + attach(t.leading, expr.pexp_loc, leading) + attach(t.trailing, expr.pexp_loc, trailing) + | Pexp_ident(longident) => + let (leading, trailing) = partitionLeadingTrailing(comments, longident.loc) + attach(t.leading, longident.loc, leading) + attach(t.trailing, longident.loc, trailing) + | Pexp_let(_recFlag, valueBindings, expr2) => + let comments = visitListButContinueWithRemainingComments(~getLoc=n => + if n.Parsetree.pvb_pat.ppat_loc.loc_ghost { + n.pvb_expr.pexp_loc + } else { + n.Parsetree.pvb_loc + } + , ~walkNode=walkValueBinding, ~newlineDelimited=true, valueBindings, t, comments) + + if isBlockExpr(expr2) { + walkExpr(expr2, t, comments) + } else { + let (leading, inside, trailing) = partitionByLoc(comments, expr2.pexp_loc) + attach(t.leading, expr2.pexp_loc, leading) + walkExpr(expr2, t, inside) + attach(t.trailing, expr2.pexp_loc, trailing) + } + | Pexp_sequence(expr1, expr2) => + let (leading, inside, trailing) = partitionByLoc(comments, expr1.pexp_loc) + let comments = if isBlockExpr(expr1) { + let (afterExpr, comments) = partitionByOnSameLine(expr1.pexp_loc, trailing) + walkExpr(expr1, t, List.concat(list{leading, inside, afterExpr})) + comments + } else { + attach(t.leading, expr1.pexp_loc, leading) + walkExpr(expr1, t, inside) + let (afterExpr, comments) = partitionByOnSameLine(expr1.pexp_loc, trailing) + attach(t.trailing, expr1.pexp_loc, afterExpr) + comments + } + if isBlockExpr(expr2) { + walkExpr(expr2, t, comments) + } else { + let (leading, inside, trailing) = partitionByLoc(comments, expr2.pexp_loc) + attach(t.leading, expr2.pexp_loc, leading) + walkExpr(expr2, t, inside) + attach(t.trailing, expr2.pexp_loc, trailing) + } + | Pexp_open(_override, longident, expr2) => + let (leading, comments) = partitionLeadingTrailing(comments, expr.pexp_loc) + attach(t.leading, {...expr.pexp_loc, loc_end: longident.loc.loc_end}, leading) + let (leading, trailing) = partitionLeadingTrailing(comments, longident.loc) + attach(t.leading, longident.loc, leading) + let (afterLongident, rest) = partitionByOnSameLine(longident.loc, trailing) + attach(t.trailing, longident.loc, afterLongident) + if isBlockExpr(expr2) { + walkExpr(expr2, t, rest) + } else { + let (leading, inside, trailing) = partitionByLoc(rest, expr2.pexp_loc) + attach(t.leading, expr2.pexp_loc, leading) + walkExpr(expr2, t, inside) + attach(t.trailing, expr2.pexp_loc, trailing) + } + | Pexp_extension( + {txt: "bs.obj"}, + PStr(list{{pstr_desc: Pstr_eval({pexp_desc: Pexp_record(rows, _)}, list{})}}), + ) => + walkList(~getLoc=((longident, expr): (Asttypes.loc, Parsetree.expression)) => { + ...longident.loc, + loc_end: expr.pexp_loc.loc_end, + }, ~walkNode=walkExprRecordRow, rows, t, comments) + | Pexp_extension(extension) => walkExtension(extension, t, comments) + | Pexp_letexception(extensionConstructor, expr2) => + let (leading, comments) = 
partitionLeadingTrailing(comments, expr.pexp_loc) + attach(t.leading, {...expr.pexp_loc, loc_end: extensionConstructor.pext_loc.loc_end}, leading) + let (leading, inside, trailing) = partitionByLoc(comments, extensionConstructor.pext_loc) + attach(t.leading, extensionConstructor.pext_loc, leading) + walkExtConstr(extensionConstructor, t, inside) + let (afterExtConstr, rest) = partitionByOnSameLine(extensionConstructor.pext_loc, trailing) + attach(t.trailing, extensionConstructor.pext_loc, afterExtConstr) + if isBlockExpr(expr2) { + walkExpr(expr2, t, rest) + } else { + let (leading, inside, trailing) = partitionByLoc(rest, expr2.pexp_loc) + attach(t.leading, expr2.pexp_loc, leading) + walkExpr(expr2, t, inside) + attach(t.trailing, expr2.pexp_loc, trailing) + } + | Pexp_letmodule(stringLoc, modExpr, expr2) => + let (leading, comments) = partitionLeadingTrailing(comments, expr.pexp_loc) + attach(t.leading, {...expr.pexp_loc, loc_end: modExpr.pmod_loc.loc_end}, leading) + let (leading, trailing) = partitionLeadingTrailing(comments, stringLoc.loc) + attach(t.leading, stringLoc.loc, leading) + let (afterString, rest) = partitionAdjacentTrailing(stringLoc.loc, trailing) + attach(t.trailing, stringLoc.loc, afterString) + let (beforeModExpr, insideModExpr, afterModExpr) = partitionByLoc(rest, modExpr.pmod_loc) + attach(t.leading, modExpr.pmod_loc, beforeModExpr) + walkModExpr(modExpr, t, insideModExpr) + let (afterModExpr, rest) = partitionByOnSameLine(modExpr.pmod_loc, afterModExpr) + attach(t.trailing, modExpr.pmod_loc, afterModExpr) + if isBlockExpr(expr2) { + walkExpr(expr2, t, rest) + } else { + let (leading, inside, trailing) = partitionByLoc(rest, expr2.pexp_loc) + attach(t.leading, expr2.pexp_loc, leading) + walkExpr(expr2, t, inside) + attach(t.trailing, expr2.pexp_loc, trailing) + } + | Pexp_assert(expr) | Pexp_lazy(expr) => + if isBlockExpr(expr) { + walkExpr(expr, t, comments) + } else { + let (leading, inside, trailing) = partitionByLoc(comments, expr.pexp_loc) + attach(t.leading, expr.pexp_loc, leading) + walkExpr(expr, t, inside) + attach(t.trailing, expr.pexp_loc, trailing) + } + | Pexp_coerce(expr, optTypexpr, typexpr) => + let (leading, inside, trailing) = partitionByLoc(comments, expr.pexp_loc) + attach(t.leading, expr.pexp_loc, leading) + walkExpr(expr, t, inside) + let (afterExpr, rest) = partitionAdjacentTrailing(expr.pexp_loc, trailing) + attach(t.trailing, expr.pexp_loc, afterExpr) + let rest = switch optTypexpr { + | Some(typexpr) => + let (leading, inside, trailing) = partitionByLoc(comments, typexpr.ptyp_loc) + attach(t.leading, typexpr.ptyp_loc, leading) + walkTypExpr(typexpr, t, inside) + let (afterTyp, rest) = partitionAdjacentTrailing(typexpr.ptyp_loc, trailing) + attach(t.trailing, typexpr.ptyp_loc, afterTyp) + rest + | None => rest + } + + let (leading, inside, trailing) = partitionByLoc(rest, typexpr.ptyp_loc) + attach(t.leading, typexpr.ptyp_loc, leading) + walkTypExpr(typexpr, t, inside) + attach(t.trailing, typexpr.ptyp_loc, trailing) + | Pexp_constraint(expr, typexpr) => + let (leading, inside, trailing) = partitionByLoc(comments, expr.pexp_loc) + attach(t.leading, expr.pexp_loc, leading) + walkExpr(expr, t, inside) + let (afterExpr, rest) = partitionAdjacentTrailing(expr.pexp_loc, trailing) + attach(t.trailing, expr.pexp_loc, afterExpr) + let (leading, inside, trailing) = partitionByLoc(rest, typexpr.ptyp_loc) + attach(t.leading, typexpr.ptyp_loc, leading) + walkTypExpr(typexpr, t, inside) + attach(t.trailing, typexpr.ptyp_loc, trailing) + | 
Pexp_tuple(list{}) | Pexp_array(list{}) | Pexp_construct({txt: Longident.Lident("[]")}, _) => + attach(t.inside, expr.pexp_loc, comments) + | Pexp_construct({txt: Longident.Lident("::")}, _) => + walkList( + ~getLoc=n => n.Parsetree.pexp_loc, + ~walkNode=walkExpr, + collectListExprs(list{}, expr), + t, + comments, + ) + | Pexp_construct(longident, args) => + let (leading, trailing) = partitionLeadingTrailing(comments, longident.loc) + attach(t.leading, longident.loc, leading) + switch args { + | Some(expr) => + let (afterLongident, rest) = partitionAdjacentTrailing(longident.loc, trailing) + attach(t.trailing, longident.loc, afterLongident) + walkExpr(expr, t, rest) + | None => attach(t.trailing, longident.loc, trailing) + } + | Pexp_variant(_label, None) => () + | Pexp_variant(_label, Some(expr)) => walkExpr(expr, t, comments) + | Pexp_array(exprs) | Pexp_tuple(exprs) => + walkList(~getLoc=n => n.Parsetree.pexp_loc, ~walkNode=walkExpr, exprs, t, comments) + | Pexp_record(rows, spreadExpr) => + let comments = switch spreadExpr { + | None => comments + | Some(expr) => + let (leading, inside, trailing) = partitionByLoc(comments, expr.pexp_loc) + attach(t.leading, expr.pexp_loc, leading) + walkExpr(expr, t, inside) + let (afterExpr, rest) = partitionAdjacentTrailing(expr.pexp_loc, trailing) + attach(t.trailing, expr.pexp_loc, afterExpr) + rest + } + + walkList(~getLoc=((longident, expr): (Asttypes.loc, Parsetree.expression)) => { + ...longident.loc, + loc_end: expr.pexp_loc.loc_end, + }, ~walkNode=walkExprRecordRow, rows, t, comments) + | Pexp_field(expr, longident) => + let (leading, inside, trailing) = partitionByLoc(comments, expr.pexp_loc) + let trailing = if isBlockExpr(expr) { + let (afterExpr, rest) = partitionAdjacentTrailing(expr.pexp_loc, trailing) + walkExpr(expr, t, List.concat(list{leading, inside, afterExpr})) + rest + } else { + attach(t.leading, expr.pexp_loc, leading) + walkExpr(expr, t, inside) + trailing + } + let (afterExpr, rest) = partitionAdjacentTrailing(expr.pexp_loc, trailing) + attach(t.trailing, expr.pexp_loc, afterExpr) + let (leading, trailing) = partitionLeadingTrailing(rest, longident.loc) + attach(t.leading, longident.loc, leading) + attach(t.trailing, longident.loc, trailing) + | Pexp_setfield(expr1, longident, expr2) => + let (leading, inside, trailing) = partitionByLoc(comments, expr1.pexp_loc) + let rest = if isBlockExpr(expr1) { + let (afterExpr, rest) = partitionAdjacentTrailing(expr1.pexp_loc, trailing) + walkExpr(expr1, t, List.concat(list{leading, inside, afterExpr})) + rest + } else { + let (afterExpr, rest) = partitionAdjacentTrailing(expr1.pexp_loc, trailing) + attach(t.leading, expr1.pexp_loc, leading) + walkExpr(expr1, t, inside) + attach(t.trailing, expr1.pexp_loc, afterExpr) + rest + } + let (beforeLongident, afterLongident) = partitionLeadingTrailing(rest, longident.loc) + attach(t.leading, longident.loc, beforeLongident) + let (afterLongident, rest) = partitionAdjacentTrailing(longident.loc, afterLongident) + attach(t.trailing, longident.loc, afterLongident) + if isBlockExpr(expr2) { + walkExpr(expr2, t, rest) + } else { + let (leading, inside, trailing) = partitionByLoc(rest, expr2.pexp_loc) + attach(t.leading, expr2.pexp_loc, leading) + walkExpr(expr2, t, inside) + attach(t.trailing, expr2.pexp_loc, trailing) + } + | Pexp_ifthenelse(ifExpr, thenExpr, elseExpr) => + let (leading, inside, trailing) = partitionByLoc(comments, ifExpr.pexp_loc) + let comments = if isBlockExpr(ifExpr) { + let (afterExpr, comments) = 
partitionAdjacentTrailing(ifExpr.pexp_loc, trailing) + walkExpr(ifExpr, t, List.concat(list{leading, inside, afterExpr})) + comments + } else { + attach(t.leading, ifExpr.pexp_loc, leading) + walkExpr(ifExpr, t, inside) + let (afterExpr, comments) = partitionAdjacentTrailing(ifExpr.pexp_loc, trailing) + attach(t.trailing, ifExpr.pexp_loc, afterExpr) + comments + } + let (leading, inside, trailing) = partitionByLoc(comments, thenExpr.pexp_loc) + let comments = if isBlockExpr(thenExpr) { + let (afterExpr, trailing) = partitionAdjacentTrailing(thenExpr.pexp_loc, trailing) + walkExpr(thenExpr, t, List.concat(list{leading, inside, afterExpr})) + trailing + } else { + attach(t.leading, thenExpr.pexp_loc, leading) + walkExpr(thenExpr, t, inside) + let (afterExpr, comments) = partitionAdjacentTrailing(thenExpr.pexp_loc, trailing) + attach(t.trailing, thenExpr.pexp_loc, afterExpr) + comments + } + switch elseExpr { + | None => () + | Some(expr) => + if isBlockExpr(expr) { + walkExpr(expr, t, comments) + } else { + let (leading, inside, trailing) = partitionByLoc(comments, expr.pexp_loc) + attach(t.leading, expr.pexp_loc, leading) + walkExpr(expr, t, inside) + attach(t.trailing, expr.pexp_loc, trailing) + } + } + | Pexp_while(expr1, expr2) => + let (leading, inside, trailing) = partitionByLoc(comments, expr1.pexp_loc) + let rest = if isBlockExpr(expr1) { + let (afterExpr, rest) = partitionAdjacentTrailing(expr1.pexp_loc, trailing) + walkExpr(expr1, t, List.concat(list{leading, inside, afterExpr})) + rest + } else { + attach(t.leading, expr1.pexp_loc, leading) + walkExpr(expr1, t, inside) + let (afterExpr, rest) = partitionAdjacentTrailing(expr1.pexp_loc, trailing) + attach(t.trailing, expr1.pexp_loc, afterExpr) + rest + } + if isBlockExpr(expr2) { + walkExpr(expr2, t, rest) + } else { + let (leading, inside, trailing) = partitionByLoc(rest, expr2.pexp_loc) + attach(t.leading, expr2.pexp_loc, leading) + walkExpr(expr2, t, inside) + attach(t.trailing, expr2.pexp_loc, trailing) + } + | Pexp_for(pat, expr1, expr2, _, expr3) => + let (leading, inside, trailing) = partitionByLoc(comments, pat.ppat_loc) + attach(t.leading, pat.ppat_loc, leading) + walkPattern(pat, t, inside) + let (afterPat, rest) = partitionAdjacentTrailing(pat.ppat_loc, trailing) + attach(t.trailing, pat.ppat_loc, afterPat) + let (leading, inside, trailing) = partitionByLoc(rest, expr1.pexp_loc) + attach(t.leading, expr1.pexp_loc, leading) + walkExpr(expr1, t, inside) + let (afterExpr, rest) = partitionAdjacentTrailing(expr1.pexp_loc, trailing) + attach(t.trailing, expr1.pexp_loc, afterExpr) + let (leading, inside, trailing) = partitionByLoc(rest, expr2.pexp_loc) + attach(t.leading, expr2.pexp_loc, leading) + walkExpr(expr2, t, inside) + let (afterExpr, rest) = partitionAdjacentTrailing(expr2.pexp_loc, trailing) + attach(t.trailing, expr2.pexp_loc, afterExpr) + if isBlockExpr(expr3) { + walkExpr(expr3, t, rest) + } else { + let (leading, inside, trailing) = partitionByLoc(rest, expr3.pexp_loc) + attach(t.leading, expr3.pexp_loc, leading) + walkExpr(expr3, t, inside) + attach(t.trailing, expr3.pexp_loc, trailing) + } + | Pexp_pack(modExpr) => + let (before, inside, after) = partitionByLoc(comments, modExpr.pmod_loc) + attach(t.leading, modExpr.pmod_loc, before) + walkModExpr(modExpr, t, inside) + attach(t.trailing, modExpr.pmod_loc, after) + | Pexp_match(expr, cases) | Pexp_try(expr, cases) => + let (before, inside, after) = partitionByLoc(comments, expr.pexp_loc) + let after = if isBlockExpr(expr) { + let (afterExpr, rest) = 
partitionAdjacentTrailing(expr.pexp_loc, after) + walkExpr(expr, t, List.concat(list{before, inside, afterExpr})) + rest + } else { + attach(t.leading, expr.pexp_loc, before) + walkExpr(expr, t, inside) + after + } + let (afterExpr, rest) = partitionAdjacentTrailing(expr.pexp_loc, after) + attach(t.trailing, expr.pexp_loc, afterExpr) + walkList(~getLoc=n => { + ...n.Parsetree.pc_lhs.ppat_loc, + loc_end: n.pc_rhs.pexp_loc.loc_end, + }, ~walkNode=walkCase, cases, t, rest) + /* unary expression: todo use parsetreeviewer */ + | Pexp_apply( + {pexp_desc: Pexp_ident({txt: Longident.Lident("~+" | "~+." | "~-" | "~-." | "not" | "!")})}, + list{(Nolabel, argExpr)}, + ) => + let (before, inside, after) = partitionByLoc(comments, argExpr.pexp_loc) + attach(t.leading, argExpr.pexp_loc, before) + walkExpr(argExpr, t, inside) + attach(t.trailing, argExpr.pexp_loc, after) + /* binary expression */ + | Pexp_apply( + { + pexp_desc: + Pexp_ident({ + txt: + Longident.Lident( + ":=" + | "||" + | "&&" + | "=" + | "==" + | "<" + | ">" + | "!=" + | "!==" + | "<=" + | ">=" + | "|>" + | "+" + | "+." + | "-" + | "-." + | "++" + | "^" + | "*" + | "*." + | "/" + | "/." + | "**" + | "|." + | "<>", + ), + }), + }, + list{(Nolabel, operand1), (Nolabel, operand2)}, + ) => + let (before, inside, after) = partitionByLoc(comments, operand1.pexp_loc) + attach(t.leading, operand1.pexp_loc, before) + walkExpr(operand1, t, inside) + let (afterOperand1, rest) = partitionAdjacentTrailing(operand1.pexp_loc, after) + attach(t.trailing, operand1.pexp_loc, afterOperand1) + let (before, inside, after) = partitionByLoc(rest, operand2.pexp_loc) + attach(t.leading, operand2.pexp_loc, before) + walkExpr(operand2, t, inside) /* (List.concat [inside; after]); */ + attach(t.trailing, operand2.pexp_loc, after) + | Pexp_apply(callExpr, arguments) => + let (before, inside, after) = partitionByLoc(comments, callExpr.pexp_loc) + let after = if isBlockExpr(callExpr) { + let (afterExpr, rest) = partitionAdjacentTrailing(callExpr.pexp_loc, after) + walkExpr(callExpr, t, List.concat(list{before, inside, afterExpr})) + rest + } else { + attach(t.leading, callExpr.pexp_loc, before) + walkExpr(callExpr, t, inside) + after + } + let (afterExpr, rest) = partitionAdjacentTrailing(callExpr.pexp_loc, after) + attach(t.trailing, callExpr.pexp_loc, afterExpr) + walkList(~getLoc=((_argLabel, expr)) => { + let loc = switch expr.Parsetree.pexp_attributes { + | list{({Location.txt: "ns.namedArgLoc", loc}, _), ..._attrs} => { + ...loc, + loc_end: expr.pexp_loc.loc_end, + } + | _ => expr.pexp_loc + } + + loc + }, ~walkNode=walkExprArgument, arguments, t, rest) + | Pexp_fun(_, _, _, _) | Pexp_newtype(_) => + let (_, parameters, returnExpr) = funExpr(expr) + let comments = visitListButContinueWithRemainingComments( + ~newlineDelimited=false, + ~walkNode=walkExprPararameter, + ~getLoc=((_attrs, _argLbl, exprOpt, pattern)) => { + open Parsetree + let startPos = switch pattern.ppat_attributes { + | list{({Location.txt: "ns.namedArgLoc", loc}, _), ..._attrs} => loc.loc_start + | _ => pattern.ppat_loc.loc_start + } + + switch exprOpt { + | None => {...pattern.ppat_loc, loc_start: startPos} + | Some(expr) => { + ...pattern.ppat_loc, + loc_start: startPos, + loc_end: expr.pexp_loc.loc_end, + } + } + }, + parameters, + t, + comments, + ) + + switch returnExpr.pexp_desc { + | Pexp_constraint(expr, typ) + when expr.pexp_loc.loc_start.pos_cnum >= typ.ptyp_loc.loc_end.pos_cnum => + let (leading, inside, trailing) = partitionByLoc(comments, typ.ptyp_loc) + attach(t.leading, 
typ.ptyp_loc, leading) + walkTypExpr(typ, t, inside) + let (afterTyp, comments) = partitionAdjacentTrailing(typ.ptyp_loc, trailing) + attach(t.trailing, typ.ptyp_loc, afterTyp) + if isBlockExpr(expr) { + walkExpr(expr, t, comments) + } else { + let (leading, inside, trailing) = partitionByLoc(comments, expr.pexp_loc) + attach(t.leading, expr.pexp_loc, leading) + walkExpr(expr, t, inside) + attach(t.trailing, expr.pexp_loc, trailing) + } + | _ => + if isBlockExpr(returnExpr) { + walkExpr(returnExpr, t, comments) + } else { + let (leading, inside, trailing) = partitionByLoc(comments, returnExpr.pexp_loc) + attach(t.leading, returnExpr.pexp_loc, leading) + walkExpr(returnExpr, t, inside) + attach(t.trailing, returnExpr.pexp_loc, trailing) + } + } + | _ => () + } + } + + and walkExprPararameter = ((_attrs, _argLbl, exprOpt, pattern), t, comments) => { + let (leading, inside, trailing) = partitionByLoc(comments, pattern.ppat_loc) + attach(t.leading, pattern.ppat_loc, leading) + walkPattern(pattern, t, inside) + switch exprOpt { + | Some(expr) => + let (_afterPat, rest) = partitionAdjacentTrailing(pattern.ppat_loc, trailing) + attach(t.trailing, pattern.ppat_loc, trailing) + if isBlockExpr(expr) { + walkExpr(expr, t, rest) + } else { + let (leading, inside, trailing) = partitionByLoc(rest, expr.pexp_loc) + attach(t.leading, expr.pexp_loc, leading) + walkExpr(expr, t, inside) + attach(t.trailing, expr.pexp_loc, trailing) + } + | None => attach(t.trailing, pattern.ppat_loc, trailing) + } + } + + and walkExprArgument = ((_argLabel, expr), t, comments) => + switch expr.Parsetree.pexp_attributes { + | list{({Location.txt: "ns.namedArgLoc", loc}, _), ..._attrs} => + let (leading, trailing) = partitionLeadingTrailing(comments, loc) + attach(t.leading, loc, leading) + let (afterLabel, rest) = partitionAdjacentTrailing(loc, trailing) + attach(t.trailing, loc, afterLabel) + let (before, inside, after) = partitionByLoc(rest, expr.pexp_loc) + attach(t.leading, expr.pexp_loc, before) + walkExpr(expr, t, inside) + attach(t.trailing, expr.pexp_loc, after) + | _ => + let (before, inside, after) = partitionByLoc(comments, expr.pexp_loc) + attach(t.leading, expr.pexp_loc, before) + walkExpr(expr, t, inside) + attach(t.trailing, expr.pexp_loc, after) + } + + and walkCase = (case, t, comments) => { + let (before, inside, after) = partitionByLoc(comments, case.pc_lhs.ppat_loc) + /* cases don't have a location on their own, leading comments should go + * after the bar on the pattern */ + walkPattern(case.pc_lhs, t, List.concat(list{before, inside})) + let (afterPat, rest) = partitionAdjacentTrailing(case.pc_lhs.ppat_loc, after) + attach(t.trailing, case.pc_lhs.ppat_loc, afterPat) + let comments = switch case.pc_guard { + | Some(expr) => + let (before, inside, after) = partitionByLoc(rest, expr.pexp_loc) + let (afterExpr, rest) = partitionAdjacentTrailing(expr.pexp_loc, after) + if isBlockExpr(expr) { + walkExpr(expr, t, List.concat(list{before, inside, afterExpr})) + } else { + attach(t.leading, expr.pexp_loc, before) + walkExpr(expr, t, inside) + attach(t.trailing, expr.pexp_loc, afterExpr) + } + rest + | None => rest + } + + if isBlockExpr(case.pc_rhs) { + walkExpr(case.pc_rhs, t, comments) + } else { + let (before, inside, after) = partitionByLoc(comments, case.pc_rhs.pexp_loc) + attach(t.leading, case.pc_rhs.pexp_loc, before) + walkExpr(case.pc_rhs, t, inside) + attach(t.trailing, case.pc_rhs.pexp_loc, after) + } + } + + and walkExprRecordRow = ((longident, expr), t, comments) => { + let (beforeLongident, 
afterLongident) = partitionLeadingTrailing(comments, longident.loc) + + attach(t.leading, longident.loc, beforeLongident) + let (afterLongident, rest) = partitionAdjacentTrailing(longident.loc, afterLongident) + attach(t.trailing, longident.loc, afterLongident) + let (leading, inside, trailing) = partitionByLoc(rest, expr.pexp_loc) + attach(t.leading, expr.pexp_loc, leading) + walkExpr(expr, t, inside) + attach(t.trailing, expr.pexp_loc, trailing) + } + + and walkExtConstr = (extConstr, t, comments) => { + let (leading, trailing) = partitionLeadingTrailing(comments, extConstr.pext_name.loc) + attach(t.leading, extConstr.pext_name.loc, leading) + let (afterName, rest) = partitionAdjacentTrailing(extConstr.pext_name.loc, trailing) + attach(t.trailing, extConstr.pext_name.loc, afterName) + walkExtensionConstructorKind(extConstr.pext_kind, t, rest) + } + + and walkExtensionConstructorKind = (kind, t, comments) => + switch kind { + | Pext_rebind(longident) => + let (leading, trailing) = partitionLeadingTrailing(comments, longident.loc) + attach(t.leading, longident.loc, leading) + attach(t.trailing, longident.loc, trailing) + | Pext_decl(constructorArguments, maybeTypExpr) => + let rest = walkConstructorArguments(constructorArguments, t, comments) + switch maybeTypExpr { + | None => () + | Some(typexpr) => + let (before, inside, after) = partitionByLoc(rest, typexpr.ptyp_loc) + attach(t.leading, typexpr.ptyp_loc, before) + walkTypExpr(typexpr, t, inside) + attach(t.trailing, typexpr.ptyp_loc, after) + } + } + + and walkModExpr = (modExpr, t, comments) => + switch modExpr.pmod_desc { + | Pmod_ident(longident) => + let (before, after) = partitionLeadingTrailing(comments, longident.loc) + attach(t.leading, longident.loc, before) + attach(t.trailing, longident.loc, after) + | Pmod_structure(structure) => walkStructure(structure, t, comments) + | Pmod_extension(extension) => walkExtension(extension, t, comments) + | Pmod_unpack(expr) => + let (before, inside, after) = partitionByLoc(comments, expr.pexp_loc) + attach(t.leading, expr.pexp_loc, before) + walkExpr(expr, t, inside) + attach(t.trailing, expr.pexp_loc, after) + | Pmod_constraint(modexpr, modtype) => + if modtype.pmty_loc.loc_start >= modexpr.pmod_loc.loc_end { + let (before, inside, after) = partitionByLoc(comments, modexpr.pmod_loc) + attach(t.leading, modexpr.pmod_loc, before) + walkModExpr(modexpr, t, inside) + let (after, rest) = partitionAdjacentTrailing(modexpr.pmod_loc, after) + attach(t.trailing, modexpr.pmod_loc, after) + let (before, inside, after) = partitionByLoc(rest, modtype.pmty_loc) + attach(t.leading, modtype.pmty_loc, before) + walkModType(modtype, t, inside) + attach(t.trailing, modtype.pmty_loc, after) + } else { + let (before, inside, after) = partitionByLoc(comments, modtype.pmty_loc) + attach(t.leading, modtype.pmty_loc, before) + walkModType(modtype, t, inside) + let (after, rest) = partitionAdjacentTrailing(modtype.pmty_loc, after) + attach(t.trailing, modtype.pmty_loc, after) + let (before, inside, after) = partitionByLoc(rest, modexpr.pmod_loc) + attach(t.leading, modexpr.pmod_loc, before) + walkModExpr(modexpr, t, inside) + attach(t.trailing, modexpr.pmod_loc, after) + } + | Pmod_apply(_callModExpr, _argModExpr) => + let modExprs = modExprApply(modExpr) + walkList(~getLoc=n => n.Parsetree.pmod_loc, ~walkNode=walkModExpr, modExprs, t, comments) + | Pmod_functor(_) => + let (parameters, returnModExpr) = modExprFunctor(modExpr) + let comments = visitListButContinueWithRemainingComments(~getLoc=((_, lbl, 
modTypeOption)) => + switch modTypeOption { + | None => lbl.Asttypes.loc + | Some(modType) => {...lbl.loc, loc_end: modType.Parsetree.pmty_loc.loc_end} + } + , ~walkNode=walkModExprParameter, ~newlineDelimited=false, parameters, t, comments) + + switch returnModExpr.pmod_desc { + | Pmod_constraint(modExpr, modType) + when modType.pmty_loc.loc_end.pos_cnum <= modExpr.pmod_loc.loc_start.pos_cnum => + let (before, inside, after) = partitionByLoc(comments, modType.pmty_loc) + attach(t.leading, modType.pmty_loc, before) + walkModType(modType, t, inside) + let (after, rest) = partitionAdjacentTrailing(modType.pmty_loc, after) + attach(t.trailing, modType.pmty_loc, after) + let (before, inside, after) = partitionByLoc(rest, modExpr.pmod_loc) + attach(t.leading, modExpr.pmod_loc, before) + walkModExpr(modExpr, t, inside) + attach(t.trailing, modExpr.pmod_loc, after) + | _ => + let (before, inside, after) = partitionByLoc(comments, returnModExpr.pmod_loc) + attach(t.leading, returnModExpr.pmod_loc, before) + walkModExpr(returnModExpr, t, inside) + attach(t.trailing, returnModExpr.pmod_loc, after) + } + } + + and walkModExprParameter = (parameter, t, comments) => { + let (_attrs, lbl, modTypeOption) = parameter + let (leading, trailing) = partitionLeadingTrailing(comments, lbl.loc) + attach(t.leading, lbl.loc, leading) + switch modTypeOption { + | None => attach(t.trailing, lbl.loc, trailing) + | Some(modType) => + let (afterLbl, rest) = partitionAdjacentTrailing(lbl.loc, trailing) + attach(t.trailing, lbl.loc, afterLbl) + let (before, inside, after) = partitionByLoc(rest, modType.pmty_loc) + attach(t.leading, modType.pmty_loc, before) + walkModType(modType, t, inside) + attach(t.trailing, modType.pmty_loc, after) + } + } + + and walkModType = (modType, t, comments) => + switch modType.pmty_desc { + | Pmty_ident(longident) | Pmty_alias(longident) => + let (leading, trailing) = partitionLeadingTrailing(comments, longident.loc) + attach(t.leading, longident.loc, leading) + attach(t.trailing, longident.loc, trailing) + | Pmty_signature(signature) => walkSignature(signature, t, comments) + | Pmty_extension(extension) => walkExtension(extension, t, comments) + | Pmty_typeof(modExpr) => + let (before, inside, after) = partitionByLoc(comments, modExpr.pmod_loc) + attach(t.leading, modExpr.pmod_loc, before) + walkModExpr(modExpr, t, inside) + attach(t.trailing, modExpr.pmod_loc, after) + | Pmty_with(modType, _withConstraints) => + let (before, inside, after) = partitionByLoc(comments, modType.pmty_loc) + attach(t.leading, modType.pmty_loc, before) + walkModType(modType, t, inside) + attach(t.trailing, modType.pmty_loc, after) + /* TODO: withConstraints */ + | Pmty_functor(_) => + let (parameters, returnModType) = functorType(modType) + let comments = visitListButContinueWithRemainingComments(~getLoc=((_, lbl, modTypeOption)) => + switch modTypeOption { + | None => lbl.Asttypes.loc + | Some(modType) => + if lbl.txt == "_" { + modType.Parsetree.pmty_loc + } else { + {...lbl.loc, loc_end: modType.Parsetree.pmty_loc.loc_end} + } + } + , ~walkNode=walkModTypeParameter, ~newlineDelimited=false, parameters, t, comments) + + let (before, inside, after) = partitionByLoc(comments, returnModType.pmty_loc) + attach(t.leading, returnModType.pmty_loc, before) + walkModType(returnModType, t, inside) + attach(t.trailing, returnModType.pmty_loc, after) + } + + and walkModTypeParameter = ((_, lbl, modTypeOption), t, comments) => { + let (leading, trailing) = partitionLeadingTrailing(comments, lbl.loc) + attach(t.leading, 
lbl.loc, leading) + switch modTypeOption { + | None => attach(t.trailing, lbl.loc, trailing) + | Some(modType) => + let (afterLbl, rest) = partitionAdjacentTrailing(lbl.loc, trailing) + attach(t.trailing, lbl.loc, afterLbl) + let (before, inside, after) = partitionByLoc(rest, modType.pmty_loc) + attach(t.leading, modType.pmty_loc, before) + walkModType(modType, t, inside) + attach(t.trailing, modType.pmty_loc, after) + } + } + + and walkPattern = (pat, t, comments) => { + open Location + switch pat.Parsetree.ppat_desc { + | _ when comments == list{} => () + | Ppat_alias(pat, alias) => + let (leading, inside, trailing) = partitionByLoc(comments, pat.ppat_loc) + attach(t.leading, pat.ppat_loc, leading) + walkPattern(pat, t, inside) + let (afterPat, rest) = partitionAdjacentTrailing(pat.ppat_loc, trailing) + attach(t.leading, pat.ppat_loc, leading) + attach(t.trailing, pat.ppat_loc, afterPat) + let (beforeAlias, afterAlias) = partitionLeadingTrailing(rest, alias.loc) + attach(t.leading, alias.loc, beforeAlias) + attach(t.trailing, alias.loc, afterAlias) + | Ppat_tuple(list{}) + | Ppat_array(list{}) + | Ppat_construct({txt: Longident.Lident("()")}, _) + | Ppat_construct({txt: Longident.Lident("[]")}, _) => + attach(t.inside, pat.ppat_loc, comments) + | Ppat_array(patterns) => + walkList(~getLoc=n => n.Parsetree.ppat_loc, ~walkNode=walkPattern, patterns, t, comments) + | Ppat_tuple(patterns) => + walkList(~getLoc=n => n.Parsetree.ppat_loc, ~walkNode=walkPattern, patterns, t, comments) + | Ppat_construct({txt: Longident.Lident("::")}, _) => + walkList( + ~getLoc=n => n.Parsetree.ppat_loc, + ~walkNode=walkPattern, + collectListPatterns(list{}, pat), + t, + comments, + ) + | Ppat_construct(constr, None) => + let (beforeConstr, afterConstr) = partitionLeadingTrailing(comments, constr.loc) + + attach(t.leading, constr.loc, beforeConstr) + attach(t.trailing, constr.loc, afterConstr) + | Ppat_construct(constr, Some(pat)) => + let (leading, trailing) = partitionLeadingTrailing(comments, constr.loc) + attach(t.leading, constr.loc, leading) + let (leading, inside, trailing) = partitionByLoc(trailing, pat.ppat_loc) + attach(t.leading, pat.ppat_loc, leading) + walkPattern(pat, t, inside) + attach(t.trailing, pat.ppat_loc, trailing) + | Ppat_variant(_label, None) => () + | Ppat_variant(_label, Some(pat)) => walkPattern(pat, t, comments) + | Ppat_type(_) => () + | Ppat_record(recordRows, _) => walkList(~getLoc=((longidentLoc, pattern): ( + Asttypes.loc, + Parsetree.pattern, + )) => { + ...longidentLoc.loc, + loc_end: pattern.Parsetree.ppat_loc.loc_end, + }, ~walkNode=walkPatternRecordRow, recordRows, t, comments) + | Ppat_or(pattern1, pattern2) => + let (beforePattern1, insidePattern1, afterPattern1) = partitionByLoc( + comments, + pattern1.ppat_loc, + ) + + attach(t.leading, pattern1.ppat_loc, beforePattern1) + walkPattern(pattern1, t, insidePattern1) + let (afterPattern1, rest) = partitionAdjacentTrailing(pattern1.ppat_loc, afterPattern1) + + attach(t.trailing, pattern1.ppat_loc, afterPattern1) + let (beforePattern2, insidePattern2, afterPattern2) = partitionByLoc(rest, pattern2.ppat_loc) + + attach(t.leading, pattern2.ppat_loc, beforePattern2) + walkPattern(pattern2, t, insidePattern2) + attach(t.trailing, pattern2.ppat_loc, afterPattern2) + | Ppat_constraint(pattern, typ) => + let (beforePattern, insidePattern, afterPattern) = partitionByLoc(comments, pattern.ppat_loc) + + attach(t.leading, pattern.ppat_loc, beforePattern) + walkPattern(pattern, t, insidePattern) + let (afterPattern, rest) = 
partitionAdjacentTrailing(pattern.ppat_loc, afterPattern) + + attach(t.trailing, pattern.ppat_loc, afterPattern) + let (beforeTyp, insideTyp, afterTyp) = partitionByLoc(rest, typ.ptyp_loc) + + attach(t.leading, typ.ptyp_loc, beforeTyp) + walkTypExpr(typ, t, insideTyp) + attach(t.trailing, typ.ptyp_loc, afterTyp) + | Ppat_lazy(pattern) | Ppat_exception(pattern) => + let (leading, inside, trailing) = partitionByLoc(comments, pattern.ppat_loc) + attach(t.leading, pattern.ppat_loc, leading) + walkPattern(pattern, t, inside) + attach(t.trailing, pattern.ppat_loc, trailing) + | Ppat_unpack(stringLoc) => + let (leading, trailing) = partitionLeadingTrailing(comments, stringLoc.loc) + attach(t.leading, stringLoc.loc, leading) + attach(t.trailing, stringLoc.loc, trailing) + | Ppat_extension(extension) => walkExtension(extension, t, comments) + | _ => () + } + } + + /* name: firstName */ + and walkPatternRecordRow = (row, t, comments) => + switch row { + /* punned {x} */ + | ( + {Location.txt: Longident.Lident(ident), loc: longidentLoc}, + {Parsetree.ppat_desc: Ppat_var({txt, _})}, + ) when ident == txt => + let (beforeLbl, afterLbl) = partitionLeadingTrailing(comments, longidentLoc) + + attach(t.leading, longidentLoc, beforeLbl) + attach(t.trailing, longidentLoc, afterLbl) + | (longident, pattern) => + let (beforeLbl, afterLbl) = partitionLeadingTrailing(comments, longident.loc) + + attach(t.leading, longident.loc, beforeLbl) + let (afterLbl, rest) = partitionAdjacentTrailing(longident.loc, afterLbl) + attach(t.trailing, longident.loc, afterLbl) + let (leading, inside, trailing) = partitionByLoc(rest, pattern.ppat_loc) + attach(t.leading, pattern.ppat_loc, leading) + walkPattern(pattern, t, inside) + attach(t.trailing, pattern.ppat_loc, trailing) + } + + and walkTypExpr = (typ, t, comments) => + switch typ.Parsetree.ptyp_desc { + | _ when comments == list{} => () + | Ptyp_tuple(typexprs) => + walkList(~getLoc=n => n.Parsetree.ptyp_loc, ~walkNode=walkTypExpr, typexprs, t, comments) + | Ptyp_extension(extension) => walkExtension(extension, t, comments) + | Ptyp_package(packageType) => walkPackageType(packageType, t, comments) + | Ptyp_alias(typexpr, _alias) => + let (beforeTyp, insideTyp, afterTyp) = partitionByLoc(comments, typexpr.ptyp_loc) + attach(t.leading, typexpr.ptyp_loc, beforeTyp) + walkTypExpr(typexpr, t, insideTyp) + attach(t.trailing, typexpr.ptyp_loc, afterTyp) + | Ptyp_poly(strings, typexpr) => + let comments = visitListButContinueWithRemainingComments( + ~getLoc=n => n.Asttypes.loc, + ~walkNode=(longident, t, comments) => { + let (beforeLongident, afterLongident) = partitionLeadingTrailing(comments, longident.loc) + attach(t.leading, longident.loc, beforeLongident) + attach(t.trailing, longident.loc, afterLongident) + }, + ~newlineDelimited=false, + strings, + t, + comments, + ) + + let (beforeTyp, insideTyp, afterTyp) = partitionByLoc(comments, typexpr.ptyp_loc) + attach(t.leading, typexpr.ptyp_loc, beforeTyp) + walkTypExpr(typexpr, t, insideTyp) + attach(t.trailing, typexpr.ptyp_loc, afterTyp) + | Ptyp_constr(longident, typexprs) => + let (beforeLongident, _afterLongident) = partitionLeadingTrailing(comments, longident.loc) + let (afterLongident, rest) = partitionAdjacentTrailing(longident.loc, comments) + attach(t.leading, longident.loc, beforeLongident) + attach(t.trailing, longident.loc, afterLongident) + walkList(~getLoc=n => n.Parsetree.ptyp_loc, ~walkNode=walkTypExpr, typexprs, t, rest) + | Ptyp_arrow(_) => + let (_, parameters, typexpr) = arrowType(typ) + let comments = 
walkTypeParameters(parameters, t, comments) + let (beforeTyp, insideTyp, afterTyp) = partitionByLoc(comments, typexpr.ptyp_loc) + attach(t.leading, typexpr.ptyp_loc, beforeTyp) + walkTypExpr(typexpr, t, insideTyp) + attach(t.trailing, typexpr.ptyp_loc, afterTyp) + | Ptyp_object(fields, _) => walkTypObjectFields(fields, t, comments) + | _ => () + } + + and walkTypObjectFields = (fields, t, comments) => walkList(~getLoc=field => + switch field { + | Parsetree.Otag(lbl, _, typ) => {...lbl.loc, loc_end: typ.ptyp_loc.loc_end} + | _ => Location.none + } + , ~walkNode=walkTypObjectField, fields, t, comments) + + and walkTypObjectField = (field, t, comments) => + switch field { + | Otag(lbl, _, typexpr) => + let (beforeLbl, afterLbl) = partitionLeadingTrailing(comments, lbl.loc) + attach(t.leading, lbl.loc, beforeLbl) + let (afterLbl, rest) = partitionAdjacentTrailing(lbl.loc, afterLbl) + attach(t.trailing, lbl.loc, afterLbl) + let (beforeTyp, insideTyp, afterTyp) = partitionByLoc(rest, typexpr.ptyp_loc) + attach(t.leading, typexpr.ptyp_loc, beforeTyp) + walkTypExpr(typexpr, t, insideTyp) + attach(t.trailing, typexpr.ptyp_loc, afterTyp) + | _ => () + } + + and walkTypeParameters = (typeParameters, t, comments) => + visitListButContinueWithRemainingComments( + ~getLoc=((_, _, typexpr)) => typexpr.Parsetree.ptyp_loc, + ~walkNode=walkTypeParameter, + ~newlineDelimited=false, + typeParameters, + t, + comments, + ) + + and walkTypeParameter = ((_attrs, _lbl, typexpr), t, comments) => { + let (beforeTyp, insideTyp, afterTyp) = partitionByLoc(comments, typexpr.ptyp_loc) + attach(t.leading, typexpr.ptyp_loc, beforeTyp) + walkTypExpr(typexpr, t, insideTyp) + attach(t.trailing, typexpr.ptyp_loc, afterTyp) + } + + and walkPackageType = (packageType, t, comments) => { + let (longident, packageConstraints) = packageType + let (beforeLongident, afterLongident) = partitionLeadingTrailing(comments, longident.loc) + attach(t.leading, longident.loc, beforeLongident) + let (afterLongident, rest) = partitionAdjacentTrailing(longident.loc, afterLongident) + attach(t.trailing, longident.loc, afterLongident) + walkPackageConstraints(packageConstraints, t, rest) + } + + and walkPackageConstraints = (packageConstraints, t, comments) => walkList(~getLoc=(( + longident, + typexpr, + )) => { + ...longident.Asttypes.loc, + loc_end: typexpr.Parsetree.ptyp_loc.loc_end, + }, ~walkNode=walkPackageConstraint, packageConstraints, t, comments) + + and walkPackageConstraint = (packageConstraint, t, comments) => { + let (longident, typexpr) = packageConstraint + let (beforeLongident, afterLongident) = partitionLeadingTrailing(comments, longident.loc) + attach(t.leading, longident.loc, beforeLongident) + let (afterLongident, rest) = partitionAdjacentTrailing(longident.loc, afterLongident) + attach(t.trailing, longident.loc, afterLongident) + let (beforeTyp, insideTyp, afterTyp) = partitionByLoc(rest, typexpr.ptyp_loc) + attach(t.leading, typexpr.ptyp_loc, beforeTyp) + walkTypExpr(typexpr, t, insideTyp) + attach(t.trailing, typexpr.ptyp_loc, afterTyp) + } + + and walkExtension = (extension, t, comments) => { + let (id, payload) = extension + let (beforeId, afterId) = partitionLeadingTrailing(comments, id.loc) + attach(t.leading, id.loc, beforeId) + let (afterId, rest) = partitionAdjacentTrailing(id.loc, afterId) + attach(t.trailing, id.loc, afterId) + walkPayload(payload, t, rest) + } + + and walkAttribute = ((id, payload), t, comments) => { + let (beforeId, afterId) = partitionLeadingTrailing(comments, id.loc) + attach(t.leading, 
id.loc, beforeId) + let (afterId, rest) = partitionAdjacentTrailing(id.loc, afterId) + attach(t.trailing, id.loc, afterId) + walkPayload(payload, t, rest) + } + + and walkPayload = (payload, t, comments) => + switch payload { + | PStr(s) => walkStructure(s, t, comments) + | _ => () + } +} + +module Printer = { + let addParens = doc => + Doc.group( + Doc.concat(list{ + Doc.lparen, + Doc.indent(Doc.concat(list{Doc.softLine, doc})), + Doc.softLine, + Doc.rparen, + }), + ) + + let addBraces = doc => Doc.group(Doc.concat(list{Doc.lbrace, doc, Doc.rbrace})) + + let getFirstLeadingComment = (tbl, loc) => + switch Hashtbl.find(tbl.CommentTable.leading, loc) { + | list{comment, ..._} => Some(comment) + | list{} => None + | exception Not_found => None + } + + let printMultilineCommentContent = txt => { + /* Turns + * |* first line + * * second line + * * third line *| + * Into + * |* first line + * * second line + * * third line *| + * + * What makes a comment suitable for this kind of indentation? + * -> multiple lines + every line starts with a star + */ + let rec indentStars = (lines, acc) => + switch lines { + | list{} => Doc.nil + | list{lastLine} => + let line = String.trim(lastLine) + let doc = Doc.text(" " ++ line) + let trailingSpace = if String.length(line) > 0 { + Doc.space + } else { + Doc.nil + } + List.rev(list{trailingSpace, doc, ...acc}) |> Doc.concat + | list{line, ...lines} => + let line = String.trim(line) + let len = String.length(line) + if len > 0 && (@doesNotRaise String.get)(line, 0) === '*' { + let doc = Doc.text(" " ++ String.trim(line)) + indentStars(lines, list{Doc.hardLine, doc, ...acc}) + } else { + let trailingSpace = { + let len = String.length(txt) + if len > 0 && String.unsafe_get(txt, len - 1) == ' ' { + Doc.space + } else { + Doc.nil + } + } + + let content = Comment.trimSpaces(txt) + Doc.concat(list{Doc.text(content), trailingSpace}) + } + } + + let lines = String.split_on_char('\n', txt) + switch lines { + | list{} => Doc.text("/* */") + | list{line} => + Doc.concat(list{Doc.text("/* "), Doc.text(Comment.trimSpaces(line)), Doc.text(" */")}) + | list{first, ...rest} => + let firstLine = Comment.trimSpaces(first) + Doc.concat(list{ + Doc.text("/*"), + if String.length(firstLine) > 0 && !String.equal(firstLine, "*") { + Doc.space + } else { + Doc.nil + }, + indentStars(rest, list{Doc.hardLine, Doc.text(firstLine)}), + Doc.text("*/"), + }) + } + } + + let printTrailingComment = (nodeLoc: Location.t, comment) => { + let singleLine = Comment.isSingleLineComment(comment) + let content = { + let txt = Comment.txt(comment) + if singleLine { + Doc.text("// " ++ String.trim(txt)) + } else { + printMultilineCommentContent(txt) + } + } + + let diff = { + let cmtStart = Comment.loc(comment).loc_start + let prevTokEndPos = Comment.prevTokEndPos(comment) + cmtStart.pos_lnum - prevTokEndPos.pos_lnum + } + + let isBelow = Comment.loc(comment).loc_start.pos_lnum > nodeLoc.loc_end.pos_lnum + if diff > 0 || isBelow { + Doc.concat(list{ + Doc.breakParent, + Doc.lineSuffix( + Doc.concat(list{ + Doc.hardLine, + if diff > 1 { + Doc.hardLine + } else { + Doc.nil + }, + content, + }), + ), + }) + } else if !singleLine { + Doc.concat(list{Doc.space, content}) + } else { + Doc.lineSuffix(Doc.concat(list{Doc.space, content})) + } + } + + let printLeadingComment = (~nextComment=?, comment) => { + let singleLine = Comment.isSingleLineComment(comment) + let content = { + let txt = Comment.txt(comment) + if singleLine { + Doc.text("// " ++ String.trim(txt)) + } else { + 
printMultilineCommentContent(txt) + } + } + + let separator = Doc.concat(list{ + if singleLine { + Doc.concat(list{Doc.hardLine, Doc.breakParent}) + } else { + Doc.nil + }, + switch nextComment { + | Some(next) => + let nextLoc = Comment.loc(next) + let currLoc = Comment.loc(comment) + let diff = nextLoc.Location.loc_start.pos_lnum - currLoc.Location.loc_end.pos_lnum + + let nextSingleLine = Comment.isSingleLineComment(next) + if singleLine && nextSingleLine { + if diff > 1 { + Doc.hardLine + } else { + Doc.nil + } + } else if singleLine && !nextSingleLine { + if diff > 1 { + Doc.hardLine + } else { + Doc.nil + } + } else if diff > 1 { + Doc.concat(list{Doc.hardLine, Doc.hardLine}) + } else if diff === 1 { + Doc.hardLine + } else { + Doc.space + } + | None => Doc.nil + }, + }) + + Doc.concat(list{content, separator}) + } + + let printCommentsInside = (cmtTbl, loc) => { + let rec loop = (acc, comments) => + switch comments { + | list{} => Doc.nil + | list{comment} => + let cmtDoc = printLeadingComment(comment) + let doc = Doc.group(Doc.concat(list{Doc.concat(List.rev(list{cmtDoc, ...acc}))})) + + doc + | list{comment, ...list{nextComment, ..._comments} as rest} => + let cmtDoc = printLeadingComment(~nextComment, comment) + loop(list{cmtDoc, ...acc}, rest) + } + + switch Hashtbl.find(cmtTbl.CommentTable.inside, loc) { + | exception Not_found => Doc.nil + | comments => + Hashtbl.remove(cmtTbl.inside, loc) + Doc.group(loop(list{}, comments)) + } + } + + let printLeadingComments = (node, tbl, loc) => { + let rec loop = (acc, comments) => + switch comments { + | list{} => node + | list{comment} => + let cmtDoc = printLeadingComment(comment) + let diff = loc.Location.loc_start.pos_lnum - Comment.loc(comment).Location.loc_end.pos_lnum + + let separator = if Comment.isSingleLineComment(comment) { + if diff > 1 { + Doc.hardLine + } else { + Doc.nil + } + } else if diff === 0 { + Doc.space + } else if diff > 1 { + Doc.concat(list{Doc.hardLine, Doc.hardLine}) + } else { + Doc.hardLine + } + + let doc = Doc.group( + Doc.concat(list{Doc.concat(List.rev(list{cmtDoc, ...acc})), separator, node}), + ) + + doc + | list{comment, ...list{nextComment, ..._comments} as rest} => + let cmtDoc = printLeadingComment(~nextComment, comment) + loop(list{cmtDoc, ...acc}, rest) + } + + switch Hashtbl.find(tbl, loc) { + | exception Not_found => node + | comments => + /* Remove comments from tbl: Some ast nodes have the same location. + * We only want to print comments once */ + Hashtbl.remove(tbl, loc) + loop(list{}, comments) + } + } + + let printTrailingComments = (node, tbl, loc) => { + let rec loop = (acc, comments) => + switch comments { + | list{} => Doc.concat(List.rev(acc)) + | list{comment, ...comments} => + let cmtDoc = printTrailingComment(loc, comment) + loop(list{cmtDoc, ...acc}, comments) + } + + switch Hashtbl.find(tbl, loc) { + | exception Not_found => node + | list{} => node + | list{_first, ..._} as comments => + /* Remove comments from tbl: Some ast nodes have the same location. 
+ * We only want to print comments once */ + Hashtbl.remove(tbl, loc) + let cmtsDoc = loop(list{}, comments) + Doc.concat(list{node, cmtsDoc}) + } + } + + let printComments = (doc, tbl: CommentTable.t, loc) => { + let docWithLeadingComments = printLeadingComments(doc, tbl.leading, loc) + printTrailingComments(docWithLeadingComments, tbl.trailing, loc) + } + + let printList = (~getLoc, ~nodes, ~print, ~forceBreak=false, t) => { + let rec loop = (prevLoc: Location.t, acc, nodes) => + switch nodes { + | list{} => (prevLoc, Doc.concat(List.rev(acc))) + | list{node, ...nodes} => + let loc = getLoc(node) + let startPos = switch getFirstLeadingComment(t, loc) { + | None => loc.loc_start + | Some(comment) => Comment.loc(comment).loc_start + } + + let sep = if startPos.pos_lnum - prevLoc.loc_end.pos_lnum > 1 { + Doc.concat(list{Doc.hardLine, Doc.hardLine}) + } else { + Doc.hardLine + } + + let doc = printComments(print(node, t), t, loc) + loop(loc, list{doc, sep, ...acc}, nodes) + } + + switch nodes { + | list{} => Doc.nil + | list{node, ...nodes} => + let firstLoc = getLoc(node) + let doc = printComments(print(node, t), t, firstLoc) + let (lastLoc, docs) = loop(firstLoc, list{doc}, nodes) + let forceBreak = forceBreak || firstLoc.loc_start.pos_lnum !== lastLoc.loc_end.pos_lnum + + Doc.breakableGroup(~forceBreak, docs) + } + } + + let printListi = (~getLoc, ~nodes, ~print, ~forceBreak=false, t) => { + let rec loop = (i, prevLoc: Location.t, acc, nodes) => + switch nodes { + | list{} => (prevLoc, Doc.concat(List.rev(acc))) + | list{node, ...nodes} => + let loc = getLoc(node) + let startPos = switch getFirstLeadingComment(t, loc) { + | None => loc.loc_start + | Some(comment) => Comment.loc(comment).loc_start + } + + let sep = if startPos.pos_lnum - prevLoc.loc_end.pos_lnum > 1 { + Doc.concat(list{Doc.hardLine, Doc.hardLine}) + } else { + Doc.line + } + + let doc = printComments(print(node, t, i), t, loc) + loop(i + 1, loc, list{doc, sep, ...acc}, nodes) + } + + switch nodes { + | list{} => Doc.nil + | list{node, ...nodes} => + let firstLoc = getLoc(node) + let doc = printComments(print(node, t, 0), t, firstLoc) + let (lastLoc, docs) = loop(1, firstLoc, list{doc}, nodes) + let forceBreak = forceBreak || firstLoc.loc_start.pos_lnum !== lastLoc.loc_end.pos_lnum + + Doc.breakableGroup(~forceBreak, docs) + } + } + + let rec printLongidentAux = (accu, x) => + switch x { + | Longident.Lident(s) => list{Doc.text(s), ...accu} + | Ldot(lid, s) => printLongidentAux(list{Doc.text(s), ...accu}, lid) + | Lapply(lid1, lid2) => + let d1 = Doc.join(~sep=Doc.dot, printLongidentAux(list{}, lid1)) + let d2 = Doc.join(~sep=Doc.dot, printLongidentAux(list{}, lid2)) + list{Doc.concat(list{d1, Doc.lparen, d2, Doc.rparen}), ...accu} + } + + let printLongident = x => + switch x { + | Longident.Lident(txt) => Doc.text(txt) + | lid => Doc.join(~sep=Doc.dot, printLongidentAux(list{}, lid)) + } + + type identifierStyle = + | ExoticIdent + | NormalIdent + + let classifyIdentContent = (~allowUident=false, txt) => { + let len = String.length(txt) + let rec go = i => + if i === len { + NormalIdent + } else { + let c = String.unsafe_get(txt, i) + if ( + i === 0 && + !( + (allowUident && (c >= 'A' && c <= 'Z')) || + ((c >= 'a' && c <= 'z') || + (c == '_' || (c >= '0' && c <= '9'))) + ) + ) { + ExoticIdent + } else if ( + !( + (c >= 'a' && c <= 'z') || + ((c >= 'A' && c <= 'Z') || + (c == '\'' || (c == '_' || c >= '0' && c <= '9'))) + ) + ) { + ExoticIdent + } else { + go(i + 1) + } + } + + if Token.isKeywordTxt(txt) && txt != "list" { 
+ ExoticIdent + } else { + go(0) + } + } + + let printIdentLike = (~allowUident=?, txt) => + switch classifyIdentContent(~allowUident?, txt) { + | ExoticIdent => Doc.concat(list{Doc.text("\\\""), Doc.text(txt), Doc.text("\"")}) + | NormalIdent => Doc.text(txt) + } + + let printLident = l => + switch l { + | Longident.Lident(txt) => printIdentLike(txt) + | Longident.Ldot(path, txt) => + let txts = Longident.flatten(path) + Doc.concat(list{ + Doc.join(~sep=Doc.dot, List.map(Doc.text, txts)), + Doc.dot, + printIdentLike(txt), + }) + | _ => Doc.text("printLident: Longident.Lapply is not supported") + } + + let printLongidentLocation = (l, cmtTbl) => { + let doc = printLongident(l.Location.txt) + printComments(doc, cmtTbl, l.loc) + } + + /* Module.SubModule.x */ + let printLidentPath = (path, cmtTbl) => { + let doc = printLident(path.Location.txt) + printComments(doc, cmtTbl, path.loc) + } + + /* Module.SubModule.x or Module.SubModule.X */ + let printIdentPath = (path, cmtTbl) => { + let doc = printLident(path.Location.txt) + printComments(doc, cmtTbl, path.loc) + } + + let printStringLoc = (sloc, cmtTbl) => { + let doc = printIdentLike(sloc.Location.txt) + printComments(doc, cmtTbl, sloc.loc) + } + + let printConstant = c => + switch c { + | Parsetree.Pconst_integer(s, suffix) => + switch suffix { + | Some(c) => Doc.text(s ++ Char.escaped(c)) + | None => Doc.text(s) + } + | Pconst_string(txt, None) => Doc.text("\"" ++ (txt ++ "\"")) + | Pconst_string(txt, Some(prefix)) => + Doc.concat(list{ + if prefix == "" { + Doc.nil + } else { + Doc.text(prefix) + }, + Doc.text("`" ++ (txt ++ "`")), + }) + | Pconst_float(s, _) => Doc.text(s) + | Pconst_char(c) => Doc.text("'" ++ (Char.escaped(c) ++ "'")) + } + + let rec printStructure = (s: Parsetree.structure, t) => + switch s { + | list{} => printCommentsInside(t, Location.none) + | structure => + printList(~getLoc=s => s.Parsetree.pstr_loc, ~nodes=structure, ~print=printStructureItem, t) + } + + and printStructureItem = (si: Parsetree.structure_item, cmtTbl) => + switch si.pstr_desc { + | Pstr_value(rec_flag, valueBindings) => + let recFlag = switch rec_flag { + | Asttypes.Nonrecursive => Doc.nil + | Asttypes.Recursive => Doc.text("rec ") + } + + printValueBindings(~recFlag, valueBindings, cmtTbl) + | Pstr_type(recFlag, typeDeclarations) => + let recFlag = switch recFlag { + | Asttypes.Nonrecursive => Doc.nil + | Asttypes.Recursive => Doc.text("rec ") + } + + printTypeDeclarations(~recFlag, typeDeclarations, cmtTbl) + | Pstr_primitive(valueDescription) => printValueDescription(valueDescription, cmtTbl) + | Pstr_eval(expr, attrs) => + let exprDoc = { + let doc = printExpressionWithComments(expr, cmtTbl) + switch Parens.structureExpr(expr) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, expr, braces) + | Nothing => doc + } + } + + Doc.concat(list{printAttributes(attrs), exprDoc}) + | Pstr_attribute(attr) => + Doc.concat(list{Doc.text("@"), printAttributeWithComments(attr, cmtTbl)}) + | Pstr_extension(extension, attrs) => + Doc.concat(list{ + printAttributes(attrs), + Doc.concat(list{printExtensionWithComments(~atModuleLvl=true, extension, cmtTbl)}), + }) + | Pstr_include(includeDeclaration) => printIncludeDeclaration(includeDeclaration, cmtTbl) + | Pstr_open(openDescription) => printOpenDescription(openDescription, cmtTbl) + | Pstr_modtype(modTypeDecl) => printModuleTypeDeclaration(modTypeDecl, cmtTbl) + | Pstr_module(moduleBinding) => printModuleBinding(~isRec=false, moduleBinding, cmtTbl, 0) + | 
Pstr_recmodule(moduleBindings) => + printListi( + ~getLoc=mb => mb.Parsetree.pmb_loc, + ~nodes=moduleBindings, + ~print=printModuleBinding(~isRec=true), + cmtTbl, + ) + | Pstr_exception(extensionConstructor) => printExceptionDef(extensionConstructor, cmtTbl) + | Pstr_typext(typeExtension) => printTypeExtension(typeExtension, cmtTbl) + | Pstr_class(_) | Pstr_class_type(_) => Doc.nil + } + + and printTypeExtension = (te: Parsetree.type_extension, cmtTbl) => { + let prefix = Doc.text("type ") + let name = printLidentPath(te.ptyext_path, cmtTbl) + let typeParams = printTypeParams(te.ptyext_params, cmtTbl) + let extensionConstructors = { + let ecs = te.ptyext_constructors + let forceBreak = switch (ecs, List.rev(ecs)) { + | (list{first, ..._}, list{last, ..._}) => + first.pext_loc.loc_start.pos_lnum > te.ptyext_path.loc.loc_end.pos_lnum || + first.pext_loc.loc_start.pos_lnum < last.pext_loc.loc_end.pos_lnum + | _ => false + } + + let privateFlag = switch te.ptyext_private { + | Asttypes.Private => Doc.concat(list{Doc.text("private"), Doc.line}) + | Public => Doc.nil + } + + let rows = printListi( + ~getLoc=n => n.Parsetree.pext_loc, + ~print=printExtensionConstructor, + ~nodes=ecs, + ~forceBreak, + cmtTbl, + ) + + Doc.breakableGroup( + ~forceBreak, + Doc.indent( + Doc.concat(list{ + Doc.line, + privateFlag, + rows, + /* Doc.join ~sep:Doc.line ( */ + /* List.mapi printExtensionConstructor ecs */ + /* ) */ + }), + ), + ) + } + + Doc.group( + Doc.concat(list{ + printAttributes(~loc=te.ptyext_path.loc, te.ptyext_attributes), + prefix, + name, + typeParams, + Doc.text(" +="), + extensionConstructors, + }), + ) + } + + and printModuleBinding = (~isRec, moduleBinding, cmtTbl, i) => { + let prefix = if i == 0 { + Doc.concat(list{ + Doc.text("module "), + if isRec { + Doc.text("rec ") + } else { + Doc.nil + }, + }) + } else { + Doc.text("and ") + } + + let (modExprDoc, modConstraintDoc) = switch moduleBinding.pmb_expr { + | {pmod_desc: Pmod_constraint(modExpr, modType)} => ( + printModExpr(modExpr, cmtTbl), + Doc.concat(list{Doc.text(": "), printModType(modType, cmtTbl)}), + ) + | modExpr => (printModExpr(modExpr, cmtTbl), Doc.nil) + } + + let modName = { + let doc = Doc.text(moduleBinding.pmb_name.Location.txt) + printComments(doc, cmtTbl, moduleBinding.pmb_name.loc) + } + + let doc = Doc.concat(list{ + printAttributes(~loc=moduleBinding.pmb_name.loc, moduleBinding.pmb_attributes), + prefix, + modName, + modConstraintDoc, + Doc.text(" = "), + modExprDoc, + }) + printComments(doc, cmtTbl, moduleBinding.pmb_loc) + } + + and printModuleTypeDeclaration = (modTypeDecl: Parsetree.module_type_declaration, cmtTbl) => { + let modName = { + let doc = Doc.text(modTypeDecl.pmtd_name.txt) + printComments(doc, cmtTbl, modTypeDecl.pmtd_name.loc) + } + + Doc.concat(list{ + printAttributes(modTypeDecl.pmtd_attributes), + Doc.text("module type "), + modName, + switch modTypeDecl.pmtd_type { + | None => Doc.nil + | Some(modType) => Doc.concat(list{Doc.text(" = "), printModType(modType, cmtTbl)}) + }, + }) + } + + and printModType = (modType, cmtTbl) => { + let modTypeDoc = switch modType.pmty_desc { + | Parsetree.Pmty_ident(longident) => + Doc.concat(list{ + printAttributes(~loc=longident.loc, modType.pmty_attributes), + printLongidentLocation(longident, cmtTbl), + }) + | Pmty_signature(signature) => + let signatureDoc = Doc.breakableGroup( + ~forceBreak=true, + Doc.concat(list{ + Doc.lbrace, + Doc.indent(Doc.concat(list{Doc.line, printSignature(signature, cmtTbl)})), + Doc.line, + Doc.rbrace, + }), + ) + 
Doc.concat(list{printAttributes(modType.pmty_attributes), signatureDoc}) + | Pmty_functor(_) => + let (parameters, returnType) = ParsetreeViewer.functorType(modType) + let parametersDoc = switch parameters { + | list{} => Doc.nil + | list{(attrs, {Location.txt: "_", loc}, Some(modType))} => + let cmtLoc = {...loc, loc_end: modType.Parsetree.pmty_loc.loc_end} + + let attrs = switch attrs { + | list{} => Doc.nil + | attrs => + Doc.concat(list{Doc.join(~sep=Doc.line, List.map(printAttribute, attrs)), Doc.line}) + } + let doc = Doc.concat(list{attrs, printModType(modType, cmtTbl)}) + printComments(doc, cmtTbl, cmtLoc) + | params => + Doc.group( + Doc.concat(list{ + Doc.lparen, + Doc.indent( + Doc.concat(list{ + Doc.softLine, + Doc.join(~sep=Doc.concat(list{Doc.comma, Doc.line}), List.map((( + attrs, + lbl, + modType, + )) => { + let cmtLoc = switch modType { + | None => lbl.Asttypes.loc + | Some(modType) => { + ...lbl.Asttypes.loc, + loc_end: modType.Parsetree.pmty_loc.loc_end, + } + } + + let attrs = switch attrs { + | list{} => Doc.nil + | attrs => + Doc.concat(list{ + Doc.join(~sep=Doc.line, List.map(printAttribute, attrs)), + Doc.line, + }) + } + let lblDoc = if lbl.Location.txt == "_" { + Doc.nil + } else { + let doc = Doc.text(lbl.txt) + printComments(doc, cmtTbl, lbl.loc) + } + + let doc = Doc.concat(list{ + attrs, + lblDoc, + switch modType { + | None => Doc.nil + | Some(modType) => + Doc.concat(list{ + if lbl.txt == "_" { + Doc.nil + } else { + Doc.text(": ") + }, + printModType(modType, cmtTbl), + }) + }, + }) + printComments(doc, cmtTbl, cmtLoc) + }, params)), + }), + ), + Doc.trailingComma, + Doc.softLine, + Doc.rparen, + }), + ) + } + + let returnDoc = { + let doc = printModType(returnType, cmtTbl) + if Parens.modTypeFunctorReturn(returnType) { + addParens(doc) + } else { + doc + } + } + + Doc.group( + Doc.concat(list{ + parametersDoc, + Doc.group(Doc.concat(list{Doc.text(" =>"), Doc.line, returnDoc})), + }), + ) + | Pmty_typeof(modExpr) => + Doc.concat(list{Doc.text("module type of "), printModExpr(modExpr, cmtTbl)}) + | Pmty_extension(extension) => printExtensionWithComments(~atModuleLvl=false, extension, cmtTbl) + | Pmty_alias(longident) => + Doc.concat(list{Doc.text("module "), printLongidentLocation(longident, cmtTbl)}) + | Pmty_with(modType, withConstraints) => + let operand = { + let doc = printModType(modType, cmtTbl) + if Parens.modTypeWithOperand(modType) { + addParens(doc) + } else { + doc + } + } + + Doc.group( + Doc.concat(list{ + operand, + Doc.indent(Doc.concat(list{Doc.line, printWithConstraints(withConstraints, cmtTbl)})), + }), + ) + } + + let attrsAlreadyPrinted = switch modType.pmty_desc { + | Pmty_functor(_) | Pmty_signature(_) | Pmty_ident(_) => true + | _ => false + } + + let doc = Doc.concat(list{ + if attrsAlreadyPrinted { + Doc.nil + } else { + printAttributes(modType.pmty_attributes) + }, + modTypeDoc, + }) + printComments(doc, cmtTbl, modType.pmty_loc) + } + + and printWithConstraints = (withConstraints, cmtTbl) => { + let rows = List.mapi((i, withConstraint) => + Doc.group( + Doc.concat(list{ + if i === 0 { + Doc.text("with ") + } else { + Doc.text("and ") + }, + printWithConstraint(withConstraint, cmtTbl), + }), + ) + , withConstraints) + + Doc.join(~sep=Doc.line, rows) + } + + and printWithConstraint = (withConstraint: Parsetree.with_constraint, cmtTbl) => + switch withConstraint { + /* with type X.t = ... 
*/ + | Pwith_type(longident, typeDeclaration) => + Doc.group( + printTypeDeclaration( + ~name=printLidentPath(longident, cmtTbl), + ~equalSign="=", + ~recFlag=Doc.nil, + 0, + typeDeclaration, + CommentTable.empty, + ), + ) + /* with module X.Y = Z */ + | Pwith_module({txt: longident1}, {txt: longident2}) => + Doc.concat(list{ + Doc.text("module "), + printLongident(longident1), + Doc.text(" ="), + Doc.indent(Doc.concat(list{Doc.line, printLongident(longident2)})), + }) + /* with type X.t := ..., same format as [Pwith_type] */ + | Pwith_typesubst(longident, typeDeclaration) => + Doc.group( + printTypeDeclaration( + ~name=printLidentPath(longident, cmtTbl), + ~equalSign=":=", + ~recFlag=Doc.nil, + 0, + typeDeclaration, + CommentTable.empty, + ), + ) + | Pwith_modsubst({txt: longident1}, {txt: longident2}) => + Doc.concat(list{ + Doc.text("module "), + printLongident(longident1), + Doc.text(" :="), + Doc.indent(Doc.concat(list{Doc.line, printLongident(longident2)})), + }) + } + + and printSignature = (signature, cmtTbl) => + switch signature { + | list{} => printCommentsInside(cmtTbl, Location.none) + | signature => + printList( + ~getLoc=s => s.Parsetree.psig_loc, + ~nodes=signature, + ~print=printSignatureItem, + cmtTbl, + ) + } + + and printSignatureItem = (si: Parsetree.signature_item, cmtTbl) => + switch si.psig_desc { + | Parsetree.Psig_value(valueDescription) => printValueDescription(valueDescription, cmtTbl) + | Psig_type(recFlag, typeDeclarations) => + let recFlag = switch recFlag { + | Asttypes.Nonrecursive => Doc.nil + | Asttypes.Recursive => Doc.text("rec ") + } + + printTypeDeclarations(~recFlag, typeDeclarations, cmtTbl) + | Psig_typext(typeExtension) => printTypeExtension(typeExtension, cmtTbl) + | Psig_exception(extensionConstructor) => printExceptionDef(extensionConstructor, cmtTbl) + | Psig_module(moduleDeclaration) => printModuleDeclaration(moduleDeclaration, cmtTbl) + | Psig_recmodule(moduleDeclarations) => printRecModuleDeclarations(moduleDeclarations, cmtTbl) + | Psig_modtype(modTypeDecl) => printModuleTypeDeclaration(modTypeDecl, cmtTbl) + | Psig_open(openDescription) => printOpenDescription(openDescription, cmtTbl) + | Psig_include(includeDescription) => printIncludeDescription(includeDescription, cmtTbl) + | Psig_attribute(attr) => + Doc.concat(list{Doc.text("@"), printAttributeWithComments(attr, cmtTbl)}) + | Psig_extension(extension, attrs) => + Doc.concat(list{ + printAttributes(attrs), + Doc.concat(list{printExtensionWithComments(~atModuleLvl=true, extension, cmtTbl)}), + }) + | Psig_class(_) | Psig_class_type(_) => Doc.nil + } + + and printRecModuleDeclarations = (moduleDeclarations, cmtTbl) => + printListi( + ~getLoc=n => n.Parsetree.pmd_loc, + ~nodes=moduleDeclarations, + ~print=printRecModuleDeclaration, + cmtTbl, + ) + + and printRecModuleDeclaration = (md, cmtTbl, i) => { + let body = switch md.pmd_type.pmty_desc { + | Parsetree.Pmty_alias(longident) => + Doc.concat(list{Doc.text(" = "), printLongidentLocation(longident, cmtTbl)}) + | _ => + let needsParens = switch md.pmd_type.pmty_desc { + | Pmty_with(_) => true + | _ => false + } + + let modTypeDoc = { + let doc = printModType(md.pmd_type, cmtTbl) + if needsParens { + addParens(doc) + } else { + doc + } + } + + Doc.concat(list{Doc.text(": "), modTypeDoc}) + } + + let prefix = if i < 1 { + "module rec " + } else { + "and " + } + Doc.concat(list{ + printAttributes(~loc=md.pmd_name.loc, md.pmd_attributes), + Doc.text(prefix), + printComments(Doc.text(md.pmd_name.txt), cmtTbl, md.pmd_name.loc), + body, + }) + 
} + + and printModuleDeclaration = (md: Parsetree.module_declaration, cmtTbl) => { + let body = switch md.pmd_type.pmty_desc { + | Parsetree.Pmty_alias(longident) => + Doc.concat(list{Doc.text(" = "), printLongidentLocation(longident, cmtTbl)}) + | _ => Doc.concat(list{Doc.text(": "), printModType(md.pmd_type, cmtTbl)}) + } + + Doc.concat(list{ + printAttributes(~loc=md.pmd_name.loc, md.pmd_attributes), + Doc.text("module "), + printComments(Doc.text(md.pmd_name.txt), cmtTbl, md.pmd_name.loc), + body, + }) + } + + and printOpenDescription = (openDescription: Parsetree.open_description, p) => + Doc.concat(list{ + printAttributes(openDescription.popen_attributes), + Doc.text("open"), + switch openDescription.popen_override { + | Asttypes.Fresh => Doc.space + | Asttypes.Override => Doc.text("! ") + }, + printLongidentLocation(openDescription.popen_lid, p), + }) + + and printIncludeDescription = (includeDescription: Parsetree.include_description, cmtTbl) => + Doc.concat(list{ + printAttributes(includeDescription.pincl_attributes), + Doc.text("include "), + printModType(includeDescription.pincl_mod, cmtTbl), + }) + + and printIncludeDeclaration = (includeDeclaration: Parsetree.include_declaration, cmtTbl) => { + let isJsFfiImport = List.exists(attr => + switch attr { + | ({Location.txt: "ns.jsFfi"}, _) => true + | _ => false + } + , includeDeclaration.pincl_attributes) + + if isJsFfiImport { + printJsFfiImportDeclaration(includeDeclaration, cmtTbl) + } else { + Doc.concat(list{ + printAttributes(includeDeclaration.pincl_attributes), + Doc.text("include "), + { + let includeDoc = printModExpr(includeDeclaration.pincl_mod, cmtTbl) + + if Parens.includeModExpr(includeDeclaration.pincl_mod) { + addParens(includeDoc) + } else { + includeDoc + } + }, + }) + } + } + + and printJsFfiImport = (valueDescription: Parsetree.value_description, cmtTbl) => { + let attrs = List.filter(attr => + switch attr { + | ({Location.txt: "bs.val" | "genType.import" | "bs.scope"}, _) => false + | _ => true + } + , valueDescription.pval_attributes) + let (ident, alias) = switch valueDescription.pval_prim { + | list{primitive, ..._} => + if primitive != valueDescription.pval_name.txt { + ( + printIdentLike(primitive), + Doc.concat(list{Doc.text(" as "), printIdentLike(valueDescription.pval_name.txt)}), + ) + } else { + (printIdentLike(primitive), Doc.nil) + } + | _ => (printIdentLike(valueDescription.pval_name.txt), Doc.nil) + } + + Doc.concat(list{ + printAttributes(~loc=valueDescription.pval_name.loc, attrs), + ident, + alias, + Doc.text(": "), + printTypExpr(valueDescription.pval_type, cmtTbl), + }) + } + + and printJsFfiImportScope = (scope: ParsetreeViewer.jsImportScope) => + switch scope { + | JsGlobalImport => Doc.nil + | JsModuleImport(modName) => + Doc.concat(list{Doc.text(" from "), Doc.doubleQuote, Doc.text(modName), Doc.doubleQuote}) + | JsScopedImport(idents) => + Doc.concat(list{Doc.text(" from "), Doc.join(~sep=Doc.dot, List.map(Doc.text, idents))}) + } + + and printJsFfiImportDeclaration = (includeDeclaration: Parsetree.include_declaration, cmtTbl) => { + let attrs = List.filter(attr => + switch attr { + | ({Location.txt: "ns.jsFfi"}, _) => false + | _ => true + } + , includeDeclaration.pincl_attributes) + + let imports = ParsetreeViewer.extractValueDescriptionFromModExpr(includeDeclaration.pincl_mod) + let scope = switch imports { + | list{vd, ..._} => ParsetreeViewer.classifyJsImport(vd) + | list{} => ParsetreeViewer.JsGlobalImport + } + + let scopeDoc = printJsFfiImportScope(scope) + Doc.group( + 
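/* Hypothetical rendering (identifiers and module name invented): import {make: unit => t, destroy as tearDown: t => unit} from "some-module" */ +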
Doc.concat(list{ + printAttributes(attrs), + Doc.text("import "), + Doc.group( + Doc.concat(list{ + Doc.lbrace, + Doc.indent( + Doc.concat(list{ + Doc.softLine, + Doc.join( + ~sep=Doc.concat(list{Doc.comma, Doc.line}), + List.map(vd => printJsFfiImport(vd, cmtTbl), imports), + ), + }), + ), + Doc.trailingComma, + Doc.softLine, + Doc.rbrace, + }), + ), + scopeDoc, + }), + ) + } + + and printValueBindings = (~recFlag, vbs: list, cmtTbl) => + printListi( + ~getLoc=vb => vb.Parsetree.pvb_loc, + ~nodes=vbs, + ~print=printValueBinding(~recFlag), + cmtTbl, + ) + + and printValueDescription = (valueDescription, cmtTbl) => { + let isExternal = switch valueDescription.pval_prim { + | list{} => false + | _ => true + } + + Doc.group( + Doc.concat(list{ + printAttributes(valueDescription.pval_attributes), + Doc.text( + if isExternal { + "external " + } else { + "let " + }, + ), + printComments( + printIdentLike(valueDescription.pval_name.txt), + cmtTbl, + valueDescription.pval_name.loc, + ), + Doc.text(": "), + printTypExpr(valueDescription.pval_type, cmtTbl), + if isExternal { + Doc.group( + Doc.concat(list{ + Doc.text(" ="), + Doc.indent( + Doc.concat(list{ + Doc.line, + Doc.join( + ~sep=Doc.line, + List.map( + s => Doc.concat(list{Doc.text("\""), Doc.text(s), Doc.text("\"")}), + valueDescription.pval_prim, + ), + ), + }), + ), + }), + ) + } else { + Doc.nil + }, + }), + ) + } + + and printTypeDeclarations = (~recFlag, typeDeclarations, cmtTbl) => + printListi( + ~getLoc=n => n.Parsetree.ptype_loc, + ~nodes=typeDeclarations, + ~print=printTypeDeclaration2(~recFlag), + cmtTbl, + ) + + /* + * type_declaration = { + * ptype_name: string loc; + * ptype_params: (core_type * variance) list; + * (* ('a1,...'an) t; None represents _*) + * ptype_cstrs: (core_type * core_type * Location.t) list; + * (* ... constraint T1=T1' ... constraint Tn=Tn' *) + * ptype_kind: type_kind; + * ptype_private: private_flag; (* = private ... *) + * ptype_manifest: core_type option; (* = T *) + * ptype_attributes: attributes; (* ... [@@id1] [@@id2] *) + * ptype_loc: Location.t; + * } + * + * + * type t (abstract, no manifest) + * type t = T0 (abstract, manifest=T0) + * type t = C of T | ... (variant, no manifest) + * type t = T0 = C of T | ... (variant, manifest=T0) + * type t = {l: T; ...} (record, no manifest) + * type t = T0 = {l : T; ...} (record, manifest=T0) + * type t = .. 
(open, no manifest) + * + * + * and type_kind = + * | Ptype_abstract + * | Ptype_variant of constructor_declaration list + * (* Invariant: non-empty list *) + * | Ptype_record of label_declaration list + * (* Invariant: non-empty list *) + * | Ptype_open + */ + and printTypeDeclaration = ( + ~name, + ~equalSign, + ~recFlag, + i, + td: Parsetree.type_declaration, + cmtTbl, + ) => { + let (hasGenType, attrs) = ParsetreeViewer.splitGenTypeAttr(td.ptype_attributes) + let attrs = printAttributes(~loc=td.ptype_loc, attrs) + let prefix = if i > 0 { + Doc.concat(list{ + Doc.text("and "), + if hasGenType { + Doc.text("export ") + } else { + Doc.nil + }, + }) + } else { + Doc.concat(list{ + Doc.text( + if hasGenType { + "export type " + } else { + "type " + }, + ), + recFlag, + }) + } + + let typeName = name + let typeParams = printTypeParams(td.ptype_params, cmtTbl) + let manifestAndKind = switch td.ptype_kind { + | Ptype_abstract => + switch td.ptype_manifest { + | None => Doc.nil + | Some(typ) => + Doc.concat(list{ + Doc.concat(list{Doc.space, Doc.text(equalSign), Doc.space}), + printPrivateFlag(td.ptype_private), + printTypExpr(typ, cmtTbl), + }) + } + | Ptype_open => + Doc.concat(list{ + Doc.concat(list{Doc.space, Doc.text(equalSign), Doc.space}), + printPrivateFlag(td.ptype_private), + Doc.text(".."), + }) + | Ptype_record(lds) => + let manifest = switch td.ptype_manifest { + | None => Doc.nil + | Some(typ) => + Doc.concat(list{ + Doc.concat(list{Doc.space, Doc.text(equalSign), Doc.space}), + printTypExpr(typ, cmtTbl), + }) + } + + Doc.concat(list{ + manifest, + Doc.concat(list{Doc.space, Doc.text(equalSign), Doc.space}), + printPrivateFlag(td.ptype_private), + printRecordDeclaration(lds, cmtTbl), + }) + | Ptype_variant(cds) => + let manifest = switch td.ptype_manifest { + | None => Doc.nil + | Some(typ) => + Doc.concat(list{ + Doc.concat(list{Doc.space, Doc.text(equalSign), Doc.space}), + printTypExpr(typ, cmtTbl), + }) + } + + Doc.concat(list{ + manifest, + Doc.concat(list{Doc.space, Doc.text(equalSign)}), + printConstructorDeclarations(~privateFlag=td.ptype_private, cds, cmtTbl), + }) + } + + let constraints = printTypeDefinitionConstraints(td.ptype_cstrs) + Doc.group(Doc.concat(list{attrs, prefix, typeName, typeParams, manifestAndKind, constraints})) + } + + and printTypeDeclaration2 = (~recFlag, td: Parsetree.type_declaration, cmtTbl, i) => { + let name = { + let doc = printIdentLike(td.Parsetree.ptype_name.txt) + printComments(doc, cmtTbl, td.ptype_name.loc) + } + + let equalSign = "=" + let (hasGenType, attrs) = ParsetreeViewer.splitGenTypeAttr(td.ptype_attributes) + let attrs = printAttributes(~loc=td.ptype_loc, attrs) + let prefix = if i > 0 { + Doc.concat(list{ + Doc.text("and "), + if hasGenType { + Doc.text("export ") + } else { + Doc.nil + }, + }) + } else { + Doc.concat(list{ + Doc.text( + if hasGenType { + "export type " + } else { + "type " + }, + ), + recFlag, + }) + } + + let typeName = name + let typeParams = printTypeParams(td.ptype_params, cmtTbl) + let manifestAndKind = switch td.ptype_kind { + | Ptype_abstract => + switch td.ptype_manifest { + | None => Doc.nil + | Some(typ) => + Doc.concat(list{ + Doc.concat(list{Doc.space, Doc.text(equalSign), Doc.space}), + printPrivateFlag(td.ptype_private), + printTypExpr(typ, cmtTbl), + }) + } + | Ptype_open => + Doc.concat(list{ + Doc.concat(list{Doc.space, Doc.text(equalSign), Doc.space}), + printPrivateFlag(td.ptype_private), + Doc.text(".."), + }) + | Ptype_record(lds) => + let manifest = switch td.ptype_manifest { + | None => 
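/* no manifest in this case, e.g. a plain type user = {name: string} (names invented) */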
Doc.nil + | Some(typ) => + Doc.concat(list{ + Doc.concat(list{Doc.space, Doc.text(equalSign), Doc.space}), + printTypExpr(typ, cmtTbl), + }) + } + + Doc.concat(list{ + manifest, + Doc.concat(list{Doc.space, Doc.text(equalSign), Doc.space}), + printPrivateFlag(td.ptype_private), + printRecordDeclaration(lds, cmtTbl), + }) + | Ptype_variant(cds) => + let manifest = switch td.ptype_manifest { + | None => Doc.nil + | Some(typ) => + Doc.concat(list{ + Doc.concat(list{Doc.space, Doc.text(equalSign), Doc.space}), + printTypExpr(typ, cmtTbl), + }) + } + + Doc.concat(list{ + manifest, + Doc.concat(list{Doc.space, Doc.text(equalSign)}), + printConstructorDeclarations(~privateFlag=td.ptype_private, cds, cmtTbl), + }) + } + + let constraints = printTypeDefinitionConstraints(td.ptype_cstrs) + Doc.group(Doc.concat(list{attrs, prefix, typeName, typeParams, manifestAndKind, constraints})) + } + + and printTypeDefinitionConstraints = cstrs => + switch cstrs { + | list{} => Doc.nil + | cstrs => + Doc.indent( + Doc.group( + Doc.concat(list{ + Doc.line, + Doc.group(Doc.join(~sep=Doc.line, List.map(printTypeDefinitionConstraint, cstrs))), + }), + ), + ) + } + + and printTypeDefinitionConstraint = ( + (typ1, typ2, _loc): (Parsetree.core_type, Parsetree.core_type, Location.t), + ) => + Doc.concat(list{ + Doc.text("constraint "), + printTypExpr(typ1, CommentTable.empty), + Doc.text(" = "), + printTypExpr(typ2, CommentTable.empty), + }) + + and printPrivateFlag = (flag: Asttypes.private_flag) => + switch flag { + | Private => Doc.text("private ") + | Public => Doc.nil + } + + and printTypeParams = (typeParams, cmtTbl) => + switch typeParams { + | list{} => Doc.nil + | typeParams => + Doc.group( + Doc.concat(list{ + Doc.lessThan, + Doc.indent( + Doc.concat(list{ + Doc.softLine, + Doc.join(~sep=Doc.concat(list{Doc.comma, Doc.line}), List.map(typeParam => { + let doc = printTypeParam(typeParam, cmtTbl) + printComments(doc, cmtTbl, fst(typeParam).Parsetree.ptyp_loc) + }, typeParams)), + }), + ), + Doc.trailingComma, + Doc.softLine, + Doc.greaterThan, + }), + ) + } + + and printTypeParam = (param: (Parsetree.core_type, Asttypes.variance), cmtTbl) => { + let (typ, variance) = param + let printedVariance = switch variance { + | Covariant => Doc.text("+") + | Contravariant => Doc.text("-") + | Invariant => Doc.nil + } + + Doc.concat(list{printedVariance, printTypExpr(typ, cmtTbl)}) + } + + and printRecordDeclaration = (lds: list, cmtTbl) => { + let forceBreak = switch (lds, List.rev(lds)) { + | (list{first, ..._}, list{last, ..._}) => + first.pld_loc.loc_start.pos_lnum < last.pld_loc.loc_end.pos_lnum + | _ => false + } + + Doc.breakableGroup( + ~forceBreak, + Doc.concat(list{ + Doc.lbrace, + Doc.indent( + Doc.concat(list{ + Doc.softLine, + Doc.join(~sep=Doc.concat(list{Doc.comma, Doc.line}), List.map(ld => { + let doc = printLabelDeclaration(ld, cmtTbl) + printComments(doc, cmtTbl, ld.Parsetree.pld_loc) + }, lds)), + }), + ), + Doc.trailingComma, + Doc.softLine, + Doc.rbrace, + }), + ) + } + + and printConstructorDeclarations = ( + ~privateFlag, + cds: list, + cmtTbl, + ) => { + let forceBreak = switch (cds, List.rev(cds)) { + | (list{first, ..._}, list{last, ..._}) => + first.pcd_loc.loc_start.pos_lnum < last.pcd_loc.loc_end.pos_lnum + | _ => false + } + + let privateFlag = switch privateFlag { + | Asttypes.Private => Doc.concat(list{Doc.text("private"), Doc.line}) + | Public => Doc.nil + } + + let rows = printListi( + ~getLoc=cd => cd.Parsetree.pcd_loc, + ~nodes=cds, + ~print=(cd, cmtTbl, i) => { + let doc = 
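/* a single variant row, e.g. | Node(int, tree) (constructor and type names are illustrative) */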
printConstructorDeclaration2(i, cd, cmtTbl) + printComments(doc, cmtTbl, cd.Parsetree.pcd_loc) + }, + ~forceBreak, + cmtTbl, + ) + + Doc.breakableGroup(~forceBreak, Doc.indent(Doc.concat(list{Doc.line, privateFlag, rows}))) + } + + and printConstructorDeclaration2 = (i, cd: Parsetree.constructor_declaration, cmtTbl) => { + let attrs = printAttributes(cd.pcd_attributes) + let bar = if i > 0 { + Doc.text("| ") + } else { + Doc.ifBreaks(Doc.text("| "), Doc.nil) + } + + let constrName = { + let doc = Doc.text(cd.pcd_name.txt) + printComments(doc, cmtTbl, cd.pcd_name.loc) + } + + let constrArgs = printConstructorArguments(~indent=true, cd.pcd_args, cmtTbl) + let gadt = switch cd.pcd_res { + | None => Doc.nil + | Some(typ) => Doc.indent(Doc.concat(list{Doc.text(": "), printTypExpr(typ, cmtTbl)})) + } + + Doc.concat(list{ + bar, + Doc.group( + Doc.concat(list{ + attrs /* TODO: fix parsing of attributes, so when can print them above the bar? */, + constrName, + constrArgs, + gadt, + }), + ), + }) + } + + and printConstructorArguments = (~indent, cdArgs: Parsetree.constructor_arguments, cmtTbl) => + switch cdArgs { + | Pcstr_tuple(list{}) => Doc.nil + | Pcstr_tuple(types) => + let args = Doc.concat(list{ + Doc.lparen, + Doc.indent( + Doc.concat(list{ + Doc.softLine, + Doc.join( + ~sep=Doc.concat(list{Doc.comma, Doc.line}), + List.map(typexpr => printTypExpr(typexpr, cmtTbl), types), + ), + }), + ), + Doc.trailingComma, + Doc.softLine, + Doc.rparen, + }) + Doc.group( + if indent { + Doc.indent(args) + } else { + args + }, + ) + | Pcstr_record(lds) => + let args = Doc.concat(list{ + Doc.lparen, + /* manually inline the printRecordDeclaration, gives better layout */ + Doc.lbrace, + Doc.indent( + Doc.concat(list{ + Doc.softLine, + Doc.join(~sep=Doc.concat(list{Doc.comma, Doc.line}), List.map(ld => { + let doc = printLabelDeclaration(ld, cmtTbl) + printComments(doc, cmtTbl, ld.Parsetree.pld_loc) + }, lds)), + }), + ), + Doc.trailingComma, + Doc.softLine, + Doc.rbrace, + Doc.rparen, + }) + if indent { + Doc.indent(args) + } else { + args + } + } + + and printLabelDeclaration = (ld: Parsetree.label_declaration, cmtTbl) => { + let attrs = printAttributes(~loc=ld.pld_name.loc, ld.pld_attributes) + let mutableFlag = switch ld.pld_mutable { + | Mutable => Doc.text("mutable ") + | Immutable => Doc.nil + } + + let name = { + let doc = printIdentLike(ld.pld_name.txt) + printComments(doc, cmtTbl, ld.pld_name.loc) + } + + Doc.group( + Doc.concat(list{attrs, mutableFlag, name, Doc.text(": "), printTypExpr(ld.pld_type, cmtTbl)}), + ) + } + + and printTypExpr = (typExpr: Parsetree.core_type, cmtTbl) => { + let renderedType = switch typExpr.ptyp_desc { + | Ptyp_any => Doc.text("_") + | Ptyp_var(var) => Doc.concat(list{Doc.text("'"), printIdentLike(var)}) + | Ptyp_extension(extension) => printExtensionWithComments(~atModuleLvl=false, extension, cmtTbl) + | Ptyp_alias(typ, alias) => + let typ = { + /* Technically type t = (string, float) => unit as 'x, doesn't require + * parens around the arrow expression. This is very confusing though. + * Is the "as" part of "unit" or "(string, float) => unit". By printing + * parens we guide the user towards its meaning. 
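+ * For example, type t = (string, float) => unit as 'x is printed as type t = ((string, float) => unit) as 'x.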
*/ + let needsParens = switch typ.ptyp_desc { + | Ptyp_arrow(_) => true + | _ => false + } + + let doc = printTypExpr(typ, cmtTbl) + if needsParens { + Doc.concat(list{Doc.lparen, doc, Doc.rparen}) + } else { + doc + } + } + + Doc.concat(list{ + typ, + Doc.text(" as "), + Doc.concat(list{Doc.text("'"), printIdentLike(alias)}), + }) + | Ptyp_constr( + {txt: Longident.Ldot(Longident.Lident("Js"), "t")}, + list{{ptyp_desc: Ptyp_object(_fields, _openFlag)} as typ}, + ) => + let bsObject = printTypExpr(typ, cmtTbl) + switch typExpr.ptyp_attributes { + | list{} => bsObject + | attrs => + Doc.concat(list{ + Doc.group(Doc.join(~sep=Doc.line, List.map(printAttribute, attrs))), + Doc.space, + printTypExpr(typ, cmtTbl), + }) + } + | Ptyp_constr(longidentLoc, list{{ptyp_desc: Parsetree.Ptyp_tuple(tuple)}}) => + let constrName = printLidentPath(longidentLoc, cmtTbl) + Doc.group( + Doc.concat(list{ + constrName, + Doc.lessThan, + printTupleType(~inline=true, tuple, cmtTbl), + Doc.greaterThan, + }), + ) + | Ptyp_constr(longidentLoc, constrArgs) => + let constrName = printLidentPath(longidentLoc, cmtTbl) + switch constrArgs { + | list{} => constrName + | list{{ + Parsetree.ptyp_desc: + Ptyp_constr( + {txt: Longident.Ldot(Longident.Lident("Js"), "t")}, + list{{ptyp_desc: Ptyp_object(fields, openFlag)}}, + ), + }} => + Doc.concat(list{ + constrName, + Doc.lessThan, + printBsObjectSugar(~inline=true, fields, openFlag, cmtTbl), + Doc.greaterThan, + }) + | _args => + Doc.group( + Doc.concat(list{ + constrName, + Doc.lessThan, + Doc.indent( + Doc.concat(list{ + Doc.softLine, + Doc.join( + ~sep=Doc.concat(list{Doc.comma, Doc.line}), + List.map(typexpr => printTypExpr(typexpr, cmtTbl), constrArgs), + ), + }), + ), + Doc.trailingComma, + Doc.softLine, + Doc.greaterThan, + }), + ) + } + | Ptyp_arrow(_) => + let (attrsBefore, args, returnType) = ParsetreeViewer.arrowType(typExpr) + let returnTypeNeedsParens = switch returnType.ptyp_desc { + | Ptyp_alias(_) => true + | _ => false + } + + let returnDoc = { + let doc = printTypExpr(returnType, cmtTbl) + if returnTypeNeedsParens { + Doc.concat(list{Doc.lparen, doc, Doc.rparen}) + } else { + doc + } + } + + let (isUncurried, attrs) = ParsetreeViewer.processUncurriedAttribute(attrsBefore) + + switch args { + | list{} => Doc.nil + | list{(list{}, Nolabel, n)} when !isUncurried => + let hasAttrsBefore = !(attrs == list{}) + let attrs = if hasAttrsBefore { + Doc.concat(list{ + Doc.join(~sep=Doc.line, List.map(printAttribute, attrsBefore)), + Doc.space, + }) + } else { + Doc.nil + } + + let typDoc = { + let doc = printTypExpr(n, cmtTbl) + switch n.ptyp_desc { + | Ptyp_arrow(_) | Ptyp_tuple(_) => addParens(doc) + | _ => doc + } + } + + Doc.group( + Doc.concat(list{ + Doc.group(attrs), + Doc.group( + if hasAttrsBefore { + Doc.concat(list{ + Doc.lparen, + Doc.indent(Doc.concat(list{Doc.softLine, typDoc, Doc.text(" => "), returnDoc})), + Doc.softLine, + Doc.rparen, + }) + } else { + Doc.concat(list{typDoc, Doc.text(" => "), returnDoc}) + }, + ), + }), + ) + | args => + let attrs = switch attrs { + | list{} => Doc.nil + | attrs => + Doc.concat(list{Doc.join(~sep=Doc.line, List.map(printAttribute, attrs)), Doc.space}) + } + + let renderedArgs = Doc.concat(list{ + attrs, + Doc.text("("), + Doc.indent( + Doc.concat(list{ + Doc.softLine, + if isUncurried { + Doc.concat(list{Doc.dot, Doc.space}) + } else { + Doc.nil + }, + Doc.join( + ~sep=Doc.concat(list{Doc.comma, Doc.line}), + List.map(tp => printTypeParameter(tp, cmtTbl), args), + ), + }), + ), + Doc.trailingComma, + Doc.softLine, 
+ Doc.text(")"), + }) + Doc.group(Doc.concat(list{renderedArgs, Doc.text(" => "), returnDoc})) + } + | Ptyp_tuple(types) => printTupleType(~inline=false, types, cmtTbl) + | Ptyp_object(fields, openFlag) => printBsObjectSugar(~inline=false, fields, openFlag, cmtTbl) + | Ptyp_poly(list{}, typ) => printTypExpr(typ, cmtTbl) + | Ptyp_poly(stringLocs, typ) => Doc.concat(list{Doc.join(~sep=Doc.space, List.map(({ + Location.txt: txt, + loc, + }) => { + let doc = Doc.concat(list{Doc.text("'"), Doc.text(txt)}) + printComments(doc, cmtTbl, loc) + }, stringLocs)), Doc.dot, Doc.space, printTypExpr(typ, cmtTbl)}) + | Ptyp_package(packageType) => + printPackageType(~printModuleKeywordAndParens=true, packageType, cmtTbl) + | Ptyp_class(_) => Doc.text("classes are not supported in types") + | Ptyp_variant(rowFields, closedFlag, labelsOpt) => + let printRowField = x => + switch x { + | Parsetree.Rtag({txt}, attrs, true, list{}) => + Doc.concat(list{ + printAttributes(attrs), + Doc.concat(list{Doc.text("#"), printIdentLike(~allowUident=true, txt)}), + }) + | Rtag({txt}, attrs, truth, types) => + let doType = t => + switch t.Parsetree.ptyp_desc { + | Ptyp_tuple(_) => printTypExpr(t, cmtTbl) + | _ => Doc.concat(list{Doc.lparen, printTypExpr(t, cmtTbl), Doc.rparen}) + } + + let printedTypes = List.map(doType, types) + let cases = Doc.join(~sep=Doc.concat(list{Doc.line, Doc.text("& ")}), printedTypes) + let cases = if truth { + Doc.concat(list{Doc.line, Doc.text("& "), cases}) + } else { + cases + } + Doc.group( + Doc.concat(list{ + printAttributes(attrs), + Doc.concat(list{Doc.text("#"), printIdentLike(~allowUident=true, txt)}), + cases, + }), + ) + | Rinherit(coreType) => printTypExpr(coreType, cmtTbl) + } + + let docs = List.map(printRowField, rowFields) + let cases = Doc.join(~sep=Doc.concat(list{Doc.line, Doc.text("| ")}), docs) + let cases = if docs == list{} { + cases + } else { + Doc.concat(list{Doc.text("| "), cases}) + } + let openingSymbol = if closedFlag == Open { + Doc.greaterThan + } else if labelsOpt == None { + Doc.nil + } else { + Doc.lessThan + } + let hasLabels = labelsOpt != None && labelsOpt != Some(list{}) + let labels = switch labelsOpt { + | None | Some(list{}) => Doc.nil + | Some(labels) => + Doc.concat( + List.map( + label => + Doc.concat(list{Doc.line, Doc.text("#"), printIdentLike(~allowUident=true, label)}), + labels, + ), + ) + } + + let closingSymbol = if hasLabels { + Doc.text(" >") + } else { + Doc.nil + } + Doc.group( + Doc.concat(list{ + Doc.lbracket, + openingSymbol, + Doc.line, + cases, + closingSymbol, + labels, + Doc.line, + Doc.rbracket, + }), + ) + } + + let shouldPrintItsOwnAttributes = switch typExpr.ptyp_desc { + | Ptyp_arrow(_) /* es6 arrow types print their own attributes */ + | Ptyp_constr({txt: Longident.Ldot(Longident.Lident("Js"), "t")}, _) => true + | _ => false + } + + let doc = switch typExpr.ptyp_attributes { + | list{_, ..._} as attrs when !shouldPrintItsOwnAttributes => + Doc.group(Doc.concat(list{printAttributes(attrs), renderedType})) + | _ => renderedType + } + + printComments(doc, cmtTbl, typExpr.ptyp_loc) + } + + and printBsObjectSugar = (~inline, fields, openFlag, cmtTbl) => { + let doc = switch fields { + | list{} => + Doc.concat(list{ + Doc.lbrace, + switch openFlag { + | Asttypes.Closed => Doc.dot + | Open => Doc.dotdot + }, + Doc.rbrace, + }) + | fields => + Doc.concat(list{ + Doc.lbrace, + switch openFlag { + | Asttypes.Closed => Doc.nil + | Open => Doc.dotdot + }, + Doc.indent( + Doc.concat(list{ + Doc.softLine, + Doc.join( + 
~sep=Doc.concat(list{Doc.comma, Doc.line}), + List.map(field => printObjectField(field, cmtTbl), fields), + ), + }), + ), + Doc.trailingComma, + Doc.softLine, + Doc.rbrace, + }) + } + + if inline { + doc + } else { + Doc.group(doc) + } + } + + and printTupleType = (~inline, types: list, cmtTbl) => { + let tuple = Doc.concat(list{ + Doc.lparen, + Doc.indent( + Doc.concat(list{ + Doc.softLine, + Doc.join( + ~sep=Doc.concat(list{Doc.comma, Doc.line}), + List.map(typexpr => printTypExpr(typexpr, cmtTbl), types), + ), + }), + ), + Doc.trailingComma, + Doc.softLine, + Doc.rparen, + }) + + if inline === false { + Doc.group(tuple) + } else { + tuple + } + } + + and printObjectField = (field: Parsetree.object_field, cmtTbl) => + switch field { + | Otag(labelLoc, attrs, typ) => + let lbl = { + let doc = Doc.text("\"" ++ (labelLoc.txt ++ "\"")) + printComments(doc, cmtTbl, labelLoc.loc) + } + + let doc = Doc.concat(list{ + printAttributes(~loc=labelLoc.loc, attrs), + lbl, + Doc.text(": "), + printTypExpr(typ, cmtTbl), + }) + let cmtLoc = {...labelLoc.loc, loc_end: typ.ptyp_loc.loc_end} + printComments(doc, cmtTbl, cmtLoc) + | _ => Doc.nil + } + + /* es6 arrow type arg + * type t = (~foo: string, ~bar: float=?, unit) => unit + * i.e. ~foo: string, ~bar: float */ + and printTypeParameter = ((attrs, lbl, typ), cmtTbl) => { + let (isUncurried, attrs) = ParsetreeViewer.processUncurriedAttribute(attrs) + let uncurried = if isUncurried { + Doc.concat(list{Doc.dot, Doc.space}) + } else { + Doc.nil + } + let attrs = switch attrs { + | list{} => Doc.nil + | attrs => Doc.concat(list{Doc.join(~sep=Doc.line, List.map(printAttribute, attrs)), Doc.line}) + } + let label = switch lbl { + | Asttypes.Nolabel => Doc.nil + | Labelled(lbl) => Doc.concat(list{Doc.text("~"), printIdentLike(lbl), Doc.text(": ")}) + | Optional(lbl) => Doc.concat(list{Doc.text("~"), printIdentLike(lbl), Doc.text(": ")}) + } + + let optionalIndicator = switch lbl { + | Asttypes.Nolabel | Labelled(_) => Doc.nil + | Optional(_lbl) => Doc.text("=?") + } + + let doc = Doc.group( + Doc.concat(list{uncurried, attrs, label, printTypExpr(typ, cmtTbl), optionalIndicator}), + ) + printComments(doc, cmtTbl, typ.ptyp_loc) + } + + and printValueBinding = (~recFlag, vb, cmtTbl, i) => { + let (hasGenType, attrs) = ParsetreeViewer.splitGenTypeAttr(vb.pvb_attributes) + let attrs = printAttributes(~loc=vb.pvb_pat.ppat_loc, attrs) + let header = if i === 0 { + Doc.concat(list{ + if hasGenType { + Doc.text("export ") + } else { + Doc.text("let ") + }, + recFlag, + }) + } else { + Doc.concat(list{ + Doc.text("and "), + if hasGenType { + Doc.text("export ") + } else { + Doc.nil + }, + }) + } + + switch vb { + | { + pvb_pat: {ppat_desc: Ppat_constraint(pattern, {ptyp_desc: Ptyp_poly(_)})}, + pvb_expr: {pexp_desc: Pexp_newtype(_)} as expr, + } => + let (_attrs, parameters, returnExpr) = ParsetreeViewer.funExpr(expr) + let abstractType = switch parameters { + | list{NewTypes({locs: vars})} => + Doc.concat(list{ + Doc.text("type "), + Doc.join(~sep=Doc.space, List.map(var => Doc.text(var.Asttypes.txt), vars)), + Doc.dot, + }) + | _ => Doc.nil + } + + switch returnExpr.pexp_desc { + | Pexp_constraint(expr, typ) => + Doc.group( + Doc.concat(list{ + attrs, + header, + printPattern(pattern, cmtTbl), + Doc.text(":"), + Doc.indent( + Doc.concat(list{ + Doc.line, + abstractType, + Doc.space, + printTypExpr(typ, cmtTbl), + Doc.text(" ="), + Doc.concat(list{Doc.line, printExpressionWithComments(expr, cmtTbl)}), + }), + ), + }), + ) + | _ => Doc.nil + } + | _ => + let 
(optBraces, expr) = ParsetreeViewer.processBracesAttr(vb.pvb_expr) + let printedExpr = { + let doc = printExpressionWithComments(vb.pvb_expr, cmtTbl) + switch Parens.expr(vb.pvb_expr) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, expr, braces) + | Nothing => doc + } + } + + if ParsetreeViewer.isPipeExpr(vb.pvb_expr) { + Doc.customLayout(list{ + Doc.group( + Doc.concat(list{ + attrs, + header, + printPattern(vb.pvb_pat, cmtTbl), + Doc.text(" ="), + Doc.space, + printedExpr, + }), + ), + Doc.group( + Doc.concat(list{ + attrs, + header, + printPattern(vb.pvb_pat, cmtTbl), + Doc.text(" ="), + Doc.indent(Doc.concat(list{Doc.line, printedExpr})), + }), + ), + }) + } else { + let shouldIndent = switch optBraces { + | Some(_) => false + | _ => + ParsetreeViewer.isBinaryExpression(expr) || + switch vb.pvb_expr { + | { + pexp_attributes: list{({Location.txt: "ns.ternary"}, _)}, + pexp_desc: Pexp_ifthenelse(ifExpr, _, _), + } => + ParsetreeViewer.isBinaryExpression(ifExpr) || + ParsetreeViewer.hasAttributes(ifExpr.pexp_attributes) + | {pexp_desc: Pexp_newtype(_)} => false + | e => + ParsetreeViewer.hasAttributes(e.pexp_attributes) || ParsetreeViewer.isArrayAccess(e) + } + } + + Doc.group( + Doc.concat(list{ + attrs, + header, + printPattern(vb.pvb_pat, cmtTbl), + Doc.text(" ="), + if shouldIndent { + Doc.indent(Doc.concat(list{Doc.line, printedExpr})) + } else { + Doc.concat(list{Doc.space, printedExpr}) + }, + }), + ) + } + } + } + + and printPackageType = ( + ~printModuleKeywordAndParens, + packageType: Parsetree.package_type, + cmtTbl, + ) => { + let doc = switch packageType { + | (longidentLoc, list{}) => + Doc.group(Doc.concat(list{printLongidentLocation(longidentLoc, cmtTbl)})) + | (longidentLoc, packageConstraints) => + Doc.group( + Doc.concat(list{ + printLongidentLocation(longidentLoc, cmtTbl), + printPackageConstraints(packageConstraints, cmtTbl), + Doc.softLine, + }), + ) + } + + if printModuleKeywordAndParens { + Doc.concat(list{Doc.text("module("), doc, Doc.rparen}) + } else { + doc + } + } + + and printPackageConstraints = (packageConstraints, cmtTbl) => + Doc.concat(list{ + Doc.text(" with"), + Doc.indent(Doc.concat(list{Doc.line, Doc.join(~sep=Doc.line, List.mapi((i, pc) => { + let (longident, typexpr) = pc + let cmtLoc = { + ...longident.Asttypes.loc, + loc_end: typexpr.Parsetree.ptyp_loc.loc_end, + } + let doc = printPackageConstraint(i, cmtTbl, pc) + printComments(doc, cmtTbl, cmtLoc) + }, packageConstraints))})), + }) + + and printPackageConstraint = (i, cmtTbl, (longidentLoc, typ)) => { + let prefix = if i === 0 { + Doc.text("type ") + } else { + Doc.text("and type ") + } + Doc.concat(list{ + prefix, + printLongidentLocation(longidentLoc, cmtTbl), + Doc.text(" = "), + printTypExpr(typ, cmtTbl), + }) + } + + and printExtensionWithComments = (~atModuleLvl, (stringLoc, payload), cmtTbl) => { + let extName = { + let doc = Doc.concat(list{ + Doc.text("%"), + if atModuleLvl { + Doc.text("%") + } else { + Doc.nil + }, + Doc.text(stringLoc.Location.txt), + }) + printComments(doc, cmtTbl, stringLoc.Location.loc) + } + + switch payload { + | Parsetree.PStr(list{{pstr_desc: Pstr_eval(expr, attrs)}}) => + let exprDoc = printExpressionWithComments(expr, cmtTbl) + let needsParens = switch attrs { + | list{} => false + | _ => true + } + Doc.group( + Doc.concat(list{ + extName, + addParens( + Doc.concat(list{ + printAttributes(attrs), + if needsParens { + addParens(exprDoc) + } else { + exprDoc + }, + }), + ), + }), + ) + | _ => extName + } + } + + and 
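+ /* typical inputs (names are illustrative): Some(x), list{1, 2, ...rest}, (a, b), {name, age: _}, exception Not_found, lazy p */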
printPattern = (p: Parsetree.pattern, cmtTbl) => { + let patternWithoutAttributes = switch p.ppat_desc { + | Ppat_any => Doc.text("_") + | Ppat_var(var) => printIdentLike(var.txt) + | Ppat_constant(c) => printConstant(c) + | Ppat_tuple(patterns) => + Doc.group( + Doc.concat(list{ + Doc.lparen, + Doc.indent( + Doc.concat(list{ + Doc.softLine, + Doc.join( + ~sep=Doc.concat(list{Doc.text(","), Doc.line}), + List.map(pat => printPattern(pat, cmtTbl), patterns), + ), + }), + ), + Doc.trailingComma, + Doc.softLine, + Doc.rparen, + }), + ) + | Ppat_array(list{}) => + Doc.concat(list{Doc.lbracket, printCommentsInside(cmtTbl, p.ppat_loc), Doc.rbracket}) + | Ppat_array(patterns) => + Doc.group( + Doc.concat(list{ + Doc.text("["), + Doc.indent( + Doc.concat(list{ + Doc.softLine, + Doc.join( + ~sep=Doc.concat(list{Doc.text(","), Doc.line}), + List.map(pat => printPattern(pat, cmtTbl), patterns), + ), + }), + ), + Doc.trailingComma, + Doc.softLine, + Doc.text("]"), + }), + ) + | Ppat_construct({txt: Longident.Lident("()")}, _) => + Doc.concat(list{Doc.lparen, printCommentsInside(cmtTbl, p.ppat_loc), Doc.rparen}) + | Ppat_construct({txt: Longident.Lident("[]")}, _) => + Doc.concat(list{Doc.text("list["), printCommentsInside(cmtTbl, p.ppat_loc), Doc.rbracket}) + | Ppat_construct({txt: Longident.Lident("::")}, _) => + let (patterns, tail) = ParsetreeViewer.collectPatternsFromListConstruct(list{}, p) + let shouldHug = switch (patterns, tail) { + | (list{pat}, {ppat_desc: Ppat_construct({txt: Longident.Lident("[]")}, _)}) + when ParsetreeViewer.isHuggablePattern(pat) => true + | _ => false + } + + let children = Doc.concat(list{ + if shouldHug { + Doc.nil + } else { + Doc.softLine + }, + Doc.join( + ~sep=Doc.concat(list{Doc.text(","), Doc.line}), + List.map(pat => printPattern(pat, cmtTbl), patterns), + ), + switch tail.Parsetree.ppat_desc { + | Ppat_construct({txt: Longident.Lident("[]")}, _) => Doc.nil + | _ => + let doc = Doc.concat(list{Doc.text("..."), printPattern(tail, cmtTbl)}) + let tail = printComments(doc, cmtTbl, tail.ppat_loc) + Doc.concat(list{Doc.text(","), Doc.line, tail}) + }, + }) + Doc.group( + Doc.concat(list{ + Doc.text("list["), + if shouldHug { + children + } else { + Doc.concat(list{ + Doc.indent(children), + Doc.ifBreaks(Doc.text(","), Doc.nil), + Doc.softLine, + }) + }, + Doc.rbracket, + }), + ) + | Ppat_construct(constrName, constructorArgs) => + let constrName = printLongident(constrName.txt) + let argsDoc = switch constructorArgs { + | None => Doc.nil + | Some({ppat_loc, ppat_desc: Ppat_construct({txt: Longident.Lident("()")}, _)}) => + Doc.concat(list{Doc.lparen, printCommentsInside(cmtTbl, ppat_loc), Doc.rparen}) + | Some({ppat_desc: Ppat_tuple(list{}), ppat_loc: loc}) => + Doc.concat(list{Doc.lparen, Doc.softLine, printCommentsInside(cmtTbl, loc), Doc.rparen}) + /* Some((1, 2) */ + | Some({ppat_desc: Ppat_tuple(list{{ppat_desc: Ppat_tuple(_)} as arg})}) => + Doc.concat(list{Doc.lparen, printPattern(arg, cmtTbl), Doc.rparen}) + | Some({ppat_desc: Ppat_tuple(patterns)}) => + Doc.concat(list{ + Doc.lparen, + Doc.indent( + Doc.concat(list{ + Doc.softLine, + Doc.join( + ~sep=Doc.concat(list{Doc.comma, Doc.line}), + List.map(pat => printPattern(pat, cmtTbl), patterns), + ), + }), + ), + Doc.trailingComma, + Doc.softLine, + Doc.rparen, + }) + | Some(arg) => + let argDoc = printPattern(arg, cmtTbl) + let shouldHug = ParsetreeViewer.isHuggablePattern(arg) + Doc.concat(list{ + Doc.lparen, + if shouldHug { + argDoc + } else { + Doc.concat(list{ + Doc.indent(Doc.concat(list{Doc.softLine, 
argDoc})), + Doc.trailingComma, + Doc.softLine, + }) + }, + Doc.rparen, + }) + } + + Doc.group(Doc.concat(list{constrName, argsDoc})) + | Ppat_variant(label, None) => + Doc.concat(list{Doc.text("#"), printIdentLike(~allowUident=true, label)}) + | Ppat_variant(label, variantArgs) => + let variantName = Doc.concat(list{Doc.text("#"), printIdentLike(~allowUident=true, label)}) + let argsDoc = switch variantArgs { + | None => Doc.nil + | Some({ppat_desc: Ppat_construct({txt: Longident.Lident("()")}, _)}) => Doc.text("()") + | Some({ppat_desc: Ppat_tuple(list{}), ppat_loc: loc}) => + Doc.concat(list{Doc.lparen, Doc.softLine, printCommentsInside(cmtTbl, loc), Doc.rparen}) + /* Some((1, 2) */ + | Some({ppat_desc: Ppat_tuple(list{{ppat_desc: Ppat_tuple(_)} as arg})}) => + Doc.concat(list{Doc.lparen, printPattern(arg, cmtTbl), Doc.rparen}) + | Some({ppat_desc: Ppat_tuple(patterns)}) => + Doc.concat(list{ + Doc.lparen, + Doc.indent( + Doc.concat(list{ + Doc.softLine, + Doc.join( + ~sep=Doc.concat(list{Doc.comma, Doc.line}), + List.map(pat => printPattern(pat, cmtTbl), patterns), + ), + }), + ), + Doc.trailingComma, + Doc.softLine, + Doc.rparen, + }) + | Some(arg) => + let argDoc = printPattern(arg, cmtTbl) + let shouldHug = ParsetreeViewer.isHuggablePattern(arg) + Doc.concat(list{ + Doc.lparen, + if shouldHug { + argDoc + } else { + Doc.concat(list{ + Doc.indent(Doc.concat(list{Doc.softLine, argDoc})), + Doc.trailingComma, + Doc.softLine, + }) + }, + Doc.rparen, + }) + } + + Doc.group(Doc.concat(list{variantName, argsDoc})) + | Ppat_type(ident) => Doc.concat(list{Doc.text("##"), printIdentPath(ident, cmtTbl)}) + | Ppat_record(rows, openFlag) => + Doc.group( + Doc.concat(list{ + Doc.lbrace, + Doc.indent( + Doc.concat(list{ + Doc.softLine, + Doc.join( + ~sep=Doc.concat(list{Doc.text(","), Doc.line}), + List.map(row => printPatternRecordRow(row, cmtTbl), rows), + ), + switch openFlag { + | Open => Doc.concat(list{Doc.text(","), Doc.line, Doc.text("_")}) + | Closed => Doc.nil + }, + }), + ), + Doc.ifBreaks(Doc.text(","), Doc.nil), + Doc.softLine, + Doc.rbrace, + }), + ) + + | Ppat_exception(p) => + let needsParens = switch p.ppat_desc { + | Ppat_or(_, _) | Ppat_alias(_, _) => true + | _ => false + } + + let pat = { + let p = printPattern(p, cmtTbl) + if needsParens { + Doc.concat(list{Doc.text("("), p, Doc.text(")")}) + } else { + p + } + } + + Doc.group(Doc.concat(list{Doc.text("exception"), Doc.line, pat})) + | Ppat_or(_) => + /* Blue | Red | Green -> [Blue; Red; Green] */ + let orChain = ParsetreeViewer.collectOrPatternChain(p) + let docs = List.mapi((i, pat) => { + let patternDoc = printPattern(pat, cmtTbl) + Doc.concat(list{ + if i === 0 { + Doc.nil + } else { + Doc.concat(list{Doc.line, Doc.text("| ")}) + }, + switch pat.ppat_desc { + /* (Blue | Red) | (Green | Black) | White */ + | Ppat_or(_) => addParens(patternDoc) + | _ => patternDoc + }, + }) + }, orChain) + Doc.group(Doc.concat(docs)) + | Ppat_extension(ext) => printExtensionWithComments(~atModuleLvl=false, ext, cmtTbl) + | Ppat_lazy(p) => + let needsParens = switch p.ppat_desc { + | Ppat_or(_, _) | Ppat_alias(_, _) => true + | _ => false + } + + let pat = { + let p = printPattern(p, cmtTbl) + if needsParens { + Doc.concat(list{Doc.text("("), p, Doc.text(")")}) + } else { + p + } + } + + Doc.concat(list{Doc.text("lazy "), pat}) + | Ppat_alias(p, aliasLoc) => + let needsParens = switch p.ppat_desc { + | Ppat_or(_, _) | Ppat_alias(_, _) => true + | _ => false + } + + let renderedPattern = { + let p = printPattern(p, cmtTbl) + if needsParens { 
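/* e.g. the inner or-pattern of (A | B) as ab is wrapped so the grouping stays explicit */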
+ Doc.concat(list{Doc.text("("), p, Doc.text(")")}) + } else { + p + } + } + + Doc.concat(list{renderedPattern, Doc.text(" as "), printStringLoc(aliasLoc, cmtTbl)}) + + /* Note: module(P : S) is represented as */ + /* Ppat_constraint(Ppat_unpack, Ptyp_package) */ + | Ppat_constraint( + {ppat_desc: Ppat_unpack(stringLoc)}, + {ptyp_desc: Ptyp_package(packageType), ptyp_loc}, + ) => + Doc.concat(list{ + Doc.text("module("), + printComments(Doc.text(stringLoc.txt), cmtTbl, stringLoc.loc), + Doc.text(": "), + printComments( + printPackageType(~printModuleKeywordAndParens=false, packageType, cmtTbl), + cmtTbl, + ptyp_loc, + ), + Doc.rparen, + }) + | Ppat_constraint(pattern, typ) => + Doc.concat(list{printPattern(pattern, cmtTbl), Doc.text(": "), printTypExpr(typ, cmtTbl)}) + + /* Note: module(P : S) is represented as */ + /* Ppat_constraint(Ppat_unpack, Ptyp_package) */ + | Ppat_unpack(stringLoc) => + Doc.concat(list{ + Doc.text("module("), + printComments(Doc.text(stringLoc.txt), cmtTbl, stringLoc.loc), + Doc.rparen, + }) + | Ppat_interval(a, b) => Doc.concat(list{printConstant(a), Doc.text(" .. "), printConstant(b)}) + | Ppat_open(_) => Doc.nil + } + + let doc = switch p.ppat_attributes { + | list{} => patternWithoutAttributes + | attrs => Doc.group(Doc.concat(list{printAttributes(attrs), patternWithoutAttributes})) + } + + printComments(doc, cmtTbl, p.ppat_loc) + } + + and printPatternRecordRow = (row, cmtTbl) => + switch row { + /* punned {x} */ + | ( + {Location.txt: Longident.Lident(ident)} as longident, + {Parsetree.ppat_desc: Ppat_var({txt, _})}, + ) when ident == txt => + printLidentPath(longident, cmtTbl) + | (longident, pattern) => + let locForComments = { + ...longident.loc, + loc_end: pattern.Parsetree.ppat_loc.loc_end, + } + let doc = Doc.group( + Doc.concat(list{ + printLidentPath(longident, cmtTbl), + Doc.text(": "), + Doc.indent(Doc.concat(list{Doc.softLine, printPattern(pattern, cmtTbl)})), + }), + ) + printComments(doc, cmtTbl, locForComments) + } + + and printExpressionWithComments = (expr, cmtTbl) => { + let doc = printExpression(expr, cmtTbl) + printComments(doc, cmtTbl, expr.Parsetree.pexp_loc) + } + + and printExpression = (e: Parsetree.expression, cmtTbl) => { + let printedExpression = switch e.pexp_desc { + | Parsetree.Pexp_constant(c) => printConstant(c) + | Pexp_construct(_) when ParsetreeViewer.hasJsxAttribute(e.pexp_attributes) => + printJsxFragment(e, cmtTbl) + | Pexp_construct({txt: Longident.Lident("()")}, _) => Doc.text("()") + | Pexp_construct({txt: Longident.Lident("[]")}, _) => + Doc.concat(list{Doc.text("list["), printCommentsInside(cmtTbl, e.pexp_loc), Doc.rbracket}) + | Pexp_construct({txt: Longident.Lident("::")}, _) => + let (expressions, spread) = ParsetreeViewer.collectListExpressions(e) + let spreadDoc = switch spread { + | Some(expr) => + Doc.concat(list{ + Doc.text(","), + Doc.line, + Doc.dotdotdot, + { + let doc = printExpressionWithComments(expr, cmtTbl) + switch Parens.expr(expr) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, expr, braces) + | Nothing => doc + } + }, + }) + | None => Doc.nil + } + + Doc.group( + Doc.concat(list{ + Doc.text("list["), + Doc.indent( + Doc.concat(list{ + Doc.softLine, + Doc.join(~sep=Doc.concat(list{Doc.text(","), Doc.line}), List.map(expr => { + let doc = printExpressionWithComments(expr, cmtTbl) + switch Parens.expr(expr) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, expr, braces) + | Nothing => doc + } + }, expressions)), + spreadDoc, + }), 
+ ), + Doc.trailingComma, + Doc.softLine, + Doc.rbracket, + }), + ) + | Pexp_construct(longidentLoc, args) => + let constr = printLongidentLocation(longidentLoc, cmtTbl) + let args = switch args { + | None => Doc.nil + | Some({pexp_desc: Pexp_construct({txt: Longident.Lident("()")}, _)}) => Doc.text("()") + /* Some((1, 2)) */ + | Some({pexp_desc: Pexp_tuple(list{{pexp_desc: Pexp_tuple(_)} as arg})}) => + Doc.concat(list{ + Doc.lparen, + { + let doc = printExpressionWithComments(arg, cmtTbl) + switch Parens.expr(arg) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, arg, braces) + | Nothing => doc + } + }, + Doc.rparen, + }) + | Some({pexp_desc: Pexp_tuple(args)}) => + Doc.concat(list{ + Doc.lparen, + Doc.indent( + Doc.concat(list{ + Doc.softLine, + Doc.join(~sep=Doc.concat(list{Doc.comma, Doc.line}), List.map(expr => { + let doc = printExpressionWithComments(expr, cmtTbl) + switch Parens.expr(expr) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, expr, braces) + | Nothing => doc + } + }, args)), + }), + ), + Doc.trailingComma, + Doc.softLine, + Doc.rparen, + }) + | Some(arg) => + let argDoc = { + let doc = printExpressionWithComments(arg, cmtTbl) + switch Parens.expr(arg) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, arg, braces) + | Nothing => doc + } + } + + let shouldHug = ParsetreeViewer.isHuggableExpression(arg) + Doc.concat(list{ + Doc.lparen, + if shouldHug { + argDoc + } else { + Doc.concat(list{ + Doc.indent(Doc.concat(list{Doc.softLine, argDoc})), + Doc.trailingComma, + Doc.softLine, + }) + }, + Doc.rparen, + }) + } + + Doc.group(Doc.concat(list{constr, args})) + | Pexp_ident(path) => printLidentPath(path, cmtTbl) + | Pexp_tuple(exprs) => + Doc.group( + Doc.concat(list{ + Doc.lparen, + Doc.indent( + Doc.concat(list{ + Doc.softLine, + Doc.join(~sep=Doc.concat(list{Doc.text(","), Doc.line}), List.map(expr => { + let doc = printExpressionWithComments(expr, cmtTbl) + switch Parens.expr(expr) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, expr, braces) + | Nothing => doc + } + }, exprs)), + }), + ), + Doc.ifBreaks(Doc.text(","), Doc.nil), + Doc.softLine, + Doc.rparen, + }), + ) + | Pexp_array(list{}) => + Doc.concat(list{Doc.lbracket, printCommentsInside(cmtTbl, e.pexp_loc), Doc.rbracket}) + | Pexp_array(exprs) => + Doc.group( + Doc.concat(list{ + Doc.lbracket, + Doc.indent( + Doc.concat(list{ + Doc.softLine, + Doc.join(~sep=Doc.concat(list{Doc.text(","), Doc.line}), List.map(expr => { + let doc = printExpressionWithComments(expr, cmtTbl) + switch Parens.expr(expr) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, expr, braces) + | Nothing => doc + } + }, exprs)), + }), + ), + Doc.trailingComma, + Doc.softLine, + Doc.rbracket, + }), + ) + | Pexp_variant(label, args) => + let variantName = Doc.concat(list{Doc.text("#"), printIdentLike(~allowUident=true, label)}) + let args = switch args { + | None => Doc.nil + | Some({pexp_desc: Pexp_construct({txt: Longident.Lident("()")}, _)}) => Doc.text("()") + /* #poly((1, 2) */ + | Some({pexp_desc: Pexp_tuple(list{{pexp_desc: Pexp_tuple(_)} as arg})}) => + Doc.concat(list{ + Doc.lparen, + { + let doc = printExpressionWithComments(arg, cmtTbl) + switch Parens.expr(arg) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, arg, braces) + | Nothing => doc + } + }, + Doc.rparen, + }) + | Some({pexp_desc: Pexp_tuple(args)}) => + Doc.concat(list{ + 
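/* e.g. a multi-argument polymorphic variant like #point(1.0, 2.0) prints its payload inside a single pair of parens (values are illustrative) */ +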
Doc.lparen, + Doc.indent( + Doc.concat(list{ + Doc.softLine, + Doc.join(~sep=Doc.concat(list{Doc.comma, Doc.line}), List.map(expr => { + let doc = printExpressionWithComments(expr, cmtTbl) + switch Parens.expr(expr) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, expr, braces) + | Nothing => doc + } + }, args)), + }), + ), + Doc.trailingComma, + Doc.softLine, + Doc.rparen, + }) + | Some(arg) => + let argDoc = { + let doc = printExpressionWithComments(arg, cmtTbl) + switch Parens.expr(arg) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, arg, braces) + | Nothing => doc + } + } + + let shouldHug = ParsetreeViewer.isHuggableExpression(arg) + Doc.concat(list{ + Doc.lparen, + if shouldHug { + argDoc + } else { + Doc.concat(list{ + Doc.indent(Doc.concat(list{Doc.softLine, argDoc})), + Doc.trailingComma, + Doc.softLine, + }) + }, + Doc.rparen, + }) + } + + Doc.group(Doc.concat(list{variantName, args})) + | Pexp_record(rows, spreadExpr) => + let spread = switch spreadExpr { + | None => Doc.nil + | Some(expr) => + Doc.concat(list{ + Doc.dotdotdot, + { + let doc = printExpressionWithComments(expr, cmtTbl) + switch Parens.expr(expr) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, expr, braces) + | Nothing => doc + } + }, + Doc.comma, + Doc.line, + }) + } + + /* If the record is written over multiple lines, break automatically + * `let x = {a: 1, b: 3}` -> same line, break when line-width exceeded + * `let x = { + * a: 1, + * b: 2, + * }` -> record is written on multiple lines, break the group */ + let forceBreak = e.pexp_loc.loc_start.pos_lnum < e.pexp_loc.loc_end.pos_lnum + + Doc.breakableGroup( + ~forceBreak, + Doc.concat(list{ + Doc.lbrace, + Doc.indent( + Doc.concat(list{ + Doc.softLine, + spread, + Doc.join( + ~sep=Doc.concat(list{Doc.text(","), Doc.line}), + List.map(row => printRecordRow(row, cmtTbl), rows), + ), + }), + ), + Doc.trailingComma, + Doc.softLine, + Doc.rbrace, + }), + ) + | Pexp_extension(extension) => + switch extension { + | ( + {txt: "bs.obj"}, + PStr(list{{ + pstr_loc: loc, + pstr_desc: Pstr_eval({pexp_desc: Pexp_record(rows, _)}, list{}), + }}), + ) => + /* If the object is written over multiple lines, break automatically + * `let x = {"a": 1, "b": 3}` -> same line, break when line-width exceeded + * `let x = { + * "a": 1, + * "b": 2, + * }` -> object is written on multiple lines, break the group */ + let forceBreak = loc.loc_start.pos_lnum < loc.loc_end.pos_lnum + + Doc.breakableGroup( + ~forceBreak, + Doc.concat(list{ + Doc.lbrace, + Doc.indent( + Doc.concat(list{ + Doc.softLine, + Doc.join( + ~sep=Doc.concat(list{Doc.text(","), Doc.line}), + List.map(row => printBsObjectRow(row, cmtTbl), rows), + ), + }), + ), + Doc.trailingComma, + Doc.softLine, + Doc.rbrace, + }), + ) + | extension => printExtensionWithComments(~atModuleLvl=false, extension, cmtTbl) + } + | Pexp_apply(_) => + if ParsetreeViewer.isUnaryExpression(e) { + printUnaryExpression(e, cmtTbl) + } else if ParsetreeViewer.isTemplateLiteral(e) { + printTemplateLiteral(e, cmtTbl) + } else if ParsetreeViewer.isBinaryExpression(e) { + printBinaryExpression(e, cmtTbl) + } else { + printPexpApply(e, cmtTbl) + } + | Pexp_unreachable => Doc.dot + | Pexp_field(expr, longidentLoc) => + let lhs = { + let doc = printExpressionWithComments(expr, cmtTbl) + switch Parens.fieldExpr(expr) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, expr, braces) + | Nothing => doc + } + } + + 
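/* e.g. a field access chain like user.address.city: lhs first, then a dot, then the field path (names invented) */ +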
Doc.concat(list{lhs, Doc.dot, printLidentPath(longidentLoc, cmtTbl)}) + | Pexp_setfield(expr1, longidentLoc, expr2) => + printSetFieldExpr(e.pexp_attributes, expr1, longidentLoc, expr2, e.pexp_loc, cmtTbl) + | Pexp_ifthenelse(_ifExpr, _thenExpr, _elseExpr) => + if ParsetreeViewer.isTernaryExpr(e) { + let (parts, alternate) = ParsetreeViewer.collectTernaryParts(e) + let ternaryDoc = switch parts { + | list{(condition1, consequent1), ...rest} => + Doc.group( + Doc.concat(list{ + printTernaryOperand(condition1, cmtTbl), + Doc.indent( + Doc.concat(list{ + Doc.line, + Doc.indent( + Doc.concat(list{Doc.text("? "), printTernaryOperand(consequent1, cmtTbl)}), + ), + Doc.concat( + List.map( + ((condition, consequent)) => + Doc.concat(list{ + Doc.line, + Doc.text(": "), + printTernaryOperand(condition, cmtTbl), + Doc.line, + Doc.text("? "), + printTernaryOperand(consequent, cmtTbl), + }), + rest, + ), + ), + Doc.line, + Doc.text(": "), + Doc.indent(printTernaryOperand(alternate, cmtTbl)), + }), + ), + }), + ) + | _ => Doc.nil + } + + let attrs = ParsetreeViewer.filterTernaryAttributes(e.pexp_attributes) + let needsParens = switch ParsetreeViewer.filterParsingAttrs(attrs) { + | list{} => false + | _ => true + } + + Doc.concat(list{ + printAttributes(attrs), + if needsParens { + addParens(ternaryDoc) + } else { + ternaryDoc + }, + }) + } else { + let (ifs, elseExpr) = ParsetreeViewer.collectIfExpressions(e) + let ifDocs = Doc.join(~sep=Doc.space, List.mapi((i, (ifExpr, thenExpr)) => { + let ifTxt = if i > 0 { + Doc.text("else if ") + } else { + Doc.text("if ") + } + let condition = if ParsetreeViewer.isBlockExpr(ifExpr) { + printExpressionBlock(~braces=true, ifExpr, cmtTbl) + } else { + let doc = printExpressionWithComments(ifExpr, cmtTbl) + switch Parens.expr(ifExpr) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, ifExpr, braces) + | Nothing => Doc.ifBreaks(addParens(doc), doc) + } + } + + Doc.concat(list{ + ifTxt, + Doc.group(condition), + Doc.space, + { + let thenExpr = switch ParsetreeViewer.processBracesAttr(thenExpr) { + /* This case only happens when coming from Reason, we strip braces */ + | (Some(_), expr) => expr + | _ => thenExpr + } + + printExpressionBlock(~braces=true, thenExpr, cmtTbl) + }, + }) + }, ifs)) + let elseDoc = switch elseExpr { + | None => Doc.nil + | Some(expr) => + Doc.concat(list{Doc.text(" else "), printExpressionBlock(~braces=true, expr, cmtTbl)}) + } + + Doc.concat(list{printAttributes(e.pexp_attributes), ifDocs, elseDoc}) + } + | Pexp_while(expr1, expr2) => + let condition = { + let doc = printExpressionWithComments(expr1, cmtTbl) + switch Parens.expr(expr1) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, expr1, braces) + | Nothing => doc + } + } + + Doc.breakableGroup( + ~forceBreak=true, + Doc.concat(list{ + Doc.text("while "), + if ParsetreeViewer.isBlockExpr(expr1) { + condition + } else { + Doc.group(Doc.ifBreaks(addParens(condition), condition)) + }, + Doc.space, + printExpressionBlock(~braces=true, expr2, cmtTbl), + }), + ) + | Pexp_for(pattern, fromExpr, toExpr, directionFlag, body) => + Doc.breakableGroup( + ~forceBreak=true, + Doc.concat(list{ + Doc.text("for "), + printPattern(pattern, cmtTbl), + Doc.text(" in "), + { + let doc = printExpressionWithComments(fromExpr, cmtTbl) + switch Parens.expr(fromExpr) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, fromExpr, braces) + | Nothing => doc + } + }, + printDirectionFlag(directionFlag), + { + let doc = 
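/* e.g. for i in 0 to count - 1 { body }; the direction flag picks " to " or " downto " (count is an invented name) */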
printExpressionWithComments(toExpr, cmtTbl) + switch Parens.expr(toExpr) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, toExpr, braces) + | Nothing => doc + } + }, + Doc.space, + printExpressionBlock(~braces=true, body, cmtTbl), + }), + ) + | Pexp_constraint( + {pexp_desc: Pexp_pack(modExpr)}, + {ptyp_desc: Ptyp_package(packageType), ptyp_loc}, + ) => + Doc.group( + Doc.concat(list{ + Doc.text("module("), + Doc.indent( + Doc.concat(list{ + Doc.softLine, + printModExpr(modExpr, cmtTbl), + Doc.text(": "), + printComments( + printPackageType(~printModuleKeywordAndParens=false, packageType, cmtTbl), + cmtTbl, + ptyp_loc, + ), + }), + ), + Doc.softLine, + Doc.rparen, + }), + ) + + | Pexp_constraint(expr, typ) => + let exprDoc = { + let doc = printExpressionWithComments(expr, cmtTbl) + switch Parens.expr(expr) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, expr, braces) + | Nothing => doc + } + } + + Doc.concat(list{exprDoc, Doc.text(": "), printTypExpr(typ, cmtTbl)}) + | Pexp_letmodule({txt: _modName}, _modExpr, _expr) => + printExpressionBlock(~braces=true, e, cmtTbl) + | Pexp_letexception(_extensionConstructor, _expr) => + printExpressionBlock(~braces=true, e, cmtTbl) + | Pexp_assert(expr) => + let rhs = { + let doc = printExpressionWithComments(expr, cmtTbl) + switch Parens.lazyOrAssertExprRhs(expr) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, expr, braces) + | Nothing => doc + } + } + + Doc.concat(list{Doc.text("assert "), rhs}) + | Pexp_lazy(expr) => + let rhs = { + let doc = printExpressionWithComments(expr, cmtTbl) + switch Parens.lazyOrAssertExprRhs(expr) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, expr, braces) + | Nothing => doc + } + } + + Doc.group(Doc.concat(list{Doc.text("lazy "), rhs})) + | Pexp_open(_overrideFlag, _longidentLoc, _expr) => + printExpressionBlock(~braces=true, e, cmtTbl) + | Pexp_pack(modExpr) => + Doc.group( + Doc.concat(list{ + Doc.text("module("), + Doc.indent(Doc.concat(list{Doc.softLine, printModExpr(modExpr, cmtTbl)})), + Doc.softLine, + Doc.rparen, + }), + ) + | Pexp_sequence(_) => printExpressionBlock(~braces=true, e, cmtTbl) + | Pexp_let(_) => printExpressionBlock(~braces=true, e, cmtTbl) + | Pexp_fun(Nolabel, None, {ppat_desc: Ppat_var({txt: "__x"})}, {pexp_desc: Pexp_apply(_)}) => + /* (__x) => f(a, __x, c) -----> f(a, _, c) */ + printExpressionWithComments(ParsetreeViewer.rewriteUnderscoreApply(e), cmtTbl) + | Pexp_fun(_) | Pexp_newtype(_) => + let (attrsOnArrow, parameters, returnExpr) = ParsetreeViewer.funExpr(e) + let (uncurried, attrs) = ParsetreeViewer.processUncurriedAttribute(attrsOnArrow) + + let (returnExpr, typConstraint) = switch returnExpr.pexp_desc { + | Pexp_constraint(expr, typ) => ( + { + ...expr, + pexp_attributes: List.concat(list{expr.pexp_attributes, returnExpr.pexp_attributes}), + }, + Some(typ), + ) + | _ => (returnExpr, None) + } + + let hasConstraint = switch typConstraint { + | Some(_) => true + | None => false + } + let parametersDoc = printExprFunParameters( + ~inCallback=false, + ~uncurried, + ~hasConstraint, + parameters, + cmtTbl, + ) + + let returnExprDoc = { + let (optBraces, _) = ParsetreeViewer.processBracesAttr(returnExpr) + let shouldInline = switch (returnExpr.pexp_desc, optBraces) { + | (_, Some(_)) => true + | (Pexp_array(_) | Pexp_tuple(_) | Pexp_construct(_, Some(_)) | Pexp_record(_), _) => true + | _ => false + } + + let shouldIndent = switch 
returnExpr.pexp_desc { + | Pexp_sequence(_) + | Pexp_let(_) + | Pexp_letmodule(_) + | Pexp_letexception(_) + | Pexp_open(_) => false + | _ => true + } + + let returnDoc = { + let doc = printExpressionWithComments(returnExpr, cmtTbl) + switch Parens.expr(returnExpr) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, returnExpr, braces) + | Nothing => doc + } + } + + if shouldInline { + Doc.concat(list{Doc.space, returnDoc}) + } else { + Doc.group( + if shouldIndent { + Doc.indent(Doc.concat(list{Doc.line, returnDoc})) + } else { + Doc.concat(list{Doc.space, returnDoc}) + }, + ) + } + } + + let typConstraintDoc = switch typConstraint { + | Some(typ) => Doc.concat(list{Doc.text(": "), printTypExpr(typ, cmtTbl)}) + | _ => Doc.nil + } + + let attrs = printAttributes(attrs) + Doc.group( + Doc.concat(list{attrs, parametersDoc, typConstraintDoc, Doc.text(" =>"), returnExprDoc}), + ) + | Pexp_try(expr, cases) => + let exprDoc = { + let doc = printExpressionWithComments(expr, cmtTbl) + switch Parens.expr(expr) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, expr, braces) + | Nothing => doc + } + } + + Doc.concat(list{Doc.text("try "), exprDoc, Doc.text(" catch "), printCases(cases, cmtTbl)}) + | Pexp_match(expr, cases) => + let exprDoc = { + let doc = printExpressionWithComments(expr, cmtTbl) + switch Parens.expr(expr) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, expr, braces) + | Nothing => doc + } + } + + Doc.concat(list{Doc.text("switch "), exprDoc, Doc.space, printCases(cases, cmtTbl)}) + | Pexp_function(cases) => + Doc.concat(list{Doc.text("x => switch x "), printCases(cases, cmtTbl)}) + | Pexp_coerce(expr, typOpt, typ) => + let docExpr = printExpressionWithComments(expr, cmtTbl) + let docTyp = printTypExpr(typ, cmtTbl) + let ofType = switch typOpt { + | None => Doc.nil + | Some(typ1) => Doc.concat(list{Doc.text(": "), printTypExpr(typ1, cmtTbl)}) + } + + Doc.concat(list{Doc.lparen, docExpr, ofType, Doc.text(" :> "), docTyp, Doc.rparen}) + | Pexp_send(_) => Doc.text("Pexp_send not impemented in printer") + | Pexp_new(_) => Doc.text("Pexp_new not impemented in printer") + | Pexp_setinstvar(_) => Doc.text("Pexp_setinstvar not impemented in printer") + | Pexp_override(_) => Doc.text("Pexp_override not impemented in printer") + | Pexp_poly(_) => Doc.text("Pexp_poly not impemented in printer") + | Pexp_object(_) => Doc.text("Pexp_object not impemented in printer") + } + + let shouldPrintItsOwnAttributes = switch e.pexp_desc { + | Pexp_apply(_) | Pexp_fun(_) | Pexp_newtype(_) | Pexp_setfield(_) | Pexp_ifthenelse(_) => true + | Pexp_construct(_) when ParsetreeViewer.hasJsxAttribute(e.pexp_attributes) => true + | _ => false + } + + switch e.pexp_attributes { + | list{} => printedExpression + | attrs when !shouldPrintItsOwnAttributes => + Doc.group(Doc.concat(list{printAttributes(attrs), printedExpression})) + | _ => printedExpression + } + } + + and printPexpFun = (~inCallback, e, cmtTbl) => { + let (attrsOnArrow, parameters, returnExpr) = ParsetreeViewer.funExpr(e) + let (uncurried, attrs) = ParsetreeViewer.processUncurriedAttribute(attrsOnArrow) + + let (returnExpr, typConstraint) = switch returnExpr.pexp_desc { + | Pexp_constraint(expr, typ) => ( + { + ...expr, + pexp_attributes: List.concat(list{expr.pexp_attributes, returnExpr.pexp_attributes}), + }, + Some(typ), + ) + | _ => (returnExpr, None) + } + + let parametersDoc = printExprFunParameters( + ~inCallback, + ~uncurried, + 
~hasConstraint=switch typConstraint { + | Some(_) => true + | None => false + }, + parameters, + cmtTbl, + ) + let returnShouldIndent = switch returnExpr.pexp_desc { + | Pexp_sequence(_) + | Pexp_let(_) + | Pexp_letmodule(_) + | Pexp_letexception(_) + | Pexp_open(_) => false + | _ => true + } + + let returnExprDoc = { + let (optBraces, _) = ParsetreeViewer.processBracesAttr(returnExpr) + let shouldInline = switch (returnExpr.pexp_desc, optBraces) { + | (_, Some(_)) => true + | (Pexp_array(_) | Pexp_tuple(_) | Pexp_construct(_, Some(_)) | Pexp_record(_), _) => true + | _ => false + } + + let returnDoc = { + let doc = printExpressionWithComments(returnExpr, cmtTbl) + switch Parens.expr(returnExpr) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, returnExpr, braces) + | Nothing => doc + } + } + + if shouldInline { + Doc.concat(list{Doc.space, returnDoc}) + } else { + Doc.group( + if returnShouldIndent { + Doc.concat(list{ + Doc.indent(Doc.concat(list{Doc.line, returnDoc})), + if inCallback { + Doc.softLine + } else { + Doc.nil + }, + }) + } else { + Doc.concat(list{Doc.space, returnDoc}) + }, + ) + } + } + + let typConstraintDoc = switch typConstraint { + | Some(typ) => Doc.concat(list{Doc.text(": "), printTypExpr(typ, cmtTbl)}) + | _ => Doc.nil + } + + Doc.group( + Doc.concat(list{ + printAttributes(attrs), + parametersDoc, + typConstraintDoc, + Doc.text(" =>"), + returnExprDoc, + }), + ) + } + + and printTernaryOperand = (expr, cmtTbl) => { + let doc = printExpressionWithComments(expr, cmtTbl) + switch Parens.ternaryOperand(expr) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, expr, braces) + | Nothing => doc + } + } + + and printSetFieldExpr = (attrs, lhs, longidentLoc, rhs, loc, cmtTbl) => { + let rhsDoc = { + let doc = printExpressionWithComments(rhs, cmtTbl) + switch Parens.setFieldExprRhs(rhs) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, rhs, braces) + | Nothing => doc + } + } + + let lhsDoc = { + let doc = printExpressionWithComments(lhs, cmtTbl) + switch Parens.fieldExpr(lhs) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, lhs, braces) + | Nothing => doc + } + } + + let shouldIndent = ParsetreeViewer.isBinaryExpression(rhs) + let doc = Doc.group( + Doc.concat(list{ + lhsDoc, + Doc.dot, + printLidentPath(longidentLoc, cmtTbl), + Doc.text(" ="), + if shouldIndent { + Doc.group(Doc.indent(Doc.concat(list{Doc.line, rhsDoc}))) + } else { + Doc.concat(list{Doc.space, rhsDoc}) + }, + }), + ) + let doc = switch attrs { + | list{} => doc + | attrs => Doc.group(Doc.concat(list{printAttributes(attrs), doc})) + } + + printComments(doc, cmtTbl, loc) + } + + and printTemplateLiteral = (expr, cmtTbl) => { + let tag = ref("j") + let rec walkExpr = expr => { + open Parsetree + switch expr.pexp_desc { + | Pexp_apply( + {pexp_desc: Pexp_ident({txt: Longident.Lident("^")})}, + list{(Nolabel, arg1), (Nolabel, arg2)}, + ) => + let lhs = walkExpr(arg1) + let rhs = walkExpr(arg2) + Doc.concat(list{lhs, rhs}) + | Pexp_constant(Pconst_string(txt, Some(prefix))) => + tag := prefix + Doc.text(txt) + | _ => + let doc = printExpressionWithComments(expr, cmtTbl) + Doc.concat(list{Doc.text("${"), doc, Doc.rbrace}) + } + } + + let content = walkExpr(expr) + Doc.concat(list{ + if tag.contents == "j" { + Doc.nil + } else { + Doc.text(tag.contents) + }, + Doc.text("`"), + content, + Doc.text("`"), + }) + } + + and printUnaryExpression = (expr, cmtTbl) => { + let 
printUnaryOperator = op => + Doc.text( + switch op { + | "~+" => "+" + | "~+." => "+." + | "~-" => "-" + | "~-." => "-." + | "not" => "!" + | _ => assert false + }, + ) + switch expr.pexp_desc { + | Pexp_apply( + {pexp_desc: Pexp_ident({txt: Longident.Lident(operator)})}, + list{(Nolabel, operand)}, + ) => + let printedOperand = { + let doc = printExpressionWithComments(operand, cmtTbl) + switch Parens.unaryExprOperand(operand) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, operand, braces) + | Nothing => doc + } + } + + let doc = Doc.concat(list{printUnaryOperator(operator), printedOperand}) + printComments(doc, cmtTbl, expr.pexp_loc) + | _ => assert false + } + } + + and printBinaryExpression = (expr: Parsetree.expression, cmtTbl) => { + let printBinaryOperator = (~inlineRhs, operator) => { + let operatorTxt = switch operator { + | "|." => "->" + | "^" => "++" + | "=" => "==" + | "==" => "===" + | "<>" => "!=" + | "!=" => "!==" + | txt => txt + } + + let spacingBeforeOperator = if operator == "|." { + Doc.softLine + } else if operator == "|>" { + Doc.line + } else { + Doc.space + } + + let spacingAfterOperator = if operator == "|." { + Doc.nil + } else if operator == "|>" { + Doc.space + } else if inlineRhs { + Doc.space + } else { + Doc.line + } + + Doc.concat(list{spacingBeforeOperator, Doc.text(operatorTxt), spacingAfterOperator}) + } + + let printOperand = (~isLhs, expr, parentOperator) => { + let rec flatten = (~isLhs, expr, parentOperator) => + if ParsetreeViewer.isBinaryExpression(expr) { + switch expr { + | { + pexp_desc: + Pexp_apply( + {pexp_desc: Pexp_ident({txt: Longident.Lident(operator)})}, + list{(_, left), (_, right)}, + ), + } => + if ( + ParsetreeViewer.flattenableOperators(parentOperator, operator) && + !ParsetreeViewer.hasAttributes(expr.pexp_attributes) + ) { + let leftPrinted = flatten(~isLhs=true, left, operator) + let rightPrinted = { + let (_, rightAttrs) = ParsetreeViewer.partitionPrinteableAttributes( + right.pexp_attributes, + ) + + let doc = printExpressionWithComments( + {...right, pexp_attributes: rightAttrs}, + cmtTbl, + ) + + let doc = if Parens.flattenOperandRhs(parentOperator, right) { + Doc.concat(list{Doc.lparen, doc, Doc.rparen}) + } else { + doc + } + + let printeableAttrs = ParsetreeViewer.filterPrinteableAttributes( + right.pexp_attributes, + ) + + Doc.concat(list{printAttributes(printeableAttrs), doc}) + } + + let doc = Doc.concat(list{ + leftPrinted, + printBinaryOperator(~inlineRhs=false, operator), + rightPrinted, + }) + let doc = if !isLhs && Parens.rhsBinaryExprOperand(operator, expr) { + Doc.concat(list{Doc.lparen, doc, Doc.rparen}) + } else { + doc + } + + printComments(doc, cmtTbl, expr.pexp_loc) + } else { + let doc = printExpressionWithComments({...expr, pexp_attributes: list{}}, cmtTbl) + let doc = if ( + Parens.subBinaryExprOperand(parentOperator, operator) || + (expr.pexp_attributes != list{} && + (ParsetreeViewer.isBinaryExpression(expr) || ParsetreeViewer.isTernaryExpr(expr))) + ) { + Doc.concat(list{Doc.lparen, doc, Doc.rparen}) + } else { + doc + } + Doc.concat(list{printAttributes(expr.pexp_attributes), doc}) + } + | _ => assert false + } + } else { + switch expr.pexp_desc { + | Pexp_setfield(lhs, field, rhs) => + let doc = printSetFieldExpr( + expr.pexp_attributes, + lhs, + field, + rhs, + expr.pexp_loc, + cmtTbl, + ) + if isLhs { + addParens(doc) + } else { + doc + } + | Pexp_apply( + {pexp_desc: Pexp_ident({txt: Longident.Lident("#=")})}, + list{(Nolabel, lhs), (Nolabel, rhs)}, + ) => + 
let rhsDoc = printExpressionWithComments(rhs, cmtTbl) + let lhsDoc = printExpressionWithComments(lhs, cmtTbl) + /* TODO: unify indentation of "=" */ + let shouldIndent = ParsetreeViewer.isBinaryExpression(rhs) + let doc = Doc.group( + Doc.concat(list{ + lhsDoc, + Doc.text(" ="), + if shouldIndent { + Doc.group(Doc.indent(Doc.concat(list{Doc.line, rhsDoc}))) + } else { + Doc.concat(list{Doc.space, rhsDoc}) + }, + }), + ) + let doc = switch expr.pexp_attributes { + | list{} => doc + | attrs => Doc.group(Doc.concat(list{printAttributes(attrs), doc})) + } + + if isLhs { + addParens(doc) + } else { + doc + } + | _ => + let doc = printExpressionWithComments(expr, cmtTbl) + switch Parens.binaryExprOperand(~isLhs, expr) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, expr, braces) + | Nothing => doc + } + } + } + + flatten(~isLhs, expr, parentOperator) + } + + switch expr.pexp_desc { + | Pexp_apply( + {pexp_desc: Pexp_ident({txt: Longident.Lident(("|." | "|>") as op)})}, + list{(Nolabel, lhs), (Nolabel, rhs)}, + ) + when !(ParsetreeViewer.isBinaryExpression(lhs) || ParsetreeViewer.isBinaryExpression(rhs)) => + let lhsDoc = printOperand(~isLhs=true, lhs, op) + let rhsDoc = printOperand(~isLhs=false, rhs, op) + Doc.group( + Doc.concat(list{ + lhsDoc, + switch op { + | "|." => Doc.text("->") + | "|>" => Doc.text(" |> ") + | _ => assert false + }, + rhsDoc, + }), + ) + | Pexp_apply( + {pexp_desc: Pexp_ident({txt: Longident.Lident(operator)})}, + list{(Nolabel, lhs), (Nolabel, rhs)}, + ) => + let right = { + let operatorWithRhs = { + let rhsDoc = printOperand(~isLhs=false, rhs, operator) + Doc.concat(list{ + printBinaryOperator( + ~inlineRhs=ParsetreeViewer.shouldInlineRhsBinaryExpr(rhs), + operator, + ), + rhsDoc, + }) + } + if ParsetreeViewer.shouldIndentBinaryExpr(expr) { + Doc.group(Doc.indent(operatorWithRhs)) + } else { + operatorWithRhs + } + } + + let doc = Doc.group(Doc.concat(list{printOperand(~isLhs=true, lhs, operator), right})) + Doc.group( + Doc.concat(list{ + printAttributes(expr.pexp_attributes), + switch Parens.binaryExpr({ + ...expr, + pexp_attributes: List.filter(attr => + switch attr { + | ({Location.txt: "ns.braces"}, _) => false + | _ => true + } + , expr.pexp_attributes), + }) { + | Braced(bracesLoc) => printBraces(doc, expr, bracesLoc) + | Parenthesized => addParens(doc) + | Nothing => doc + }, + }), + ) + | _ => Doc.nil + } + } + + /* callExpr(arg1, arg2) */ + and printPexpApply = (expr, cmtTbl) => + switch expr.pexp_desc { + | Pexp_apply( + {pexp_desc: Pexp_ident({txt: Longident.Lident("##")})}, + list{(Nolabel, parentExpr), (Nolabel, memberExpr)}, + ) => + let parentDoc = { + let doc = printExpressionWithComments(parentExpr, cmtTbl) + switch Parens.unaryExprOperand(parentExpr) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, parentExpr, braces) + | Nothing => doc + } + } + + let member = { + let memberDoc = switch memberExpr.pexp_desc { + | Pexp_ident(lident) => + printComments(printLongident(lident.txt), cmtTbl, memberExpr.pexp_loc) + | _ => printExpressionWithComments(memberExpr, cmtTbl) + } + + Doc.concat(list{Doc.text("\""), memberDoc, Doc.text("\"")}) + } + + Doc.group( + Doc.concat(list{ + printAttributes(expr.pexp_attributes), + parentDoc, + Doc.lbracket, + member, + Doc.rbracket, + }), + ) + | Pexp_apply( + {pexp_desc: Pexp_ident({txt: Longident.Lident("#=")})}, + list{(Nolabel, lhs), (Nolabel, rhs)}, + ) => + let rhsDoc = { + let doc = printExpressionWithComments(rhs, cmtTbl) + switch 
Parens.expr(rhs) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, rhs, braces) + | Nothing => doc + } + } + + /* TODO: unify indentation of "=" */ + let shouldIndent = + !ParsetreeViewer.isBracedExpr(rhs) && ParsetreeViewer.isBinaryExpression(rhs) + let doc = Doc.group( + Doc.concat(list{ + printExpressionWithComments(lhs, cmtTbl), + Doc.text(" ="), + if shouldIndent { + Doc.group(Doc.indent(Doc.concat(list{Doc.line, rhsDoc}))) + } else { + Doc.concat(list{Doc.space, rhsDoc}) + }, + }), + ) + switch expr.pexp_attributes { + | list{} => doc + | attrs => Doc.group(Doc.concat(list{printAttributes(attrs), doc})) + } + | Pexp_apply( + {pexp_desc: Pexp_ident({txt: Longident.Ldot(Lident("Array"), "get")})}, + list{(Nolabel, parentExpr), (Nolabel, memberExpr)}, + ) => + let member = { + let memberDoc = { + let doc = printExpressionWithComments(memberExpr, cmtTbl) + switch Parens.expr(memberExpr) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, memberExpr, braces) + | Nothing => doc + } + } + + let shouldInline = switch memberExpr.pexp_desc { + | Pexp_constant(_) | Pexp_ident(_) => true + | _ => false + } + + if shouldInline { + memberDoc + } else { + Doc.concat(list{Doc.indent(Doc.concat(list{Doc.softLine, memberDoc})), Doc.softLine}) + } + } + + let parentDoc = { + let doc = printExpressionWithComments(parentExpr, cmtTbl) + switch Parens.unaryExprOperand(parentExpr) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, parentExpr, braces) + | Nothing => doc + } + } + + Doc.group( + Doc.concat(list{ + printAttributes(expr.pexp_attributes), + parentDoc, + Doc.lbracket, + member, + Doc.rbracket, + }), + ) + | Pexp_apply( + {pexp_desc: Pexp_ident({txt: Longident.Ldot(Lident("Array"), "set")})}, + list{(Nolabel, parentExpr), (Nolabel, memberExpr), (Nolabel, targetExpr)}, + ) => + let member = { + let memberDoc = { + let doc = printExpressionWithComments(memberExpr, cmtTbl) + switch Parens.expr(memberExpr) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, memberExpr, braces) + | Nothing => doc + } + } + + let shouldInline = switch memberExpr.pexp_desc { + | Pexp_constant(_) | Pexp_ident(_) => true + | _ => false + } + + if shouldInline { + memberDoc + } else { + Doc.concat(list{Doc.indent(Doc.concat(list{Doc.softLine, memberDoc})), Doc.softLine}) + } + } + + let shouldIndentTargetExpr = if ParsetreeViewer.isBracedExpr(targetExpr) { + false + } else { + ParsetreeViewer.isBinaryExpression(targetExpr) || + switch targetExpr { + | { + pexp_attributes: list{({Location.txt: "ns.ternary"}, _)}, + pexp_desc: Pexp_ifthenelse(ifExpr, _, _), + } => + ParsetreeViewer.isBinaryExpression(ifExpr) || + ParsetreeViewer.hasAttributes(ifExpr.pexp_attributes) + | {pexp_desc: Pexp_newtype(_)} => false + | e => ParsetreeViewer.hasAttributes(e.pexp_attributes) || ParsetreeViewer.isArrayAccess(e) + } + } + + let targetExpr = { + let doc = printExpressionWithComments(targetExpr, cmtTbl) + switch Parens.expr(targetExpr) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, targetExpr, braces) + | Nothing => doc + } + } + + let parentDoc = { + let doc = printExpressionWithComments(parentExpr, cmtTbl) + switch Parens.unaryExprOperand(parentExpr) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, parentExpr, braces) + | Nothing => doc + } + } + + Doc.group( + Doc.concat(list{ + printAttributes(expr.pexp_attributes), + parentDoc, + 
Doc.lbracket, + member, + Doc.rbracket, + Doc.text(" ="), + if shouldIndentTargetExpr { + Doc.indent(Doc.concat(list{Doc.line, targetExpr})) + } else { + Doc.concat(list{Doc.space, targetExpr}) + }, + }), + ) + /* TODO: cleanup, are those branches even remotely performant? */ + | Pexp_apply({pexp_desc: Pexp_ident(lident)}, args) + when ParsetreeViewer.isJsxExpression(expr) => + printJsxExpression(lident, args, cmtTbl) + | Pexp_apply(callExpr, args) => + let args = List.map(((lbl, arg)) => (lbl, ParsetreeViewer.rewriteUnderscoreApply(arg)), args) + + let (uncurried, attrs) = ParsetreeViewer.processUncurriedAttribute(expr.pexp_attributes) + + let callExprDoc = { + let doc = printExpressionWithComments(callExpr, cmtTbl) + switch Parens.callExpr(callExpr) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, callExpr, braces) + | Nothing => doc + } + } + + if ParsetreeViewer.requiresSpecialCallbackPrintingFirstArg(args) { + let argsDoc = printArgumentsWithCallbackInFirstPosition(~uncurried, args, cmtTbl) + + Doc.concat(list{printAttributes(attrs), callExprDoc, argsDoc}) + } else if ParsetreeViewer.requiresSpecialCallbackPrintingLastArg(args) { + let argsDoc = printArgumentsWithCallbackInLastPosition(~uncurried, args, cmtTbl) + + Doc.concat(list{printAttributes(attrs), callExprDoc, argsDoc}) + } else { + let argsDoc = printArguments(~uncurried, args, cmtTbl) + Doc.concat(list{printAttributes(attrs), callExprDoc, argsDoc}) + } + | _ => assert false + } + + and printJsxExpression = (lident, args, cmtTbl) => { + let name = printJsxName(lident) + let (formattedProps, children) = printJsxProps(args, cmtTbl) + /*
<div className="test" /> */ + let isSelfClosing = switch children { + | list{} => true + | _ => false + } + Doc.group( + Doc.concat(list{ + Doc.group( + Doc.concat(list{ + printComments(Doc.concat(list{Doc.lessThan, name}), cmtTbl, lident.Asttypes.loc), + formattedProps, + if isSelfClosing { + Doc.concat(list{Doc.line, Doc.text("/>")}) + } else { + Doc.nil + }, + }), + ), + if isSelfClosing { + Doc.nil + } else { + Doc.concat(list{ + Doc.greaterThan, + Doc.indent(Doc.concat(list{Doc.line, printJsxChildren(children, cmtTbl)})), + Doc.line, + Doc.text("</"), + name, + Doc.greaterThan, + }), + }, + }), + ) + } + + and printJsxFragment = (expr, cmtTbl) => { + let opening = Doc.text("<>") + let closing = Doc.text("</>") + let (children, _) = ParsetreeViewer.collectListExpressions(expr) + Doc.group( + Doc.concat(list{ + opening, + switch children { + | list{} => Doc.nil + | children => Doc.indent(Doc.concat(list{Doc.line, printJsxChildren(children, cmtTbl)})) + }, + Doc.line, + closing, + }), + ) + } + + and printJsxChildren = (children: list<Parsetree.expression>, cmtTbl) => + Doc.group(Doc.join(~sep=Doc.line, List.map(expr => { + let exprDoc = printExpressionWithComments(expr, cmtTbl) + switch Parens.jsxChildExpr(expr) { + | Parenthesized | Braced(_) => + /* {(20: int)} make sure that we also protect the expression inside */ + addBraces( + if Parens.bracedExpr(expr) { + addParens(exprDoc) + } else { + exprDoc + }, + ) + | Nothing => exprDoc + } + }, children))) + + and printJsxProps = (args, cmtTbl) => { + let rec loop = (props, args) => + switch args { + | list{} => (Doc.nil, list{}) + | list{ + (Asttypes.Labelled("children"), children), + ( + Asttypes.Nolabel, + {Parsetree.pexp_desc: Pexp_construct({txt: Longident.Lident("()")}, None)}, + ), + } => + let formattedProps = Doc.indent( + switch props { + | list{} => Doc.nil + | props => + Doc.concat(list{Doc.line, Doc.group(Doc.join(~sep=Doc.line, props |> List.rev))}) + }, + ) + let (children, _) = ParsetreeViewer.collectListExpressions(children) + (formattedProps, children) + | list{arg, ...args} => + let propDoc = printJsxProp(arg, cmtTbl) + loop(list{propDoc, ...props}, args) + } + + loop(list{}, args) + } + + and printJsxProp = (arg, cmtTbl) => + switch arg { + | ( + (Asttypes.Labelled(lblTxt) | Optional(lblTxt)) as lbl, + { + Parsetree.pexp_attributes: list{({Location.txt: "ns.namedArgLoc", loc: argLoc}, _)}, + pexp_desc: Pexp_ident({txt: Longident.Lident(ident)}), + }, + ) when lblTxt == ident /* jsx punning */ => + switch lbl { + | Nolabel => Doc.nil + | Labelled(_lbl) => printComments(printIdentLike(ident), cmtTbl, argLoc) + | Optional(_lbl) => + let doc = Doc.concat(list{Doc.question, printIdentLike(ident)}) + printComments(doc, cmtTbl, argLoc) + } + | ( + (Asttypes.Labelled(lblTxt) | Optional(lblTxt)) as lbl, + {Parsetree.pexp_attributes: list{}, pexp_desc: Pexp_ident({txt: Longident.Lident(ident)})}, + ) when lblTxt == ident /* jsx punning when printing from Reason */ => + switch lbl { + | Nolabel => Doc.nil + | Labelled(_lbl) => printIdentLike(ident) + | Optional(_lbl) => Doc.concat(list{Doc.question, printIdentLike(ident)}) + } + | (lbl, expr) => + let (argLoc, expr) = switch expr.pexp_attributes { + | list{({Location.txt: "ns.namedArgLoc", loc}, _), ...attrs} => ( + loc, + {...expr, pexp_attributes: attrs}, + ) + | _ => (Location.none, expr) + } + + let lblDoc = switch lbl { + | Asttypes.Labelled(lbl) => + let lbl = printComments(printIdentLike(lbl), cmtTbl, argLoc) + Doc.concat(list{lbl, Doc.equal}) + | Asttypes.Optional(lbl) => + let lbl = printComments(printIdentLike(lbl), cmtTbl, argLoc) + Doc.concat(list{lbl, Doc.equal, Doc.question}) + | Nolabel => Doc.nil + } + + let 
exprDoc = { + let doc = printExpression(expr, cmtTbl) + switch Parens.jsxPropExpr(expr) { + | Parenthesized | Braced(_) => + /* {(20: int)} make sure that we also protect the expression inside */ + addBraces( + if Parens.bracedExpr(expr) { + addParens(doc) + } else { + doc + }, + ) + | _ => doc + } + } + + let fullLoc = {...argLoc, loc_end: expr.pexp_loc.loc_end} + printComments(Doc.concat(list{lblDoc, exprDoc}), cmtTbl, fullLoc) + } + + /* div -> div. + * Navabar.createElement -> Navbar + * Staff.Users.createElement -> Staff.Users */ + and printJsxName = ({txt: lident}) => { + let rec flatten = (acc, lident) => + switch lident { + | Longident.Lident(txt) => list{txt, ...acc} + | Ldot(lident, txt) => + let acc = if txt == "createElement" { + acc + } else { + list{txt, ...acc} + } + flatten(acc, lident) + | _ => acc + } + + switch lident { + | Longident.Lident(txt) => Doc.text(txt) + | _ as lident => + let segments = flatten(list{}, lident) + Doc.join(~sep=Doc.dot, List.map(Doc.text, segments)) + } + } + + and printArgumentsWithCallbackInFirstPosition = (~uncurried, args, cmtTbl) => { + let (callback, printedArgs) = switch args { + | list{(lbl, expr), ...args} => + let lblDoc = switch lbl { + | Asttypes.Nolabel => Doc.nil + | Asttypes.Labelled(txt) => Doc.concat(list{Doc.tilde, printIdentLike(txt), Doc.equal}) + | Asttypes.Optional(txt) => + Doc.concat(list{Doc.tilde, printIdentLike(txt), Doc.equal, Doc.question}) + } + + let callback = Doc.concat(list{lblDoc, printPexpFun(~inCallback=true, expr, cmtTbl)}) + let printedArgs = + List.map(arg => printArgument(arg, cmtTbl), args) |> Doc.join( + ~sep=Doc.concat(list{Doc.comma, Doc.line}), + ) + + (callback, printedArgs) + | _ => assert false + } + + /* Thing.map((arg1, arg2) => MyModuleBlah.toList(argument), foo) */ + /* Thing.map((arg1, arg2) => { + * MyModuleBlah.toList(argument) + * }, longArgumet, veryLooooongArgument) + */ + let fitsOnOneLine = Doc.concat(list{ + if uncurried { + Doc.text("(. 
") + } else { + Doc.lparen + }, + callback, + Doc.comma, + Doc.line, + printedArgs, + Doc.rparen, + }) + + /* Thing.map( + * (param1, parm2) => doStuff(param1, parm2), + * arg1, + * arg2, + * arg3, + * ) + */ + let breakAllArgs = printArguments(~uncurried, args, cmtTbl) + Doc.customLayout(list{fitsOnOneLine, breakAllArgs}) + } + + and printArgumentsWithCallbackInLastPosition = (~uncurried, args, cmtTbl) => { + let rec loop = (acc, args) => + switch args { + | list{} => (Doc.nil, Doc.nil) + | list{(lbl, expr)} => + let lblDoc = switch lbl { + | Asttypes.Nolabel => Doc.nil + | Asttypes.Labelled(txt) => Doc.concat(list{Doc.tilde, printIdentLike(txt), Doc.equal}) + | Asttypes.Optional(txt) => + Doc.concat(list{Doc.tilde, printIdentLike(txt), Doc.equal, Doc.question}) + } + + let callback = printPexpFun(~inCallback=true, expr, cmtTbl) + (Doc.concat(List.rev(acc)), Doc.concat(list{lblDoc, callback})) + | list{arg, ...args} => + let argDoc = printArgument(arg, cmtTbl) + loop(list{Doc.line, Doc.comma, argDoc, ...acc}, args) + } + + let (printedArgs, callback) = loop(list{}, args) + + /* Thing.map(foo, (arg1, arg2) => MyModuleBlah.toList(argument)) */ + let fitsOnOneLine = Doc.concat(list{ + if uncurried { + Doc.text("(.") + } else { + Doc.lparen + }, + printedArgs, + callback, + Doc.rparen, + }) + + /* Thing.map(longArgumet, veryLooooongArgument, (arg1, arg2) => + * MyModuleBlah.toList(argument) + * ) + */ + let arugmentsFitOnOneLine = Doc.concat(list{ + if uncurried { + Doc.text("(.") + } else { + Doc.lparen + }, + Doc.softLine, + printedArgs, + Doc.breakableGroup(~forceBreak=true, callback), + Doc.softLine, + Doc.rparen, + }) + + /* Thing.map( + * arg1, + * arg2, + * arg3, + * (param1, parm2) => doStuff(param1, parm2) + * ) + */ + let breakAllArgs = printArguments(~uncurried, args, cmtTbl) + Doc.customLayout(list{fitsOnOneLine, arugmentsFitOnOneLine, breakAllArgs}) + } + + and printArguments = ( + ~uncurried, + args: list<(Asttypes.arg_label, Parsetree.expression)>, + cmtTbl, + ) => + switch args { + | list{(Nolabel, {pexp_desc: Pexp_construct({txt: Longident.Lident("()")}, _)})} => + if uncurried { + Doc.text("(.)") + } else { + Doc.text("()") + } + | list{(Nolabel, arg)} when ParsetreeViewer.isHuggableExpression(arg) => + let argDoc = { + let doc = printExpressionWithComments(arg, cmtTbl) + switch Parens.expr(arg) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, arg, braces) + | Nothing => doc + } + } + + Doc.concat(list{ + if uncurried { + Doc.text("(.") + } else { + Doc.lparen + }, + argDoc, + Doc.rparen, + }) + | args => + Doc.group( + Doc.concat(list{ + if uncurried { + Doc.text("(.") + } else { + Doc.lparen + }, + Doc.indent( + Doc.concat(list{ + if uncurried { + Doc.line + } else { + Doc.softLine + }, + Doc.join( + ~sep=Doc.concat(list{Doc.comma, Doc.line}), + List.map(arg => printArgument(arg, cmtTbl), args), + ), + }), + ), + Doc.trailingComma, + Doc.softLine, + Doc.rparen, + }), + ) + } + + /* + * argument ::= + * | _ (* syntax sugar *) + * | expr + * | expr : type + * | ~ label-name + * | ~ label-name + * | ~ label-name ? + * | ~ label-name = expr + * | ~ label-name = _ (* syntax sugar *) + * | ~ label-name = expr : type + * | ~ label-name = ? expr + * | ~ label-name = ? _ (* syntax sugar *) + * | ~ label-name = ? 
expr : type */ + and printArgument = ((argLbl, arg), cmtTbl) => + switch (argLbl, arg) { + /* ~a (punned) */ + | ( + Asttypes.Labelled(lbl), + { + pexp_desc: Pexp_ident({txt: Longident.Lident(name)}), + pexp_attributes: list{} | list{({Location.txt: "ns.namedArgLoc"}, _)}, + } as argExpr, + ) when lbl == name && !ParsetreeViewer.isBracedExpr(argExpr) => + let loc = switch arg.pexp_attributes { + | list{({Location.txt: "ns.namedArgLoc", loc}, _), ..._} => loc + | _ => arg.pexp_loc + } + + let doc = Doc.concat(list{Doc.tilde, printIdentLike(lbl)}) + printComments(doc, cmtTbl, loc) + + /* ~a: int (punned) */ + | ( + Asttypes.Labelled(lbl), + { + pexp_desc: + Pexp_constraint({pexp_desc: Pexp_ident({txt: Longident.Lident(name)})} as argExpr, typ), + pexp_loc, + pexp_attributes: (list{} | list{({Location.txt: "ns.namedArgLoc"}, _)}) as attrs, + }, + ) when lbl == name && !ParsetreeViewer.isBracedExpr(argExpr) => + let loc = switch attrs { + | list{({Location.txt: "ns.namedArgLoc", loc}, _), ..._} => { + ...loc, + loc_end: pexp_loc.loc_end, + } + | _ => arg.pexp_loc + } + + let doc = Doc.concat(list{ + Doc.tilde, + printIdentLike(lbl), + Doc.text(": "), + printTypExpr(typ, cmtTbl), + }) + printComments(doc, cmtTbl, loc) + /* ~a? (optional lbl punned) */ + | ( + Asttypes.Optional(lbl), + { + pexp_desc: Pexp_ident({txt: Longident.Lident(name)}), + pexp_attributes: list{} | list{({Location.txt: "ns.namedArgLoc"}, _)}, + }, + ) when lbl == name => + let loc = switch arg.pexp_attributes { + | list{({Location.txt: "ns.namedArgLoc", loc}, _), ..._} => loc + | _ => arg.pexp_loc + } + + let doc = Doc.concat(list{Doc.tilde, printIdentLike(lbl), Doc.question}) + printComments(doc, cmtTbl, loc) + | (_lbl, expr) => + let (argLoc, expr) = switch expr.pexp_attributes { + | list{({Location.txt: "ns.namedArgLoc", loc}, _), ...attrs} => ( + loc, + {...expr, pexp_attributes: attrs}, + ) + | _ => (expr.pexp_loc, expr) + } + + let printedLbl = switch argLbl { + | Asttypes.Nolabel => Doc.nil + | Asttypes.Labelled(lbl) => + let doc = Doc.concat(list{Doc.tilde, printIdentLike(lbl), Doc.equal}) + printComments(doc, cmtTbl, argLoc) + | Asttypes.Optional(lbl) => + let doc = Doc.concat(list{Doc.tilde, printIdentLike(lbl), Doc.equal, Doc.question}) + printComments(doc, cmtTbl, argLoc) + } + + let printedExpr = { + let doc = printExpressionWithComments(expr, cmtTbl) + switch Parens.expr(expr) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, expr, braces) + | Nothing => doc + } + } + + let loc = {...argLoc, loc_end: expr.pexp_loc.loc_end} + let doc = Doc.concat(list{printedLbl, printedExpr}) + printComments(doc, cmtTbl, loc) + } + + and printCases = (cases: list<Parsetree.case>, cmtTbl) => + Doc.breakableGroup( + ~forceBreak=true, + Doc.concat(list{Doc.lbrace, Doc.concat(list{Doc.line, printList(~getLoc=n => { + ...n.Parsetree.pc_lhs.ppat_loc, + loc_end: switch ParsetreeViewer.processBracesAttr(n.Parsetree.pc_rhs) { + | (None, _) => n.pc_rhs.pexp_loc.loc_end + | (Some({loc}, _), _) => loc.Location.loc_end + }, + }, ~print=printCase, ~nodes=cases, cmtTbl)}), Doc.line, Doc.rbrace}), + ) + + and printCase = (case: Parsetree.case, cmtTbl) => { + let rhs = switch case.pc_rhs.pexp_desc { + | Pexp_let(_) | Pexp_letmodule(_) | Pexp_letexception(_) | Pexp_open(_) | Pexp_sequence(_) => + printExpressionBlock(~braces=ParsetreeViewer.isBracedExpr(case.pc_rhs), case.pc_rhs, cmtTbl) + | _ => + let doc = printExpressionWithComments(case.pc_rhs, cmtTbl) + switch Parens.expr(case.pc_rhs) { + | Parenthesized => addParens(doc) 
+ | _ => doc + } + } + + let guard = switch case.pc_guard { + | None => Doc.nil + | Some(expr) => + Doc.group( + Doc.concat(list{Doc.line, Doc.text("when "), printExpressionWithComments(expr, cmtTbl)}), + ) + } + + let shouldInlineRhs = switch case.pc_rhs.pexp_desc { + | Pexp_construct({txt: Longident.Lident("()" | "true" | "false")}, _) + | Pexp_constant(_) + | Pexp_ident(_) => true + | _ when ParsetreeViewer.isHuggableRhs(case.pc_rhs) => true + | _ => false + } + + let shouldIndentPattern = switch case.pc_lhs.ppat_desc { + | Ppat_or(_) => false + | _ => true + } + + let patternDoc = { + let doc = printPattern(case.pc_lhs, cmtTbl) + switch case.pc_lhs.ppat_desc { + | Ppat_constraint(_) => addParens(doc) + | _ => doc + } + } + + let content = Doc.concat(list{ + if shouldIndentPattern { + Doc.indent(patternDoc) + } else { + patternDoc + }, + Doc.indent(guard), + Doc.text(" =>"), + Doc.indent( + Doc.concat(list{ + if shouldInlineRhs { + Doc.space + } else { + Doc.line + }, + rhs, + }), + ), + }) + Doc.group(Doc.concat(list{Doc.text("| "), content})) + } + + and printExprFunParameters = (~inCallback, ~uncurried, ~hasConstraint, parameters, cmtTbl) => + switch parameters { + /* let f = _ => () */ + | list{ParsetreeViewer.Parameter({ + attrs: list{}, + lbl: Asttypes.Nolabel, + defaultExpr: None, + pat: {Parsetree.ppat_desc: Ppat_any}, + })} when !uncurried => + if hasConstraint { + Doc.text("(_)") + } else { + Doc.text("_") + } + /* let f = a => () */ + | list{ParsetreeViewer.Parameter({ + attrs: list{}, + lbl: Asttypes.Nolabel, + defaultExpr: None, + pat: {Parsetree.ppat_desc: Ppat_var(stringLoc)}, + })} when !uncurried => + let txtDoc = { + let var = printIdentLike(stringLoc.txt) + if hasConstraint { + addParens(var) + } else { + var + } + } + + printComments(txtDoc, cmtTbl, stringLoc.loc) + /* let f = () => () */ + | list{ParsetreeViewer.Parameter({ + attrs: list{}, + lbl: Asttypes.Nolabel, + defaultExpr: None, + pat: {ppat_desc: Ppat_construct({txt: Longident.Lident("()")}, None)}, + })} when !uncurried => + Doc.text("()") + /* let f = (~greeting, ~from as hometown, ~x=?) => () */ + | parameters => + let lparen = if uncurried { + Doc.text("(. 
") + } else { + Doc.lparen + } + let shouldHug = ParsetreeViewer.parametersShouldHug(parameters) + let printedParamaters = Doc.concat(list{ + if shouldHug || inCallback { + Doc.nil + } else { + Doc.softLine + }, + Doc.join( + ~sep=Doc.concat(list{ + Doc.comma, + if inCallback { + Doc.space + } else { + Doc.line + }, + }), + List.map(p => printExpFunParameter(p, cmtTbl), parameters), + ), + }) + Doc.group( + Doc.concat(list{ + lparen, + if shouldHug || inCallback { + printedParamaters + } else { + Doc.indent(printedParamaters) + }, + if shouldHug || inCallback { + Doc.nil + } else { + Doc.concat(list{Doc.trailingComma, Doc.softLine}) + }, + Doc.rparen, + }), + ) + } + + and printExpFunParameter = (parameter, cmtTbl) => + switch parameter { + | ParsetreeViewer.NewTypes({attrs, locs: lbls}) => + Doc.group( + Doc.concat(list{ + printAttributes(attrs), + Doc.text("type "), + Doc.join( + ~sep=Doc.space, + List.map( + lbl => printComments(printIdentLike(lbl.Asttypes.txt), cmtTbl, lbl.Asttypes.loc), + lbls, + ), + ), + }), + ) + | Parameter({attrs, lbl, defaultExpr, pat: pattern}) => + let (isUncurried, attrs) = ParsetreeViewer.processUncurriedAttribute(attrs) + let uncurried = if isUncurried { + Doc.concat(list{Doc.dot, Doc.space}) + } else { + Doc.nil + } + let attrs = printAttributes(attrs) + /* =defaultValue */ + let defaultExprDoc = switch defaultExpr { + | Some(expr) => Doc.concat(list{Doc.text("="), printExpressionWithComments(expr, cmtTbl)}) + | None => Doc.nil + } + + /* ~from as hometown + * ~from -> punning */ + let labelWithPattern = switch (lbl, pattern) { + | (Asttypes.Nolabel, pattern) => printPattern(pattern, cmtTbl) + | ( + Asttypes.Labelled(lbl) | Optional(lbl), + { + ppat_desc: Ppat_var(stringLoc), + ppat_attributes: list{} | list{({Location.txt: "ns.namedArgLoc"}, _)}, + }, + ) when lbl == stringLoc.txt => + /* ~d */ + Doc.concat(list{Doc.text("~"), printIdentLike(lbl)}) + | ( + Asttypes.Labelled(lbl) | Optional(lbl), + { + ppat_desc: Ppat_constraint({ppat_desc: Ppat_var({txt})}, typ), + ppat_attributes: list{} | list{({Location.txt: "ns.namedArgLoc"}, _)}, + }, + ) when lbl == txt => + /* ~d: e */ + Doc.concat(list{ + Doc.text("~"), + printIdentLike(lbl), + Doc.text(": "), + printTypExpr(typ, cmtTbl), + }) + | (Asttypes.Labelled(lbl) | Optional(lbl), pattern) => + /* ~b as c */ + Doc.concat(list{ + Doc.text("~"), + printIdentLike(lbl), + Doc.text(" as "), + printPattern(pattern, cmtTbl), + }) + } + + let optionalLabelSuffix = switch (lbl, defaultExpr) { + | (Asttypes.Optional(_), None) => Doc.text("=?") + | _ => Doc.nil + } + + let doc = Doc.group( + Doc.concat(list{uncurried, attrs, labelWithPattern, defaultExprDoc, optionalLabelSuffix}), + ) + let cmtLoc = switch defaultExpr { + | None => + switch pattern.ppat_attributes { + | list{({Location.txt: "ns.namedArgLoc", loc}, _), ..._} => { + ...loc, + loc_end: pattern.ppat_loc.loc_end, + } + | _ => pattern.ppat_loc + } + | Some(expr) => + let startPos = switch pattern.ppat_attributes { + | list{({Location.txt: "ns.namedArgLoc", loc}, _), ..._} => loc.loc_start + | _ => pattern.ppat_loc.loc_start + } + { + ...pattern.ppat_loc, + loc_start: startPos, + loc_end: expr.pexp_loc.loc_end, + } + } + + printComments(doc, cmtTbl, cmtLoc) + } + + and printExpressionBlock = (~braces, expr, cmtTbl) => { + let rec collectRows = (acc, expr) => + switch expr.Parsetree.pexp_desc { + | Parsetree.Pexp_letmodule(modName, modExpr, expr2) => + let name = { + let doc = Doc.text(modName.txt) + printComments(doc, cmtTbl, modName.loc) + } + + let 
letModuleDoc = Doc.concat(list{ + Doc.text("module "), + name, + Doc.text(" = "), + printModExpr(modExpr, cmtTbl), + }) + let loc = {...expr.pexp_loc, loc_end: modExpr.pmod_loc.loc_end} + collectRows(list{(loc, letModuleDoc), ...acc}, expr2) + | Pexp_letexception(extensionConstructor, expr2) => + let loc = { + let loc = {...expr.pexp_loc, loc_end: extensionConstructor.pext_loc.loc_end} + switch getFirstLeadingComment(cmtTbl, loc) { + | None => loc + | Some(comment) => + let cmtLoc = Comment.loc(comment) + {...cmtLoc, loc_end: loc.loc_end} + } + } + + let letExceptionDoc = printExceptionDef(extensionConstructor, cmtTbl) + collectRows(list{(loc, letExceptionDoc), ...acc}, expr2) + | Pexp_open(overrideFlag, longidentLoc, expr2) => + let openDoc = Doc.concat(list{ + Doc.text("open"), + printOverrideFlag(overrideFlag), + Doc.space, + printLongidentLocation(longidentLoc, cmtTbl), + }) + let loc = {...expr.pexp_loc, loc_end: longidentLoc.loc.loc_end} + collectRows(list{(loc, openDoc), ...acc}, expr2) + | Pexp_sequence(expr1, expr2) => + let exprDoc = { + let doc = printExpression(expr1, cmtTbl) + switch Parens.expr(expr1) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, expr1, braces) + | Nothing => doc + } + } + + let loc = expr1.pexp_loc + collectRows(list{(loc, exprDoc), ...acc}, expr2) + | Pexp_let(recFlag, valueBindings, expr2) => + let loc = { + let loc = switch (valueBindings, List.rev(valueBindings)) { + | (list{vb, ..._}, list{lastVb, ..._}) => {...vb.pvb_loc, loc_end: lastVb.pvb_loc.loc_end} + | _ => Location.none + } + + switch getFirstLeadingComment(cmtTbl, loc) { + | None => loc + | Some(comment) => + let cmtLoc = Comment.loc(comment) + {...cmtLoc, loc_end: loc.loc_end} + } + } + + let recFlag = switch recFlag { + | Asttypes.Nonrecursive => Doc.nil + | Asttypes.Recursive => Doc.text("rec ") + } + + let letDoc = printValueBindings(~recFlag, valueBindings, cmtTbl) + /* let () = { + * let () = foo() + * () + * } + * We don't need to print the () on the last line of the block + */ + switch expr2.pexp_desc { + | Pexp_construct({txt: Longident.Lident("()")}, _) => List.rev(list{(loc, letDoc), ...acc}) + | _ => collectRows(list{(loc, letDoc), ...acc}, expr2) + } + | _ => + let exprDoc = { + let doc = printExpression(expr, cmtTbl) + switch Parens.expr(expr) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, expr, braces) + | Nothing => doc + } + } + + List.rev(list{(expr.pexp_loc, exprDoc), ...acc}) + } + + let rows = collectRows(list{}, expr) + let block = printList( + ~getLoc=fst, + ~nodes=rows, + ~print=((_, doc), _) => doc, + ~forceBreak=true, + cmtTbl, + ) + + Doc.breakableGroup( + ~forceBreak=true, + if braces { + Doc.concat(list{ + Doc.lbrace, + Doc.indent(Doc.concat(list{Doc.line, block})), + Doc.line, + Doc.rbrace, + }) + } else { + block + }, + ) + } + + /* + * // user types: + * let f = (a, b) => { a + b } + * + * // printer: everything is on one line + * let f = (a, b) => { a + b } + * + * // user types: over multiple lines + * let f = (a, b) => { + * a + b + * } + * + * // printer: over multiple lines + * let f = (a, b) => { + * a + b + * } + */ + and printBraces = (doc, expr, bracesLoc) => { + let overMultipleLines = { + open Location + bracesLoc.loc_end.pos_lnum > bracesLoc.loc_start.pos_lnum + } + + switch expr.Parsetree.pexp_desc { + | Pexp_letmodule(_) + | Pexp_letexception(_) + | Pexp_let(_) + | Pexp_open(_) + | Pexp_sequence(_) => /* already has braces */ + doc + | _ => + Doc.breakableGroup( + 
~forceBreak=overMultipleLines, + Doc.concat(list{ + Doc.lbrace, + Doc.indent( + Doc.concat(list{ + Doc.softLine, + if Parens.bracedExpr(expr) { + addParens(doc) + } else { + doc + }, + }), + ), + Doc.softLine, + Doc.rbrace, + }), + ) + } + } + + and printOverrideFlag = overrideFlag => + switch overrideFlag { + | Asttypes.Override => Doc.text("!") + | Fresh => Doc.nil + } + + and printDirectionFlag = flag => + switch flag { + | Asttypes.Downto => Doc.text(" downto ") + | Asttypes.Upto => Doc.text(" to ") + } + + and printRecordRow = ((lbl, expr), cmtTbl) => { + let cmtLoc = {...lbl.loc, loc_end: expr.pexp_loc.loc_end} + let doc = Doc.group( + Doc.concat(list{ + printLidentPath(lbl, cmtTbl), + Doc.text(": "), + { + let doc = printExpressionWithComments(expr, cmtTbl) + switch Parens.expr(expr) { + | Parens.Parenthesized => addParens(doc) + | Braced(braces) => printBraces(doc, expr, braces) + | Nothing => doc + } + }, + }), + ) + printComments(doc, cmtTbl, cmtLoc) + } + + and printBsObjectRow = ((lbl, expr), cmtTbl) => { + let cmtLoc = {...lbl.loc, loc_end: expr.pexp_loc.loc_end} + let lblDoc = { + let doc = Doc.concat(list{Doc.text("\""), printLongident(lbl.txt), Doc.text("\"")}) + printComments(doc, cmtTbl, lbl.loc) + } + + let doc = Doc.concat(list{lblDoc, Doc.text(": "), printExpressionWithComments(expr, cmtTbl)}) + printComments(doc, cmtTbl, cmtLoc) + } + + /* The optional loc indicates whether we need to print the attributes in + * relation to some location. In practise this means the following: + * `@attr type t = string` -> on the same line, print on the same line + * `@attr + * type t = string` -> attr is on prev line, print the attributes + * with a line break between, we respect the users' original layout */ + and printAttributes = (~loc=?, attrs: Parsetree.attributes) => + switch ParsetreeViewer.filterParsingAttrs(attrs) { + | list{} => Doc.nil + | attrs => + let lineBreak = switch loc { + | None => Doc.line + | Some(loc) => + switch List.rev(attrs) { + | list{({loc: firstLoc}, _), ..._} + when loc.loc_start.pos_lnum > firstLoc.loc_end.pos_lnum => Doc.hardLine + | _ => Doc.line + } + } + + Doc.concat(list{ + Doc.group(Doc.join(~sep=Doc.line, List.map(printAttribute, attrs))), + lineBreak, + }) + } + + and printAttribute = ((id, payload): Parsetree.attribute) => { + let attrName = Doc.concat(list{Doc.text("@"), Doc.text(id.txt)}) + switch payload { + | PStr(list{{pstr_desc: Pstr_eval(expr, attrs)}}) => + let exprDoc = printExpression(expr, CommentTable.empty) + let needsParens = switch attrs { + | list{} => false + | _ => true + } + Doc.group( + Doc.concat(list{ + attrName, + addParens( + Doc.concat(list{ + printAttributes(attrs), + if needsParens { + addParens(exprDoc) + } else { + exprDoc + }, + }), + ), + }), + ) + | PTyp(typ) => + Doc.group( + Doc.concat(list{ + attrName, + Doc.lparen, + Doc.indent( + Doc.concat(list{Doc.softLine, Doc.text(": "), printTypExpr(typ, CommentTable.empty)}), + ), + Doc.softLine, + Doc.rparen, + }), + ) + | _ => attrName + } + } + + and printAttributeWithComments = ((id, payload): Parsetree.attribute, cmtTbl) => { + let attrName = Doc.text("@" ++ id.txt) + switch payload { + | PStr(list{{pstr_desc: Pstr_eval(expr, attrs)}}) => + let exprDoc = printExpressionWithComments(expr, cmtTbl) + let needsParens = switch attrs { + | list{} => false + | _ => true + } + Doc.group( + Doc.concat(list{ + attrName, + addParens( + Doc.concat(list{ + printAttributes(attrs), + if needsParens { + addParens(exprDoc) + } else { + exprDoc + }, + }), + ), + }), + ) + | _ => 
attrName + } + } + + and printModExpr = (modExpr, cmtTbl) => { + let doc = switch modExpr.pmod_desc { + | Pmod_ident(longidentLoc) => printLongidentLocation(longidentLoc, cmtTbl) + | Pmod_structure(structure) => + Doc.breakableGroup( + ~forceBreak=true, + Doc.concat(list{ + Doc.lbrace, + Doc.indent(Doc.concat(list{Doc.softLine, printStructure(structure, cmtTbl)})), + Doc.softLine, + Doc.rbrace, + }), + ) + | Pmod_unpack(expr) => + let shouldHug = switch expr.pexp_desc { + | Pexp_let(_) => true + | Pexp_constraint({pexp_desc: Pexp_let(_)}, {ptyp_desc: Ptyp_package(_packageType)}) => true + | _ => false + } + + let (expr, moduleConstraint) = switch expr.pexp_desc { + | Pexp_constraint(expr, {ptyp_desc: Ptyp_package(packageType), ptyp_loc}) => + let packageDoc = { + let doc = printPackageType(~printModuleKeywordAndParens=false, packageType, cmtTbl) + printComments(doc, cmtTbl, ptyp_loc) + } + + let typeDoc = Doc.group( + Doc.concat(list{Doc.text(":"), Doc.indent(Doc.concat(list{Doc.line, packageDoc}))}), + ) + (expr, typeDoc) + | _ => (expr, Doc.nil) + } + + let unpackDoc = Doc.group( + Doc.concat(list{printExpressionWithComments(expr, cmtTbl), moduleConstraint}), + ) + Doc.group( + Doc.concat(list{ + Doc.text("unpack("), + if shouldHug { + unpackDoc + } else { + Doc.concat(list{Doc.indent(Doc.concat(list{Doc.softLine, unpackDoc})), Doc.softLine}) + }, + Doc.rparen, + }), + ) + | Pmod_extension(extension) => printExtensionWithComments(~atModuleLvl=false, extension, cmtTbl) + | Pmod_apply(_) => + let (args, callExpr) = ParsetreeViewer.modExprApply(modExpr) + let isUnitSugar = switch args { + | list{{pmod_desc: Pmod_structure(list{})}} => true + | _ => false + } + + let shouldHug = switch args { + | list{{pmod_desc: Pmod_structure(_)}} => true + | _ => false + } + + Doc.group( + Doc.concat(list{ + printModExpr(callExpr, cmtTbl), + if isUnitSugar { + printModApplyArg(@doesNotRaise List.hd(args), cmtTbl) + } else { + Doc.concat(list{ + Doc.lparen, + if shouldHug { + printModApplyArg(@doesNotRaise List.hd(args), cmtTbl) + } else { + Doc.indent( + Doc.concat(list{ + Doc.softLine, + Doc.join( + ~sep=Doc.concat(list{Doc.comma, Doc.line}), + List.map(modArg => printModApplyArg(modArg, cmtTbl), args), + ), + }), + ) + }, + if !shouldHug { + Doc.concat(list{Doc.trailingComma, Doc.softLine}) + } else { + Doc.nil + }, + Doc.rparen, + }) + }, + }), + ) + | Pmod_constraint(modExpr, modType) => + Doc.concat(list{printModExpr(modExpr, cmtTbl), Doc.text(": "), printModType(modType, cmtTbl)}) + | Pmod_functor(_) => printModFunctor(modExpr, cmtTbl) + } + + printComments(doc, cmtTbl, modExpr.pmod_loc) + } + + and printModFunctor = (modExpr, cmtTbl) => { + let (parameters, returnModExpr) = ParsetreeViewer.modExprFunctor(modExpr) + /* let shouldInline = match returnModExpr.pmod_desc with */ + /* | Pmod_structure _ | Pmod_ident _ -> true */ + /* | Pmod_constraint ({pmod_desc = Pmod_structure _}, _) -> true */ + /* | _ -> false */ + /* in */ + let (returnConstraint, returnModExpr) = switch returnModExpr.pmod_desc { + | Pmod_constraint(modExpr, modType) => + let constraintDoc = { + let doc = printModType(modType, cmtTbl) + if Parens.modExprFunctorConstraint(modType) { + addParens(doc) + } else { + doc + } + } + + let modConstraint = Doc.concat(list{Doc.text(": "), constraintDoc}) + (modConstraint, printModExpr(modExpr, cmtTbl)) + | _ => (Doc.nil, printModExpr(returnModExpr, cmtTbl)) + } + + let parametersDoc = switch parameters { + | list{(attrs, {txt: "*"}, None)} => + let attrs = switch attrs { + | list{} => Doc.nil 
+ | attrs => + Doc.concat(list{Doc.join(~sep=Doc.line, List.map(printAttribute, attrs)), Doc.line}) + } + Doc.group(Doc.concat(list{attrs, Doc.text("()")})) + | list{(list{}, {txt: lbl}, None)} => Doc.text(lbl) + | parameters => + Doc.group( + Doc.concat(list{ + Doc.lparen, + Doc.indent( + Doc.concat(list{ + Doc.softLine, + Doc.join( + ~sep=Doc.concat(list{Doc.comma, Doc.line}), + List.map(param => printModFunctorParam(param, cmtTbl), parameters), + ), + }), + ), + Doc.trailingComma, + Doc.softLine, + Doc.rparen, + }), + ) + } + + Doc.group(Doc.concat(list{parametersDoc, returnConstraint, Doc.text(" => "), returnModExpr})) + } + + and printModFunctorParam = ((attrs, lbl, optModType), cmtTbl) => { + let cmtLoc = switch optModType { + | None => lbl.Asttypes.loc + | Some(modType) => { + ...lbl.loc, + loc_end: modType.Parsetree.pmty_loc.loc_end, + } + } + + let attrs = switch attrs { + | list{} => Doc.nil + | attrs => Doc.concat(list{Doc.join(~sep=Doc.line, List.map(printAttribute, attrs)), Doc.line}) + } + let lblDoc = { + let doc = Doc.text(lbl.txt) + printComments(doc, cmtTbl, lbl.loc) + } + + let doc = Doc.group( + Doc.concat(list{ + attrs, + lblDoc, + switch optModType { + | None => Doc.nil + | Some(modType) => Doc.concat(list{Doc.text(": "), printModType(modType, cmtTbl)}) + }, + }), + ) + printComments(doc, cmtTbl, cmtLoc) + } + + and printModApplyArg = (modExpr, cmtTbl) => + switch modExpr.pmod_desc { + | Pmod_structure(list{}) => Doc.text("()") + | _ => printModExpr(modExpr, cmtTbl) + } + + and printExceptionDef = (constr: Parsetree.extension_constructor, cmtTbl) => { + let kind = switch constr.pext_kind { + | Pext_rebind(longident) => + Doc.indent( + Doc.concat(list{Doc.text(" ="), Doc.line, printLongidentLocation(longident, cmtTbl)}), + ) + | Pext_decl(Pcstr_tuple(list{}), None) => Doc.nil + | Pext_decl(args, gadt) => + let gadtDoc = switch gadt { + | Some(typ) => Doc.concat(list{Doc.text(": "), printTypExpr(typ, cmtTbl)}) + | None => Doc.nil + } + + Doc.concat(list{printConstructorArguments(~indent=false, args, cmtTbl), gadtDoc}) + } + + let name = printComments(Doc.text(constr.pext_name.txt), cmtTbl, constr.pext_name.loc) + + let doc = Doc.group( + Doc.concat(list{printAttributes(constr.pext_attributes), Doc.text("exception "), name, kind}), + ) + printComments(doc, cmtTbl, constr.pext_loc) + } + + and printExtensionConstructor = (constr: Parsetree.extension_constructor, cmtTbl, i) => { + let attrs = printAttributes(constr.pext_attributes) + let bar = if i > 0 { + Doc.text("| ") + } else { + Doc.ifBreaks(Doc.text("| "), Doc.nil) + } + + let kind = switch constr.pext_kind { + | Pext_rebind(longident) => + Doc.indent( + Doc.concat(list{Doc.text(" ="), Doc.line, printLongidentLocation(longident, cmtTbl)}), + ) + | Pext_decl(Pcstr_tuple(list{}), None) => Doc.nil + | Pext_decl(args, gadt) => + let gadtDoc = switch gadt { + | Some(typ) => Doc.concat(list{Doc.text(": "), printTypExpr(typ, cmtTbl)}) + | None => Doc.nil + } + + Doc.concat(list{printConstructorArguments(~indent=false, args, cmtTbl), gadtDoc}) + } + + let name = printComments(Doc.text(constr.pext_name.txt), cmtTbl, constr.pext_name.loc) + + Doc.concat(list{bar, Doc.group(Doc.concat(list{attrs, name, kind}))}) + } + + let printImplementation = (~width, s: Parsetree.structure, comments) => { + let cmtTbl = CommentTable.make() + CommentTable.walkStructure(s, cmtTbl, comments) + /* CommentTable.log cmtTbl; */ + let doc = printStructure(s, cmtTbl) + /* Doc.debug doc; */ + let stringDoc = Doc.toString(~width, doc) + 
print_string(stringDoc) + } + + let printInterface = (~width, s: Parsetree.signature, comments) => { + let cmtTbl = CommentTable.make() + CommentTable.walkSignature(s, cmtTbl, comments) + let stringDoc = Doc.toString(~width, printSignature(s, cmtTbl)) + print_string(stringDoc) + } +} + +module Scanner = { + type mode = Template | Jsx | Diamond + + type t = { + filename: string, + src: bytes, + mutable err: ( + ~startPos: Lexing.position, + ~endPos: Lexing.position, + Diagnostics.category, + ) => unit, + mutable ch: int /* current character */, + mutable offset: int /* character offset */, + mutable rdOffset: int /* reading offset (position after current character) */, + mutable lineOffset: int /* current line offset */, + mutable lnum: int /* current line number */, + mutable mode: list<mode>, + } + + let setDiamondMode = scanner => scanner.mode = list{Diamond, ...scanner.mode} + + let setTemplateMode = scanner => scanner.mode = list{Template, ...scanner.mode} + + let setJsxMode = scanner => scanner.mode = list{Jsx, ...scanner.mode} + + let popMode = (scanner, mode) => + switch scanner.mode { + | list{m, ...ms} when m == mode => scanner.mode = ms + | _ => () + } + + let inDiamondMode = scanner => + switch scanner.mode { + | list{Diamond, ..._} => true + | _ => false + } + + let inJsxMode = scanner => + switch scanner.mode { + | list{Jsx, ..._} => true + | _ => false + } + + let inTemplateMode = scanner => + switch scanner.mode { + | list{Template, ..._} => true + | _ => false + } + + let position = scanner => { + open Lexing + { + pos_fname: scanner.filename, + /* line number */ + pos_lnum: scanner.lnum, + /* offset of the beginning of the line (number + of characters between the beginning of the scanner and the beginning + of the line) */ + pos_bol: scanner.lineOffset, + /* [pos_cnum] is the offset of the position (number of + characters between the beginning of the scanner and the position). 
*/ + pos_cnum: scanner.offset, + } + } + + let next = scanner => + if scanner.rdOffset < Bytes.length(scanner.src) { + scanner.offset = scanner.rdOffset + let ch = (@doesNotRaise Bytes.get)(scanner.src, scanner.rdOffset) + scanner.rdOffset = scanner.rdOffset + 1 + scanner.ch = int_of_char(ch) + } else { + scanner.offset = Bytes.length(scanner.src) + scanner.ch = -1 + } + + let peek = scanner => + if scanner.rdOffset < Bytes.length(scanner.src) { + int_of_char(Bytes.unsafe_get(scanner.src, scanner.rdOffset)) + } else { + -1 + } + + let make = (b, filename) => { + let scanner = { + filename: filename, + src: b, + err: (~startPos as _, ~endPos as _, _) => (), + ch: CharacterCodes.space, + offset: 0, + rdOffset: 0, + lineOffset: 0, + lnum: 1, + mode: list{}, + } + next(scanner) + scanner + } + + let skipWhitespace = scanner => { + let rec scan = () => + if scanner.ch === CharacterCodes.space || scanner.ch === CharacterCodes.tab { + next(scanner) + scan() + } else if CharacterCodes.isLineBreak(scanner.ch) { + scanner.lineOffset = scanner.offset + 1 + scanner.lnum = scanner.lnum + 1 + next(scanner) + scan() + } else { + () + } + + scan() + } + + let scanIdentifier = scanner => { + let startOff = scanner.offset + while ( + CharacterCodes.isLetter(scanner.ch) || + (CharacterCodes.isDigit(scanner.ch) || + (CharacterCodes.underscore === scanner.ch || CharacterCodes.singleQuote === scanner.ch)) + ) { + next(scanner) + } + let str = Bytes.sub_string(scanner.src, startOff, scanner.offset - startOff) + Token.lookupKeyword(str) + } + + let scanDigits = (scanner, ~base) => + if base <= 10 { + while CharacterCodes.isDigit(scanner.ch) || scanner.ch === CharacterCodes.underscore { + next(scanner) + } + } else { + while CharacterCodes.isHex(scanner.ch) || scanner.ch === CharacterCodes.underscore { + next(scanner) + } + } + + /* float: (0…9) { 0…9∣ _ } [. 
{ 0…9∣ _ }] [(e∣ E) [+∣ -] (0…9) { 0…9∣ _ }] */ + let scanNumber = scanner => { + let startOff = scanner.offset + + /* integer part */ + let (base, _prefix) = if scanner.ch !== CharacterCodes.dot { + if scanner.ch === CharacterCodes._0 { + next(scanner) + let ch = CharacterCodes.lower(scanner.ch) + if ch === CharacterCodes.Lower.x { + next(scanner) + (16, 'x') + } else if ch === CharacterCodes.Lower.o { + next(scanner) + (8, 'o') + } else if ch === CharacterCodes.Lower.b { + next(scanner) + (2, 'b') + } else { + (8, '0') + } + } else { + (10, ' ') + } + } else { + (10, ' ') + } + + scanDigits(scanner, ~base) + + /* */ + let isFloat = if CharacterCodes.dot === scanner.ch { + next(scanner) + scanDigits(scanner, ~base) + true + } else { + false + } + + /* exponent part */ + let isFloat = if { + let exp = CharacterCodes.lower(scanner.ch) + exp === CharacterCodes.Lower.e || exp === CharacterCodes.Lower.p + } { + next(scanner) + if scanner.ch === CharacterCodes.plus || scanner.ch === CharacterCodes.minus { + next(scanner) + } + scanDigits(scanner, ~base) + true + } else { + isFloat + } + + let literal = Bytes.sub_string(scanner.src, startOff, scanner.offset - startOff) + + /* suffix */ + let suffix = if ( + (scanner.ch >= CharacterCodes.Lower.g && scanner.ch <= CharacterCodes.Lower.z) || + (scanner.ch >= CharacterCodes.Upper.g && scanner.ch <= CharacterCodes.Upper.z) + ) { + let ch = scanner.ch + next(scanner) + Some(Char.unsafe_chr(ch)) + } else { + None + } + + if isFloat { + Token.Float({f: literal, suffix: suffix}) + } else { + Token.Int({i: literal, suffix: suffix}) + } + } + + let scanExoticIdentifier = scanner => { + next(scanner) + let buffer = Buffer.create(20) + let startPos = position(scanner) + + let rec scan = () => + if scanner.ch === CharacterCodes.eof { + let endPos = position(scanner) + scanner.err(~startPos, ~endPos, Diagnostics.message("Did you forget a \" here?")) + } else if scanner.ch === CharacterCodes.doubleQuote { + next(scanner) + } else if CharacterCodes.isLineBreak(scanner.ch) { + scanner.lineOffset = scanner.offset + 1 + scanner.lnum = scanner.lnum + 1 + let endPos = position(scanner) + scanner.err(~startPos, ~endPos, Diagnostics.message("Did you forget a \" here?")) + next(scanner) + } else { + Buffer.add_char(buffer, (@doesNotRaise Char.chr)(scanner.ch)) + next(scanner) + scan() + } + + scan() + Token.Lident(Buffer.contents(buffer)) + } + + let scanStringEscapeSequence = (~startPos, scanner) => + /* \ already consumed */ + if ( + CharacterCodes.Lower.n === scanner.ch || + (CharacterCodes.Lower.t === scanner.ch || + (CharacterCodes.Lower.b === scanner.ch || + (CharacterCodes.Lower.r === scanner.ch || + (CharacterCodes.backslash === scanner.ch || + (CharacterCodes.space === scanner.ch || + (CharacterCodes.singleQuote === scanner.ch || + CharacterCodes.doubleQuote === scanner.ch)))))) + ) { + next(scanner) + } else { + let (n, base, max) = if CharacterCodes.isDigit(scanner.ch) { + /* decimal */ + (3, 10, 255) + } else if scanner.ch === CharacterCodes.Lower.o { + /* octal */ + let () = next(scanner) + (3, 8, 255) + } else if scanner.ch === CharacterCodes.Lower.x { + /* hex */ + let () = next(scanner) + (2, 16, 255) + } else { + /* unknown escape sequence + * TODO: we should warn the user here. 
Let's not make it a hard error for now, for reason compat */ + /* let pos = position scanner in */ + /* let () = */ + /* let msg = if scanner.ch == -1 then */ + /* "unclosed escape sequence" */ + /* else "unknown escape sequence" */ + /* in */ + /* scanner.err ~startPos ~endPos:pos (Diagnostics.message msg) */ + /* in */ + (-1, -1, -1) + } + + if n < 0 { + () + } else { + let rec while_ = (n, x) => + if n === 0 { + x + } else { + let d = CharacterCodes.digitValue(scanner.ch) + if d >= base { + let pos = position(scanner) + let msg = if scanner.ch === -1 { + "unclosed escape sequence" + } else { + "unknown escape sequence" + } + + scanner.err(~startPos, ~endPos=pos, Diagnostics.message(msg)) + -1 + } else { + let () = next(scanner) + while_(n - 1, x * base + d) + } + } + + let x = while_(n, 0) + if x > max { + let pos = position(scanner) + let msg = "invalid escape sequence (value too high)" + scanner.err(~startPos, ~endPos=pos, Diagnostics.message(msg)) + () + } + } + } + + let scanString = scanner => { + let offs = scanner.offset + + let startPos = position(scanner) + let rec scan = () => + if scanner.ch === CharacterCodes.eof { + let endPos = position(scanner) + scanner.err(~startPos, ~endPos, Diagnostics.unclosedString) + } else if scanner.ch === CharacterCodes.doubleQuote { + next(scanner) + } else if scanner.ch === CharacterCodes.backslash { + let startPos = position(scanner) + next(scanner) + scanStringEscapeSequence(~startPos, scanner) + scan() + } else if CharacterCodes.isLineBreak(scanner.ch) { + scanner.lineOffset = scanner.offset + 1 + scanner.lnum = scanner.lnum + 1 + next(scanner) + scan() + } else { + next(scanner) + scan() + } + + scan() + Token.String(Bytes.sub_string(scanner.src, offs, scanner.offset - offs - 1)) + } + + /* I wonder if this gets inlined */ + let convertNumber = (scanner, ~n, ~base) => { + let x = ref(0) + for _ in n downto 1 { + let d = CharacterCodes.digitValue(scanner.ch) + x := x.contents * base + d + next(scanner) + } + x.contents + } + + let scanEscape = scanner => { + /* let offset = scanner.offset in */ + let c = switch scanner.ch { + | 98 /* b */ => + next(scanner) + '\b' + | 110 /* n */ => + next(scanner) + '\n' + | 114 /* r */ => + next(scanner) + '\r' + | 116 /* t */ => + next(scanner) + '\t' + | ch when CharacterCodes.isDigit(ch) => + let x = convertNumber(scanner, ~n=3, ~base=10) + (@doesNotRaise Char.chr)(x) + | ch when ch === CharacterCodes.Lower.x => + next(scanner) + let x = convertNumber(scanner, ~n=2, ~base=16) + (@doesNotRaise Char.chr)(x) + | ch when ch === CharacterCodes.Lower.o => + next(scanner) + let x = convertNumber(scanner, ~n=3, ~base=8) + (@doesNotRaise Char.chr)(x) + | ch => + next(scanner) + (@doesNotRaise Char.chr)(ch) + } + + next(scanner) /* Consume \' */ + Token.Character(c) + } + + let scanSingleLineComment = scanner => { + let startOff = scanner.offset + let startPos = position(scanner) + while !CharacterCodes.isLineBreak(scanner.ch) && scanner.ch >= 0 { + next(scanner) + } + let endPos = position(scanner) + Token.Comment( + Comment.makeSingleLineComment( + ~loc={ + open Location + {loc_start: startPos, loc_end: endPos, loc_ghost: false} + }, + Bytes.sub_string(scanner.src, startOff, scanner.offset - startOff), + ), + ) + } + + let scanMultiLineComment = scanner => { + let startOff = scanner.offset + let startPos = position(scanner) + let rec scan = (~depth, ()) => + if scanner.ch === CharacterCodes.asterisk && peek(scanner) === CharacterCodes.forwardslash { + next(scanner) + next(scanner) + if depth > 0 { + 
scan(~depth=depth - 1, ()) + } else { + () + } + } else if scanner.ch === CharacterCodes.eof { + let endPos = position(scanner) + scanner.err(~startPos, ~endPos, Diagnostics.unclosedComment) + } else if ( + scanner.ch === CharacterCodes.forwardslash && peek(scanner) === CharacterCodes.asterisk + ) { + next(scanner) + next(scanner) + scan(~depth=depth + 1, ()) + } else { + if CharacterCodes.isLineBreak(scanner.ch) { + scanner.lineOffset = scanner.offset + 1 + scanner.lnum = scanner.lnum + 1 + } + next(scanner) + scan(~depth, ()) + } + + scan(~depth=0, ()) + Token.Comment( + Comment.makeMultiLineComment( + ~loc={ + open Location + {loc_start: startPos, loc_end: position(scanner), loc_ghost: false} + }, + Bytes.sub_string(scanner.src, startOff, scanner.offset - 2 - startOff), + ), + ) + } + + let scanTemplate = scanner => { + let startOff = scanner.offset + let startPos = position(scanner) + + let rec scan = () => + if scanner.ch === CharacterCodes.eof { + let endPos = position(scanner) + scanner.err(~startPos, ~endPos, Diagnostics.unclosedTemplate) + popMode(scanner, Template) + Token.TemplateTail(Bytes.sub_string(scanner.src, startOff, scanner.offset - 2 - startOff)) + } else if scanner.ch === CharacterCodes.backslash { + next(scanner) + if ( + scanner.ch === CharacterCodes.backtick || + (scanner.ch === CharacterCodes.backslash || + scanner.ch === CharacterCodes.dollar) + ) { + next(scanner) + } + scan() + } else if scanner.ch === CharacterCodes.backtick { + next(scanner) + let contents = Bytes.sub_string(scanner.src, startOff, scanner.offset - 1 - startOff) + + popMode(scanner, Template) + Token.TemplateTail(contents) + } else if scanner.ch === CharacterCodes.dollar && peek(scanner) === CharacterCodes.lbrace { + next(scanner) /* consume $ */ + next(scanner) /* consume { */ + let contents = Bytes.sub_string(scanner.src, startOff, scanner.offset - 2 - startOff) + + popMode(scanner, Template) + Token.TemplatePart(contents) + } else { + if CharacterCodes.isLineBreak(scanner.ch) { + scanner.lineOffset = scanner.offset + 1 + scanner.lnum = scanner.lnum + 1 + } + next(scanner) + scan() + } + + scan() + } + + let rec scan = scanner => { + if !inTemplateMode(scanner) { + skipWhitespace(scanner) + } + let startPos = position(scanner) + let ch = scanner.ch + let token = if inTemplateMode(scanner) { + scanTemplate(scanner) + } else if ch === CharacterCodes.underscore { + let nextCh = peek(scanner) + if ( + nextCh === CharacterCodes.underscore || + (CharacterCodes.isDigit(nextCh) || + CharacterCodes.isLetter(nextCh)) + ) { + scanIdentifier(scanner) + } else { + next(scanner) + Token.Underscore + } + } else if CharacterCodes.isLetter(ch) { + scanIdentifier(scanner) + } else if CharacterCodes.isDigit(ch) { + scanNumber(scanner) + } else { + next(scanner) + if ch === CharacterCodes.dot { + if scanner.ch === CharacterCodes.dot { + next(scanner) + if scanner.ch === CharacterCodes.dot { + next(scanner) + Token.DotDotDot + } else { + Token.DotDot + } + } else { + Token.Dot + } + } else if ch === CharacterCodes.doubleQuote { + scanString(scanner) + } else if ch === CharacterCodes.singleQuote { + if ( + scanner.ch === CharacterCodes.backslash && !(peek(scanner) === CharacterCodes.doubleQuote) + ) { + /* start of exotic ident */ + + next(scanner) + scanEscape(scanner) + } else if peek(scanner) === CharacterCodes.singleQuote { + let ch = scanner.ch + next(scanner) + next(scanner) + Token.Character((@doesNotRaise Char.chr)(ch)) + } else { + SingleQuote + } + } else if ch === CharacterCodes.bang { + if scanner.ch 
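// The depth counter here is what lets block comments nest: every inner `/*`
// bumps the depth, and a `*/` only terminates the comment once the depth is
// back down. A hedged, standalone sketch of the same idea over a plain string
// (balancedCommentLength is a made-up name, for illustration only):
let balancedCommentLength = (src: string): option<int> => {
  let len = String.length(src)
  let rec go = (i, depth) =>
    if i + 1 >= len {
      None // ran out of input: unclosed comment (or no comment at all)
    } else if String.get(src, i) == '/' && String.get(src, i + 1) == '*' {
      go(i + 2, depth + 1)
    } else if depth > 0 && String.get(src, i) == '*' && String.get(src, i + 1) == '/' {
      if depth == 1 {
        Some(i + 2) // offset just past the outermost closing delimiter
      } else {
        go(i + 2, depth - 1)
      }
    } else {
      go(i + 1, depth)
    }
  go(0, 0)
}
// balancedCommentLength("/* a /* b */ c */ rest") == Some(17)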
=== CharacterCodes.equal { + next(scanner) + if scanner.ch === CharacterCodes.equal { + next(scanner) + Token.BangEqualEqual + } else { + Token.BangEqual + } + } else { + Token.Bang + } + } else if ch === CharacterCodes.semicolon { + Token.Semicolon + } else if ch === CharacterCodes.equal { + if scanner.ch === CharacterCodes.greaterThan { + next(scanner) + Token.EqualGreater + } else if scanner.ch === CharacterCodes.equal { + next(scanner) + if scanner.ch === CharacterCodes.equal { + next(scanner) + Token.EqualEqualEqual + } else { + Token.EqualEqual + } + } else { + Token.Equal + } + } else if ch === CharacterCodes.bar { + if scanner.ch === CharacterCodes.bar { + next(scanner) + Token.Lor + } else if scanner.ch === CharacterCodes.greaterThan { + next(scanner) + Token.BarGreater + } else { + Token.Bar + } + } else if ch === CharacterCodes.ampersand { + if scanner.ch === CharacterCodes.ampersand { + next(scanner) + Token.Land + } else { + Token.Band + } + } else if ch === CharacterCodes.lparen { + Token.Lparen + } else if ch === CharacterCodes.rparen { + Token.Rparen + } else if ch === CharacterCodes.lbracket { + Token.Lbracket + } else if ch === CharacterCodes.rbracket { + Token.Rbracket + } else if ch === CharacterCodes.lbrace { + Token.Lbrace + } else if ch === CharacterCodes.rbrace { + Token.Rbrace + } else if ch === CharacterCodes.comma { + Token.Comma + } else if ch === CharacterCodes.colon { + if scanner.ch === CharacterCodes.equal { + next(scanner) + Token.ColonEqual + } else if scanner.ch === CharacterCodes.greaterThan { + next(scanner) + Token.ColonGreaterThan + } else { + Token.Colon + } + } else if ch === CharacterCodes.backslash { + scanExoticIdentifier(scanner) + } else if ch === CharacterCodes.forwardslash { + if scanner.ch === CharacterCodes.forwardslash { + next(scanner) + scanSingleLineComment(scanner) + } else if scanner.ch === CharacterCodes.asterisk { + next(scanner) + scanMultiLineComment(scanner) + } else if scanner.ch === CharacterCodes.dot { + next(scanner) + Token.ForwardslashDot + } else { + Token.Forwardslash + } + } else if ch === CharacterCodes.minus { + if scanner.ch === CharacterCodes.dot { + next(scanner) + Token.MinusDot + } else if scanner.ch === CharacterCodes.greaterThan { + next(scanner) + Token.MinusGreater + } else { + Token.Minus + } + } else if ch === CharacterCodes.plus { + if scanner.ch === CharacterCodes.dot { + next(scanner) + Token.PlusDot + } else if scanner.ch === CharacterCodes.plus { + next(scanner) + Token.PlusPlus + } else if scanner.ch === CharacterCodes.equal { + next(scanner) + Token.PlusEqual + } else { + Token.Plus + } + } else if ch === CharacterCodes.greaterThan { + if scanner.ch === CharacterCodes.equal && !inDiamondMode(scanner) { + next(scanner) + Token.GreaterEqual + } else { + Token.GreaterThan + } + } else if ch === CharacterCodes.lessThan { + /* Imagine the following:
<div>< + * < indicates the start of a new jsx-element, the parser expects + * the name of a new element after the < + * Example: <div> <div + * But a / can also follow the <, as in </div>: + * This signals a closing element. To simulate the two-token lookahead, + * the </ needs to be treated as one unit (Token.LessThanSlash): + * is `<` the start of a jsx-child, or `</` the start of a closing tag?
+ * reconsiderLessThan peeks at the next token and + * determines the correct token to disambiguate */ + let reconsiderLessThan = scanner => { + /* < consumed */ + skipWhitespace(scanner) + if scanner.ch === CharacterCodes.forwardslash { + let () = next(scanner) + Token.LessThanSlash + } else { + Token.LessThan + } + } + + /* If an operator has whitespace around both sides, it's a binary operator */ + let isBinaryOp = (src, startCnum, endCnum) => + if startCnum === 0 { + false + } else { + let leftOk = { + let c = startCnum - 1 |> (@doesNotRaise Bytes.get)(src) |> Char.code + + c === CharacterCodes.space || (c === CharacterCodes.tab || CharacterCodes.isLineBreak(c)) + } + + let rightOk = { + let c = if endCnum === Bytes.length(src) { + -1 + } else { + endCnum |> (@doesNotRaise Bytes.get)(src) |> Char.code + } + + c === CharacterCodes.space || + (c === CharacterCodes.tab || + (CharacterCodes.isLineBreak(c) || c === CharacterCodes.eof)) + } + + leftOk && rightOk + } +} + +/* AST for js externals */ +module JsFfi = { + type scope = + | Global + | Module(string) /* bs.module("path") */ + | Scope(Longident.t) /* bs.scope(/"window", "location"/) */ + + type label_declaration = { + @live jld_attributes: Parsetree.attributes, + jld_name: string, + jld_alias: string, + jld_type: Parsetree.core_type, + jld_loc: Location.t, + } + + type importSpec = + | Default(label_declaration) + | Spec(list) + + type import_description = { + jid_loc: Location.t, + jid_spec: importSpec, + jid_scope: scope, + jid_attributes: Parsetree.attributes, + } + + let decl = (~attrs, ~loc, ~name, ~alias, ~typ) => { + jld_loc: loc, + jld_attributes: attrs, + jld_name: name, + jld_alias: alias, + jld_type: typ, + } + + let importDescr = (~attrs, ~scope, ~importSpec, ~loc) => { + jid_loc: loc, + jid_spec: importSpec, + jid_scope: scope, + jid_attributes: attrs, + } + + let toParsetree = importDescr => { + let bsVal = (Location.mknoloc("bs.val"), Parsetree.PStr(list{})) + let attrs = switch importDescr.jid_scope { + | Global => list{bsVal} + /* @genType.import("./MyMath"), + * @genType.import(/"./MyMath", "default"/) */ + | Module(s) => + let structure = list{ + Parsetree.Pconst_string(s, None) |> Ast_helper.Exp.constant |> Ast_helper.Str.eval, + } + let genType = (Location.mknoloc("genType.import"), Parsetree.PStr(structure)) + list{genType} + | Scope(longident) => + let structureItem = { + let expr = switch Longident.flatten(longident) |> List.map(s => + Ast_helper.Exp.constant(Parsetree.Pconst_string(s, None)) + ) { + | list{expr} => expr + | list{} as exprs | _ as exprs => exprs |> Ast_helper.Exp.tuple + } + + Ast_helper.Str.eval(expr) + } + + let bsScope = (Location.mknoloc("bs.scope"), Parsetree.PStr(list{structureItem})) + list{bsVal, bsScope} + } + + let valueDescrs = switch importDescr.jid_spec { + | Default(decl) => + let prim = list{decl.jld_name} + let allAttrs = List.concat(list{attrs, importDescr.jid_attributes}) |> List.map(attr => + switch attr { + | ( + {Location.txt: "genType.import"} as id, + Parsetree.PStr(list{{pstr_desc: Parsetree.Pstr_eval(moduleName, _)}}), + ) => + let default = Parsetree.Pconst_string("default", None) |> Ast_helper.Exp.constant + + let structureItem = + list{moduleName, default} |> Ast_helper.Exp.tuple |> Ast_helper.Str.eval + + (id, Parsetree.PStr(list{structureItem})) + | attr => attr + } + ) + + list{ + Ast_helper.Val.mk( + ~loc=importDescr.jid_loc, + ~prim, + ~attrs=allAttrs, + Location.mknoloc(decl.jld_alias), + decl.jld_type, + ) |> Ast_helper.Str.primitive, + } + | Spec(decls) => 
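// A hedged restatement of the isBinaryOp check above, over a plain string
// instead of Bytes (hasSpaceAroundOp and the sample offsets are mine, for
// illustration only): whitespace before the operator and whitespace, or the end
// of the input, after it is what marks the operator as binary rather than unary.
let hasSpaceAroundOp = (src: string, startCnum: int, endCnum: int): bool => {
  let isWs = c => c == ' ' || c == '\t' || c == '\n' || c == '\r'
  let leftOk = startCnum > 0 && isWs(String.get(src, startCnum - 1))
  let rightOk = endCnum >= String.length(src) || isWs(String.get(src, endCnum))
  leftOk && rightOk
}
// hasSpaceAroundOp("a - 1", 2, 3) == true  (binary minus)
// hasSpaceAroundOp("a -1", 2, 3) == false (unary minus on the literal)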
List.map(decl => { + let prim = list{decl.jld_name} + let allAttrs = List.concat(list{attrs, decl.jld_attributes}) + Ast_helper.Val.mk( + ~loc=importDescr.jid_loc, + ~prim, + ~attrs=allAttrs, + Location.mknoloc(decl.jld_alias), + decl.jld_type, + ) |> Ast_helper.Str.primitive(~loc=decl.jld_loc) + }, decls) + } + + let jsFfiAttr = (Location.mknoloc("ns.jsFfi"), Parsetree.PStr(list{})) + Ast_helper.Mod.structure(~loc=importDescr.jid_loc, valueDescrs) + |> Ast_helper.Incl.mk(~attrs=list{jsFfiAttr}, ~loc=importDescr.jid_loc) + |> Ast_helper.Str.include_(~loc=importDescr.jid_loc) + } +} + +module ParsetreeCompatibility = { + let concatLongidents = (l1, l2) => { + let parts1 = Longident.flatten(l1) + let parts2 = Longident.flatten(l2) + switch List.concat(list{parts1, parts2}) |> Longident.unflatten { + | Some(longident) => longident + | None => l2 + } + } + + /* TODO: support nested open's ? */ + let rec rewritePpatOpen = (longidentOpen, pat) => { + open Parsetree + switch pat.ppat_desc { + | Ppat_array(list{first, ...rest}) => /* Color.[Red, Blue, Green] -> [Color.Red, Blue, Green] */ + {...pat, ppat_desc: Ppat_array(list{rewritePpatOpen(longidentOpen, first), ...rest})} + | Ppat_tuple(list{first, ...rest}) => /* Color.(Red, Blue, Green) -> (Color.Red, Blue, Green) */ + {...pat, ppat_desc: Ppat_tuple(list{rewritePpatOpen(longidentOpen, first), ...rest})} + | Ppat_construct( + {txt: Longident.Lident("::")} as listConstructor, + Some({ppat_desc: Ppat_tuple(list{pat, ...rest})} as element), + ) => /* Color.(list{Red, Blue, Green}) -> list{Color.Red, Blue, Green} */ + { + ...pat, + ppat_desc: Ppat_construct( + listConstructor, + Some({ + ...element, + ppat_desc: Ppat_tuple(list{rewritePpatOpen(longidentOpen, pat), ...rest}), + }), + ), + } + | Ppat_construct( + {txt: constructor} as longidentLoc, + optPattern, + ) => /* Foo.(Bar(a)) -> Foo.Bar(a) */ + { + ...pat, + ppat_desc: Ppat_construct( + {...longidentLoc, txt: concatLongidents(longidentOpen, constructor)}, + optPattern, + ), + } + | Ppat_record(list{({txt: lbl} as longidentLoc, firstPat), ...rest}, flag) => + /* Foo.{x} -> {Foo.x: x} */ + let firstRow = ({...longidentLoc, txt: concatLongidents(longidentOpen, lbl)}, firstPat) + {...pat, ppat_desc: Ppat_record(list{firstRow, ...rest}, flag)} + | Ppat_or(pat1, pat2) => { + ...pat, + ppat_desc: Ppat_or( + rewritePpatOpen(longidentOpen, pat1), + rewritePpatOpen(longidentOpen, pat2), + ), + } + | Ppat_constraint(pattern, typ) => { + ...pat, + ppat_desc: Ppat_constraint(rewritePpatOpen(longidentOpen, pattern), typ), + } + | Ppat_type({txt: constructor} as longidentLoc) => { + ...pat, + ppat_desc: Ppat_type({ + ...longidentLoc, + txt: concatLongidents(longidentOpen, constructor), + }), + } + | Ppat_lazy(p) => {...pat, ppat_desc: Ppat_lazy(rewritePpatOpen(longidentOpen, p))} + | Ppat_exception(p) => {...pat, ppat_desc: Ppat_exception(rewritePpatOpen(longidentOpen, p))} + | _ => pat + } + } + + let rec rewriteReasonFastPipe = expr => { + open Parsetree + switch expr.pexp_desc { + | Pexp_apply( + { + pexp_desc: + Pexp_apply( + {pexp_desc: Pexp_ident({txt: Longident.Lident("|.")})} as op, + list{(Asttypes.Nolabel, lhs), (Nolabel, rhs)}, + ), + pexp_attributes: subAttrs, + }, + args, + ) => + let rhsLoc = {...rhs.pexp_loc, loc_end: expr.pexp_loc.loc_end} + let newLhs = { + let expr = rewriteReasonFastPipe(lhs) + {...expr, pexp_attributes: subAttrs} + } + + let allArgs = list{ + (Asttypes.Nolabel, newLhs), + (Asttypes.Nolabel, Ast_helper.Exp.apply(~loc=rhsLoc, rhs, args)), + } + + 
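// A usage-level illustration (my own example, not from this file) of the
// invariant the rewriteReasonFastPipe transformation preserves: the fast pipe
// feeds its left-hand side in as the first argument, so both bindings below
// evaluate to 3.
let add = (x, y) => x + y
let viaPipe = 1->add(2)
let direct = add(1, 2)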
Ast_helper.Exp.apply(~attrs=expr.pexp_attributes, ~loc=expr.pexp_loc, op, allArgs) + | _ => expr + } + } + + let makeReasonArityMapper = (~forPrinter) => { + open Ast_mapper + { + ...default_mapper, + expr: (mapper, expr) => + switch expr { + /* Don't mind this case, Reason doesn't handle this. */ + /* | {pexp_desc = Pexp_variant (lbl, args); pexp_loc; pexp_attributes} -> */ + /* let newArgs = match args with */ + /* | (Some {pexp_desc = Pexp_tuple [{pexp_desc = Pexp_tuple _ } as sp]}) as args-> */ + /* if forPrinter then args else Some sp */ + /* | Some {pexp_desc = Pexp_tuple [sp]} -> Some sp */ + /* | _ -> args */ + /* in */ + /* default_mapper.expr mapper {pexp_desc=Pexp_variant(lbl, newArgs); pexp_loc; pexp_attributes} */ + | {pexp_desc: Pexp_construct(lid, args), pexp_loc, pexp_attributes} => + let newArgs = switch args { + | Some({pexp_desc: Pexp_tuple(list{{pexp_desc: Pexp_tuple(_)} as sp})}) as args => + if forPrinter { + args + } else { + Some(sp) + } + | Some({pexp_desc: Pexp_tuple(list{sp})}) => Some(sp) + | _ => args + } + + default_mapper.expr( + mapper, + { + pexp_desc: Pexp_construct(lid, newArgs), + pexp_loc: pexp_loc, + pexp_attributes: pexp_attributes, + }, + ) + | expr => default_mapper.expr(mapper, rewriteReasonFastPipe(expr)) + }, + pat: (mapper, pattern) => + switch pattern { + /* Don't mind this case, Reason doesn't handle this. */ + /* | {ppat_desc = Ppat_variant (lbl, args); ppat_loc; ppat_attributes} -> */ + /* let newArgs = match args with */ + /* | (Some {ppat_desc = Ppat_tuple [{ppat_desc = Ppat_tuple _} as sp]}) as args -> */ + /* if forPrinter then args else Some sp */ + /* | Some {ppat_desc = Ppat_tuple [sp]} -> Some sp */ + /* | _ -> args */ + /* in */ + /* default_mapper.pat mapper {ppat_desc = Ppat_variant (lbl, newArgs); ppat_loc; ppat_attributes;} */ + | {ppat_desc: Ppat_construct(lid, args), ppat_loc, ppat_attributes} => + let new_args = switch args { + | Some({ppat_desc: Ppat_tuple(list{{ppat_desc: Ppat_tuple(_)} as sp})}) as args => + if forPrinter { + args + } else { + Some(sp) + } + | Some({ppat_desc: Ppat_tuple(list{sp})}) => Some(sp) + | _ => args + } + default_mapper.pat( + mapper, + { + ppat_desc: Ppat_construct(lid, new_args), + ppat_loc: ppat_loc, + ppat_attributes: ppat_attributes, + }, + ) + | x => default_mapper.pat(mapper, x) + }, + } + } + + let escapeTemplateLiteral = s => { + let len = String.length(s) + let b = Buffer.create(len) + let i = ref(0) + while i.contents < len { + let c = (@doesNotRaise String.get)(s, i.contents) + if c == '`' { + Buffer.add_char(b, '\\') + Buffer.add_char(b, '`') + incr(i) + } else if c == '$' { + if i.contents + 1 < len { + let c2 = (@doesNotRaise String.get)(s, i.contents + 1) + if c2 == '{' { + Buffer.add_char(b, '\\') + Buffer.add_char(b, '$') + Buffer.add_char(b, '{') + } else { + Buffer.add_char(b, c) + Buffer.add_char(b, c2) + } + i := i.contents + 2 + } else { + Buffer.add_char(b, c) + incr(i) + } + } else if c == '\\' { + Buffer.add_char(b, '\\') + Buffer.add_char(b, '\\') + incr(i) + } else { + Buffer.add_char(b, c) + incr(i) + } + } + Buffer.contents(b) + } + + let escapeStringContents = s => { + let len = String.length(s) + let b = Buffer.create(len) + + let i = ref(0) + + while i.contents < len { + let c = String.unsafe_get(s, i.contents) + if c == '\\' { + incr(i) + Buffer.add_char(b, c) + let c = String.unsafe_get(s, i.contents) + if i.contents < len { + let () = Buffer.add_char(b, c) + incr(i) + } else { + () + } + } else if c == '"' { + Buffer.add_char(b, '\\') + Buffer.add_char(b, c) + 
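// escapeTemplateLiteral above protects backticks, `${` and backslashes so that
// template literal contents print back out as valid source. A minimal
// standalone sketch of just the backtick/backslash cases (escapeBackticks is a
// made-up name; the real function also handles `${`):
let escapeBackticks = (s: string): string => {
  let b = Buffer.create(String.length(s))
  String.iter(c => {
    if c == '`' || c == '\\' {
      Buffer.add_char(b, '\\')
    }
    Buffer.add_char(b, c)
  }, s)
  Buffer.contents(b)
}
// escapeBackticks("a ` b") inserts a backslash before the backtick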
incr(i) + } else { + Buffer.add_char(b, c) + incr(i) + } + } + Buffer.contents(b) + } + + let looksLikeRecursiveTypeDeclaration = typeDeclaration => { + open Parsetree + let name = typeDeclaration.ptype_name.txt + let rec checkKind = kind => + switch kind { + | Ptype_abstract | Ptype_open => false + | Ptype_variant(constructorDeclarations) => + List.exists(checkConstructorDeclaration, constructorDeclarations) + | Ptype_record(labelDeclarations) => List.exists(checkLabelDeclaration, labelDeclarations) + } + + and checkConstructorDeclaration = constrDecl => + checkConstructorArguments(constrDecl.pcd_args) || + switch constrDecl.pcd_res { + | Some(typexpr) => checkTypExpr(typexpr) + | None => false + } + + and checkLabelDeclaration = labelDeclaration => checkTypExpr(labelDeclaration.pld_type) + + and checkConstructorArguments = constrArg => + switch constrArg { + | Pcstr_tuple(types) => List.exists(checkTypExpr, types) + | Pcstr_record(labelDeclarations) => List.exists(checkLabelDeclaration, labelDeclarations) + } + + and checkTypExpr = typ => + switch typ.ptyp_desc { + | Ptyp_any => false + | Ptyp_var(_) => false + | Ptyp_object(_) => false + | Ptyp_class(_) => false + | Ptyp_package(_) => false + | Ptyp_extension(_) => false + | Ptyp_arrow(_lbl, typ1, typ2) => checkTypExpr(typ1) || checkTypExpr(typ2) + | Ptyp_tuple(types) => List.exists(checkTypExpr, types) + | Ptyp_constr({txt: longident}, types) => + switch longident { + | Lident(ident) => ident == name + | _ => false + } || + List.exists(checkTypExpr, types) + | Ptyp_alias(typ, _) => checkTypExpr(typ) + | Ptyp_variant(rowFields, _, _) => List.exists(checkRowFields, rowFields) + | Ptyp_poly(_, typ) => checkTypExpr(typ) + } + + and checkRowFields = rowField => + switch rowField { + | Rtag(_, _, _, types) => List.exists(checkTypExpr, types) + | Rinherit(typexpr) => checkTypExpr(typexpr) + } + + checkKind(typeDeclaration.ptype_kind) + } + + let filterReasonRawLiteral = attrs => List.filter(attr => + switch attr { + | ({Location.txt: "reason.raw_literal"}, _) => false + | _ => true + } + , attrs) + + let stringLiteralMapper = stringData => { + let isSameLocation = (l1, l2) => { + open Location + l1.loc_start.pos_cnum === l2.loc_start.pos_cnum + } + + let remainingStringData = stringData + open Ast_mapper + { + ...default_mapper, + expr: (mapper, expr) => + switch expr.pexp_desc { + | Pexp_constant(Pconst_string(_txt, None)) => + switch List.find_opt( + ((_stringData, stringLoc)) => isSameLocation(stringLoc, expr.pexp_loc), + remainingStringData, + ) { + | Some(stringData, _) => + let stringData = { + let attr = List.find_opt(attr => + switch attr { + | ({Location.txt: "reason.raw_literal"}, _) => true + | _ => false + } + , expr.pexp_attributes) + switch attr { + | Some( + _, + PStr(list{{ + pstr_desc: Pstr_eval({pexp_desc: Pexp_constant(Pconst_string(raw, _))}, _), + }}), + ) => raw + | _ => (@doesNotRaise String.sub)(stringData, 1, String.length(stringData) - 2) + } + } + + { + ...expr, + pexp_attributes: filterReasonRawLiteral(expr.pexp_attributes), + pexp_desc: Pexp_constant(Pconst_string(stringData, None)), + } + | None => default_mapper.expr(mapper, expr) + } + | _ => default_mapper.expr(mapper, expr) + }, + } + } + + let normalize = { + open Ast_mapper + { + ...default_mapper, + attributes: (mapper, attrs) => attrs |> List.filter(attr => + switch attr { + | ( + {Location.txt: "reason.preserve_braces" | "explicit_arity" | "implicity_arity"}, + _, + ) => false + | _ => true + } + ) |> default_mapper.attributes(mapper), + pat: (mapper, p) 
=> + switch p.ppat_desc { + | Ppat_open({txt: longidentOpen}, pattern) => + let p = rewritePpatOpen(longidentOpen, pattern) + default_mapper.pat(mapper, p) + | _ => default_mapper.pat(mapper, p) + }, + expr: (mapper, expr) => + switch expr.pexp_desc { + | Pexp_constant(Pconst_string(txt, None)) => + let raw = escapeStringContents(txt) + let s = Parsetree.Pconst_string(raw, None) + let expr = Ast_helper.Exp.constant(~attrs=expr.pexp_attributes, ~loc=expr.pexp_loc, s) + + expr + | Pexp_constant(Pconst_string(txt, tag)) => + let s = Parsetree.Pconst_string(escapeTemplateLiteral(txt), tag) + Ast_helper.Exp.constant( + ~attrs=mapper.attributes(mapper, expr.pexp_attributes), + ~loc=expr.pexp_loc, + s, + ) + | Pexp_function(cases) => + let loc = switch (cases, List.rev(cases)) { + | (list{first, ..._}, list{last, ..._}) => { + ...first.pc_lhs.ppat_loc, + loc_end: last.pc_rhs.pexp_loc.loc_end, + } + | _ => Location.none + } + + Ast_helper.Exp.fun_( + ~loc, + Asttypes.Nolabel, + None, + Ast_helper.Pat.var(Location.mknoloc("x")), + Ast_helper.Exp.match_( + ~loc, + Ast_helper.Exp.ident(Location.mknoloc(Longident.Lident("x"))), + default_mapper.cases(mapper, cases), + ), + ) + | Pexp_apply( + {pexp_desc: Pexp_ident({txt: Longident.Lident("!")})}, + list{(Asttypes.Nolabel, operand)}, + ) => + /* turn `!foo` into `foo.contents` */ + Ast_helper.Exp.field( + ~loc=expr.pexp_loc, + ~attrs=expr.pexp_attributes, + operand, + Location.mknoloc(Longident.Lident("contents")), + ) + | Pexp_apply( + {pexp_desc: Pexp_ident({txt: Longident.Lident("##")})} as op, + list{ + (Asttypes.Nolabel, lhs), + (Nolabel, {pexp_desc: Pexp_constant(Pconst_string(txt, None))} as stringExpr), + }, + ) => + let ident = Ast_helper.Exp.ident( + ~loc=stringExpr.pexp_loc, + Location.mkloc(Longident.Lident(txt), stringExpr.pexp_loc), + ) + + Ast_helper.Exp.apply( + ~loc=expr.pexp_loc, + ~attrs=expr.pexp_attributes, + op, + list{(Asttypes.Nolabel, lhs), (Nolabel, ident)}, + ) + | Pexp_apply( + {pexp_desc: Pexp_ident({txt: Longident.Lident("@@")})}, + list{(Asttypes.Nolabel, callExpr), (Nolabel, argExpr)}, + ) => + Ast_helper.Exp.apply( + mapper.expr(mapper, callExpr), + list{(Asttypes.Nolabel, mapper.expr(mapper, argExpr))}, + ) + | Pexp_apply( + {pexp_desc: Pexp_ident({txt: Longident.Lident("@")})}, + list{(Nolabel, arg1), (Nolabel, arg2)}, + ) => + let listConcat = Longident.Ldot(Longident.Lident("List"), "append") + Ast_helper.Exp.apply( + Ast_helper.Exp.ident(Location.mknoloc(listConcat)), + list{(Nolabel, mapper.expr(mapper, arg1)), (Nolabel, mapper.expr(mapper, arg2))}, + ) + | Pexp_match( + condition, + list{ + { + pc_lhs: {ppat_desc: Ppat_construct({txt: Longident.Lident("true")}, None)}, + pc_rhs: thenExpr, + }, + { + pc_lhs: {ppat_desc: Ppat_construct({txt: Longident.Lident("false")}, None)}, + pc_rhs: elseExpr, + }, + }, + ) => + let ternaryMarker = (Location.mknoloc("ns.ternary"), Parsetree.PStr(list{})) + Ast_helper.Exp.ifthenelse( + ~loc=expr.pexp_loc, + ~attrs=list{ternaryMarker, ...expr.pexp_attributes}, + default_mapper.expr(mapper, condition), + default_mapper.expr(mapper, thenExpr), + Some(default_mapper.expr(mapper, elseExpr)), + ) + | _ => default_mapper.expr(mapper, expr) + }, + structure_item: (mapper, structureItem) => + switch structureItem.pstr_desc { + /* heuristic: if we have multiple type declarations, mark them recursive */ + | Pstr_type(recFlag, typeDeclarations) => + let flag = switch typeDeclarations { + | list{td} => + if looksLikeRecursiveTypeDeclaration(td) { + Asttypes.Recursive + } else { + 
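// Usage-level illustrations (my own examples) of two of the rewrites above:
// Reason's `!cell` dereference is normalized to a `.contents` field access, and
// Reason's `l1 @ l2` list concatenation becomes a call to List.append.
let cell = ref(10)
let deref = cell.contents
let appended = List.append(list{1, 2}, list{3}) // list{1, 2, 3}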
Asttypes.Nonrecursive + } + | _ => recFlag + } + + { + ...structureItem, + pstr_desc: Pstr_type( + flag, + List.map( + typeDeclaration => default_mapper.type_declaration(mapper, typeDeclaration), + typeDeclarations, + ), + ), + } + | _ => default_mapper.structure_item(mapper, structureItem) + }, + signature_item: (mapper, signatureItem) => + switch signatureItem.psig_desc { + /* heuristic: if we have multiple type declarations, mark them recursive */ + | Psig_type(recFlag, typeDeclarations) => + let flag = switch typeDeclarations { + | list{td} => + if looksLikeRecursiveTypeDeclaration(td) { + Asttypes.Recursive + } else { + Asttypes.Nonrecursive + } + | _ => recFlag + } + + { + ...signatureItem, + psig_desc: Psig_type( + flag, + List.map( + typeDeclaration => default_mapper.type_declaration(mapper, typeDeclaration), + typeDeclarations, + ), + ), + } + | _ => default_mapper.signature_item(mapper, signatureItem) + }, + value_binding: (mapper, vb) => + switch vb { + | { + pvb_pat: {ppat_desc: Ppat_var(_)} as pat, + pvb_expr: {pexp_loc: expr_loc, pexp_desc: Pexp_constraint(expr, typ)}, + } when expr_loc.loc_ghost => + /* let t: t = (expr : t) -> let t: t = expr */ + let typ = default_mapper.typ(mapper, typ) + let pat = default_mapper.pat(mapper, pat) + let expr = mapper.expr(mapper, expr) + let newPattern = Ast_helper.Pat.constraint_( + ~loc={...pat.ppat_loc, loc_end: typ.ptyp_loc.loc_end}, + pat, + typ, + ) + { + ...vb, + pvb_pat: newPattern, + pvb_expr: expr, + pvb_attributes: default_mapper.attributes(mapper, vb.pvb_attributes), + } + | { + pvb_pat: {ppat_desc: Ppat_constraint(pat, {ptyp_desc: Ptyp_poly(list{}, _)})}, + pvb_expr: {pexp_loc: expr_loc, pexp_desc: Pexp_constraint(expr, typ)}, + } when expr_loc.loc_ghost => + /* let t: . t = (expr : t) -> let t: t = expr */ + let typ = default_mapper.typ(mapper, typ) + let pat = default_mapper.pat(mapper, pat) + let expr = mapper.expr(mapper, expr) + let newPattern = Ast_helper.Pat.constraint_( + ~loc={...pat.ppat_loc, loc_end: typ.ptyp_loc.loc_end}, + pat, + typ, + ) + { + ...vb, + pvb_pat: newPattern, + pvb_expr: expr, + pvb_attributes: default_mapper.attributes(mapper, vb.pvb_attributes), + } + | _ => default_mapper.value_binding(mapper, vb) + }, + } + } + + let normalizeReasonArityStructure = (~forPrinter, s) => { + let mapper = makeReasonArityMapper(~forPrinter) + mapper.Ast_mapper.structure(mapper, s) + } + + let normalizeReasonAritySignature = (~forPrinter, s) => { + let mapper = makeReasonArityMapper(~forPrinter) + mapper.Ast_mapper.signature(mapper, s) + } + + let structure = s => normalize.Ast_mapper.structure(normalize, s) + let signature = s => normalize.Ast_mapper.signature(normalize, s) + + let replaceStringLiteralStructure = (stringData, structure) => { + let mapper = stringLiteralMapper(stringData) + mapper.Ast_mapper.structure(mapper, structure) + } + + let replaceStringLiteralSignature = (stringData, signature) => { + let mapper = stringLiteralMapper(stringData) + mapper.Ast_mapper.signature(mapper, signature) + } +} + +module OcamlParser = Parser + +module Parser = { + type mode = ParseForTypeChecker | Default + + type regionStatus = Report | Silent + + type t = { + mode: mode, + mutable scanner: Scanner.t, + mutable token: Token.t, + mutable startPos: Lexing.position, + mutable endPos: Lexing.position, + mutable prevEndPos: Lexing.position, + mutable breadcrumbs: list<(Grammar.t, Lexing.position)>, + mutable errors: list, + mutable diagnostics: list, + mutable comments: list, + mutable regions: list>, + } + + let err = 
(~startPos=?, ~endPos=?, p, error) => { + let d = Diagnostics.make( + ~filename=p.scanner.filename, + ~startPos=switch startPos { + | Some(pos) => pos + | None => p.startPos + }, + ~endPos=switch endPos { + | Some(pos) => pos + | None => p.endPos + }, + error, + ) + + try if List.hd(p.regions).contents == Report { + p.diagnostics = list{d, ...p.diagnostics} + List.hd(p.regions) := Silent + } catch { + | Failure(_) => () + } + } + + let beginRegion = p => p.regions = list{ref(Report), ...p.regions} + let endRegion = p => + try p.regions = List.tl(p.regions) catch { + | Failure(_) => () + } + + /* Advance to the next non-comment token and store any encountered comment + * in the parser's state. Every comment contains the end position of it's + * previous token to facilite comment interleaving */ + let rec next = (~prevEndPos=?, p) => { + let prevEndPos = switch prevEndPos { + | Some(pos) => pos + | None => p.endPos + } + let (startPos, endPos, token) = Scanner.scan(p.scanner) + switch token { + | Comment(c) => + Comment.setPrevTokEndPos(c, p.endPos) + p.comments = list{c, ...p.comments} + p.prevEndPos = p.endPos + p.endPos = endPos + next(~prevEndPos, p) + | _ => + p.token = token + + /* p.prevEndPos <- prevEndPos; */ + p.prevEndPos = prevEndPos + p.startPos = startPos + p.endPos = endPos + } + } + + let checkProgress = (~prevEndPos, ~result, p) => + if p.endPos === prevEndPos { + None + } else { + Some(result) + } + + let make = (~mode=ParseForTypeChecker, src, filename) => { + let scanner = Scanner.make(Bytes.of_string(src), filename) + let parserState = { + mode: mode, + scanner: scanner, + token: Token.Eof, + startPos: Lexing.dummy_pos, + prevEndPos: Lexing.dummy_pos, + endPos: Lexing.dummy_pos, + breadcrumbs: list{}, + errors: list{}, + diagnostics: list{}, + comments: list{}, + regions: list{ref(Report)}, + } + parserState.scanner.err = (~startPos, ~endPos, error) => { + let diagnostic = Diagnostics.make(~filename, ~startPos, ~endPos, error) + + parserState.diagnostics = list{diagnostic, ...parserState.diagnostics} + } + next(parserState) + parserState + } + + let leaveBreadcrumb = (p, circumstance) => { + let crumb = (circumstance, p.startPos) + p.breadcrumbs = list{crumb, ...p.breadcrumbs} + } + + let eatBreadcrumb = p => + switch p.breadcrumbs { + | list{} => () + | list{_, ...crumbs} => p.breadcrumbs = crumbs + } + + let optional = (p, token) => + if p.token == token { + let () = next(p) + true + } else { + false + } + + let expect = (~grammar=?, token, p) => + if p.token == token { + next(p) + } else { + let error = Diagnostics.expected(~grammar?, p.prevEndPos, token) + err(~startPos=p.prevEndPos, p, error) + } + + /* Don't use immutable copies here, it trashes certain heuristics + * in the ocaml compiler, resulting in massive slowdowns of the parser */ + let lookahead = (p, callback) => { + let err = p.scanner.err + let ch = p.scanner.ch + let offset = p.scanner.offset + let rdOffset = p.scanner.rdOffset + let lineOffset = p.scanner.lineOffset + let lnum = p.scanner.lnum + let mode = p.scanner.mode + let token = p.token + let startPos = p.startPos + let endPos = p.endPos + let prevEndPos = p.prevEndPos + let breadcrumbs = p.breadcrumbs + let errors = p.errors + let diagnostics = p.diagnostics + let comments = p.comments + + let res = callback(p) + + p.scanner.err = err + p.scanner.ch = ch + p.scanner.offset = offset + p.scanner.rdOffset = rdOffset + p.scanner.lineOffset = lineOffset + p.scanner.lnum = lnum + p.scanner.mode = mode + p.token = token + p.startPos = startPos + 
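// Parser.lookahead saves every piece of mutable scanner/parser state before
// running the callback and restores it afterwards, so peeking never consumes
// input. A hedged, self-contained sketch of that discipline (cursor and
// lookaheadAt are made-up names, for illustration only):
type cursor = {mutable pos: int}
let lookaheadAt = (c: cursor, peek: cursor => 'a): 'a => {
  let saved = c.pos
  let result = peek(c) // the callback may move the cursor freely
  c.pos = saved // restoring the snapshot makes the peek side-effect free
  result
}
// lookaheadAt({pos: 0}, c => {
//   c.pos = c.pos + 5
//   c.pos
// }) == 5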
p.endPos = endPos + p.prevEndPos = prevEndPos + p.breadcrumbs = breadcrumbs + p.errors = errors + p.diagnostics = diagnostics + p.comments = comments + + res + } +} + +module NapkinScript = { + let mkLoc = (startLoc, endLoc) => { + open Location + { + loc_start: startLoc, + loc_end: endLoc, + loc_ghost: false, + } + } + + module Recover = { + type action = option /* None is abort, Some () is retry */ + + let defaultExpr = () => { + let id = Location.mknoloc("napkinscript.exprhole") + Ast_helper.Exp.mk(Pexp_extension(id, PStr(list{}))) + } + + let defaultType = () => { + let id = Location.mknoloc("napkinscript.typehole") + Ast_helper.Typ.extension((id, PStr(list{}))) + } + + let defaultPattern = () => { + let id = Location.mknoloc("napkinscript.patternhole") + Ast_helper.Pat.extension((id, PStr(list{}))) + } + /* Ast_helper.Pat.any () */ + + let defaultModuleExpr = () => Ast_helper.Mod.structure(list{}) + let defaultModuleType = () => Ast_helper.Mty.signature(list{}) + + let recoverEqualGreater = p => { + Parser.expect(EqualGreater, p) + switch p.Parser.token { + | MinusGreater => Parser.next(p) + | _ => () + } + } + + let shouldAbortListParse = p => { + let rec check = breadcrumbs => + switch breadcrumbs { + | list{} => false + | list{(grammar, _), ...rest} => + if Grammar.isPartOfList(grammar, p.Parser.token) { + true + } else { + check(rest) + } + } + + check(p.breadcrumbs) + } + } + + module ErrorMessages = { + let listPatternSpread = "List pattern matches only supports one `...` spread, at the end. +Explanation: a list spread at the tail is efficient, but a spread in the middle would create new list{s}; out of performance concern, our pattern matching currently guarantees to never create new intermediate data." + + let recordPatternSpread = "Record's `...` spread is not supported in pattern matches. +Explanation: you can't collect a subset of a record's field into its own record, since a record needs an explicit declaration and that subset wouldn't have one. +Solution: you need to pull out each field you want explicitly." + + @live let recordPatternUnderscore = "Record patterns only support one `_`, at the end." + + let arrayPatternSpread = "Array's `...` spread is not supported in pattern matches. +Explanation: such spread would create a subarray; out of performance concern, our pattern matching currently guarantees to never create new intermediate data. +Solution: if it's to validate the first few elements, use a `when` clause + Array size check + `get` checks on the current pattern. If it's to obtain a subarray, use `Array.sub` or `Belt.Array.slice`." + + let arrayExprSpread = "Arrays can't use the `...` spread currently. Please use `concat` or other Array helpers." + + let recordExprSpread = "Records can only have one `...` spread, at the beginning. +Explanation: since records have a known, fixed shape, a spread like `{a, ...b}` wouldn't make sense, as `b` would override every field of `a` anyway." + + let listExprSpread = "Lists can only have one `...` spread, and at the end. +Explanation: lists are singly-linked list, where a node contains a value and points to the next node. `list{a, ...bc}` efficiently creates a new item and links `bc` as its next nodes. `[...bc, a]` would be expensive, as it'd need to traverse `bc` and prepend each item to `a` one by one. We therefore disallow such syntax sugar. +Solution: directly use `concat`." + + let variantIdent = "A polymorphic variant (e.g. #id) must start with an alphabetical letter." 
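// Illustration (my own example) of the rule stated by listPatternSpread above:
// a list pattern may use a single `...` spread, and only in tail position.
let describeList = xs =>
  switch xs {
  | list{} => "empty"
  | list{head, ...rest} =>
    string_of_int(head) ++ " and " ++ string_of_int(List.length(rest)) ++ " more"
  }
// describeList(list{1, 2, 3}) == "1 and 2 more"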
+ } + + let jsxAttr = (Location.mknoloc("JSX"), Parsetree.PStr(list{})) + let uncurryAttr = (Location.mknoloc("bs"), Parsetree.PStr(list{})) + let ternaryAttr = (Location.mknoloc("ns.ternary"), Parsetree.PStr(list{})) + let makeBracesAttr = loc => (Location.mkloc("ns.braces", loc), Parsetree.PStr(list{})) + + type typDefOrExt = + | TypeDef({recFlag: Asttypes.rec_flag, types: list}) + | TypeExt(Parsetree.type_extension) + + type labelledParameter = + | TermParameter({ + uncurried: bool, + attrs: Parsetree.attributes, + label: Asttypes.arg_label, + expr: option, + pat: Parsetree.pattern, + pos: Lexing.position, + }) + | TypeParameter({ + uncurried: bool, + attrs: Parsetree.attributes, + locs: list>, + pos: Lexing.position, + }) + + type recordPatternItem = + | PatUnderscore + | PatField((Ast_helper.lid, Parsetree.pattern)) + + type context = + | OrdinaryExpr + | TernaryTrueBranchExpr + | WhenExpr + + let getClosingToken = x => + switch x { + | Token.Lparen => Token.Rparen + | Lbrace => Rbrace + | Lbracket => Rbracket + | _ => assert false + } + + let rec goToClosing = (closingToken, state) => + switch (state.Parser.token, closingToken) { + | (Rparen, Token.Rparen) | (Rbrace, Rbrace) | (Rbracket, Rbracket) => + Parser.next(state) + () + | ((Token.Lbracket | Lparen | Lbrace) as t, _) => + Parser.next(state) + goToClosing(getClosingToken(t), state) + goToClosing(closingToken, state) + | (Rparen | Token.Rbrace | Rbracket | Eof, _) => () /* TODO: how do report errors here? */ + | _ => + Parser.next(state) + goToClosing(closingToken, state) + } + + /* Madness */ + let isEs6ArrowExpression = (~inTernary, p) => Parser.lookahead(p, state => + switch state.Parser.token { + | Lident(_) | List | Underscore => + Parser.next(state) + switch state.Parser.token { + /* Don't think that this valid + * Imagine: let x = (a: int) + * This is a parenthesized expression with a type constraint, wait for + * the arrow */ + /* | Colon when not inTernary -> true */ + | EqualGreater => true + | _ => false + } + | Lparen => + let prevEndPos = state.prevEndPos + Parser.next(state) + switch state.token { + | Rparen => + Parser.next(state) + switch state.Parser.token { + | Colon when !inTernary => true + | EqualGreater => true + | _ => false + } + | Dot /* uncurried */ => true + | Tilde => true + | Backtick => false /* (` always indicates the start of an expr, can't be es6 parameter */ + | _ => + goToClosing(Rparen, state) + switch state.Parser.token { + | EqualGreater => true + /* | Lbrace TODO: detect missing =>, is this possible? */ + | Colon when !inTernary => true + | Rparen => /* imagine having something as : + * switch colour { + * | Red + * when l == l' + * || (&Clflags.classic && (l == Nolabel && !is_optional(l'))) => (t1, t2) + * We'll arrive at the outer rparen just before the =>. + * This is not an es6 arrow. 
+ * */ + false + | _ => + Parser.next(state) + /* error recovery, peek at the next token, + * (elements, providerId] => { + * in the example above, we have an unbalanced ] here + */ + switch state.Parser.token { + | EqualGreater when state.startPos.pos_lnum === prevEndPos.pos_lnum => true + | _ => false + } + } + } + | _ => false + } + ) + + let isEs6ArrowFunctor = p => Parser.lookahead(p, state => + switch state.Parser.token { + /* | Uident _ | Underscore -> */ + /* Parser.next state; */ + /* begin match state.Parser.token with */ + /* | EqualGreater -> true */ + /* | _ -> false */ + /* end */ + | Lparen => + Parser.next(state) + switch state.token { + | Rparen => + Parser.next(state) + switch state.token { + | Colon | EqualGreater => true + | _ => false + } + | _ => + goToClosing(Rparen, state) + switch state.Parser.token { + | EqualGreater | Lbrace => true + | Colon => true + | _ => false + } + } + | _ => false + } + ) + + let isEs6ArrowType = p => Parser.lookahead(p, state => + switch state.Parser.token { + | Lparen => + Parser.next(state) + switch state.Parser.token { + | Rparen => + Parser.next(state) + switch state.Parser.token { + | EqualGreater => true + | _ => false + } + | Tilde | Dot => true + | _ => + goToClosing(Rparen, state) + switch state.Parser.token { + | EqualGreater => true + | _ => false + } + } + | Tilde => true + | _ => false + } + ) + + let buildLongident = words => + switch List.rev(words) { + | list{} => assert false + | list{hd, ...tl} => List.fold_left((p, s) => Longident.Ldot(p, s), Lident(hd), tl) + } + + let makeInfixOperator = (p, token, startPos, endPos) => { + let stringifiedToken = if token == Token.MinusGreater { + "|." + } else if token == Token.PlusPlus { + "^" + } else if token == Token.BangEqual { + "<>" + } else if token == Token.BangEqualEqual { + "!=" + } else if token == Token.Equal { + /* TODO: could have a totally different meaning like x->fooSet(y) */ + Parser.err(~startPos, ~endPos, p, Diagnostics.message("Did you mean `==` here?")) + "=" + } else if token == Token.EqualEqual { + "=" + } else if token == Token.EqualEqualEqual { + "==" + } else { + Token.toString(token) + } + + let loc = mkLoc(startPos, endPos) + let operator = Location.mkloc(Longident.Lident(stringifiedToken), loc) + + Ast_helper.Exp.ident(~loc, operator) + } + + let negateString = s => + if String.length(s) > 0 && @doesNotRaise String.get(s, 0) == '-' { + (@doesNotRaise String.sub)(s, 1, String.length(s) - 1) + } else { + "-" ++ s + } + + let makeUnaryExpr = (startPos, tokenEnd, token, operand) => + switch (token, operand.Parsetree.pexp_desc) { + | (Token.Plus | PlusDot, Pexp_constant(Pconst_integer(_) | Pconst_float(_))) => operand + | (Minus, Pexp_constant(Pconst_integer(n, m))) => { + ...operand, + pexp_desc: Pexp_constant(Pconst_integer(negateString(n), m)), + } + | (Minus | MinusDot, Pexp_constant(Pconst_float(n, m))) => { + ...operand, + pexp_desc: Pexp_constant(Pconst_float(negateString(n), m)), + } + | (Token.Plus | PlusDot | Minus | MinusDot, _) => + let tokenLoc = mkLoc(startPos, tokenEnd) + let operator = "~" ++ Token.toString(token) + Ast_helper.Exp.apply( + ~loc=mkLoc(startPos, operand.Parsetree.pexp_loc.loc_end), + Ast_helper.Exp.ident(~loc=tokenLoc, Location.mkloc(Longident.Lident(operator), tokenLoc)), + list{(Nolabel, operand)}, + ) + | (Token.Bang, _) => + let tokenLoc = mkLoc(startPos, tokenEnd) + Ast_helper.Exp.apply( + ~loc=mkLoc(startPos, operand.Parsetree.pexp_loc.loc_end), + Ast_helper.Exp.ident(~loc=tokenLoc, Location.mkloc(Longident.Lident("not"), 
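// Usage-level illustration (my own examples) of the makeUnaryExpr logic here: a
// unary minus on a numeric literal folds straight into the constant, and `!` is
// parsed as an application of `not`.
let negLiteral = -42 // the literal itself is negated, no runtime negation
let flipped = !true // equivalent to not(true), i.e. false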
tokenLoc)), + list{(Nolabel, operand)}, + ) + | _ => operand + } + + let makeListExpression = (loc, seq, extOpt) => { + let rec handleSeq = x => + switch x { + | list{} => + switch extOpt { + | Some(ext) => ext + | None => + let loc = {...loc, Location.loc_ghost: true} + let nil = Location.mkloc(Longident.Lident("[]"), loc) + Ast_helper.Exp.construct(~loc, nil, None) + } + | list{e1, ...el} => + let exp_el = handleSeq(el) + let loc = mkLoc(e1.Parsetree.pexp_loc.Location.loc_start, exp_el.pexp_loc.loc_end) + + let arg = Ast_helper.Exp.tuple(~loc, list{e1, exp_el}) + Ast_helper.Exp.construct(~loc, Location.mkloc(Longident.Lident("::"), loc), Some(arg)) + } + + let expr = handleSeq(seq) + {...expr, pexp_loc: loc} + } + + let makeListPattern = (loc, seq, ext_opt) => { + let rec handle_seq = x => + switch x { + | list{} => + let base_case = switch ext_opt { + | Some(ext) => ext + | None => + let loc = {...loc, Location.loc_ghost: true} + let nil = {Location.txt: Longident.Lident("[]"), loc: loc} + Ast_helper.Pat.construct(~loc, nil, None) + } + + base_case + | list{p1, ...pl} => + let pat_pl = handle_seq(pl) + let loc = mkLoc(p1.Parsetree.ppat_loc.loc_start, pat_pl.ppat_loc.loc_end) + let arg = Ast_helper.Pat.mk(~loc, Ppat_tuple(list{p1, pat_pl})) + Ast_helper.Pat.mk( + ~loc, + Ppat_construct(Location.mkloc(Longident.Lident("::"), loc), Some(arg)), + ) + } + + handle_seq(seq) + } + + /* {"foo": bar} -> Js.t({. foo: bar}) + * {.. "foo": bar} -> Js.t({.. foo: bar}) + * {..} -> Js.t({..}) */ + let makeBsObjType = (~attrs, ~loc, ~closed, rows) => { + let obj = Ast_helper.Typ.object_(~loc, rows, closed) + let jsDotTCtor = Location.mkloc(Longident.Ldot(Longident.Lident("Js"), "t"), loc) + + Ast_helper.Typ.constr(~loc, ~attrs, jsDotTCtor, list{obj}) + } + + /* TODO: diagnostic reporting */ + let lidentOfPath = longident => + switch Longident.flatten(longident) |> List.rev { + | list{} => "" + | list{ident, ..._} => ident + } + + let makeNewtypes = (~attrs, ~loc, newtypes, exp) => { + let expr = List.fold_right( + (newtype, exp) => Ast_helper.Exp.mk(~loc, Pexp_newtype(newtype, exp)), + newtypes, + exp, + ) + {...expr, pexp_attributes: attrs} + } + + /* locally abstract types syntax sugar + * Transforms + * let f: type t u v. = (foo : list) => ... + * into + * let f = (type t u v. foo : list) => ... + */ + let wrapTypeAnnotation = (~loc, newtypes, core_type, body) => { + let exp = makeNewtypes( + ~attrs=list{}, + ~loc, + newtypes, + Ast_helper.Exp.constraint_(~loc, body, core_type), + ) + + let typ = Ast_helper.Typ.poly( + ~loc, + newtypes, + Ast_helper.Typ.varify_constructors(newtypes, core_type), + ) + + (exp, typ) + } + + @ocaml.doc( + " + * process the occurrence of _ in the arguments of a function application + * replace _ with a new variable, currently __x, in the arguments + * return a wrapping function that wraps ((__x) => ...) around an expression + * e.g. 
foo(_, 3) becomes (__x) => foo(__x, 3) + " + ) + let processUnderscoreApplication = args => { + open Parsetree + let exp_question = ref(None) + let hidden_var = "__x" + let check_arg = ((lab, exp) as arg) => + switch exp.pexp_desc { + | Pexp_ident({txt: Lident("_")} as id) => + let new_id = Location.mkloc(Longident.Lident(hidden_var), id.loc) + let new_exp = Ast_helper.Exp.mk(Pexp_ident(new_id), ~loc=exp.pexp_loc) + exp_question := Some(new_exp) + (lab, new_exp) + | _ => arg + } + + let args = List.map(check_arg, args) + let wrap = exp_apply => + switch exp_question.contents { + | Some({pexp_loc: loc}) => + let pattern = Ast_helper.Pat.mk(Ppat_var(Location.mkloc(hidden_var, loc)), ~loc) + Ast_helper.Exp.mk(Pexp_fun(Nolabel, None, pattern, exp_apply), ~loc) + | None => exp_apply + } + + (args, wrap) + } + + let rec parseLident = p => { + let recoverLident = p => + if Token.isKeyword(p.Parser.token) && p.Parser.prevEndPos.pos_lnum === p.startPos.pos_lnum { + Parser.err(p, Diagnostics.lident(p.Parser.token)) + Parser.next(p) + None + } else { + let rec loop = p => + if !Recover.shouldAbortListParse(p) { + Parser.next(p) + loop(p) + } + + Parser.next(p) + loop(p) + switch p.Parser.token { + | Lident(_) => Some() + | _ => None + } + } + + let startPos = p.Parser.startPos + switch p.Parser.token { + | Lident(ident) => + Parser.next(p) + let loc = mkLoc(startPos, p.prevEndPos) + (ident, loc) + | List => + Parser.next(p) + let loc = mkLoc(startPos, p.prevEndPos) + ("list", loc) + | _ => + switch recoverLident(p) { + | Some() => parseLident(p) + | None => ("_", mkLoc(startPos, p.prevEndPos)) + } + } + } + + let parseIdent = (~msg, ~startPos, p) => + switch p.Parser.token { + | Lident(ident) | Uident(ident) => + Parser.next(p) + let loc = mkLoc(startPos, p.prevEndPos) + (ident, loc) + | List => + Parser.next(p) + let loc = mkLoc(startPos, p.prevEndPos) + ("list", loc) + | _token => + Parser.err(p, Diagnostics.message(msg)) + Parser.next(p) + ("_", mkLoc(startPos, p.prevEndPos)) + } + + let parseHashIdent = (~startPos, p) => { + Parser.expect(Hash, p) + parseIdent(~startPos, ~msg=ErrorMessages.variantIdent, p) + } + + /* Ldot (Ldot (Lident "Foo", "Bar"), "baz") */ + let parseValuePath = p => { + let startPos = p.Parser.startPos + let rec aux = (p, path) => + switch p.Parser.token { + | List => Longident.Ldot(path, "list") + | Lident(ident) => Longident.Ldot(path, ident) + | Uident(uident) => + Parser.next(p) + Parser.expect(Dot, p) + aux(p, Ldot(path, uident)) + | token => + Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs)) + Longident.Lident("_") + } + + let ident = switch p.Parser.token { + | List => Longident.Lident("list") + | Lident(ident) => Longident.Lident(ident) + | Uident(ident) => + Parser.next(p) + Parser.expect(Dot, p) + aux(p, Lident(ident)) + | token => + Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs)) + Longident.Lident("_") + } + + Parser.next(p) + Location.mkloc(ident, mkLoc(startPos, p.prevEndPos)) + } + + let parseValuePathTail = (p, startPos, ident) => { + let rec loop = (p, path) => + switch p.Parser.token { + | Lident(ident) => + Parser.next(p) + Location.mkloc(Longident.Ldot(path, ident), mkLoc(startPos, p.prevEndPos)) + | List => + Parser.next(p) + Location.mkloc(Longident.Ldot(path, "list"), mkLoc(startPos, p.prevEndPos)) + | Uident(ident) => + Parser.next(p) + Parser.expect(Dot, p) + loop(p, Longident.Ldot(path, ident)) + | token => + Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs)) + Location.mknoloc(path) + } + + loop(p, ident) + } + + let 
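// Illustration (my own example) of the `_` application shorthand documented
// above: an `_` argument turns the call into a one-parameter function.
let divide = (a, b) => a / b
let halve = divide(_, 2) // becomes __x => divide(__x, 2)
// halve(10) == 5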
parseModuleLongIdentTail = (~lowercase, p, startPos, ident) => { + let rec loop = (p, acc) => + switch p.Parser.token { + | List when lowercase => + Parser.next(p) + let lident = Longident.Ldot(acc, "list") + Location.mkloc(lident, mkLoc(startPos, p.prevEndPos)) + | Lident(ident) when lowercase => + Parser.next(p) + let lident = Longident.Ldot(acc, ident) + Location.mkloc(lident, mkLoc(startPos, p.prevEndPos)) + | Uident(ident) => + Parser.next(p) + let endPos = p.prevEndPos + let lident = Longident.Ldot(acc, ident) + switch p.Parser.token { + | Dot => + Parser.next(p) + loop(p, lident) + | _ => Location.mkloc(lident, mkLoc(startPos, endPos)) + } + | t => + Parser.err(p, Diagnostics.uident(t)) + Location.mkloc(acc, mkLoc(startPos, p.prevEndPos)) + } + + loop(p, ident) + } + + /* Parses module identifiers: + Foo + Foo.Bar */ + let parseModuleLongIdent = (~lowercase, p) => { + /* Parser.leaveBreadcrumb p Reporting.ModuleLongIdent; */ + let startPos = p.Parser.startPos + let moduleIdent = switch p.Parser.token { + | List when lowercase => + let loc = mkLoc(startPos, p.endPos) + Parser.next(p) + Location.mkloc(Longident.Lident("list"), loc) + | Lident(ident) when lowercase => + let loc = mkLoc(startPos, p.endPos) + let lident = Longident.Lident(ident) + Parser.next(p) + Location.mkloc(lident, loc) + | Uident(ident) => + let lident = Longident.Lident(ident) + let endPos = p.endPos + Parser.next(p) + switch p.Parser.token { + | Dot => + Parser.next(p) + parseModuleLongIdentTail(~lowercase, p, startPos, lident) + | _ => Location.mkloc(lident, mkLoc(startPos, endPos)) + } + | t => + Parser.err(p, Diagnostics.uident(t)) + Location.mkloc(Longident.Lident("_"), mkLoc(startPos, p.prevEndPos)) + } + + /* Parser.eatBreadcrumb p; */ + moduleIdent + } + + /* `window.location` or `Math` or `Foo.Bar` */ + let parseIdentPath = p => { + let rec loop = (p, acc) => + switch p.Parser.token { + | Uident(ident) | Lident(ident) => + Parser.next(p) + let lident = Longident.Ldot(acc, ident) + switch p.Parser.token { + | Dot => + Parser.next(p) + loop(p, lident) + | _ => lident + } + | _t => acc + } + + switch p.Parser.token { + | Lident(ident) | Uident(ident) => + Parser.next(p) + switch p.Parser.token { + | Dot => + Parser.next(p) + loop(p, Longident.Lident(ident)) + | _ => Longident.Lident(ident) + } + | _ => Longident.Lident("_") + } + } + + let verifyJsxOpeningClosingName = (p, nameExpr) => { + let closing = switch p.Parser.token { + | Lident(lident) => + Parser.next(p) + Longident.Lident(lident) + | Uident(_) => parseModuleLongIdent(~lowercase=false, p).txt + | _ => Longident.Lident("") + } + + switch nameExpr.Parsetree.pexp_desc { + | Pexp_ident(openingIdent) => + let opening = { + let withoutCreateElement = + Longident.flatten(openingIdent.txt) |> List.filter(s => s != "createElement") + + switch Longident.unflatten(withoutCreateElement) { + | Some(li) => li + | None => Longident.Lident("") + } + } + + opening == closing + | _ => assert false + } + } + + let string_of_pexp_ident = nameExpr => + switch nameExpr.Parsetree.pexp_desc { + | Pexp_ident(openingIdent) => + Longident.flatten(openingIdent.txt) + |> List.filter(s => s != "createElement") + |> String.concat(".") + | _ => "" + } + + /* open-def ::= + * | open module-path + * | open! 
module-path */ + let parseOpenDescription = (~attrs, p) => { + Parser.leaveBreadcrumb(p, Grammar.OpenDescription) + let startPos = p.Parser.startPos + Parser.expect(Open, p) + let override = if Parser.optional(p, Token.Bang) { + Asttypes.Override + } else { + Asttypes.Fresh + } + + let modident = parseModuleLongIdent(~lowercase=false, p) + let loc = mkLoc(startPos, p.prevEndPos) + Parser.eatBreadcrumb(p) + Ast_helper.Opn.mk(~loc, ~attrs, ~override, modident) + } + + let hexValue = x => + switch x { + | '0' .. '9' => Char.code(x) - 48 + | 'A' .. 'Z' => Char.code(x) - 55 + | 'a' .. 'z' => Char.code(x) - 97 + | _ => 16 + } + + let parseStringLiteral = s => { + let len = String.length(s) + let b = Buffer.create(String.length(s)) + + let rec loop = i => + if i == len { + () + } else { + let c = String.unsafe_get(s, i) + switch c { + | '\\' as c => + let nextIx = i + 1 + if nextIx < len { + let nextChar = String.unsafe_get(s, nextIx) + switch nextChar { + | 'n' => + Buffer.add_char(b, '\n') + loop(nextIx + 1) + | 'r' => + Buffer.add_char(b, '\r') + loop(nextIx + 1) + | 'b' => + Buffer.add_char(b, '\b') + loop(nextIx + 1) + | 't' => + Buffer.add_char(b, '\t') + loop(nextIx + 1) + | '\\' as c => + Buffer.add_char(b, c) + loop(nextIx + 1) + | ' ' as c => + Buffer.add_char(b, c) + loop(nextIx + 1) + | '\'' as c => + Buffer.add_char(b, c) + loop(nextIx + 1) + | '"' as c => + Buffer.add_char(b, c) + loop(nextIx + 1) + | '0' .. '9' => + if nextIx + 2 < len { + let c0 = nextChar + let c1 = String.unsafe_get(s, nextIx + 1) + let c2 = String.unsafe_get(s, nextIx + 2) + let c = + 100 * (Char.code(c0) - 48) + 10 * (Char.code(c1) - 48) + (Char.code(c2) - 48) + + if c < 0 || c > 255 { + Buffer.add_char(b, '\\') + Buffer.add_char(b, c0) + Buffer.add_char(b, c1) + Buffer.add_char(b, c2) + loop(nextIx + 3) + } else { + Buffer.add_char(b, Char.unsafe_chr(c)) + loop(nextIx + 3) + } + } else { + Buffer.add_char(b, '\\') + Buffer.add_char(b, nextChar) + loop(nextIx + 1) + } + | 'o' => + if nextIx + 3 < len { + let c0 = String.unsafe_get(s, nextIx + 1) + let c1 = String.unsafe_get(s, nextIx + 2) + let c2 = String.unsafe_get(s, nextIx + 3) + let c = 64 * (Char.code(c0) - 48) + 8 * (Char.code(c1) - 48) + (Char.code(c2) - 48) + + if c < 0 || c > 255 { + Buffer.add_char(b, '\\') + Buffer.add_char(b, '0') + Buffer.add_char(b, c0) + Buffer.add_char(b, c1) + Buffer.add_char(b, c2) + loop(nextIx + 4) + } else { + Buffer.add_char(b, Char.unsafe_chr(c)) + loop(nextIx + 4) + } + } else { + Buffer.add_char(b, '\\') + Buffer.add_char(b, nextChar) + loop(nextIx + 1) + } + | 'x' as c => + if nextIx + 2 < len { + let c0 = String.unsafe_get(s, nextIx + 1) + let c1 = String.unsafe_get(s, nextIx + 2) + let c = 16 * hexValue(c0) + hexValue(c1) + if c < 0 || c > 255 { + Buffer.add_char(b, '\\') + Buffer.add_char(b, 'x') + Buffer.add_char(b, c0) + Buffer.add_char(b, c1) + loop(nextIx + 3) + } else { + Buffer.add_char(b, Char.unsafe_chr(c)) + loop(nextIx + 3) + } + } else { + Buffer.add_char(b, '\\') + Buffer.add_char(b, c) + loop(nextIx + 2) + } + | _ => + Buffer.add_char(b, c) + Buffer.add_char(b, nextChar) + loop(nextIx + 1) + } + } else { + Buffer.add_char(b, c) + () + } + | c => + Buffer.add_char(b, c) + loop(i + 1) + } + } + + loop(0) + Buffer.contents(b) + } + + let parseTemplateStringLiteral = s => { + let len = String.length(s) + let b = Buffer.create(len) + + let rec loop = i => + if i < len { + let c = String.unsafe_get(s, i) + switch c { + | '\\' as c => + if i + 1 < len { + let nextChar = String.unsafe_get(s, i + 1) + switch 
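// parseStringLiteral above decodes decimal (\DDD), octal (\oOOO) and hex (\xHH)
// escapes back into bytes. A hedged, standalone sketch of the hex case
// (hexDigit and decodeHexByte are made-up names, for illustration only):
let hexDigit = c =>
  switch c {
  | '0' .. '9' => Char.code(c) - Char.code('0')
  | 'a' .. 'f' => Char.code(c) - Char.code('a') + 10
  | 'A' .. 'F' => Char.code(c) - Char.code('A') + 10
  | _ => -1
  }
let decodeHexByte = (hi, lo) =>
  switch (hexDigit(hi), hexDigit(lo)) {
  | (h, l) when h >= 0 && l >= 0 => Some(Char.chr(16 * h + l)) // ('4', '1') gives Some('A')
  | _ => None
  }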
nextChar { + | '\\' as c => + Buffer.add_char(b, c) + loop(i + 2) + | '$' as c => + Buffer.add_char(b, c) + loop(i + 2) + | '`' as c => + Buffer.add_char(b, c) + loop(i + 2) + | c => + Buffer.add_char(b, '\\') + Buffer.add_char(b, c) + loop(i + 2) + } + } else { + Buffer.add_char(b, c) + } + + | c => + Buffer.add_char(b, c) + loop(i + 1) + } + } else { + () + } + + loop(0) + Buffer.contents(b) + } + + /* constant ::= integer-literal */ + /* ∣ float-literal */ + /* ∣ string-literal */ + let parseConstant = p => { + let isNegative = switch p.Parser.token { + | Token.Minus => + Parser.next(p) + true + | Plus => + Parser.next(p) + false + | _ => false + } + + let constant = switch p.Parser.token { + | Int({i, suffix}) => + let intTxt = if isNegative { + "-" ++ i + } else { + i + } + Parsetree.Pconst_integer(intTxt, suffix) + | Float({f, suffix}) => + let floatTxt = if isNegative { + "-" ++ f + } else { + f + } + Parsetree.Pconst_float(floatTxt, suffix) + | String(s) => + let txt = if p.mode == ParseForTypeChecker { + parseStringLiteral(s) + } else { + s + } + + Pconst_string(txt, None) + | Character(c) => Pconst_char(c) + | token => + Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs)) + Pconst_string("", None) + } + + Parser.next(p) + constant + } + + let parseCommaDelimitedRegion = (p, ~grammar, ~closing, ~f) => { + Parser.leaveBreadcrumb(p, grammar) + let rec loop = nodes => + switch f(p) { + | Some(node) => + switch p.Parser.token { + | Comma => + Parser.next(p) + loop(list{node, ...nodes}) + | token when token == closing || token == Eof => List.rev(list{node, ...nodes}) + | _ => + if !(p.token == Eof || (p.token == closing || Recover.shouldAbortListParse(p))) { + Parser.expect(Comma, p) + } + if p.token == Semicolon { + Parser.next(p) + } + loop(list{node, ...nodes}) + } + | None => + if p.token == Eof || (p.token == closing || Recover.shouldAbortListParse(p)) { + List.rev(nodes) + } else { + Parser.err(p, Diagnostics.unexpected(p.token, p.breadcrumbs)) + Parser.next(p) + loop(nodes) + } + } + + let nodes = loop(list{}) + Parser.eatBreadcrumb(p) + nodes + } + + let parseCommaDelimitedReversedList = (p, ~grammar, ~closing, ~f) => { + Parser.leaveBreadcrumb(p, grammar) + let rec loop = nodes => + switch f(p) { + | Some(node) => + switch p.Parser.token { + | Comma => + Parser.next(p) + loop(list{node, ...nodes}) + | token when token == closing || token == Eof => list{node, ...nodes} + | _ => + if !(p.token == Eof || (p.token == closing || Recover.shouldAbortListParse(p))) { + Parser.expect(Comma, p) + } + if p.token == Semicolon { + Parser.next(p) + } + loop(list{node, ...nodes}) + } + | None => + if p.token == Eof || (p.token == closing || Recover.shouldAbortListParse(p)) { + nodes + } else { + Parser.err(p, Diagnostics.unexpected(p.token, p.breadcrumbs)) + Parser.next(p) + loop(nodes) + } + } + + let nodes = loop(list{}) + Parser.eatBreadcrumb(p) + nodes + } + + let parseDelimitedRegion = (p, ~grammar, ~closing, ~f) => { + Parser.leaveBreadcrumb(p, grammar) + let rec loop = nodes => + switch f(p) { + | Some(node) => loop(list{node, ...nodes}) + | None => + if p.Parser.token == Token.Eof || (p.token == closing || Recover.shouldAbortListParse(p)) { + List.rev(nodes) + } else { + Parser.err(p, Diagnostics.unexpected(p.token, p.breadcrumbs)) + Parser.next(p) + loop(nodes) + } + } + + let nodes = loop(list{}) + Parser.eatBreadcrumb(p) + nodes + } + + let parseRegion = (p, ~grammar, ~f) => { + Parser.leaveBreadcrumb(p, grammar) + let rec loop = nodes => + switch f(p) { + | Some(node) => 
loop(list{node, ...nodes}) + | None => + if p.Parser.token == Token.Eof || Recover.shouldAbortListParse(p) { + List.rev(nodes) + } else { + Parser.err(p, Diagnostics.unexpected(p.token, p.breadcrumbs)) + Parser.next(p) + loop(nodes) + } + } + + let nodes = loop(list{}) + Parser.eatBreadcrumb(p) + nodes + } + + /* let-binding ::= pattern = expr */ + /* ∣ value-name { parameter } [: typexpr] [:> typexpr] = expr */ + /* ∣ value-name : poly-typexpr = expr */ + + /* pattern ::= value-name */ + /* ∣ _ */ + /* ∣ constant */ + /* ∣ pattern as value-name */ + /* ∣ ( pattern ) */ + /* ∣ ( pattern : typexpr ) */ + /* ∣ pattern | pattern */ + /* ∣ constr pattern */ + /* ∣ #variant variant-pattern */ + /* ∣ ##type */ + /* ∣ / pattern { , pattern }+ / */ + /* ∣ { field [: typexpr] [= pattern] { ; field [: typexpr] [= pattern] } [; _ ] [ ; ] } */ + /* ∣ [ pattern { ; pattern } [ ; ] ] */ + /* ∣ pattern :: pattern */ + /* ∣ [| pattern { ; pattern } [ ; ] |] */ + /* ∣ char-literal .. char-literal */ + /* ∣ exception pattern */ + let rec parsePattern = (~alias=true, ~or_=true, p) => { + let startPos = p.Parser.startPos + let attrs = parseAttributes(p) + let pat = switch p.Parser.token { + | (True | False) as token => + let endPos = p.endPos + Parser.next(p) + let loc = mkLoc(startPos, endPos) + Ast_helper.Pat.construct( + ~loc, + Location.mkloc(Longident.Lident(Token.toString(token)), loc), + None, + ) + | Int(_) | String(_) | Float(_) | Character(_) | Minus | Plus => + let c = parseConstant(p) + switch p.token { + | DotDot => + Parser.next(p) + let c2 = parseConstant(p) + Ast_helper.Pat.interval(~loc=mkLoc(startPos, p.prevEndPos), c, c2) + | _ => Ast_helper.Pat.constant(~loc=mkLoc(startPos, p.prevEndPos), c) + } + | Lparen => + Parser.next(p) + switch p.token { + | Rparen => + Parser.next(p) + let loc = mkLoc(startPos, p.prevEndPos) + let lid = Location.mkloc(Longident.Lident("()"), loc) + Ast_helper.Pat.construct(~loc, lid, None) + | _ => + let pat = parseConstrainedPattern(p) + switch p.token { + | Comma => + Parser.next(p) + parseTuplePattern(~attrs, ~first=pat, ~startPos, p) + | _ => + Parser.expect(Rparen, p) + let loc = mkLoc(startPos, p.prevEndPos) + {...pat, ppat_loc: loc} + } + } + | Lbracket => parseArrayPattern(~attrs, p) + | Lbrace => parseRecordPattern(~attrs, p) + | Underscore => + let endPos = p.endPos + let loc = mkLoc(startPos, endPos) + Parser.next(p) + Ast_helper.Pat.any(~loc, ~attrs, ()) + | Lident(ident) => + let endPos = p.endPos + let loc = mkLoc(startPos, endPos) + Parser.next(p) + Ast_helper.Pat.var(~loc, ~attrs, Location.mkloc(ident, loc)) + | Uident(_) => + let constr = parseModuleLongIdent(~lowercase=false, p) + switch p.Parser.token { + | Lparen => parseConstructorPatternArgs(p, constr, startPos, attrs) + | _ => Ast_helper.Pat.construct(~loc=constr.loc, ~attrs, constr, None) + } + | Hash => + let (ident, loc) = parseHashIdent(~startPos, p) + switch p.Parser.token { + | Lparen => parseVariantPatternArgs(p, ident, startPos, attrs) + | _ => Ast_helper.Pat.variant(~loc, ~attrs, ident, None) + } + | HashHash => + Parser.next(p) + let ident = parseValuePath(p) + let loc = mkLoc(startPos, ident.loc.loc_end) + Ast_helper.Pat.type_(~loc, ~attrs, ident) + | Exception => + Parser.next(p) + let pat = parsePattern(~alias=false, ~or_=false, p) + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Pat.exception_(~loc, ~attrs, pat) + | Lazy => + Parser.next(p) + let pat = parsePattern(~alias=false, ~or_=false, p) + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Pat.lazy_(~loc, ~attrs, 
pat) + | List => + Parser.next(p) + switch p.token { + | Lbracket => parseListPattern(~startPos, ~attrs, p) + | _ => + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Pat.var(~loc, ~attrs, Location.mkloc("list", loc)) + } + | Module => parseModulePattern(~attrs, p) + | Percent => + let extension = parseExtension(p) + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Pat.extension(~loc, ~attrs, extension) + | token => + Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs)) + switch skipTokensAndMaybeRetry(p, ~isStartOfGrammar=Grammar.isAtomicPatternStart) { + | None => Recover.defaultPattern() + | Some() => parsePattern(p) + } + } + + let pat = if alias { + parseAliasPattern(~attrs, pat, p) + } else { + pat + } + if or_ { + parseOrPattern(pat, p) + } else { + pat + } + } + + and skipTokensAndMaybeRetry = (p, ~isStartOfGrammar) => + if Token.isKeyword(p.Parser.token) && p.Parser.prevEndPos.pos_lnum === p.startPos.pos_lnum { + Parser.next(p) + None + } else if Recover.shouldAbortListParse(p) { + if isStartOfGrammar(p.Parser.token) { + Parser.next(p) + Some() + } else { + None + } + } else { + Parser.next(p) + let rec loop = p => + if !Recover.shouldAbortListParse(p) { + Parser.next(p) + loop(p) + } + loop(p) + if isStartOfGrammar(p.Parser.token) { + Some() + } else { + None + } + } + + /* alias ::= pattern as lident */ + and parseAliasPattern = (~attrs, pattern, p) => + switch p.Parser.token { + | As => + Parser.next(p) + let (name, loc) = parseLident(p) + let name = Location.mkloc(name, loc) + Ast_helper.Pat.alias(~loc={...pattern.ppat_loc, loc_end: p.prevEndPos}, ~attrs, pattern, name) + | _ => pattern + } + + /* or ::= pattern | pattern + * precedence: Red | Blue | Green is interpreted as (Red | Blue) | Green */ + and parseOrPattern = (pattern1, p) => { + let rec loop = pattern1 => + switch p.Parser.token { + | Bar => + Parser.next(p) + let pattern2 = parsePattern(~or_=false, p) + let loc = { + ...pattern1.Parsetree.ppat_loc, + loc_end: pattern2.ppat_loc.loc_end, + } + loop(Ast_helper.Pat.or_(~loc, pattern1, pattern2)) + | _ => pattern1 + } + + loop(pattern1) + } + + and parseNonSpreadPattern = (~msg, p) => { + let () = switch p.Parser.token { + | DotDotDot => + Parser.err(p, Diagnostics.message(msg)) + Parser.next(p) + | _ => () + } + + switch p.Parser.token { + | token when Grammar.isPatternStart(token) => + let pat = parsePattern(p) + switch p.Parser.token { + | Colon => + Parser.next(p) + let typ = parseTypExpr(p) + let loc = mkLoc(pat.ppat_loc.loc_start, typ.Parsetree.ptyp_loc.loc_end) + Some(Ast_helper.Pat.constraint_(~loc, pat, typ)) + | _ => Some(pat) + } + | _ => None + } + } + + and parseConstrainedPattern = p => { + let pat = parsePattern(p) + switch p.Parser.token { + | Colon => + Parser.next(p) + let typ = parseTypExpr(p) + let loc = mkLoc(pat.ppat_loc.loc_start, typ.Parsetree.ptyp_loc.loc_end) + Ast_helper.Pat.constraint_(~loc, pat, typ) + | _ => pat + } + } + + and parseConstrainedPatternRegion = p => + switch p.Parser.token { + | token when Grammar.isPatternStart(token) => Some(parseConstrainedPattern(p)) + | _ => None + } + + /* field ::= + * | longident + * | longident : pattern + * | longident as lident + * + * row ::= + * | field , + * | field , _ + * | field , _, + */ + and parseRecordPatternField = p => { + let startPos = p.Parser.startPos + let label = parseValuePath(p) + let pattern = switch p.Parser.token { + | Colon => + Parser.next(p) + parsePattern(p) + | _ => Ast_helper.Pat.var(~loc=label.loc, Location.mkloc(Longident.last(label.txt), label.loc)) 
+ } + + switch p.token { + | As => + Parser.next(p) + let (name, loc) = parseLident(p) + let name = Location.mkloc(name, loc) + let aliasPattern = Ast_helper.Pat.alias(~loc=mkLoc(startPos, p.prevEndPos), pattern, name) + + (Location.mkloc(label.txt, mkLoc(startPos, aliasPattern.ppat_loc.loc_end)), aliasPattern) + | _ => (label, pattern) + } + } + + /* TODO: there are better representations than PatField|Underscore ? */ + and parseRecordPatternItem = p => + switch p.Parser.token { + | DotDotDot => + Parser.next(p) + Some(true, PatField(parseRecordPatternField(p))) + | Uident(_) | Lident(_) => Some(false, PatField(parseRecordPatternField(p))) + | Underscore => + Parser.next(p) + Some(false, PatUnderscore) + | _ => None + } + + and parseRecordPattern = (~attrs, p) => { + let startPos = p.startPos + Parser.expect(Lbrace, p) + let rawFields = parseCommaDelimitedReversedList( + p, + ~grammar=PatternRecord, + ~closing=Rbrace, + ~f=parseRecordPatternItem, + ) + + Parser.expect(Rbrace, p) + let (fields, closedFlag) = { + let (rawFields, flag) = switch rawFields { + | list{(_hasSpread, PatUnderscore), ...rest} => (rest, Asttypes.Open) + | rawFields => (rawFields, Asttypes.Closed) + } + + List.fold_left(((fields, flag), curr) => { + let (hasSpread, field) = curr + switch field { + | PatField(field) => + if hasSpread { + let (_, pattern) = field + Parser.err( + ~startPos=pattern.Parsetree.ppat_loc.loc_start, + p, + Diagnostics.message(ErrorMessages.recordPatternSpread), + ) + } + (list{field, ...fields}, flag) + | PatUnderscore => (fields, flag) + } + }, (list{}, flag), rawFields) + } + + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Pat.record(~loc, ~attrs, fields, closedFlag) + } + + and parseTuplePattern = (~attrs, ~first, ~startPos, p) => { + let patterns = parseCommaDelimitedRegion( + p, + ~grammar=Grammar.PatternList, + ~closing=Rparen, + ~f=parseConstrainedPatternRegion, + ) + + Parser.expect(Rparen, p) + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Pat.tuple(~loc, ~attrs, list{first, ...patterns}) + } + + and parsePatternRegion = p => + switch p.Parser.token { + | DotDotDot => + Parser.next(p) + Some(true, parseConstrainedPattern(p)) + | token when Grammar.isPatternStart(token) => Some(false, parseConstrainedPattern(p)) + | _ => None + } + + and parseModulePattern = (~attrs, p) => { + let startPos = p.Parser.startPos + Parser.expect(Module, p) + Parser.expect(Lparen, p) + let uident = switch p.token { + | Uident(uident) => + let loc = mkLoc(p.startPos, p.endPos) + Parser.next(p) + Location.mkloc(uident, loc) + | _ => + /* TODO: error recovery */ + Location.mknoloc("_") + } + + switch p.token { + | Colon => + let colonStart = p.Parser.startPos + Parser.next(p) + let packageTypAttrs = parseAttributes(p) + let packageType = parsePackageType(~startPos=colonStart, ~attrs=packageTypAttrs, p) + Parser.expect(Rparen, p) + let loc = mkLoc(startPos, p.prevEndPos) + let unpack = Ast_helper.Pat.unpack(~loc=uident.loc, uident) + Ast_helper.Pat.constraint_(~loc, ~attrs, unpack, packageType) + | _ => + Parser.expect(Rparen, p) + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Pat.unpack(~loc, ~attrs, uident) + } + } + + and parseListPattern = (~startPos, ~attrs, p) => { + Parser.expect(Lbracket, p) + let listPatterns = parseCommaDelimitedReversedList( + p, + ~grammar=Grammar.PatternOcamlList, + ~closing=Rbracket, + ~f=parsePatternRegion, + ) + + Parser.expect(Rbracket, p) + let loc = mkLoc(startPos, p.prevEndPos) + let filterSpread = ((hasSpread, pattern)) => + if hasSpread { + 
Parser.err( + ~startPos=pattern.Parsetree.ppat_loc.loc_start, + p, + Diagnostics.message(ErrorMessages.listPatternSpread), + ) + pattern + } else { + pattern + } + + switch listPatterns { + | list{(true, pattern), ...patterns} => + let patterns = patterns |> List.map(filterSpread) |> List.rev + let pat = makeListPattern(loc, patterns, Some(pattern)) + {...pat, ppat_loc: loc, ppat_attributes: attrs} + | patterns => + let patterns = patterns |> List.map(filterSpread) |> List.rev + let pat = makeListPattern(loc, patterns, None) + {...pat, ppat_loc: loc, ppat_attributes: attrs} + } + } + + and parseArrayPattern = (~attrs, p) => { + let startPos = p.startPos + Parser.expect(Lbracket, p) + let patterns = parseCommaDelimitedRegion( + p, + ~grammar=Grammar.PatternList, + ~closing=Rbracket, + ~f=parseNonSpreadPattern(~msg=ErrorMessages.arrayPatternSpread), + ) + + Parser.expect(Rbracket, p) + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Pat.array(~loc, ~attrs, patterns) + } + + and parseConstructorPatternArgs = (p, constr, startPos, attrs) => { + let lparen = p.startPos + Parser.expect(Lparen, p) + let args = parseCommaDelimitedRegion( + p, + ~grammar=Grammar.PatternList, + ~closing=Rparen, + ~f=parseConstrainedPatternRegion, + ) + + Parser.expect(Rparen, p) + let args = switch args { + | list{} => + let loc = mkLoc(lparen, p.prevEndPos) + Some(Ast_helper.Pat.construct(~loc, Location.mkloc(Longident.Lident("()"), loc), None)) + | list{{ppat_desc: Ppat_tuple(_)} as pat} as patterns => + if p.mode == ParseForTypeChecker { + /* Some(1, 2) for type-checker */ + Some(pat) + } else { + /* Some((1, 2)) for printer */ + Some(Ast_helper.Pat.tuple(~loc=mkLoc(lparen, p.endPos), patterns)) + } + | list{pattern} => Some(pattern) + | patterns => Some(Ast_helper.Pat.tuple(~loc=mkLoc(lparen, p.endPos), patterns)) + } + + Ast_helper.Pat.construct(~loc=mkLoc(startPos, p.prevEndPos), ~attrs, constr, args) + } + + and parseVariantPatternArgs = (p, ident, startPos, attrs) => { + let lparen = p.startPos + Parser.expect(Lparen, p) + let patterns = parseCommaDelimitedRegion( + p, + ~grammar=Grammar.PatternList, + ~closing=Rparen, + ~f=parseConstrainedPatternRegion, + ) + let args = switch patterns { + | list{{ppat_desc: Ppat_tuple(_)} as pat} as patterns => + if p.mode == ParseForTypeChecker { + /* #ident(1, 2) for type-checker */ + Some(pat) + } else { + /* #ident((1, 2)) for printer */ + Some(Ast_helper.Pat.tuple(~loc=mkLoc(lparen, p.endPos), patterns)) + } + | list{pattern} => Some(pattern) + | patterns => Some(Ast_helper.Pat.tuple(~loc=mkLoc(lparen, p.endPos), patterns)) + } + + Parser.expect(Rparen, p) + Ast_helper.Pat.variant(~loc=mkLoc(startPos, p.prevEndPos), ~attrs, ident, args) + } + + and parseExpr = (~context=OrdinaryExpr, p) => { + let expr = parseOperandExpr(~context, p) + let expr = parseBinaryExpr(~context, ~a=expr, p, 1) + parseTernaryExpr(expr, p) + } + + /* expr ? 
expr : expr */ + and parseTernaryExpr = (leftOperand, p) => + switch p.Parser.token { + | Question => + Parser.leaveBreadcrumb(p, Grammar.Ternary) + Parser.next(p) + let trueBranch = parseExpr(~context=TernaryTrueBranchExpr, p) + Parser.expect(Colon, p) + let falseBranch = parseExpr(p) + Parser.eatBreadcrumb(p) + let loc = { + ...leftOperand.Parsetree.pexp_loc, + loc_start: leftOperand.pexp_loc.loc_start, + loc_end: falseBranch.Parsetree.pexp_loc.loc_end, + } + Ast_helper.Exp.ifthenelse( + ~attrs=list{ternaryAttr}, + ~loc, + leftOperand, + trueBranch, + Some(falseBranch), + ) + | _ => leftOperand + } + + and parseEs6ArrowExpression = (~parameters=?, p) => { + let startPos = p.Parser.startPos + Parser.leaveBreadcrumb(p, Grammar.Es6ArrowExpr) + let parameters = switch parameters { + | Some(params) => params + | None => parseParameters(p) + } + + let returnType = switch p.Parser.token { + | Colon => + Parser.next(p) + Some(parseTypExpr(~es6Arrow=false, p)) + | _ => None + } + + Parser.expect(EqualGreater, p) + let body = { + let expr = parseExpr(p) + switch returnType { + | Some(typ) => + Ast_helper.Exp.constraint_( + ~loc=mkLoc(expr.pexp_loc.loc_start, typ.Parsetree.ptyp_loc.loc_end), + expr, + typ, + ) + | None => expr + } + } + + Parser.eatBreadcrumb(p) + let endPos = p.prevEndPos + let arrowExpr = List.fold_right((parameter, expr) => + switch parameter { + | TermParameter({uncurried, attrs, label: lbl, expr: defaultExpr, pat, pos: startPos}) => + let attrs = if uncurried { + list{uncurryAttr, ...attrs} + } else { + attrs + } + Ast_helper.Exp.fun_(~loc=mkLoc(startPos, endPos), ~attrs, lbl, defaultExpr, pat, expr) + | TypeParameter({uncurried, attrs, locs: newtypes, pos: startPos}) => + let attrs = if uncurried { + list{uncurryAttr, ...attrs} + } else { + attrs + } + makeNewtypes(~attrs, ~loc=mkLoc(startPos, endPos), newtypes, expr) + } + , parameters, body) + + {...arrowExpr, pexp_loc: {...arrowExpr.pexp_loc, loc_start: startPos}} + } + + /* + * uncurried_parameter ::= + * | . parameter + * + * parameter ::= + * | pattern + * | pattern : type + * | ~ labelName + * | ~ labelName as pattern + * | ~ labelName as pattern : type + * | ~ labelName = expr + * | ~ labelName as pattern = expr + * | ~ labelName as pattern : type = expr + * | ~ labelName = ? + * | ~ labelName as pattern = ? + * | ~ labelName as pattern : type = ? + * + * labelName ::= lident + */ + and parseParameter = p => + if ( + p.Parser.token == Token.Typ || + (p.token == Tilde || + (p.token == Dot || Grammar.isPatternStart(p.token))) + ) { + let startPos = p.Parser.startPos + let uncurried = Parser.optional(p, Token.Dot) + /* two scenarios: + * attrs ~lbl ... 
+ * attrs pattern + * Attributes before a labelled arg, indicate that it's on the whole arrow expr + * Otherwise it's part of the pattern + * */ + let attrs = parseAttributes(p) + if p.Parser.token == Typ { + Parser.next(p) + let lidents = parseLidentList(p) + Some(TypeParameter({uncurried: uncurried, attrs: attrs, locs: lidents, pos: startPos})) + } else { + let (attrs, lbl, pat) = switch p.Parser.token { + | Tilde => + Parser.next(p) + let (lblName, loc) = parseLident(p) + let propLocAttr = (Location.mkloc("ns.namedArgLoc", loc), Parsetree.PStr(list{})) + switch p.Parser.token { + | Comma | Equal | Rparen => + let loc = mkLoc(startPos, p.prevEndPos) + ( + attrs, + Asttypes.Labelled(lblName), + Ast_helper.Pat.var(~attrs=list{propLocAttr}, ~loc, Location.mkloc(lblName, loc)), + ) + | Colon => + let lblEnd = p.prevEndPos + Parser.next(p) + let typ = parseTypExpr(p) + let loc = mkLoc(startPos, lblEnd) + let pat = { + let pat = Ast_helper.Pat.var(~loc, Location.mkloc(lblName, loc)) + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Pat.constraint_(~attrs=list{propLocAttr}, ~loc, pat, typ) + } + (attrs, Asttypes.Labelled(lblName), pat) + | As => + Parser.next(p) + let pat = { + let pat = parseConstrainedPattern(p) + {...pat, ppat_attributes: list{propLocAttr, ...pat.ppat_attributes}} + } + + (attrs, Asttypes.Labelled(lblName), pat) + | t => + Parser.err(p, Diagnostics.unexpected(t, p.breadcrumbs)) + let loc = mkLoc(startPos, p.prevEndPos) + ( + attrs, + Asttypes.Labelled(lblName), + Ast_helper.Pat.var(~loc, Location.mkloc(lblName, loc)), + ) + } + | _ => + let pattern = parseConstrainedPattern(p) + let attrs = List.concat(list{attrs, pattern.ppat_attributes}) + (list{}, Asttypes.Nolabel, {...pattern, ppat_attributes: attrs}) + } + + switch p.Parser.token { + | Equal => + Parser.next(p) + let lbl = switch lbl { + | Asttypes.Labelled(lblName) => Asttypes.Optional(lblName) + | Asttypes.Optional(_) as lbl => lbl + | Asttypes.Nolabel => Asttypes.Nolabel + } + + switch p.Parser.token { + | Question => + Parser.next(p) + Some( + TermParameter({ + uncurried: uncurried, + attrs: attrs, + label: lbl, + expr: None, + pat: pat, + pos: startPos, + }), + ) + | _ => + let expr = parseConstrainedOrCoercedExpr(p) + Some( + TermParameter({ + uncurried: uncurried, + attrs: attrs, + label: lbl, + expr: Some(expr), + pat: pat, + pos: startPos, + }), + ) + } + | _ => + Some( + TermParameter({ + uncurried: uncurried, + attrs: attrs, + label: lbl, + expr: None, + pat: pat, + pos: startPos, + }), + ) + } + } + } else { + None + } + + and parseParameterList = p => { + let parameters = parseCommaDelimitedRegion( + ~grammar=Grammar.ParameterList, + ~f=parseParameter, + ~closing=Rparen, + p, + ) + + Parser.expect(Rparen, p) + parameters + } + + /* parameters ::= + * | _ + * | lident + * | () + * | (.) 
+ * | ( parameter {, parameter} [,] ) + */ + and parseParameters = p => { + let startPos = p.Parser.startPos + switch p.Parser.token { + | Lident(ident) => + Parser.next(p) + let loc = mkLoc(startPos, p.Parser.prevEndPos) + list{ + TermParameter({ + uncurried: false, + attrs: list{}, + label: Asttypes.Nolabel, + expr: None, + pat: Ast_helper.Pat.var(~loc, Location.mkloc(ident, loc)), + pos: startPos, + }), + } + | List => + Parser.next(p) + let loc = mkLoc(startPos, p.Parser.prevEndPos) + list{ + TermParameter({ + uncurried: false, + attrs: list{}, + label: Asttypes.Nolabel, + expr: None, + pat: Ast_helper.Pat.var(~loc, Location.mkloc("list", loc)), + pos: startPos, + }), + } + | Underscore => + Parser.next(p) + let loc = mkLoc(startPos, p.Parser.prevEndPos) + list{ + TermParameter({ + uncurried: false, + attrs: list{}, + label: Asttypes.Nolabel, + expr: None, + pat: Ast_helper.Pat.any(~loc, ()), + pos: startPos, + }), + } + | Lparen => + Parser.next(p) + switch p.Parser.token { + | Rparen => + Parser.next(p) + let loc = mkLoc(startPos, p.Parser.prevEndPos) + let unitPattern = Ast_helper.Pat.construct( + ~loc, + Location.mkloc(Longident.Lident("()"), loc), + None, + ) + + list{ + TermParameter({ + uncurried: false, + attrs: list{}, + label: Asttypes.Nolabel, + expr: None, + pat: unitPattern, + pos: startPos, + }), + } + | Dot => + Parser.next(p) + switch p.token { + | Rparen => + Parser.next(p) + let loc = mkLoc(startPos, p.Parser.prevEndPos) + let unitPattern = Ast_helper.Pat.construct( + ~loc, + Location.mkloc(Longident.Lident("()"), loc), + None, + ) + + list{ + TermParameter({ + uncurried: true, + attrs: list{}, + label: Asttypes.Nolabel, + expr: None, + pat: unitPattern, + pos: startPos, + }), + } + | _ => + switch parseParameterList(p) { + | list{ + TermParameter({attrs, label: lbl, expr: defaultExpr, pat: pattern, pos: startPos}), + ...rest, + } => list{ + TermParameter({ + uncurried: true, + attrs: attrs, + label: lbl, + expr: defaultExpr, + pat: pattern, + pos: startPos, + }), + ...rest, + } + | parameters => parameters + } + } + | _ => parseParameterList(p) + } + | token => + Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs)) + list{} + } + } + + and parseCoercedExpr = (~expr: Parsetree.expression, p) => { + Parser.expect(ColonGreaterThan, p) + let typ = parseTypExpr(p) + let loc = mkLoc(expr.pexp_loc.loc_start, p.prevEndPos) + Ast_helper.Exp.coerce(~loc, expr, None, typ) + } + + and parseConstrainedOrCoercedExpr = p => { + let expr = parseExpr(p) + switch p.Parser.token { + | ColonGreaterThan => parseCoercedExpr(~expr, p) + | Colon => + Parser.next(p) + switch p.token { + | _ => + let typ = parseTypExpr(p) + let loc = mkLoc(expr.pexp_loc.loc_start, typ.ptyp_loc.loc_end) + let expr = Ast_helper.Exp.constraint_(~loc, expr, typ) + switch p.token { + | ColonGreaterThan => parseCoercedExpr(~expr, p) + | _ => expr + } + } + | _ => expr + } + } + + and parseConstrainedExprRegion = p => + switch p.Parser.token { + | token when Grammar.isExprStart(token) => + let expr = parseExpr(p) + switch p.Parser.token { + | Colon => + Parser.next(p) + let typ = parseTypExpr(p) + let loc = mkLoc(expr.pexp_loc.loc_start, typ.ptyp_loc.loc_end) + Some(Ast_helper.Exp.constraint_(~loc, expr, typ)) + | _ => Some(expr) + } + | _ => None + } + + /* Atomic expressions represent unambiguous expressions. + * This means that regardless of the context, these expressions + * are always interpreted correctly. 
*/ + and parseAtomicExpr = p => { + Parser.leaveBreadcrumb(p, Grammar.ExprOperand) + let startPos = p.Parser.startPos + let expr = switch p.Parser.token { + | (True | False) as token => + Parser.next(p) + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Exp.construct( + ~loc, + Location.mkloc(Longident.Lident(Token.toString(token)), loc), + None, + ) + | Int(_) | String(_) | Float(_) | Character(_) => + let c = parseConstant(p) + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Exp.constant(~loc, c) + | Backtick => + let expr = parseTemplateExpr(p) + {...expr, pexp_loc: mkLoc(startPos, p.prevEndPos)} + | Uident(_) | Lident(_) => parseValueOrConstructor(p) + | Hash => parsePolyVariantExpr(p) + | Lparen => + Parser.next(p) + switch p.Parser.token { + | Rparen => + Parser.next(p) + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Exp.construct(~loc, Location.mkloc(Longident.Lident("()"), loc), None) + | _t => + let expr = parseConstrainedOrCoercedExpr(p) + switch p.token { + | Comma => + Parser.next(p) + parseTupleExpr(~startPos, ~first=expr, p) + | _ => + Parser.expect(Rparen, p) + expr + /* {expr with pexp_loc = mkLoc startPos p.prevEndPos} + * What does this location mean here? It means that when there's + * a parenthesized we keep the location here for whitespace interleaving. + * Without the closing paren in the location there will always be an extra + * line. For now we don't include it, because it does weird things + * with for comments. */ + } + } + | List => + Parser.next(p) + switch p.token { + | Lbracket => parseListExpr(~startPos, p) + | _ => + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Exp.ident(~loc, Location.mkloc(Longident.Lident("list"), loc)) + } + | Module => + Parser.next(p) + parseFirstClassModuleExpr(~startPos, p) + | Lbracket => parseArrayExp(p) + | Lbrace => parseBracedOrRecordExpr(p) + | LessThan => parseJsx(p) + | Percent => + let extension = parseExtension(p) + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Exp.extension(~loc, extension) + | Underscore as token => + /* This case is for error recovery. 
Not sure if it's the correct place */ + Parser.err(p, Diagnostics.lident(token)) + Parser.next(p) + Recover.defaultExpr() + | token => + let errPos = p.prevEndPos + switch skipTokensAndMaybeRetry(p, ~isStartOfGrammar=Grammar.isAtomicExprStart) { + | None => + Parser.err(~startPos=errPos, p, Diagnostics.unexpected(token, p.breadcrumbs)) + Recover.defaultExpr() + | Some() => parseAtomicExpr(p) + } + } + + Parser.eatBreadcrumb(p) + expr + } + + /* module(module-expr) + * module(module-expr : package-type) */ + and parseFirstClassModuleExpr = (~startPos, p) => { + Parser.expect(Lparen, p) + + let modExpr = parseModuleExpr(p) + let modEndLoc = p.prevEndPos + switch p.Parser.token { + | Colon => + let colonStart = p.Parser.startPos + Parser.next(p) + let attrs = parseAttributes(p) + let packageType = parsePackageType(~startPos=colonStart, ~attrs, p) + Parser.expect(Rparen, p) + let loc = mkLoc(startPos, modEndLoc) + let firstClassModule = Ast_helper.Exp.pack(~loc, modExpr) + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Exp.constraint_(~loc, firstClassModule, packageType) + | _ => + Parser.expect(Rparen, p) + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Exp.pack(~loc, modExpr) + } + } + + and parseBracketAccess = (p, expr, startPos) => { + Parser.leaveBreadcrumb(p, Grammar.ExprArrayAccess) + let lbracket = p.startPos + Parser.next(p) + let stringStart = p.startPos + switch p.Parser.token { + | String(s) => + Parser.next(p) + let stringEnd = p.prevEndPos + Parser.expect(Rbracket, p) + let rbracket = p.prevEndPos + let e = { + let identLoc = mkLoc(stringStart, stringEnd) + let loc = mkLoc(lbracket, rbracket) + Ast_helper.Exp.apply( + ~loc, + Ast_helper.Exp.ident(~loc, Location.mkloc(Longident.Lident("##"), loc)), + list{ + (Nolabel, expr), + ( + Nolabel, + Ast_helper.Exp.ident(~loc=identLoc, Location.mkloc(Longident.Lident(s), identLoc)), + ), + }, + ) + } + + let e = parsePrimaryExpr(~operand=e, p) + let equalStart = p.startPos + switch p.token { + | Equal => + Parser.next(p) + let equalEnd = p.prevEndPos + let rhsExpr = parseExpr(p) + let loc = mkLoc(startPos, rhsExpr.pexp_loc.loc_end) + let operatorLoc = mkLoc(equalStart, equalEnd) + Ast_helper.Exp.apply( + ~loc, + Ast_helper.Exp.ident( + ~loc=operatorLoc, + Location.mkloc(Longident.Lident("#="), operatorLoc), + ), + list{(Nolabel, e), (Nolabel, rhsExpr)}, + ) + | _ => e + } + | _ => + let accessExpr = parseConstrainedOrCoercedExpr(p) + Parser.expect(Rbracket, p) + let rbracket = p.prevEndPos + let arrayLoc = mkLoc(lbracket, rbracket) + switch p.token { + | Equal => + Parser.leaveBreadcrumb(p, ExprArrayMutation) + Parser.next(p) + let rhsExpr = parseExpr(p) + let arraySet = Location.mkloc(Longident.Ldot(Lident("Array"), "set"), arrayLoc) + + let endPos = p.prevEndPos + let arraySet = Ast_helper.Exp.apply( + ~loc=mkLoc(startPos, endPos), + Ast_helper.Exp.ident(~loc=arrayLoc, arraySet), + list{(Nolabel, expr), (Nolabel, accessExpr), (Nolabel, rhsExpr)}, + ) + + Parser.eatBreadcrumb(p) + arraySet + | _ => + let endPos = p.prevEndPos + let e = Ast_helper.Exp.apply( + ~loc=mkLoc(startPos, endPos), + Ast_helper.Exp.ident( + ~loc=arrayLoc, + Location.mkloc(Longident.Ldot(Lident("Array"), "get"), arrayLoc), + ), + list{(Nolabel, expr), (Nolabel, accessExpr)}, + ) + + Parser.eatBreadcrumb(p) + parsePrimaryExpr(~operand=e, p) + } + } + } + + /* * A primary expression represents + * - atomic-expr + * - john.age + * - array[0] + * - applyFunctionTo(arg1, arg2) + * + * The "operand" represents the expression that is operated on + */ + and 
parsePrimaryExpr = (~operand, ~noCall=false, p) => { + let startPos = operand.pexp_loc.loc_start + let rec loop = (p, expr) => + switch p.Parser.token { + | Dot => + Parser.next(p) + let lident = parseValuePath(p) + switch p.Parser.token { + | Equal when noCall == false => + Parser.leaveBreadcrumb(p, Grammar.ExprSetField) + Parser.next(p) + let targetExpr = parseExpr(p) + let loc = mkLoc(startPos, p.prevEndPos) + let setfield = Ast_helper.Exp.setfield(~loc, expr, lident, targetExpr) + Parser.eatBreadcrumb(p) + setfield + | _ => + let endPos = p.prevEndPos + let loc = mkLoc(startPos, endPos) + loop(p, Ast_helper.Exp.field(~loc, expr, lident)) + } + | Lbracket when noCall == false && p.prevEndPos.pos_lnum === p.startPos.pos_lnum => + parseBracketAccess(p, expr, startPos) + | Lparen when noCall == false && p.prevEndPos.pos_lnum === p.startPos.pos_lnum => + loop(p, parseCallExpr(p, expr)) + | Backtick when noCall == false && p.prevEndPos.pos_lnum === p.startPos.pos_lnum => + switch expr.pexp_desc { + | Pexp_ident({txt: Longident.Lident(ident)}) => parseTemplateExpr(~prefix=ident, p) + | _ => + Parser.err( + ~startPos=expr.pexp_loc.loc_start, + ~endPos=expr.pexp_loc.loc_end, + p, + Diagnostics.message( + "Tagged template literals are currently restricted to identifiers like: json`null`.", + ), + ) + parseTemplateExpr(p) + } + | _ => expr + } + + loop(p, operand) + } + + /* a unary expression is an expression with only one operand and + * unary operator. Examples: + * -1 + * !condition + * -. 1.6 + */ + and parseUnaryExpr = p => { + let startPos = p.Parser.startPos + switch p.Parser.token { + | (Minus | MinusDot | Plus | PlusDot | Bang) as token => + Parser.leaveBreadcrumb(p, Grammar.ExprUnary) + let tokenEnd = p.endPos + Parser.next(p) + let operand = parseUnaryExpr(p) + let unaryExpr = makeUnaryExpr(startPos, tokenEnd, token, operand) + Parser.eatBreadcrumb(p) + unaryExpr + | _ => parsePrimaryExpr(~operand=parseAtomicExpr(p), p) + } + } + + /* Represents an "operand" in a binary expression. + * If you have `a + b`, `a` and `b` both represent + * the operands of the binary expression with operator `+` */ + and parseOperandExpr = (~context, p) => { + let startPos = p.Parser.startPos + let attrs = parseAttributes(p) + let expr = switch p.Parser.token { + | Assert => + Parser.next(p) + let expr = parseUnaryExpr(p) + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Exp.assert_(~loc, expr) + | Lazy => + Parser.next(p) + let expr = parseUnaryExpr(p) + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Exp.lazy_(~loc, expr) + | Try => parseTryExpression(p) + | If => parseIfExpression(p) + | For => parseForExpression(p) + | While => parseWhileExpression(p) + | Switch => parseSwitchExpression(p) + | _ => + if ( + context !== WhenExpr && isEs6ArrowExpression(~inTernary=context == TernaryTrueBranchExpr, p) + ) { + parseEs6ArrowExpression(p) + } else { + parseUnaryExpr(p) + } + } + + /* let endPos = p.Parser.prevEndPos in */ + { + ...expr, + pexp_attributes: List.concat(list{expr.Parsetree.pexp_attributes, attrs}), + /* pexp_loc = mkLoc startPos endPos */ + } + } + + /* a binary expression is an expression that combines two expressions with an + * operator. Examples: + * a + b + * f(x) |> g(y) + */ + and parseBinaryExpr = (~context=OrdinaryExpr, ~a=?, p, prec) => { + let a = switch a { + | Some(e) => e + | None => parseOperandExpr(~context, p) + } + + let rec loop = a => { + let token = p.Parser.token + let tokenPrec = switch token { + /* Can the minus be interpreted as a binary operator?
Or is it a unary? + * let w = { + * x + * -10 + * } + * vs + * let w = { + * width + * - gap + * } + * + * First case is unary, second is a binary operator. + * See Scanner.isBinaryOp */ + | Minus | MinusDot | LessThan + when !Scanner.isBinaryOp(p.scanner.src, p.startPos.pos_cnum, p.endPos.pos_cnum) && + p.startPos.pos_lnum > p.prevEndPos.pos_lnum => -1 + | token => Token.precedence(token) + } + + if tokenPrec < prec { + a + } else { + Parser.leaveBreadcrumb(p, Grammar.ExprBinaryAfterOp(token)) + let startPos = p.startPos + Parser.next(p) + let endPos = p.prevEndPos + let b = parseBinaryExpr(~context, p, tokenPrec + 1) + let loc = mkLoc(a.Parsetree.pexp_loc.loc_start, b.pexp_loc.loc_end) + let expr = Ast_helper.Exp.apply( + ~loc, + makeInfixOperator(p, token, startPos, endPos), + list{(Nolabel, a), (Nolabel, b)}, + ) + + loop(expr) + } + } + + loop(a) + } + + /* If we even need this, determines if < might be the start of jsx. Not 100% complete */ + /* and isStartOfJsx p = */ + /* Parser.lookahead p (fun p -> */ + /* match p.Parser.token with */ + /* | LessThan -> */ + /* Parser.next p; */ + /* begin match p.token with */ + /* | GreaterThan (* <> *) -> true */ + /* | Lident _ | Uident _ | List -> */ + /* ignore (parseJsxName p); */ + /* begin match p.token with */ + /* | GreaterThan (*
*) -> true */ + /* | Question (* true */ + /* | Lident _ | List -> */ + /* Parser.next p; */ + /* begin match p.token with */ + /* | Equal (* true */ + /* | _ -> false (* TODO *) */ + /* end */ + /* | Forwardslash (* */ + /* Parser.next p; */ + /* begin match p.token with */ + /* | GreaterThan (* *) -> true */ + /* | _ -> false */ + /* end */ + /* | _ -> */ + /* false */ + /* end */ + /* | _ -> false */ + /* end */ + /* | _ -> false */ + /* ) */ + + and parseTemplateExpr = (~prefix="", p) => { + let hiddenOperator = { + let op = Location.mknoloc(Longident.Lident("^")) + Ast_helper.Exp.ident(op) + } + + let rec loop = (acc, p) => { + let startPos = p.Parser.startPos + switch p.Parser.token { + | TemplateTail(txt) => + Parser.next(p) + let loc = mkLoc(startPos, p.prevEndPos) + if String.length(txt) > 0 { + let txt = if p.mode == ParseForTypeChecker { + parseTemplateStringLiteral(txt) + } else { + txt + } + let str = Ast_helper.Exp.constant(~loc, Pconst_string(txt, Some(prefix))) + Ast_helper.Exp.apply(~loc, hiddenOperator, list{(Nolabel, acc), (Nolabel, str)}) + } else { + acc + } + | TemplatePart(txt) => + Parser.next(p) + let loc = mkLoc(startPos, p.prevEndPos) + let expr = parseExprBlock(p) + let fullLoc = mkLoc(startPos, p.prevEndPos) + Scanner.setTemplateMode(p.scanner) + Parser.expect(Rbrace, p) + let txt = if p.mode == ParseForTypeChecker { + parseTemplateStringLiteral(txt) + } else { + txt + } + let str = Ast_helper.Exp.constant(~loc, Pconst_string(txt, Some(prefix))) + let next = { + let a = if String.length(txt) > 0 { + Ast_helper.Exp.apply(~loc=fullLoc, hiddenOperator, list{(Nolabel, acc), (Nolabel, str)}) + } else { + acc + } + + Ast_helper.Exp.apply(~loc=fullLoc, hiddenOperator, list{(Nolabel, a), (Nolabel, expr)}) + } + + loop(next, p) + | token => + Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs)) + acc + } + } + + Scanner.setTemplateMode(p.scanner) + Parser.expect(Backtick, p) + let startPos = p.Parser.startPos + switch p.Parser.token { + | TemplateTail(txt) => + let loc = mkLoc(startPos, p.endPos) + Parser.next(p) + let txt = if p.mode == ParseForTypeChecker { + parseTemplateStringLiteral(txt) + } else { + txt + } + Ast_helper.Exp.constant(~loc, Pconst_string(txt, Some(prefix))) + | TemplatePart(txt) => + let constantLoc = mkLoc(startPos, p.endPos) + Parser.next(p) + let expr = parseExprBlock(p) + let fullLoc = mkLoc(startPos, p.prevEndPos) + Scanner.setTemplateMode(p.scanner) + Parser.expect(Rbrace, p) + let txt = if p.mode == ParseForTypeChecker { + parseTemplateStringLiteral(txt) + } else { + txt + } + let str = Ast_helper.Exp.constant(~loc=constantLoc, Pconst_string(txt, Some(prefix))) + let next = if String.length(txt) > 0 { + Ast_helper.Exp.apply(~loc=fullLoc, hiddenOperator, list{(Nolabel, str), (Nolabel, expr)}) + } else { + expr + } + + loop(next, p) + | token => + Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs)) + Ast_helper.Exp.constant(Pconst_string("", None)) + } + } + + /* Overparse: let f = a : int => a + 1, is it (a : int) => or (a): int => + * Also overparse constraints: + * let x = { + * let a = 1 + * a + pi: int + * } + * + * We want to give a nice error message in these cases + * */ + and overParseConstrainedOrCoercedOrArrowExpression = (p, expr) => + switch p.Parser.token { + | ColonGreaterThan => parseCoercedExpr(~expr, p) + | Colon => + Parser.next(p) + let typ = parseTypExpr(~es6Arrow=false, p) + switch p.Parser.token { + | EqualGreater => + Parser.next(p) + let body = parseExpr(p) + let pat = switch expr.pexp_desc { + | 
Pexp_ident(longident) => + Ast_helper.Pat.var( + ~loc=expr.pexp_loc, + Location.mkloc(Longident.flatten(longident.txt) |> String.concat("."), longident.loc), + ) + /* TODO: can we convert more expressions to patterns? */ + | _ => Ast_helper.Pat.var(~loc=expr.pexp_loc, Location.mkloc("pattern", expr.pexp_loc)) + } + + let arrow1 = Ast_helper.Exp.fun_( + ~loc=mkLoc(expr.pexp_loc.loc_start, body.pexp_loc.loc_end), + Asttypes.Nolabel, + None, + pat, + Ast_helper.Exp.constraint_(body, typ), + ) + + let arrow2 = Ast_helper.Exp.fun_( + ~loc=mkLoc(expr.pexp_loc.loc_start, body.pexp_loc.loc_end), + Asttypes.Nolabel, + None, + Ast_helper.Pat.constraint_(pat, typ), + body, + ) + + let msg = + Doc.breakableGroup( + ~forceBreak=true, + Doc.concat(list{ + Doc.text("Did you mean to annotate the parameter type or the return type?"), + Doc.indent( + Doc.concat(list{ + Doc.line, + Doc.text("1) "), + Printer.printExpression(arrow1, CommentTable.empty), + Doc.line, + Doc.text("2) "), + Printer.printExpression(arrow2, CommentTable.empty), + }), + ), + }), + ) |> Doc.toString(~width=80) + + Parser.err( + ~startPos=expr.pexp_loc.loc_start, + ~endPos=body.pexp_loc.loc_end, + p, + Diagnostics.message(msg), + ) + arrow1 + | _ => + open Parsetree + let loc = mkLoc(expr.pexp_loc.loc_start, typ.ptyp_loc.loc_end) + let expr = Ast_helper.Exp.constraint_(~loc, expr, typ) + let () = Parser.err( + ~startPos=expr.pexp_loc.loc_start, + ~endPos=typ.ptyp_loc.loc_end, + p, + Diagnostics.message( + Doc.breakableGroup( + ~forceBreak=true, + Doc.concat(list{ + Doc.text("Expressions with type constraints need to be wrapped in parens:"), + Doc.indent( + Doc.concat(list{ + Doc.line, + Printer.addParens(Printer.printExpression(expr, CommentTable.empty)), + }), + ), + }), + ) |> Doc.toString(~width=80), + ), + ) + + expr + } + | _ => expr + } + + and parseLetBindingBody = (~startPos, ~attrs, p) => { + Parser.beginRegion(p) + Parser.leaveBreadcrumb(p, Grammar.LetBinding) + let (pat, exp) = { + let pat = parsePattern(p) + switch p.Parser.token { + | Colon => + Parser.next(p) + switch p.token { + | Typ => + /* locally abstract types */ + Parser.next(p) + let newtypes = parseLidentList(p) + Parser.expect(Dot, p) + let typ = parseTypExpr(p) + Parser.expect(Equal, p) + let expr = parseExpr(p) + let loc = mkLoc(startPos, p.prevEndPos) + let (exp, poly) = wrapTypeAnnotation(~loc, newtypes, typ, expr) + let pat = Ast_helper.Pat.constraint_(~loc, pat, poly) + (pat, exp) + | _ => + let polyType = parsePolyTypeExpr(p) + let loc = {...pat.ppat_loc, loc_end: polyType.Parsetree.ptyp_loc.loc_end} + let pat = Ast_helper.Pat.constraint_(~loc, pat, polyType) + Parser.expect(Token.Equal, p) + let exp = parseExpr(p) + let exp = overParseConstrainedOrCoercedOrArrowExpression(p, exp) + (pat, exp) + } + | _ => + Parser.expect(Token.Equal, p) + let exp = overParseConstrainedOrCoercedOrArrowExpression(p, parseExpr(p)) + (pat, exp) + } + } + + let loc = mkLoc(startPos, p.prevEndPos) + let vb = Ast_helper.Vb.mk(~loc, ~attrs, pat, exp) + Parser.eatBreadcrumb(p) + Parser.endRegion(p) + vb + } + + /* TODO: find a better way? Is it possible? 
+ * let a = 1 + * @attr + * and b = 2 + * + * The problem is that without semi we need a lookahead to determine + * if the attr is on the letbinding or the start of a new thing + * + * let a = 1 + * @attr + * let b = 1 + * + * Here @attr should attach to something "new": `let b = 1` + * The parser state is forked, which is quite expensive… + */ + and parseAttributesAndBinding = (p: Parser.t) => { + let err = p.scanner.err + let ch = p.scanner.ch + let offset = p.scanner.offset + let rdOffset = p.scanner.rdOffset + let lineOffset = p.scanner.lineOffset + let lnum = p.scanner.lnum + let mode = p.scanner.mode + let token = p.token + let startPos = p.startPos + let endPos = p.endPos + let prevEndPos = p.prevEndPos + let breadcrumbs = p.breadcrumbs + let errors = p.errors + let diagnostics = p.diagnostics + let comments = p.comments + + switch p.Parser.token { + | At => + let attrs = parseAttributes(p) + switch p.Parser.token { + | And => attrs + | _ => + p.scanner.err = err + p.scanner.ch = ch + p.scanner.offset = offset + p.scanner.rdOffset = rdOffset + p.scanner.lineOffset = lineOffset + p.scanner.lnum = lnum + p.scanner.mode = mode + p.token = token + p.startPos = startPos + p.endPos = endPos + p.prevEndPos = prevEndPos + p.breadcrumbs = breadcrumbs + p.errors = errors + p.diagnostics = diagnostics + p.comments = comments + list{} + } + | _ => list{} + } + } + + /* definition ::= let [rec] let-binding { and let-binding } */ + and parseLetBindings = (~attrs, p) => { + let startPos = p.Parser.startPos + Parser.optional(p, Let) |> ignore + let recFlag = if Parser.optional(p, Token.Rec) { + Asttypes.Recursive + } else { + Asttypes.Nonrecursive + } + + let first = parseLetBindingBody(~startPos, ~attrs, p) + + let rec loop = (p, bindings) => { + let startPos = p.Parser.startPos + let attrs = parseAttributesAndBinding(p) + switch p.Parser.token { + | And => + Parser.next(p) + let attrs = switch p.token { + | Export => + let exportLoc = mkLoc(p.startPos, p.endPos) + Parser.next(p) + let genTypeAttr = (Location.mkloc("genType", exportLoc), Parsetree.PStr(list{})) + list{genTypeAttr, ...attrs} + | _ => attrs + } + + ignore(Parser.optional(p, Let)) /* overparse for fault tolerance */ + let letBinding = parseLetBindingBody(~startPos, ~attrs, p) + loop(p, list{letBinding, ...bindings}) + | _ => List.rev(bindings) + } + } + + (recFlag, loop(p, list{first})) + } + + /* + * div -> div + * Foo -> Foo.createElement + * Foo.Bar -> Foo.Bar.createElement + */ + and parseJsxName = p => { + let longident = switch p.Parser.token { + | Lident(ident) => + let identStart = p.startPos + let identEnd = p.endPos + Parser.next(p) + let loc = mkLoc(identStart, identEnd) + Location.mkloc(Longident.Lident(ident), loc) + | Uident(_) => + let longident = parseModuleLongIdent(~lowercase=false, p) + Location.mkloc(Longident.Ldot(longident.txt, "createElement"), longident.loc) + | _ => + let msg = "A jsx name should start with a lowercase or uppercase identifier, like: div in
<div /> or Navbar in <Navbar />" + + Parser.err(p, Diagnostics.message(msg)) + Location.mknoloc(Longident.Lident("_")) + } + + Ast_helper.Exp.ident(~loc=longident.loc, longident) + } + + and parseJsxOpeningOrSelfClosingElement = (~startPos, p) => { + let jsxStartPos = p.Parser.startPos + let name = parseJsxName(p) + let jsxProps = parseJsxProps(p) + let children = switch p.Parser.token { + | Forwardslash => + /* <foo /> */ + let childrenStartPos = p.Parser.startPos + Parser.next(p) + let childrenEndPos = p.Parser.startPos + Parser.expect(GreaterThan, p) + let loc = mkLoc(childrenStartPos, childrenEndPos) + makeListExpression(loc, list{}, None) /* no children */ + | GreaterThan => + /* <foo> bar </foo> */ + let childrenStartPos = p.Parser.startPos + Scanner.setJsxMode(p.scanner) + Parser.next(p) + let (spread, children) = parseJsxChildren(p) + let childrenEndPos = p.Parser.startPos + let () = switch p.token { + | LessThanSlash => Parser.next(p) + | LessThan => + Parser.next(p) + Parser.expect(Forwardslash, p) + | token when Grammar.isStructureItemStart(token) => () + | _ => Parser.expect(LessThanSlash, p) + } + + switch p.Parser.token { + | Lident(_) | Uident(_) when verifyJsxOpeningClosingName(p, name) => + Parser.expect(GreaterThan, p) + let loc = mkLoc(childrenStartPos, childrenEndPos) + switch (spread, children) { + | (true, list{child, ..._}) => child + | _ => makeListExpression(loc, children, None) + } + | token => + let () = if Grammar.isStructureItemStart(token) { + let closing = "</" ++ (string_of_pexp_ident(name) ++ ">") + let msg = Diagnostics.message("Missing " ++ closing) + Parser.err(~startPos, ~endPos=p.prevEndPos, p, msg) + } else { + let opening = "</" ++ (string_of_pexp_ident(name) ++ ">") + let msg = + "Closing jsx name should be the same as the opening name. Did you mean " ++ + (opening ++ + " ?") + Parser.err(~startPos, ~endPos=p.prevEndPos, p, Diagnostics.message(msg)) + Parser.expect(GreaterThan, p) + } + + let loc = mkLoc(childrenStartPos, childrenEndPos) + switch (spread, children) { + | (true, list{child, ..._}) => child + | _ => makeListExpression(loc, children, None) + } + } + | token => + Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs)) + makeListExpression(Location.none, list{}, None) + } + + let jsxEndPos = p.prevEndPos + let loc = mkLoc(jsxStartPos, jsxEndPos) + Ast_helper.Exp.apply( + ~loc, + name, + List.concat(list{ + jsxProps, + list{ + (Asttypes.Labelled("children"), children), + ( + Asttypes.Nolabel, + Ast_helper.Exp.construct(Location.mknoloc(Longident.Lident("()")), None), + ), + }, + }), + ) + } + + /* + * jsx ::= + * | <> jsx-children </> + * | <element-name {jsx-prop} /> + * | <element-name {jsx-prop}> jsx-children </element-name> + * + * jsx-children ::= primary-expr* * => 0 or more + */ + and parseJsx = p => { + Parser.leaveBreadcrumb(p, Grammar.Jsx) + let startPos = p.Parser.startPos + Parser.expect(LessThan, p) + let jsxExpr = switch p.Parser.token { + | Lident(_) | Uident(_) => parseJsxOpeningOrSelfClosingElement(~startPos, p) + | GreaterThan => + /* fragment: <> foo </> */ + parseJsxFragment(p) + | _ => parseJsxName(p) + } + + {...jsxExpr, pexp_attributes: list{jsxAttr}} + } + + /* + * jsx-fragment ::= + * | <> </> + * | <> jsx-children </> + */ + and parseJsxFragment = p => { + let childrenStartPos = p.Parser.startPos + Scanner.setJsxMode(p.scanner) + Parser.expect(GreaterThan, p) + let (_spread, children) = parseJsxChildren(p) + let childrenEndPos = p.Parser.startPos + Parser.expect(LessThanSlash, p) + Parser.expect(GreaterThan, p) + let loc = mkLoc(childrenStartPos, childrenEndPos) + makeListExpression(loc, children, None) + } + + /* + * jsx-prop ::= + * | lident + * | ?lident + * | lident = jsx_expr + * | lident = ?jsx_expr + */ 
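To make the four prop forms above concrete, here is a small illustrative sketch (not part of the diff; the component and prop names are made up) of how each form appears in ReScript source and the labelled argument that parseJsxProp produces for it:

    <Button
      disabled          // punning            -> (Labelled("disabled"), disabled)
      ?onClick          // optional punning   -> (Optional("onClick"), onClick)
      title="Send"      // lident = jsx_expr  -> (Labelled("title"), "Send")
      icon=?maybeIcon   // lident = ?jsx_expr -> (Optional("icon"), maybeIcon)
    />

Each resulting pair becomes a labelled argument on the Button.createElement application assembled by parseJsxOpeningOrSelfClosingElement above.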
+ and parseJsxProp = p => { + Parser.leaveBreadcrumb(p, Grammar.JsxAttribute) + switch p.Parser.token { + | Question | Lident(_) => + let optional = Parser.optional(p, Question) + let (name, loc) = parseLident(p) + let propLocAttr = (Location.mkloc("ns.namedArgLoc", loc), Parsetree.PStr(list{})) + /* optional punning: */ + if optional { + Some( + Asttypes.Optional(name), + Ast_helper.Exp.ident( + ~attrs=list{propLocAttr}, + ~loc, + Location.mkloc(Longident.Lident(name), loc), + ), + ) + } else { + switch p.Parser.token { + | Equal => + Parser.next(p) + /* no punning */ + let optional = Parser.optional(p, Question) + let attrExpr = { + let e = parsePrimaryExpr(~operand=parseAtomicExpr(p), p) + {...e, pexp_attributes: list{propLocAttr, ...e.pexp_attributes}} + } + + let label = if optional { + Asttypes.Optional(name) + } else { + Asttypes.Labelled(name) + } + + Some(label, attrExpr) + | _ => + let attrExpr = Ast_helper.Exp.ident( + ~loc, + ~attrs=list{propLocAttr}, + Location.mknoloc(Longident.Lident(name)), + ) + let label = if optional { + Asttypes.Optional(name) + } else { + Asttypes.Labelled(name) + } + + Some(label, attrExpr) + } + } + | _ => None + } + } + + and parseJsxProps = p => parseRegion(~grammar=Grammar.JsxAttribute, ~f=parseJsxProp, p) + + and parseJsxChildren = p => { + let rec loop = (p, children) => + switch p.Parser.token { + | Token.Eof | LessThanSlash => + Scanner.popMode(p.scanner, Jsx) + List.rev(children) + | LessThan => + /* Imagine:
<div> < + * is `<` the start of a jsx-child?
+ * reconsiderLessThan peeks at the next token and + * determines the correct token to disambiguate */ + let token = Scanner.reconsiderLessThan(p.scanner) + if token == LessThan { + let child = parsePrimaryExpr(~operand=parseAtomicExpr(p), ~noCall=true, p) + loop(p, list{child, ...children}) + } else { + /* LessThanSlash */ + let () = p.token = token + let () = Scanner.popMode(p.scanner, Jsx) + List.rev(children) + } + | token when Grammar.isJsxChildStart(token) => + let () = Scanner.popMode(p.scanner, Jsx) + let child = parsePrimaryExpr(~operand=parseAtomicExpr(p), ~noCall=true, p) + loop(p, list{child, ...children}) + | _ => + Scanner.popMode(p.scanner, Jsx) + List.rev(children) + } + + switch p.Parser.token { + | DotDotDot => + Parser.next(p) + (true, list{parsePrimaryExpr(~operand=parseAtomicExpr(p), ~noCall=true, p)}) + | _ => (false, loop(p, list{})) + } + } + + and parseBracedOrRecordExpr = p => { + let startPos = p.Parser.startPos + Parser.expect(Lbrace, p) + switch p.Parser.token { + | Rbrace => + Parser.err(p, Diagnostics.unexpected(Rbrace, p.breadcrumbs)) + Parser.next(p) + let loc = mkLoc(startPos, p.prevEndPos) + let braces = makeBracesAttr(loc) + Ast_helper.Exp.construct( + ~attrs=list{braces}, + ~loc, + Location.mkloc(Longident.Lident("()"), loc), + None, + ) + | DotDotDot => + /* beginning of record spread, parse record */ + Parser.next(p) + let spreadExpr = parseConstrainedOrCoercedExpr(p) + Parser.expect(Comma, p) + let expr = parseRecordExpr(~startPos, ~spread=Some(spreadExpr), list{}, p) + Parser.expect(Rbrace, p) + expr + | String(s) => + let field = { + let loc = mkLoc(p.startPos, p.endPos) + Parser.next(p) + Location.mkloc(Longident.Lident(s), loc) + } + + switch p.Parser.token { + | Colon => + Parser.next(p) + let fieldExpr = parseExpr(p) + Parser.optional(p, Comma) |> ignore + let expr = parseRecordExprWithStringKeys(~startPos, (field, fieldExpr), p) + Parser.expect(Rbrace, p) + expr + | _ => + let constant = Ast_helper.Exp.constant(~loc=field.loc, Parsetree.Pconst_string(s, None)) + let a = parsePrimaryExpr(~operand=constant, p) + let e = parseBinaryExpr(~a, p, 1) + let e = parseTernaryExpr(e, p) + switch p.Parser.token { + | Semicolon => + Parser.next(p) + let expr = parseExprBlock(~first=e, p) + Parser.expect(Rbrace, p) + let loc = mkLoc(startPos, p.prevEndPos) + let braces = makeBracesAttr(loc) + {...expr, pexp_attributes: list{braces, ...expr.pexp_attributes}} + | Rbrace => + Parser.next(p) + let loc = mkLoc(startPos, p.prevEndPos) + let braces = makeBracesAttr(loc) + {...e, pexp_attributes: list{braces, ...e.pexp_attributes}} + | _ => + let expr = parseExprBlock(~first=e, p) + Parser.expect(Rbrace, p) + let loc = mkLoc(startPos, p.prevEndPos) + let braces = makeBracesAttr(loc) + {...expr, pexp_attributes: list{braces, ...expr.pexp_attributes}} + } + } + | Uident(_) | Lident(_) => + let valueOrConstructor = parseValueOrConstructor(p) + switch valueOrConstructor.pexp_desc { + | Pexp_ident(pathIdent) => + let identEndPos = p.prevEndPos + switch p.Parser.token { + | Comma => + Parser.next(p) + let expr = parseRecordExpr(~startPos, list{(pathIdent, valueOrConstructor)}, p) + Parser.expect(Rbrace, p) + expr + | Colon => + Parser.next(p) + let fieldExpr = parseExpr(p) + switch p.token { + | Rbrace => + Parser.next(p) + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Exp.record(~loc, list{(pathIdent, fieldExpr)}, None) + | _ => + Parser.expect(Comma, p) + let expr = parseRecordExpr(~startPos, list{(pathIdent, fieldExpr)}, p) + Parser.expect(Rbrace, p) + expr + } 
+ /* error case */ + | Lident(_) => + if p.prevEndPos.pos_lnum < p.startPos.pos_lnum { + Parser.expect(Comma, p) + let expr = parseRecordExpr(~startPos, list{(pathIdent, valueOrConstructor)}, p) + Parser.expect(Rbrace, p) + expr + } else { + Parser.expect(Colon, p) + let expr = parseRecordExpr(~startPos, list{(pathIdent, valueOrConstructor)}, p) + Parser.expect(Rbrace, p) + expr + } + | Semicolon => + Parser.next(p) + let expr = parseExprBlock(~first=Ast_helper.Exp.ident(pathIdent), p) + Parser.expect(Rbrace, p) + let loc = mkLoc(startPos, p.prevEndPos) + let braces = makeBracesAttr(loc) + {...expr, pexp_attributes: list{braces, ...expr.pexp_attributes}} + | Rbrace => + Parser.next(p) + let expr = Ast_helper.Exp.ident(~loc=pathIdent.loc, pathIdent) + let loc = mkLoc(startPos, p.prevEndPos) + let braces = makeBracesAttr(loc) + {...expr, pexp_attributes: list{braces, ...expr.pexp_attributes}} + | EqualGreater => + let loc = mkLoc(startPos, identEndPos) + let ident = Location.mkloc(Longident.last(pathIdent.txt), loc) + let a = parseEs6ArrowExpression( + ~parameters=list{ + TermParameter({ + uncurried: false, + attrs: list{}, + label: Asttypes.Nolabel, + expr: None, + pat: Ast_helper.Pat.var(ident), + pos: startPos, + }), + }, + p, + ) + + let e = parseBinaryExpr(~a, p, 1) + let e = parseTernaryExpr(e, p) + switch p.Parser.token { + | Semicolon => + Parser.next(p) + let expr = parseExprBlock(~first=e, p) + Parser.expect(Rbrace, p) + let loc = mkLoc(startPos, p.prevEndPos) + let braces = makeBracesAttr(loc) + {...expr, pexp_attributes: list{braces, ...expr.pexp_attributes}} + | Rbrace => + Parser.next(p) + let loc = mkLoc(startPos, p.prevEndPos) + let braces = makeBracesAttr(loc) + {...e, pexp_attributes: list{braces, ...e.pexp_attributes}} + | _ => + let expr = parseExprBlock(~first=e, p) + Parser.expect(Rbrace, p) + let loc = mkLoc(startPos, p.prevEndPos) + let braces = makeBracesAttr(loc) + {...expr, pexp_attributes: list{braces, ...expr.pexp_attributes}} + } + | _ => + Parser.leaveBreadcrumb(p, Grammar.ExprBlock) + let a = parsePrimaryExpr(~operand=Ast_helper.Exp.ident(~loc=pathIdent.loc, pathIdent), p) + let e = parseBinaryExpr(~a, p, 1) + let e = parseTernaryExpr(e, p) + Parser.eatBreadcrumb(p) + switch p.Parser.token { + | Semicolon => + Parser.next(p) + let expr = parseExprBlock(~first=e, p) + Parser.expect(Rbrace, p) + let loc = mkLoc(startPos, p.prevEndPos) + let braces = makeBracesAttr(loc) + {...expr, pexp_attributes: list{braces, ...expr.pexp_attributes}} + | Rbrace => + Parser.next(p) + let loc = mkLoc(startPos, p.prevEndPos) + let braces = makeBracesAttr(loc) + {...e, pexp_attributes: list{braces, ...e.pexp_attributes}} + | _ => + let expr = parseExprBlock(~first=e, p) + Parser.expect(Rbrace, p) + let loc = mkLoc(startPos, p.prevEndPos) + let braces = makeBracesAttr(loc) + {...expr, pexp_attributes: list{braces, ...expr.pexp_attributes}} + } + } + | _ => + Parser.leaveBreadcrumb(p, Grammar.ExprBlock) + let a = parsePrimaryExpr(~operand=valueOrConstructor, p) + let e = parseBinaryExpr(~a, p, 1) + let e = parseTernaryExpr(e, p) + Parser.eatBreadcrumb(p) + switch p.Parser.token { + | Semicolon => + Parser.next(p) + let expr = parseExprBlock(~first=e, p) + Parser.expect(Rbrace, p) + let loc = mkLoc(startPos, p.prevEndPos) + let braces = makeBracesAttr(loc) + {...expr, pexp_attributes: list{braces, ...expr.pexp_attributes}} + | Rbrace => + Parser.next(p) + let loc = mkLoc(startPos, p.prevEndPos) + let braces = makeBracesAttr(loc) + {...e, pexp_attributes: list{braces, 
...e.pexp_attributes}} + | _ => + let expr = parseExprBlock(~first=e, p) + Parser.expect(Rbrace, p) + let loc = mkLoc(startPos, p.prevEndPos) + let braces = makeBracesAttr(loc) + {...expr, pexp_attributes: list{braces, ...expr.pexp_attributes}} + } + } + | _ => + let expr = parseExprBlock(p) + Parser.expect(Rbrace, p) + let loc = mkLoc(startPos, p.prevEndPos) + let braces = makeBracesAttr(loc) + {...expr, pexp_attributes: list{braces, ...expr.pexp_attributes}} + } + } + + and parseRecordRowWithStringKey = p => + switch p.Parser.token { + | String(s) => + let loc = mkLoc(p.startPos, p.endPos) + Parser.next(p) + let field = Location.mkloc(Longident.Lident(s), loc) + switch p.Parser.token { + | Colon => + Parser.next(p) + let fieldExpr = parseExpr(p) + Some(field, fieldExpr) + | _ => Some(field, Ast_helper.Exp.ident(~loc=field.loc, field)) + } + | _ => None + } + + and parseRecordRow = p => { + let () = switch p.Parser.token { + | Token.DotDotDot => + Parser.err(p, Diagnostics.message(ErrorMessages.recordExprSpread)) + Parser.next(p) + | _ => () + } + + switch p.Parser.token { + | Lident(_) | Uident(_) | List => + let field = parseValuePath(p) + switch p.Parser.token { + | Colon => + Parser.next(p) + let fieldExpr = parseExpr(p) + Some(field, fieldExpr) + | _ => Some(field, Ast_helper.Exp.ident(~loc=field.loc, field)) + } + | _ => None + } + } + + and parseRecordExprWithStringKeys = (~startPos, firstRow, p) => { + let rows = list{ + firstRow, + ...parseCommaDelimitedRegion( + ~grammar=Grammar.RecordRowsStringKey, + ~closing=Rbrace, + ~f=parseRecordRowWithStringKey, + p, + ), + } + let loc = mkLoc(startPos, p.endPos) + let recordStrExpr = Ast_helper.Str.eval(~loc, Ast_helper.Exp.record(~loc, rows, None)) + Ast_helper.Exp.extension( + ~loc, + (Location.mkloc("bs.obj", loc), Parsetree.PStr(list{recordStrExpr})), + ) + } + + and parseRecordExpr = (~startPos, ~spread=None, rows, p) => { + let exprs = parseCommaDelimitedRegion( + ~grammar=Grammar.RecordRows, + ~closing=Rbrace, + ~f=parseRecordRow, + p, + ) + + let rows = List.concat(list{rows, exprs}) + let () = switch rows { + | list{} => + let msg = "Record spread needs at least one field that's updated" + Parser.err(p, Diagnostics.message(msg)) + | _rows => () + } + + let loc = mkLoc(startPos, p.endPos) + Ast_helper.Exp.record(~loc, rows, spread) + } + + and parseExprBlockItem = p => { + let startPos = p.Parser.startPos + let attrs = parseAttributes(p) + switch p.Parser.token { + | Module => + Parser.next(p) + switch p.token { + | Lparen => parseFirstClassModuleExpr(~startPos, p) + | _ => + let name = switch p.Parser.token { + | Uident(ident) => + let loc = mkLoc(p.startPos, p.endPos) + Parser.next(p) + Location.mkloc(ident, loc) + | t => + Parser.err(p, Diagnostics.uident(t)) + Location.mknoloc("_") + } + + let body = parseModuleBindingBody(p) + Parser.optional(p, Semicolon) |> ignore + let expr = parseExprBlock(p) + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Exp.letmodule(~loc, name, body, expr) + } + | Exception => + let extensionConstructor = parseExceptionDef(~attrs, p) + Parser.optional(p, Semicolon) |> ignore + let blockExpr = parseExprBlock(p) + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Exp.letexception(~loc, extensionConstructor, blockExpr) + | Open => + let od = parseOpenDescription(~attrs, p) + Parser.optional(p, Semicolon) |> ignore + let blockExpr = parseExprBlock(p) + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Exp.open_(~loc, od.popen_override, od.popen_lid, blockExpr) + | Let => + let (recFlag, 
letBindings) = parseLetBindings(~attrs, p) + let next = switch p.Parser.token { + | Semicolon => + Parser.next(p) + if Grammar.isBlockExprStart(p.Parser.token) { + parseExprBlock(p) + } else { + let loc = mkLoc(p.startPos, p.endPos) + Ast_helper.Exp.construct(~loc, Location.mkloc(Longident.Lident("()"), loc), None) + } + | token when Grammar.isBlockExprStart(token) => parseExprBlock(p) + | _ => + let loc = mkLoc(p.startPos, p.endPos) + Ast_helper.Exp.construct(~loc, Location.mkloc(Longident.Lident("()"), loc), None) + } + + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Exp.let_(~loc, recFlag, letBindings, next) + | _ => + let e1 = { + let expr = parseExpr(p) + {...expr, pexp_attributes: List.concat(list{attrs, expr.pexp_attributes})} + } + + ignore(Parser.optional(p, Semicolon)) + if Grammar.isBlockExprStart(p.Parser.token) { + let e2 = parseExprBlock(p) + let loc = {...e1.pexp_loc, loc_end: e2.pexp_loc.loc_end} + Ast_helper.Exp.sequence(~loc, e1, e2) + } else { + e1 + } + } + } + + /* blockExpr ::= expr + * | expr ; + * | expr ; blockExpr + * | module ... ; blockExpr + * | open ... ; blockExpr + * | exception ... ; blockExpr + * | let ... + * | let ... ; + * | let ... ; blockExpr + * + * note: semi should be made optional + * a block of expression is always + */ + and parseExprBlock = (~first=?, p) => { + Parser.leaveBreadcrumb(p, Grammar.ExprBlock) + let item = switch first { + | Some(e) => e + | None => parseExprBlockItem(p) + } + + let blockExpr = switch p.Parser.token { + | Semicolon => + Parser.next(p) + if Grammar.isBlockExprStart(p.Parser.token) { + let next = parseExprBlockItem(p) + ignore(Parser.optional(p, Semicolon)) + let loc = {...item.pexp_loc, loc_end: next.pexp_loc.loc_end} + Ast_helper.Exp.sequence(~loc, item, next) + } else { + item + } + | token when Grammar.isBlockExprStart(token) => + let next = parseExprBlockItem(p) + ignore(Parser.optional(p, Semicolon)) + let loc = {...item.pexp_loc, loc_end: next.pexp_loc.loc_end} + Ast_helper.Exp.sequence(~loc, item, next) + | _ => item + } + + Parser.eatBreadcrumb(p) + overParseConstrainedOrCoercedOrArrowExpression(p, blockExpr) + } + + and parseTryExpression = p => { + let startPos = p.Parser.startPos + Parser.expect(Try, p) + let expr = parseExpr(~context=WhenExpr, p) + Parser.expect(Catch, p) + Parser.expect(Lbrace, p) + let cases = parsePatternMatching(p) + Parser.expect(Rbrace, p) + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Exp.try_(~loc, expr, cases) + } + + and parseIfExpression = p => { + Parser.beginRegion(p) + Parser.leaveBreadcrumb(p, Grammar.ExprIf) + let startPos = p.Parser.startPos + Parser.expect(If, p) + Parser.leaveBreadcrumb(p, Grammar.IfCondition) + /* doesn't make sense to try es6 arrow here? 
*/ + let conditionExpr = parseExpr(~context=WhenExpr, p) + Parser.eatBreadcrumb(p) + Parser.leaveBreadcrumb(p, IfBranch) + Parser.expect(Lbrace, p) + let thenExpr = parseExprBlock(p) + Parser.expect(Rbrace, p) + Parser.eatBreadcrumb(p) + let elseExpr = switch p.Parser.token { + | Else => + Parser.endRegion(p) + Parser.leaveBreadcrumb(p, Grammar.ElseBranch) + Parser.next(p) + Parser.beginRegion(p) + let elseExpr = switch p.token { + | If => parseIfExpression(p) + | _ => + Parser.expect(Lbrace, p) + let blockExpr = parseExprBlock(p) + Parser.expect(Rbrace, p) + blockExpr + } + + Parser.eatBreadcrumb(p) + Parser.endRegion(p) + Some(elseExpr) + | _ => + Parser.endRegion(p) + None + } + + let loc = mkLoc(startPos, p.prevEndPos) + Parser.eatBreadcrumb(p) + Ast_helper.Exp.ifthenelse(~loc, conditionExpr, thenExpr, elseExpr) + } + + and parseForRest = (hasOpeningParen, pattern, startPos, p) => { + Parser.expect(In, p) + let e1 = parseExpr(p) + let direction = switch p.Parser.token { + | To => Asttypes.Upto + | Downto => Asttypes.Downto + | token => + Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs)) + Asttypes.Upto + } + + Parser.next(p) + let e2 = parseExpr(~context=WhenExpr, p) + if hasOpeningParen { + Parser.expect(Rparen, p) + } + Parser.expect(Lbrace, p) + let bodyExpr = parseExprBlock(p) + Parser.expect(Rbrace, p) + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Exp.for_(~loc, pattern, e1, e2, direction, bodyExpr) + } + + and parseForExpression = p => { + let startPos = p.Parser.startPos + Parser.expect(For, p) + switch p.token { + | Lparen => + let lparen = p.startPos + Parser.next(p) + switch p.token { + | Rparen => + Parser.next(p) + let unitPattern = { + let loc = mkLoc(lparen, p.prevEndPos) + let lid = Location.mkloc(Longident.Lident("()"), loc) + Ast_helper.Pat.construct(lid, None) + } + + parseForRest(false, parseAliasPattern(~attrs=list{}, unitPattern, p), startPos, p) + | _ => + let pat = parsePattern(p) + switch p.token { + | Comma => + Parser.next(p) + let tuplePattern = parseTuplePattern(~attrs=list{}, ~startPos=lparen, ~first=pat, p) + + let pattern = parseAliasPattern(~attrs=list{}, tuplePattern, p) + parseForRest(false, pattern, startPos, p) + | _ => parseForRest(true, pat, startPos, p) + } + } + | _ => parseForRest(false, parsePattern(p), startPos, p) + } + } + + and parseWhileExpression = p => { + let startPos = p.Parser.startPos + Parser.expect(While, p) + let expr1 = parseExpr(~context=WhenExpr, p) + Parser.expect(Lbrace, p) + let expr2 = parseExprBlock(p) + Parser.expect(Rbrace, p) + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Exp.while_(~loc, expr1, expr2) + } + + and parsePatternMatchCase = p => { + Parser.beginRegion(p) + Parser.leaveBreadcrumb(p, Grammar.PatternMatchCase) + switch p.Parser.token { + | Token.Bar => + Parser.next(p) + let lhs = parsePattern(p) + let guard = switch p.Parser.token { + | When => + Parser.next(p) + Some(parseExpr(~context=WhenExpr, p)) + | _ => None + } + + let () = switch p.token { + | EqualGreater => Parser.next(p) + | _ => Recover.recoverEqualGreater(p) + } + + let rhs = parseExprBlock(p) + Parser.endRegion(p) + Parser.eatBreadcrumb(p) + Some(Ast_helper.Exp.case(lhs, ~guard?, rhs)) + | _ => + Parser.endRegion(p) + None + } + } + + and parsePatternMatching = p => { + Parser.leaveBreadcrumb(p, Grammar.PatternMatching) + let cases = parseDelimitedRegion( + ~grammar=Grammar.PatternMatching, + ~closing=Rbrace, + ~f=parsePatternMatchCase, + p, + ) + + let () = switch cases { + | list{} => + Parser.err( + 
~startPos=p.prevEndPos, + p, + Diagnostics.message("Pattern matching needs at least one case"), + ) + | _ => () + } + + cases + } + + and parseSwitchExpression = p => { + let startPos = p.Parser.startPos + Parser.expect(Switch, p) + let switchExpr = parseExpr(~context=WhenExpr, p) + Parser.expect(Lbrace, p) + let cases = parsePatternMatching(p) + Parser.expect(Rbrace, p) + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Exp.match_(~loc, switchExpr, cases) + } + + /* + * argument ::= + * | _ (* syntax sugar *) + * | expr + * | expr : type + * | ~ label-name + * | ~ label-name + * | ~ label-name ? + * | ~ label-name = expr + * | ~ label-name = _ (* syntax sugar *) + * | ~ label-name = expr : type + * | ~ label-name = ? expr + * | ~ label-name = ? _ (* syntax sugar *) + * | ~ label-name = ? expr : type + * + * uncurried_argument ::= + * | . argument + */ + and parseArgument = p => + if ( + p.Parser.token == Token.Tilde || + (p.token == Dot || + (p.token == Underscore || Grammar.isExprStart(p.token))) + ) { + switch p.Parser.token { + | Dot => + let uncurried = true + let startPos = p.Parser.startPos + Parser.next(p) + switch p.token { + /* apply(.) */ + | Rparen => + let loc = mkLoc(startPos, p.prevEndPos) + let unitExpr = Ast_helper.Exp.construct( + ~loc, + Location.mkloc(Longident.Lident("()"), loc), + None, + ) + + Some(uncurried, Asttypes.Nolabel, unitExpr) + | _ => parseArgument2(p, ~uncurried) + } + | _ => parseArgument2(p, ~uncurried=false) + } + } else { + None + } + + and parseArgument2 = (p, ~uncurried) => + switch p.Parser.token { + /* foo(_), do not confuse with foo(_ => x), TODO: performance */ + | Underscore when !isEs6ArrowExpression(~inTernary=false, p) => + let loc = mkLoc(p.startPos, p.endPos) + Parser.next(p) + let exp = Ast_helper.Exp.ident(~loc, Location.mkloc(Longident.Lident("_"), loc)) + Some(uncurried, Asttypes.Nolabel, exp) + | Tilde => + Parser.next(p) + /* TODO: nesting of pattern matches not intuitive for error recovery */ + switch p.Parser.token { + | Lident(ident) => + let startPos = p.startPos + Parser.next(p) + let endPos = p.prevEndPos + let loc = mkLoc(startPos, endPos) + let propLocAttr = (Location.mkloc("ns.namedArgLoc", loc), Parsetree.PStr(list{})) + let identExpr = Ast_helper.Exp.ident( + ~attrs=list{propLocAttr}, + ~loc, + Location.mkloc(Longident.Lident(ident), loc), + ) + switch p.Parser.token { + | Question => + Parser.next(p) + Some(uncurried, Asttypes.Optional(ident), identExpr) + | Equal => + Parser.next(p) + let label = switch p.Parser.token { + | Question => + Parser.next(p) + Asttypes.Optional(ident) + | _ => Labelled(ident) + } + + let expr = switch p.Parser.token { + | Underscore when !isEs6ArrowExpression(~inTernary=false, p) => + let loc = mkLoc(p.startPos, p.endPos) + Parser.next(p) + Ast_helper.Exp.ident(~loc, Location.mkloc(Longident.Lident("_"), loc)) + | _ => + let expr = parseConstrainedOrCoercedExpr(p) + {...expr, pexp_attributes: list{propLocAttr, ...expr.pexp_attributes}} + } + + Some(uncurried, label, expr) + | Colon => + Parser.next(p) + let typ = parseTypExpr(p) + let loc = mkLoc(startPos, p.prevEndPos) + let expr = Ast_helper.Exp.constraint_(~attrs=list{propLocAttr}, ~loc, identExpr, typ) + Some(uncurried, Labelled(ident), expr) + | _ => Some(uncurried, Labelled(ident), identExpr) + } + | t => + Parser.err(p, Diagnostics.lident(t)) + Some(uncurried, Nolabel, Recover.defaultExpr()) + } + | _ => Some(uncurried, Nolabel, parseConstrainedOrCoercedExpr(p)) + } + + and parseCallExpr = (p, funExpr) => { + Parser.expect(Lparen, 
p) + let startPos = p.Parser.startPos + Parser.leaveBreadcrumb(p, Grammar.ExprCall) + let args = parseCommaDelimitedRegion( + ~grammar=Grammar.ArgumentList, + ~closing=Rparen, + ~f=parseArgument, + p, + ) + + Parser.expect(Rparen, p) + let args = switch args { + | list{} => + let loc = mkLoc(startPos, p.prevEndPos) + /* No args -> unit sugar: `foo()` */ + list{ + ( + false, + Asttypes.Nolabel, + Ast_helper.Exp.construct(~loc, Location.mkloc(Longident.Lident("()"), loc), None), + ), + } + | args => args + } + + let loc = {...funExpr.pexp_loc, loc_end: p.prevEndPos} + let args = switch args { + | list{(u, lbl, expr), ...args} => + let group = ((grp, acc), (uncurried, lbl, expr)) => { + let (_u, grp) = grp + if uncurried === true { + ((true, list{(lbl, expr)}), list{(_u, List.rev(grp)), ...acc}) + } else { + ((_u, list{(lbl, expr), ...grp}), acc) + } + } + + let ((_u, grp), acc) = List.fold_left(group, ((u, list{(lbl, expr)}), list{}), args) + List.rev(list{(_u, List.rev(grp)), ...acc}) + | list{} => list{} + } + + let apply = List.fold_left((callBody, group) => { + let (uncurried, args) = group + let (args, wrap) = processUnderscoreApplication(args) + let exp = if uncurried { + let attrs = list{uncurryAttr} + Ast_helper.Exp.apply(~loc, ~attrs, callBody, args) + } else { + Ast_helper.Exp.apply(~loc, callBody, args) + } + + wrap(exp) + }, funExpr, args) + + Parser.eatBreadcrumb(p) + apply + } + + and parseValueOrConstructor = p => { + let startPos = p.Parser.startPos + let rec aux = (p, acc) => + switch p.Parser.token { + | Uident(ident) => + let endPosLident = p.endPos + Parser.next(p) + switch p.Parser.token { + | Dot => + Parser.next(p) + aux(p, list{ident, ...acc}) + | Lparen when p.prevEndPos.pos_lnum === p.startPos.pos_lnum => + let lparen = p.startPos + let args = parseConstructorArgs(p) + let rparen = p.prevEndPos + let lident = buildLongident(list{ident, ...acc}) + let tail = switch args { + | list{} => None + | list{{Parsetree.pexp_desc: Pexp_tuple(_)} as arg} as args => + let loc = mkLoc(lparen, rparen) + if p.mode == ParseForTypeChecker { + /* Some(1, 2) for type-checker */ + Some(arg) + } else { + /* Some((1, 2)) for printer */ + Some(Ast_helper.Exp.tuple(~loc, args)) + } + | list{arg} => Some(arg) + | args => + let loc = mkLoc(lparen, rparen) + Some(Ast_helper.Exp.tuple(~loc, args)) + } + + let loc = mkLoc(startPos, p.prevEndPos) + let identLoc = mkLoc(startPos, endPosLident) + Ast_helper.Exp.construct(~loc, Location.mkloc(lident, identLoc), tail) + | _ => + let loc = mkLoc(startPos, p.prevEndPos) + let lident = buildLongident(list{ident, ...acc}) + Ast_helper.Exp.construct(~loc, Location.mkloc(lident, loc), None) + } + | Lident(ident) => + Parser.next(p) + let loc = mkLoc(startPos, p.prevEndPos) + let lident = buildLongident(list{ident, ...acc}) + Ast_helper.Exp.ident(~loc, Location.mkloc(lident, loc)) + | List => + Parser.next(p) + let loc = mkLoc(startPos, p.prevEndPos) + let lident = buildLongident(list{"list", ...acc}) + Ast_helper.Exp.ident(~loc, Location.mkloc(lident, loc)) + | token => + Parser.next(p) + Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs)) + Recover.defaultExpr() + } + + aux(p, list{}) + } + + and parsePolyVariantExpr = p => { + let startPos = p.startPos + let (ident, _loc) = parseHashIdent(~startPos, p) + switch p.Parser.token { + | Lparen when p.prevEndPos.pos_lnum === p.startPos.pos_lnum => + let lparen = p.startPos + let args = parseConstructorArgs(p) + let rparen = p.prevEndPos + let loc_paren = mkLoc(lparen, rparen) + let tail = switch args { 
+ | list{} => None + | list{{Parsetree.pexp_desc: Pexp_tuple(_)} as expr} as args => + if p.mode == ParseForTypeChecker { + /* #a(1, 2) for type-checker */ + Some(expr) + } else { + /* #a((1, 2)) for type-checker */ + Some(Ast_helper.Exp.tuple(~loc=loc_paren, args)) + } + | list{arg} => Some(arg) + | args => + /* #a((1, 2)) for printer */ + Some(Ast_helper.Exp.tuple(~loc=loc_paren, args)) + } + + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Exp.variant(~loc, ident, tail) + | _ => + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Exp.variant(~loc, ident, None) + } + } + + and parseConstructorArgs = p => { + let lparen = p.Parser.startPos + Parser.expect(Lparen, p) + let args = parseCommaDelimitedRegion( + ~grammar=Grammar.ExprList, + ~f=parseConstrainedExprRegion, + ~closing=Rparen, + p, + ) + + Parser.expect(Rparen, p) + switch args { + | list{} => + let loc = mkLoc(lparen, p.prevEndPos) + list{Ast_helper.Exp.construct(~loc, Location.mkloc(Longident.Lident("()"), loc), None)} + | args => args + } + } + + and parseTupleExpr = (~first, ~startPos, p) => { + let exprs = parseCommaDelimitedRegion( + p, + ~grammar=Grammar.ExprList, + ~closing=Rparen, + ~f=parseConstrainedExprRegion, + ) + + Parser.expect(Rparen, p) + Ast_helper.Exp.tuple(~loc=mkLoc(startPos, p.prevEndPos), list{first, ...exprs}) + } + + and parseSpreadExprRegion = p => + switch p.Parser.token { + | DotDotDot => + Parser.next(p) + let expr = parseConstrainedOrCoercedExpr(p) + Some(true, expr) + | token when Grammar.isExprStart(token) => Some(false, parseConstrainedOrCoercedExpr(p)) + | _ => None + } + + and parseListExpr = (~startPos, p) => { + Parser.expect(Lbracket, p) + let listExprs = parseCommaDelimitedReversedList( + p, + ~grammar=Grammar.ListExpr, + ~closing=Rbracket, + ~f=parseSpreadExprRegion, + ) + + Parser.expect(Rbracket, p) + let loc = mkLoc(startPos, p.prevEndPos) + switch listExprs { + | list{(true, expr), ...exprs} => + let exprs = exprs |> List.map(snd) |> List.rev + makeListExpression(loc, exprs, Some(expr)) + | exprs => + let exprs = exprs |> List.map(((spread, expr)) => { + if spread { + Parser.err(p, Diagnostics.message(ErrorMessages.listExprSpread)) + } + expr + }) |> List.rev + + makeListExpression(loc, exprs, None) + } + } + + /* Overparse ... 
and give a nice error message */ + and parseNonSpreadExp = (~msg, p) => { + let () = switch p.Parser.token { + | DotDotDot => + Parser.err(p, Diagnostics.message(msg)) + Parser.next(p) + | _ => () + } + + switch p.Parser.token { + | token when Grammar.isExprStart(token) => + let expr = parseExpr(p) + switch p.Parser.token { + | Colon => + Parser.next(p) + let typ = parseTypExpr(p) + let loc = mkLoc(expr.pexp_loc.loc_start, typ.ptyp_loc.loc_end) + Some(Ast_helper.Exp.constraint_(~loc, expr, typ)) + | _ => Some(expr) + } + | _ => None + } + } + + and parseArrayExp = p => { + let startPos = p.Parser.startPos + Parser.expect(Lbracket, p) + let exprs = parseCommaDelimitedRegion( + p, + ~grammar=Grammar.ExprList, + ~closing=Rbracket, + ~f=parseNonSpreadExp(~msg=ErrorMessages.arrayExprSpread), + ) + + Parser.expect(Rbracket, p) + Ast_helper.Exp.array(~loc=mkLoc(startPos, p.prevEndPos), exprs) + } + + /* TODO: check attributes in the case of poly type vars, + * might be context dependend: parseFieldDeclaration (see ocaml) */ + and parsePolyTypeExpr = p => { + let startPos = p.Parser.startPos + switch p.Parser.token { + | SingleQuote => + let vars = parseTypeVarList(p) + switch vars { + | list{_v1, _v2, ..._} => + Parser.expect(Dot, p) + let typ = parseTypExpr(p) + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Typ.poly(~loc, vars, typ) + | list{var} => + switch p.Parser.token { + | Dot => + Parser.next(p) + let typ = parseTypExpr(p) + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Typ.poly(~loc, vars, typ) + | EqualGreater => + Parser.next(p) + let typ = Ast_helper.Typ.var(~loc=var.loc, var.txt) + let returnType = parseTypExpr(~alias=false, p) + let loc = mkLoc(typ.Parsetree.ptyp_loc.loc_start, p.prevEndPos) + Ast_helper.Typ.arrow(~loc, Asttypes.Nolabel, typ, returnType) + | _ => Ast_helper.Typ.var(~loc=var.loc, var.txt) + } + | _ => assert false + } + | _ => parseTypExpr(p) + } + } + + /* 'a 'b 'c */ + and parseTypeVarList = p => { + let rec loop = (p, vars) => + switch p.Parser.token { + | SingleQuote => + Parser.next(p) + let (lident, loc) = parseLident(p) + let var = Location.mkloc(lident, loc) + loop(p, list{var, ...vars}) + | _ => List.rev(vars) + } + + loop(p, list{}) + } + + and parseLidentList = p => { + let rec loop = (p, ls) => + switch p.Parser.token { + | Lident(lident) => + let loc = mkLoc(p.startPos, p.endPos) + Parser.next(p) + loop(p, list{Location.mkloc(lident, loc), ...ls}) + | _ => List.rev(ls) + } + + loop(p, list{}) + } + + and parseAtomicTypExpr = (~attrs, p) => { + Parser.leaveBreadcrumb(p, Grammar.AtomicTypExpr) + let startPos = p.Parser.startPos + let typ = switch p.Parser.token { + | SingleQuote => + Parser.next(p) + let (ident, loc) = parseLident(p) + Ast_helper.Typ.var(~loc, ~attrs, ident) + | Underscore => + let endPos = p.endPos + Parser.next(p) + Ast_helper.Typ.any(~loc=mkLoc(startPos, endPos), ~attrs, ()) + | Lparen => + Parser.next(p) + switch p.Parser.token { + | Rparen => + Parser.next(p) + let loc = mkLoc(startPos, p.prevEndPos) + let unitConstr = Location.mkloc(Longident.Lident("unit"), loc) + Ast_helper.Typ.constr(~attrs, unitConstr, list{}) + | _ => + let t = parseTypExpr(p) + switch p.token { + | Comma => + Parser.next(p) + parseTupleType(~attrs, ~first=t, ~startPos, p) + | _ => + Parser.expect(Rparen, p) + { + ...t, + ptyp_loc: mkLoc(startPos, p.prevEndPos), + ptyp_attributes: List.concat(list{attrs, t.ptyp_attributes}), + } + } + } + | Lbracket => parsePolymorphicVariantType(~attrs, p) + | Uident(_) | Lident(_) | List => + let constr = 
parseValuePath(p) + let args = parseTypeConstructorArgs(~constrName=constr, p) + Ast_helper.Typ.constr(~loc=mkLoc(startPos, p.prevEndPos), ~attrs, constr, args) + | Module => + Parser.next(p) + Parser.expect(Lparen, p) + let packageType = parsePackageType(~startPos, ~attrs, p) + Parser.expect(Rparen, p) + {...packageType, ptyp_loc: mkLoc(startPos, p.prevEndPos)} + | Percent => + let extension = parseExtension(p) + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Typ.extension(~attrs, ~loc, extension) + | Lbrace => parseBsObjectType(~attrs, p) + | token => + switch skipTokensAndMaybeRetry(p, ~isStartOfGrammar=Grammar.isAtomicTypExprStart) { + | Some() => parseAtomicTypExpr(~attrs, p) + | None => + Parser.err(~startPos=p.prevEndPos, p, Diagnostics.unexpected(token, p.breadcrumbs)) + Recover.defaultType() + } + } + + Parser.eatBreadcrumb(p) + typ + } + + /* package-type ::= + | modtype-path + ∣ modtype-path with package-constraint { and package-constraint } + */ + and parsePackageType = (~startPos, ~attrs, p) => { + let modTypePath = parseModuleLongIdent(~lowercase=true, p) + switch p.Parser.token { + | With => + Parser.next(p) + let constraints = parsePackageConstraints(p) + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Typ.package(~loc, ~attrs, modTypePath, constraints) + | _ => + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Typ.package(~loc, ~attrs, modTypePath, list{}) + } + } + + /* package-constraint { and package-constraint } */ + and parsePackageConstraints = p => { + let first = { + Parser.expect(Typ, p) + let typeConstr = parseValuePath(p) + Parser.expect(Equal, p) + let typ = parseTypExpr(p) + (typeConstr, typ) + } + + let rest = parseRegion(~grammar=Grammar.PackageConstraint, ~f=parsePackageConstraint, p) + + list{first, ...rest} + } + + /* and type typeconstr = typexpr */ + and parsePackageConstraint = p => + switch p.Parser.token { + | And => + Parser.next(p) + Parser.expect(Typ, p) + let typeConstr = parseValuePath(p) + Parser.expect(Equal, p) + let typ = parseTypExpr(p) + Some(typeConstr, typ) + | _ => None + } + + and parseBsObjectType = (~attrs, p) => { + let startPos = p.Parser.startPos + Parser.expect(Lbrace, p) + let closedFlag = switch p.token { + | DotDot => + Parser.next(p) + Asttypes.Open + | Dot => + Parser.next(p) + Asttypes.Closed + | _ => Asttypes.Closed + } + + let fields = parseCommaDelimitedRegion( + ~grammar=Grammar.StringFieldDeclarations, + ~closing=Rbrace, + ~f=parseStringFieldDeclaration, + p, + ) + + Parser.expect(Rbrace, p) + let loc = mkLoc(startPos, p.prevEndPos) + makeBsObjType(~attrs, ~loc, ~closed=closedFlag, fields) + } + + /* TODO: check associativity in combination with attributes */ + and parseTypeAlias = (p, typ) => + switch p.Parser.token { + | As => + Parser.next(p) + Parser.expect(SingleQuote, p) + let (ident, _loc) = parseLident(p) + /* TODO: how do we parse attributes here? */ + Ast_helper.Typ.alias(~loc=mkLoc(typ.Parsetree.ptyp_loc.loc_start, p.prevEndPos), typ, ident) + | _ => typ + } + + /* type_parameter ::= + * | type_expr + * | ~ident: type_expr + * | ~ident: type_expr=? + * + * note: + * | attrs ~ident: type_expr -> attrs are on the arrow + * | attrs type_expr -> attrs are here part of the type_expr + * + * uncurried_type_parameter ::= + * | . 
type_parameter + */ + and parseTypeParameter = p => + if p.Parser.token == Token.Tilde || (p.token == Dot || Grammar.isTypExprStart(p.token)) { + let startPos = p.Parser.startPos + let uncurried = Parser.optional(p, Dot) + let attrs = parseAttributes(p) + switch p.Parser.token { + | Tilde => + Parser.next(p) + let (name, _loc) = parseLident(p) + Parser.expect(~grammar=Grammar.TypeExpression, Colon, p) + let typ = parseTypExpr(p) + switch p.Parser.token { + | Equal => + Parser.next(p) + Parser.expect(Question, p) + Some(uncurried, attrs, Asttypes.Optional(name), typ, startPos) + | _ => Some(uncurried, attrs, Asttypes.Labelled(name), typ, startPos) + } + | Lident(_) | List => + let (name, loc) = parseLident(p) + switch p.token { + | Colon => + let () = { + let error = Diagnostics.message("Parameter names start with a `~`, like: ~" ++ name) + + Parser.err(~startPos=loc.loc_start, ~endPos=loc.loc_end, p, error) + } + + Parser.next(p) + let typ = parseTypExpr(p) + switch p.Parser.token { + | Equal => + Parser.next(p) + Parser.expect(Question, p) + Some(uncurried, attrs, Asttypes.Optional(name), typ, startPos) + | _ => Some(uncurried, attrs, Asttypes.Labelled(name), typ, startPos) + } + | _ => + let constr = Location.mkloc(Longident.Lident(name), loc) + let args = parseTypeConstructorArgs(~constrName=constr, p) + let typ = Ast_helper.Typ.constr(~loc=mkLoc(startPos, p.prevEndPos), ~attrs, constr, args) + + let typ = parseArrowTypeRest(~es6Arrow=true, ~startPos, typ, p) + let typ = parseTypeAlias(p, typ) + Some(uncurried, list{}, Asttypes.Nolabel, typ, startPos) + } + | _ => + let typ = parseTypExpr(p) + let typWithAttributes = { + ...typ, + ptyp_attributes: List.concat(list{attrs, typ.ptyp_attributes}), + } + Some(uncurried, list{}, Asttypes.Nolabel, typWithAttributes, startPos) + } + } else { + None + } + + /* (int, ~x:string, float) */ + and parseTypeParameters = p => { + let startPos = p.Parser.startPos + Parser.expect(Lparen, p) + switch p.Parser.token { + | Rparen => + Parser.next(p) + let loc = mkLoc(startPos, p.prevEndPos) + let unitConstr = Location.mkloc(Longident.Lident("unit"), loc) + let typ = Ast_helper.Typ.constr(unitConstr, list{}) + list{(false, list{}, Asttypes.Nolabel, typ, startPos)} + | _ => + let params = parseCommaDelimitedRegion( + ~grammar=Grammar.TypeParameters, + ~closing=Rparen, + ~f=parseTypeParameter, + p, + ) + + Parser.expect(Rparen, p) + params + } + } + + and parseEs6ArrowType = (~attrs, p) => { + let startPos = p.Parser.startPos + switch p.Parser.token { + | Tilde => + Parser.next(p) + let (name, _loc) = parseLident(p) + Parser.expect(~grammar=Grammar.TypeExpression, Colon, p) + let typ = parseTypExpr(~alias=false, ~es6Arrow=false, p) + let arg = switch p.Parser.token { + | Equal => + Parser.next(p) + Parser.expect(Question, p) + Asttypes.Optional(name) + | _ => Asttypes.Labelled(name) + } + + Parser.expect(EqualGreater, p) + let returnType = parseTypExpr(~alias=false, p) + Ast_helper.Typ.arrow(~attrs, arg, typ, returnType) + | _ => + let parameters = parseTypeParameters(p) + Parser.expect(EqualGreater, p) + let returnType = parseTypExpr(~alias=false, p) + let endPos = p.prevEndPos + let typ = List.fold_right(((uncurried, attrs, argLbl, typ, startPos), t) => { + let attrs = if uncurried { + list{uncurryAttr, ...attrs} + } else { + attrs + } + Ast_helper.Typ.arrow(~loc=mkLoc(startPos, endPos), ~attrs, argLbl, typ, t) + }, parameters, returnType) + + { + ...typ, + ptyp_attributes: List.concat(list{typ.ptyp_attributes, attrs}), + ptyp_loc: mkLoc(startPos, 
p.prevEndPos), + } + } + } + + /* + * typexpr ::= + * | 'ident + * | _ + * | (typexpr) + * | typexpr => typexpr --> es6 arrow + * | (typexpr, typexpr) => typexpr --> es6 arrow + * | /typexpr, typexpr, typexpr/ --> tuple + * | typeconstr + * | typeconstr + * | typeconstr + * | typexpr as 'ident + * | %attr-id --> extension + * | %attr-id(payload) --> extension + * + * typeconstr ::= + * | lident + * | uident.lident + * | uident.uident.lident --> long module path + */ + and parseTypExpr = (~attrs=?, ~es6Arrow=true, ~alias=true, p) => { + /* Parser.leaveBreadcrumb p Grammar.TypeExpression; */ + let startPos = p.Parser.startPos + let attrs = switch attrs { + | Some(attrs) => attrs + | None => parseAttributes(p) + } + let typ = if es6Arrow && isEs6ArrowType(p) { + parseEs6ArrowType(~attrs, p) + } else { + let typ = parseAtomicTypExpr(~attrs, p) + parseArrowTypeRest(~es6Arrow, ~startPos, typ, p) + } + + let typ = if alias { + parseTypeAlias(p, typ) + } else { + typ + } + + /* Parser.eatBreadcrumb p; */ + typ + } + + and parseArrowTypeRest = (~es6Arrow, ~startPos, typ, p) => + switch p.Parser.token { + | (EqualGreater | MinusGreater) as token when es6Arrow === true => + /* error recovery */ + if token == MinusGreater { + Parser.expect(EqualGreater, p) + } + Parser.next(p) + let returnType = parseTypExpr(~alias=false, p) + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Typ.arrow(~loc, Asttypes.Nolabel, typ, returnType) + | _ => typ + } + + and parseTypExprRegion = p => + if Grammar.isTypExprStart(p.Parser.token) { + Some(parseTypExpr(p)) + } else { + None + } + + and parseTupleType = (~attrs, ~first, ~startPos, p) => { + let typexprs = parseCommaDelimitedRegion( + ~grammar=Grammar.TypExprList, + ~closing=Rparen, + ~f=parseTypExprRegion, + p, + ) + + Parser.expect(Rparen, p) + let tupleLoc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Typ.tuple(~attrs, ~loc=tupleLoc, list{first, ...typexprs}) + } + + and parseTypeConstructorArgRegion = p => + if Grammar.isTypExprStart(p.Parser.token) { + Some(parseTypExpr(p)) + } else if p.token == LessThan { + Parser.next(p) + parseTypeConstructorArgRegion(p) + } else { + None + } + + /* Js.Nullable.value<'a> */ + and parseTypeConstructorArgs = (~constrName, p) => { + let opening = p.Parser.token + let openingStartPos = p.startPos + switch opening { + | LessThan | Lparen => + Scanner.setDiamondMode(p.scanner) + Parser.next(p) + let typeArgs = /* TODO: change Grammar.TypExprList to TypArgList!!! Why did I wrote this? 
*/ + parseCommaDelimitedRegion( + ~grammar=Grammar.TypExprList, + ~closing=GreaterThan, + ~f=parseTypeConstructorArgRegion, + p, + ) + + let () = switch p.token { + | Rparen when opening == Token.Lparen => + let typ = Ast_helper.Typ.constr(constrName, typeArgs) + let msg = + Doc.breakableGroup( + ~forceBreak=true, + Doc.concat(list{ + Doc.text("Type parameters require angle brackets:"), + Doc.indent(Doc.concat(list{Doc.line, Printer.printTypExpr(typ, CommentTable.empty)})), + }), + ) |> Doc.toString(~width=80) + + Parser.err(~startPos=openingStartPos, p, Diagnostics.message(msg)) + Parser.next(p) + | _ => Parser.expect(GreaterThan, p) + } + + Scanner.popMode(p.scanner, Diamond) + typeArgs + | _ => list{} + } + } + + /* string-field-decl ::= + * | string: poly-typexpr + * | attributes string-field-decl */ + and parseStringFieldDeclaration = p => { + let attrs = parseAttributes(p) + switch p.Parser.token { + | String(name) => + let nameStartPos = p.startPos + let nameEndPos = p.endPos + Parser.next(p) + let fieldName = Location.mkloc(name, mkLoc(nameStartPos, nameEndPos)) + Parser.expect(~grammar=Grammar.TypeExpression, Colon, p) + let typ = parsePolyTypeExpr(p) + Some(Parsetree.Otag(fieldName, attrs, typ)) + | _token => None + } + } + + /* field-decl ::= + * | [mutable] field-name : poly-typexpr + * | attributes field-decl */ + and parseFieldDeclaration = p => { + let startPos = p.Parser.startPos + let attrs = parseAttributes(p) + let mut = if Parser.optional(p, Token.Mutable) { + Asttypes.Mutable + } else { + Asttypes.Immutable + } + + let (lident, loc) = switch p.token { + | List => + let loc = mkLoc(p.startPos, p.endPos) + Parser.next(p) + ("list", loc) + | _ => parseLident(p) + } + + let name = Location.mkloc(lident, loc) + let typ = switch p.Parser.token { + | Colon => + Parser.next(p) + parsePolyTypeExpr(p) + | _ => Ast_helper.Typ.constr(~loc=name.loc, {...name, txt: Lident(name.txt)}, list{}) + } + + let loc = mkLoc(startPos, typ.ptyp_loc.loc_end) + Ast_helper.Type.field(~attrs, ~loc, ~mut, name, typ) + } + + and parseFieldDeclarationRegion = p => { + let startPos = p.Parser.startPos + let attrs = parseAttributes(p) + let mut = if Parser.optional(p, Token.Mutable) { + Asttypes.Mutable + } else { + Asttypes.Immutable + } + + switch p.token { + | Lident(_) | List => + let (lident, loc) = switch p.token { + | List => + let loc = mkLoc(p.startPos, p.endPos) + Parser.next(p) + ("list", loc) + | _ => parseLident(p) + } + + let name = Location.mkloc(lident, loc) + let typ = switch p.Parser.token { + | Colon => + Parser.next(p) + parsePolyTypeExpr(p) + | _ => Ast_helper.Typ.constr(~loc=name.loc, {...name, txt: Lident(name.txt)}, list{}) + } + + let loc = mkLoc(startPos, typ.ptyp_loc.loc_end) + Some(Ast_helper.Type.field(~attrs, ~loc, ~mut, name, typ)) + | _ => None + } + } + + /* record-decl ::= + * | { field-decl } + * | { field-decl, field-decl } + * | { field-decl, field-decl, field-decl, } + */ + and parseRecordDeclaration = p => { + Parser.leaveBreadcrumb(p, Grammar.RecordDecl) + Parser.expect(Lbrace, p) + let rows = parseCommaDelimitedRegion( + ~grammar=Grammar.RecordDecl, + ~closing=Rbrace, + ~f=parseFieldDeclarationRegion, + p, + ) + + Parser.expect(Rbrace, p) + Parser.eatBreadcrumb(p) + rows + } + + /* constr-args ::= + * | (typexpr) + * | (typexpr, typexpr) + * | (typexpr, typexpr, typexpr,) + * | (record-decl) + * + * TODO: should we overparse inline-records in every position? + * Give a good error message afterwards? 
+ */ + and parseConstrDeclArgs = p => { + let constrArgs = switch p.Parser.token { + | Lparen => + Parser.next(p) + /* TODO: this could use some cleanup/stratification */ + switch p.Parser.token { + | Lbrace => + let lbrace = p.startPos + Parser.next(p) + let startPos = p.Parser.startPos + switch p.Parser.token { + | DotDot | Dot => + let closedFlag = switch p.token { + | DotDot => + Parser.next(p) + Asttypes.Open + | Dot => + Parser.next(p) + Asttypes.Closed + | _ => Asttypes.Closed + } + + let fields = parseCommaDelimitedRegion( + ~grammar=Grammar.StringFieldDeclarations, + ~closing=Rbrace, + ~f=parseStringFieldDeclaration, + p, + ) + + Parser.expect(Rbrace, p) + let loc = mkLoc(startPos, p.prevEndPos) + let typ = makeBsObjType(~attrs=list{}, ~loc, ~closed=closedFlag, fields) + Parser.optional(p, Comma) |> ignore + let moreArgs = parseCommaDelimitedRegion( + ~grammar=Grammar.TypExprList, + ~closing=Rparen, + ~f=parseTypExprRegion, + p, + ) + + Parser.expect(Rparen, p) + Parsetree.Pcstr_tuple(list{typ, ...moreArgs}) + | _ => + let attrs = parseAttributes(p) + switch p.Parser.token { + | String(_) => + let closedFlag = Asttypes.Closed + let fields = switch attrs { + | list{} => + parseCommaDelimitedRegion( + ~grammar=Grammar.StringFieldDeclarations, + ~closing=Rbrace, + ~f=parseStringFieldDeclaration, + p, + ) + | attrs => + let first = { + Parser.leaveBreadcrumb(p, Grammar.StringFieldDeclarations) + let field = switch parseStringFieldDeclaration(p) { + | Some(field) => field + | None => assert false + } + + /* parse comma after first */ + let () = switch p.Parser.token { + | Rbrace | Eof => () + | Comma => Parser.next(p) + | _ => Parser.expect(Comma, p) + } + + Parser.eatBreadcrumb(p) + switch field { + | Parsetree.Otag(label, _, ct) => Parsetree.Otag(label, attrs, ct) + | Oinherit(ct) => Oinherit(ct) + } + } + + list{ + first, + ...parseCommaDelimitedRegion( + ~grammar=Grammar.StringFieldDeclarations, + ~closing=Rbrace, + ~f=parseStringFieldDeclaration, + p, + ), + } + } + Parser.expect(Rbrace, p) + let loc = mkLoc(startPos, p.prevEndPos) + let typ = makeBsObjType(~attrs=list{}, ~loc, ~closed=closedFlag, fields) + Parser.optional(p, Comma) |> ignore + let moreArgs = parseCommaDelimitedRegion( + ~grammar=Grammar.TypExprList, + ~closing=Rparen, + ~f=parseTypExprRegion, + p, + ) + + Parser.expect(Rparen, p) + Parsetree.Pcstr_tuple(list{typ, ...moreArgs}) + | _ => + let fields = switch attrs { + | list{} => + parseCommaDelimitedRegion( + ~grammar=Grammar.FieldDeclarations, + ~closing=Rbrace, + ~f=parseFieldDeclarationRegion, + p, + ) + | attrs => + let first = { + let field = parseFieldDeclaration(p) + Parser.expect(Comma, p) + {...field, Parsetree.pld_attributes: attrs} + } + + list{ + first, + ...parseCommaDelimitedRegion( + ~grammar=Grammar.FieldDeclarations, + ~closing=Rbrace, + ~f=parseFieldDeclarationRegion, + p, + ), + } + } + + let () = switch fields { + | list{} => + Parser.err( + ~startPos=lbrace, + p, + Diagnostics.message("An inline record declaration needs at least one field"), + ) + | _ => () + } + + Parser.expect(Rbrace, p) + Parser.optional(p, Comma) |> ignore + Parser.expect(Rparen, p) + Parsetree.Pcstr_record(fields) + } + } + | _ => + let args = parseCommaDelimitedRegion( + ~grammar=Grammar.TypExprList, + ~closing=Rparen, + ~f=parseTypExprRegion, + p, + ) + + Parser.expect(Rparen, p) + Parsetree.Pcstr_tuple(args) + } + | _ => Pcstr_tuple(list{}) + } + + let res = switch p.Parser.token { + | Colon => + Parser.next(p) + Some(parseTypExpr(p)) + | _ => None + } + + 
(constrArgs, res) + } + + /* constr-decl ::= + * | constr-name + * | attrs constr-name + * | constr-name const-args + * | attrs constr-name const-args */ + and parseTypeConstructorDeclarationWithBar = p => + switch p.Parser.token { + | Bar => + let startPos = p.Parser.startPos + Parser.next(p) + Some(parseTypeConstructorDeclaration(~startPos, p)) + | _ => None + } + + and parseTypeConstructorDeclaration = (~startPos, p) => { + Parser.leaveBreadcrumb(p, Grammar.ConstructorDeclaration) + let attrs = parseAttributes(p) + switch p.Parser.token { + | Uident(uident) => + let uidentLoc = mkLoc(p.startPos, p.endPos) + Parser.next(p) + let (args, res) = parseConstrDeclArgs(p) + Parser.eatBreadcrumb(p) + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Type.constructor(~loc, ~attrs, ~res?, ~args, Location.mkloc(uident, uidentLoc)) + | t => + Parser.err(p, Diagnostics.uident(t)) + Ast_helper.Type.constructor(Location.mknoloc("_")) + } + } + + /* [|] constr-decl { | constr-decl } */ + and parseTypeConstructorDeclarations = (~first=?, p) => { + let firstConstrDecl = switch first { + | None => + let startPos = p.Parser.startPos + ignore(Parser.optional(p, Token.Bar)) + parseTypeConstructorDeclaration(~startPos, p) + | Some(firstConstrDecl) => firstConstrDecl + } + + list{ + firstConstrDecl, + ...parseRegion( + ~grammar=Grammar.ConstructorDeclaration, + ~f=parseTypeConstructorDeclarationWithBar, + p, + ), + } + } + + /* + * type-representation ::= + * ∣ = [ | ] constr-decl { | constr-decl } + * ∣ = private [ | ] constr-decl { | constr-decl } + * | = | + * ∣ = private | + * ∣ = record-decl + * ∣ = private record-decl + * | = .. + */ + and parseTypeRepresentation = p => { + Parser.leaveBreadcrumb(p, Grammar.TypeRepresentation) + /* = consumed */ + let privateFlag = if Parser.optional(p, Token.Private) { + Asttypes.Private + } else { + Asttypes.Public + } + + let kind = switch p.Parser.token { + | Bar | Uident(_) => Parsetree.Ptype_variant(parseTypeConstructorDeclarations(p)) + | Lbrace => Parsetree.Ptype_record(parseRecordDeclaration(p)) + | DotDot => + Parser.next(p) + Ptype_open + | token => + Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs)) + /* TODO: I have no idea if this is even remotely a good idea */ + Parsetree.Ptype_variant(list{}) + } + + Parser.eatBreadcrumb(p) + (privateFlag, kind) + } + + /* type-param ::= + * | variance 'lident + * | variance _ + * + * variance ::= + * | + + * | - + * | (* empty *) + */ + and parseTypeParam = p => { + let variance = switch p.Parser.token { + | Plus => + Parser.next(p) + Asttypes.Covariant + | Minus => + Parser.next(p) + Contravariant + | _ => Invariant + } + + switch p.Parser.token { + | SingleQuote => + Parser.next(p) + let (ident, loc) = parseLident(p) + Some(Ast_helper.Typ.var(~loc, ident), variance) + | Underscore => + let loc = mkLoc(p.startPos, p.endPos) + Parser.next(p) + Some(Ast_helper.Typ.any(~loc, ()), variance) + /* TODO: should we try parsing lident as 'ident ? */ + | _token => None + } + } + + /* type-params ::= + * | + * ∣ + * ∣ + * ∣ + * + * TODO: when we have pretty-printer show an error + * with the actual code corrected. 
*/ + and parseTypeParams = (~parent, p) => { + let opening = p.Parser.token + switch opening { + | LessThan | Lparen when p.startPos.pos_lnum === p.prevEndPos.pos_lnum => + Scanner.setDiamondMode(p.scanner) + let openingStartPos = p.startPos + Parser.leaveBreadcrumb(p, Grammar.TypeParams) + Parser.next(p) + let params = parseCommaDelimitedRegion( + ~grammar=Grammar.TypeParams, + ~closing=GreaterThan, + ~f=parseTypeParam, + p, + ) + + let () = switch p.token { + | Rparen when opening == Token.Lparen => + let msg = + Doc.breakableGroup( + ~forceBreak=true, + Doc.concat(list{ + Doc.text("Type parameters require angle brackets:"), + Doc.indent( + Doc.concat(list{ + Doc.line, + Doc.concat(list{ + Printer.printLongident(parent.Location.txt), + Printer.printTypeParams(params, CommentTable.empty), + }), + }), + ), + }), + ) |> Doc.toString(~width=80) + + Parser.err(~startPos=openingStartPos, p, Diagnostics.message(msg)) + Parser.next(p) + | _ => Parser.expect(GreaterThan, p) + } + + Scanner.popMode(p.scanner, Diamond) + Parser.eatBreadcrumb(p) + params + | _ => list{} + } + } + + /* type-constraint ::= constraint ' ident = typexpr */ + and parseTypeConstraint = p => { + let startPos = p.Parser.startPos + switch p.Parser.token { + | Token.Constraint => + Parser.next(p) + Parser.expect(SingleQuote, p) + switch p.Parser.token { + | Lident(ident) => + let identLoc = mkLoc(startPos, p.endPos) + Parser.next(p) + Parser.expect(Equal, p) + let typ = parseTypExpr(p) + let loc = mkLoc(startPos, p.prevEndPos) + Some(Ast_helper.Typ.var(~loc=identLoc, ident), typ, loc) + | t => + Parser.err(p, Diagnostics.lident(t)) + let loc = mkLoc(startPos, p.prevEndPos) + Some(Ast_helper.Typ.any(), parseTypExpr(p), loc) + } + | _ => None + } + } + + /* type-constraints ::= + * | (* empty *) + * | type-constraint + * | type-constraint type-constraint + * | type-constraint type-constraint type-constraint (* 0 or more *) + */ + and parseTypeConstraints = p => + parseRegion(~grammar=Grammar.TypeConstraint, ~f=parseTypeConstraint, p) + + and parseTypeEquationOrConstrDecl = p => { + let uidentStartPos = p.Parser.startPos + switch p.Parser.token { + | Uident(uident) => + Parser.next(p) + switch p.Parser.token { + | Dot => + Parser.next(p) + let typeConstr = parseValuePathTail(p, uidentStartPos, Longident.Lident(uident)) + + let loc = mkLoc(uidentStartPos, p.prevEndPos) + let typ = parseTypeAlias( + p, + Ast_helper.Typ.constr( + ~loc, + typeConstr, + parseTypeConstructorArgs(~constrName=typeConstr, p), + ), + ) + switch p.token { + | Equal => + Parser.next(p) + let (priv, kind) = parseTypeRepresentation(p) + (Some(typ), priv, kind) + | EqualGreater => + Parser.next(p) + let returnType = parseTypExpr(~alias=false, p) + let loc = mkLoc(uidentStartPos, p.prevEndPos) + let arrowType = Ast_helper.Typ.arrow(~loc, Asttypes.Nolabel, typ, returnType) + let typ = parseTypeAlias(p, arrowType) + (Some(typ), Asttypes.Public, Parsetree.Ptype_abstract) + | _ => (Some(typ), Asttypes.Public, Parsetree.Ptype_abstract) + } + | _ => + let uidentEndPos = p.endPos + let (args, res) = parseConstrDeclArgs(p) + let first = Some({ + let uidentLoc = mkLoc(uidentStartPos, uidentEndPos) + Ast_helper.Type.constructor( + ~loc=mkLoc(uidentStartPos, p.prevEndPos), + ~res?, + ~args, + Location.mkloc(uident, uidentLoc), + ) + }) + ( + None, + Asttypes.Public, + Parsetree.Ptype_variant(parseTypeConstructorDeclarations(p, ~first?)), + ) + } + | t => + Parser.err(p, Diagnostics.uident(t)) + /* TODO: is this a good idea? 
*/ + (None, Asttypes.Public, Parsetree.Ptype_abstract) + } + } + + and parseRecordOrBsObjectDecl = p => { + let startPos = p.Parser.startPos + Parser.expect(Lbrace, p) + switch p.Parser.token { + | DotDot | Dot => + let closedFlag = switch p.token { + | DotDot => + Parser.next(p) + Asttypes.Open + | Dot => + Parser.next(p) + Asttypes.Closed + | _ => Asttypes.Closed + } + + let fields = parseCommaDelimitedRegion( + ~grammar=Grammar.StringFieldDeclarations, + ~closing=Rbrace, + ~f=parseStringFieldDeclaration, + p, + ) + + Parser.expect(Rbrace, p) + let loc = mkLoc(startPos, p.prevEndPos) + let typ = makeBsObjType(~attrs=list{}, ~loc, ~closed=closedFlag, fields) |> parseTypeAlias(p) + + let typ = parseArrowTypeRest(~es6Arrow=true, ~startPos, typ, p) + (Some(typ), Asttypes.Public, Parsetree.Ptype_abstract) + | _ => + let attrs = parseAttributes(p) + switch p.Parser.token { + | String(_) => + let closedFlag = Asttypes.Closed + let fields = switch attrs { + | list{} => + parseCommaDelimitedRegion( + ~grammar=Grammar.StringFieldDeclarations, + ~closing=Rbrace, + ~f=parseStringFieldDeclaration, + p, + ) + | attrs => + let first = { + Parser.leaveBreadcrumb(p, Grammar.StringFieldDeclarations) + let field = switch parseStringFieldDeclaration(p) { + | Some(field) => field + | None => assert false + } + + /* parse comma after first */ + let () = switch p.Parser.token { + | Rbrace | Eof => () + | Comma => Parser.next(p) + | _ => Parser.expect(Comma, p) + } + + Parser.eatBreadcrumb(p) + switch field { + | Parsetree.Otag(label, _, ct) => Parsetree.Otag(label, attrs, ct) + | Oinherit(ct) => Oinherit(ct) + } + } + + list{ + first, + ...parseCommaDelimitedRegion( + ~grammar=Grammar.StringFieldDeclarations, + ~closing=Rbrace, + ~f=parseStringFieldDeclaration, + p, + ), + } + } + + Parser.expect(Rbrace, p) + let loc = mkLoc(startPos, p.prevEndPos) + let typ = + makeBsObjType(~attrs=list{}, ~loc, ~closed=closedFlag, fields) |> parseTypeAlias(p) + + let typ = parseArrowTypeRest(~es6Arrow=true, ~startPos, typ, p) + (Some(typ), Asttypes.Public, Parsetree.Ptype_abstract) + | _ => + Parser.leaveBreadcrumb(p, Grammar.RecordDecl) + let fields = switch attrs { + | list{} => + parseCommaDelimitedRegion( + ~grammar=Grammar.FieldDeclarations, + ~closing=Rbrace, + ~f=parseFieldDeclarationRegion, + p, + ) + | list{attr, ..._} as attrs => + let first = { + let field = parseFieldDeclaration(p) + Parser.optional(p, Comma) |> ignore + { + ...field, + Parsetree.pld_attributes: attrs, + pld_loc: { + ...field.Parsetree.pld_loc, + loc_start: (attr |> fst).loc.loc_start, + }, + } + } + + list{ + first, + ...parseCommaDelimitedRegion( + ~grammar=Grammar.FieldDeclarations, + ~closing=Rbrace, + ~f=parseFieldDeclarationRegion, + p, + ), + } + } + + let () = switch fields { + | list{} => + Parser.err(~startPos, p, Diagnostics.message("A record needs at least one field")) + | _ => () + } + + Parser.expect(Rbrace, p) + Parser.eatBreadcrumb(p) + (None, Asttypes.Public, Parsetree.Ptype_record(fields)) + } + } + } + + and parsePrivateEqOrRepr = p => { + Parser.expect(Private, p) + switch p.Parser.token { + | Lbrace => + let (manifest, _, kind) = parseRecordOrBsObjectDecl(p) + (manifest, Asttypes.Private, kind) + | Uident(_) => + let (manifest, _, kind) = parseTypeEquationOrConstrDecl(p) + (manifest, Asttypes.Private, kind) + | Bar | DotDot => + let (_, kind) = parseTypeRepresentation(p) + (None, Asttypes.Private, kind) + | t when Grammar.isTypExprStart(t) => ( + Some(parseTypExpr(p)), + Asttypes.Private, + Parsetree.Ptype_abstract, + ) + | _ 
=> + let (_, kind) = parseTypeRepresentation(p) + (None, Asttypes.Private, kind) + } + } + + /* + polymorphic-variant-type ::= + | [ tag-spec-first { | tag-spec } ] + | [> [ tag-spec ] { | tag-spec } ] + | [< [|] tag-spec-full { | tag-spec-full } [ > { `tag-name }+ ] ] + + tag-spec-first ::= `tag-name [ of typexpr ] + | [ typexpr ] | tag-spec + + tag-spec ::= `tag-name [ of typexpr ] + | typexpr + + tag-spec-full ::= `tag-name [ of [&] typexpr { & typexpr } ] + | typexpr + */ + and parsePolymorphicVariantType = (~attrs, p) => { + let startPos = p.Parser.startPos + Parser.expect(Lbracket, p) + switch p.token { + | GreaterThan => + Parser.next(p) + let rowFields = switch p.token { + | Rbracket => list{} + | Bar => parseTagSpecs(p) + | _ => + let rowField = parseTagSpec(p) + list{rowField, ...parseTagSpecs(p)} + } + + let variant = { + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Typ.variant(~attrs, ~loc, rowFields, Open, None) + } + Parser.expect(Rbracket, p) + variant + | LessThan => + Parser.next(p) + Parser.optional(p, Bar) |> ignore + let rowField = parseTagSpecFull(p) + let rowFields = parseTagSpecFulls(p) + let tagNames = if p.token === GreaterThan { + Parser.next(p) + let rec loop = p => + switch p.Parser.token { + | Rbracket => list{} + | _ => + let (ident, _loc) = parseHashIdent(~startPos=p.startPos, p) + list{ident, ...loop(p)} + } + + loop(p) + } else { + list{} + } + let variant = { + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Typ.variant(~attrs, ~loc, list{rowField, ...rowFields}, Closed, Some(tagNames)) + } + Parser.expect(Rbracket, p) + variant + | _ => + let rowFields1 = parseTagSpecFirst(p) + let rowFields2 = parseTagSpecs(p) + let variant = { + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Typ.variant(~attrs, ~loc, \"@"(rowFields1, rowFields2), Closed, None) + } + Parser.expect(Rbracket, p) + variant + } + } + + and parseTagSpecFulls = p => + switch p.Parser.token { + | Rbracket => list{} + | GreaterThan => list{} + | Bar => + Parser.next(p) + let rowField = parseTagSpecFull(p) + list{rowField, ...parseTagSpecFulls(p)} + | _ => list{} + } + + and parseTagSpecFull = p => { + let attrs = parseAttributes(p) + switch p.Parser.token { + | Hash => parsePolymorphicVariantTypeSpecHash(~attrs, ~full=true, p) + | _ => + let typ = parseTypExpr(~attrs, p) + Parsetree.Rinherit(typ) + } + } + + and parseTagSpecs = p => + switch p.Parser.token { + | Bar => + Parser.next(p) + let rowField = parseTagSpec(p) + list{rowField, ...parseTagSpecs(p)} + | _ => list{} + } + + and parseTagSpec = p => { + let attrs = parseAttributes(p) + switch p.Parser.token { + | Hash => parsePolymorphicVariantTypeSpecHash(~attrs, ~full=false, p) + | _ => + let typ = parseTypExpr(~attrs, p) + Parsetree.Rinherit(typ) + } + } + + and parseTagSpecFirst = p => { + let attrs = parseAttributes(p) + switch p.Parser.token { + | Bar => + Parser.next(p) + list{parseTagSpec(p)} + | Hash => list{parsePolymorphicVariantTypeSpecHash(~attrs, ~full=false, p)} + | _ => + let typ = parseTypExpr(~attrs, p) + Parser.expect(Bar, p) + list{Parsetree.Rinherit(typ), parseTagSpec(p)} + } + } + + and parsePolymorphicVariantTypeSpecHash = (~attrs, ~full, p): Parsetree.row_field => { + let startPos = p.Parser.startPos + let (ident, loc) = parseHashIdent(~startPos, p) + let rec loop = p => + switch p.Parser.token { + | Band when full => + Parser.next(p) + let rowField = parsePolymorphicVariantTypeArgs(p) + list{rowField, ...loop(p)} + | _ => list{} + } + + let (firstTuple, tagContainsAConstantEmptyConstructor) = switch 
p.Parser.token { + | Band when full => + Parser.next(p) + (list{parsePolymorphicVariantTypeArgs(p)}, true) + | Lparen => (list{parsePolymorphicVariantTypeArgs(p)}, false) + | _ => (list{}, true) + } + + let tuples = \"@"(firstTuple, loop(p)) + Parsetree.Rtag(Location.mkloc(ident, loc), attrs, tagContainsAConstantEmptyConstructor, tuples) + } + + and parsePolymorphicVariantTypeArgs = p => { + let startPos = p.Parser.startPos + Parser.expect(Lparen, p) + let args = parseCommaDelimitedRegion( + ~grammar=Grammar.TypExprList, + ~closing=Rparen, + ~f=parseTypExprRegion, + p, + ) + + Parser.expect(Rparen, p) + let attrs = list{} + let loc = mkLoc(startPos, p.prevEndPos) + switch args { + | list{{ptyp_desc: Ptyp_tuple(_)} as typ} as types => + if p.mode == ParseForTypeChecker { + typ + } else { + Ast_helper.Typ.tuple(~loc, ~attrs, types) + } + | list{typ} => typ + | types => Ast_helper.Typ.tuple(~loc, ~attrs, types) + } + } + + and parseTypeEquationAndRepresentation = p => + switch p.Parser.token { + | (Equal | Bar) as token => + if token == Bar { + Parser.expect(Equal, p) + } + Parser.next(p) + switch p.Parser.token { + | Uident(_) => parseTypeEquationOrConstrDecl(p) + | Lbrace => parseRecordOrBsObjectDecl(p) + | Private => parsePrivateEqOrRepr(p) + | Bar | DotDot => + let (priv, kind) = parseTypeRepresentation(p) + (None, priv, kind) + | _ => + let manifest = Some(parseTypExpr(p)) + switch p.Parser.token { + | Equal => + Parser.next(p) + let (priv, kind) = parseTypeRepresentation(p) + (manifest, priv, kind) + | _ => (manifest, Public, Parsetree.Ptype_abstract) + } + } + | _ => (None, Public, Parsetree.Ptype_abstract) + } + + /* type-definition ::= type [rec] typedef { and typedef } + * typedef ::= typeconstr-name [type-params] type-information + * type-information ::= [type-equation] [type-representation] { type-constraint } + * type-equation ::= = typexpr */ + and parseTypeDef = (~attrs, ~startPos, p) => { + Parser.leaveBreadcrumb(p, Grammar.TypeDef) + /* let attrs = match attrs with | Some attrs -> attrs | None -> parseAttributes p in */ + Parser.leaveBreadcrumb(p, Grammar.TypeConstrName) + let (name, loc) = parseLident(p) + let typeConstrName = Location.mkloc(name, loc) + Parser.eatBreadcrumb(p) + let params = { + let constrName = Location.mkloc(Longident.Lident(name), loc) + parseTypeParams(~parent=constrName, p) + } + let typeDef = { + let (manifest, priv, kind) = parseTypeEquationAndRepresentation(p) + let cstrs = parseTypeConstraints(p) + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Type.mk(~loc, ~attrs, ~priv, ~kind, ~params, ~cstrs, ~manifest?, typeConstrName) + } + + Parser.eatBreadcrumb(p) + typeDef + } + + and parseTypeExtension = (~params, ~attrs, ~name, p) => { + Parser.expect(PlusEqual, p) + let priv = if Parser.optional(p, Token.Private) { + Asttypes.Private + } else { + Asttypes.Public + } + + let constrStart = p.Parser.startPos + Parser.optional(p, Bar) |> ignore + let first = { + let (attrs, name, kind) = switch p.Parser.token { + | Bar => + Parser.next(p) + parseConstrDef(~parseAttrs=true, p) + | _ => parseConstrDef(~parseAttrs=true, p) + } + + let loc = mkLoc(constrStart, p.prevEndPos) + Ast_helper.Te.constructor(~loc, ~attrs, name, kind) + } + + let rec loop = (p, cs) => + switch p.Parser.token { + | Bar => + let startPos = p.Parser.startPos + Parser.next(p) + let (attrs, name, kind) = parseConstrDef(~parseAttrs=true, p) + let extConstr = Ast_helper.Te.constructor( + ~attrs, + ~loc=mkLoc(startPos, p.prevEndPos), + name, + kind, + ) + + loop(p, list{extConstr, 
...cs}) + | _ => List.rev(cs) + } + + let constructors = loop(p, list{first}) + Ast_helper.Te.mk(~attrs, ~params, ~priv, name, constructors) + } + + and parseTypeDefinitions = (~attrs, ~name, ~params, ~startPos, p) => { + let typeDef = { + let (manifest, priv, kind) = parseTypeEquationAndRepresentation(p) + let cstrs = parseTypeConstraints(p) + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Type.mk( + ~loc, + ~attrs, + ~priv, + ~kind, + ~params, + ~cstrs, + ~manifest?, + {...name, txt: lidentOfPath(name.Location.txt)}, + ) + } + + let rec loop = (p, defs) => { + let startPos = p.Parser.startPos + let attrs = parseAttributesAndBinding(p) + switch p.Parser.token { + | And => + Parser.next(p) + let attrs = switch p.token { + | Export => + let exportLoc = mkLoc(p.startPos, p.endPos) + Parser.next(p) + let genTypeAttr = (Location.mkloc("genType", exportLoc), Parsetree.PStr(list{})) + list{genTypeAttr, ...attrs} + | _ => attrs + } + + let typeDef = parseTypeDef(~attrs, ~startPos, p) + loop(p, list{typeDef, ...defs}) + | _ => List.rev(defs) + } + } + + loop(p, list{typeDef}) + } + + /* TODO: decide if we really want type extensions (eg. type x += Blue) + * It adds quite a bit of complexity that can be avoided, + * implemented for now. Needed to get a feel for the complexities of + * this territory of the grammar */ + and parseTypeDefinitionOrExtension = (~attrs, p) => { + let startPos = p.Parser.startPos + Parser.expect(Token.Typ, p) + let recFlag = switch p.token { + | Rec => + Parser.next(p) + Asttypes.Recursive + | Lident("nonrec") => + Parser.next(p) + Asttypes.Nonrecursive + | _ => Asttypes.Nonrecursive + } + + let name = parseValuePath(p) + let params = parseTypeParams(~parent=name, p) + switch p.Parser.token { + | PlusEqual => TypeExt(parseTypeExtension(~params, ~attrs, ~name, p)) + | _ => + let typeDefs = parseTypeDefinitions(~attrs, ~name, ~params, ~startPos, p) + TypeDef({recFlag: recFlag, types: typeDefs}) + } + } + + and parsePrimitive = p => + switch p.Parser.token { + | String(s) => + Parser.next(p) + Some(s) + | _ => None + } + + and parsePrimitives = p => + switch parseRegion(~grammar=Grammar.Primitive, ~f=parsePrimitive, p) { + | list{} => + let msg = "An external definition should have at least one primitive. 
Example: \"setTimeout\"" + Parser.err(p, Diagnostics.message(msg)) + list{} + | primitives => primitives + } + + /* external value-name : typexp = external-declaration */ + and parseExternalDef = (~attrs, p) => { + let startPos = p.Parser.startPos + Parser.leaveBreadcrumb(p, Grammar.External) + Parser.expect(Token.External, p) + let (name, loc) = parseLident(p) + let name = Location.mkloc(name, loc) + Parser.expect(~grammar=Grammar.TypeExpression, Colon, p) + let typExpr = parseTypExpr(p) + Parser.expect(Equal, p) + let prim = parsePrimitives(p) + let loc = mkLoc(startPos, p.prevEndPos) + let vb = Ast_helper.Val.mk(~loc, ~attrs, ~prim, name, typExpr) + Parser.eatBreadcrumb(p) + vb + } + + /* constr-def ::= + * | constr-decl + * | constr-name = constr + * + * constr-decl ::= constr-name constr-args + * constr-name ::= uident + * constr ::= path-uident */ + and parseConstrDef = (~parseAttrs, p) => { + let attrs = if parseAttrs { + parseAttributes(p) + } else { + list{} + } + let name = switch p.Parser.token { + | Uident(name) => + let loc = mkLoc(p.startPos, p.endPos) + Parser.next(p) + Location.mkloc(name, loc) + | t => + Parser.err(p, Diagnostics.uident(t)) + Location.mknoloc("_") + } + + let kind = switch p.Parser.token { + | Lparen => + let (args, res) = parseConstrDeclArgs(p) + Parsetree.Pext_decl(args, res) + | Equal => + Parser.next(p) + let longident = parseModuleLongIdent(~lowercase=false, p) + Parsetree.Pext_rebind(longident) + | _ => Parsetree.Pext_decl(Pcstr_tuple(list{}), None) + } + + (attrs, name, kind) + } + + /* + * exception-definition ::= + * | exception constr-decl + * ∣ exception constr-name = constr + * + * constr-name ::= uident + * constr ::= long_uident */ + and parseExceptionDef = (~attrs, p) => { + let startPos = p.Parser.startPos + Parser.expect(Token.Exception, p) + let (_, name, kind) = parseConstrDef(~parseAttrs=false, p) + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Te.constructor(~loc, ~attrs, name, kind) + } + + /* module structure on the file level */ + @progress((Parser.next, Parser.expect, Parser.checkProgress)) + and parseImplementation = (p): Parsetree.structure => + parseRegion(p, ~grammar=Grammar.Implementation, ~f=parseStructureItemRegion) + + and parseStructureItemRegion = p => { + let startPos = p.Parser.startPos + let attrs = parseAttributes(p) + switch p.Parser.token { + | Open => + let openDescription = parseOpenDescription(~attrs, p) + Parser.optional(p, Semicolon) |> ignore + let loc = mkLoc(startPos, p.prevEndPos) + Some(Ast_helper.Str.open_(~loc, openDescription)) + | Let => + let (recFlag, letBindings) = parseLetBindings(~attrs, p) + Parser.optional(p, Semicolon) |> ignore + let loc = mkLoc(startPos, p.prevEndPos) + Some(Ast_helper.Str.value(~loc, recFlag, letBindings)) + | Typ => + Parser.beginRegion(p) + switch parseTypeDefinitionOrExtension(~attrs, p) { + | TypeDef({recFlag, types}) => + Parser.optional(p, Semicolon) |> ignore + let loc = mkLoc(startPos, p.prevEndPos) + Parser.endRegion(p) + Some(Ast_helper.Str.type_(~loc, recFlag, types)) + | TypeExt(ext) => + Parser.optional(p, Semicolon) |> ignore + let loc = mkLoc(startPos, p.prevEndPos) + Parser.endRegion(p) + Some(Ast_helper.Str.type_extension(~loc, ext)) + } + | External => + let externalDef = parseExternalDef(~attrs, p) + Parser.optional(p, Semicolon) |> ignore + let loc = mkLoc(startPos, p.prevEndPos) + Some(Ast_helper.Str.primitive(~loc, externalDef)) + | Import => + let importDescr = parseJsImport(~startPos, ~attrs, p) + Parser.optional(p, Semicolon) |> ignore + let 
loc = mkLoc(startPos, p.prevEndPos) + let structureItem = JsFfi.toParsetree(importDescr) + Some({...structureItem, pstr_loc: loc}) + | Exception => + let exceptionDef = parseExceptionDef(~attrs, p) + Parser.optional(p, Semicolon) |> ignore + let loc = mkLoc(startPos, p.prevEndPos) + Some(Ast_helper.Str.exception_(~loc, exceptionDef)) + | Include => + let includeStatement = parseIncludeStatement(~attrs, p) + Parser.optional(p, Semicolon) |> ignore + let loc = mkLoc(startPos, p.prevEndPos) + Some(Ast_helper.Str.include_(~loc, includeStatement)) + | Export => + let structureItem = parseJsExport(~attrs, p) + Parser.optional(p, Semicolon) |> ignore + let loc = mkLoc(startPos, p.prevEndPos) + Some({...structureItem, pstr_loc: loc}) + | Module => + let structureItem = parseModuleOrModuleTypeImplOrPackExpr(~attrs, p) + Parser.optional(p, Semicolon) |> ignore + let loc = mkLoc(startPos, p.prevEndPos) + Some({...structureItem, pstr_loc: loc}) + | AtAt => + let attr = parseStandaloneAttribute(p) + Parser.optional(p, Semicolon) |> ignore + let loc = mkLoc(startPos, p.prevEndPos) + Some(Ast_helper.Str.attribute(~loc, attr)) + | PercentPercent => + let extension = parseExtension(~moduleLanguage=true, p) + Parser.optional(p, Semicolon) |> ignore + let loc = mkLoc(startPos, p.prevEndPos) + Some(Ast_helper.Str.extension(~attrs, ~loc, extension)) + | token when Grammar.isExprStart(token) => + let prevEndPos = p.Parser.endPos + let exp = parseExpr(p) + Parser.optional(p, Semicolon) |> ignore + let loc = mkLoc(startPos, p.prevEndPos) + Parser.checkProgress(~prevEndPos, ~result=Ast_helper.Str.eval(~loc, ~attrs, exp), p) + | _ => None + } + } + + and parseJsImport = (~startPos, ~attrs, p) => { + Parser.expect(Token.Import, p) + let importSpec = switch p.Parser.token { + | Token.Lident(_) | Token.At => + let decl = switch parseJsFfiDeclaration(p) { + | Some(decl) => decl + | None => assert false + } + + JsFfi.Default(decl) + | _ => JsFfi.Spec(parseJsFfiDeclarations(p)) + } + + let scope = parseJsFfiScope(p) + let loc = mkLoc(startPos, p.prevEndPos) + JsFfi.importDescr(~attrs, ~importSpec, ~scope, ~loc) + } + + and parseJsExport = (~attrs, p) => { + let exportStart = p.Parser.startPos + Parser.expect(Token.Export, p) + let exportLoc = mkLoc(exportStart, p.prevEndPos) + let genTypeAttr = (Location.mkloc("genType", exportLoc), Parsetree.PStr(list{})) + let attrs = list{genTypeAttr, ...attrs} + switch p.Parser.token { + | Typ => + switch parseTypeDefinitionOrExtension(~attrs, p) { + | TypeDef({recFlag, types}) => Ast_helper.Str.type_(recFlag, types) + | TypeExt(ext) => Ast_helper.Str.type_extension(ext) + } + /* Let */ | _ => + let (recFlag, letBindings) = parseLetBindings(~attrs, p) + Ast_helper.Str.value(recFlag, letBindings) + } + } + + and parseJsFfiScope = p => + switch p.Parser.token { + | Token.Lident("from") => + Parser.next(p) + switch p.token { + | String(s) => + Parser.next(p) + JsFfi.Module(s) + | Uident(_) | Lident(_) => + let value = parseIdentPath(p) + JsFfi.Scope(value) + | _ => JsFfi.Global + } + | _ => JsFfi.Global + } + + and parseJsFfiDeclarations = p => { + Parser.expect(Token.Lbrace, p) + let decls = parseCommaDelimitedRegion( + ~grammar=Grammar.JsFfiImport, + ~closing=Rbrace, + ~f=parseJsFfiDeclaration, + p, + ) + + Parser.expect(Rbrace, p) + decls + } + + and parseJsFfiDeclaration = p => { + let startPos = p.Parser.startPos + let attrs = parseAttributes(p) + switch p.Parser.token { + | Lident(_) => + let (ident, _) = parseLident(p) + let alias = switch p.token { + | As => + Parser.next(p) + 
let (ident, _) = parseLident(p) + ident + | _ => ident + } + + Parser.expect(Token.Colon, p) + let typ = parseTypExpr(p) + let loc = mkLoc(startPos, p.prevEndPos) + Some(JsFfi.decl(~loc, ~alias, ~attrs, ~name=ident, ~typ)) + | _ => None + } + } + + /* include-statement ::= include module-expr */ + and parseIncludeStatement = (~attrs, p) => { + let startPos = p.Parser.startPos + Parser.expect(Token.Include, p) + let modExpr = parseModuleExpr(p) + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Incl.mk(~loc, ~attrs, modExpr) + } + + and parseAtomicModuleExpr = p => { + let startPos = p.Parser.startPos + switch p.Parser.token { + | Uident(_ident) => + let longident = parseModuleLongIdent(~lowercase=false, p) + Ast_helper.Mod.ident(~loc=longident.loc, longident) + | Lbrace => + Parser.next(p) + let structure = Ast_helper.Mod.structure( + parseDelimitedRegion( + ~grammar=Grammar.Structure, + ~closing=Rbrace, + ~f=parseStructureItemRegion, + p, + ), + ) + Parser.expect(Rbrace, p) + let endPos = p.prevEndPos + {...structure, pmod_loc: mkLoc(startPos, endPos)} + | Lparen => + Parser.next(p) + let modExpr = switch p.token { + | Rparen => Ast_helper.Mod.structure(~loc=mkLoc(startPos, p.prevEndPos), list{}) + | _ => parseConstrainedModExpr(p) + } + + Parser.expect(Rparen, p) + modExpr + | Lident("unpack") => + /* TODO: should this be made a keyword?? */ + Parser.next(p) + Parser.expect(Lparen, p) + let expr = parseExpr(p) + switch p.Parser.token { + | Colon => + let colonStart = p.Parser.startPos + Parser.next(p) + let attrs = parseAttributes(p) + let packageType = parsePackageType(~startPos=colonStart, ~attrs, p) + Parser.expect(Rparen, p) + let loc = mkLoc(startPos, p.prevEndPos) + let constraintExpr = Ast_helper.Exp.constraint_(~loc, expr, packageType) + + Ast_helper.Mod.unpack(~loc, constraintExpr) + | _ => + Parser.expect(Rparen, p) + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Mod.unpack(~loc, expr) + } + | Percent => + let extension = parseExtension(p) + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Mod.extension(~loc, extension) + | token => + Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs)) + Recover.defaultModuleExpr() + } + } + + and parsePrimaryModExpr = p => { + let startPos = p.Parser.startPos + let modExpr = parseAtomicModuleExpr(p) + let rec loop = (p, modExpr) => + switch p.Parser.token { + | Lparen when p.prevEndPos.pos_lnum === p.startPos.pos_lnum => + loop(p, parseModuleApplication(p, modExpr)) + | _ => modExpr + } + + let modExpr = loop(p, modExpr) + {...modExpr, pmod_loc: mkLoc(startPos, p.prevEndPos)} + } + + /* + * functor-arg ::= + * | uident : modtype + * | _ : modtype + * | modtype --> "punning" for _ : modtype + * | attributes functor-arg + */ + and parseFunctorArg = p => { + let startPos = p.Parser.startPos + let attrs = parseAttributes(p) + switch p.Parser.token { + | Uident(ident) => + Parser.next(p) + let uidentEndPos = p.prevEndPos + switch p.Parser.token { + | Colon => + Parser.next(p) + let moduleType = parseModuleType(p) + let loc = mkLoc(startPos, uidentEndPos) + let argName = Location.mkloc(ident, loc) + Some(attrs, argName, Some(moduleType), startPos) + | Dot => + Parser.next(p) + let moduleType = { + let moduleLongIdent = parseModuleLongIdentTail( + ~lowercase=false, + p, + startPos, + Longident.Lident(ident), + ) + Ast_helper.Mty.ident(~loc=moduleLongIdent.loc, moduleLongIdent) + } + + let argName = Location.mknoloc("_") + Some(attrs, argName, Some(moduleType), startPos) + | _ => + let loc = mkLoc(startPos, uidentEndPos) + let 
modIdent = Location.mkloc(Longident.Lident(ident), loc) + let moduleType = Ast_helper.Mty.ident(~loc, modIdent) + let argName = Location.mknoloc("_") + Some(attrs, argName, Some(moduleType), startPos) + } + | Underscore => + Parser.next(p) + let argName = Location.mkloc("_", mkLoc(startPos, p.prevEndPos)) + Parser.expect(Colon, p) + let moduleType = parseModuleType(p) + Some(attrs, argName, Some(moduleType), startPos) + | _ => None + } + } + + and parseFunctorArgs = p => { + let startPos = p.Parser.startPos + Parser.expect(Lparen, p) + let args = parseCommaDelimitedRegion( + ~grammar=Grammar.FunctorArgs, + ~closing=Rparen, + ~f=parseFunctorArg, + p, + ) + + Parser.expect(Rparen, p) + switch args { + | list{} => list{(list{}, Location.mkloc("*", mkLoc(startPos, p.prevEndPos)), None, startPos)} + | args => args + } + } + + and parseFunctorModuleExpr = p => { + let startPos = p.Parser.startPos + let args = parseFunctorArgs(p) + let returnType = switch p.Parser.token { + | Colon => + Parser.next(p) + Some(parseModuleType(~es6Arrow=false, p)) + | _ => None + } + + Parser.expect(EqualGreater, p) + let rhsModuleExpr = { + let modExpr = parseModuleExpr(p) + switch returnType { + | Some(modType) => + Ast_helper.Mod.constraint_( + ~loc=mkLoc(modExpr.pmod_loc.loc_start, modType.Parsetree.pmty_loc.loc_end), + modExpr, + modType, + ) + | None => modExpr + } + } + + let endPos = p.prevEndPos + let modExpr = List.fold_right( + ((attrs, name, moduleType, startPos), acc) => + Ast_helper.Mod.functor_(~loc=mkLoc(startPos, endPos), ~attrs, name, moduleType, acc), + args, + rhsModuleExpr, + ) + + {...modExpr, pmod_loc: mkLoc(startPos, endPos)} + } + + /* module-expr ::= + * | module-path + * ∣ { structure-items } + * ∣ functorArgs => module-expr + * ∣ module-expr(module-expr) + * ∣ ( module-expr ) + * ∣ ( module-expr : module-type ) + * | extension + * | attributes module-expr */ + and parseModuleExpr = p => { + let attrs = parseAttributes(p) + let modExpr = if isEs6ArrowFunctor(p) { + parseFunctorModuleExpr(p) + } else { + parsePrimaryModExpr(p) + } + + {...modExpr, pmod_attributes: List.concat(list{modExpr.pmod_attributes, attrs})} + } + + and parseConstrainedModExpr = p => { + let modExpr = parseModuleExpr(p) + switch p.Parser.token { + | Colon => + Parser.next(p) + let modType = parseModuleType(p) + let loc = mkLoc(modExpr.pmod_loc.loc_start, modType.pmty_loc.loc_end) + Ast_helper.Mod.constraint_(~loc, modExpr, modType) + | _ => modExpr + } + } + + and parseConstrainedModExprRegion = p => + if Grammar.isModExprStart(p.Parser.token) { + Some(parseConstrainedModExpr(p)) + } else { + None + } + + and parseModuleApplication = (p, modExpr) => { + let startPos = p.Parser.startPos + Parser.expect(Lparen, p) + let args = parseCommaDelimitedRegion( + ~grammar=Grammar.ModExprList, + ~closing=Rparen, + ~f=parseConstrainedModExprRegion, + p, + ) + + Parser.expect(Rparen, p) + let args = switch args { + | list{} => + let loc = mkLoc(startPos, p.prevEndPos) + list{Ast_helper.Mod.structure(~loc, list{})} + | args => args + } + + List.fold_left( + (modExpr, arg) => + Ast_helper.Mod.apply( + ~loc=mkLoc(modExpr.Parsetree.pmod_loc.loc_start, arg.Parsetree.pmod_loc.loc_end), + modExpr, + arg, + ), + modExpr, + args, + ) + } + + and parseModuleOrModuleTypeImplOrPackExpr = (~attrs, p) => { + let startPos = p.Parser.startPos + Parser.expect(Module, p) + switch p.Parser.token { + | Typ => parseModuleTypeImpl(~attrs, startPos, p) + | Lparen => + let expr = parseFirstClassModuleExpr(~startPos, p) + Ast_helper.Str.eval(~attrs, expr) 
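/* Editor's note — illustrative sketch only, not part of the original diff.
 * The module-expr grammar comment a few definitions above corresponds to
 * surface syntax such as the following (all names here are hypothetical):
 *
 *   module type Ord = {
 *     type t
 *     let cmp: (t, t) => int
 *   }
 *   module MakeSet = (O: Ord) => {        // functorArgs => module-expr
 *     type elt = O.t
 *     let singleton = (x: elt) => list{x}
 *   }
 *   module IntSet = MakeSet({             // module-expr(module-expr)
 *     type t = int
 *     let cmp = (a, b) => a - b
 *   })
 */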
+ | _ => parseMaybeRecModuleBinding(~attrs, ~startPos, p) + } + } + + and parseModuleTypeImpl = (~attrs, startPos, p) => { + Parser.expect(Typ, p) + let nameStart = p.Parser.startPos + let name = switch p.Parser.token { + | List => + Parser.next(p) + let loc = mkLoc(nameStart, p.prevEndPos) + Location.mkloc("list", loc) + | Lident(ident) => + Parser.next(p) + let loc = mkLoc(nameStart, p.prevEndPos) + Location.mkloc(ident, loc) + | Uident(ident) => + Parser.next(p) + let loc = mkLoc(nameStart, p.prevEndPos) + Location.mkloc(ident, loc) + | t => + Parser.err(p, Diagnostics.uident(t)) + Location.mknoloc("_") + } + + Parser.expect(Equal, p) + let moduleType = parseModuleType(p) + let moduleTypeDeclaration = Ast_helper.Mtd.mk( + ~attrs, + ~loc=mkLoc(nameStart, p.prevEndPos), + ~typ=moduleType, + name, + ) + + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Str.modtype(~loc, moduleTypeDeclaration) + } + + /* definition ::= + ∣ module rec module-name : module-type = module-expr { and module-name + : module-type = module-expr } */ + and parseMaybeRecModuleBinding = (~attrs, ~startPos, p) => + switch p.Parser.token { + | Token.Rec => + Parser.next(p) + Ast_helper.Str.rec_module(parseModuleBindings(~startPos, ~attrs, p)) + | _ => Ast_helper.Str.module_(parseModuleBinding(~attrs, ~startPos=p.Parser.startPos, p)) + } + + and parseModuleBinding = (~attrs, ~startPos, p) => { + let name = switch p.Parser.token { + | Uident(ident) => + let startPos = p.Parser.startPos + Parser.next(p) + let loc = mkLoc(startPos, p.prevEndPos) + Location.mkloc(ident, loc) + | t => + Parser.err(p, Diagnostics.uident(t)) + Location.mknoloc("_") + } + + let body = parseModuleBindingBody(p) + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Mb.mk(~attrs, ~loc, name, body) + } + + and parseModuleBindingBody = p => { + /* TODO: make required with good error message when rec module binding */ + let returnModType = switch p.Parser.token { + | Colon => + Parser.next(p) + Some(parseModuleType(p)) + | _ => None + } + + Parser.expect(Equal, p) + let modExpr = parseModuleExpr(p) + switch returnModType { + | Some(modType) => + Ast_helper.Mod.constraint_( + ~loc=mkLoc(modType.pmty_loc.loc_start, modExpr.pmod_loc.loc_end), + modExpr, + modType, + ) + | None => modExpr + } + } + + /* module-name : module-type = module-expr + * { and module-name : module-type = module-expr } */ + and parseModuleBindings = (~attrs, ~startPos, p) => { + let rec loop = (p, acc) => { + let startPos = p.Parser.startPos + let attrs = parseAttributesAndBinding(p) + switch p.Parser.token { + | And => + Parser.next(p) + ignore(Parser.optional(p, Module)) /* over-parse for fault-tolerance */ + let modBinding = parseModuleBinding(~attrs, ~startPos, p) + loop(p, list{modBinding, ...acc}) + | _ => List.rev(acc) + } + } + + let first = parseModuleBinding(~attrs, ~startPos, p) + loop(p, list{first}) + } + + and parseAtomicModuleType = p => { + let startPos = p.Parser.startPos + let moduleType = switch p.Parser.token { + | Uident(_) | Lident(_) | List => + /* Ocaml allows module types to end with lowercase: module Foo : bar = { ... 
} + * lets go with uppercase terminal for now */ + let moduleLongIdent = parseModuleLongIdent(~lowercase=true, p) + Ast_helper.Mty.ident(~loc=moduleLongIdent.loc, moduleLongIdent) + | Lparen => + Parser.next(p) + let mty = parseModuleType(p) + Parser.expect(Rparen, p) + {...mty, pmty_loc: mkLoc(startPos, p.prevEndPos)} + | Lbrace => + Parser.next(p) + let spec = parseDelimitedRegion( + ~grammar=Grammar.Signature, + ~closing=Rbrace, + ~f=parseSignatureItemRegion, + p, + ) + + Parser.expect(Rbrace, p) + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Mty.signature(~loc, spec) + | Module => + /* TODO: check if this is still atomic when implementing first class modules */ + parseModuleTypeOf(p) + | Percent => + let extension = parseExtension(p) + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Mty.extension(~loc, extension) + | token => + Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs)) + Recover.defaultModuleType() + } + + let moduleTypeLoc = mkLoc(startPos, p.prevEndPos) + {...moduleType, pmty_loc: moduleTypeLoc} + } + + and parseFunctorModuleType = p => { + let startPos = p.Parser.startPos + let args = parseFunctorArgs(p) + Parser.expect(EqualGreater, p) + let rhs = parseModuleType(p) + let endPos = p.prevEndPos + let modType = List.fold_right( + ((attrs, name, moduleType, startPos), acc) => + Ast_helper.Mty.functor_(~loc=mkLoc(startPos, endPos), ~attrs, name, moduleType, acc), + args, + rhs, + ) + + {...modType, pmty_loc: mkLoc(startPos, endPos)} + } + + /* Module types are the module-level equivalent of type expressions: they + * specify the general shape and type properties of modules. + * + * module-type ::= + * | modtype-path + * | { signature } + * | ( module-type ) --> parenthesized module-type + * | functor-args => module-type --> functor + * | module-type => module-type --> functor + * | module type of module-expr + * | attributes module-type + * | module-type with-mod-constraints + * | extension + */ + and parseModuleType = (~es6Arrow=true, ~with_=true, p) => { + let attrs = parseAttributes(p) + let modty = if es6Arrow && isEs6ArrowFunctor(p) { + parseFunctorModuleType(p) + } else { + let modty = parseAtomicModuleType(p) + switch p.Parser.token { + | EqualGreater when es6Arrow === true => + Parser.next(p) + let rhs = parseModuleType(~with_=false, p) + let str = Location.mknoloc("_") + let loc = mkLoc(modty.pmty_loc.loc_start, p.prevEndPos) + Ast_helper.Mty.functor_(~loc, str, Some(modty), rhs) + | _ => modty + } + } + + let moduleType = { + ...modty, + pmty_attributes: List.concat(list{modty.pmty_attributes, attrs}), + } + if with_ { + parseWithConstraints(moduleType, p) + } else { + moduleType + } + } + + and parseWithConstraints = (moduleType, p) => + switch p.Parser.token { + | With => + Parser.next(p) + let first = parseWithConstraint(p) + let rec loop = (p, acc) => + switch p.Parser.token { + | And => + Parser.next(p) + loop(p, list{parseWithConstraint(p), ...acc}) + | _ => List.rev(acc) + } + + let constraints = loop(p, list{first}) + let loc = mkLoc(moduleType.pmty_loc.loc_start, p.prevEndPos) + Ast_helper.Mty.with_(~loc, moduleType, constraints) + | _ => moduleType + } + + /* mod-constraint ::= + * | type typeconstr type-equation type-constraints? 
+ * ∣ type typeconstr-name := typexpr + * ∣ module module-path = extended-module-path + * ∣ module module-path := extended-module-path + * + * TODO: split this up into multiple functions, better errors */ + and parseWithConstraint = p => + switch p.Parser.token { + | Module => + Parser.next(p) + let modulePath = parseModuleLongIdent(~lowercase=false, p) + switch p.Parser.token { + | ColonEqual => + Parser.next(p) + let lident = parseModuleLongIdent(~lowercase=false, p) + Parsetree.Pwith_modsubst(modulePath, lident) + | Equal => + Parser.next(p) + let lident = parseModuleLongIdent(~lowercase=false, p) + Parsetree.Pwith_module(modulePath, lident) + | token => + /* TODO: revisit */ + Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs)) + let lident = parseModuleLongIdent(~lowercase=false, p) + Parsetree.Pwith_modsubst(modulePath, lident) + } + | Typ => + Parser.next(p) + let typeConstr = parseValuePath(p) + let params = parseTypeParams(~parent=typeConstr, p) + switch p.Parser.token { + | ColonEqual => + Parser.next(p) + let typExpr = parseTypExpr(p) + Parsetree.Pwith_typesubst( + typeConstr, + Ast_helper.Type.mk( + ~loc=typeConstr.loc, + ~params, + ~manifest=typExpr, + Location.mkloc(Longident.last(typeConstr.txt), typeConstr.loc), + ), + ) + | Equal => + Parser.next(p) + let typExpr = parseTypExpr(p) + let typeConstraints = parseTypeConstraints(p) + Parsetree.Pwith_type( + typeConstr, + Ast_helper.Type.mk( + ~loc=typeConstr.loc, + ~params, + ~manifest=typExpr, + ~cstrs=typeConstraints, + Location.mkloc(Longident.last(typeConstr.txt), typeConstr.loc), + ), + ) + | token => + /* TODO: revisit */ + Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs)) + let typExpr = parseTypExpr(p) + let typeConstraints = parseTypeConstraints(p) + Parsetree.Pwith_type( + typeConstr, + Ast_helper.Type.mk( + ~loc=typeConstr.loc, + ~params, + ~manifest=typExpr, + ~cstrs=typeConstraints, + Location.mkloc(Longident.last(typeConstr.txt), typeConstr.loc), + ), + ) + } + | token => + Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs)) + exit(-1) + } /* TODO: handle this case */ + + and parseModuleTypeOf = p => { + let startPos = p.Parser.startPos + Parser.expect(Module, p) + Parser.expect(Typ, p) + Parser.expect(Of, p) + let moduleExpr = parseModuleExpr(p) + Ast_helper.Mty.typeof_(~loc=mkLoc(startPos, p.prevEndPos), moduleExpr) + } + + /* module signature on the file level */ + @progress((Parser.next, Parser.expect, Parser.checkProgress)) + and parseSpecification = p => + parseRegion(~grammar=Grammar.Specification, ~f=parseSignatureItemRegion, p) + + and parseSignatureItemRegion = p => { + let startPos = p.Parser.startPos + let attrs = parseAttributes(p) + switch p.Parser.token { + | Let => + Parser.beginRegion(p) + let valueDesc = parseSignLetDesc(~attrs, p) + Parser.optional(p, Semicolon) |> ignore + let loc = mkLoc(startPos, p.prevEndPos) + Parser.endRegion(p) + Some(Ast_helper.Sig.value(~loc, valueDesc)) + | Typ => + Parser.beginRegion(p) + switch parseTypeDefinitionOrExtension(~attrs, p) { + | TypeDef({recFlag, types}) => + Parser.optional(p, Semicolon) |> ignore + let loc = mkLoc(startPos, p.prevEndPos) + Parser.endRegion(p) + Some(Ast_helper.Sig.type_(~loc, recFlag, types)) + | TypeExt(ext) => + Parser.optional(p, Semicolon) |> ignore + let loc = mkLoc(startPos, p.prevEndPos) + Parser.endRegion(p) + Some(Ast_helper.Sig.type_extension(~loc, ext)) + } + | External => + let externalDef = parseExternalDef(~attrs, p) + Parser.optional(p, Semicolon) |> ignore + let loc = mkLoc(startPos, 
p.prevEndPos) + Some(Ast_helper.Sig.value(~loc, externalDef)) + | Exception => + let exceptionDef = parseExceptionDef(~attrs, p) + Parser.optional(p, Semicolon) |> ignore + let loc = mkLoc(startPos, p.prevEndPos) + Some(Ast_helper.Sig.exception_(~loc, exceptionDef)) + | Open => + let openDescription = parseOpenDescription(~attrs, p) + Parser.optional(p, Semicolon) |> ignore + let loc = mkLoc(startPos, p.prevEndPos) + Some(Ast_helper.Sig.open_(~loc, openDescription)) + | Include => + Parser.next(p) + let moduleType = parseModuleType(p) + let includeDescription = Ast_helper.Incl.mk( + ~loc=mkLoc(startPos, p.prevEndPos), + ~attrs, + moduleType, + ) + + Parser.optional(p, Semicolon) |> ignore + let loc = mkLoc(startPos, p.prevEndPos) + Some(Ast_helper.Sig.include_(~loc, includeDescription)) + | Module => + Parser.next(p) + switch p.Parser.token { + | Uident(_) => + let modDecl = parseModuleDeclarationOrAlias(~attrs, p) + Parser.optional(p, Semicolon) |> ignore + let loc = mkLoc(startPos, p.prevEndPos) + Some(Ast_helper.Sig.module_(~loc, modDecl)) + | Rec => + let recModule = parseRecModuleSpec(~attrs, ~startPos, p) + Parser.optional(p, Semicolon) |> ignore + let loc = mkLoc(startPos, p.prevEndPos) + Some(Ast_helper.Sig.rec_module(~loc, recModule)) + | Typ => Some(parseModuleTypeDeclaration(~attrs, ~startPos, p)) + | _t => + let modDecl = parseModuleDeclarationOrAlias(~attrs, p) + Parser.optional(p, Semicolon) |> ignore + let loc = mkLoc(startPos, p.prevEndPos) + Some(Ast_helper.Sig.module_(~loc, modDecl)) + } + | AtAt => + let attr = parseStandaloneAttribute(p) + Parser.optional(p, Semicolon) |> ignore + let loc = mkLoc(startPos, p.prevEndPos) + Some(Ast_helper.Sig.attribute(~loc, attr)) + | PercentPercent => + let extension = parseExtension(~moduleLanguage=true, p) + Parser.optional(p, Semicolon) |> ignore + let loc = mkLoc(startPos, p.prevEndPos) + Some(Ast_helper.Sig.extension(~attrs, ~loc, extension)) + | Import => + Parser.next(p) + parseSignatureItemRegion(p) + | _ => None + } + } + + /* module rec module-name : module-type { and module-name: module-type } */ + and parseRecModuleSpec = (~attrs, ~startPos, p) => { + Parser.expect(Rec, p) + let rec loop = (p, spec) => { + let startPos = p.Parser.startPos + let attrs = parseAttributesAndBinding(p) + switch p.Parser.token { + | And => + /* TODO: give a good error message when with constraint, no parens + * and ASet: (Set.S with type elt = A.t) + * and BTree: (Btree.S with type elt = A.t) + * Without parens, the `and` signals the start of another + * `with-constraint` + */ + Parser.expect(And, p) + let decl = parseRecModuleDeclaration(~attrs, ~startPos, p) + loop(p, list{decl, ...spec}) + | _ => List.rev(spec) + } + } + + let first = parseRecModuleDeclaration(~attrs, ~startPos, p) + loop(p, list{first}) + } + + /* module-name : module-type */ + and parseRecModuleDeclaration = (~attrs, ~startPos, p) => { + let name = switch p.Parser.token { + | Uident(modName) => + let loc = mkLoc(p.startPos, p.endPos) + Parser.next(p) + Location.mkloc(modName, loc) + | t => + Parser.err(p, Diagnostics.uident(t)) + Location.mknoloc("_") + } + + Parser.expect(Colon, p) + let modType = parseModuleType(p) + Ast_helper.Md.mk(~loc=mkLoc(startPos, p.prevEndPos), ~attrs, name, modType) + } + + and parseModuleDeclarationOrAlias = (~attrs, p) => { + let startPos = p.Parser.startPos + let moduleName = switch p.Parser.token { + | Uident(ident) => + let loc = mkLoc(p.Parser.startPos, p.endPos) + Parser.next(p) + Location.mkloc(ident, loc) + | t => + Parser.err(p, 
Diagnostics.uident(t)) + Location.mknoloc("_") + } + + let body = switch p.Parser.token { + | Colon => + Parser.next(p) + parseModuleType(p) + | Equal => + Parser.next(p) + let lident = parseModuleLongIdent(~lowercase=false, p) + Ast_helper.Mty.alias(lident) + | token => + Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs)) + Recover.defaultModuleType() + } + + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Md.mk(~loc, ~attrs, moduleName, body) + } + + and parseModuleTypeDeclaration = (~attrs, ~startPos, p) => { + Parser.expect(Typ, p) + let moduleName = switch p.Parser.token { + | Uident(ident) => + let loc = mkLoc(p.startPos, p.endPos) + Parser.next(p) + Location.mkloc(ident, loc) + | Lident(ident) => + let loc = mkLoc(p.startPos, p.endPos) + Parser.next(p) + Location.mkloc(ident, loc) + | t => + Parser.err(p, Diagnostics.uident(t)) + Location.mknoloc("_") + } + + let typ = switch p.Parser.token { + | Equal => + Parser.next(p) + Some(parseModuleType(p)) + | _ => None + } + + let moduleDecl = Ast_helper.Mtd.mk(~attrs, ~typ?, moduleName) + Ast_helper.Sig.modtype(~loc=mkLoc(startPos, p.prevEndPos), moduleDecl) + } + + and parseSignLetDesc = (~attrs, p) => { + let startPos = p.Parser.startPos + Parser.expect(Let, p) + let (name, loc) = parseLident(p) + let name = Location.mkloc(name, loc) + Parser.expect(Colon, p) + let typExpr = parsePolyTypeExpr(p) + let loc = mkLoc(startPos, p.prevEndPos) + Ast_helper.Val.mk(~loc, ~attrs, name, typExpr) + } + + /* attr-id ::= lowercase-ident + ∣ capitalized-ident + ∣ attr-id . attr-id */ + and parseAttributeId = p => { + let startPos = p.Parser.startPos + let rec loop = (p, acc) => + switch p.Parser.token { + | Lident(ident) | Uident(ident) => + Parser.next(p) + let id = acc ++ ident + switch p.Parser.token { + | Dot => + Parser.next(p) + loop(p, id ++ ".") + | _ => id + } + | token when Token.isKeyword(token) => + Parser.next(p) + let id = acc ++ Token.toString(token) + switch p.Parser.token { + | Dot => + Parser.next(p) + loop(p, id ++ ".") + | _ => id + } + | token => + Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs)) + acc + } + + let id = loop(p, "") + let endPos = p.prevEndPos + Location.mkloc(id, mkLoc(startPos, endPos)) + } + + /* + * payload ::= empty + * | ( structure-item ) + * + * TODO: what about multiple structure items? + * @attr({let x = 1; let x = 2}) + * + * Also what about type-expressions and specifications? + * @attr(:myType) ??? 
+ */ + and parsePayload = p => + switch p.Parser.token { + | Lparen when p.startPos.pos_cnum == p.prevEndPos.pos_cnum => + Parser.next(p) + switch p.token { + | Colon => + Parser.next(p) + let typ = parseTypExpr(p) + Parser.expect(Rparen, p) + Parsetree.PTyp(typ) + | _ => + let items = parseDelimitedRegion( + ~grammar=Grammar.Structure, + ~closing=Rparen, + ~f=parseStructureItemRegion, + p, + ) + + Parser.expect(Rparen, p) + Parsetree.PStr(items) + } + | _ => Parsetree.PStr(list{}) + } + + /* type attribute = string loc * payload */ + and parseAttribute = p => + switch p.Parser.token { + | At => + Parser.next(p) + let attrId = parseAttributeId(p) + let payload = parsePayload(p) + Some(attrId, payload) + | _ => None + } + + and parseAttributes = p => parseRegion(p, ~grammar=Grammar.Attribute, ~f=parseAttribute) + + /* + * standalone-attribute ::= + * | @@ atribute-id + * | @@ attribute-id ( structure-item ) + */ + and parseStandaloneAttribute = p => { + Parser.expect(AtAt, p) + let attrId = parseAttributeId(p) + let payload = parsePayload(p) + (attrId, payload) + } + + /* extension ::= % attr-id attr-payload + * | %% attr-id( + * expr ::= ... + * ∣ extension + * + * typexpr ::= ... + * ∣ extension + * + * pattern ::= ... + * ∣ extension + * + * module-expr ::= ... + * ∣ extension + * + * module-type ::= ... + * ∣ extension + * + * class-expr ::= ... + * ∣ extension + * + * class-type ::= ... + * ∣ extension + * + * + * item extension nodes usable in structures and signature + * + * item-extension ::= %% attr-id + * | %% attr-id(structure-item) + * + * attr-payload ::= structure-item + * + * ~moduleLanguage represents whether we're on the module level or not + */ + and parseExtension = (~moduleLanguage=false, p) => { + if moduleLanguage { + Parser.expect(PercentPercent, p) + } else { + Parser.expect(Percent, p) + } + let attrId = parseAttributeId(p) + let payload = parsePayload(p) + (attrId, payload) + } +} + +module OutcomePrinter: { + open Format + open Outcometree + + @live let out_value: ref<(formatter, out_value) => unit> + @live let out_type: ref<(formatter, out_type) => unit> + @live let out_class_type: ref<(formatter, out_class_type) => unit> + @live let out_module_type: ref<(formatter, out_module_type) => unit> + @live let out_sig_item: ref<(formatter, out_sig_item) => unit> + @live let out_signature: ref<(formatter, list) => unit> + @live let out_type_extension: ref<(formatter, out_type_extension) => unit> + @live let out_phrase: ref<(formatter, out_phrase) => unit> + + @live let parenthesized_ident: string => bool +} = { + /* Napkin doesn't have parenthesized identifiers. + * We don't support custom operators. 
*/ + let parenthesized_ident = _name => true + + /* TODO: better allocation strategy for the buffer */ + let escapeStringContents = s => { + let len = String.length(s) + let b = Buffer.create(len) + for i in 0 to len - 1 { + let c = (@doesNotRaise String.get)(s, i) + if c == '\b' { + Buffer.add_char(b, '\\') + Buffer.add_char(b, 'b') + } else if c == '\t' { + Buffer.add_char(b, '\\') + Buffer.add_char(b, 't') + } else if c == '\n' { + Buffer.add_char(b, '\\') + Buffer.add_char(b, 'n') + } else if c == '\r' { + Buffer.add_char(b, '\\') + Buffer.add_char(b, 'r') + } else if c == '"' { + Buffer.add_char(b, '\\') + Buffer.add_char(b, '"') + } else if c == '\\' { + Buffer.add_char(b, '\\') + Buffer.add_char(b, '\\') + } else { + Buffer.add_char(b, c) + } + } + Buffer.contents(b) + } + + /* let rec print_ident fmt ident = match ident with + | Outcometree.Oide_ident s -> Format.pp_print_string fmt s + | Oide_dot (id, s) -> + print_ident fmt id; + Format.pp_print_char fmt '.'; + Format.pp_print_string fmt s + | Oide_apply (id1, id2) -> + print_ident fmt id1; + Format.pp_print_char fmt '('; + print_ident fmt id2; + Format.pp_print_char fmt ')' */ + + let rec printOutIdentDoc = (ident: Outcometree.out_ident) => + switch ident { + | Oide_ident(s) => Doc.text(s) + | Oide_dot(ident, s) => Doc.concat(list{printOutIdentDoc(ident), Doc.dot, Doc.text(s)}) + | Oide_apply(call, arg) => + Doc.concat(list{printOutIdentDoc(call), Doc.lparen, printOutIdentDoc(arg), Doc.rparen}) + } + + let printOutAttributeDoc = (outAttribute: Outcometree.out_attribute) => + Doc.concat(list{Doc.text("@"), Doc.text(outAttribute.oattr_name)}) + + let printOutAttributesDoc = (attrs: list) => + switch attrs { + | list{} => Doc.nil + | attrs => + Doc.concat(list{ + Doc.group(Doc.join(~sep=Doc.line, List.map(printOutAttributeDoc, attrs))), + Doc.line, + }) + } + + let rec collectArrowArgs = (outType: Outcometree.out_type, args) => + switch outType { + | Otyp_arrow(label, argType, returnType) => + let arg = (label, argType) + collectArrowArgs(returnType, list{arg, ...args}) + | _ as returnType => (List.rev(args), returnType) + } + + let rec collectFunctorArgs = (outModuleType: Outcometree.out_module_type, args) => + switch outModuleType { + | Omty_functor(lbl, optModType, returnModType) => + let arg = (lbl, optModType) + collectFunctorArgs(returnModType, list{arg, ...args}) + | _ => (List.rev(args), outModuleType) + } + + let rec printOutTypeDoc = (outType: Outcometree.out_type) => + switch outType { + | Otyp_abstract | Otyp_variant(_) | Otyp_open => Doc.nil + | Otyp_alias(typ, aliasTxt) => + Doc.concat(list{printOutTypeDoc(typ), Doc.text(" as '"), Doc.text(aliasTxt)}) + | Otyp_constr(outIdent, list{}) => printOutIdentDoc(outIdent) + | Otyp_manifest(typ1, typ2) => + Doc.concat(list{printOutTypeDoc(typ1), Doc.text(" = "), printOutTypeDoc(typ2)}) + | Otyp_record(record) => printRecordDeclarationDoc(~inline=true, record) + | Otyp_stuff(txt) => Doc.text(txt) + | Otyp_var(ng, s) => + Doc.concat(list{ + Doc.text( + "'" ++ if ng { + "_" + } else { + "" + }, + ), + Doc.text(s), + }) + | Otyp_object(fields, rest) => printObjectFields(fields, rest) + | Otyp_class(_) => Doc.nil + | Otyp_attribute(typ, attribute) => + Doc.group(Doc.concat(list{printOutAttributeDoc(attribute), Doc.line, printOutTypeDoc(typ)})) + /* example: Red | Blue | Green | CustomColour(float, float, float) */ + | Otyp_sum(constructors) => printOutConstructorsDoc(constructors) + + /* example: {"name": string, "age": int} */ + | Otyp_constr(Oide_dot(Oide_ident("Js"), "t"), 
list{Otyp_object(fields, rest)}) => + printObjectFields(fields, rest) + + /* example: node */ + | Otyp_constr(outIdent, args) => + let argsDoc = switch args { + | list{} => Doc.nil + | args => + Doc.concat(list{ + Doc.lessThan, + Doc.indent( + Doc.concat(list{ + Doc.softLine, + Doc.join(~sep=Doc.concat(list{Doc.comma, Doc.line}), List.map(printOutTypeDoc, args)), + }), + ), + Doc.trailingComma, + Doc.softLine, + Doc.greaterThan, + }) + } + + Doc.group(Doc.concat(list{printOutIdentDoc(outIdent), argsDoc})) + | Otyp_tuple(tupleArgs) => + Doc.group( + Doc.concat(list{ + Doc.lparen, + Doc.indent( + Doc.concat(list{ + Doc.softLine, + Doc.join( + ~sep=Doc.concat(list{Doc.comma, Doc.line}), + List.map(printOutTypeDoc, tupleArgs), + ), + }), + ), + Doc.trailingComma, + Doc.softLine, + Doc.rparen, + }), + ) + | Otyp_poly(vars, outType) => + Doc.group( + Doc.concat(list{ + Doc.join(~sep=Doc.space, List.map(var => Doc.text("'" ++ var), vars)), + printOutTypeDoc(outType), + }), + ) + | Otyp_arrow(_) as typ => + let (typArgs, typ) = collectArrowArgs(typ, list{}) + let args = Doc.join(~sep=Doc.concat(list{Doc.comma, Doc.line}), List.map(((lbl, typ)) => + if lbl == "" { + printOutTypeDoc(typ) + } else { + Doc.group(Doc.concat(list{Doc.text("~" ++ (lbl ++ ": ")), printOutTypeDoc(typ)})) + } + , typArgs)) + let argsDoc = { + let needsParens = switch typArgs { + | list{(_, Otyp_tuple(_) | Otyp_arrow(_))} => true + /* single argument should not be wrapped */ + | list{("", _)} => false + | _ => true + } + + if needsParens { + Doc.group( + Doc.concat(list{ + Doc.lparen, + Doc.indent(Doc.concat(list{Doc.softLine, args})), + Doc.trailingComma, + Doc.softLine, + Doc.rparen, + }), + ) + } else { + args + } + } + + Doc.concat(list{argsDoc, Doc.text(" => "), printOutTypeDoc(typ)}) + | Otyp_module(_modName, _stringList, _outTypes) => Doc.nil + } + + and printObjectFields = (fields, rest) => { + let dots = switch rest { + | Some(non_gen) => + Doc.text( + if non_gen { + "_" + } else { + "" + } ++ "..", + ) + | None => Doc.nil + } + + Doc.group( + Doc.concat(list{ + Doc.lbrace, + dots, + Doc.indent( + Doc.concat(list{ + Doc.softLine, + Doc.join( + ~sep=Doc.concat(list{Doc.comma, Doc.line}), + List.map( + ((lbl, outType)) => + Doc.group( + Doc.concat(list{Doc.text("\"" ++ (lbl ++ "\": ")), printOutTypeDoc(outType)}), + ), + fields, + ), + ), + }), + ), + Doc.softLine, + Doc.trailingComma, + Doc.rbrace, + }), + ) + } + + and printOutConstructorsDoc = constructors => + Doc.group( + Doc.indent(Doc.concat(list{Doc.line, Doc.join(~sep=Doc.line, List.mapi((i, constructor) => + Doc.concat(list{ + if i > 0 { + Doc.text("| ") + } else { + Doc.ifBreaks(Doc.text("| "), Doc.nil) + }, + printOutConstructorDoc(constructor), + }) + , constructors))})), + ) + + and printOutConstructorDoc = ((name, args, gadt)) => { + let gadtDoc = switch gadt { + | Some(outType) => Doc.concat(list{Doc.text(": "), printOutTypeDoc(outType)}) + | None => Doc.nil + } + + let argsDoc = switch args { + | list{} => Doc.nil + | list{Otyp_record(record)} => + /* inline records + * | Root({ + * mutable value: 'value, + * mutable updatedTime: float, + * }) + */ + Doc.concat(list{ + Doc.lparen, + Doc.indent(printRecordDeclarationDoc(~inline=true, record)), + Doc.rparen, + }) + | _types => + Doc.indent( + Doc.concat(list{ + Doc.lparen, + Doc.indent( + Doc.concat(list{ + Doc.softLine, + Doc.join(~sep=Doc.concat(list{Doc.comma, Doc.line}), List.map(printOutTypeDoc, args)), + }), + ), + Doc.trailingComma, + Doc.softLine, + Doc.rparen, + }), + ) + } + + 
Doc.group(Doc.concat(list{Doc.text(name), argsDoc, gadtDoc})) + } + + and printRecordDeclRowDoc = ((name, mut, arg)) => + Doc.group( + Doc.concat(list{ + if mut { + Doc.text("mutable ") + } else { + Doc.nil + }, + Doc.text(name), + Doc.text(": "), + printOutTypeDoc(arg), + }), + ) + + and printRecordDeclarationDoc = (~inline, rows) => { + let content = Doc.concat(list{ + Doc.lbrace, + Doc.indent( + Doc.concat(list{ + Doc.softLine, + Doc.join( + ~sep=Doc.concat(list{Doc.comma, Doc.line}), + List.map(printRecordDeclRowDoc, rows), + ), + }), + ), + Doc.trailingComma, + Doc.softLine, + Doc.rbrace, + }) + if !inline { + Doc.group(content) + } else { + content + } + } + + let printOutType = (fmt, outType) => + Format.pp_print_string(fmt, Doc.toString(~width=80, printOutTypeDoc(outType))) + + let printTypeParameterDoc = ((typ, (co, cn))) => + Doc.concat(list{ + if !cn { + Doc.text("+") + } else if !co { + Doc.text("-") + } else { + Doc.nil + }, + if typ == "_" { + Doc.text("_") + } else { + Doc.text("'" ++ typ) + }, + }) + + let rec printOutSigItemDoc = (outSigItem: Outcometree.out_sig_item) => + switch outSigItem { + | Osig_class(_) | Osig_class_type(_) => Doc.nil + | Osig_ellipsis => Doc.dotdotdot + | Osig_value(valueDecl) => + Doc.group( + Doc.concat(list{ + printOutAttributesDoc(valueDecl.oval_attributes), + Doc.text( + switch valueDecl.oval_prims { + | list{} => "let " + | _ => "external " + }, + ), + Doc.text(valueDecl.oval_name), + Doc.text(":"), + Doc.space, + printOutTypeDoc(valueDecl.oval_type), + switch valueDecl.oval_prims { + | list{} => Doc.nil + | primitives => + Doc.indent( + Doc.concat(list{ + Doc.text(" ="), + Doc.line, + Doc.group( + Doc.join( + ~sep=Doc.line, + List.map(prim => Doc.text("\"" ++ (prim ++ "\"")), primitives), + ), + ), + }), + ) + }, + }), + ) + | Osig_typext(outExtensionConstructor, _outExtStatus) => + printOutExtensionConstructorDoc(outExtensionConstructor) + | Osig_modtype(modName, Omty_signature(list{})) => + Doc.concat(list{Doc.text("module type "), Doc.text(modName)}) + | Osig_modtype(modName, outModuleType) => + Doc.group( + Doc.concat(list{ + Doc.text("module type "), + Doc.text(modName), + Doc.text(" = "), + printOutModuleTypeDoc(outModuleType), + }), + ) + | Osig_module(modName, Omty_alias(ident), _) => + Doc.group( + Doc.concat(list{ + Doc.text("module "), + Doc.text(modName), + Doc.text(" ="), + Doc.line, + printOutIdentDoc(ident), + }), + ) + | Osig_module(modName, outModType, outRecStatus) => + Doc.group( + Doc.concat(list{ + Doc.text( + switch outRecStatus { + | Orec_not => "module " + | Orec_first => "module rec " + | Orec_next => "and" + }, + ), + Doc.text(modName), + Doc.text(" = "), + printOutModuleTypeDoc(outModType), + }), + ) + | Osig_type(outTypeDecl, outRecStatus) => + /* TODO: manifest ? 
*/ + let attrs = switch (outTypeDecl.otype_immediate, outTypeDecl.otype_unboxed) { + | (false, false) => Doc.nil + | (true, false) => Doc.concat(list{Doc.text("@immediate"), Doc.line}) + | (false, true) => Doc.concat(list{Doc.text("@unboxed"), Doc.line}) + | (true, true) => Doc.concat(list{Doc.text("@immediate @unboxed"), Doc.line}) + } + + let kw = Doc.text( + switch outRecStatus { + | Orec_not => "type " + | Orec_first => "type rec " + | Orec_next => "and " + }, + ) + let typeParams = switch outTypeDecl.otype_params { + | list{} => Doc.nil + | _params => + Doc.group( + Doc.concat(list{ + Doc.lessThan, + Doc.indent( + Doc.concat(list{ + Doc.softLine, + Doc.join( + ~sep=Doc.concat(list{Doc.comma, Doc.line}), + List.map(printTypeParameterDoc, outTypeDecl.otype_params), + ), + }), + ), + Doc.trailingComma, + Doc.softLine, + Doc.greaterThan, + }), + ) + } + + let privateDoc = switch outTypeDecl.otype_private { + | Asttypes.Private => Doc.text("private ") + | Public => Doc.nil + } + + let kind = switch outTypeDecl.otype_type { + | Otyp_open => Doc.concat(list{Doc.text(" = "), privateDoc, Doc.text("..")}) + | Otyp_abstract => Doc.nil + | Otyp_record(record) => + Doc.concat(list{ + Doc.text(" = "), + privateDoc, + printRecordDeclarationDoc(~inline=false, record), + }) + | typ => Doc.concat(list{Doc.text(" = "), printOutTypeDoc(typ)}) + } + + let constraints = switch outTypeDecl.otype_cstrs { + | list{} => Doc.nil + | _ => + Doc.group( + Doc.concat(list{ + Doc.line, + Doc.indent( + Doc.concat(list{ + Doc.hardLine, + Doc.join( + ~sep=Doc.line, + List.map( + ((typ1, typ2)) => + Doc.group( + Doc.concat(list{ + Doc.text("constraint "), + printOutTypeDoc(typ1), + Doc.text(" ="), + Doc.indent(Doc.concat(list{Doc.line, printOutTypeDoc(typ2)})), + }), + ), + outTypeDecl.otype_cstrs, + ), + ), + }), + ), + }), + ) + } + Doc.group( + Doc.concat(list{ + attrs, + Doc.group( + Doc.concat(list{attrs, kw, Doc.text(outTypeDecl.otype_name), typeParams, kind}), + ), + constraints, + }), + ) + } + + and printOutModuleTypeDoc = (outModType: Outcometree.out_module_type) => + switch outModType { + | Omty_abstract => Doc.nil + | Omty_ident(ident) => printOutIdentDoc(ident) + /* example: module Increment = (M: X_int) => X_int */ + | Omty_functor(_) => + let (args, returnModType) = collectFunctorArgs(outModType, list{}) + let argsDoc = switch args { + | list{(_, None)} => Doc.text("()") + | args => + Doc.group( + Doc.concat(list{ + Doc.lparen, + Doc.indent( + Doc.concat(list{ + Doc.softLine, + Doc.join(~sep=Doc.concat(list{Doc.comma, Doc.line}), List.map(((lbl, optModType)) => + Doc.group( + Doc.concat(list{ + Doc.text(lbl), + switch optModType { + | None => Doc.nil + | Some(modType) => + Doc.concat(list{Doc.text(": "), printOutModuleTypeDoc(modType)}) + }, + }), + ) + , args)), + }), + ), + Doc.trailingComma, + Doc.softLine, + Doc.rparen, + }), + ) + } + + Doc.group(Doc.concat(list{argsDoc, Doc.text(" => "), printOutModuleTypeDoc(returnModType)})) + | Omty_signature(list{}) => Doc.nil + | Omty_signature(signature) => + Doc.breakableGroup( + ~forceBreak=true, + Doc.concat(list{ + Doc.lbrace, + Doc.indent(Doc.concat(list{Doc.line, printOutSignatureDoc(signature)})), + Doc.softLine, + Doc.rbrace, + }), + ) + | Omty_alias(_ident) => Doc.nil + } + + and printOutSignatureDoc = (signature: list) => { + let rec loop = (signature, acc) => + switch signature { + | list{} => List.rev(acc) + | list{Outcometree.Osig_typext(ext, Oext_first), ...items} => + /* Gather together the extension constructors */ + let rec gather_extensions = 
(acc, items) => + switch items { + | list{Outcometree.Osig_typext(ext, Oext_next), ...items} => + gather_extensions( + list{(ext.oext_name, ext.oext_args, ext.oext_ret_type), ...acc}, + items, + ) + | _ => (List.rev(acc), items) + } + + let (exts, items) = gather_extensions( + list{(ext.oext_name, ext.oext_args, ext.oext_ret_type)}, + items, + ) + + let te = { + Outcometree.otyext_name: ext.oext_type_name, + otyext_params: ext.oext_type_params, + otyext_constructors: exts, + otyext_private: ext.oext_private, + } + + let doc = printOutTypeExtensionDoc(te) + loop(items, list{doc, ...acc}) + | list{item, ...items} => + let doc = printOutSigItemDoc(item) + loop(items, list{doc, ...acc}) + } + + switch loop(signature, list{}) { + | list{doc} => doc + | docs => Doc.breakableGroup(~forceBreak=true, Doc.join(~sep=Doc.line, docs)) + } + } + + and printOutExtensionConstructorDoc = (outExt: Outcometree.out_extension_constructor) => { + let typeParams = switch outExt.oext_type_params { + | list{} => Doc.nil + | params => + Doc.group( + Doc.concat(list{ + Doc.lessThan, + Doc.indent( + Doc.concat(list{ + Doc.softLine, + Doc.join(~sep=Doc.concat(list{Doc.comma, Doc.line}), List.map(ty => + Doc.text( + if ty == "_" { + ty + } else { + "'" ++ ty + }, + ) + , params)), + }), + ), + Doc.softLine, + Doc.greaterThan, + }), + ) + } + + Doc.group( + Doc.concat(list{ + Doc.text("type "), + Doc.text(outExt.oext_type_name), + typeParams, + Doc.text(" +="), + Doc.line, + if outExt.oext_private == Asttypes.Private { + Doc.text("private ") + } else { + Doc.nil + }, + printOutConstructorDoc((outExt.oext_name, outExt.oext_args, outExt.oext_ret_type)), + }), + ) + } + + and printOutTypeExtensionDoc = (typeExtension: Outcometree.out_type_extension) => { + let typeParams = switch typeExtension.otyext_params { + | list{} => Doc.nil + | params => + Doc.group( + Doc.concat(list{ + Doc.lessThan, + Doc.indent( + Doc.concat(list{ + Doc.softLine, + Doc.join(~sep=Doc.concat(list{Doc.comma, Doc.line}), List.map(ty => + Doc.text( + if ty == "_" { + ty + } else { + "'" ++ ty + }, + ) + , params)), + }), + ), + Doc.softLine, + Doc.greaterThan, + }), + ) + } + + Doc.group( + Doc.concat(list{ + Doc.text("type "), + Doc.text(typeExtension.otyext_name), + typeParams, + Doc.text(" +="), + if typeExtension.otyext_private == Asttypes.Private { + Doc.text("private ") + } else { + Doc.nil + }, + printOutConstructorsDoc(typeExtension.otyext_constructors), + }), + ) + } + + let printOutSigItem = (fmt, outSigItem) => + Format.pp_print_string(fmt, Doc.toString(~width=80, printOutSigItemDoc(outSigItem))) + + let printOutSignature = (fmt, signature) => + Format.pp_print_string(fmt, Doc.toString(~width=80, printOutSignatureDoc(signature))) + + let validFloatLexeme = s => { + let l = String.length(s) + let rec loop = i => + if i >= l { + s ++ "." + } else { + switch @doesNotRaise + String.get(s, i) { + | '0' .. 
'9' | '-' => loop(i + 1) + | _ => s + } + } + loop(0) + } + + let floatRepres = f => + switch classify_float(f) { + | FP_nan => "nan" + | FP_infinite => + if f < 0.0 { + "neg_infinity" + } else { + "infinity" + } + | _ => + let float_val = { + let s1 = Printf.sprintf("%.12g", f) + if f == (@doesNotRaise float_of_string)(s1) { + s1 + } else { + let s2 = Printf.sprintf("%.15g", f) + if f == (@doesNotRaise float_of_string)(s2) { + s2 + } else { + Printf.sprintf("%.18g", f) + } + } + } + validFloatLexeme(float_val) + } + + let rec printOutValueDoc = (outValue: Outcometree.out_value) => + switch outValue { + | Oval_array(outValues) => + Doc.group( + Doc.concat(list{ + Doc.lbracket, + Doc.indent( + Doc.concat(list{ + Doc.softLine, + Doc.join( + ~sep=Doc.concat(list{Doc.comma, Doc.line}), + List.map(printOutValueDoc, outValues), + ), + }), + ), + Doc.trailingComma, + Doc.softLine, + Doc.rbracket, + }), + ) + | Oval_char(c) => Doc.text("'" ++ (Char.escaped(c) ++ "'")) + | Oval_constr(outIdent, outValues) => + Doc.group( + Doc.concat(list{ + printOutIdentDoc(outIdent), + Doc.lparen, + Doc.indent( + Doc.concat(list{ + Doc.softLine, + Doc.join( + ~sep=Doc.concat(list{Doc.comma, Doc.line}), + List.map(printOutValueDoc, outValues), + ), + }), + ), + Doc.trailingComma, + Doc.softLine, + Doc.rparen, + }), + ) + | Oval_ellipsis => Doc.text("...") + | Oval_int(i) => Doc.text(Format.sprintf("%i", i)) + | Oval_int32(i) => Doc.text(Format.sprintf("%lil", i)) + | Oval_int64(i) => Doc.text(Format.sprintf("%LiL", i)) + | Oval_nativeint(i) => Doc.text(Format.sprintf("%nin", i)) + | Oval_float(f) => Doc.text(floatRepres(f)) + | Oval_list(outValues) => + Doc.group( + Doc.concat(list{ + Doc.text("list["), + Doc.indent( + Doc.concat(list{ + Doc.softLine, + Doc.join( + ~sep=Doc.concat(list{Doc.comma, Doc.line}), + List.map(printOutValueDoc, outValues), + ), + }), + ), + Doc.trailingComma, + Doc.softLine, + Doc.rbracket, + }), + ) + | Oval_printer(fn) => + let fmt = Format.str_formatter + fn(fmt) + let str = Format.flush_str_formatter() + Doc.text(str) + | Oval_record(rows) => + Doc.group( + Doc.concat(list{ + Doc.lparen, + Doc.indent( + Doc.concat(list{ + Doc.softLine, + Doc.join( + ~sep=Doc.concat(list{Doc.comma, Doc.line}), + List.map( + ((outIdent, outValue)) => + Doc.group( + Doc.concat(list{ + printOutIdentDoc(outIdent), + Doc.text(": "), + printOutValueDoc(outValue), + }), + ), + rows, + ), + ), + }), + ), + Doc.trailingComma, + Doc.softLine, + Doc.rparen, + }), + ) + | Oval_string(txt, _sizeToPrint, _kind) => Doc.text(escapeStringContents(txt)) + | Oval_stuff(txt) => Doc.text(txt) + | Oval_tuple(outValues) => + Doc.group( + Doc.concat(list{ + Doc.lparen, + Doc.indent( + Doc.concat(list{ + Doc.softLine, + Doc.join( + ~sep=Doc.concat(list{Doc.comma, Doc.line}), + List.map(printOutValueDoc, outValues), + ), + }), + ), + Doc.trailingComma, + Doc.softLine, + Doc.rparen, + }), + ) + /* Not supported by NapkinScript */ + | Oval_variant(_) => Doc.nil + } + + let printOutExceptionDoc = (exc, outValue) => + switch exc { + | Sys.Break => Doc.text("Interrupted.") + | Out_of_memory => Doc.text("Out of memory during evaluation.") + | Stack_overflow => Doc.text("Stack overflow during evaluation (looping recursion?).") + | _ => + Doc.group( + Doc.indent(Doc.concat(list{Doc.text("Exception:"), Doc.line, printOutValueDoc(outValue)})), + ) + } + + let printOutPhraseSignature = signature => { + let rec loop = (signature, acc) => + switch signature { + | list{} => List.rev(acc) + | list{(Outcometree.Osig_typext(ext, Oext_first), 
None), ...signature} => + /* Gather together extension constructors */ + let rec gather_extensions = (acc, items) => + switch items { + | list{(Outcometree.Osig_typext(ext, Oext_next), None), ...items} => + gather_extensions( + list{(ext.oext_name, ext.oext_args, ext.oext_ret_type), ...acc}, + items, + ) + | _ => (List.rev(acc), items) + } + + let (exts, signature) = gather_extensions( + list{(ext.oext_name, ext.oext_args, ext.oext_ret_type)}, + signature, + ) + + let te = { + Outcometree.otyext_name: ext.oext_type_name, + otyext_params: ext.oext_type_params, + otyext_constructors: exts, + otyext_private: ext.oext_private, + } + + let doc = printOutTypeExtensionDoc(te) + loop(signature, list{doc, ...acc}) + | list{(sigItem, optOutValue), ...signature} => + let doc = switch optOutValue { + | None => printOutSigItemDoc(sigItem) + | Some(outValue) => + Doc.group( + Doc.concat(list{ + printOutSigItemDoc(sigItem), + Doc.text(" = "), + printOutValueDoc(outValue), + }), + ) + } + + loop(signature, list{doc, ...acc}) + } + + Doc.breakableGroup(~forceBreak=true, Doc.join(~sep=Doc.line, loop(signature, list{}))) + } + + let printOutPhraseDoc = (outPhrase: Outcometree.out_phrase) => + switch outPhrase { + | Ophr_eval(outValue, outType) => + Doc.group( + Doc.concat(list{ + Doc.text("- : "), + printOutTypeDoc(outType), + Doc.text(" ="), + Doc.indent(Doc.concat(list{Doc.line, printOutValueDoc(outValue)})), + }), + ) + | Ophr_signature(list{}) => Doc.nil + | Ophr_signature(signature) => printOutPhraseSignature(signature) + | Ophr_exception(exc, outValue) => printOutExceptionDoc(exc, outValue) + } + + let printOutPhase = (fmt, outPhrase) => + Format.pp_print_string(fmt, Doc.toString(~width=80, printOutPhraseDoc(outPhrase))) + + let printOutModuleType = (fmt, outModuleType) => + Format.pp_print_string(fmt, Doc.toString(~width=80, printOutModuleTypeDoc(outModuleType))) + + let printOutTypeExtension = (fmt, typeExtension) => + Format.pp_print_string(fmt, Doc.toString(~width=80, printOutTypeExtensionDoc(typeExtension))) + + let printOutValue = (fmt, outValue) => + Format.pp_print_string(fmt, Doc.toString(~width=80, printOutValueDoc(outValue))) + + /* Not supported in Napkin */ + let printOutClassType = (_fmt, _) => () + + let out_value = ref(printOutValue) + let out_type = ref(printOutType) + let out_module_type = ref(printOutModuleType) + let out_sig_item = ref(printOutSigItem) + let out_signature = ref(printOutSignature) + let out_type_extension = ref(printOutTypeExtension) + let out_phrase = @live ref(printOutPhase) + let out_class_type = ref(printOutClassType) +} + +module Repl = { + let parseToplevelPhrase = filename => { + let src = IO.readFile(filename) + let p = Parser.make(src, filename) + Parsetree.Ptop_def(NapkinScript.parseImplementation(p)) + } + + let typeAndPrintOutcome = filename => { + Compmisc.init_path(false) + let env = Compmisc.initial_env() + try { + let sstr = switch parseToplevelPhrase(filename) { + | Parsetree.Ptop_def(sstr) => sstr + | _ => assert false + } + + let (_str, signature, _newenv) = Typemod.type_toplevel_phrase(env, sstr) + let outSigItems = Printtyp.tree_of_signature(signature) + let fmt = Format.str_formatter + OutcomePrinter.out_signature.contents(fmt, outSigItems) + let result = Format.flush_str_formatter() + print_string(result) + } catch { + | Typetexp.Error(_, _, err) => + let fmt = Format.str_formatter + Typetexp.report_error(env, fmt, err) + let result = Format.flush_str_formatter() + let () = print_endline(result) + | _ => print_endline("catch all") + } + } +} + 
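/* Editor's note — illustrative usage sketch, not part of the original diff.
 * Assuming a hypothetical file Demo.res containing
 *
 *   let add = (a, b) => a + b
 *
 * the call
 *
 *   Repl.typeAndPrintOutcome("Demo.res")
 *
 * parses the file with NapkinScript.parseImplementation, type-checks the
 * resulting toplevel phrase, and prints the inferred signature through
 * OutcomePrinter — roughly `let add: (int, int) => int` at the default
 * width of 80.
 */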
+/* command line flags */ +module Clflags: { + let recover: ref + let print: ref + let width: ref + let origin: ref + let files: ref> + let interface: ref + let report: ref + + let parse: unit => unit + let outcome: ref +} = { + let recover = ref(false) + let width = ref(100) + + let files = ref(list{}) + let addFilename = filename => files := list{filename, ...files.contents} + + let print = ref("") + let outcome = ref(false) + let origin = ref("") + let interface = ref(false) + let report = ref("pretty") + + let usage = "Usage: napkinscript \nOptions are:" + + let spec = list{ + ("-recover", Arg.Unit(() => recover := true), "Emit partial ast"), + ("-print", Arg.String(txt => print := txt), "Print either binary, ocaml or ast"), + ("-parse", Arg.String(txt => origin := txt), "Parse ocaml or napkinscript"), + ("-outcome", Arg.Bool(printOutcomeTree => outcome := printOutcomeTree), "print outcometree"), + ("-width", Arg.Int(w => width := w), "Specify the line length that the printer will wrap on"), + ("-interface", Arg.Unit(() => interface := true), "Parse as interface"), + ( + "-report", + Arg.String(txt => report := txt), + "Stylize errors and messages using color and context. Accepts `Pretty` and `Plain`. Default `Plain`", + ), + } + + let parse = () => Arg.parse(spec, addFilename, usage) +} + +module Driver: { + let processFile: ( + ~isInterface: bool, + ~width: int, + ~recover: bool, + ~origin: string, + ~target: string, + ~report: string, + string, + ) => unit +} = { + type rec file_kind<'a> = + | Structure: file_kind + | Signature: file_kind + + let parseNapkin = (type a, kind: file_kind, p): a => + switch kind { + | Structure => NapkinScript.parseImplementation(p) + | Signature => NapkinScript.parseSpecification(p) + } + + let extractOcamlStringData = filename => { + let lexbuf = if String.length(filename) > 0 { + IO.readFile(filename) |> Lexing.from_string + } else { + Lexing.from_channel(stdin) + } + + let stringLocs = ref(list{}) + let rec next = () => { + let token = Lexer.token_with_comments(lexbuf) + switch token { + | OcamlParser.STRING(_txt, None) => + open Location + let loc = { + loc_start: lexbuf.lex_start_p, + loc_end: lexbuf.Lexing.lex_curr_p, + loc_ghost: false, + } + let len = loc.loc_end.pos_cnum - loc.loc_start.pos_cnum + let txt = Bytes.to_string( + (@doesNotRaise Bytes.sub)(lexbuf.Lexing.lex_buffer, loc.loc_start.pos_cnum, len), + ) + stringLocs := list{(txt, loc), ...stringLocs.contents} + next() + | OcamlParser.EOF => () + | _ => next() + } + } + + next() + List.rev(stringLocs.contents) + } + + let parseOcaml = (type a, kind: file_kind, filename): a => { + let lexbuf = if String.length(filename) > 0 { + IO.readFile(filename) |> Lexing.from_string + } else { + Lexing.from_channel(stdin) + } + + let stringData = extractOcamlStringData(filename) + switch kind { + | Structure => + Parse.implementation(lexbuf) + |> ParsetreeCompatibility.replaceStringLiteralStructure(stringData) + |> ParsetreeCompatibility.structure + | Signature => + Parse.interface(lexbuf) + |> ParsetreeCompatibility.replaceStringLiteralSignature(stringData) + |> ParsetreeCompatibility.signature + } + } + + let parseNapkinFile = (~destination, kind, filename) => { + let src = if String.length(filename) > 0 { + IO.readFile(filename) + } else { + IO.readStdin() + } + + let p = { + let mode = switch destination { + | "napkinscript" | "ns" | "sexp" => Parser.Default + | _ => Parser.ParseForTypeChecker + } + + Parser.make(~mode, src, filename) + } + let ast = parseNapkin(kind, p) + let report = switch 
p.diagnostics { + | list{} => None + | diagnostics => Some(diagnostics) + } + + (ast, report, p) + } + + let parseOcamlFile = (kind, filename) => { + let ast = parseOcaml(kind, filename) + let lexbuf2 = if String.length(filename) > 0 { + IO.readFile(filename) |> Lexing.from_string + } else { + Lexing.from_channel(stdin) + } + + let comments = { + let rec next = (prevTokEndPos: Lexing.position, comments, lb) => { + let token = Lexer.token_with_comments(lb) + switch token { + | OcamlParser.EOF => comments + | OcamlParser.COMMENT(txt, loc) => + let comment = Comment.fromOcamlComment(~loc, ~prevTokEndPos, ~txt) + + next(loc.Location.loc_end, list{comment, ...comments}, lb) + | _ => next(lb.Lexing.lex_curr_p, comments, lb) + } + } + + let cmts = next(lexbuf2.Lexing.lex_start_p, list{}, lexbuf2) + cmts + } + + let p = Parser.make("", filename) + p.comments = comments + (ast, None, p) + } + + let reasonFilename = ref("") + let commentData = ref(list{}) + let stringData = ref(list{}) + + let parseReasonBinaryFromStdin = (type a, kind: file_kind, filename): a => { + let (chan, close) = + String.length(filename) === 0 + ? (stdin, _ => ()) + : { + let file_chan = open_in_bin(filename) + seek_in(file_chan, 0) + (file_chan, close_in_noerr) + } + + let ic = chan + let magic = switch kind { + | Structure => Config.ast_impl_magic_number + | Signature => Config.ast_intf_magic_number + } + + let buffer = (@doesNotRaise really_input_string)(ic, String.length(magic)) + assert (buffer == magic) + let filename = input_value(ic) + reasonFilename := filename + let ast = input_value(ic) + close(chan) + + let src = if String.length(filename) > 0 { + IO.readFile(filename) + } else { + IO.readStdin() + } + + let scanner = Scanner.make(Bytes.of_string(src), filename) + + let rec next = (prevEndPos, scanner) => { + let (startPos, endPos, token) = Scanner.scan(scanner) + switch token { + | Eof => () + | Comment(c) => + Comment.setPrevTokEndPos(c, prevEndPos) + commentData := list{c, ...commentData.contents} + next(endPos, scanner) + | String(_) => + let loc = {Location.loc_start: startPos, loc_end: endPos, loc_ghost: false} + let len = endPos.pos_cnum - startPos.pos_cnum + let txt = (@doesNotRaise String.sub)(src, startPos.pos_cnum, len) + stringData := list{(txt, loc), ...stringData.contents} + next(endPos, scanner) + | _ => next(endPos, scanner) + } + } + + next(Lexing.dummy_pos, scanner) + + switch kind { + | Structure => + ast + |> ParsetreeCompatibility.replaceStringLiteralStructure(stringData.contents) + |> ParsetreeCompatibility.normalizeReasonArityStructure(~forPrinter=true) + |> ParsetreeCompatibility.structure + | Signature => + ast + |> ParsetreeCompatibility.replaceStringLiteralSignature(stringData.contents) + |> ParsetreeCompatibility.normalizeReasonAritySignature(~forPrinter=true) + |> ParsetreeCompatibility.signature + } + } + + let isReasonDocComment = (comment: Comment.t) => { + let content = Comment.txt(comment) + let len = String.length(content) + if len == 0 { + true + } else if ( + len >= 2 && (String.unsafe_get(content, 0) == '*' && String.unsafe_get(content, 1) == '*') + ) { + false + } else if len >= 1 && String.unsafe_get(content, 0) == '*' { + true + } else { + false + } + } + + let parseReasonBinary = (kind, filename) => { + let ast = parseReasonBinaryFromStdin(kind, filename) + let p = Parser.make("", reasonFilename.contents) + p.comments = List.filter(c => !isReasonDocComment(c), commentData.contents) + (ast, None, p) + } + + let parseImplementation = (~origin, ~destination, filename) => + 
switch origin { + | "ml" | "ocaml" => parseOcamlFile(Structure, filename) + | "reasonBinary" => parseReasonBinary(Structure, filename) + | _ => parseNapkinFile(~destination, Structure, filename) + } + + let parseInterface = (~destination, ~origin, filename) => + switch origin { + | "ml" | "ocaml" => parseOcamlFile(Signature, filename) + | "reasonBinary" => parseReasonBinary(Signature, filename) + | _ => parseNapkinFile(~destination, Signature, filename) + } + + let process = (~reportStyle, parseFn, printFn, recover, filename) => { + let (ast, report, parserState) = parseFn(filename) + switch report { + | Some(report) when recover == true => + printFn(ast, parserState) + prerr_string( + Diagnostics.stringOfReport( + ~style=Diagnostics.parseReportStyle(reportStyle), + report, + Bytes.to_string(parserState.Parser.scanner.src), + ), + ) + | Some(report) => + prerr_string( + Diagnostics.stringOfReport( + ~style=Diagnostics.parseReportStyle(reportStyle), + report, + Bytes.to_string(parserState.Parser.scanner.src), + ), + ) + exit(1) + | None => printFn(ast, parserState) + } + } + + type action = + | ProcessImplementation + | ProcessInterface + + let printImplementation = (~target, ~width, filename, ast, _parserState) => + switch target { + | "ml" | "ocaml" => Pprintast.structure(Format.std_formatter, ast) + | "ns" | "napkinscript" => + Printer.printImplementation(~width, ast, List.rev(_parserState.Parser.comments)) + | "ast" => Printast.implementation(Format.std_formatter, ast) + | "sexp" => ast |> SexpAst.implementation |> Sexp.toString |> print_string + | _ => + /* default binary */ + output_string(stdout, Config.ast_impl_magic_number) + output_value(stdout, filename) + output_value(stdout, ast) + } + + let printInterface = (~target, ~width, filename, ast, _parserState) => + switch target { + | "ml" | "ocaml" => Pprintast.signature(Format.std_formatter, ast) + | "ns" | "napkinscript" => + Printer.printInterface(~width, ast, List.rev(_parserState.Parser.comments)) + | "ast" => Printast.interface(Format.std_formatter, ast) + | "sexp" => ast |> SexpAst.interface |> Sexp.toString |> print_string + | _ => + /* default binary */ + output_string(stdout, Config.ast_intf_magic_number) + output_value(stdout, filename) + output_value(stdout, ast) + } + + let processFile = (~isInterface, ~width, ~recover, ~origin, ~target, ~report, filename) => + try { + let len = String.length(filename) + let action = if ( + isInterface || (len > 0 && (@doesNotRaise String.get)(filename, len - 1) == 'i') + ) { + ProcessInterface + } else { + ProcessImplementation + } + + switch action { + | ProcessImplementation => + process( + ~reportStyle=report, + parseImplementation(~origin, ~destination=target), + printImplementation(~target, ~width, filename), + recover, + filename, + ) + | ProcessInterface => + process( + ~reportStyle=report, + parseInterface(~origin, ~destination=target), + printInterface(~target, ~width, filename), + recover, + filename, + ) + } + } catch { + | Failure(txt) => + prerr_string(txt) + prerr_newline() + exit(1) + | _ => exit(1) + } +} + +let () = { + Clflags.parse() + if Clflags.outcome.contents { + Repl.typeAndPrintOutcome(List.hd(Clflags.files.contents)) + } else { + let () = switch Clflags.files.contents { + | list{_file, ..._} as files => + List.iter( + filename => + Driver.processFile( + ~isInterface=Clflags.interface.contents, + ~width=Clflags.width.contents, + ~recover=Clflags.recover.contents, + ~target=Clflags.print.contents, + ~origin=Clflags.origin.contents, + 
~report=Clflags.report.contents, + filename, + ), + files, + ) + | list{} => + Driver.processFile( + ~isInterface=Clflags.interface.contents, + ~width=Clflags.width.contents, + ~recover=Clflags.recover.contents, + ~target=Clflags.print.contents, + ~origin=Clflags.origin.contents, + ~report=Clflags.report.contents, + "", + ) + } + + exit(0) + } +} diff --git a/res_syntax/benchmarks/data/PrinterNapkin.ml b/res_syntax/benchmarks/data/PrinterNapkin.ml new file mode 100644 index 0000000000..852d93baad --- /dev/null +++ b/res_syntax/benchmarks/data/PrinterNapkin.ml @@ -0,0 +1,3501 @@ +module Printer = { + type rec printer = { + src: bytes, + comments: CommentAst.t, + } + + let rec collectPatternsFromListConstruct = (acc, pattern) => + { + open Parsetree + switch pattern.ppat_desc { + | Ppat_construct( + {txt: Longident.Lident("::")}, + Some({ppat_desc: Ppat_tuple(list(pat, rest))}), + ) => + collectPatternsFromListConstruct(list(pat, ...acc), rest) + | _ => /List.rev(acc), pattern/ + } + } + + let addParens = doc => + Doc.group( + Doc.concat(list( + Doc.lparen, + Doc.indent(Doc.concat(list(Doc.softLine, doc))), + Doc.softLine, + Doc.rparen, + )), + ) + + let addBraces = doc => + Doc.group(Doc.concat(list(Doc.lbrace, doc, Doc.rbrace))) + + let interleaveWhitespace = ( + ~forceBreak=false, + rows: list, + ) => { + let rec loop = (prevLoc, acc, rows) => + switch rows { + | list() => Doc.concat(List.rev(acc)) + | list(/loc, doc/, ...rest) => + if ( + loc.Location.loc_start.pos_lnum - + prevLoc.Location.loc_end.pos_lnum > 1 + ) { + loop(loc, list(doc, Doc.line, Doc.line, ...acc), rest) + } else { + loop(loc, list(doc, Doc.line, ...acc), rest) + } + } + + switch rows { + | list() => Doc.nil + | list(/firstLoc, firstDoc/, ...rest) => + let forceBreak = + forceBreak || + switch List.rev(rest) { + | list(/lastLoc, _/, ..._) => + firstLoc.loc_start.pos_lnum !== lastLoc.loc_end.pos_lnum + | _ => false + } + + Doc.breakableGroup(~forceBreak, loop(firstLoc, list(firstDoc), rest)) + } + } + + let printLongident = l => + switch l { + | Longident.Lident(lident) => Doc.text(lident) + | Longident.Ldot(lident, txt) as l => + let txts = Longident.flatten(l) + Doc.join(~sep=Doc.dot, List.map(Doc.text, txts)) + | _ => failwith("unsupported ident") + } + + let escapeStringContents = s => { + let len = String.length(s) + let b = Buffer.create(len) + for i in 0 to len - 1 { + let c = String.get(s, i) + if c == '\b' { + Buffer.add_char(b, '\\') + Buffer.add_char(b, 'b') + } else if c == '\t' { + Buffer.add_char(b, '\\') + Buffer.add_char(b, 't') + } else if c == '\n' { + Buffer.add_char(b, '\\') + Buffer.add_char(b, 'n') + } else if c == '\r' { + Buffer.add_char(b, '\\') + Buffer.add_char(b, 'r') + } else if c == '"' { + Buffer.add_char(b, '\\') + Buffer.add_char(b, '"') + } else if c == '\\' { + Buffer.add_char(b, '\\') + Buffer.add_char(b, '\\') + } else { + Buffer.add_char(b, c) + } + } + Buffer.contents(b) + } + + let printConstant = c => + switch c { + | Parsetree.Pconst_integer(s, _) => Doc.text(s) + | Pconst_string(s, _) => Doc.text("\"" ++ escapeStringContents(s) ++ "\"") + | Pconst_float(s, _) => Doc.text(s) + | Pconst_char(c) => Doc.text("'" ++ Char.escaped(c) ++ "'") + } + + let rec printStructure = (s: Parsetree.structure) => + interleaveWhitespace( + List.map(si => /si.Parsetree.pstr_loc, printStructureItem(si)/, s), + ) + + and printStructureItem = (si: Parsetree.structure_item) => + switch si.pstr_desc { + | Pstr_value(rec_flag, valueBindings) => + let recFlag = switch rec_flag { + | Asttypes.Nonrecursive 
=> Doc.nil + | Asttypes.Recursive => Doc.text("rec ") + } + + printValueBindings(~recFlag, valueBindings) + | Pstr_type(recFlag, typeDeclarations) => + let recFlag = switch recFlag { + | Asttypes.Nonrecursive => Doc.nil + | Asttypes.Recursive => Doc.text("rec ") + } + + printTypeDeclarations(~recFlag, typeDeclarations) + | Pstr_primitive(valueDescription) => + printValueDescription(valueDescription) + | Pstr_eval(expr, attrs) => + let needsParens = switch expr { + | { + pexp_attributes: list(/{txt: "ns.ternary"}, _/), + pexp_desc: Pexp_ifthenelse(_), + } => + false + | _ when ParsetreeViewer.hasAttributes(expr.pexp_attributes) => true + | _ => false + } + + let exprDoc = { + let doc = printExpression(expr) + if needsParens { + addParens(doc) + } else { + doc + } + } + + Doc.concat(list(printAttributes(attrs), exprDoc)) + | Pstr_attribute(attr) => + Doc.concat(list(Doc.text("@"), printAttribute(attr))) + | Pstr_extension(extension, attrs) => + Doc.concat(list( + printAttributes(attrs), + Doc.concat(list(Doc.text("%"), printExtension(extension))), + )) + | Pstr_include(includeDeclaration) => + printIncludeDeclaration(includeDeclaration) + | Pstr_open(openDescription) => printOpenDescription(openDescription) + | Pstr_modtype(modTypeDecl) => printModuleTypeDeclaration(modTypeDecl) + | Pstr_module(moduleBinding) => + printModuleBinding(~isRec=false, 0, moduleBinding) + | Pstr_recmodule(moduleBindings) => + Doc.join( + ~sep=Doc.line, + List.mapi( + (i, mb) => printModuleBinding(~isRec=true, i, mb), + moduleBindings, + ), + ) + | Pstr_exception(extensionConstructor) => + printExceptionDef(extensionConstructor) + | Pstr_typext(typeExtension) => printTypeExtension(typeExtension) + | Pstr_class(_) | Pstr_class_type(_) => Doc.nil + } + + and printTypeExtension = (te: Parsetree.type_extension) => { + let prefix = Doc.text("type ") + let name = printLongident(te.ptyext_path.txt) + let typeParams = switch te.ptyext_params { + | list() => Doc.nil + | typeParams => + Doc.group( + Doc.concat(list( + Doc.lessThan, + Doc.indent( + Doc.concat(list( + Doc.softLine, + Doc.join( + ~sep=Doc.concat(list(Doc.comma, Doc.line)), + List.map(printTypeParam, typeParams), + ), + )), + ), + Doc.trailingComma, + Doc.softLine, + Doc.greaterThan, + )), + ) + } + + let extensionConstructors = { + let ecs = te.ptyext_constructors + let forceBreak = switch /ecs, List.rev(ecs)/ { + | /list(first, ..._), list(last, ..._)/ => + first.pext_loc.loc_start.pos_lnum > + te.ptyext_path.loc.loc_end.pos_lnum || + first.pext_loc.loc_start.pos_lnum < last.pext_loc.loc_end.pos_lnum + | _ => false + } + + let privateFlag = switch te.ptyext_private { + | Asttypes.Private => Doc.concat(list(Doc.text("private"), Doc.line)) + | Public => Doc.nil + } + + Doc.breakableGroup( + ~forceBreak, + Doc.indent( + Doc.concat(list( + Doc.line, + privateFlag, + Doc.join(~sep=Doc.line, List.mapi(printExtensionConstructor, ecs)), + )), + ), + ) + } + + Doc.group( + Doc.concat(list( + printAttributes(~loc=te.ptyext_path.loc, te.ptyext_attributes), + prefix, + name, + typeParams, + Doc.text(" +="), + extensionConstructors, + )), + ) + } + + and printModuleBinding = (~isRec, i, moduleBinding) => { + let prefix = if i == 0 { + Doc.concat(list( + Doc.text("module "), + if isRec { + Doc.text("rec ") + } else { + Doc.nil + }, + )) + } else { + Doc.text("and ") + } + + let /modExprDoc, modConstraintDoc/ = switch moduleBinding.pmb_expr { + | {pmod_desc: Pmod_constraint(modExpr, modType)} => + / + printModExpr(modExpr), + Doc.concat(list(Doc.text(": "), 
printModType(modType))), + / + | modExpr => /printModExpr(modExpr), Doc.nil/ + } + + Doc.concat(list( + printAttributes( + ~loc=moduleBinding.pmb_name.loc, + moduleBinding.pmb_attributes, + ), + prefix, + Doc.text(moduleBinding.pmb_name.Location.txt), + modConstraintDoc, + Doc.text(" = "), + modExprDoc, + )) + } + + and printModuleTypeDeclaration = ( + modTypeDecl: Parsetree.module_type_declaration, + ) => + Doc.concat(list( + printAttributes(modTypeDecl.pmtd_attributes), + Doc.text("module type "), + Doc.text(modTypeDecl.pmtd_name.txt), + switch modTypeDecl.pmtd_type { + | None => Doc.nil + | Some(modType) => + Doc.concat(list(Doc.text(" = "), printModType(modType))) + }, + )) + + and printModType = modType => { + let modTypeDoc = switch modType.pmty_desc { + | Parsetree.Pmty_ident({txt: longident, loc}) => + Doc.concat(list( + printAttributes(~loc, modType.pmty_attributes), + printLongident(longident), + )) + | Pmty_signature(signature) => + let signatureDoc = Doc.breakableGroup( + ~forceBreak=true, + Doc.concat(list( + Doc.lbrace, + Doc.indent(Doc.concat(list(Doc.line, printSignature(signature)))), + Doc.line, + Doc.rbrace, + )), + ) + Doc.concat(list(printAttributes(modType.pmty_attributes), signatureDoc)) + | Pmty_functor(_) => + let /parameters, returnType/ = ParsetreeViewer.functorType(modType) + let parametersDoc = switch parameters { + | list() => Doc.nil + | list(/attrs, {Location.txt: "_"}, Some(modType)/) => + let attrs = switch attrs { + | list() => Doc.nil + | attrs => + Doc.concat(list( + Doc.join(~sep=Doc.line, List.map(printAttribute, attrs)), + Doc.line, + )) + } + Doc.concat(list(attrs, printModType(modType))) + | params => + Doc.group( + Doc.concat(list( + Doc.lparen, + Doc.indent( + Doc.concat(list( + Doc.softLine, + Doc.join( + ~sep=Doc.concat(list(Doc.comma, Doc.line)), + List.map( + (/attrs, lbl, modType/) => { + let attrs = switch attrs { + | list() => Doc.nil + | attrs => + Doc.concat(list( + Doc.join( + ~sep=Doc.line, + List.map(printAttribute, attrs), + ), + Doc.line, + )) + } + Doc.concat(list( + attrs, + if lbl.Location.txt == "_" { + Doc.nil + } else { + Doc.text(lbl.txt) + }, + switch modType { + | None => Doc.nil + | Some(modType) => + Doc.concat(list( + if lbl.txt == "_" { + Doc.nil + } else { + Doc.text(": ") + }, + printModType(modType), + )) + }, + )) + }, + params, + ), + ), + )), + ), + Doc.trailingComma, + Doc.softLine, + Doc.rparen, + )), + ) + } + + let returnDoc = { + let doc = printModType(returnType) + if Parens.modTypeFunctorReturn(returnType) { + addParens(doc) + } else { + doc + } + } + + Doc.group( + Doc.concat(list( + parametersDoc, + Doc.group(Doc.concat(list(Doc.text(" =>"), Doc.line, returnDoc))), + )), + ) + | Pmty_typeof(modExpr) => + Doc.concat(list(Doc.text("module type of "), printModExpr(modExpr))) + | Pmty_extension(extension) => printExtension(extension) + | Pmty_alias({txt: longident}) => + Doc.concat(list(Doc.text("module "), printLongident(longident))) + | Pmty_with(modType, withConstraints) => + let operand = { + let doc = printModType(modType) + if Parens.modTypeWithOperand(modType) { + addParens(doc) + } else { + doc + } + } + + Doc.group( + Doc.concat(list( + operand, + Doc.indent( + Doc.concat(list(Doc.line, printWithConstraints(withConstraints))), + ), + )), + ) + } + + let attrsAlreadyPrinted = switch modType.pmty_desc { + | (Pmty_functor(_) | Pmty_signature(_)) | Pmty_ident(_) => true + | _ => false + } + Doc.concat(list( + if attrsAlreadyPrinted { + Doc.nil + } else { + printAttributes(modType.pmty_attributes) + }, + 
modTypeDoc, + )) + } + + and printWithConstraints = withConstraints => { + let rows = List.mapi( + (i, withConstraint) => + Doc.group( + Doc.concat(list( + if i === 0 { + Doc.text("with ") + } else { + Doc.text("and ") + }, + printWithConstraint(withConstraint), + )), + ), + withConstraints, + ) + + Doc.join(~sep=Doc.line, rows) + } + + and printWithConstraint = (withConstraint: Parsetree.with_constraint) => + switch withConstraint { + | Pwith_type({txt: longident}, typeDeclaration) => + Doc.group( + printTypeDeclaration( + ~name=printLongident(longident), + ~equalSign="=", + ~recFlag=Doc.nil, + 0, + typeDeclaration, + ), + ) + | Pwith_module({txt: longident1}, {txt: longident2}) => + Doc.concat(list( + Doc.text("module "), + printLongident(longident1), + Doc.text(" ="), + Doc.indent(Doc.concat(list(Doc.line, printLongident(longident2)))), + )) + | Pwith_typesubst({txt: longident}, typeDeclaration) => + Doc.group( + printTypeDeclaration( + ~name=printLongident(longident), + ~equalSign=":=", + ~recFlag=Doc.nil, + 0, + typeDeclaration, + ), + ) + | Pwith_modsubst({txt: longident1}, {txt: longident2}) => + Doc.concat(list( + Doc.text("module "), + printLongident(longident1), + Doc.text(" :="), + Doc.indent(Doc.concat(list(Doc.line, printLongident(longident2)))), + )) + } + + and printSignature = signature => + interleaveWhitespace( + List.map( + si => /si.Parsetree.psig_loc, printSignatureItem(si)/, + signature, + ), + ) + + and printSignatureItem = (si: Parsetree.signature_item) => + switch si.psig_desc { + | Parsetree.Psig_value(valueDescription) => + printValueDescription(valueDescription) + | Psig_type(recFlag, typeDeclarations) => + let recFlag = switch recFlag { + | Asttypes.Nonrecursive => Doc.nil + | Asttypes.Recursive => Doc.text("rec ") + } + + printTypeDeclarations(~recFlag, typeDeclarations) + | Psig_typext(typeExtension) => printTypeExtension(typeExtension) + | Psig_exception(extensionConstructor) => + printExceptionDef(extensionConstructor) + | Psig_module(moduleDeclaration) => + printModuleDeclaration(moduleDeclaration) + | Psig_recmodule(moduleDeclarations) => + printRecModuleDeclarations(moduleDeclarations) + | Psig_modtype(modTypeDecl) => printModuleTypeDeclaration(modTypeDecl) + | Psig_open(openDescription) => printOpenDescription(openDescription) + | Psig_include(includeDescription) => + printIncludeDescription(includeDescription) + | Psig_attribute(attr) => + Doc.concat(list(Doc.text("@"), printAttribute(attr))) + | Psig_extension(extension, attrs) => + Doc.concat(list( + printAttributes(attrs), + Doc.concat(list(Doc.text("%"), printExtension(extension))), + )) + | Psig_class(_) | Psig_class_type(_) => Doc.nil + } + + and printRecModuleDeclarations = moduleDeclarations => + Doc.group( + Doc.join( + ~sep=Doc.line, + List.mapi( + (i, md: Parsetree.module_declaration) => { + let body = switch md.pmd_type.pmty_desc { + | Parsetree.Pmty_alias({txt: longident}) => + Doc.concat(list(Doc.text(" = "), printLongident(longident))) + | _ => + let needsParens = switch md.pmd_type.pmty_desc { + | Pmty_with(_) => true + | _ => false + } + + let modTypeDoc = { + let doc = printModType(md.pmd_type) + if needsParens { + addParens(doc) + } else { + doc + } + } + + Doc.concat(list(Doc.text(": "), modTypeDoc)) + } + + let prefix = if i < 1 { + "module rec " + } else { + "and " + } + Doc.concat(list( + printAttributes(~loc=md.pmd_name.loc, md.pmd_attributes), + Doc.text(prefix), + Doc.text(md.pmd_name.txt), + body, + )) + }, + moduleDeclarations, + ), + ), + ) + + and printModuleDeclaration = 
(md: Parsetree.module_declaration) => { + let body = switch md.pmd_type.pmty_desc { + | Parsetree.Pmty_alias({txt: longident}) => + Doc.concat(list(Doc.text(" = "), printLongident(longident))) + | _ => Doc.concat(list(Doc.text(": "), printModType(md.pmd_type))) + } + + Doc.concat(list( + printAttributes(~loc=md.pmd_name.loc, md.pmd_attributes), + Doc.text("module "), + Doc.text(md.pmd_name.txt), + body, + )) + } + + and printOpenDescription = (openDescription: Parsetree.open_description) => + Doc.concat(list( + printAttributes(openDescription.popen_attributes), + Doc.text("open"), + switch openDescription.popen_override { + | Asttypes.Fresh => Doc.space + | Asttypes.Override => Doc.text("! ") + }, + printLongident(openDescription.popen_lid.txt), + )) + + and printIncludeDescription = ( + includeDescription: Parsetree.include_description, + ) => + Doc.concat(list( + printAttributes(includeDescription.pincl_attributes), + Doc.text("include "), + printModType(includeDescription.pincl_mod), + )) + + and printIncludeDeclaration = ( + includeDeclaration: Parsetree.include_declaration, + ) => + Doc.concat(list( + printAttributes(includeDeclaration.pincl_attributes), + Doc.text("include "), + printModExpr(includeDeclaration.pincl_mod), + )) + + and printValueBindings = (~recFlag, vbs: list) => { + let rows = List.mapi( + (i, vb) => { + let doc = printValueBinding(~recFlag, i, vb) + /vb.Parsetree.pvb_loc, doc/ + }, + vbs, + ) + + interleaveWhitespace(rows) + } + + and printValueDescription = valueDescription => { + let isExternal = switch valueDescription.pval_prim { + | list() => false + | _ => true + } + + Doc.group( + Doc.concat(list( + Doc.text( + if isExternal { + "external " + } else { + "let " + }, + ), + Doc.text(valueDescription.pval_name.txt), + Doc.text(": "), + printTypExpr(valueDescription.pval_type), + if isExternal { + Doc.group( + Doc.concat(list( + Doc.text(" ="), + Doc.indent( + Doc.concat(list( + Doc.line, + Doc.join( + ~sep=Doc.line, + List.map( + s => + Doc.concat(list( + Doc.text("\""), + Doc.text(s), + Doc.text("\""), + )), + valueDescription.pval_prim, + ), + ), + )), + ), + )), + ) + } else { + Doc.nil + }, + )), + ) + } + + and printTypeDeclarations = (~recFlag, typeDeclarations) => { + let rows = List.mapi( + (i, td) => { + let doc = printTypeDeclaration( + ~name=Doc.text(td.Parsetree.ptype_name.txt), + ~equalSign="=", + ~recFlag, + i, + td, + ) + + /td.Parsetree.ptype_loc, doc/ + }, + typeDeclarations, + ) + interleaveWhitespace(rows) + } + + and printTypeDeclaration = ( + ~name, + ~equalSign, + ~recFlag, + i, + td: Parsetree.type_declaration, + ) => { + let attrs = printAttributes(~loc=td.ptype_loc, td.ptype_attributes) + let prefix = if i > 0 { + Doc.text("and ") + } else { + Doc.concat(list(Doc.text("type "), recFlag)) + } + + let typeName = name + let typeParams = switch td.ptype_params { + | list() => Doc.nil + | typeParams => + Doc.group( + Doc.concat(list( + Doc.lessThan, + Doc.indent( + Doc.concat(list( + Doc.softLine, + Doc.join( + ~sep=Doc.concat(list(Doc.comma, Doc.line)), + List.map(printTypeParam, typeParams), + ), + )), + ), + Doc.trailingComma, + Doc.softLine, + Doc.greaterThan, + )), + ) + } + + let manifestAndKind = switch td.ptype_kind { + | Ptype_abstract => + switch td.ptype_manifest { + | None => Doc.nil + | Some(typ) => + Doc.concat(list( + Doc.concat(list(Doc.space, Doc.text(equalSign), Doc.space)), + printPrivateFlag(td.ptype_private), + printTypExpr(typ), + )) + } + | Ptype_open => + Doc.concat(list( + Doc.concat(list(Doc.space, 
Doc.text(equalSign), Doc.space)), + printPrivateFlag(td.ptype_private), + Doc.text(".."), + )) + | Ptype_record(lds) => + let manifest = switch td.ptype_manifest { + | None => Doc.nil + | Some(typ) => + Doc.concat(list( + Doc.concat(list(Doc.space, Doc.text(equalSign), Doc.space)), + printTypExpr(typ), + )) + } + + Doc.concat(list( + manifest, + Doc.concat(list(Doc.space, Doc.text(equalSign), Doc.space)), + printPrivateFlag(td.ptype_private), + printRecordDeclaration(lds), + )) + | Ptype_variant(cds) => + let manifest = switch td.ptype_manifest { + | None => Doc.nil + | Some(typ) => + Doc.concat(list( + Doc.concat(list(Doc.space, Doc.text(equalSign), Doc.space)), + printTypExpr(typ), + )) + } + + Doc.concat(list( + manifest, + Doc.concat(list(Doc.space, Doc.text(equalSign))), + printConstructorDeclarations(~privateFlag=td.ptype_private, cds), + )) + } + + let constraints = printTypeDefinitionConstraints(td.ptype_cstrs) + Doc.group( + Doc.concat(list( + attrs, + prefix, + typeName, + typeParams, + manifestAndKind, + constraints, + )), + ) + } + + and printTypeDefinitionConstraints = cstrs => + switch cstrs { + | list() => Doc.nil + | cstrs => + Doc.indent( + Doc.group( + Doc.concat(list( + Doc.line, + Doc.group( + Doc.join( + ~sep=Doc.line, + List.map(printTypeDefinitionConstraint, cstrs), + ), + ), + )), + ), + ) + } + + and printTypeDefinitionConstraint = ( + /typ1, typ2, _loc/: /Parsetree.core_type, Parsetree.core_type, Location.t/, + ) => + Doc.concat(list( + Doc.text("constraint "), + printTypExpr(typ1), + Doc.text(" = "), + printTypExpr(typ2), + )) + + and printPrivateFlag = (flag: Asttypes.private_flag) => + switch flag { + | Private => Doc.text("private ") + | Public => Doc.nil + } + + and printTypeParam = (param: /Parsetree.core_type, Asttypes.variance/) => { + let /typ, variance/ = param + let printedVariance = switch variance { + | Covariant => Doc.text("+") + | Contravariant => Doc.text("-") + | Invariant => Doc.nil + } + + Doc.concat(list(printedVariance, printTypExpr(typ))) + } + + and printRecordDeclaration = (lds: list) => { + let forceBreak = switch /lds, List.rev(lds)/ { + | /list(first, ..._), list(last, ..._)/ => + first.pld_loc.loc_start.pos_lnum < last.pld_loc.loc_end.pos_lnum + | _ => false + } + + Doc.breakableGroup( + ~forceBreak, + Doc.concat(list( + Doc.lbrace, + Doc.indent( + Doc.concat(list( + Doc.softLine, + Doc.join( + ~sep=Doc.concat(list(Doc.comma, Doc.line)), + List.map(printLabelDeclaration, lds), + ), + )), + ), + Doc.trailingComma, + Doc.softLine, + Doc.rbrace, + )), + ) + } + + and printConstructorDeclarations = ( + ~privateFlag, + cds: list, + ) => { + let forceBreak = switch /cds, List.rev(cds)/ { + | /list(first, ..._), list(last, ..._)/ => + first.pcd_loc.loc_start.pos_lnum < last.pcd_loc.loc_end.pos_lnum + | _ => false + } + + let privateFlag = switch privateFlag { + | Asttypes.Private => Doc.concat(list(Doc.text("private"), Doc.line)) + | Public => Doc.nil + } + + Doc.breakableGroup( + ~forceBreak, + Doc.indent( + Doc.concat(list( + Doc.line, + privateFlag, + Doc.join(~sep=Doc.line, List.mapi(printConstructorDeclaration, cds)), + )), + ), + ) + } + + and printConstructorDeclaration = ( + i, + cd: Parsetree.constructor_declaration, + ) => { + let attrs = printAttributes(cd.pcd_attributes) + let bar = if i > 0 { + Doc.text("| ") + } else { + Doc.ifBreaks(Doc.text("| "), Doc.nil) + } + + let constrName = Doc.text(cd.pcd_name.txt) + let constrArgs = printConstructorArguments(cd.pcd_args) + let gadt = switch cd.pcd_res { + | None => Doc.nil + | 
Some(typ) => + Doc.indent(Doc.concat(list(Doc.text(": "), printTypExpr(typ)))) + } + + Doc.concat(list( + bar, + Doc.group(Doc.concat(list(attrs, constrName, constrArgs, gadt))), + )) + } + + and printConstructorArguments = (cdArgs: Parsetree.constructor_arguments) => + switch cdArgs { + | Pcstr_tuple(list()) => Doc.nil + | Pcstr_tuple(types) => + Doc.group( + Doc.indent( + Doc.concat(list( + Doc.lparen, + Doc.indent( + Doc.concat(list( + Doc.softLine, + Doc.join( + ~sep=Doc.concat(list(Doc.comma, Doc.line)), + List.map(printTypExpr, types), + ), + )), + ), + Doc.trailingComma, + Doc.softLine, + Doc.rparen, + )), + ), + ) + | Pcstr_record(lds) => + Doc.indent( + Doc.concat(list( + Doc.lparen, + Doc.lbrace, + Doc.indent( + Doc.concat(list( + Doc.softLine, + Doc.join( + ~sep=Doc.concat(list(Doc.comma, Doc.line)), + List.map(printLabelDeclaration, lds), + ), + )), + ), + Doc.trailingComma, + Doc.softLine, + Doc.rbrace, + Doc.rparen, + )), + ) + } + + and printLabelDeclaration = (ld: Parsetree.label_declaration) => { + let attrs = printAttributes(~loc=ld.pld_name.loc, ld.pld_attributes) + let mutableFlag = switch ld.pld_mutable { + | Mutable => Doc.text("mutable ") + | Immutable => Doc.nil + } + + let name = Doc.text(ld.pld_name.txt) + Doc.group( + Doc.concat(list( + attrs, + mutableFlag, + name, + Doc.text(": "), + printTypExpr(ld.pld_type), + )), + ) + } + + and printTypExpr = (typExpr: Parsetree.core_type) => { + let renderedType = switch typExpr.ptyp_desc { + | Ptyp_any => Doc.text("_") + | Ptyp_var(var) => Doc.text("'" ++ var) + | Ptyp_extension(extension) => printExtension(extension) + | Ptyp_alias(typ, alias) => + let typ = { + let needsParens = switch typ.ptyp_desc { + | Ptyp_arrow(_) => true + | _ => false + } + + let doc = printTypExpr(typ) + if needsParens { + Doc.concat(list(Doc.lparen, doc, Doc.rparen)) + } else { + doc + } + } + + Doc.concat(list(typ, Doc.text(" as "), Doc.text("'" ++ alias))) + | Ptyp_constr( + {txt: Longident.Ldot(Longident.Lident("Js"), "t")}, + list(typ), + ) => + let bsObject = printTypExpr(typ) + switch typExpr.ptyp_attributes { + | list() => bsObject + | attrs => + Doc.concat(list( + Doc.group(Doc.join(~sep=Doc.line, List.map(printAttribute, attrs))), + Doc.space, + printTypExpr(typ), + )) + } + | Ptyp_constr( + longidentLoc, + list({ptyp_desc: Parsetree.Ptyp_tuple(tuple)}), + ) => + let constrName = printLongident(longidentLoc.txt) + Doc.group( + Doc.concat(list( + constrName, + Doc.lessThan, + printTupleType(~inline=true, tuple), + Doc.greaterThan, + )), + ) + | Ptyp_constr(longidentLoc, constrArgs) => + let constrName = printLongident(longidentLoc.txt) + switch constrArgs { + | list() => constrName + | list({ + Parsetree.ptyp_desc: + Ptyp_constr( + {txt: Longident.Ldot(Longident.Lident("Js"), "t")}, + list({ptyp_desc: Ptyp_object(fields, openFlag)}), + ), + }) => + Doc.concat(list( + constrName, + Doc.lessThan, + printBsObjectSugar(~inline=true, fields, openFlag), + Doc.greaterThan, + )) + | args => + Doc.group( + Doc.concat(list( + constrName, + Doc.lessThan, + Doc.indent( + Doc.concat(list( + Doc.softLine, + Doc.join( + ~sep=Doc.concat(list(Doc.comma, Doc.line)), + List.map(printTypExpr, constrArgs), + ), + )), + ), + Doc.trailingComma, + Doc.softLine, + Doc.greaterThan, + )), + ) + } + | Ptyp_arrow(_) => + let /attrsBefore, args, returnType/ = ParsetreeViewer.arrowType(typExpr) + let returnTypeNeedsParens = switch returnType.ptyp_desc { + | Ptyp_alias(_) => true + | _ => false + } + + let returnDoc = { + let doc = printTypExpr(returnType) + if 
returnTypeNeedsParens { + Doc.concat(list(Doc.lparen, doc, Doc.rparen)) + } else { + doc + } + } + + let /isUncurried, attrs/ = ParsetreeViewer.processUncurriedAttribute( + attrsBefore, + ) + switch args { + | list() => Doc.nil + | list(/list(), Nolabel, n/) when !isUncurried => + let hasAttrsBefore = !(attrs == list()) + let attrs = if hasAttrsBefore { + Doc.concat(list( + Doc.join(~sep=Doc.line, List.map(printAttribute, attrsBefore)), + Doc.space, + )) + } else { + Doc.nil + } + + Doc.group( + Doc.concat(list( + Doc.group(attrs), + Doc.group( + if hasAttrsBefore { + Doc.concat(list( + Doc.lparen, + Doc.indent( + Doc.concat(list( + Doc.softLine, + printTypExpr(n), + Doc.text(" => "), + returnDoc, + )), + ), + Doc.softLine, + Doc.rparen, + )) + } else { + Doc.concat(list(printTypExpr(n), Doc.text(" => "), returnDoc)) + }, + ), + )), + ) + | args => + let attrs = switch attrs { + | list() => Doc.nil + | attrs => + Doc.concat(list( + Doc.join(~sep=Doc.line, List.map(printAttribute, attrs)), + Doc.space, + )) + } + + let renderedArgs = Doc.concat(list( + attrs, + Doc.text("("), + Doc.indent( + Doc.concat(list( + Doc.softLine, + if isUncurried { + Doc.concat(list(Doc.dot, Doc.space)) + } else { + Doc.nil + }, + Doc.join( + ~sep=Doc.concat(list(Doc.comma, Doc.line)), + List.map(printTypeParameter, args), + ), + )), + ), + Doc.trailingComma, + Doc.softLine, + Doc.text(")"), + )) + Doc.group(Doc.concat(list(renderedArgs, Doc.text(" => "), returnDoc))) + } + | Ptyp_tuple(types) => printTupleType(~inline=false, types) + | Ptyp_object(fields, openFlag) => + printBsObjectSugar(~inline=false, fields, openFlag) + | Ptyp_poly(stringLocs, typ) => + Doc.concat(list( + Doc.join( + ~sep=Doc.space, + List.map(({Location.txt: txt}) => Doc.text("'" ++ txt), stringLocs), + ), + Doc.dot, + Doc.space, + printTypExpr(typ), + )) + | Ptyp_package(packageType) => + printPackageType(~printModuleKeywordAndParens=true, packageType) + | Ptyp_class(_) => failwith("classes are not supported in types") + | Ptyp_variant(_) => + failwith("Polymorphic variants currently not supported") + } + + let shouldPrintItsOwnAttributes = switch typExpr.ptyp_desc { + | Ptyp_arrow(_) + | Ptyp_constr({txt: Longident.Ldot(Longident.Lident("Js"), "t")}, _) => + true + | _ => false + } + + switch typExpr.ptyp_attributes { + | list(_, ..._) as attrs when !shouldPrintItsOwnAttributes => + Doc.group(Doc.concat(list(printAttributes(attrs), renderedType))) + | _ => renderedType + } + } + + and printBsObjectSugar = (~inline, fields, openFlag) => { + let flag = switch openFlag { + | Asttypes.Closed => Doc.nil + | Open => Doc.dotdot + } + + let doc = Doc.concat(list( + Doc.lbrace, + flag, + Doc.indent( + Doc.concat(list( + Doc.softLine, + Doc.join( + ~sep=Doc.concat(list(Doc.comma, Doc.line)), + List.map(printObjectField, fields), + ), + )), + ), + Doc.trailingComma, + Doc.softLine, + Doc.rbrace, + )) + if inline { + doc + } else { + Doc.group(doc) + } + } + + and printTupleType = (~inline, types: list) => { + let tuple = Doc.concat(list( + Doc.text("/"), + Doc.indent( + Doc.concat(list( + Doc.softLine, + Doc.join( + ~sep=Doc.concat(list(Doc.comma, Doc.line)), + List.map(printTypExpr, types), + ), + )), + ), + Doc.softLine, + Doc.text("/"), + )) + + if inline === false { + Doc.group(tuple) + } else { + tuple + } + } + + and printObjectField = (field: Parsetree.object_field) => + switch field { + | Otag(labelLoc, attrs, typ) => + Doc.concat(list( + Doc.text("\"" ++ labelLoc.txt ++ "\""), + Doc.text(": "), + printTypExpr(typ), + )) + | _ => Doc.nil + } 
+ + and printTypeParameter = (/attrs, lbl, typ/) => { + let /isUncurried, attrs/ = ParsetreeViewer.processUncurriedAttribute(attrs) + let uncurried = if isUncurried { + Doc.concat(list(Doc.dot, Doc.space)) + } else { + Doc.nil + } + let attrs = switch attrs { + | list() => Doc.nil + | attrs => + Doc.concat(list( + Doc.join(~sep=Doc.line, List.map(printAttribute, attrs)), + Doc.line, + )) + } + let label = switch lbl { + | Asttypes.Nolabel => Doc.nil + | Labelled(lbl) => Doc.text("~" ++ lbl ++ ": ") + | Optional(lbl) => Doc.text("~" ++ lbl ++ ": ") + } + + let optionalIndicator = switch lbl { + | Asttypes.Nolabel | Labelled(_) => Doc.nil + | Optional(lbl) => Doc.text("=?") + } + + Doc.group( + Doc.concat(list( + uncurried, + attrs, + label, + printTypExpr(typ), + optionalIndicator, + )), + ) + } + + and printValueBinding = (~recFlag, i, vb) => { + let isGhost = ParsetreeViewer.isGhostUnitBinding(i, vb) + let header = if isGhost { + Doc.nil + } else if i === 0 { + Doc.concat(list(Doc.text("let "), recFlag)) + } else { + Doc.text("and ") + } + + let printedExpr = { + let exprDoc = printExpression(vb.pvb_expr) + let needsParens = switch vb.pvb_expr.pexp_desc { + | Pexp_constraint( + {pexp_desc: Pexp_pack(_)}, + {ptyp_desc: Ptyp_package(_)}, + ) => + false + | Pexp_constraint(_) => true + | _ => false + } + + if needsParens { + addParens(exprDoc) + } else { + exprDoc + } + } + + if isGhost { + printedExpr + } else { + let shouldIndent = + ParsetreeViewer.isBinaryExpression(vb.pvb_expr) || + switch vb.pvb_expr { + | { + pexp_attributes: list(/{Location.txt: "ns.ternary"}, _/), + pexp_desc: Pexp_ifthenelse(ifExpr, _, _), + } => + ParsetreeViewer.isBinaryExpression(ifExpr) || + ParsetreeViewer.hasAttributes(ifExpr.pexp_attributes) + | {pexp_desc: Pexp_newtype(_)} => false + | e => + ParsetreeViewer.hasAttributes(e.pexp_attributes) || + ParsetreeViewer.isArrayAccess(e) + } + + Doc.concat(list( + printAttributes(~loc=vb.pvb_loc, vb.pvb_attributes), + header, + printPattern(vb.pvb_pat), + Doc.text(" ="), + if shouldIndent { + Doc.indent(Doc.concat(list(Doc.line, printedExpr))) + } else { + Doc.concat(list(Doc.space, printedExpr)) + }, + )) + } + } + + and printPackageType = ( + ~printModuleKeywordAndParens, + packageType: Parsetree.package_type, + ) => { + let doc = switch packageType { + | /longidentLoc, list()/ => + Doc.group(Doc.concat(list(printLongident(longidentLoc.txt)))) + | /longidentLoc, packageConstraints/ => + Doc.group( + Doc.concat(list( + printLongident(longidentLoc.txt), + printPackageConstraints(packageConstraints), + Doc.softLine, + )), + ) + } + + if printModuleKeywordAndParens { + Doc.concat(list(Doc.text("module("), doc, Doc.rparen)) + } else { + doc + } + } + + and printPackageConstraints = packageConstraints => + Doc.concat(list( + Doc.text(" with"), + Doc.indent( + Doc.concat(list( + Doc.line, + Doc.join( + ~sep=Doc.line, + List.mapi(printPackageconstraint, packageConstraints), + ), + )), + ), + )) + + and printPackageconstraint = (i, /longidentLoc, typ/) => { + let prefix = if i === 0 { + Doc.text("type ") + } else { + Doc.text("and type ") + } + Doc.concat(list( + prefix, + printLongident(longidentLoc.Location.txt), + Doc.text(" = "), + printTypExpr(typ), + )) + } + + and printExtension = (/stringLoc, payload/) => { + let extName = Doc.text("%" ++ stringLoc.Location.txt) + switch payload { + | PStr(list({pstr_desc: Pstr_eval(expr, attrs)})) => + let exprDoc = printExpression(expr) + let needsParens = switch attrs { + | list() => false + | _ => true + } + Doc.group( + 
Doc.concat(list( + extName, + addParens( + Doc.concat(list( + printAttributes(attrs), + if needsParens { + addParens(exprDoc) + } else { + exprDoc + }, + )), + ), + )), + ) + | _ => extName + } + } + + and printPattern = (p: Parsetree.pattern) => { + let patternWithoutAttributes = switch p.ppat_desc { + | Ppat_any => Doc.text("_") + | Ppat_var(stringLoc) => Doc.text(stringLoc.txt) + | Ppat_constant(c) => printConstant(c) + | Ppat_tuple(patterns) => + Doc.group( + Doc.concat(list( + Doc.text("/"), + Doc.indent( + Doc.concat(list( + Doc.softLine, + Doc.join( + ~sep=Doc.concat(list(Doc.text(","), Doc.line)), + List.map(printPattern, patterns), + ), + )), + ), + Doc.softLine, + Doc.text("/"), + )), + ) + | Ppat_array(patterns) => + Doc.group( + Doc.concat(list( + Doc.text("["), + Doc.indent( + Doc.concat(list( + Doc.softLine, + Doc.join( + ~sep=Doc.concat(list(Doc.text(","), Doc.line)), + List.map(printPattern, patterns), + ), + )), + ), + Doc.ifBreaks(Doc.text(","), Doc.nil), + Doc.softLine, + Doc.text("]"), + )), + ) + | Ppat_construct({txt: Longident.Lident("[]")}, _) => Doc.text("list()") + | Ppat_construct({txt: Longident.Lident("::")}, _) => + let /patterns, tail/ = collectPatternsFromListConstruct(list(), p) + let shouldHug = switch /patterns, tail/ { + | / + list(pat), + {ppat_desc: Ppat_construct({txt: Longident.Lident("[]")}, _)} + / when ParsetreeViewer.isHuggablePattern(pat) => + true + | _ => false + } + + let children = Doc.concat(list( + if shouldHug { + Doc.nil + } else { + Doc.softLine + }, + Doc.join( + ~sep=Doc.concat(list(Doc.text(","), Doc.line)), + List.map(printPattern, patterns), + ), + switch tail.Parsetree.ppat_desc { + | Ppat_construct({txt: Longident.Lident("[]")}, _) => Doc.nil + | _ => + Doc.concat(list( + Doc.text(","), + Doc.line, + Doc.text("..."), + printPattern(tail), + )) + }, + )) + Doc.group( + Doc.concat(list( + Doc.text("list("), + if shouldHug { + children + } else { + Doc.concat(list( + Doc.indent(children), + Doc.ifBreaks(Doc.text(","), Doc.nil), + Doc.softLine, + )) + }, + Doc.text(")"), + )), + ) + | Ppat_construct(constrName, constructorArgs) => + let constrName = printLongident(constrName.txt) + switch constructorArgs { + | None => constrName + | Some(args) => + let args = switch args.ppat_desc { + | Ppat_construct({txt: Longident.Lident("()")}, None) => list(Doc.nil) + | Ppat_tuple(patterns) => List.map(printPattern, patterns) + | _ => list(printPattern(args)) + } + + Doc.group( + Doc.concat(list( + constrName, + Doc.text("("), + Doc.indent( + Doc.concat(list( + Doc.softLine, + Doc.join(~sep=Doc.concat(list(Doc.text(","), Doc.line)), args), + )), + ), + Doc.ifBreaks(Doc.text(","), Doc.nil), + Doc.softLine, + Doc.text(")"), + )), + ) + } + | Ppat_record(rows, openFlag) => + Doc.group( + Doc.concat(list( + Doc.text("{"), + Doc.indent( + Doc.concat(list( + Doc.softLine, + Doc.join( + ~sep=Doc.concat(list(Doc.text(","), Doc.line)), + List.map(printPatternRecordRow, rows), + ), + switch openFlag { + | Open => Doc.concat(list(Doc.text(","), Doc.line, Doc.text("_"))) + | Closed => Doc.nil + }, + )), + ), + Doc.ifBreaks(Doc.text(","), Doc.nil), + Doc.softLine, + Doc.text("}"), + )), + ) + | Ppat_exception(p) => + let needsParens = switch p.ppat_desc { + | Ppat_or(_, _) | Ppat_alias(_, _) => true + | _ => false + } + + let pat = { + let p = printPattern(p) + if needsParens { + Doc.concat(list(Doc.text("("), p, Doc.text(")"))) + } else { + p + } + } + + Doc.group(Doc.concat(list(Doc.text("exception"), Doc.line, pat))) + | Ppat_or(p1, p2) => + let p1 = { + 
let p = printPattern(p1) + switch p1.ppat_desc { + | Ppat_or(_, _) => Doc.concat(list(Doc.text("("), p, Doc.text(")"))) + | _ => p + } + } + + let p2 = { + let p = printPattern(p2) + switch p2.ppat_desc { + | Ppat_or(_, _) => Doc.concat(list(Doc.text("("), p, Doc.text(")"))) + | _ => p + } + } + + Doc.group(Doc.concat(list(p1, Doc.line, Doc.text("| "), p2))) + | Ppat_extension(ext) => printExtension(ext) + | Ppat_lazy(p) => + let needsParens = switch p.ppat_desc { + | Ppat_or(_, _) | Ppat_alias(_, _) => true + | _ => false + } + + let pat = { + let p = printPattern(p) + if needsParens { + Doc.concat(list(Doc.text("("), p, Doc.text(")"))) + } else { + p + } + } + + Doc.concat(list(Doc.text("lazy "), pat)) + | Ppat_alias(p, aliasLoc) => + let needsParens = switch p.ppat_desc { + | Ppat_or(_, _) | Ppat_alias(_, _) => true + | _ => false + } + + let renderedPattern = { + let p = printPattern(p) + if needsParens { + Doc.concat(list(Doc.text("("), p, Doc.text(")"))) + } else { + p + } + } + + Doc.concat(list( + renderedPattern, + Doc.text(" as "), + Doc.text(aliasLoc.txt), + )) + | Ppat_constraint( + {ppat_desc: Ppat_unpack(stringLoc)}, + {ptyp_desc: Ptyp_package(packageType)}, + ) => + Doc.concat(list( + Doc.text("module("), + Doc.text(stringLoc.txt), + Doc.text(": "), + printPackageType(~printModuleKeywordAndParens=false, packageType), + Doc.rparen, + )) + | Ppat_constraint(pattern, typ) => + Doc.concat(list(printPattern(pattern), Doc.text(": "), printTypExpr(typ))) + | Ppat_unpack(stringLoc) => + Doc.concat(list(Doc.text("module("), Doc.text(stringLoc.txt), Doc.rparen)) + | _ => failwith("unsupported pattern") + } + + switch p.ppat_attributes { + | list() => patternWithoutAttributes + | attrs => + Doc.group( + Doc.concat(list(printAttributes(attrs), patternWithoutAttributes)), + ) + } + } + + and printPatternRecordRow = row => + switch row { + | / + {Location.txt: Longident.Lident(ident)}, + {Parsetree.ppat_desc: Ppat_var({txt, _})} + / when ident == txt => + Doc.text(ident) + | /longident, pattern/ => + Doc.group( + Doc.concat(list( + printLongident(longident.txt), + Doc.text(": "), + Doc.indent(Doc.concat(list(Doc.softLine, printPattern(pattern)))), + )), + ) + } + + and printExpression = (e: Parsetree.expression) => { + let printedExpression = switch e.pexp_desc { + | Parsetree.Pexp_constant(c) => printConstant(c) + | Pexp_construct(_) + when ParsetreeViewer.hasJsxAttribute(e.pexp_attributes) => + printJsxFragment(e) + | Pexp_construct({txt: Longident.Lident("()")}, _) => Doc.text("()") + | Pexp_construct({txt: Longident.Lident("[]")}, _) => Doc.text("list()") + | Pexp_construct({txt: Longident.Lident("::")}, _) => + let /expressions, spread/ = ParsetreeViewer.collectListExpressions(e) + let spreadDoc = switch spread { + | Some(expr) => + Doc.concat(list( + Doc.text(","), + Doc.line, + Doc.dotdotdot, + printExpression(expr), + )) + | None => Doc.nil + } + + Doc.group( + Doc.concat(list( + Doc.text("list("), + Doc.indent( + Doc.concat(list( + Doc.softLine, + Doc.join( + ~sep=Doc.concat(list(Doc.text(","), Doc.line)), + List.map(printExpression, expressions), + ), + spreadDoc, + )), + ), + Doc.trailingComma, + Doc.softLine, + Doc.rparen, + )), + ) + | Pexp_construct(longidentLoc, args) => + let constr = printLongident(longidentLoc.txt) + let args = switch args { + | None => Doc.nil + | Some({pexp_desc: Pexp_construct({txt: Longident.Lident("()")}, _)}) => + Doc.text("()") + | Some({pexp_desc: Pexp_tuple(args)}) => + Doc.concat(list( + Doc.lparen, + Doc.indent( + Doc.concat(list( + 
Doc.softLine, + Doc.join( + ~sep=Doc.concat(list(Doc.comma, Doc.line)), + List.map(printExpression, args), + ), + )), + ), + Doc.trailingComma, + Doc.softLine, + Doc.rparen, + )) + | Some(arg) => + let argDoc = printExpression(arg) + let shouldHug = ParsetreeViewer.isHuggableExpression(arg) + Doc.concat(list( + Doc.lparen, + if shouldHug { + argDoc + } else { + Doc.concat(list( + Doc.indent(Doc.concat(list(Doc.softLine, argDoc))), + Doc.trailingComma, + Doc.softLine, + )) + }, + Doc.rparen, + )) + } + + Doc.group(Doc.concat(list(constr, args))) + | Pexp_ident(longidentLoc) => printLongident(longidentLoc.txt) + | Pexp_tuple(exprs) => + Doc.group( + Doc.concat(list( + Doc.text("/"), + Doc.indent( + Doc.concat(list( + Doc.softLine, + Doc.join( + ~sep=Doc.concat(list(Doc.text(","), Doc.line)), + List.map(printExpression, exprs), + ), + )), + ), + Doc.ifBreaks(Doc.text(","), Doc.nil), + Doc.softLine, + Doc.text("/"), + )), + ) + | Pexp_array(list()) => Doc.text("[]") + | Pexp_array(exprs) => + Doc.group( + Doc.concat(list( + Doc.lbracket, + Doc.indent( + Doc.concat(list( + Doc.softLine, + Doc.join( + ~sep=Doc.concat(list(Doc.text(","), Doc.line)), + List.map(printExpression, exprs), + ), + )), + ), + Doc.trailingComma, + Doc.softLine, + Doc.rbracket, + )), + ) + | Pexp_record(rows, spreadExpr) => + let spread = switch spreadExpr { + | None => Doc.nil + | Some(expr) => + Doc.concat(list( + Doc.dotdotdot, + printExpression(expr), + Doc.comma, + Doc.line, + )) + } + + let forceBreak = + e.pexp_loc.loc_start.pos_lnum < e.pexp_loc.loc_end.pos_lnum + + Doc.breakableGroup( + ~forceBreak, + Doc.concat(list( + Doc.lbrace, + Doc.indent( + Doc.concat(list( + Doc.softLine, + spread, + Doc.join( + ~sep=Doc.concat(list(Doc.text(","), Doc.line)), + List.map(printRecordRow, rows), + ), + )), + ), + Doc.trailingComma, + Doc.softLine, + Doc.rbrace, + )), + ) + | Pexp_extension(extension) => + switch extension { + | / + {txt: "bs.obj"}, + PStr( + list({ + pstr_loc: loc, + pstr_desc: Pstr_eval({pexp_desc: Pexp_record(rows, _)}, list()), + }), + ) + / => + let forceBreak = loc.loc_start.pos_lnum < loc.loc_end.pos_lnum + + Doc.breakableGroup( + ~forceBreak, + Doc.concat(list( + Doc.lbrace, + Doc.indent( + Doc.concat(list( + Doc.softLine, + Doc.join( + ~sep=Doc.concat(list(Doc.text(","), Doc.line)), + List.map(printBsObjectRow, rows), + ), + )), + ), + Doc.trailingComma, + Doc.softLine, + Doc.rbrace, + )), + ) + | extension => printExtension(extension) + } + | Pexp_apply(_) => + if ParsetreeViewer.isUnaryExpression(e) { + printUnaryExpression(e) + } else if ParsetreeViewer.isBinaryExpression(e) { + printBinaryExpression(e) + } else { + printPexpApply(e) + } + | Pexp_unreachable => Doc.dot + | Pexp_field(expr, longidentLoc) => + let lhs = { + let doc = printExpression(expr) + if Parens.fieldExpr(expr) { + addParens(doc) + } else { + doc + } + } + + Doc.concat(list(lhs, Doc.dot, printLongident(longidentLoc.txt))) + | Pexp_setfield(expr1, longidentLoc, expr2) => + printSetFieldExpr(e.pexp_attributes, expr1, longidentLoc, expr2) + | Pexp_ifthenelse(ifExpr, thenExpr, elseExpr) => + if ParsetreeViewer.isTernaryExpr(e) { + let /parts, alternate/ = ParsetreeViewer.collectTernaryParts(e) + let ternaryDoc = switch parts { + | list(/condition1, consequent1/, ...rest) => + Doc.group( + Doc.concat(list( + printTernaryOperand(condition1), + Doc.indent( + Doc.concat(list( + Doc.line, + Doc.indent( + Doc.concat(list( + Doc.text("? 
"), + printTernaryOperand(consequent1), + )), + ), + Doc.concat( + List.map( + (/condition, consequent/) => + Doc.concat(list( + Doc.line, + Doc.text(": "), + printTernaryOperand(condition), + Doc.line, + Doc.text("? "), + printTernaryOperand(consequent), + )), + rest, + ), + ), + Doc.line, + Doc.text(": "), + Doc.indent(printTernaryOperand(alternate)), + )), + ), + )), + ) + | _ => Doc.nil + } + + let attrs = ParsetreeViewer.filterTernaryAttributes(e.pexp_attributes) + let needsParens = switch attrs { + | list() => false + | _ => true + } + Doc.concat(list( + printAttributes(attrs), + if needsParens { + addParens(ternaryDoc) + } else { + ternaryDoc + }, + )) + } else { + let /ifs, elseExpr/ = ParsetreeViewer.collectIfExpressions(e) + let ifDocs = Doc.join( + ~sep=Doc.space, + List.mapi( + (i, /ifExpr, thenExpr/) => { + let ifTxt = if i > 0 { + Doc.text("else if ") + } else { + Doc.text("if ") + } + let condition = printExpression(ifExpr) + Doc.concat(list( + ifTxt, + Doc.group(Doc.ifBreaks(addParens(condition), condition)), + Doc.space, + printExpressionBlock(~braces=true, thenExpr), + )) + }, + ifs, + ), + ) + let elseDoc = switch elseExpr { + | None => Doc.nil + | Some(expr) => + Doc.concat(list( + Doc.text(" else "), + printExpressionBlock(~braces=true, expr), + )) + } + + Doc.concat(list(printAttributes(e.pexp_attributes), ifDocs, elseDoc)) + } + | Pexp_while(expr1, expr2) => + let condition = printExpression(expr1) + Doc.breakableGroup( + ~forceBreak=true, + Doc.concat(list( + Doc.text("while "), + Doc.group(Doc.ifBreaks(addParens(condition), condition)), + Doc.space, + printExpressionBlock(~braces=true, expr2), + )), + ) + | Pexp_for(pattern, fromExpr, toExpr, directionFlag, body) => + Doc.breakableGroup( + ~forceBreak=true, + Doc.concat(list( + Doc.text("for "), + printPattern(pattern), + Doc.text(" in "), + printExpression(fromExpr), + printDirectionFlag(directionFlag), + printExpression(toExpr), + Doc.space, + printExpressionBlock(~braces=true, body), + )), + ) + | Pexp_constraint( + {pexp_desc: Pexp_pack(modExpr)}, + {ptyp_desc: Ptyp_package(packageType)}, + ) => + Doc.group( + Doc.concat(list( + Doc.text("module("), + Doc.indent( + Doc.concat(list( + Doc.softLine, + printModExpr(modExpr), + Doc.text(": "), + printPackageType(~printModuleKeywordAndParens=false, packageType), + )), + ), + Doc.softLine, + Doc.rparen, + )), + ) + | Pexp_constraint(expr, typ) => + Doc.concat(list(printExpression(expr), Doc.text(": "), printTypExpr(typ))) + | Pexp_letmodule({txt: modName}, modExpr, expr) => + printExpressionBlock(~braces=true, e) + | Pexp_letexception(extensionConstructor, expr) => + printExpressionBlock(~braces=true, e) + | Pexp_assert(expr) => + let rhs = { + let doc = printExpression(expr) + if Parens.lazyOrAssertExprRhs(expr) { + addParens(doc) + } else { + doc + } + } + + Doc.concat(list(Doc.text("assert "), rhs)) + | Pexp_lazy(expr) => + let rhs = { + let doc = printExpression(expr) + if Parens.lazyOrAssertExprRhs(expr) { + addParens(doc) + } else { + doc + } + } + + Doc.concat(list(Doc.text("lazy "), rhs)) + | Pexp_open(overrideFlag, longidentLoc, expr) => + printExpressionBlock(~braces=true, e) + | Pexp_pack(modExpr) => + Doc.group( + Doc.concat(list( + Doc.text("module("), + Doc.indent(Doc.concat(list(Doc.softLine, printModExpr(modExpr)))), + Doc.softLine, + Doc.rparen, + )), + ) + | Pexp_sequence(_) => printExpressionBlock(~braces=true, e) + | Pexp_let(_) => printExpressionBlock(~braces=true, e) + | Pexp_fun(_) | Pexp_newtype(_) => + let /attrsOnArrow, parameters, 
returnExpr/ = ParsetreeViewer.funExpr(e) + let /uncurried, attrs/ = ParsetreeViewer.processUncurriedAttribute( + attrsOnArrow, + ) + + let /returnExpr, typConstraint/ = switch returnExpr.pexp_desc { + | Pexp_constraint(expr, typ) => /expr, Some(typ)/ + | _ => /returnExpr, None/ + } + + let parametersDoc = printExprFunParameters( + ~inCallback=false, + ~uncurried, + parameters, + ) + let returnExprDoc = { + let shouldInline = switch returnExpr.pexp_desc { + | ((Pexp_array(_) | Pexp_tuple(_)) | Pexp_construct(_, Some(_))) + | Pexp_record(_) => + true + | _ => false + } + + let shouldIndent = switch returnExpr.pexp_desc { + | ((Pexp_sequence(_) | Pexp_let(_)) | Pexp_letmodule(_)) + | Pexp_letexception(_) => + false + | _ => true + } + + let returnDoc = printExpression(returnExpr) + if shouldInline { + Doc.concat(list(Doc.space, returnDoc)) + } else { + Doc.group( + if shouldIndent { + Doc.indent(Doc.concat(list(Doc.line, returnDoc))) + } else { + Doc.concat(list(Doc.space, returnDoc)) + }, + ) + } + } + + let typConstraintDoc = switch typConstraint { + | Some(typ) => Doc.concat(list(Doc.text(": "), printTypExpr(typ))) + | _ => Doc.nil + } + + let attrs = switch attrs { + | list() => Doc.nil + | attrs => + Doc.concat(list( + Doc.join(~sep=Doc.line, List.map(printAttribute, attrs)), + Doc.space, + )) + } + + Doc.group( + Doc.concat(list( + attrs, + parametersDoc, + typConstraintDoc, + Doc.text(" =>"), + returnExprDoc, + )), + ) + | Pexp_try(expr, cases) => + Doc.concat(list( + Doc.text("try "), + printExpression(expr), + Doc.text(" catch "), + printCases(cases), + )) + | Pexp_match(expr, cases) => + Doc.concat(list( + Doc.text("switch "), + printExpression(expr), + Doc.space, + printCases(cases), + )) + | _ => failwith("expression not yet implemented in printer") + } + + let shouldPrintItsOwnAttributes = switch e.pexp_desc { + | (((Pexp_apply(_) | Pexp_fun(_)) | Pexp_newtype(_)) | Pexp_setfield(_)) + | Pexp_ifthenelse(_) => + true + | Pexp_construct(_) + when ParsetreeViewer.hasJsxAttribute(e.pexp_attributes) => + true + | _ => false + } + + switch e.pexp_attributes { + | list() => printedExpression + | attrs when !shouldPrintItsOwnAttributes => + Doc.group(Doc.concat(list(printAttributes(attrs), printedExpression))) + | _ => printedExpression + } + } + + and printPexpFun = (~inCallback, e) => { + let /attrsOnArrow, parameters, returnExpr/ = ParsetreeViewer.funExpr(e) + let /uncurried, attrs/ = ParsetreeViewer.processUncurriedAttribute( + attrsOnArrow, + ) + + let /returnExpr, typConstraint/ = switch returnExpr.pexp_desc { + | Pexp_constraint(expr, typ) => /expr, Some(typ)/ + | _ => /returnExpr, None/ + } + + let parametersDoc = printExprFunParameters( + ~inCallback, + ~uncurried, + parameters, + ) + let returnShouldIndent = switch returnExpr.pexp_desc { + | ((Pexp_sequence(_) | Pexp_let(_)) | Pexp_letmodule(_)) + | Pexp_letexception(_) => + false + | _ => true + } + + let returnExprDoc = { + let shouldInline = switch returnExpr.pexp_desc { + | ((Pexp_array(_) | Pexp_tuple(_)) | Pexp_construct(_, Some(_))) + | Pexp_record(_) => + true + | _ => false + } + + let returnDoc = printExpression(returnExpr) + if shouldInline { + Doc.concat(list(Doc.space, returnDoc)) + } else { + Doc.group( + if returnShouldIndent { + Doc.concat(list( + Doc.indent(Doc.concat(list(Doc.line, returnDoc))), + if inCallback { + Doc.softLine + } else { + Doc.nil + }, + )) + } else { + Doc.concat(list(Doc.space, returnDoc)) + }, + ) + } + } + + let typConstraintDoc = switch typConstraint { + | Some(typ) => 
Doc.concat(list(Doc.text(": "), printTypExpr(typ))) + | _ => Doc.nil + } + + let attrs = switch attrs { + | list() => Doc.nil + | attrs => + Doc.concat(list( + Doc.join(~sep=Doc.line, List.map(printAttribute, attrs)), + Doc.space, + )) + } + + Doc.group( + Doc.concat(list( + attrs, + parametersDoc, + typConstraintDoc, + Doc.text(" =>"), + returnExprDoc, + )), + ) + } + + and printTernaryOperand = expr => { + let doc = printExpression(expr) + if Parens.ternaryOperand(expr) { + addParens(doc) + } else { + doc + } + } + + and printSetFieldExpr = (attrs, lhs, longidentLoc, rhs) => { + let rhsDoc = { + let doc = printExpression(rhs) + if Parens.setFieldExprRhs(rhs) { + addParens(doc) + } else { + doc + } + } + + let lhsDoc = { + let doc = printExpression(lhs) + if Parens.fieldExpr(lhs) { + addParens(doc) + } else { + doc + } + } + + let shouldIndent = ParsetreeViewer.isBinaryExpression(rhs) + let doc = Doc.concat(list( + lhsDoc, + Doc.dot, + printLongident(longidentLoc.txt), + Doc.text(" ="), + if shouldIndent { + Doc.group(Doc.indent(Doc.concat(list(Doc.line, rhsDoc)))) + } else { + Doc.concat(list(Doc.space, rhsDoc)) + }, + )) + switch attrs { + | list() => doc + | attrs => Doc.group(Doc.concat(list(printAttributes(attrs), doc))) + } + } + + and printUnaryExpression = expr => { + let printUnaryOperator = op => + Doc.text( + switch op { + | "~+" => "+" + | "~+." => "+." + | "~-" => "-" + | "~-." => "-." + | "not" => "!" + | "!" => "&" + | _ => assert false + }, + ) + switch expr.pexp_desc { + | Pexp_apply( + {pexp_desc: Pexp_ident({txt: Longident.Lident(operator)})}, + list(/Nolabel, operand/), + ) => + let printedOperand = { + let doc = printExpression(operand) + if Parens.unaryExprOperand(operand) { + addParens(doc) + } else { + doc + } + } + + Doc.concat(list(printUnaryOperator(operator), printedOperand)) + | _ => assert false + } + } + + and printBinaryExpression = (expr: Parsetree.expression) => { + let printBinaryOperator = (~inlineRhs, operator) => { + let operatorTxt = switch operator { + | "|." => "->" + | "^" => "++" + | "=" => "==" + | "==" => "===" + | "<>" => "!=" + | "!=" => "!==" + | txt => txt + } + + let spacingBeforeOperator = if operator == "|." { + Doc.softLine + } else if operator == "|>" { + Doc.line + } else { + Doc.space + } + + let spacingAfterOperator = if operator == "|." 
{ + Doc.nil + } else if operator == "|>" { + Doc.space + } else if inlineRhs { + Doc.space + } else { + Doc.line + } + + Doc.concat(list( + spacingBeforeOperator, + Doc.text(operatorTxt), + spacingAfterOperator, + )) + } + + let printOperand = (~isLhs, expr, parentOperator) => { + let rec flatten = (~isLhs, expr, parentOperator) => + if ParsetreeViewer.isBinaryExpression(expr) { + switch expr { + | { + pexp_desc: + Pexp_apply( + {pexp_desc: Pexp_ident({txt: Longident.Lident(operator)})}, + list(/_, left/, /_, right/), + ), + } => + if ( + ParsetreeViewer.flattenableOperators(parentOperator, operator) && + !ParsetreeViewer.hasAttributes(expr.pexp_attributes) + ) { + let leftPrinted = flatten(~isLhs=true, left, operator) + let rightPrinted = { + let / + _, + rightAttrs + / = ParsetreeViewer.partitionPrinteableAttributes( + right.pexp_attributes, + ) + + let doc = printExpression({ + ...right, + pexp_attributes: rightAttrs, + }) + let doc = if Parens.flattenOperandRhs(parentOperator, right) { + Doc.concat(list(Doc.lparen, doc, Doc.rparen)) + } else { + doc + } + + let printeableAttrs = ParsetreeViewer.filterPrinteableAttributes( + right.pexp_attributes, + ) + + Doc.concat(list(printAttributes(printeableAttrs), doc)) + } + + Doc.concat(list( + leftPrinted, + printBinaryOperator(~inlineRhs=false, operator), + rightPrinted, + )) + } else { + let doc = printExpression({...expr, pexp_attributes: list()}) + let doc = if ( + Parens.subBinaryExprOperand(parentOperator, operator) || + ((expr.pexp_attributes != list()) && + (ParsetreeViewer.isBinaryExpression(expr) || + ParsetreeViewer.isTernaryExpr(expr))) + ) { + Doc.concat(list(Doc.lparen, doc, Doc.rparen)) + } else { + doc + } + Doc.concat(list(printAttributes(expr.pexp_attributes), doc)) + } + | _ => assert false + } + } else { + switch expr.pexp_desc { + | Pexp_setfield(lhs, field, rhs) => + let doc = printSetFieldExpr(expr.pexp_attributes, lhs, field, rhs) + if isLhs { + addParens(doc) + } else { + doc + } + | Pexp_apply( + {pexp_desc: Pexp_ident({txt: Longident.Lident("#=")})}, + list(/Nolabel, lhs/, /Nolabel, rhs/), + ) => + let rhsDoc = printExpression(rhs) + let lhsDoc = printExpression(lhs) + + let shouldIndent = ParsetreeViewer.isBinaryExpression(rhs) + let doc = Doc.group( + Doc.concat(list( + lhsDoc, + Doc.text(" ="), + if shouldIndent { + Doc.group(Doc.indent(Doc.concat(list(Doc.line, rhsDoc)))) + } else { + Doc.concat(list(Doc.space, rhsDoc)) + }, + )), + ) + let doc = switch expr.pexp_attributes { + | list() => doc + | attrs => Doc.group(Doc.concat(list(printAttributes(attrs), doc))) + } + + if isLhs { + addParens(doc) + } else { + doc + } + | _ => + let doc = printExpression(expr) + if Parens.binaryExprOperand(~isLhs, expr, parentOperator) { + addParens(doc) + } else { + doc + } + } + } + + flatten(~isLhs, expr, parentOperator) + } + + switch expr.pexp_desc { + | Pexp_apply( + {pexp_desc: Pexp_ident({txt: Longident.Lident(("|." | "|>") as op)})}, + list(/Nolabel, lhs/, /Nolabel, rhs/), + ) + when !( + ParsetreeViewer.isBinaryExpression(lhs) || + ParsetreeViewer.isBinaryExpression(rhs) + ) => + let lhsDoc = printOperand(~isLhs=true, lhs, op) + let rhsDoc = printOperand(~isLhs=false, rhs, op) + Doc.concat(list( + lhsDoc, + switch op { + | "|." 
=> Doc.text("->") + | "|>" => Doc.text(" |> ") + | _ => assert false + }, + rhsDoc, + )) + | Pexp_apply( + {pexp_desc: Pexp_ident({txt: Longident.Lident(operator)})}, + list(/Nolabel, lhs/, /Nolabel, rhs/), + ) => + let right = { + let operatorWithRhs = Doc.concat(list( + printBinaryOperator( + ~inlineRhs=ParsetreeViewer.shouldInlineRhsBinaryExpr(rhs), + operator, + ), + printOperand(~isLhs=false, rhs, operator), + )) + if ParsetreeViewer.shouldIndentBinaryExpr(expr) { + Doc.group(Doc.indent(operatorWithRhs)) + } else { + operatorWithRhs + } + } + + let doc = Doc.group( + Doc.concat(list(printOperand(~isLhs=true, lhs, operator), right)), + ) + Doc.concat(list( + printAttributes(expr.pexp_attributes), + if Parens.binaryExpr(expr) { + addParens(doc) + } else { + doc + }, + )) + | _ => Doc.nil + } + } + + and printPexpApply = expr => + switch expr.pexp_desc { + | Pexp_apply( + {pexp_desc: Pexp_ident({txt: Longident.Lident("##")})}, + list(/Nolabel, parentExpr/, /Nolabel, memberExpr/), + ) => + let member = { + let memberDoc = printExpression(memberExpr) + Doc.concat(list(Doc.text("\""), memberDoc, Doc.text("\""))) + } + + Doc.group( + Doc.concat(list( + printAttributes(expr.pexp_attributes), + printExpression(parentExpr), + Doc.lbracket, + member, + Doc.rbracket, + )), + ) + | Pexp_apply( + {pexp_desc: Pexp_ident({txt: Longident.Lident("#=")})}, + list(/Nolabel, lhs/, /Nolabel, rhs/), + ) => + let rhsDoc = printExpression(rhs) + + let shouldIndent = ParsetreeViewer.isBinaryExpression(rhs) + let doc = Doc.group( + Doc.concat(list( + printExpression(lhs), + Doc.text(" ="), + if shouldIndent { + Doc.group(Doc.indent(Doc.concat(list(Doc.line, rhsDoc)))) + } else { + Doc.concat(list(Doc.space, rhsDoc)) + }, + )), + ) + switch expr.pexp_attributes { + | list() => doc + | attrs => Doc.group(Doc.concat(list(printAttributes(attrs), doc))) + } + | Pexp_apply( + {pexp_desc: Pexp_ident({txt: Longident.Ldot(Lident("Array"), "get")})}, + list(/Nolabel, parentExpr/, /Nolabel, memberExpr/), + ) => + let member = { + let memberDoc = printExpression(memberExpr) + let shouldInline = switch memberExpr.pexp_desc { + | Pexp_constant(_) | Pexp_ident(_) => true + | _ => false + } + + if shouldInline { + memberDoc + } else { + Doc.concat(list( + Doc.indent(Doc.concat(list(Doc.softLine, memberDoc))), + Doc.softLine, + )) + } + } + + Doc.group( + Doc.concat(list( + printAttributes(expr.pexp_attributes), + printExpression(parentExpr), + Doc.lbracket, + member, + Doc.rbracket, + )), + ) + | Pexp_apply({pexp_desc: Pexp_ident({txt: lident})}, args) + when ParsetreeViewer.isJsxExpression(expr) => + printJsxExpression(lident, args) + | Pexp_apply(callExpr, args) => + let /uncurried, attrs/ = ParsetreeViewer.processUncurriedAttribute( + expr.pexp_attributes, + ) + + let callExprDoc = printExpression(callExpr) + if ParsetreeViewer.requiresSpecialCallbackPrinting(args) { + let argsDoc = printArgumentsWithCallback(~uncurried, args) + Doc.concat(list(printAttributes(attrs), callExprDoc, argsDoc)) + } else { + let argsDoc = printArguments(~uncurried, args) + Doc.concat(list(printAttributes(attrs), callExprDoc, argsDoc)) + } + | _ => assert false + } + + and printJsxExpression = (lident, args) => { + let name = printJsxName(lident) + let /formattedProps, children/ = formatJsxProps(args) + + let isSelfClosing = switch children { + | list() => true + | _ => false + } + Doc.group( + Doc.concat(list( + Doc.group( + Doc.concat(list( + Doc.lessThan, + name, + formattedProps, + if isSelfClosing { + Doc.concat(list(Doc.line, 
Doc.text("/>"))) + } else { + Doc.nil + }, + )), + ), + if isSelfClosing { + Doc.nil + } else { + Doc.concat(list( + Doc.greaterThan, + Doc.indent(Doc.concat(list(Doc.line, printJsxChildren(children)))), + Doc.line, + Doc.text("</"), + name, + Doc.greaterThan, + )) + }, + )), + ) + } + + and printJsxFragment = expr => { + let opening = Doc.text("<>") + let closing = Doc.text("</>") + let /children, _/ = ParsetreeViewer.collectListExpressions(expr) + Doc.group( + Doc.concat(list( + opening, + switch children { + | list() => Doc.nil + | children => + Doc.indent(Doc.concat(list(Doc.line, printJsxChildren(children)))) + }, + Doc.line, + closing, + )), + ) + } + + and printJsxChildren = (children: list<Parsetree.expression>) => + Doc.group( + Doc.join( + ~sep=Doc.line, + List.map( + expr => { + let exprDoc = printExpression(expr) + if Parens.jsxChildExpr(expr) { + addBraces(exprDoc) + } else { + exprDoc + } + }, + children, + ), + ), + ) + + and formatJsxProps = args => { + let rec loop = (props, args) => + switch args { + | list() => /Doc.nil, list()/ + | list( + /Asttypes.Labelled("children"), children/, + / + Asttypes.Nolabel, + { + Parsetree.pexp_desc: + Pexp_construct({txt: Longident.Lident("()")}, None), + } + /, + ) => + let formattedProps = Doc.indent( + switch props { + | list() => Doc.nil + | props => + Doc.concat(list( + Doc.line, + Doc.group(Doc.join(~sep=Doc.line, props |> List.rev)), + )) + }, + ) + let /children, _/ = ParsetreeViewer.collectListExpressions(children) + /formattedProps, children/ + | list(arg, ...args) => + let propDoc = formatJsxProp(arg) + loop(list(propDoc, ...props), args) + } + + loop(list(), args) + } + + and formatJsxProp = arg => + switch arg { + | / + (Asttypes.Labelled(lblTxt) | Optional(lblTxt)) as lbl, + { + Parsetree.pexp_attributes: list(), + pexp_desc: Pexp_ident({txt: Longident.Lident(ident)}), + } + / when lblTxt == ident => + switch lbl { + | Nolabel => Doc.nil + | Labelled(lbl) => Doc.text(lbl) + | Optional(lbl) => Doc.text("?"
++ lbl) + } + | /lbl, expr/ => + let lblDoc = switch lbl { + | Asttypes.Labelled(lbl) => Doc.text(lbl ++ "=") + | Asttypes.Optional(lbl) => Doc.text(lbl ++ "=?") + | Nolabel => Doc.nil + } + + let exprDoc = printExpression(expr) + Doc.concat(list( + lblDoc, + if Parens.jsxPropExpr(expr) { + addBraces(exprDoc) + } else { + exprDoc + }, + )) + } + + and printJsxName = lident => { + let rec flatten = (acc, lident) => + switch lident { + | Longident.Lident(txt) => list(txt, ...acc) + | Ldot(lident, txt) => + let acc = if txt == "createElement" { + acc + } else { + list(txt, ...acc) + } + flatten(acc, lident) + | _ => acc + } + + switch lident { + | Longident.Lident(txt) => Doc.text(txt) + | _ as lident => + let segments = flatten(list(), lident) + Doc.join(~sep=Doc.dot, List.map(Doc.text, segments)) + } + } + + and printArgumentsWithCallback = (~uncurried, args) => { + let rec loop = (acc, args) => + switch args { + | list() => /Doc.nil, Doc.nil/ + | list(/_lbl, expr/) => + let callback = printPexpFun(~inCallback=true, expr) + /Doc.concat(List.rev(acc)), callback/ + | list(arg, ...args) => + let argDoc = printArgument(arg) + loop(list(Doc.line, Doc.comma, argDoc, ...acc), args) + } + + let /printedArgs, callback/ = loop(list(), args) + + let fitsOnOneLine = Doc.concat(list( + if uncurried { + Doc.text("(.") + } else { + Doc.lparen + }, + Doc.concat(list(printedArgs, callback)), + Doc.rparen, + )) + + let arugmentsFitOnOneLine = Doc.concat(list( + if uncurried { + Doc.text("(.") + } else { + Doc.lparen + }, + Doc.concat(list( + Doc.softLine, + printedArgs, + Doc.breakableGroup(~forceBreak=true, callback), + )), + Doc.softLine, + Doc.rparen, + )) + + let breakAllArgs = printArguments(~uncurried, args) + Doc.customLayout(list(fitsOnOneLine, arugmentsFitOnOneLine, breakAllArgs)) + } + + and printArguments = ( + ~uncurried, + args: list, + ) => + switch args { + | list(/ + Nolabel, + {pexp_desc: Pexp_construct({txt: Longident.Lident("()")}, _)} + /) => + if uncurried { + Doc.text("(.)") + } else { + Doc.text("()") + } + | list(/Nolabel, arg/) when ParsetreeViewer.isHuggableExpression(arg) => + Doc.concat(list( + if uncurried { + Doc.text("(.") + } else { + Doc.lparen + }, + printExpression(arg), + Doc.rparen, + )) + | args => + Doc.group( + Doc.concat(list( + if uncurried { + Doc.text("(.") + } else { + Doc.lparen + }, + Doc.indent( + Doc.concat(list( + if uncurried { + Doc.line + } else { + Doc.softLine + }, + Doc.join( + ~sep=Doc.concat(list(Doc.comma, Doc.line)), + List.map(printArgument, args), + ), + )), + ), + Doc.trailingComma, + Doc.softLine, + Doc.rparen, + )), + ) + } + + and printArgument = ( + /argLbl, arg/: /Asttypes.arg_label, Parsetree.expression/, + ) => + switch /argLbl, arg/ { + | / + Asttypes.Labelled(lbl), + {pexp_desc: Pexp_ident({txt: Longident.Lident(name)})} + / when lbl == name => + Doc.text("~" ++ lbl) + | / + Asttypes.Optional(lbl), + {pexp_desc: Pexp_ident({txt: Longident.Lident(name)})} + / when lbl == name => + Doc.text("~" ++ lbl ++ "?") + | /lbl, expr/ => + let printedLbl = switch argLbl { + | Asttypes.Nolabel => Doc.nil + | Asttypes.Labelled(lbl) => Doc.text("~" ++ lbl ++ "=") + | Asttypes.Optional(lbl) => Doc.text("~" ++ lbl ++ "=?") + } + + let printedExpr = printExpression(expr) + Doc.concat(list(printedLbl, printedExpr)) + } + + and printCases = (cases: list) => + Doc.breakableGroup( + ~forceBreak=true, + Doc.concat(list( + Doc.lbrace, + Doc.concat(list( + Doc.line, + Doc.join(~sep=Doc.line, List.map(printCase, cases)), + )), + Doc.line, + Doc.rbrace, + )), + ) 
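
Aside, not part of the diff: every printer above (printArguments, printArgumentsWithCallback, printCases, the JSX printers) builds a Doc value out of combinators (Doc.text, Doc.concat, Doc.line, Doc.softLine, Doc.indent, Doc.group, Doc.customLayout) and only at the very end renders it with Doc.toString(~width=80, ...). The OCaml sketch below is a minimal, self-contained model of that idea under stated assumptions: the type, the constructor set, and the greedy "does the flat form fit on this line?" rule are simplified stand-ins invented for illustration, not the actual res_syntax Doc API, which has more constructors (breakableGroup, ifBreaks, trailingComma, customLayout) and a more careful fitting algorithm.

(* Minimal model of a Doc-combinator pretty printer (illustrative sketch). *)
type doc =
  | Nil
  | Text of string
  | Concat of doc list
  | Line of string (* flat rendering: " " for a line, "" for a softLine *)
  | Indent of doc
  | Group of doc

let nil = Nil
let text s = Text s
let concat ds = Concat ds
let line = Line " "
let soft_line = Line ""
let indent d = Indent d
let group d = Group d

(* Width of a doc if it were printed entirely on one line. *)
let rec flat_width = function
  | Nil -> 0
  | Text s | Line s -> String.length s
  | Concat ds -> List.fold_left (fun acc d -> acc + flat_width d) 0 ds
  | Indent d | Group d -> flat_width d

(* Greedy rendering: a group prints flat when its flat form still fits in the
   remaining width; otherwise every Line inside it becomes a newline followed
   by the current indentation. *)
let to_string ~width doc =
  let buf = Buffer.create 256 in
  let rec go ind flat col = function
    | Nil -> col
    | Text s -> Buffer.add_string buf s; col + String.length s
    | Line flat_repr ->
      if flat then (Buffer.add_string buf flat_repr; col + String.length flat_repr)
      else (Buffer.add_char buf '\n'; Buffer.add_string buf (String.make ind ' '); ind)
    | Concat ds -> List.fold_left (go ind flat) col ds
    | Indent d -> go (ind + 2) flat col d
    | Group d -> go ind (flat || col + flat_width d <= width) col d
  in
  ignore (go 0 false 0 doc);
  Buffer.contents buf

(* The same document collapses or breaks depending on the width budget, which
   is how the printers above choose between "f(x, y)" and a broken-out
   argument list. *)
let () =
  let args =
    group (concat [
      text "(";
      indent (concat [soft_line; text "x,"; line; text "y"]);
      soft_line;
      text ")";
      nil;
    ])
  in
  print_endline (to_string ~width:80 (concat [text "f"; args]));
  print_endline (to_string ~width:6 (concat [text "f"; args]))

With ~width:80 this prints f(x, y) on one line; with ~width:6 the group no longer fits, so the soft lines break and the indented operands each land on their own line. That is the same mechanism behind Doc.toString(~width=80, ...) in printImplementation further down, minus the extra layout strategies the real module offers (for example the Doc.customLayout candidate list used by printArgumentsWithCallback above).
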
+ + and printCase = (case: Parsetree.case) => { + let rhs = switch case.pc_rhs.pexp_desc { + | (((Pexp_let(_) | Pexp_letmodule(_)) | Pexp_letexception(_)) + | Pexp_open(_)) + | Pexp_sequence(_) => + printExpressionBlock(~braces=false, case.pc_rhs) + | _ => printExpression(case.pc_rhs) + } + + let guard = switch case.pc_guard { + | None => Doc.nil + | Some(expr) => + Doc.group( + Doc.concat(list(Doc.line, Doc.text("when "), printExpression(expr))), + ) + } + + Doc.group( + Doc.concat(list( + Doc.text("| "), + Doc.indent( + Doc.concat(list( + printPattern(case.pc_lhs), + guard, + Doc.text(" =>"), + Doc.line, + rhs, + )), + ), + )), + ) + } + + and printExprFunParameters = (~inCallback, ~uncurried, parameters) => + switch parameters { + | list(/list(), Asttypes.Nolabel, None, {Parsetree.ppat_desc: Ppat_any}/) + when !uncurried => + Doc.text("_") + | list(/ + list(), + Asttypes.Nolabel, + None, + {Parsetree.ppat_desc: Ppat_var(stringLoc)} + /) when !uncurried => + Doc.text(stringLoc.txt) + | list(/ + list(), + Nolabel, + None, + {ppat_desc: Ppat_construct({txt: Longident.Lident("()")}, None)} + /) when !uncurried => + Doc.text("()") + | parameters => + let lparen = if uncurried { + Doc.text("(. ") + } else { + Doc.lparen + } + let shouldHug = ParsetreeViewer.parametersShouldHug(parameters) + let printedParamaters = Doc.concat(list( + if shouldHug || inCallback { + Doc.nil + } else { + Doc.softLine + }, + Doc.join( + ~sep=Doc.concat(list( + Doc.comma, + if inCallback { + Doc.space + } else { + Doc.line + }, + )), + List.map(printExpFunParameter, parameters), + ), + )) + Doc.group( + Doc.concat(list( + lparen, + if shouldHug || inCallback { + printedParamaters + } else { + Doc.indent(printedParamaters) + }, + if shouldHug || inCallback { + Doc.nil + } else { + Doc.concat(list(Doc.trailingComma, Doc.softLine)) + }, + Doc.rparen, + )), + ) + } + + and printExpFunParameter = (/attrs, lbl, defaultExpr, pattern/) => { + let /isUncurried, attrs/ = ParsetreeViewer.processUncurriedAttribute(attrs) + let uncurried = if isUncurried { + Doc.concat(list(Doc.dot, Doc.space)) + } else { + Doc.nil + } + let attrs = switch attrs { + | list() => Doc.nil + | attrs => + Doc.concat(list( + Doc.join(~sep=Doc.line, List.map(printAttribute, attrs)), + Doc.line, + )) + } + + let defaultExprDoc = switch defaultExpr { + | Some(expr) => Doc.concat(list(Doc.text("="), printExpression(expr))) + | None => Doc.nil + } + + let labelWithPattern = switch /lbl, pattern/ { + | /Asttypes.Nolabel, pattern/ => printPattern(pattern) + | /Asttypes.Labelled(lbl) | Optional(lbl), {ppat_desc: Ppat_var(stringLoc)}/ + when lbl == stringLoc.txt => + Doc.concat(list(Doc.text("~"), Doc.text(lbl))) + | /Asttypes.Labelled(lbl) | Optional(lbl), pattern/ => + Doc.concat(list( + Doc.text("~"), + Doc.text(lbl), + Doc.text(" as "), + printPattern(pattern), + )) + } + + let optionalLabelSuffix = switch /lbl, defaultExpr/ { + | /Asttypes.Optional(_), None/ => Doc.text("=?") + | _ => Doc.nil + } + + Doc.group( + Doc.concat(list( + uncurried, + attrs, + labelWithPattern, + defaultExprDoc, + optionalLabelSuffix, + )), + ) + } + + and printExpressionBlock = (~braces, expr) => { + let rec collectRows = (acc, expr) => + switch expr.Parsetree.pexp_desc { + | Parsetree.Pexp_letmodule({txt: modName, loc: modLoc}, modExpr, expr) => + let letModuleDoc = Doc.concat(list( + Doc.text("module "), + Doc.text(modName), + Doc.text(" = "), + printModExpr(modExpr), + )) + let loc = {...modLoc, loc_end: modExpr.pmod_loc.loc_end} + collectRows(list(/loc, letModuleDoc/, 
...acc), expr) + | Pexp_letexception(extensionConstructor, expr) => + let letExceptionDoc = printExceptionDef(extensionConstructor) + let loc = extensionConstructor.pext_loc + collectRows(list(/loc, letExceptionDoc/, ...acc), expr) + | Pexp_open(overrideFlag, longidentLoc, expr) => + let openDoc = Doc.concat(list( + Doc.text("open"), + printOverrideFlag(overrideFlag), + Doc.space, + printLongident(longidentLoc.txt), + )) + let loc = longidentLoc.loc + collectRows(list(/loc, openDoc/, ...acc), expr) + | Pexp_sequence(expr1, expr2) => + let exprDoc = { + let doc = printExpression(expr1) + if Parens.blockExpr(expr1) { + addParens(doc) + } else { + doc + } + } + + let loc = expr1.pexp_loc + collectRows(list(/loc, exprDoc/, ...acc), expr2) + | Pexp_let(recFlag, valueBindings, expr) => + let recFlag = switch recFlag { + | Asttypes.Nonrecursive => Doc.nil + | Asttypes.Recursive => Doc.text("rec ") + } + + let letDoc = printValueBindings(~recFlag, valueBindings) + let loc = switch /valueBindings, List.rev(valueBindings)/ { + | /list({pvb_loc: firstLoc}, ..._), list({pvb_loc: lastLoc}, ..._)/ => + {...firstLoc, loc_end: lastLoc.loc_end} + | _ => Location.none + } + + collectRows(list(/loc, letDoc/, ...acc), expr) + | _ => + let exprDoc = { + let doc = printExpression(expr) + if Parens.blockExpr(expr) { + addParens(doc) + } else { + doc + } + } + + List.rev(list(/expr.pexp_loc, exprDoc/, ...acc)) + } + + let block = + collectRows(list(), expr) |> interleaveWhitespace(~forceBreak=true) + Doc.breakableGroup( + ~forceBreak=true, + if braces { + Doc.concat(list( + Doc.lbrace, + Doc.indent(Doc.concat(list(Doc.line, block))), + Doc.line, + Doc.rbrace, + )) + } else { + block + }, + ) + } + + and printOverrideFlag = overrideFlag => + switch overrideFlag { + | Asttypes.Override => Doc.text("!") + | Fresh => Doc.nil + } + + and printDirectionFlag = flag => + switch flag { + | Asttypes.Downto => Doc.text(" downto ") + | Asttypes.Upto => Doc.text(" to ") + } + + and printRecordRow = (/lbl, expr/) => + Doc.concat(list( + printLongident(lbl.txt), + Doc.text(": "), + printExpression(expr), + )) + + and printBsObjectRow = (/lbl, expr/) => + Doc.concat(list( + Doc.text("\""), + printLongident(lbl.txt), + Doc.text("\""), + Doc.text(": "), + printExpression(expr), + )) + + and printAttributes = (~loc=?, attrs: Parsetree.attributes) => + switch attrs { + | list() => Doc.nil + | attrs => + let lineBreak = switch loc { + | None => Doc.line + | Some(loc) => + switch List.rev(attrs) { + | list(/{loc: firstLoc}, _/, ..._) + when loc.loc_start.pos_lnum > firstLoc.loc_end.pos_lnum => + Doc.literalLine + | _ => Doc.line + } + } + + Doc.concat(list( + Doc.group(Doc.join(~sep=Doc.line, List.map(printAttribute, attrs))), + lineBreak, + )) + } + + and printAttribute = (/id, payload/: Parsetree.attribute) => { + let attrName = Doc.text("@" ++ id.txt) + switch payload { + | PStr(list({pstr_desc: Pstr_eval(expr, attrs)})) => + let exprDoc = printExpression(expr) + let needsParens = switch attrs { + | list() => false + | _ => true + } + Doc.group( + Doc.concat(list( + attrName, + addParens( + Doc.concat(list( + printAttributes(attrs), + if needsParens { + addParens(exprDoc) + } else { + exprDoc + }, + )), + ), + )), + ) + | _ => attrName + } + } + + and printModExpr = modExpr => + switch modExpr.pmod_desc { + | Pmod_ident(longidentLoc) => printLongident(longidentLoc.txt) + | Pmod_structure(structure) => + Doc.breakableGroup( + ~forceBreak=true, + Doc.concat(list( + Doc.lbrace, + Doc.indent(Doc.concat(list(Doc.softLine, 
printStructure(structure)))), + Doc.softLine, + Doc.rbrace, + )), + ) + | Pmod_unpack(expr) => + let shouldHug = switch expr.pexp_desc { + | Pexp_let(_) => true + | Pexp_constraint( + {pexp_desc: Pexp_let(_)}, + {ptyp_desc: Ptyp_package(packageType)}, + ) => + true + | _ => false + } + + let /expr, moduleConstraint/ = switch expr.pexp_desc { + | Pexp_constraint(expr, {ptyp_desc: Ptyp_package(packageType)}) => + let typeDoc = Doc.group( + Doc.concat(list( + Doc.text(":"), + Doc.indent( + Doc.concat(list( + Doc.line, + printPackageType( + ~printModuleKeywordAndParens=false, + packageType, + ), + )), + ), + )), + ) + /expr, typeDoc/ + | _ => /expr, Doc.nil/ + } + + let unpackDoc = Doc.group( + Doc.concat(list(printExpression(expr), moduleConstraint)), + ) + Doc.group( + Doc.concat(list( + Doc.text("unpack("), + if shouldHug { + unpackDoc + } else { + Doc.concat(list( + Doc.indent(Doc.concat(list(Doc.softLine, unpackDoc))), + Doc.softLine, + )) + }, + Doc.rparen, + )), + ) + | Pmod_extension(extension) => printExtension(extension) + | Pmod_apply(_) => + let /args, callExpr/ = ParsetreeViewer.modExprApply(modExpr) + let isUnitSugar = switch args { + | list({pmod_desc: Pmod_structure(list())}) => true + | _ => false + } + + let shouldHug = switch args { + | list({pmod_desc: Pmod_structure(_)}) => true + | _ => false + } + + Doc.group( + Doc.concat(list( + printModExpr(callExpr), + if isUnitSugar { + printModApplyArg(List.hd(args)) + } else { + Doc.concat(list( + Doc.lparen, + if shouldHug { + printModApplyArg(List.hd(args)) + } else { + Doc.indent( + Doc.concat(list( + Doc.softLine, + Doc.join( + ~sep=Doc.concat(list(Doc.comma, Doc.line)), + List.map(printModApplyArg, args), + ), + )), + ) + }, + if !shouldHug { + Doc.concat(list(Doc.trailingComma, Doc.softLine)) + } else { + Doc.nil + }, + Doc.rparen, + )) + }, + )), + ) + | Pmod_constraint(modExpr, modType) => + Doc.concat(list( + printModExpr(modExpr), + Doc.text(": "), + printModType(modType), + )) + | Pmod_functor(_) => printModFunctor(modExpr) + } + + and printModFunctor = modExpr => { + let /parameters, returnModExpr/ = ParsetreeViewer.modExprFunctor(modExpr) + + let /returnConstraint, returnModExpr/ = switch returnModExpr.pmod_desc { + | Pmod_constraint(modExpr, modType) => + let constraintDoc = { + let doc = printModType(modType) + if Parens.modExprFunctorConstraint(modType) { + addParens(doc) + } else { + doc + } + } + + let modConstraint = Doc.concat(list(Doc.text(": "), constraintDoc)) + /modConstraint, printModExpr(modExpr)/ + | _ => /Doc.nil, printModExpr(returnModExpr)/ + } + + let parametersDoc = switch parameters { + | list(/attrs, {txt: "*"}, None/) => + let attrs = switch attrs { + | list() => Doc.nil + | attrs => + Doc.concat(list( + Doc.join(~sep=Doc.line, List.map(printAttribute, attrs)), + Doc.line, + )) + } + Doc.group(Doc.concat(list(attrs, Doc.text("()")))) + | list(/list(), {txt: lbl}, None/) => Doc.text(lbl) + | parameters => + Doc.group( + Doc.concat(list( + Doc.lparen, + Doc.indent( + Doc.concat(list( + Doc.softLine, + Doc.join( + ~sep=Doc.concat(list(Doc.comma, Doc.line)), + List.map(printModFunctorParam, parameters), + ), + )), + ), + Doc.trailingComma, + Doc.softLine, + Doc.rparen, + )), + ) + } + + Doc.group( + Doc.concat(list( + parametersDoc, + returnConstraint, + Doc.text(" => "), + returnModExpr, + )), + ) + } + + and printModFunctorParam = (/attrs, lbl, optModType/) => { + let attrs = switch attrs { + | list() => Doc.nil + | attrs => + Doc.concat(list( + Doc.join(~sep=Doc.line, List.map(printAttribute, 
attrs)), + Doc.line, + )) + } + Doc.group( + Doc.concat(list( + attrs, + Doc.text(lbl.txt), + switch optModType { + | None => Doc.nil + | Some(modType) => + Doc.concat(list(Doc.text(": "), printModType(modType))) + }, + )), + ) + } + + and printModApplyArg = modExpr => + switch modExpr.pmod_desc { + | Pmod_structure(list()) => Doc.text("()") + | _ => printModExpr(modExpr) + } + + and printExceptionDef = (constr: Parsetree.extension_constructor) => { + let kind = switch constr.pext_kind { + | Pext_rebind({txt: longident}) => + Doc.indent( + Doc.concat(list(Doc.text(" ="), Doc.line, printLongident(longident))), + ) + | Pext_decl(Pcstr_tuple(list()), None) => Doc.nil + | Pext_decl(args, gadt) => + let gadtDoc = switch gadt { + | Some(typ) => Doc.concat(list(Doc.text(": "), printTypExpr(typ))) + | None => Doc.nil + } + + Doc.concat(list(printConstructorArguments(args), gadtDoc)) + } + + Doc.group( + Doc.concat(list( + printAttributes(constr.pext_attributes), + Doc.text("exception "), + Doc.text(constr.pext_name.txt), + kind, + )), + ) + } + + and printExtensionConstructor = ( + i, + constr: Parsetree.extension_constructor, + ) => { + let attrs = printAttributes(constr.pext_attributes) + let bar = if i > 0 { + Doc.text("| ") + } else { + Doc.ifBreaks(Doc.text("| "), Doc.nil) + } + + let kind = switch constr.pext_kind { + | Pext_rebind({txt: longident}) => + Doc.indent( + Doc.concat(list(Doc.text(" ="), Doc.line, printLongident(longident))), + ) + | Pext_decl(Pcstr_tuple(list()), None) => Doc.nil + | Pext_decl(args, gadt) => + let gadtDoc = switch gadt { + | Some(typ) => Doc.concat(list(Doc.text(": "), printTypExpr(typ))) + | None => Doc.nil + } + + Doc.concat(list(printConstructorArguments(args), gadtDoc)) + } + + Doc.concat(list( + bar, + Doc.group(Doc.concat(list(attrs, Doc.text(constr.pext_name.txt), kind))), + )) + } + + let printImplementation = (s: Parsetree.structure, comments, src) => { + let t = CommentAst.initStructure(s, comments) + + let stringDoc = Doc.toString(~width=80, printStructure(s)) + print_endline(stringDoc) + print_newline() + } + + let printInterface = (s: Parsetree.signature) => { + let stringDoc = Doc.toString(~width=80, printSignature(s)) + print_endline(stringDoc) + print_newline() + } +} + + + diff --git a/res_syntax/benchmarks/data/PrinterOcaml.ml b/res_syntax/benchmarks/data/PrinterOcaml.ml new file mode 100644 index 0000000000..8691e38bb9 --- /dev/null +++ b/res_syntax/benchmarks/data/PrinterOcaml.ml @@ -0,0 +1,3228 @@ +module Printer = struct + type printer = { + src: bytes; + comments: CommentAst.t; + } + + + (* TODO: should this go inside a ast utility module? *) + let rec collectPatternsFromListConstruct acc pattern = + let open Parsetree in + match pattern.ppat_desc with + | Ppat_construct( + {txt = Longident.Lident "::"}, + Some {ppat_desc=Ppat_tuple (pat::rest::[])} + ) -> + collectPatternsFromListConstruct (pat::acc) rest + | _ -> List.rev acc, pattern + + let addParens doc = + Doc.group ( + Doc.concat [ + Doc.lparen; + Doc.indent ( + Doc.concat [ + Doc.softLine; + doc + ] + ); + Doc.softLine; + Doc.rparen; + ] + ) + + let addBraces doc = + Doc.group ( + Doc.concat [ + Doc.lbrace; + doc; + Doc.rbrace; + ] + ) + + (* This could be done in one pass by collecting locations as we go? 
*) + let interleaveWhitespace ?(forceBreak=false) (rows: (Location.t * Doc.t) list) = + let rec loop prevLoc acc rows = + match rows with + | [] -> Doc.concat (List.rev acc) + | (loc, doc)::rest -> + if loc.Location.loc_start.pos_lnum - prevLoc.Location.loc_end.pos_lnum > 1 then + loop loc (doc::Doc.line::Doc.line::acc) rest + else + loop loc (doc::Doc.line::acc) rest + in + match rows with + | [] -> Doc.nil + | (firstLoc, firstDoc)::rest -> + (* TODO: perf, reversing the list twice! *) + let forceBreak = forceBreak || (match List.rev rest with + | (lastLoc, _)::_ -> + firstLoc.loc_start.pos_lnum != lastLoc.loc_end.pos_lnum + | _ -> false) + in + Doc.breakableGroup ~forceBreak ( + loop firstLoc [firstDoc] rest + ) + + let printLongident l = match l with + | Longident.Lident lident -> Doc.text lident + | Longident.Ldot (lident, txt) as l -> + let txts = Longident.flatten l in + Doc.join ~sep:Doc.dot (List.map Doc.text txts) + | _ -> failwith "unsupported ident" + + (* TODO: better allocation strategy for the buffer *) + let escapeStringContents s = + let len = String.length s in + let b = Buffer.create len in + for i = 0 to len - 1 do + let c = String.get s i in + if c = '\008' then ( + Buffer.add_char b '\\'; + Buffer.add_char b 'b'; + ) else if c = '\009' then ( + Buffer.add_char b '\\'; + Buffer.add_char b 't'; + ) else if c = '\010' then ( + Buffer.add_char b '\\'; + Buffer.add_char b 'n'; + ) else if c = '\013' then ( + Buffer.add_char b '\\'; + Buffer.add_char b 'r'; + ) else if c = '\034' then ( + Buffer.add_char b '\\'; + Buffer.add_char b '"'; + ) else if c = '\092' then ( + Buffer.add_char b '\\'; + Buffer.add_char b '\\'; + )else ( + Buffer.add_char b c; + ); + done; + Buffer.contents b + + let printConstant c = match c with + | Parsetree.Pconst_integer (s, _) -> Doc.text s + | Pconst_string (s, _) -> Doc.text ("\"" ^ (escapeStringContents s) ^ "\"") + | Pconst_float (s, _) -> Doc.text s + | Pconst_char c -> Doc.text ("'" ^ (Char.escaped c) ^ "'") + + let rec printStructure (s : Parsetree.structure) = + interleaveWhitespace ( + List.map (fun si -> (si.Parsetree.pstr_loc, printStructureItem si)) s + ) + + and printStructureItem (si: Parsetree.structure_item) = + match si.pstr_desc with + | Pstr_value(rec_flag, valueBindings) -> + let recFlag = match rec_flag with + | Asttypes.Nonrecursive -> Doc.nil + | Asttypes.Recursive -> Doc.text "rec " + in + printValueBindings ~recFlag valueBindings + | Pstr_type(recFlag, typeDeclarations) -> + let recFlag = match recFlag with + | Asttypes.Nonrecursive -> Doc.nil + | Asttypes.Recursive -> Doc.text "rec " + in + printTypeDeclarations ~recFlag typeDeclarations + | Pstr_primitive valueDescription -> + printValueDescription valueDescription + | Pstr_eval (expr, attrs) -> + let needsParens = match expr with + | {pexp_attributes=[({txt="ns.ternary"},_)]; pexp_desc = Pexp_ifthenelse _} -> false + | _ when ParsetreeViewer.hasAttributes expr.pexp_attributes -> true + | _ -> false + in + let exprDoc = + let doc = printExpression expr in + if needsParens then addParens doc else doc + in + Doc.concat [ + printAttributes attrs; + exprDoc; + ] + | Pstr_attribute attr -> Doc.concat [Doc.text "@"; printAttribute attr] + | Pstr_extension (extension, attrs) -> Doc.concat [ + printAttributes attrs; + Doc.concat [Doc.text "%";printExtension extension]; + ] + | Pstr_include includeDeclaration -> + printIncludeDeclaration includeDeclaration + | Pstr_open openDescription -> + printOpenDescription openDescription + | Pstr_modtype modTypeDecl -> + 
printModuleTypeDeclaration modTypeDecl + | Pstr_module moduleBinding -> + printModuleBinding ~isRec:false 0 moduleBinding + | Pstr_recmodule moduleBindings -> + Doc.join ~sep:Doc.line (List.mapi (fun i mb -> + printModuleBinding ~isRec:true i mb + ) moduleBindings) + | Pstr_exception extensionConstructor -> + printExceptionDef extensionConstructor; + | Pstr_typext typeExtension -> + printTypeExtension typeExtension + | Pstr_class _ | Pstr_class_type _ -> Doc.nil + + and printTypeExtension (te : Parsetree.type_extension) = + let prefix = Doc.text "type " in + let name = printLongident te.ptyext_path.txt in + let typeParams = match te.ptyext_params with + | [] -> Doc.nil + | typeParams -> Doc.group ( + Doc.concat [ + Doc.lessThan; + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) ( + List.map printTypeParam typeParams + ) + ] + ); + Doc.trailingComma; + Doc.softLine; + Doc.greaterThan; + ] + ) + in + let extensionConstructors = + let ecs = te.ptyext_constructors in + let forceBreak = + match (ecs, List.rev ecs) with + | (first::_, last::_) -> + first.pext_loc.loc_start.pos_lnum > te.ptyext_path.loc.loc_end.pos_lnum || + first.pext_loc.loc_start.pos_lnum < last.pext_loc.loc_end.pos_lnum + | _ -> false + in + let privateFlag = match te.ptyext_private with + | Asttypes.Private -> Doc.concat [ + Doc.text "private"; + Doc.line; + ] + | Public -> Doc.nil + in + Doc.breakableGroup ~forceBreak ( + Doc.indent ( + Doc.concat [ + Doc.line; + privateFlag; + Doc.join ~sep:Doc.line ( + List.mapi printExtensionConstructor ecs + ) + ] + ) + ) + in + Doc.group ( + Doc.concat [ + printAttributes ~loc: te.ptyext_path.loc te.ptyext_attributes; + prefix; + name; + typeParams; + Doc.text " +="; + extensionConstructors; + ] + ) + + and printModuleBinding ~isRec i moduleBinding = + let prefix = if i = 0 then + Doc.concat [ + Doc.text "module "; + if isRec then Doc.text "rec " else Doc.nil; + ] + else + Doc.text "and " + in + let (modExprDoc, modConstraintDoc) = + match moduleBinding.pmb_expr with + | {pmod_desc = Pmod_constraint (modExpr, modType)} -> + ( + printModExpr modExpr, + Doc.concat [ + Doc.text ": "; + printModType modType + ] + ) + | modExpr -> + (printModExpr modExpr, Doc.nil) + in + Doc.concat [ + printAttributes ~loc:moduleBinding.pmb_name.loc moduleBinding.pmb_attributes; + prefix; + Doc.text moduleBinding.pmb_name.Location.txt; + modConstraintDoc; + Doc.text " = "; + modExprDoc; + ] + + and printModuleTypeDeclaration (modTypeDecl : Parsetree.module_type_declaration) = + Doc.concat [ + printAttributes modTypeDecl.pmtd_attributes; + Doc.text "module type "; + Doc.text modTypeDecl.pmtd_name.txt; + (match modTypeDecl.pmtd_type with + | None -> Doc.nil + | Some modType -> Doc.concat [ + Doc.text " = "; + printModType modType; + ]); + ] + + and printModType modType = + let modTypeDoc = match modType.pmty_desc with + | Parsetree.Pmty_ident {txt = longident; loc} -> + Doc.concat [ + printAttributes ~loc modType.pmty_attributes; + printLongident longident + ] + | Pmty_signature signature -> + let signatureDoc = Doc.breakableGroup ~forceBreak:true ( + Doc.concat [ + Doc.lbrace; + Doc.indent ( + Doc.concat [ + Doc.line; + printSignature signature; + ] + ); + Doc.line; + Doc.rbrace; + ] + ) in + Doc.concat [ + printAttributes modType.pmty_attributes; + signatureDoc + ] + | Pmty_functor _ -> + let (parameters, returnType) = ParsetreeViewer.functorType modType in + let parametersDoc = match parameters with + | [] -> Doc.nil + | [attrs, {Location.txt = "_"}, Some 
modType] -> + let attrs = match attrs with + | [] -> Doc.nil + | attrs -> Doc.concat [ + Doc.join ~sep:Doc.line (List.map printAttribute attrs); + Doc.line; + ] in + Doc.concat [ + attrs; + printModType modType + ] + | params -> + Doc.group ( + Doc.concat [ + Doc.lparen; + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) ( + List.map (fun (attrs, lbl, modType) -> + let attrs = match attrs with + | [] -> Doc.nil + | attrs -> Doc.concat [ + Doc.join ~sep:Doc.line (List.map printAttribute attrs); + Doc.line; + ] in + Doc.concat [ + attrs; + if lbl.Location.txt = "_" then Doc.nil else Doc.text lbl.txt; + (match modType with + | None -> Doc.nil + | Some modType -> Doc.concat [ + if lbl.txt = "_" then Doc.nil else Doc.text ": "; + printModType modType; + ]); + ] + ) params + ); + ] + ); + Doc.trailingComma; + Doc.softLine; + Doc.rparen; + ] + ) + in + let returnDoc = + let doc = printModType returnType in + if Parens.modTypeFunctorReturn returnType then addParens doc else doc + in + Doc.group ( + Doc.concat [ + parametersDoc; + Doc.group ( + Doc.concat [ + Doc.text " =>"; + Doc.line; + returnDoc; + ] + ) + ] + ) + | Pmty_typeof modExpr -> Doc.concat [ + Doc.text "module type of "; + printModExpr modExpr; + ] + | Pmty_extension extension -> printExtension extension + | Pmty_alias {txt = longident} -> Doc.concat [ + Doc.text "module "; + printLongident longident; + ] + | Pmty_with (modType, withConstraints) -> + let operand = + let doc = printModType modType in + if Parens.modTypeWithOperand modType then addParens doc else doc + in + Doc.group ( + Doc.concat [ + operand; + Doc.indent ( + Doc.concat [ + Doc.line; + printWithConstraints withConstraints; + ] + ) + ] + ) + in + let attrsAlreadyPrinted = match modType.pmty_desc with + | Pmty_functor _ | Pmty_signature _ | Pmty_ident _ -> true + | _ -> false in + Doc.concat [ + if attrsAlreadyPrinted then Doc.nil else printAttributes modType.pmty_attributes; + modTypeDoc; + ] + + and printWithConstraints withConstraints = + let rows =List.mapi (fun i withConstraint -> + Doc.group ( + Doc.concat [ + if i == 0 then Doc.text "with " else Doc.text "and "; + printWithConstraint withConstraint; + ] + ) + ) withConstraints + in + Doc.join ~sep:Doc.line rows + + and printWithConstraint (withConstraint : Parsetree.with_constraint) = + match withConstraint with + (* with type X.t = ... 
*) + | Pwith_type ({txt = longident}, typeDeclaration) -> + Doc.group (printTypeDeclaration + ~name:(printLongident longident) + ~equalSign:"=" + ~recFlag:Doc.nil + 0 + typeDeclaration) + (* with module X.Y = Z *) + | Pwith_module ({txt = longident1}, {txt = longident2}) -> + Doc.concat [ + Doc.text "module "; + printLongident longident1; + Doc.text " ="; + Doc.indent ( + Doc.concat [ + Doc.line; + printLongident longident2; + ] + ) + ] + (* with type X.t := ..., same format as [Pwith_type] *) + | Pwith_typesubst ({txt = longident}, typeDeclaration) -> + Doc.group(printTypeDeclaration + ~name:(printLongident longident) + ~equalSign:":=" + ~recFlag:Doc.nil + 0 + typeDeclaration) + | Pwith_modsubst ({txt = longident1}, {txt = longident2}) -> + Doc.concat [ + Doc.text "module "; + printLongident longident1; + Doc.text " :="; + Doc.indent ( + Doc.concat [ + Doc.line; + printLongident longident2; + ] + ) + ] + + and printSignature signature = + interleaveWhitespace ( + List.map (fun si -> (si.Parsetree.psig_loc, printSignatureItem si)) signature + ) + + and printSignatureItem (si : Parsetree.signature_item) = + match si.psig_desc with + | Parsetree.Psig_value valueDescription -> + printValueDescription valueDescription + | Psig_type (recFlag, typeDeclarations) -> + let recFlag = match recFlag with + | Asttypes.Nonrecursive -> Doc.nil + | Asttypes.Recursive -> Doc.text "rec " + in + printTypeDeclarations ~recFlag typeDeclarations + | Psig_typext typeExtension -> + printTypeExtension typeExtension + | Psig_exception extensionConstructor -> + printExceptionDef extensionConstructor + | Psig_module moduleDeclaration -> + printModuleDeclaration moduleDeclaration + | Psig_recmodule moduleDeclarations -> + printRecModuleDeclarations moduleDeclarations + | Psig_modtype modTypeDecl -> + printModuleTypeDeclaration modTypeDecl + | Psig_open openDescription -> + printOpenDescription openDescription + | Psig_include includeDescription -> + printIncludeDescription includeDescription + | Psig_attribute attr -> Doc.concat [Doc.text "@"; printAttribute attr] + | Psig_extension (extension, attrs) -> Doc.concat [ + printAttributes attrs; + Doc.concat [Doc.text "%";printExtension extension]; + ] + | Psig_class _ | Psig_class_type _ -> Doc.nil + + and printRecModuleDeclarations moduleDeclarations = + Doc.group ( + Doc.join ~sep:Doc.line ( + List.mapi (fun i (md: Parsetree.module_declaration) -> + let body = match md.pmd_type.pmty_desc with + | Parsetree.Pmty_alias {txt = longident } -> + Doc.concat [Doc.text " = "; printLongident longident] + | _ -> + let needsParens = match md.pmd_type.pmty_desc with + | Pmty_with _ -> true + | _ -> false + in + let modTypeDoc = + let doc = printModType md.pmd_type in + if needsParens then addParens doc else doc + in + Doc.concat [Doc.text ": "; modTypeDoc] + in + let prefix = if i < 1 then "module rec " else "and " in + Doc.concat [ + printAttributes ~loc:md.pmd_name.loc md.pmd_attributes; + Doc.text prefix; + Doc.text md.pmd_name.txt; + body + ] + ) moduleDeclarations + ) + ) + + and printModuleDeclaration (md: Parsetree.module_declaration) = + let body = match md.pmd_type.pmty_desc with + | Parsetree.Pmty_alias {txt = longident } -> + Doc.concat [Doc.text " = "; printLongident longident] + | _ -> Doc.concat [Doc.text ": "; printModType md.pmd_type] + in + Doc.concat [ + printAttributes ~loc:md.pmd_name.loc md.pmd_attributes; + Doc.text "module "; + Doc.text md.pmd_name.txt; + body + ] + + and printOpenDescription (openDescription : Parsetree.open_description) = + Doc.concat [ + 
printAttributes openDescription.popen_attributes; + Doc.text "open"; + (match openDescription.popen_override with + | Asttypes.Fresh -> Doc.space + | Asttypes.Override -> Doc.text "! "); + printLongident openDescription.popen_lid.txt + ] + + and printIncludeDescription (includeDescription: Parsetree.include_description) = + Doc.concat [ + printAttributes includeDescription.pincl_attributes; + Doc.text "include "; + printModType includeDescription.pincl_mod; + ] + + and printIncludeDeclaration (includeDeclaration : Parsetree.include_declaration) = + Doc.concat [ + printAttributes includeDeclaration.pincl_attributes; + Doc.text "include "; + printModExpr includeDeclaration.pincl_mod; + ] + + + and printValueBindings ~recFlag (vbs: Parsetree.value_binding list) = + let rows = List.mapi (fun i vb -> + let doc = printValueBinding ~recFlag i vb in + (vb.Parsetree.pvb_loc, doc) + ) vbs + in + interleaveWhitespace rows + + (* + * type value_description = { + * pval_name : string Asttypes.loc; + * pval_type : Parsetree.core_type; + * pval_prim : string list; + * pval_attributes : Parsetree.attributes; + * pval_loc : Location.t; + * } + *) + and printValueDescription valueDescription = + let isExternal = + match valueDescription.pval_prim with | [] -> false | _ -> true + in + Doc.group ( + Doc.concat [ + Doc.text (if isExternal then "external " else "let "); + Doc.text valueDescription.pval_name.txt; + Doc.text ": "; + printTypExpr valueDescription.pval_type; + if isExternal then + Doc.group ( + Doc.concat [ + Doc.text " ="; + Doc.indent( + Doc.concat [ + Doc.line; + Doc.join ~sep:Doc.line ( + List.map(fun s -> Doc.concat [ + Doc.text "\""; + Doc.text s; + Doc.text "\""; + ]) + valueDescription.pval_prim + ); + ] + ) + ] + ) + else Doc.nil + ] + ) + + and printTypeDeclarations ~recFlag typeDeclarations = + let rows = List.mapi (fun i td -> + let doc = printTypeDeclaration + ~name:(Doc.text td.Parsetree.ptype_name.txt) + ~equalSign:"=" + ~recFlag + i td + in + (td.Parsetree.ptype_loc, doc) + ) typeDeclarations in + interleaveWhitespace rows + + (* + * type_declaration = { + * ptype_name: string loc; + * ptype_params: (core_type * variance) list; + * (* ('a1,...'an) t; None represents _*) + * ptype_cstrs: (core_type * core_type * Location.t) list; + * (* ... constraint T1=T1' ... constraint Tn=Tn' *) + * ptype_kind: type_kind; + * ptype_private: private_flag; (* = private ... *) + * ptype_manifest: core_type option; (* = T *) + * ptype_attributes: attributes; (* ... [@@id1] [@@id2] *) + * ptype_loc: Location.t; + * } + * + * + * type t (abstract, no manifest) + * type t = T0 (abstract, manifest=T0) + * type t = C of T | ... (variant, no manifest) + * type t = T0 = C of T | ... (variant, manifest=T0) + * type t = {l: T; ...} (record, no manifest) + * type t = T0 = {l : T; ...} (record, manifest=T0) + * type t = .. 
(open, no manifest) + * + * + * and type_kind = + * | Ptype_abstract + * | Ptype_variant of constructor_declaration list + * (* Invariant: non-empty list *) + * | Ptype_record of label_declaration list + * (* Invariant: non-empty list *) + * | Ptype_open + *) + and printTypeDeclaration ~name ~equalSign ~recFlag i (td: Parsetree.type_declaration) = + let attrs = printAttributes ~loc:td.ptype_loc td.ptype_attributes in + let prefix = if i > 0 then + Doc.text "and " + else + Doc.concat [Doc.text "type "; recFlag] + in + let typeName = name in + let typeParams = match td.ptype_params with + | [] -> Doc.nil + | typeParams -> Doc.group ( + Doc.concat [ + Doc.lessThan; + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) ( + List.map printTypeParam typeParams + ) + ] + ); + Doc.trailingComma; + Doc.softLine; + Doc.greaterThan; + ] + ) + in + let manifestAndKind = match td.ptype_kind with + | Ptype_abstract -> + begin match td.ptype_manifest with + | None -> Doc.nil + | Some(typ) -> + Doc.concat [ + Doc.concat [Doc.space; Doc.text equalSign; Doc.space]; + printPrivateFlag td.ptype_private; + printTypExpr typ; + ] + end + | Ptype_open -> Doc.concat [ + Doc.concat [Doc.space; Doc.text equalSign; Doc.space]; + printPrivateFlag td.ptype_private; + Doc.text ".."; + ] + | Ptype_record(lds) -> + let manifest = match td.ptype_manifest with + | None -> Doc.nil + | Some(typ) -> Doc.concat [ + Doc.concat [Doc.space; Doc.text equalSign; Doc.space]; + printTypExpr typ; + ] + in + Doc.concat [ + manifest; + Doc.concat [Doc.space; Doc.text equalSign; Doc.space]; + printPrivateFlag td.ptype_private; + printRecordDeclaration lds; + ] + | Ptype_variant(cds) -> + let manifest = match td.ptype_manifest with + | None -> Doc.nil + | Some(typ) -> Doc.concat [ + Doc.concat [Doc.space; Doc.text equalSign; Doc.space]; + printTypExpr typ; + ] + in + Doc.concat [ + manifest; + Doc.concat [Doc.space; Doc.text equalSign]; + printConstructorDeclarations ~privateFlag:td.ptype_private cds; + ] + in + let constraints = printTypeDefinitionConstraints td.ptype_cstrs in + Doc.group ( + Doc.concat [ + attrs; + prefix; + typeName; + typeParams; + manifestAndKind; + constraints; + ] + ) + + and printTypeDefinitionConstraints cstrs = + match cstrs with + | [] -> Doc.nil + | cstrs -> Doc.indent ( + Doc.group ( + Doc.concat [ + Doc.line; + Doc.group( + Doc.join ~sep:Doc.line ( + List.map printTypeDefinitionConstraint cstrs + ) + ) + ] + ) + ) + + and printTypeDefinitionConstraint ((typ1, typ2, _loc ): Parsetree.core_type * Parsetree.core_type * Location.t) = + Doc.concat [ + Doc.text "constraint "; + printTypExpr typ1; + Doc.text " = "; + printTypExpr typ2; + ] + + and printPrivateFlag (flag : Asttypes.private_flag) = match flag with + | Private -> Doc.text "private " + | Public -> Doc.nil + + and printTypeParam (param : (Parsetree.core_type * Asttypes.variance)) = + let (typ, variance) = param in + let printedVariance = match variance with + | Covariant -> Doc.text "+" + | Contravariant -> Doc.text "-" + | Invariant -> Doc.nil + in + Doc.concat [ + printedVariance; + printTypExpr typ + ] + + and printRecordDeclaration (lds: Parsetree.label_declaration list) = + let forceBreak = match (lds, List.rev lds) with + | (first::_, last::_) -> + first.pld_loc.loc_start.pos_lnum < last.pld_loc.loc_end.pos_lnum + | _ -> false + in + Doc.breakableGroup ~forceBreak ( + Doc.concat [ + Doc.lbrace; + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) + (List.map 
printLabelDeclaration lds) + ] + ); + Doc.trailingComma; + Doc.softLine; + Doc.rbrace; + ] + ) + + and printConstructorDeclarations ~privateFlag (cds: Parsetree.constructor_declaration list) = + let forceBreak = match (cds, List.rev cds) with + | (first::_, last::_) -> + first.pcd_loc.loc_start.pos_lnum < last.pcd_loc.loc_end.pos_lnum + | _ -> false + in + let privateFlag = match privateFlag with + | Asttypes.Private -> Doc.concat [ + Doc.text "private"; + Doc.line; + ] + | Public -> Doc.nil + in + Doc.breakableGroup ~forceBreak ( + Doc.indent ( + Doc.concat [ + Doc.line; + privateFlag; + Doc.join ~sep:Doc.line ( + List.mapi printConstructorDeclaration cds + ) + ] + ) + ) + + (* + * { + * pcd_name: string loc; + * pcd_args: constructor_arguments; + * pcd_res: core_type option; + * pcd_loc: Location.t; + * pcd_attributes: attributes; (* C of ... [@id1] [@id2] *) + * } + *) + and printConstructorDeclaration i (cd : Parsetree.constructor_declaration) = + let attrs = printAttributes cd.pcd_attributes in + let bar = if i > 0 then Doc.text "| " + else Doc.ifBreaks (Doc.text "| ") Doc.nil + in + let constrName = Doc.text cd.pcd_name.txt in + let constrArgs = printConstructorArguments cd.pcd_args in + let gadt = match cd.pcd_res with + | None -> Doc.nil + | Some(typ) -> Doc.indent ( + Doc.concat [ + Doc.text ": "; + printTypExpr typ; + ] + ) + in + Doc.concat [ + bar; + Doc.group ( + Doc.concat [ + attrs; (* TODO: fix parsing of attributes, so when can print them above the bar? *) + constrName; + constrArgs; + gadt; + ] + ) + ] + + and printConstructorArguments (cdArgs : Parsetree.constructor_arguments) = + match cdArgs with + | Pcstr_tuple [] -> Doc.nil + | Pcstr_tuple types -> Doc.group ( + Doc.indent ( + Doc.concat [ + Doc.lparen; + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) ( + List.map printTypExpr types + ) + ] + ); + Doc.trailingComma; + Doc.softLine; + Doc.rparen; + ] + ) + ) + | Pcstr_record lds -> + Doc.indent ( + Doc.concat [ + Doc.lparen; + (* manually inline the printRecordDeclaration, gives better layout *) + Doc.lbrace; + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) + (List.map printLabelDeclaration lds) + ] + ); + Doc.trailingComma; + Doc.softLine; + Doc.rbrace; + Doc.rparen; + ] + ) + + + and printLabelDeclaration (ld : Parsetree.label_declaration) = + let attrs = printAttributes ~loc:ld.pld_name.loc ld.pld_attributes in + let mutableFlag = match ld.pld_mutable with + | Mutable -> Doc.text "mutable " + | Immutable -> Doc.nil + in + let name = Doc.text ld.pld_name.txt in + Doc.group ( + Doc.concat [ + attrs; + mutableFlag; + name; + Doc.text ": "; + printTypExpr ld.pld_type; + ] + ) + + and printTypExpr (typExpr : Parsetree.core_type) = + let renderedType = match typExpr.ptyp_desc with + | Ptyp_any -> Doc.text "_" + | Ptyp_var var -> Doc.text ("'" ^ var) + | Ptyp_extension(extension) -> + printExtension extension + | Ptyp_alias(typ, alias) -> + let typ = + (* Technically type t = (string, float) => unit as 'x, doesn't require + * parens around the arrow expression. This is very confusing though. + * Is the "as" part of "unit" or "(string, float) => unit". 
By printing + * parens we guide the user towards its meaning.*) + let needsParens = match typ.ptyp_desc with + | Ptyp_arrow _ -> true + | _ -> false + in + let doc = printTypExpr typ in + if needsParens then + Doc.concat [Doc.lparen; doc; Doc.rparen] + else + doc + in + Doc.concat [typ; Doc.text " as "; Doc.text ("'" ^ alias)] + | Ptyp_constr({txt = Longident.Ldot(Longident.Lident "Js", "t")}, [typ]) -> + let bsObject = printTypExpr typ in + begin match typExpr.ptyp_attributes with + | [] -> bsObject + | attrs -> + Doc.concat [ + Doc.group ( + Doc.join ~sep:Doc.line (List.map printAttribute attrs) + ); + Doc.space; + printTypExpr typ; + ] + end + | Ptyp_constr(longidentLoc, [{ ptyp_desc = Parsetree.Ptyp_tuple tuple }]) -> + let constrName = printLongident longidentLoc.txt in + Doc.group( + Doc.concat([ + constrName; + Doc.lessThan; + printTupleType ~inline:true tuple; + Doc.greaterThan; + ]) + ) + | Ptyp_constr(longidentLoc, constrArgs) -> + let constrName = printLongident longidentLoc.txt in + begin match constrArgs with + | [] -> constrName + | [{ + Parsetree.ptyp_desc = + Ptyp_constr({txt = Longident.Ldot(Longident.Lident "Js", "t")}, + [{ptyp_desc = Ptyp_object (fields, openFlag)}]) + }] -> + Doc.concat([ + constrName; + Doc.lessThan; + printBsObjectSugar ~inline:true fields openFlag; + Doc.greaterThan; + ]) + | args -> Doc.group( + Doc.concat([ + constrName; + Doc.lessThan; + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) ( + List.map printTypExpr constrArgs + ) + ] + ); + Doc.trailingComma; + Doc.softLine; + Doc.greaterThan; + ]) + ) + end + | Ptyp_arrow _ -> + let (attrsBefore, args, returnType) = ParsetreeViewer.arrowType typExpr in + let returnTypeNeedsParens = match returnType.ptyp_desc with + | Ptyp_alias _ -> true + | _ -> false + in + let returnDoc = + let doc = printTypExpr returnType in + if returnTypeNeedsParens then + Doc.concat [Doc.lparen; doc; Doc.rparen] + else doc + in + let (isUncurried, attrs) = ParsetreeViewer.processUncurriedAttribute attrsBefore in + begin match args with + | [] -> Doc.nil + | [([], Nolabel, n)] when not isUncurried -> + let hasAttrsBefore = not (attrs = []) in + let attrs = if hasAttrsBefore then + Doc.concat [ + Doc.join ~sep:Doc.line (List.map printAttribute attrsBefore); + Doc.space; + ] + else Doc.nil + in + Doc.group ( + Doc.concat [ + Doc.group attrs; + Doc.group ( + if hasAttrsBefore then + Doc.concat [ + Doc.lparen; + Doc.indent ( + Doc.concat [ + Doc.softLine; + printTypExpr n; + Doc.text " => "; + returnDoc; + ] + ); + Doc.softLine; + Doc.rparen + ] + else + Doc.concat [ + printTypExpr n; + Doc.text " => "; + returnDoc; + ] + ) + ] + ) + | args -> + let attrs = match attrs with + | [] -> Doc.nil + | attrs -> Doc.concat [ + Doc.join ~sep:Doc.line (List.map printAttribute attrs); + Doc.space; + ] + in + let renderedArgs = Doc.concat [ + attrs; + Doc.text "("; + Doc.indent ( + Doc.concat [ + Doc.softLine; + if isUncurried then Doc.concat [Doc.dot; Doc.space] else Doc.nil; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) ( + List.map printTypeParameter args + ) + ] + ); + Doc.trailingComma; + Doc.softLine; + Doc.text ")"; + ] in + Doc.group ( + Doc.concat [ + renderedArgs; + Doc.text " => "; + returnDoc; + ] + ) + end + | Ptyp_tuple types -> printTupleType ~inline:false types + | Ptyp_object (fields, openFlag) -> + printBsObjectSugar ~inline:false fields openFlag + | Ptyp_poly(stringLocs, typ) -> + Doc.concat [ + Doc.join ~sep:Doc.space (List.map (fun {Location.txt} -> + Doc.text ("'" ^ 
txt)) stringLocs); + Doc.dot; + Doc.space; + printTypExpr typ + ] + | Ptyp_package packageType -> + printPackageType ~printModuleKeywordAndParens:true packageType + | Ptyp_class _ -> failwith "classes are not supported in types" + | Ptyp_variant _ -> failwith "Polymorphic variants currently not supported" + in + let shouldPrintItsOwnAttributes = match typExpr.ptyp_desc with + | Ptyp_arrow _ (* es6 arrow types print their own attributes *) + | Ptyp_constr({txt = Longident.Ldot(Longident.Lident "Js", "t")}, _) -> true + | _ -> false + in + begin match typExpr.ptyp_attributes with + | _::_ as attrs when not shouldPrintItsOwnAttributes -> + Doc.group ( + Doc.concat [ + printAttributes attrs; + renderedType; + ] + ) + | _ -> renderedType + end + + and printBsObjectSugar ~inline fields openFlag = + let flag = match openFlag with + | Asttypes.Closed -> Doc.nil + | Open -> Doc.dotdot + in + let doc = Doc.concat [ + Doc.lbrace; + flag; + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) ( + List.map printObjectField fields + ) + ] + ); + Doc.trailingComma; + Doc.softLine; + Doc.rbrace; + ] in + if inline then doc else Doc.group doc + + + and printTupleType ~inline (types: Parsetree.core_type list) = + let tuple = Doc.concat([ + Doc.text "/"; + Doc.indent ( + Doc.concat([ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) ( + List.map printTypExpr types + ) + ]) + ); + (* Doc.trailingComma; *) (* Trailing comma not supported in tuples right now‚ͬ†*) + Doc.softLine; + Doc.text "/"; + ]) + in + if inline == false then Doc.group(tuple) else tuple + + and printObjectField (field : Parsetree.object_field) = + match field with + | Otag (labelLoc, attrs, typ) -> + Doc.concat [ + Doc.text ("\"" ^ labelLoc.txt ^ "\""); + Doc.text ": "; + printTypExpr typ; + ] + | _ -> Doc.nil + + (* es6 arrow type arg + * type t = (~foo: string, ~bar: float=?, unit) => unit + * i.e. ~foo: string, ~bar: float *) + and printTypeParameter (attrs, lbl, typ) = + let (isUncurried, attrs) = ParsetreeViewer.processUncurriedAttribute attrs in + let uncurried = if isUncurried then Doc.concat [Doc.dot; Doc.space] else Doc.nil in + let attrs = match attrs with + | [] -> Doc.nil + | attrs -> Doc.concat [ + Doc.join ~sep:Doc.line (List.map printAttribute attrs); + Doc.line; + ] in + let label = match lbl with + | Asttypes.Nolabel -> Doc.nil + | Labelled lbl -> Doc.text ("~" ^ lbl ^ ": ") + | Optional lbl -> Doc.text ("~" ^ lbl ^ ": ") + in + let optionalIndicator = match lbl with + | Asttypes.Nolabel + | Labelled _ -> Doc.nil + | Optional lbl -> Doc.text "=?" 
+ in + Doc.group ( + Doc.concat [ + uncurried; + attrs; + label; + printTypExpr typ; + optionalIndicator; + ] + ) + + + (* + * { + * pvb_pat: pattern; + * pvb_expr: expression; + * pvb_attributes: attributes; + * pvb_loc: Location.t; + * } + *) + and printValueBinding ~recFlag i vb = + let isGhost = ParsetreeViewer.isGhostUnitBinding i vb in + let header = if isGhost then Doc.nil else + if i == 0 then Doc.concat [Doc.text "let "; recFlag] + else Doc.text "and " + in + let printedExpr = + let exprDoc = printExpression vb.pvb_expr in + let needsParens = match vb.pvb_expr.pexp_desc with + | Pexp_constraint( + {pexp_desc = Pexp_pack _}, + {ptyp_desc = Ptyp_package _} + ) -> false + | Pexp_constraint _ -> true + | _ -> false + in + if needsParens then addParens exprDoc else exprDoc + in + if isGhost then + printedExpr + else + let shouldIndent = + ParsetreeViewer.isBinaryExpression vb.pvb_expr || + (match vb.pvb_expr with + | { + pexp_attributes = [({Location.txt="ns.ternary"}, _)]; + pexp_desc = Pexp_ifthenelse (ifExpr, _, _) + } -> + ParsetreeViewer.isBinaryExpression ifExpr || ParsetreeViewer.hasAttributes ifExpr.pexp_attributes + | { pexp_desc = Pexp_newtype _} -> false + | e -> + ParsetreeViewer.hasAttributes e.pexp_attributes || + ParsetreeViewer.isArrayAccess e + ) + in + Doc.concat [ + printAttributes ~loc:vb.pvb_loc vb.pvb_attributes; + header; + printPattern vb.pvb_pat; + Doc.text " ="; + if shouldIndent then + Doc.indent ( + Doc.concat [ + Doc.line; + printedExpr; + ] + ) + else + Doc.concat [ + Doc.space; + printedExpr; + ] + ] + + and printPackageType ~printModuleKeywordAndParens (packageType: Parsetree.package_type) = + let doc = match packageType with + | (longidentLoc, []) -> Doc.group( + Doc.concat [ + printLongident longidentLoc.txt; + ] + ) + | (longidentLoc, packageConstraints) -> Doc.group( + Doc.concat [ + printLongident longidentLoc.txt; + printPackageConstraints packageConstraints; + Doc.softLine; + ] + ) + in + if printModuleKeywordAndParens then + Doc.concat[ + Doc.text "module("; + doc; + Doc.rparen + ] + else + doc + + + + + and printPackageConstraints packageConstraints = + Doc.concat [ + Doc.text " with"; + Doc.indent ( + Doc.concat [ + Doc.line; + Doc.join ~sep:Doc.line ( + List.mapi printPackageconstraint packageConstraints + ) + ] + ) + ] + + and printPackageconstraint i (longidentLoc, typ) = + let prefix = if i == 0 then Doc.text "type " else Doc.text "and type " in + Doc.concat [ + prefix; + printLongident longidentLoc.Location.txt; + Doc.text " = "; + printTypExpr typ + ] + + and printExtension (stringLoc, payload) = + let extName = Doc.text ("%" ^ stringLoc.Location.txt) in + match payload with + | PStr [{pstr_desc = Pstr_eval (expr, attrs)}] -> + let exprDoc = printExpression expr in + let needsParens = match attrs with | [] -> false | _ -> true in + Doc.group ( + Doc.concat [ + extName; + addParens ( + Doc.concat [ + printAttributes attrs; + if needsParens then addParens exprDoc else exprDoc; + ] + ) + ] + ) + | _ -> extName + + and printPattern (p : Parsetree.pattern) = + let patternWithoutAttributes = match p.ppat_desc with + | Ppat_any -> Doc.text "_" + | Ppat_var stringLoc -> Doc.text (stringLoc.txt) + | Ppat_constant c -> printConstant c + | Ppat_tuple patterns -> + Doc.group( + Doc.concat([ + Doc.text "/"; + Doc.indent ( + Doc.concat([ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.text ","; Doc.line]) + (List.map printPattern patterns) + ]) + ); + (* Doc.ifBreaks (Doc.text ",") Doc.nil; *) + Doc.softLine; + Doc.text "/"; + ]) + ) + | Ppat_array 
patterns -> + Doc.group( + Doc.concat([ + Doc.text "["; + Doc.indent ( + Doc.concat([ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.text ","; Doc.line]) + (List.map printPattern patterns) + ]) + ); + Doc.ifBreaks (Doc.text ",") Doc.nil; + Doc.softLine; + Doc.text "]"; + ]) + ) + | Ppat_construct({txt = Longident.Lident "[]"}, _) -> + Doc.text "list()" + | Ppat_construct({txt = Longident.Lident "::"}, _) -> + let (patterns, tail) = collectPatternsFromListConstruct [] p in + let shouldHug = match (patterns, tail) with + | ([pat], + {ppat_desc = Ppat_construct({txt = Longident.Lident "[]"}, _)}) when ParsetreeViewer.isHuggablePattern pat -> true + | _ -> false + in + let children = Doc.concat([ + if shouldHug then Doc.nil else Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.text ","; Doc.line]) + (List.map printPattern patterns); + begin match tail.Parsetree.ppat_desc with + | Ppat_construct({txt = Longident.Lident "[]"}, _) -> Doc.nil + | _ -> Doc.concat([Doc.text ","; Doc.line; Doc.text "..."; printPattern tail]) + end; + ]) in + Doc.group( + Doc.concat([ + Doc.text "list("; + if shouldHug then children else Doc.concat [ + Doc.indent children; + Doc.ifBreaks (Doc.text ",") Doc.nil; + Doc.softLine; + ]; + Doc.text ")"; + ]) + ) + | Ppat_construct(constrName, constructorArgs) -> + let constrName = printLongident constrName.txt in + begin match constructorArgs with + | None -> constrName + | Some(args) -> + let args = match args.ppat_desc with + | Ppat_construct({txt = Longident.Lident "()"}, None) -> [Doc.nil] + | Ppat_tuple(patterns) -> List.map printPattern patterns + | _ -> [printPattern args] + in + Doc.group( + Doc.concat([ + constrName; + Doc.text "("; + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.text ","; Doc.line]) + args + ] + ); + Doc.ifBreaks (Doc.text ",") Doc.nil; + Doc.softLine; + Doc.text ")"; + ]) + ) + end + | Ppat_record(rows, openFlag) -> + Doc.group( + Doc.concat([ + Doc.text "{"; + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.text ","; Doc.line]) + (List.map printPatternRecordRow rows); + begin match openFlag with + | Open -> Doc.concat [Doc.text ","; Doc.line; Doc.text "_"] + | Closed -> Doc.nil + end; + ] + ); + Doc.ifBreaks (Doc.text ",") Doc.nil; + Doc.softLine; + Doc.text "}"; + ]) + ) + + | Ppat_exception p -> + let needsParens = match p.ppat_desc with + | Ppat_or (_, _) | Ppat_alias (_, _) -> true + | _ -> false + in + let pat = + let p = printPattern p in + if needsParens then + Doc.concat [Doc.text "("; p; Doc.text ")"] + else + p + in + Doc.group ( + Doc.concat [ Doc.text "exception"; Doc.line; pat ] + ) + | Ppat_or (p1, p2) -> + let p1 = + let p = printPattern p1 in + match p1.ppat_desc with + | Ppat_or (_, _) -> Doc.concat [Doc.text "("; p; Doc.text ")"] + | _ -> p + in + let p2 = + let p = printPattern p2 in + match p2.ppat_desc with + | Ppat_or (_, _) -> Doc.concat [Doc.text "("; p; Doc.text ")"] + | _ -> p + in + Doc.group( + Doc.concat([p1; Doc.line; Doc.text "| "; p2]) + ) + | Ppat_extension ext -> + printExtension ext + | Ppat_lazy p -> + let needsParens = match p.ppat_desc with + | Ppat_or (_, _) | Ppat_alias (_, _) -> true + | _ -> false + in + let pat = + let p = printPattern p in + if needsParens then + Doc.concat [Doc.text "("; p; Doc.text ")"] + else + p + in + Doc.concat [Doc.text "lazy "; pat] + | Ppat_alias (p, aliasLoc) -> + let needsParens = match p.ppat_desc with + | Ppat_or (_, _) | Ppat_alias (_, _) -> true + | _ -> false + in + let renderedPattern = + let p = printPattern 
p in + if needsParens then + Doc.concat [Doc.text "("; p; Doc.text ")"] + else + p + in + Doc.concat([ + renderedPattern; + Doc.text " as "; + Doc.text aliasLoc.txt + ]) + + (* Note: module(P : S) is represented as *) + (* Ppat_constraint(Ppat_unpack, Ptyp_package) *) + | Ppat_constraint ({ppat_desc = Ppat_unpack stringLoc}, {ptyp_desc = Ptyp_package packageType}) -> + Doc.concat [ + Doc.text "module("; + Doc.text stringLoc.txt; + Doc.text ": "; + printPackageType ~printModuleKeywordAndParens:false packageType; + Doc.rparen; + ] + | Ppat_constraint (pattern, typ) -> + Doc.concat [ + printPattern pattern; + Doc.text ": "; + printTypExpr typ; + ] + + (* Note: module(P : S) is represented as *) + (* Ppat_constraint(Ppat_unpack, Ptyp_package) *) + | Ppat_unpack stringLoc -> + Doc.concat [ + Doc.text "module("; + Doc.text stringLoc.txt; + Doc.rparen; + ] + | _ -> failwith "unsupported pattern" + in + begin match p.ppat_attributes with + | [] -> patternWithoutAttributes + | attrs -> + Doc.group ( + Doc.concat [ + printAttributes attrs; + patternWithoutAttributes; + ] + ) + end + + and printPatternRecordRow row = + match row with + (* punned {x}*) + | ({Location.txt=Longident.Lident ident}, + {Parsetree.ppat_desc=Ppat_var {txt;_}}) when ident = txt -> + Doc.text ident + | (longident, pattern) -> + Doc.group ( + Doc.concat([ + printLongident longident.txt; + Doc.text ": "; + Doc.indent( + Doc.concat [ + Doc.softLine; + printPattern pattern; + ] + ) + ]) + ) + + and printExpression (e : Parsetree.expression) = + let printedExpression = match e.pexp_desc with + | Parsetree.Pexp_constant c -> printConstant c + | Pexp_construct _ when ParsetreeViewer.hasJsxAttribute e.pexp_attributes -> + printJsxFragment e + | Pexp_construct ({txt = Longident.Lident "()"}, _) -> Doc.text "()" + | Pexp_construct ({txt = Longident.Lident "[]"}, _) -> Doc.text "list()" + | Pexp_construct ({txt = Longident.Lident "::"}, _) -> + let (expressions, spread) = ParsetreeViewer.collectListExpressions e in + let spreadDoc = match spread with + | Some(expr) -> Doc.concat [ + Doc.text ","; + Doc.line; + Doc.dotdotdot; + printExpression expr + ] + | None -> Doc.nil + in + Doc.group( + Doc.concat([ + Doc.text "list("; + Doc.indent ( + Doc.concat([ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.text ","; Doc.line]) + (List.map printExpression expressions); + spreadDoc; + ]) + ); + Doc.trailingComma; + Doc.softLine; + Doc.rparen; + ]) + ) + | Pexp_construct (longidentLoc, args) -> + let constr = printLongident longidentLoc.txt in + let args = match args with + | None -> Doc.nil + | Some({pexp_desc = Pexp_construct ({txt = Longident.Lident "()"}, _)}) -> + Doc.text "()" + | Some({pexp_desc = Pexp_tuple args }) -> + Doc.concat [ + Doc.lparen; + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) ( + List.map printExpression args + ) + ] + ); + Doc.trailingComma; + Doc.softLine; + Doc.rparen; + ] + | Some(arg) -> + let argDoc = printExpression arg in + let shouldHug = ParsetreeViewer.isHuggableExpression arg in + Doc.concat [ + Doc.lparen; + if shouldHug then argDoc + else Doc.concat [ + Doc.indent ( + Doc.concat [ + Doc.softLine; + argDoc; + ] + ); + Doc.trailingComma; + Doc.softLine; + ]; + Doc.rparen; + ] + in + Doc.group(Doc.concat [constr; args]) + | Pexp_ident(longidentLoc) -> + printLongident longidentLoc.txt + | Pexp_tuple exprs -> + Doc.group( + Doc.concat([ + Doc.text "/"; + Doc.indent ( + Doc.concat([ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.text ","; Doc.line]) + (List.map 
printExpression exprs) + ]) + ); + Doc.ifBreaks (Doc.text ",") Doc.nil; + Doc.softLine; + Doc.text "/"; + ]) + ) + | Pexp_array [] -> Doc.text "[]" + | Pexp_array exprs -> + Doc.group( + Doc.concat([ + Doc.lbracket; + Doc.indent ( + Doc.concat([ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.text ","; Doc.line]) + (List.map printExpression exprs) + ]) + ); + Doc.trailingComma; + Doc.softLine; + Doc.rbracket; + ]) + ) + | Pexp_record (rows, spreadExpr) -> + let spread = match spreadExpr with + | None -> Doc.nil + | Some expr -> Doc.concat [ + Doc.dotdotdot; + printExpression expr; + Doc.comma; + Doc.line; + ] + in + (* If the record is written over multiple lines, break automatically + * `let x = {a: 1, b: 3}` -> same line, break when line-width exceeded + * `let x = { + * a: 1, + * b: 2, + * }` -> record is written on multiple lines, break the group *) + let forceBreak = + e.pexp_loc.loc_start.pos_lnum < e.pexp_loc.loc_end.pos_lnum + in + Doc.breakableGroup ~forceBreak ( + Doc.concat([ + Doc.lbrace; + Doc.indent ( + Doc.concat [ + Doc.softLine; + spread; + Doc.join ~sep:(Doc.concat [Doc.text ","; Doc.line]) + (List.map printRecordRow rows) + ] + ); + Doc.trailingComma; + Doc.softLine; + Doc.rbrace; + ]) + ) + | Pexp_extension extension -> + begin match extension with + | ( + {txt = "bs.obj"}, + PStr [{ + pstr_loc = loc; + pstr_desc = Pstr_eval({pexp_desc = Pexp_record (rows, _)}, []) + }] + ) -> + (* If the object is written over multiple lines, break automatically + * `let x = {"a": 1, "b": 3}` -> same line, break when line-width exceeded + * `let x = { + * "a": 1, + * "b": 2, + * }` -> object is written on multiple lines, break the group *) + let forceBreak = + loc.loc_start.pos_lnum < loc.loc_end.pos_lnum + in + Doc.breakableGroup ~forceBreak ( + Doc.concat([ + Doc.lbrace; + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.text ","; Doc.line]) + (List.map printBsObjectRow rows) + ] + ); + Doc.trailingComma; + Doc.softLine; + Doc.rbrace; + ]) + ) + | extension -> + printExtension extension + end + | Pexp_apply _ -> + if ParsetreeViewer.isUnaryExpression e then + printUnaryExpression e + else if ParsetreeViewer.isBinaryExpression e then + printBinaryExpression e + else + printPexpApply e + | Pexp_unreachable -> Doc.dot + | Pexp_field (expr, longidentLoc) -> + let lhs = + let doc = printExpression expr in + if Parens.fieldExpr expr then addParens doc else doc + in + Doc.concat [ + lhs; + Doc.dot; + printLongident longidentLoc.txt; + ] + | Pexp_setfield (expr1, longidentLoc, expr2) -> + printSetFieldExpr e.pexp_attributes expr1 longidentLoc expr2 + | Pexp_ifthenelse (ifExpr, thenExpr, elseExpr) -> + if ParsetreeViewer.isTernaryExpr e then + let (parts, alternate) = ParsetreeViewer.collectTernaryParts e in + let ternaryDoc = match parts with + | (condition1, consequent1)::rest -> + Doc.group (Doc.concat [ + printTernaryOperand condition1; + Doc.indent ( + Doc.concat [ + Doc.line; + Doc.indent (Doc.concat [Doc.text "? "; printTernaryOperand consequent1]); + Doc.concat ( + List.map (fun (condition, consequent) -> + Doc.concat [ + Doc.line; + Doc.text ": "; + printTernaryOperand condition; + Doc.line; + Doc.text "? 
"; + printTernaryOperand consequent; + ] + ) rest + ); + Doc.line; + Doc.text ": "; + Doc.indent (printTernaryOperand alternate); + ] + ) + ]) + | _ -> Doc.nil + in + let attrs = ParsetreeViewer.filterTernaryAttributes e.pexp_attributes in + let needsParens = match attrs with | [] -> false | _ -> true in + Doc.concat [ + printAttributes attrs; + if needsParens then addParens ternaryDoc else ternaryDoc; + ] + else + let (ifs, elseExpr) = ParsetreeViewer.collectIfExpressions e in + let ifDocs = Doc.join ~sep:Doc.space ( + List.mapi (fun i (ifExpr, thenExpr) -> + let ifTxt = if i > 0 then Doc.text "else if " else Doc.text "if " in + let condition = printExpression ifExpr in + Doc.concat [ + ifTxt; + Doc.group ( + Doc.ifBreaks (addParens condition) condition; + ); + Doc.space; + printExpressionBlock ~braces:true thenExpr; + ] + ) ifs + ) in + let elseDoc = match elseExpr with + | None -> Doc.nil + | Some expr -> Doc.concat [ + Doc.text " else "; + printExpressionBlock ~braces:true expr; + ] + in + Doc.concat [ + printAttributes e.pexp_attributes; + ifDocs; + elseDoc; + ] + | Pexp_while (expr1, expr2) -> + let condition = printExpression expr1 in + Doc.breakableGroup ~forceBreak:true ( + Doc.concat [ + Doc.text "while "; + Doc.group ( + Doc.ifBreaks (addParens condition) condition + ); + Doc.space; + printExpressionBlock ~braces:true expr2; + ] + ) + | Pexp_for (pattern, fromExpr, toExpr, directionFlag, body) -> + Doc.breakableGroup ~forceBreak:true ( + Doc.concat [ + Doc.text "for "; + printPattern pattern; + Doc.text " in "; + printExpression fromExpr; + printDirectionFlag directionFlag; + printExpression toExpr; + Doc.space; + printExpressionBlock ~braces:true body; + ] + ) + | Pexp_constraint( + {pexp_desc = Pexp_pack modExpr}, + {ptyp_desc = Ptyp_package packageType} + ) -> + Doc.group ( + Doc.concat [ + Doc.text "module("; + Doc.indent ( + Doc.concat [ + Doc.softLine; + printModExpr modExpr; + Doc.text ": "; + printPackageType ~printModuleKeywordAndParens:false packageType; + ] + ); + Doc.softLine; + Doc.rparen; + ] + ) + + | Pexp_constraint (expr, typ) -> + Doc.concat [ + printExpression expr; + Doc.text ": "; + printTypExpr typ; + ] + | Pexp_letmodule ({txt = modName}, modExpr, expr) -> + printExpressionBlock ~braces:true e + + | Pexp_letexception (extensionConstructor, expr) -> + printExpressionBlock ~braces:true e + | Pexp_assert expr -> + let rhs = + let doc = printExpression expr in + if Parens.lazyOrAssertExprRhs expr then addParens doc else doc + in + Doc.concat [ + Doc.text "assert "; + rhs; + ] + | Pexp_lazy expr -> + let rhs = + let doc = printExpression expr in + if Parens.lazyOrAssertExprRhs expr then addParens doc else doc + in + Doc.concat [ + Doc.text "lazy "; + rhs; + ] + | Pexp_open (overrideFlag, longidentLoc, expr) -> + printExpressionBlock ~braces:true e + | Pexp_pack (modExpr) -> + Doc.group (Doc.concat [ + Doc.text "module("; + Doc.indent ( + Doc.concat [ + Doc.softLine; + printModExpr modExpr; + ] + ); + Doc.softLine; + Doc.rparen; + ]) + | Pexp_sequence _ -> + printExpressionBlock ~braces:true e + | Pexp_let _ -> + printExpressionBlock ~braces:true e + | Pexp_fun _ | Pexp_newtype _ -> + let (attrsOnArrow, parameters, returnExpr) = ParsetreeViewer.funExpr e in + let (uncurried, attrs) = + ParsetreeViewer.processUncurriedAttribute attrsOnArrow + in + let (returnExpr, typConstraint) = match returnExpr.pexp_desc with + | Pexp_constraint (expr, typ) -> (expr, Some typ) + | _ -> (returnExpr, None) + in + let parametersDoc = printExprFunParameters ~inCallback:false 
~uncurried parameters in + let returnExprDoc = + let shouldInline = match returnExpr.pexp_desc with + | Pexp_array _ + | Pexp_tuple _ + | Pexp_construct (_, Some _) + | Pexp_record _ -> true + | _ -> false + in + let shouldIndent = match returnExpr.pexp_desc with + | Pexp_sequence _ | Pexp_let _ | Pexp_letmodule _ | Pexp_letexception _ -> false + | _ -> true + in + let returnDoc = printExpression returnExpr in + if shouldInline then Doc.concat [ + Doc.space; + returnDoc; + ] else + Doc.group ( + if shouldIndent then + Doc.indent ( + Doc.concat [ + Doc.line; + returnDoc; + ] + ) + else + Doc.concat [ + Doc.space; + returnDoc + ] + ) + in + let typConstraintDoc = match typConstraint with + | Some(typ) -> Doc.concat [Doc.text ": "; printTypExpr typ] + | _ -> Doc.nil + in + let attrs = match attrs with + | [] -> Doc.nil + | attrs -> Doc.concat [ + Doc.join ~sep:Doc.line (List.map printAttribute attrs); + Doc.space; + ] + in + Doc.group ( + Doc.concat [ + attrs; + parametersDoc; + typConstraintDoc; + Doc.text " =>"; + returnExprDoc; + ] + ) + | Pexp_try (expr, cases) -> + Doc.concat [ + Doc.text "try "; + printExpression expr; + Doc.text " catch "; + printCases cases; + ] + | Pexp_match (expr, cases) -> + Doc.concat [ + Doc.text "switch "; + printExpression expr; + Doc.space; + printCases cases; + ] + | _ -> failwith "expression not yet implemented in printer" + in + let shouldPrintItsOwnAttributes = match e.pexp_desc with + | Pexp_apply _ + | Pexp_fun _ + | Pexp_newtype _ + | Pexp_setfield _ + | Pexp_ifthenelse _ -> true + | Pexp_construct _ when ParsetreeViewer.hasJsxAttribute e.pexp_attributes -> true + | _ -> false + in + begin match e.pexp_attributes with + | [] -> printedExpression + | attrs when not shouldPrintItsOwnAttributes -> + Doc.group ( + Doc.concat [ + printAttributes attrs; + printedExpression; + ] + ) + | _ -> printedExpression + end + + and printPexpFun ~inCallback e = + let (attrsOnArrow, parameters, returnExpr) = ParsetreeViewer.funExpr e in + let (uncurried, attrs) = + ParsetreeViewer.processUncurriedAttribute attrsOnArrow + in + let (returnExpr, typConstraint) = match returnExpr.pexp_desc with + | Pexp_constraint (expr, typ) -> (expr, Some typ) + | _ -> (returnExpr, None) + in + let parametersDoc = printExprFunParameters ~inCallback ~uncurried parameters in + let returnShouldIndent = match returnExpr.pexp_desc with + | Pexp_sequence _ | Pexp_let _ | Pexp_letmodule _ | Pexp_letexception _ -> false + | _ -> true + in + let returnExprDoc = + let shouldInline = match returnExpr.pexp_desc with + | Pexp_array _ + | Pexp_tuple _ + | Pexp_construct (_, Some _) + | Pexp_record _ -> true + | _ -> false + in + let returnDoc = printExpression returnExpr in + if shouldInline then Doc.concat [ + Doc.space; + returnDoc; + ] else + Doc.group ( + if returnShouldIndent then + Doc.concat [ + Doc.indent ( + Doc.concat [ + Doc.line; + returnDoc; + ] + ); + if inCallback then Doc.softLine else Doc.nil; + ] + else + Doc.concat [ + Doc.space; + returnDoc; + ] + ) + in + let typConstraintDoc = match typConstraint with + | Some(typ) -> Doc.concat [Doc.text ": "; printTypExpr typ] + | _ -> Doc.nil + in + let attrs = match attrs with + | [] -> Doc.nil + | attrs -> Doc.concat [ + Doc.join ~sep:Doc.line (List.map printAttribute attrs); + Doc.space; + ] + in + Doc.group ( + Doc.concat [ + attrs; + parametersDoc; + typConstraintDoc; + Doc.text " =>"; + returnExprDoc; + ] + ) + + and printTernaryOperand expr = + let doc = printExpression expr in + if Parens.ternaryOperand expr then addParens doc else doc 
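Everything in this printer is assembled from the same small set of Doc combinators: Doc.group decides whether a sub-document fits on one line, Doc.indent / Doc.line / Doc.softLine provide breakable whitespace, and Doc.ifBreaks picks its first argument when the enclosing group breaks and its second when it stays flat (used here for trailing commas). A minimal sketch of the recurring "delimited, comma-separated, trailing comma when broken" pattern, assuming only the combinators already used in this file (the helper name is hypothetical and not part of the diff):

let printCommaSeparated ~lparen ~rparen docs =
  Doc.group (
    Doc.concat [
      Doc.text lparen;
      Doc.indent (
        Doc.concat [
          Doc.softLine;
          Doc.join ~sep:(Doc.concat [Doc.text ","; Doc.line]) docs;
        ]
      );
      (* trailing comma only in the broken layout *)
      Doc.ifBreaks (Doc.text ",") Doc.nil;
      Doc.softLine;
      Doc.text rparen;
    ]
  )

Rendered with Doc.toString ~width:80, as printImplementation does further down, such a group stays on one line when it fits and otherwise breaks at each Doc.line / Doc.softLine.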
+ + and printSetFieldExpr attrs lhs longidentLoc rhs = + let rhsDoc = + let doc = printExpression rhs in + if Parens.setFieldExprRhs rhs then addParens doc else doc + in + let lhsDoc = + let doc = printExpression lhs in + if Parens.fieldExpr lhs then addParens doc else doc + in + let shouldIndent = ParsetreeViewer.isBinaryExpression rhs in + let doc = Doc.concat [ + lhsDoc; + Doc.dot; + printLongident longidentLoc.txt; + Doc.text " ="; + if shouldIndent then Doc.group ( + Doc.indent ( + (Doc.concat [Doc.line; rhsDoc]) + ) + ) else + Doc.concat [Doc.space; rhsDoc] + ] in + match attrs with + | [] -> doc + | attrs -> + Doc.group ( + Doc.concat [ + printAttributes attrs; + doc + ] + ) + + and printUnaryExpression expr = + let printUnaryOperator op = Doc.text ( + match op with + | "~+" -> "+" + | "~+." -> "+." + | "~-" -> "-" + | "~-." -> "-." + | "not" -> "!" + | "!" -> "&" + | _ -> assert false + ) in + match expr.pexp_desc with + | Pexp_apply ( + {pexp_desc = Pexp_ident {txt = Longident.Lident operator}}, + [Nolabel, operand] + ) -> + let printedOperand = + let doc = printExpression operand in + if Parens.unaryExprOperand operand then addParens doc else doc + in + Doc.concat [ + printUnaryOperator operator; + printedOperand; + ] + | _ -> assert false + + and printBinaryExpression (expr : Parsetree.expression) = + let printBinaryOperator ~inlineRhs operator = + let operatorTxt = match operator with + | "|." -> "->" + | "^" -> "++" + | "=" -> "==" + | "==" -> "===" + | "<>" -> "!=" + | "!=" -> "!==" + | txt -> txt + in + let spacingBeforeOperator = + if operator = "|." then Doc.softLine + else if operator = "|>" then Doc.line + else Doc.space; + in + let spacingAfterOperator = + if operator = "|." then Doc.nil + else if operator = "|>" then Doc.space + else if inlineRhs then Doc.space else Doc.line + in + Doc.concat [ + spacingBeforeOperator; + Doc.text operatorTxt; + spacingAfterOperator; + ] + in + let printOperand ~isLhs expr parentOperator = + let rec flatten ~isLhs expr parentOperator = + if ParsetreeViewer.isBinaryExpression expr then + begin match expr with + | {pexp_desc = Pexp_apply ( + {pexp_desc = Pexp_ident {txt = Longident.Lident operator}}, + [_, left; _, right] + )} -> + if ParsetreeViewer.flattenableOperators parentOperator operator && + not (ParsetreeViewer.hasAttributes expr.pexp_attributes) then + let leftPrinted = flatten ~isLhs:true left operator in + let rightPrinted = + let (_, rightAttrs) = + ParsetreeViewer.partitionPrinteableAttributes right.pexp_attributes + in + let doc = + printExpression {right with pexp_attributes = rightAttrs } in + let doc = if Parens.flattenOperandRhs parentOperator right then + Doc.concat [Doc.lparen; doc; Doc.rparen] + else + doc + in + let printeableAttrs = + ParsetreeViewer.filterPrinteableAttributes right.pexp_attributes + in + Doc.concat [printAttributes printeableAttrs; doc] + in + Doc.concat [ + leftPrinted; + printBinaryOperator ~inlineRhs:false operator; + rightPrinted; + ] + else + let doc = printExpression {expr with pexp_attributes = []} in + let doc = if Parens.subBinaryExprOperand parentOperator operator || + (expr.pexp_attributes <> [] && + (ParsetreeViewer.isBinaryExpression expr || + ParsetreeViewer.isTernaryExpr expr)) then + Doc.concat [Doc.lparen; doc; Doc.rparen] + else doc + in Doc.concat [ + printAttributes expr.pexp_attributes; + doc + ] + | _ -> assert false + end + else + begin match expr.pexp_desc with + | Pexp_setfield (lhs, field, rhs) -> + let doc = printSetFieldExpr expr.pexp_attributes lhs field rhs in + if 
isLhs then addParens doc else doc + | Pexp_apply( + {pexp_desc = Pexp_ident {txt = Longident.Lident "#="}}, + [(Nolabel, lhs); (Nolabel, rhs)] + ) -> + let rhsDoc = printExpression rhs in + let lhsDoc = printExpression lhs in + (* TODO: unify indentation of "=" *) + let shouldIndent = ParsetreeViewer.isBinaryExpression rhs in + let doc = Doc.group( + Doc.concat [ + lhsDoc; + Doc.text " ="; + if shouldIndent then Doc.group ( + Doc.indent (Doc.concat [Doc.line; rhsDoc]) + ) else + Doc.concat [Doc.space; rhsDoc] + ] + ) in + let doc = match expr.pexp_attributes with + | [] -> doc + | attrs -> + Doc.group ( + Doc.concat [ + printAttributes attrs; + doc + ] + ) + in + if isLhs then addParens doc else doc + | _ -> + let doc = printExpression expr in + if Parens.binaryExprOperand ~isLhs expr parentOperator then + addParens doc + else doc + end + in + flatten ~isLhs expr parentOperator + in + match expr.pexp_desc with + | Pexp_apply ( + {pexp_desc = Pexp_ident {txt = Longident.Lident (("|." | "|>") as op)}}, + [Nolabel, lhs; Nolabel, rhs] + ) when not ( + ParsetreeViewer.isBinaryExpression lhs || + ParsetreeViewer.isBinaryExpression rhs + ) -> + let lhsDoc = printOperand ~isLhs:true lhs op in + let rhsDoc = printOperand ~isLhs:false rhs op in + Doc.concat [ + lhsDoc; + (match op with + | "|." -> Doc.text "->" + | "|>" -> Doc.text " |> " + | _ -> assert false); + rhsDoc; + ] + | Pexp_apply ( + {pexp_desc = Pexp_ident {txt = Longident.Lident operator}}, + [Nolabel, lhs; Nolabel, rhs] + ) -> + let right = + let operatorWithRhs = Doc.concat [ + printBinaryOperator + ~inlineRhs:(ParsetreeViewer.shouldInlineRhsBinaryExpr rhs) operator; + printOperand ~isLhs:false rhs operator; + ] in + if ParsetreeViewer.shouldIndentBinaryExpr expr then + Doc.group (Doc.indent operatorWithRhs) + else operatorWithRhs + in + let doc = Doc.group ( + Doc.concat [ + printOperand ~isLhs:true lhs operator; + right + ] + ) in + Doc.concat [ + printAttributes expr.pexp_attributes; + if Parens.binaryExpr expr then addParens doc else doc + ] + | _ -> Doc.nil + + (* callExpr(arg1, arg2)*) + and printPexpApply expr = + match expr.pexp_desc with + | Pexp_apply ( + {pexp_desc = Pexp_ident {txt = Longident.Lident "##"}}, + [Nolabel, parentExpr; Nolabel, memberExpr] + ) -> + let member = + let memberDoc = printExpression memberExpr in + Doc.concat [Doc.text "\""; memberDoc; Doc.text "\""] + in + Doc.group (Doc.concat [ + printAttributes expr.pexp_attributes; + printExpression parentExpr; + Doc.lbracket; + member; + Doc.rbracket; + ]) + | Pexp_apply ( + {pexp_desc = Pexp_ident {txt = Longident.Lident "#="}}, + [Nolabel, lhs; Nolabel, rhs] + ) -> + let rhsDoc = printExpression rhs in + (* TODO: unify indentation of "=" *) + let shouldIndent = ParsetreeViewer.isBinaryExpression rhs in + let doc = Doc.group( + Doc.concat [ + printExpression lhs; + Doc.text " ="; + if shouldIndent then Doc.group ( + Doc.indent ( + (Doc.concat [Doc.line; rhsDoc]) + ) + ) else + Doc.concat [Doc.space; rhsDoc] + ] + ) in + begin match expr.pexp_attributes with + | [] -> doc + | attrs -> + Doc.group ( + Doc.concat [ + printAttributes attrs; + doc + ] + ) + end + | Pexp_apply ( + {pexp_desc = Pexp_ident {txt = Longident.Ldot (Lident "Array", "get")}}, + [Nolabel, parentExpr; Nolabel, memberExpr] + ) -> + let member = + let memberDoc = printExpression memberExpr in + let shouldInline = match memberExpr.pexp_desc with + | Pexp_constant _ | Pexp_ident _ -> true + | _ -> false + in + if shouldInline then memberDoc else ( + Doc.concat [ + Doc.indent ( + Doc.concat [ + 
Doc.softLine; + memberDoc; + ] + ); + Doc.softLine + ] + ) + in + Doc.group (Doc.concat [ + printAttributes expr.pexp_attributes; + printExpression parentExpr; + Doc.lbracket; + member; + Doc.rbracket; + ]) + (* TODO: cleanup, are those branches even remotely performant? *) + | Pexp_apply ( + {pexp_desc = Pexp_ident {txt = lident}}, + args + ) when ParsetreeViewer.isJsxExpression expr -> + printJsxExpression lident args + | Pexp_apply (callExpr, args) -> + let (uncurried, attrs) = + ParsetreeViewer.processUncurriedAttribute expr.pexp_attributes + in + let callExprDoc = printExpression callExpr in + if ParsetreeViewer.requiresSpecialCallbackPrinting args then + let argsDoc = printArgumentsWithCallback ~uncurried args in + Doc.concat [ + printAttributes attrs; + callExprDoc; + argsDoc; + ] + else + let argsDoc = printArguments ~uncurried args in + Doc.concat [ + printAttributes attrs; + callExprDoc; + argsDoc; + ] + | _ -> assert false + + and printJsxExpression lident args = + let name = printJsxName lident in + let (formattedProps, children) = formatJsxProps args in + (*
<div className="test" /> *) + let isSelfClosing = match children with | [] -> true | _ -> false in + Doc.group ( + Doc.concat [ + Doc.group ( + Doc.concat [ + Doc.lessThan; + name; + formattedProps; + if isSelfClosing then Doc.concat [Doc.line; Doc.text "/>"] else Doc.nil + ] + ); + if isSelfClosing then Doc.nil + else + Doc.concat [ + Doc.greaterThan; + Doc.indent ( + Doc.concat [ + Doc.line; + printJsxChildren children; + ] + ); + Doc.line; + Doc.text "</"; + name; + Doc.greaterThan; + ] + ] + ) + + and printJsxFragment expr = + let opening = Doc.text "<>" in + let closing = Doc.text "</>" in + let (children, _) = ParsetreeViewer.collectListExpressions expr in + Doc.group ( + Doc.concat [ + opening; + begin match children with + | [] -> Doc.nil + | children -> + Doc.indent ( + Doc.concat [ + Doc.line; + printJsxChildren children; + ] + ) + end; + Doc.line; + closing; + ] + ) + + and printJsxChildren (children: Parsetree.expression list) = + Doc.group ( + Doc.join ~sep:Doc.line ( + List.map (fun expr -> + let exprDoc = printExpression expr in + if Parens.jsxChildExpr expr then addBraces exprDoc else exprDoc + ) children + ) + ) + + and formatJsxProps args = + let rec loop props args = + match args with + | [] -> (Doc.nil, []) + | [ + (Asttypes.Labelled "children", children); + ( + Asttypes.Nolabel, + {Parsetree.pexp_desc = Pexp_construct ({txt = Longident.Lident "()"}, None)} + ) + ] -> + let formattedProps = Doc.indent ( + match props with + | [] -> Doc.nil + | props -> + Doc.concat [ + Doc.line; + Doc.group ( + Doc.join ~sep:Doc.line (props |> List.rev) + ) + ] + ) in + let (children, _) = ParsetreeViewer.collectListExpressions children in + (formattedProps, children) + | arg::args -> + let propDoc = formatJsxProp arg in + loop (propDoc::props) args + in + loop [] args + + and formatJsxProp arg = + match arg with + | ( + (Asttypes.Labelled lblTxt | Optional lblTxt) as lbl, + { + Parsetree.pexp_attributes = []; + pexp_desc = Pexp_ident {txt = Longident.Lident ident} + } + ) when lblTxt = ident (* jsx punning *) -> + + begin match lbl with + | Nolabel -> Doc.nil + | Labelled lbl -> Doc.text lbl + | Optional lbl -> Doc.text ("?" ^ lbl) + end + | (lbl, expr) -> + let lblDoc = match lbl with + | Asttypes.Labelled lbl -> Doc.text (lbl ^ "=") + | Asttypes.Optional lbl -> Doc.text (lbl ^ "=?") + | Nolabel -> Doc.nil + in + let exprDoc = printExpression expr in + Doc.concat [ + lblDoc; + if Parens.jsxPropExpr expr then addBraces exprDoc else exprDoc; + ] + + (* div -> div. + * Navbar.createElement -> Navbar + * Staff.Users.createElement -> Staff.Users *) + and printJsxName lident = + let rec flatten acc lident = match lident with + | Longident.Lident txt -> txt::acc + | Ldot (lident, txt) -> + let acc = if txt = "createElement" then acc else txt::acc in + flatten acc lident + | _ -> acc + in + match lident with + | Longident.Lident txt -> Doc.text txt + | _ as lident -> + let segments = flatten [] lident in + Doc.join ~sep:Doc.dot (List.map Doc.text segments) + + and printArgumentsWithCallback ~uncurried args = + let rec loop acc args = match args with + | [] -> (Doc.nil, Doc.nil) + | [_lbl, expr] -> + let callback = printPexpFun ~inCallback:true expr in + (Doc.concat (List.rev acc), callback) + | arg::args -> + let argDoc = printArgument arg in + loop (Doc.line::Doc.comma::argDoc::acc) args + in + let (printedArgs, callback) = loop [] args in + + (* Thing.map(foo,(arg1, arg2) => MyModuleBlah.toList(argument)) *) + let fitsOnOneLine = Doc.concat [ + if uncurried then Doc.text "(."
else Doc.lparen; + Doc.concat [ + printedArgs; + callback; + ]; + Doc.rparen; + ] in + + (* Thing.map(longArgumet, veryLooooongArgument, (arg1, arg2) => + * MyModuleBlah.toList(argument) + * ) + *) + let arugmentsFitOnOneLine = + Doc.concat [ + if uncurried then Doc.text "(." else Doc.lparen; + Doc.concat [ + Doc.softLine; + printedArgs; + Doc.breakableGroup ~forceBreak:true callback; + ]; + Doc.softLine; + Doc.rparen; + ] + in + + (* Thing.map( + * arg1, + * arg2, + * arg3, + * (param1, parm2) => doStuff(param1, parm2) + * ) + *) + let breakAllArgs = printArguments ~uncurried args in + Doc.customLayout [ + fitsOnOneLine; + arugmentsFitOnOneLine; + breakAllArgs; + ] + + and printArguments ~uncurried (args : (Asttypes.arg_label * Parsetree.expression) list) = + match args with + | [Nolabel, {pexp_desc = Pexp_construct ({txt = Longident.Lident "()"}, _)}] -> + if uncurried then Doc.text "(.)" else Doc.text "()" + | [(Nolabel, arg)] when ParsetreeViewer.isHuggableExpression arg -> + Doc.concat [ + if uncurried then Doc.text "(." else Doc.lparen; + printExpression arg; + Doc.rparen; + ] + | args -> Doc.group ( + Doc.concat [ + if uncurried then Doc.text "(." else Doc.lparen; + Doc.indent ( + Doc.concat [ + if uncurried then Doc.line else Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) ( + List.map printArgument args + ) + ] + ); + Doc.trailingComma; + Doc.softLine; + Doc.rparen; + ] + ) + +(* + * argument ::= + * | _ (* syntax sugar *) + * | expr + * | expr : type + * | ~ label-name + * | ~ label-name + * | ~ label-name ? + * | ~ label-name = expr + * | ~ label-name = _ (* syntax sugar *) + * | ~ label-name = expr : type + * | ~ label-name = ? expr + * | ~ label-name = ? _ (* syntax sugar *) + * | ~ label-name = ? expr : type *) + and printArgument ((argLbl, arg) : Asttypes.arg_label * Parsetree.expression) = + match (argLbl, arg) with + (* ~a (punned)*) + | ( + (Asttypes.Labelled lbl), + {pexp_desc=Pexp_ident {txt =Longident.Lident name}} + ) when lbl = name -> + Doc.text ("~" ^ lbl) + (* ~a? 
(optional lbl punned)*) + | ( + (Asttypes.Optional lbl), + {pexp_desc=Pexp_ident {txt =Longident.Lident name}} + ) when lbl = name -> + Doc.text ("~" ^ lbl ^ "?") + | (lbl, expr) -> + let printedLbl = match argLbl with + | Asttypes.Nolabel -> Doc.nil + | Asttypes.Labelled lbl -> Doc.text ("~" ^ lbl ^ "=") + | Asttypes.Optional lbl -> Doc.text ("~" ^ lbl ^ "=?") + in + let printedExpr = printExpression expr in + Doc.concat [ + printedLbl; + printedExpr; + ] + + and printCases (cases: Parsetree.case list) = + Doc.breakableGroup ~forceBreak:true ( + Doc.concat [ + Doc.lbrace; + Doc.concat [ + Doc.line; + Doc.join ~sep:Doc.line ( + List.map printCase cases + ) + ]; + Doc.line; + Doc.rbrace; + ] + ) + + and printCase (case: Parsetree.case) = + let rhs = match case.pc_rhs.pexp_desc with + | Pexp_let _ + | Pexp_letmodule _ + | Pexp_letexception _ + | Pexp_open _ + | Pexp_sequence _ -> + printExpressionBlock ~braces:false case.pc_rhs + | _ -> printExpression case.pc_rhs + in + let guard = match case.pc_guard with + | None -> Doc.nil + | Some expr -> Doc.group ( + Doc.concat [ + Doc.line; + Doc.text "when "; + printExpression expr; + ] + ) + in + Doc.group ( + Doc.concat [ + Doc.text "| "; + Doc.indent ( + Doc.concat [ + printPattern case.pc_lhs; + guard; + Doc.text " =>"; + Doc.line; + rhs; + ] + ); + ] + ) + + and printExprFunParameters ~inCallback ~uncurried parameters = + match parameters with + (* let f = _ => () *) + | [([], Asttypes.Nolabel, None, {Parsetree.ppat_desc = Ppat_any})] when not uncurried -> + Doc.text "_" + (* let f = a => () *) + | [([], Asttypes.Nolabel, None, {Parsetree.ppat_desc = Ppat_var stringLoc})] when not uncurried -> + Doc.text stringLoc.txt + (* let f = () => () *) + | [([], Nolabel, None, {ppat_desc = Ppat_construct({txt = Longident.Lident "()"}, None)})] when not uncurried -> + Doc.text "()" + (* let f = (~greeting, ~from as hometown, ~x=?) => () *) + | parameters -> + let lparen = if uncurried then Doc.text "(. 
" else Doc.lparen in + let shouldHug = ParsetreeViewer.parametersShouldHug parameters in + let printedParamaters = Doc.concat [ + if shouldHug || inCallback then Doc.nil else Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; if inCallback then Doc.space else Doc.line]) + (List.map printExpFunParameter parameters) + ] in + Doc.group ( + Doc.concat [ + lparen; + if shouldHug || inCallback then printedParamaters else Doc.indent (printedParamaters); + if shouldHug || inCallback then Doc.nil else Doc.concat [Doc.trailingComma; Doc.softLine]; + Doc.rparen; + ] + ) + + and printExpFunParameter (attrs, lbl, defaultExpr, pattern) = + let (isUncurried, attrs) = ParsetreeViewer.processUncurriedAttribute attrs in + let uncurried = if isUncurried then Doc.concat [Doc.dot; Doc.space] else Doc.nil in + let attrs = match attrs with + | [] -> Doc.nil + | attrs -> Doc.concat [ + Doc.join ~sep:Doc.line (List.map printAttribute attrs); + Doc.line; + ] in + (* =defaultValue *) + let defaultExprDoc = match defaultExpr with + | Some expr -> Doc.concat [ + Doc.text "="; + printExpression expr + ] + | None -> Doc.nil + in + (* ~from as hometown + * ~from -> punning *) + let labelWithPattern = match (lbl, pattern) with + | (Asttypes.Nolabel, pattern) -> printPattern pattern + | ( + (Asttypes.Labelled lbl | Optional lbl), + {ppat_desc = Ppat_var stringLoc} + ) when lbl = stringLoc.txt -> + Doc.concat [ + Doc.text "~"; + Doc.text lbl; + ] + | ((Asttypes.Labelled lbl | Optional lbl), pattern) -> + Doc.concat [ + Doc.text "~"; + Doc.text lbl; + Doc.text " as "; + printPattern pattern; + ] + in + let optionalLabelSuffix = match (lbl, defaultExpr) with + | (Asttypes.Optional _, None) -> Doc.text "=?" + | _ -> Doc.nil + in + Doc.group ( + Doc.concat [ + uncurried; + attrs; + labelWithPattern; + defaultExprDoc; + optionalLabelSuffix; + ] + ) + + (* + * let x = { + * module Foo = Bar + * exception Exit + * open Belt + * let a = 1 + * let b = 2 + * sideEffect() + * a + b + * } + * What is an expr-block ? Everything between { ... 
} + *) + and printExpressionBlock ~braces expr = + let rec collectRows acc expr = match expr.Parsetree.pexp_desc with + | Parsetree.Pexp_letmodule ({txt = modName; loc = modLoc}, modExpr, expr) -> + let letModuleDoc = Doc.concat [ + Doc.text "module "; + Doc.text modName; + Doc.text " = "; + printModExpr modExpr; + ] in + let loc = {modLoc with loc_end = modExpr.pmod_loc.loc_end} in + collectRows ((loc, letModuleDoc)::acc) expr + | Pexp_letexception (extensionConstructor, expr) -> + let letExceptionDoc = printExceptionDef extensionConstructor in + let loc = extensionConstructor.pext_loc in + collectRows ((loc, letExceptionDoc)::acc) expr + | Pexp_open (overrideFlag, longidentLoc, expr) -> + let openDoc = Doc.concat [ + Doc.text "open"; + printOverrideFlag overrideFlag; + Doc.space; + printLongident longidentLoc.txt; + ] in + let loc = longidentLoc.loc in + collectRows ((loc, openDoc)::acc) expr + | Pexp_sequence (expr1, expr2) -> + let exprDoc = + let doc = printExpression expr1 in + if Parens.blockExpr expr1 then addParens doc else doc + in + let loc = expr1.pexp_loc in + collectRows ((loc, exprDoc)::acc) expr2 + | Pexp_let (recFlag, valueBindings, expr) -> + let recFlag = match recFlag with + | Asttypes.Nonrecursive -> Doc.nil + | Asttypes.Recursive -> Doc.text "rec " + in + let letDoc = printValueBindings ~recFlag valueBindings in + let loc = match (valueBindings, List.rev valueBindings) with + | ({pvb_loc = firstLoc}::_,{pvb_loc = lastLoc}::_) -> + {firstLoc with loc_end = lastLoc.loc_end} + | _ -> Location.none + in + collectRows((loc, letDoc)::acc) expr + | _ -> + let exprDoc = + let doc = printExpression expr in + if Parens.blockExpr expr then addParens doc else doc + in + List.rev ((expr.pexp_loc, exprDoc)::acc) + in + let block = collectRows [] expr |> interleaveWhitespace ~forceBreak:true in + Doc.breakableGroup ~forceBreak:true ( + if braces then + Doc.concat [ + Doc.lbrace; + Doc.indent ( + Doc.concat [ + Doc.line; + block; + ] + ); + Doc.line; + Doc.rbrace; + ] + else block + ) + + and printOverrideFlag overrideFlag = match overrideFlag with + | Asttypes.Override -> Doc.text "!" + | Fresh -> Doc.nil + + and printDirectionFlag flag = match flag with + | Asttypes.Downto -> Doc.text " downto " + | Asttypes.Upto -> Doc.text " to " + + and printRecordRow (lbl, expr) = + Doc.concat [ + printLongident lbl.txt; + Doc.text ": "; + printExpression expr; + ] + + and printBsObjectRow (lbl, expr) = + Doc.concat [ + Doc.text "\""; + printLongident lbl.txt; + Doc.text "\""; + Doc.text ": "; + printExpression expr; + ] + (* The optional loc indicates whether we need to print the attributes in + * relation to some location. 
In practise this means the following: + * `@attr type t = string` -> on the same line, print on the same line + * `@attr + * type t = string` -> attr is on prev line, print the attributes + * with a line break between, we respect the users' original layout *) + and printAttributes ?loc (attrs: Parsetree.attributes) = + match attrs with + | [] -> Doc.nil + | attrs -> + let lineBreak = match loc with + | None -> Doc.line + | Some loc -> begin match List.rev attrs with + | ({loc = firstLoc}, _)::_ when loc.loc_start.pos_lnum > firstLoc.loc_end.pos_lnum -> + Doc.literalLine; + | _ -> Doc.line + end + in + Doc.concat [ + Doc.group (Doc.join ~sep:Doc.line (List.map printAttribute attrs)); + lineBreak; + ] + + and printAttribute ((id, payload) : Parsetree.attribute) = + let attrName = Doc.text ("@" ^ id.txt) in + match payload with + | PStr [{pstr_desc = Pstr_eval (expr, attrs)}] -> + let exprDoc = printExpression expr in + let needsParens = match attrs with | [] -> false | _ -> true in + Doc.group ( + Doc.concat [ + attrName; + addParens ( + Doc.concat [ + printAttributes attrs; + if needsParens then addParens exprDoc else exprDoc; + ] + ) + ] + ) + | _ -> attrName + + + and printModExpr modExpr = + match modExpr.pmod_desc with + | Pmod_ident longidentLoc -> + printLongident longidentLoc.txt + | Pmod_structure structure -> + Doc.breakableGroup ~forceBreak:true ( + Doc.concat [ + Doc.lbrace; + Doc.indent ( + Doc.concat [ + Doc.softLine; + printStructure structure; + ]; + ); + Doc.softLine; + Doc.rbrace; + ] + ) + | Pmod_unpack expr -> + let shouldHug = match expr.pexp_desc with + | Pexp_let _ -> true + | Pexp_constraint ( + {pexp_desc = Pexp_let _ }, + {ptyp_desc = Ptyp_package packageType} + ) -> true + | _ -> false + in + let (expr, moduleConstraint) = match expr.pexp_desc with + | Pexp_constraint ( + expr, + {ptyp_desc = Ptyp_package packageType} + ) -> + let typeDoc = Doc.group (Doc.concat [ + Doc.text ":"; + Doc.indent ( + Doc.concat [ + Doc.line; + printPackageType ~printModuleKeywordAndParens:false packageType + ] + ) + ]) in + (expr, typeDoc) + | _ -> (expr, Doc.nil) + in + let unpackDoc = Doc.group(Doc.concat [ + printExpression expr; + moduleConstraint; + ]) in + Doc.group ( + Doc.concat [ + Doc.text "unpack("; + if shouldHug then unpackDoc + else + Doc.concat [ + Doc.indent ( + Doc.concat [ + Doc.softLine; + unpackDoc; + ] + ); + Doc.softLine; + ]; + Doc.rparen; + ] + ) + | Pmod_extension extension -> + printExtension extension + | Pmod_apply _ -> + let (args, callExpr) = ParsetreeViewer.modExprApply modExpr in + let isUnitSugar = match args with + | [{pmod_desc = Pmod_structure []}] -> true + | _ -> false + in + let shouldHug = match args with + | [{pmod_desc = Pmod_structure _}] -> true + | _ -> false + in + Doc.group ( + Doc.concat [ + printModExpr callExpr; + if isUnitSugar then + printModApplyArg (List.hd args) + else + Doc.concat [ + Doc.lparen; + if shouldHug then + printModApplyArg (List.hd args) + else + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) ( + List.map printModApplyArg args + ) + ] + ); + if not shouldHug then + Doc.concat [ + Doc.trailingComma; + Doc.softLine; + ] + else Doc.nil; + Doc.rparen; + ] + ] + ) + | Pmod_constraint (modExpr, modType) -> + Doc.concat [ + printModExpr modExpr; + Doc.text ": "; + printModType modType; + ] + | Pmod_functor _ -> + printModFunctor modExpr + + and printModFunctor modExpr = + let (parameters, returnModExpr) = ParsetreeViewer.modExprFunctor modExpr in + (* let shouldInline = match 
returnModExpr.pmod_desc with *) + (* | Pmod_structure _ | Pmod_ident _ -> true *) + (* | Pmod_constraint ({pmod_desc = Pmod_structure _}, _) -> true *) + (* | _ -> false *) + (* in *) + let (returnConstraint, returnModExpr) = match returnModExpr.pmod_desc with + | Pmod_constraint (modExpr, modType) -> + let constraintDoc = + let doc = printModType modType in + if Parens.modExprFunctorConstraint modType then addParens doc else doc + in + let modConstraint = Doc.concat [ + Doc.text ": "; + constraintDoc; + ] in + (modConstraint, printModExpr modExpr) + | _ -> (Doc.nil, printModExpr returnModExpr) + in + let parametersDoc = match parameters with + | [(attrs, {txt = "*"}, None)] -> + let attrs = match attrs with + | [] -> Doc.nil + | attrs -> Doc.concat [ + Doc.join ~sep:Doc.line (List.map printAttribute attrs); + Doc.line; + ] in + Doc.group (Doc.concat [ + attrs; + Doc.text "()" + ]) + | [([], {txt = lbl}, None)] -> Doc.text lbl + | parameters -> + Doc.group ( + Doc.concat [ + Doc.lparen; + Doc.indent ( + Doc.concat [ + Doc.softLine; + Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) ( + List.map printModFunctorParam parameters + ) + ] + ); + Doc.trailingComma; + Doc.softLine; + Doc.rparen; + ] + ) + in + Doc.group ( + Doc.concat [ + parametersDoc; + returnConstraint; + Doc.text " => "; + returnModExpr + + ] + ) + + and printModFunctorParam (attrs, lbl, optModType) = + let attrs = match attrs with + | [] -> Doc.nil + | attrs -> Doc.concat [ + Doc.join ~sep:Doc.line (List.map printAttribute attrs); + Doc.line; + ] in + Doc.group ( + Doc.concat [ + attrs; + Doc.text lbl.txt; + (match optModType with + | None -> Doc.nil + | Some modType -> + Doc.concat [ + Doc.text ": "; + printModType modType + ]); + ] + ) + + and printModApplyArg modExpr = + match modExpr.pmod_desc with + | Pmod_structure [] -> Doc.text "()" + | _ -> printModExpr modExpr + + + and printExceptionDef (constr : Parsetree.extension_constructor) = + let kind = match constr.pext_kind with + | Pext_rebind {txt = longident} -> Doc.indent ( + Doc.concat [ + Doc.text " ="; + Doc.line; + printLongident longident; + ] + ) + | Pext_decl (Pcstr_tuple [], None) -> Doc.nil + | Pext_decl (args, gadt) -> + let gadtDoc = match gadt with + | Some typ -> Doc.concat [ + Doc.text ": "; + printTypExpr typ; + ] + | None -> Doc.nil + in + Doc.concat [ + printConstructorArguments args; + gadtDoc + ] + in + Doc.group ( + Doc.concat [ + printAttributes constr.pext_attributes; + Doc.text "exception "; + Doc.text constr.pext_name.txt; + kind + ] + ) + + and printExtensionConstructor i (constr : Parsetree.extension_constructor) = + let attrs = printAttributes constr.pext_attributes in + let bar = if i > 0 then Doc.text "| " + else Doc.ifBreaks (Doc.text "| ") Doc.nil + in + let kind = match constr.pext_kind with + | Pext_rebind {txt = longident} -> Doc.indent ( + Doc.concat [ + Doc.text " ="; + Doc.line; + printLongident longident; + ] + ) + | Pext_decl (Pcstr_tuple [], None) -> Doc.nil + | Pext_decl (args, gadt) -> + let gadtDoc = match gadt with + | Some typ -> Doc.concat [ + Doc.text ": "; + printTypExpr typ; + ] + | None -> Doc.nil + in + Doc.concat [ + printConstructorArguments args; + gadtDoc + ] + in + Doc.concat [ + bar; + Doc.group ( + Doc.concat [ + attrs; + Doc.text constr.pext_name.txt; + kind; + ] + ) + ] + + let printImplementation (s: Parsetree.structure) comments src = + let t = CommentAst.initStructure s comments in + + let stringDoc = Doc.toString ~width:80 (printStructure s) in + print_endline stringDoc; + print_newline() + + let 
printInterface (s: Parsetree.signature) = + let stringDoc = Doc.toString ~width:80 (printSignature s) in + print_endline stringDoc; + print_newline() +end + diff --git a/res_syntax/benchmarks/data/RedBlackTree.ml b/res_syntax/benchmarks/data/RedBlackTree.ml new file mode 100644 index 0000000000..ff86c59a04 --- /dev/null +++ b/res_syntax/benchmarks/data/RedBlackTree.ml @@ -0,0 +1,498 @@ +type nonrec nodeColor = + | Red + | Black +type 'value node = + { + mutable left: 'value node option ; + mutable right: 'value node option ; + mutable parent: 'value node option ; + mutable sum: float ; + mutable color: nodeColor ; + mutable height: float ; + mutable value: 'value } +type nonrec 'value t = + { + mutable size: int ; + mutable root: 'value node option ; + compare: (('value -> 'value -> int)[@bs ]) } +let createNode ~color ~value ~height = + { left = None; right = None; parent = None; sum = 0.; height; value; color + } +external castNotOption : 'a option -> 'a = "%identity" +let updateSum node = + let leftSum = match node.left with | None -> 0. | Some left -> left.sum in + let rightSum = match node.right with | None -> 0. | Some right -> right.sum in + node.sum <- ((leftSum +. rightSum) +. node.height) +let rec updateSumRecursive rbt node = + updateSum node; + (match node.parent with + | None -> () + | Some parent -> rbt |. (updateSumRecursive parent)) +let grandParentOf node = + match node.parent with | None -> None | Some ref_ -> ref_.parent +let isLeft node = + match node.parent with + | None -> false + | Some parent -> (Some node) == parent.left +let leftOrRightSet ~node x value = + ((if isLeft node then x.left <- value else x.right <- value)[@ns.ternary ]) +let siblingOf node = + if isLeft node + then (castNotOption node.parent).right + else (castNotOption node.parent).left +let uncleOf node = + match grandParentOf node with + | None -> None + | Some grandParentOfNode -> + if isLeft (castNotOption node.parent) + then grandParentOfNode.right + else grandParentOfNode.left +let rec findNode rbt node value = + match node with + | None -> None + | Some node -> + let cmp = ((rbt.compare value node.value)[@bs ]) in + if cmp == 0 + then Some node + else + if cmp < 0 + then findNode rbt node.left value + else findNode rbt node.right value +let has rbt value = (findNode rbt rbt.root value) != None +let rec peekMinNode node = + match node with + | None -> None + | Some node -> + ((if node.left == None then Some node else node.left |. peekMinNode) + [@ns.ternary ]) +let rec peekMaxNode node = + match node with + | None -> None + | Some node -> + ((if node.right == None then Some node else node.right |. peekMaxNode) + [@ns.ternary ]) +let rotateLeft rbt node = + let parent = node.parent in + let right = node.right in + (match parent with + | Some parent -> parent |. (leftOrRightSet ~node right) + | None -> rbt.root <- right); + node.parent <- right; + (let right = right |. castNotOption in + let rightLeft = right.left in + node.right <- rightLeft; + (match rightLeft with + | Some rightLeft -> rightLeft.parent <- (Some node) + | None -> ()); + right.parent <- parent; + right.left <- (Some node); + updateSum node; + updateSum right) +let rotateRight rbt node = + let parent = node.parent in + let left = node.left in + (match parent with + | Some parent -> parent |. (leftOrRightSet ~node left) + | None -> rbt.root <- left); + node.parent <- left; + (let left = left |. 
castNotOption in + let leftRight = left.right in + node.left <- leftRight; + (match leftRight with + | Some leftRight -> leftRight.parent <- (Some node) + | None -> ()); + left.parent <- parent; + left.right <- (Some node); + updateSum node; + updateSum left) +let rec findInsert rbt node nodeToInsert value = + match node with + | None -> None + | Some node -> + let cmp = ((rbt.compare value node.value)[@bs ]) in + if cmp == 0 + then Some node + else + if cmp < 0 + then + (if node.left != None + then rbt |. (findInsert node.left nodeToInsert value) + else + (nodeToInsert.parent <- (Some node); + node.left <- (Some nodeToInsert); + None)) + else + if node.right != None + then rbt |. (findInsert node.right nodeToInsert value) + else + (nodeToInsert.parent <- (Some node); + node.right <- (Some nodeToInsert); + None) +let rec _addLoop rbt currentNode = + if (Some currentNode) == rbt.root + then currentNode.color <- Black + else + if (currentNode.parent |. castNotOption).color == Black + then () + else + if + (let uncle = uncleOf currentNode in + (uncle != None) && ((uncle |. castNotOption).color == Red)) + then + ((currentNode.parent |. castNotOption).color <- Black; + ((uncleOf currentNode) |. castNotOption).color <- Black; + ((grandParentOf currentNode) |. castNotOption).color <- Red; + _addLoop rbt ((grandParentOf currentNode) |. castNotOption)) + else + (let currentNode = + if + (not (isLeft currentNode)) && + (isLeft (currentNode.parent |. castNotOption)) + then + (rotateLeft rbt (currentNode.parent |. castNotOption); + currentNode.left |. castNotOption) + else + if + (isLeft currentNode) && + (not (isLeft (currentNode.parent |. castNotOption))) + then + (rotateRight rbt (currentNode.parent |. castNotOption); + currentNode.right |. castNotOption) + else currentNode in + (currentNode.parent |. castNotOption).color <- Black; + ((grandParentOf currentNode) |. castNotOption).color <- Red; + if isLeft currentNode + then rotateRight rbt ((grandParentOf currentNode) |. castNotOption) + else rotateLeft rbt ((grandParentOf currentNode) |. castNotOption)) +let add rbt value ~height = + rbt.size <- (rbt.size + 1); + (let nodeToInsert = createNode ~value ~color:Red ~height in + let inserted = + if rbt.root == None + then (rbt.root <- (Some nodeToInsert); true) + else + (let foundNode = findInsert rbt rbt.root nodeToInsert value in + foundNode == None) in + if inserted + then + (rbt |. (updateSumRecursive nodeToInsert); + _addLoop rbt nodeToInsert; + Some nodeToInsert) + else None) +let removeNode rbt node = + let nodeToRemove = + match ((node.left), (node.right)) with + | (Some _, Some _) -> + let successor = (peekMinNode node.right) |. castNotOption in + (node.value <- (successor.value); + node.height <- (successor.height); + successor) + | _ -> node in + let successor = + match nodeToRemove.left with | None -> nodeToRemove.right | left -> left in + let (successor, isLeaf) = + match successor with + | None -> + let leaf = createNode ~value:([%bs.raw "0"]) ~color:Black ~height:0. in + let isLeaf = ((fun x -> x == leaf)[@bs ]) in (leaf, isLeaf) + | Some successor -> (successor, (((fun _ -> false))[@bs ])) in + let nodeParent = nodeToRemove.parent in + successor.parent <- nodeParent; + (match nodeParent with + | None -> () + | Some parent -> + parent |. (leftOrRightSet ~node:nodeToRemove (Some successor))); + rbt |. 
(updateSumRecursive successor); + if nodeToRemove.color == Black + then + (if successor.color == Red + then + (successor.color <- Black; + if successor.parent == None then rbt.root <- (Some successor)) + else + (let break = ref false in + let successorRef = ref successor in + while not break.contents do + let successor = successorRef.contents in + match successor.parent with + | None -> (rbt.root <- (Some successor); break.contents <- true) + | Some successorParent -> + let sibling = siblingOf successor in + (if + (sibling != None) && + ((sibling |. castNotOption).color == Red) + then + (successorParent.color <- Red; + (sibling |. castNotOption).color <- Black; + if isLeft successor + then rotateLeft rbt successorParent + else rotateRight rbt successorParent); + (let sibling = siblingOf successor in + let siblingNN = sibling |. castNotOption in + if + (successorParent.color == Black) && + ((sibling == None) || + (((siblingNN.color == Black) && + ((siblingNN.left == None) || + ((siblingNN.left |. castNotOption).color == + Black))) + && + ((siblingNN.right == None) || + ((siblingNN.right |. castNotOption).color == + Black)))) + then + (if sibling != None then siblingNN.color <- Red; + successorRef.contents <- successorParent) + else + if + (successorParent.color == Red) && + ((sibling == None) || + (((siblingNN.color == Black) && + ((siblingNN.left == None) || + ((siblingNN.left |. castNotOption).color == + Black))) + && + ((siblingNN.right == None) || + ((siblingNN.right |. castNotOption).color == + Black)))) + then + (if sibling != None then siblingNN.color <- Red; + successorParent.color <- Black; + break.contents <- true) + else + if + (sibling != None) && + ((sibling |. castNotOption).color == Black) + then + (let sibling = sibling |. castNotOption in + if + (((isLeft successor) && + ((sibling.right == None) || + ((sibling.right |. castNotOption).color == + Black))) + && (sibling.left != None)) + && ((sibling.left |. castNotOption).color == Red) + then + (sibling.color <- Red; + (sibling.left |. castNotOption).color <- Black; + rotateRight rbt sibling) + else + if + (((not (isLeft successor)) && + ((sibling.left == None) || + ((sibling.left |. castNotOption).color == + Black))) + && (sibling.right != None)) + && + ((sibling.right |. castNotOption).color == Red) + then + (sibling.color <- Red; + (sibling.right |. castNotOption).color <- Black; + rotateLeft rbt sibling); + break.contents <- true) + else + (let sibling = siblingOf successor in + let sibling = sibling |. castNotOption in + sibling.color <- (successorParent.color); + if isLeft successor + then + ((sibling.right |. castNotOption).color <- Black; + rotateRight rbt successorParent) + else + ((sibling.left |. castNotOption).color <- Black; + rotateLeft rbt successorParent)))) + done)); + if ((isLeaf successor)[@bs ]) + then + (if rbt.root == (Some successor) then rbt.root <- None; + (match successor.parent with + | None -> () + | Some parent -> parent |. (leftOrRightSet ~node:successor None))) +let remove rbt value = + match findNode rbt rbt.root value with + | Some node -> (rbt |. 
(removeNode node); rbt.size <- (rbt.size - 1); true) + | None -> false +let rec findNodeThroughCallback rbt node cb = + match node with + | None -> None + | Some node -> + let cmp = ((cb node)[@bs ]) in + if cmp == 0 + then Some node + else + if cmp < 0 + then findNodeThroughCallback rbt node.left cb + else findNodeThroughCallback rbt node.right cb +let removeThroughCallback rbt cb = + match findNodeThroughCallback rbt rbt.root cb with + | Some node -> (rbt |. (removeNode node); rbt.size <- (rbt.size - 1); true) + | None -> false +let make ~compare = { size = 0; root = None; compare } +let makeWith array ~compare = + let rbt = make ~compare in + array |. + (Js.Array2.forEach + (fun (value, height) -> (add rbt value ~height) |. ignore)); + rbt +let rec heightOfInterval rbt node lhs rhs = + match node with + | None -> 0. + | Some n -> + if (lhs == None) && (rhs == None) + then n.sum + else + if + (lhs != None) && + (((rbt.compare n.value (lhs |. castNotOption))[@bs ]) < 0) + then rbt |. (heightOfInterval n.right lhs rhs) + else + if + (rhs != None) && + (((rbt.compare n.value (rhs |. castNotOption))[@bs ]) > 0) + then rbt |. (heightOfInterval n.left lhs rhs) + else + (n.height +. (rbt |. (heightOfInterval n.left lhs None))) +. + (rbt |. (heightOfInterval n.right None rhs)) +let heightOfInterval rbt lhs rhs = heightOfInterval rbt rbt.root lhs rhs +let rec firstVisibleNode node top = + match node with + | None -> None + | Some node -> + if node.sum <= top + then None + else + (let nodeHeight = node.height in + let sumLeft = + match node.left with | None -> 0.0 | Some left -> left.sum in + if sumLeft > top + then firstVisibleNode node.left top + else + if (sumLeft +. nodeHeight) > top + then Some node + else + (let offset = sumLeft +. nodeHeight in + firstVisibleNode node.right (top -. offset))) +let lastVisibleNode node top = + match firstVisibleNode node top with + | None -> node |. peekMaxNode + | first -> first +let firstVisibleValue rbt ~top = + match firstVisibleNode rbt.root top with + | None -> None + | Some node -> Some (node.value) +let rec leftmost node = + match node.left with | None -> node | Some node -> node |. leftmost +let rec firstRightParent node = + match node.parent with + | None -> None + | Some parent -> + ((if isLeft node then Some parent else parent |. firstRightParent) + [@ns.ternary ]) +let nextNode node = + match node.right with + | None -> node |. firstRightParent + | Some right -> Some (right |. leftmost) +let rec sumLeftSpine node ~fromRightChild = + let leftSpine = + match node.left with + | None -> node.height + | Some left -> ((if fromRightChild then node.height +. left.sum else 0.0) + [@ns.ternary ]) in + match node.parent with + | None -> leftSpine + | Some parent -> + leftSpine +. + (parent |. + (sumLeftSpine ~fromRightChild:(parent.right == (Some node)))) +let getY node = (node |. (sumLeftSpine ~fromRightChild:true)) -. node.height +let rec iterate ~inclusive firstNode lastNode ~callback = + match firstNode with + | None -> () + | Some node -> + (if inclusive then ((callback node)[@bs ]); + if firstNode != lastNode + then + (if not inclusive then ((callback node)[@bs ]); + iterate ~inclusive (node |. nextNode) lastNode ~callback)) +let rec iterateWithY ?y ~inclusive firstNode lastNode ~callback = + match firstNode with + | None -> () + | Some node -> + let y = match y with | None -> node |. 
getY | Some y -> y in + (if inclusive then ((callback node y)[@bs ]); + if firstNode != lastNode + then + (if not inclusive then ((callback node y)[@bs ]); + iterateWithY ~y:(y +. node.height) ~inclusive (node |. nextNode) + lastNode ~callback)) +let rec updateSum node ~delta = + match node with + | None -> () + | Some node -> + (node.sum <- (node.sum +. delta); node.parent |. (updateSum ~delta)) +let updateHeight node ~height = + let delta = height -. node.height in + node.height <- height; (Some node) |. (updateSum ~delta) +type nonrec 'value oldNewVisible = + { + mutable old: 'value array ; + mutable new_: 'value array } +let getAnchorDelta rbt ~anchor = + match anchor with + | None -> 0.0 + | Some (value, y) -> + (match rbt |. (findNode rbt.root value) with + | Some node -> y -. (node |. getY) + | None -> 0.0) +let onChangedVisible ?(anchor= None) rbt ~oldNewVisible ~top:top_ + ~bottom:bottom_ ~appear ~remained ~disappear = + let old = oldNewVisible.new_ in + let new_ = oldNewVisible.old in + (new_ |. + (Js.Array2.removeCountInPlace ~pos:0 ~count:(new_ |. Js.Array2.length))) + |. ignore; + oldNewVisible.old <- old; + oldNewVisible.new_ <- new_; + (let anchorDelta = rbt |. (getAnchorDelta ~anchor) in + let top = top_ -. anchorDelta in + let top = ((if top < 0.0 then 0.0 else top)[@ns.ternary ]) in + let bottom = bottom_ -. anchorDelta in + let first = firstVisibleNode rbt.root top in + let last = lastVisibleNode rbt.root bottom in + let oldLen = old |. Js.Array2.length in + let oldIter = ref 0 in + iterateWithY ~inclusive:true first last + ((fun node -> + fun y_ -> + let y = y_ +. anchorDelta in + if y >= 0.0 + then + (while + (oldIter.contents < oldLen) && + (((rbt.compare (Js.Array2.unsafe_get old oldIter.contents) + node.value) + [@bs ]) < 0) + do + (((disappear (Js.Array2.unsafe_get old oldIter.contents)) + [@bs ]); + oldIter.contents <- (oldIter.contents + 1)) + done; + (new_ |. (Js.Array2.push node.value)) |. ignore; + if oldIter.contents < oldLen + then + (let cmp = + ((rbt.compare (Js.Array2.unsafe_get old oldIter.contents) + node.value) + [@bs ]) in + if cmp = 0 + then + (((remained node y) + [@bs ]); + oldIter.contents <- (oldIter.contents + 1)) + else ((appear node y)[@bs ])) + else ((appear node y)[@bs ])))[@bs ]); + while oldIter.contents < oldLen do + (((disappear (Js.Array2.unsafe_get old oldIter.contents)) + [@bs ]); + oldIter.contents <- (oldIter.contents + 1)) + done) diff --git a/res_syntax/benchmarks/data/RedBlackTree.res b/res_syntax/benchmarks/data/RedBlackTree.res new file mode 100644 index 0000000000..fbf68c266e --- /dev/null +++ b/res_syntax/benchmarks/data/RedBlackTree.res @@ -0,0 +1,751 @@ +/* +Credit to Wikipedia's article on [Red-black +tree](http://en.wikipedia.org/wiki/Red–black_tree) + +**Note:** doesn't handle duplicate entries. This is by design. + +## Overview example: + +``` +var rbt = new RedBlackTree([7, 5, 1, 8]) +rbt.add(2) // => 2 +rbt.add(10) // => 10 +rbt.has(5) // => true +rbt.remove(8) // => 8 +``` + +## Properties: + +- size: The total number of items. +*/ + +type nodeColor = + | Red + | Black + +/* +Property of a red-black tree, taken from Wikipedia: +1. A node is either red or black. +2. Root is black. +3. Leaves are all null and considered black. +4. Both children of a red node are black. +5. Every path from a node to any of its descendent leaves contains the same +number of black nodes. 
+*/ + +type rec node<'value> = { + mutable left: option>, + mutable right: option>, + mutable parent: option>, + mutable sum: float, + mutable color : nodeColor, + mutable height: float, + mutable value: 'value, +} + +type t<'value> = { + mutable size: int, + mutable root: option>, + compare: (. 'value, 'value) => int, +} + +let createNode = (~color, ~value, ~height) => + {left:None, right:None, parent:None, sum:0., height, value, color} + +external castNotOption: option<'a> => 'a = "%identity" + +let updateSum = (node) => { + let leftSum = switch node.left { + | None => 0. + | Some(left) => left.sum + } + let rightSum = switch node.right { + | None => 0. + | Some(right) => right.sum + } + node.sum = leftSum +. rightSum +. node.height +} + +/* Update the sum for the node and parents recursively. */ +let rec updateSumRecursive = (rbt, node) => { + updateSum(node) + switch node.parent { + | None => () + | Some(parent) => + rbt->updateSumRecursive(parent) + } +} + +let grandParentOf = node => { + switch node.parent { + | None => None + | Some(ref_) => ref_.parent + } +} + +let isLeft = node => { + switch node.parent { + | None => false + | Some(parent) => Some(node) === parent.left + } +} + +let leftOrRightSet = (~node, x, value) => { + isLeft(node) ? x.left=value : x.right=value +} + +let siblingOf = node => { + if isLeft(node) { + castNotOption(node.parent).right + } else { + castNotOption(node.parent).left + } +} + +let uncleOf = node => { + switch grandParentOf(node) { + | None => None + | Some(grandParentOfNode) => + if isLeft(castNotOption(node.parent)) { + grandParentOfNode.right + } else { + grandParentOfNode.left + } + } +} + +let rec findNode = (rbt, node, value) => { + switch node { + | None => None + | Some(node) => + let cmp = rbt.compare(. value, node.value) + if cmp === 0 { + Some(node) + } else if cmp < 0 { + findNode(rbt, node.left, value) + } else { + findNode(rbt, node.right, value) + } + } +} + +let has = (rbt, value) => findNode(rbt, rbt.root, value) !== None + +let rec peekMinNode = node => switch node { + | None => None + | Some(node) => + node.left === None ? Some(node) : node.left->peekMinNode +} + +let rec peekMaxNode = node => switch node { + | None => None + | Some(node) => + node.right === None ? Some(node) : node.right->peekMaxNode +} + +let rotateLeft = (rbt, node) => { + let parent = node.parent + let right = node.right + switch parent { + | Some(parent) => + parent->leftOrRightSet(~node, right) + | None => + rbt.root = right + } + node.parent = right + let right = right->castNotOption // precondition + let rightLeft = right.left + node.right = rightLeft + switch rightLeft { + | Some(rightLeft) => + rightLeft.parent = Some(node) + | None => + () + } + right.parent = parent + right.left = Some(node) + updateSum(node) + updateSum(right) +} + +let rotateRight = (rbt, node) => { + let parent = node.parent + let left = node.left + switch parent { + | Some(parent) => + parent->leftOrRightSet(~node, left) + | None => + rbt.root = left + } + node.parent = left + let left = left->castNotOption // precondition + let leftRight = left.right + node.left = leftRight + switch leftRight { + | Some(leftRight) => + leftRight.parent = Some(node) + | None => + () + } + left.parent = parent + left.right = Some(node) + updateSum(node) + updateSum(left) +} + +let rec findInsert = (rbt, node, nodeToInsert, value) => { + switch node { + | None => None + | Some(node) => { + let cmp = rbt.compare(. 
value, node.value) + if cmp === 0 { + Some(node) + } else { + if cmp < 0 { + if node.left !== None { + rbt->findInsert(node.left, nodeToInsert, value) + } else { + nodeToInsert.parent = Some(node) + node.left = Some(nodeToInsert) + None + } + } else { + if node.right !== None { + rbt->findInsert(node.right, nodeToInsert, value) + } else { + nodeToInsert.parent = Some(node) + node.right = Some(nodeToInsert) + None + } + } + } + } + } +} + +// After adding the node, we need to operate on it to preserve the tree's +// properties by filtering it through a series of cases. It'd be easier if +// there's tail recursion in JavaScript, as some cases fix the node but +// restart the cases on the node's ancestor. We'll have to use loops for now. + +let rec _addLoop = (rbt, currentNode) => { + // Case 1: node is root. Violates 1. Paint it black. + if Some(currentNode) === rbt.root { + currentNode.color = Black + } + + // Case 2: parent black. No properties violated. After that, parent is sure + // to be red. + else if (currentNode.parent->castNotOption).color === Black { + () + } + + // Case 3: if node's parent and uncle are red, they are painted black. + // Their parent (node's grandparent) should be painted red, and the + // grandparent red. Note that node certainly has a grandparent, since at + // this point, its parent's red, which can't be the root. + + // After the painting, the grandparent might violate 2 or 4. + else if({ + let uncle = uncleOf(currentNode) + uncle !== None && (uncle->castNotOption).color === Red + }) { + (currentNode.parent->castNotOption).color = Black + (uncleOf(currentNode)->castNotOption).color = Black + (grandParentOf(currentNode)->castNotOption).color = Red + _addLoop(rbt, grandParentOf(currentNode)->castNotOption) + } + else { + // At this point, uncle is either black or doesn't exist. + + // Case 4: parent red, uncle black, node is right child, parent is left + // child. Do a left rotation. Then, former parent passes through case 5. + let currentNode = + if !isLeft(currentNode) && isLeft(currentNode.parent->castNotOption) { + rotateLeft(rbt, currentNode.parent->castNotOption) + currentNode.left->castNotOption + } else if isLeft(currentNode) && !isLeft(currentNode.parent->castNotOption) { + rotateRight(rbt, currentNode.parent->castNotOption) + currentNode.right->castNotOption + } else { + currentNode + } + + // Case 5: parent red, uncle black, node is left child, parent is left + // child. Right rotation. Switch parent and grandparent's color. + (currentNode.parent->castNotOption).color = Black + (grandParentOf(currentNode)->castNotOption).color = Red + if isLeft(currentNode) { + rotateRight(rbt, grandParentOf(currentNode)->castNotOption) + } else { + rotateLeft(rbt, grandParentOf(currentNode)->castNotOption) + } + } +} + +let add = (rbt, value, ~height) => { + // Again, make sure to not pass a value already in the tree. + // + // _Returns:_ value added. + rbt.size = rbt.size + 1 + let nodeToInsert = createNode(~value, ~color=Red, ~height) + let inserted = + if rbt.root === None { + rbt.root = Some(nodeToInsert) + true + } + else { + let foundNode = findInsert(rbt, rbt.root, nodeToInsert, value) + foundNode === None + } + if inserted { + rbt->updateSumRecursive(nodeToInsert) + + _addLoop(rbt, nodeToInsert) + Some(nodeToInsert) + } else { + None + } +} + + +// To simplify removal cases, we can notice this: +// 1. Node has no child. +// 2. Node has two children. Select the smallest child on the right branch +// (leftmost) and copy its value into the node to delete. 
This replacement node +// certainly has less than two children or it wouldn't be the smallest. Then +// delete this replacement node. +// 3. Node has one child. +// They all come down to removing a node with maximum one child. +let removeNode = (rbt, node) => { + let nodeToRemove = + switch (node.left, node.right) { + | (Some(_), Some(_)) => + let successor = peekMinNode(node.right)->castNotOption + node.value = successor.value + node.height = successor.height + successor + | _ => node + } + // At this point, the node to remove has only one child. + let successor = switch nodeToRemove.left { + | None => nodeToRemove.right + | left => left + } + let (successor, isLeaf) = switch successor { + | None => + let leaf = createNode(~value=%bs.raw("0"), ~color=Black, ~height=0.) + let isLeaf = (. x) => x === leaf; + (leaf, isLeaf) + | Some(successor) => + (successor, (. _) => false) + } + let nodeParent = nodeToRemove.parent + successor.parent = nodeParent + switch nodeParent { + | None => () + | Some(parent) => + parent->leftOrRightSet(~node=nodeToRemove, Some(successor)) + } + + rbt->updateSumRecursive(successor) + + // We're done if node's red. If it's black and its child that took its place + // is red, change it to black. If both are black, we do cases checking like + // in insert. + if nodeToRemove.color === Black { + if successor.color === Red { + successor.color = Black + if successor.parent === None { + rbt.root = Some(successor) + } + } else { + let break = ref(false) + let successorRef = ref(successor) + while !break.contents { + let successor = successorRef.contents + // Case 1: node is root. Done. + switch successor.parent { + | None => + rbt.root = Some(successor) + break.contents = true + | Some(successorParent) => + // Case 2: sibling red. Flip color of P and S. Left rotate P. + let sibling = siblingOf(successor) + if sibling !== None && (sibling->castNotOption).color === Red { + successorParent.color = Red + (sibling->castNotOption).color = Black + if isLeft(successor) { + rotateLeft(rbt, successorParent) + } else { + rotateRight(rbt, successorParent) + } + } + + // Case 3: parent, sibling and sibling children all black. Paint + // sibling red. Rebalance parent. + let sibling = siblingOf(successor) + let siblingNN = sibling->castNotOption + if + successorParent.color === Black && + ( sibling === None || + ( siblingNN.color === Black && + ( siblingNN.left === None || + (siblingNN.left->castNotOption).color === Black ) && + ( siblingNN.right === None || + (siblingNN.right->castNotOption).color === Black))) + { + if sibling !== None { + siblingNN.color = Red + } + successorRef.contents = successorParent + // continue + } else if + // Case 4: sibling and sibling children black. Node parent red. Swap + // color of sibling and node parent. + successorParent.color === Red && + ( sibling === None || + ( siblingNN.color === Black && + ( siblingNN.left === None || + (siblingNN.left->castNotOption).color === Black) && + ( siblingNN.right === None || + (siblingNN.right->castNotOption).color === Black))) + { + if sibling !== None { + siblingNN.color = Red + } + successorParent.color = Black + break.contents = true + } else if + // Case 5: sibling black, sibling left child red, right child black, + // node is left child. Rotate right sibling. Swap color of sibling and + // its new parent. 
+ sibling !== None && (sibling->castNotOption).color === Black + { + let sibling = sibling->castNotOption + if + isLeft(successor) && + (sibling.right === None || (sibling.right->castNotOption).color === Black) && + sibling.left !== None && + (sibling.left->castNotOption).color === Red { + sibling.color = Red + (sibling.left->castNotOption).color = Black + rotateRight(rbt, sibling) + } else if + !isLeft(successor) && + (sibling.left === None || (sibling.left->castNotOption).color === Black) && + sibling.right !== None && + (sibling.right->castNotOption).color === Red + { + sibling.color = Red + (sibling.right->castNotOption).color = Black + rotateLeft(rbt, sibling) + } + break.contents = true + } else { + // Case 6: sibling black, sibling right child red, node is left child. + // Rotate left node parent. Swap color of parent and sibling. Paint + // sibling right child black. + let sibling = siblingOf(successor) + let sibling = sibling->castNotOption + sibling.color = successorParent.color + if isLeft(successor) { + (sibling.right->castNotOption).color = Black + rotateRight(rbt, successorParent) + } else { + (sibling.left->castNotOption).color = Black + rotateLeft(rbt, successorParent) + } + } + } + } + } + } + // Don't forget to detatch the artificially created leaf. + if isLeaf(. successor) { + if rbt.root === Some(successor) { + rbt.root = None + } + switch successor.parent { + | None => () + | Some(parent) => + parent->leftOrRightSet(~node=successor, None) + } + } +} + +let remove = (rbt, value) => { + switch findNode(rbt, rbt.root, value) { + | Some(node) => + rbt->removeNode(node) + rbt.size = rbt.size - 1 + true + | None => + false + } +} + +let rec findNodeThroughCallback = (rbt, node, cb) => { + switch node { + | None => None + | Some(node) => + let cmp = cb(. node) + if cmp === 0 { + Some(node) + } else if cmp < 0 { + findNodeThroughCallback(rbt, node.left, cb) + } else { + findNodeThroughCallback(rbt, node.right, cb) + } + } +} + +let removeThroughCallback = (rbt, cb) => { + switch findNodeThroughCallback(rbt, rbt.root, cb) { + | Some(node) => + rbt->removeNode(node) + rbt.size = rbt.size - 1 + true + | None => + false + } +} + +let make = (~compare) => {size: 0, root: None, compare} + +let makeWith = (array, ~compare) => { + let rbt = make(~compare) + array->Js.Array2.forEach(((value, height)) => add(rbt,value, ~height)->ignore) + rbt +} + +// sum of the heights of the elements in [lhs ... rhs] +// both lhs and rhs are optional +let rec heightOfInterval = (rbt, node, lhs, rhs) => { + switch node { + | None => 0. + | Some(n) => + //Js.log4("heightOfInterval n:", n.value, lhs, rhs) + if lhs === None && rhs === None { + n.sum + } else if lhs !== None && rbt.compare(. n.value, lhs->castNotOption) < 0 { + // to the lhs of the interval + rbt->heightOfInterval(n.right, lhs, rhs) + } else if rhs !== None && rbt.compare(. n.value, rhs->castNotOption) > 0 { + // to the rhs of the interval + rbt->heightOfInterval(n.left, lhs, rhs) + } else { + // in the interval + n.height +. + rbt->heightOfInterval(n.left, lhs, None) +. 
+ rbt->heightOfInterval(n.right, None, rhs) + } + } +} + +let heightOfInterval = (rbt, lhs, rhs) => { + //Js.log("-----------") + heightOfInterval(rbt, rbt.root, lhs, rhs) +} + +// Return a node at y such that y <= top < y + node.height +let rec firstVisibleNode = (node, top) => { + switch node { + | None => None + | Some(node) => + //Js.log4("firstVisibleNode", node.value, "top:", top) + if node.sum <= top { + // no node is visible + None + } else { + let nodeHeight = node.height + let sumLeft = switch node.left { + | None => 0.0 + | Some(left) => left.sum + } + if sumLeft > top { + firstVisibleNode(node.left, top) + } else if sumLeft +. nodeHeight > top { + // found + Some(node) + } else { + let offset = sumLeft +. nodeHeight + firstVisibleNode(node.right, top -. offset) + } + } + } +} + +let lastVisibleNode = (node, top) => { + switch firstVisibleNode(node, top) { + | None => + node->peekMaxNode + | first => first + } +} + +// Find the value of the first visible node starting from top +let firstVisibleValue = (rbt, ~top) => + switch firstVisibleNode(rbt.root, top) { + | None => None + | Some(node) => Some(node.value) +} + +let rec leftmost = node => switch node.left { + | None => node + | Some(node) => node->leftmost +} + +let rec firstRightParent = node => { + switch node.parent { + | None => None + | Some(parent) => + isLeft(node) ? Some(parent) : parent->firstRightParent + } +} + +let nextNode = node => { + switch node.right { + | None => + node->firstRightParent + | Some(right) => + Some(right->leftmost) + } +} + +let rec sumLeftSpine = (node, ~fromRightChild) => { + let leftSpine = switch node.left { + | None => node.height + | Some(left) => fromRightChild ? node.height +. left.sum : 0.0 + } + switch node.parent { + | None => + leftSpine + | Some(parent) => + leftSpine +. parent->sumLeftSpine(~fromRightChild = parent.right === Some(node)) + } +} + +let getY = node => + node->sumLeftSpine(~fromRightChild=true) -. node.height + +let rec iterate = (~inclusive, firstNode, lastNode, ~callback) => { + switch firstNode { + | None => () + | Some(node) => + if inclusive { callback(. node) } + if firstNode !== lastNode { + if !inclusive { callback (.node) } + iterate(~inclusive, node->nextNode, lastNode, ~callback) + } + } +} + +let rec iterateWithY = (~y=?, ~inclusive, firstNode, lastNode, ~callback) => { + switch firstNode { + | None => () + | Some(node) => + let y = switch y { + | None => node->getY + | Some(y) => y + } + if inclusive { + callback(. node, y) + } + if firstNode !== lastNode { + if !inclusive { + callback (.node, y) + } + iterateWithY(~y=y+.node.height, ~inclusive, node->nextNode, lastNode, ~callback) + } + } +} + +let rec updateSum = (node, ~delta) => switch node { + | None => () + | Some(node) => + node.sum = node.sum +. delta + node.parent->updateSum(~delta) +} + +let updateHeight = (node, ~height) => { + let delta = height -. node.height + node.height = height + Some(node)->updateSum(~delta) +} + +type oldNewVisible<'value> = { + mutable old: array<'value>, + mutable new: array<'value>, +}; + +let getAnchorDelta = (rbt, ~anchor) => { + switch anchor { + | None => 0.0 + | Some((value, y)) => + switch rbt->findNode(rbt.root, value) { + | Some(node) => y -. 
node->getY + | None => 0.0 + } + } +} + +let onChangedVisible = + ( + ~anchor=None, + rbt, + ~oldNewVisible, + ~top as top_, + ~bottom as bottom_, + ~appear, + ~remained, + ~disappear, + ) => + { + let old = oldNewVisible.new + let new = oldNewVisible.old + // empty new + new->Js.Array2.removeCountInPlace(~pos=0, ~count=new->Js.Array2.length)->ignore + oldNewVisible.old = old + oldNewVisible.new = new + + let anchorDelta = rbt->getAnchorDelta(~anchor) + //Js.log2("anchorDelta", anchorDelta) + let top = top_ -. anchorDelta + let top = top < 0.0 ? 0.0 : top // anchoring can make top negative + let bottom = bottom_ -. anchorDelta + + let first = firstVisibleNode(rbt.root, top) + let last = lastVisibleNode(rbt.root, bottom) + + let oldLen = old->Js.Array2.length + let oldIter = ref(0) + iterateWithY(~inclusive=true, first, last, ~callback=(. node, y_) => { + let y = y_ +. anchorDelta + if y >= 0.0 { // anchoring can make y negative + while ( + oldIter.contents < oldLen && + rbt.compare(. Js.Array2.unsafe_get(old, oldIter.contents), node.value) < 0 + ) { + disappear(. Js.Array2.unsafe_get(old, oldIter.contents)) + oldIter.contents = oldIter.contents + 1 + } + new->Js.Array2.push(node.value)->ignore + if (oldIter.contents < oldLen) { + let cmp = rbt.compare(. Js.Array2.unsafe_get(old, oldIter.contents), node.value) + if cmp == 0 { + remained(. node, y) + oldIter.contents = oldIter.contents + 1 + } else { + appear(. node, y) + } + } else { + appear(. node, y) + } + } + }) + while (oldIter.contents < oldLen) { + disappear(. Js.Array2.unsafe_get(old, oldIter.contents)) + oldIter.contents = oldIter.contents + 1 + } +}; diff --git a/res_syntax/benchmarks/data/RedBlackTreeNoComments.res b/res_syntax/benchmarks/data/RedBlackTreeNoComments.res new file mode 100644 index 0000000000..67a2a5463c --- /dev/null +++ b/res_syntax/benchmarks/data/RedBlackTreeNoComments.res @@ -0,0 +1,643 @@ +type nodeColor = + | Red + | Black + +type rec node<'value> = { + mutable left: option>, + mutable right: option>, + mutable parent: option>, + mutable sum: float, + mutable color: nodeColor, + mutable height: float, + mutable value: 'value, +} + +type t<'value> = { + mutable size: int, + mutable root: option>, + compare: (. 'value, 'value) => int, +} + +let createNode = (~color, ~value, ~height) => { + left: None, + right: None, + parent: None, + sum: 0., + height: height, + value: value, + color: color, +} + +external castNotOption: option<'a> => 'a = "%identity" + +let updateSum = node => { + let leftSum = switch node.left { + | None => 0. + | Some(left) => left.sum + } + let rightSum = switch node.right { + | None => 0. + | Some(right) => right.sum + } + node.sum = leftSum +. rightSum +. node.height +} + +let rec updateSumRecursive = (rbt, node) => { + updateSum(node) + switch node.parent { + | None => () + | Some(parent) => rbt->updateSumRecursive(parent) + } +} + +let grandParentOf = node => + switch node.parent { + | None => None + | Some(ref_) => ref_.parent + } + +let isLeft = node => + switch node.parent { + | None => false + | Some(parent) => Some(node) === parent.left + } + +let leftOrRightSet = (~node, x, value) => + isLeft(node) + ? 
x.left = value + : x.right = value + +let siblingOf = node => + if isLeft(node) { + castNotOption(node.parent).right + } else { + castNotOption(node.parent).left + } + +let uncleOf = node => + switch grandParentOf(node) { + | None => None + | Some(grandParentOfNode) => + if isLeft(castNotOption(node.parent)) { + grandParentOfNode.right + } else { + grandParentOfNode.left + } + } + +let rec findNode = (rbt, node, value) => + switch node { + | None => None + | Some(node) => + let cmp = rbt.compare(. value, node.value) + if cmp === 0 { + Some(node) + } else if cmp < 0 { + findNode(rbt, node.left, value) + } else { + findNode(rbt, node.right, value) + } + } + +let has = (rbt, value) => findNode(rbt, rbt.root, value) !== None + +let rec peekMinNode = node => + switch node { + | None => None + | Some(node) => node.left === None ? Some(node) : node.left->peekMinNode + } + +let rec peekMaxNode = node => + switch node { + | None => None + | Some(node) => node.right === None ? Some(node) : node.right->peekMaxNode + } + +let rotateLeft = (rbt, node) => { + let parent = node.parent + let right = node.right + switch parent { + | Some(parent) => parent->leftOrRightSet(~node, right) + | None => rbt.root = right + } + node.parent = right + let right = right->castNotOption + let rightLeft = right.left + node.right = rightLeft + switch rightLeft { + | Some(rightLeft) => rightLeft.parent = Some(node) + | None => () + } + right.parent = parent + right.left = Some(node) + updateSum(node) + updateSum(right) +} + +let rotateRight = (rbt, node) => { + let parent = node.parent + let left = node.left + switch parent { + | Some(parent) => parent->leftOrRightSet(~node, left) + | None => rbt.root = left + } + node.parent = left + let left = left->castNotOption + let leftRight = left.right + node.left = leftRight + switch leftRight { + | Some(leftRight) => leftRight.parent = Some(node) + | None => () + } + left.parent = parent + left.right = Some(node) + updateSum(node) + updateSum(left) +} + +let rec findInsert = (rbt, node, nodeToInsert, value) => + switch node { + | None => None + | Some(node) => + let cmp = rbt.compare(. 
value, node.value) + if cmp === 0 { + Some(node) + } else if cmp < 0 { + if node.left !== None { + rbt->findInsert(node.left, nodeToInsert, value) + } else { + nodeToInsert.parent = Some(node) + node.left = Some(nodeToInsert) + None + } + } else if node.right !== None { + rbt->findInsert(node.right, nodeToInsert, value) + } else { + nodeToInsert.parent = Some(node) + node.right = Some(nodeToInsert) + None + } + } + +let rec _addLoop = (rbt, currentNode) => + if Some(currentNode) === rbt.root { + currentNode.color = Black + } else if (currentNode.parent->castNotOption).color === Black { + () + } else if { + let uncle = uncleOf(currentNode) + uncle !== None && (uncle->castNotOption).color === Red + } { + (currentNode.parent->castNotOption).color = Black + (uncleOf(currentNode)->castNotOption).color = Black + (grandParentOf(currentNode)->castNotOption).color = Red + _addLoop(rbt, grandParentOf(currentNode)->castNotOption) + } else { + let currentNode = if ( + !isLeft(currentNode) && isLeft(currentNode.parent->castNotOption) + ) { + rotateLeft(rbt, currentNode.parent->castNotOption) + currentNode.left->castNotOption + } else if ( + isLeft(currentNode) && !isLeft(currentNode.parent->castNotOption) + ) { + rotateRight(rbt, currentNode.parent->castNotOption) + currentNode.right->castNotOption + } else { + currentNode + } + + (currentNode.parent->castNotOption).color = Black + (grandParentOf(currentNode)->castNotOption).color = Red + if isLeft(currentNode) { + rotateRight(rbt, grandParentOf(currentNode)->castNotOption) + } else { + rotateLeft(rbt, grandParentOf(currentNode)->castNotOption) + } + } + +let add = (rbt, value, ~height) => { + rbt.size = rbt.size + 1 + let nodeToInsert = createNode(~value, ~color=Red, ~height) + let inserted = if rbt.root === None { + rbt.root = Some(nodeToInsert) + true + } else { + let foundNode = findInsert(rbt, rbt.root, nodeToInsert, value) + foundNode === None + } + if inserted { + rbt->updateSumRecursive(nodeToInsert) + + _addLoop(rbt, nodeToInsert) + Some(nodeToInsert) + } else { + None + } +} + +let removeNode = (rbt, node) => { + let nodeToRemove = switch (node.left, node.right) { + | (Some(_), Some(_)) => + let successor = peekMinNode(node.right)->castNotOption + node.value = successor.value + node.height = successor.height + successor + | _ => node + } + + let successor = switch nodeToRemove.left { + | None => nodeToRemove.right + | left => left + } + let (successor, isLeaf) = switch successor { + | None => + let leaf = createNode(~value=%bs.raw("0"), ~color=Black, ~height=0.) + let isLeaf = (. x) => x === leaf + (leaf, isLeaf) + | Some(successor) => (successor, (. 
_) => false) + } + let nodeParent = nodeToRemove.parent + successor.parent = nodeParent + switch nodeParent { + | None => () + | Some(parent) => parent->leftOrRightSet(~node=nodeToRemove, Some(successor)) + } + + rbt->updateSumRecursive(successor) + + if nodeToRemove.color === Black { + if successor.color === Red { + successor.color = Black + if successor.parent === None { + rbt.root = Some(successor) + } + } else { + let break = ref(false) + let successorRef = ref(successor) + while !break.contents { + let successor = successorRef.contents + + switch successor.parent { + | None => + rbt.root = Some(successor) + break.contents = true + | Some(successorParent) => + let sibling = siblingOf(successor) + if sibling !== None && (sibling->castNotOption).color === Red { + successorParent.color = Red + (sibling->castNotOption).color = Black + if isLeft(successor) { + rotateLeft(rbt, successorParent) + } else { + rotateRight(rbt, successorParent) + } + } + + let sibling = siblingOf(successor) + let siblingNN = sibling->castNotOption + if ( + successorParent.color === Black && + (sibling === None || + (siblingNN.color === Black && + (siblingNN.left === None || + (siblingNN.left->castNotOption).color === Black) && + (siblingNN.right === None || + (siblingNN.right->castNotOption).color === Black))) + ) { + if sibling !== None { + siblingNN.color = Red + } + successorRef.contents = successorParent + } else if ( + successorParent.color === Red && + (sibling === None || + (siblingNN.color === Black && + (siblingNN.left === None || + (siblingNN.left->castNotOption).color === Black) && + (siblingNN.right === None || + (siblingNN.right->castNotOption).color === Black))) + ) { + if sibling !== None { + siblingNN.color = Red + } + successorParent.color = Black + break.contents = true + } else if ( + sibling !== None && (sibling->castNotOption).color === Black + ) { + let sibling = sibling->castNotOption + if ( + isLeft(successor) && + (sibling.right === None || + (sibling.right->castNotOption).color === Black) && + sibling.left !== None && + (sibling.left->castNotOption).color === Red + ) { + sibling.color = Red + (sibling.left->castNotOption).color = Black + rotateRight(rbt, sibling) + } else if ( + !isLeft(successor) && + (sibling.left === None || + (sibling.left->castNotOption).color === Black) && + sibling.right !== None && + (sibling.right->castNotOption).color === Red + ) { + sibling.color = Red + (sibling.right->castNotOption).color = Black + rotateLeft(rbt, sibling) + } + break.contents = true + } else { + let sibling = siblingOf(successor) + let sibling = sibling->castNotOption + sibling.color = successorParent.color + if isLeft(successor) { + (sibling.right->castNotOption).color = Black + rotateRight(rbt, successorParent) + } else { + (sibling.left->castNotOption).color = Black + rotateLeft(rbt, successorParent) + } + } + } + } + } + } + + if isLeaf(. successor) { + if rbt.root === Some(successor) { + rbt.root = None + } + switch successor.parent { + | None => () + | Some(parent) => parent->leftOrRightSet(~node=successor, None) + } + } +} + +let remove = (rbt, value) => + switch findNode(rbt, rbt.root, value) { + | Some(node) => + rbt->removeNode(node) + rbt.size = rbt.size - 1 + true + | None => false + } + +let rec findNodeThroughCallback = (rbt, node, cb) => + switch node { + | None => None + | Some(node) => + let cmp = cb(. 
node) + if cmp === 0 { + Some(node) + } else if cmp < 0 { + findNodeThroughCallback(rbt, node.left, cb) + } else { + findNodeThroughCallback(rbt, node.right, cb) + } + } + +let removeThroughCallback = (rbt, cb) => + switch findNodeThroughCallback(rbt, rbt.root, cb) { + | Some(node) => + rbt->removeNode(node) + rbt.size = rbt.size - 1 + true + | None => false + } + +let make = (~compare) => {size: 0, root: None, compare: compare} + +let makeWith = (array, ~compare) => { + let rbt = make(~compare) + array->Js.Array2.forEach(((value, height)) => + add(rbt, value, ~height)->ignore + ) + rbt +} + +let rec heightOfInterval = (rbt, node, lhs, rhs) => + switch node { + | None => 0. + | Some(n) => + if lhs === None && rhs === None { + n.sum + } else if lhs !== None && rbt.compare(. n.value, lhs->castNotOption) < 0 { + rbt->heightOfInterval(n.right, lhs, rhs) + } else if rhs !== None && rbt.compare(. n.value, rhs->castNotOption) > 0 { + rbt->heightOfInterval(n.left, lhs, rhs) + } else { + n.height +. + rbt->heightOfInterval(n.left, lhs, None) +. + rbt->heightOfInterval(n.right, None, rhs) + } + } + +let heightOfInterval = (rbt, lhs, rhs) => + heightOfInterval(rbt, rbt.root, lhs, rhs) + +let rec firstVisibleNode = (node, top) => + switch node { + | None => None + | Some(node) => + if node.sum <= top { + None + } else { + let nodeHeight = node.height + let sumLeft = switch node.left { + | None => 0.0 + | Some(left) => left.sum + } + if sumLeft > top { + firstVisibleNode(node.left, top) + } else if sumLeft +. nodeHeight > top { + Some(node) + } else { + let offset = sumLeft +. nodeHeight + firstVisibleNode(node.right, top -. offset) + } + } + } + +let lastVisibleNode = (node, top) => + switch firstVisibleNode(node, top) { + | None => node->peekMaxNode + | first => first + } + +let firstVisibleValue = (rbt, ~top) => + switch firstVisibleNode(rbt.root, top) { + | None => None + | Some(node) => Some(node.value) + } + +let rec leftmost = node => + switch node.left { + | None => node + | Some(node) => node->leftmost + } + +let rec firstRightParent = node => + switch node.parent { + | None => None + | Some(parent) => isLeft(node) ? Some(parent) : parent->firstRightParent + } + +let nextNode = node => + switch node.right { + | None => node->firstRightParent + | Some(right) => Some(right->leftmost) + } + +let rec sumLeftSpine = (node, ~fromRightChild) => { + let leftSpine = switch node.left { + | None => node.height + | Some(left) => fromRightChild ? node.height +. left.sum : 0.0 + } + switch node.parent { + | None => leftSpine + | Some(parent) => + leftSpine +. + parent->sumLeftSpine(~fromRightChild=parent.right === Some(node)) + } +} + +let getY = node => node->sumLeftSpine(~fromRightChild=true) -. node.height + +let rec iterate = (~inclusive, firstNode, lastNode, ~callback) => + switch firstNode { + | None => () + | Some(node) => + if inclusive { + callback(. node) + } + if firstNode !== lastNode { + if !inclusive { + callback(. node) + } + iterate(~inclusive, node->nextNode, lastNode, ~callback) + } + } + +let rec iterateWithY = (~y=?, ~inclusive, firstNode, lastNode, ~callback) => + switch firstNode { + | None => () + | Some(node) => + let y = switch y { + | None => node->getY + | Some(y) => y + } + if inclusive { + callback(. node, y) + } + if firstNode !== lastNode { + if !inclusive { + callback(. node, y) + } + iterateWithY( + ~y=y +. 
node.height, + ~inclusive, + node->nextNode, + lastNode, + ~callback, + ) + } + } + +let rec updateSum = (node, ~delta) => + switch node { + | None => () + | Some(node) => + node.sum = node.sum +. delta + node.parent->updateSum(~delta) + } + +let updateHeight = (node, ~height) => { + let delta = height -. node.height + node.height = height + Some(node)->updateSum(~delta) +} + +type oldNewVisible<'value> = { + mutable old: array<'value>, + mutable new: array<'value>, +} + +let getAnchorDelta = (rbt, ~anchor) => + switch anchor { + | None => 0.0 + | Some(value, y) => + switch rbt->findNode(rbt.root, value) { + | Some(node) => y -. node->getY + | None => 0.0 + } + } + +let onChangedVisible = ( + ~anchor=None, + rbt, + ~oldNewVisible, + ~top as top_, + ~bottom as bottom_, + ~appear, + ~remained, + ~disappear, +) => { + let old = oldNewVisible.new + let new = oldNewVisible.old + + new + ->Js.Array2.removeCountInPlace(~pos=0, ~count=new->Js.Array2.length) + ->ignore + oldNewVisible.old = old + oldNewVisible.new = new + + let anchorDelta = rbt->getAnchorDelta(~anchor) + + let top = top_ -. anchorDelta + let top = top < 0.0 ? 0.0 : top + let bottom = bottom_ -. anchorDelta + + let first = firstVisibleNode(rbt.root, top) + let last = lastVisibleNode(rbt.root, bottom) + + let oldLen = old->Js.Array2.length + let oldIter = ref(0) + iterateWithY(~inclusive=true, first, last, (. node, y_) => { + let y = y_ +. anchorDelta + if y >= 0.0 { + while ( + oldIter.contents < oldLen && + rbt.compare(. + Js.Array2.unsafe_get(old, oldIter.contents), + node.value, + ) < 0 + ) { + disappear(. Js.Array2.unsafe_get(old, oldIter.contents)) + oldIter.contents = oldIter.contents + 1 + } + new->Js.Array2.push(node.value)->ignore + if oldIter.contents < oldLen { + let cmp = rbt.compare(. + Js.Array2.unsafe_get(old, oldIter.contents), + node.value, + ) + if cmp == 0 { + remained(. node, y) + oldIter.contents = oldIter.contents + 1 + } else { + appear(. node, y) + } + } else { + appear(. node, y) + } + } + }) + while oldIter.contents < oldLen { + disappear(. 
Js.Array2.unsafe_get(old, oldIter.contents)) + oldIter.contents = oldIter.contents + 1 + } +} diff --git a/res_syntax/benchmarks/dune b/res_syntax/benchmarks/dune new file mode 100644 index 0000000000..5c2ec666b8 --- /dev/null +++ b/res_syntax/benchmarks/dune @@ -0,0 +1,20 @@ +(executable + (name benchmark) + (public_name bench) + (enabled_if + (or + (= %{system} macosx) + ; or one of Linuxes (see https://github.com/ocaml/ocaml/issues/10613) + (= %{system} linux) + (= %{system} linux_elf) + (= %{system} elf) + (= %{system} linux_eabihf) + (= %{system} linux_eabi))) + (flags + (-open Syntax -open Compilerlibs406)) + (foreign_stubs + (language c) + (names time)) + (libraries syntax compilerlibs406)) + +(data_only_dirs data) diff --git a/res_syntax/benchmarks/time.c b/res_syntax/benchmarks/time.c new file mode 100644 index 0000000000..df29af7bf2 --- /dev/null +++ b/res_syntax/benchmarks/time.c @@ -0,0 +1,44 @@ +#include +#include + +// +// Platform-specific includes +// +#if (defined(__MACH__) && defined(__APPLE__)) +#include +#elif defined(__linux__) +#include +#endif + +// +// Platform-specific globals +// +#if (defined(__MACH__) && defined(__APPLE__)) +static mach_timebase_info_data_t info; +#endif + +// +// Exported functions +// +CAMLprim value caml_mach_initialize(value unit) { +#if (defined(__MACH__) && defined(__APPLE__)) + mach_timebase_info(&info); +#endif + + return Val_unit; +} + +CAMLprim value caml_mach_absolute_time(value unit) { + uint64_t result = 0; + +#if (defined(__MACH__) && defined(__APPLE__)) + uint64_t now = mach_absolute_time(); + result = (now * info.numer) / info.denom; +#elif defined(__linux__) + struct timespec now; + clock_gettime(CLOCK_MONOTONIC, &now); + result = now.tv_sec * 1000 + now.tv_nsec / 1000000; +#endif + + return caml_copy_int64(result); +} diff --git a/res_syntax/cli/JSXV4.md b/res_syntax/cli/JSXV4.md new file mode 100644 index 0000000000..b8f6b0ef23 --- /dev/null +++ b/res_syntax/cli/JSXV4.md @@ -0,0 +1,423 @@ +## Introduction + +JSX V4, supported in the compiler version introduces a new idiomatic record-based representation of components which is incompatible with V3. Because of this, either the entire project or dependencies need to be compiled in V4 mode, or some compatibility features need to be used to mix V3 and V4 in the same project. +The V4 representation is part of the spec, so `@react.component` is effectively just an abbreviation for code that can be written by hand. + +## Turn On V4 + +To build an entire project in V4 mode, including all its dependencies, use the new `"jsx"` configuration in `bsconfig.json` instead of the old `"reason"`: + +```json +"jsx": { "version": 4 } +``` + +> Note that JSX V4 requires the rescript compiler 10.1 or higher, and `rescript-react` version `0.11` or higher. In addition, `react` version `18.0` is required. + +## Configuration And Upgrade + +### Dependency-level config + +Dependencies inherit the `jsx` configuration of the root project. So if the root project uses V4 then the dependencies are built using V4, and the same for V3. +To build certain dependencies in V3 compatibility mode, whatever the version used in the root project, use `"v3-dependencies"` as in the example: + +```json +"jsx": { + "version": 4, + "v3-dependencies": ["rescript-react-native", "rescript-react-navigation"] +} +``` + +In V3 compatibility mode, the listed dependencies are built in V3 mode, and in addition `-open ReatcV3` is added to the compiler options, so that the `ReactV3` compatibility module in rescript-react is used. 
+ +> Note: do not add @rescript/react to the v3-dependencies, or it will cause a cyclic dependencies error. + +### Classic and Automatic Mode + +Classic mode is the default and generates calls to `React.createElement` just as with V3. + +```json +"jsx": { + "version": 4, + "mode": "classic" +} +``` + +Automatic mode is an experimental mode that generate calls to `_jsx` functions (similar to TypeScript's `react-jsx` mode) + +```json +"jsx": { + "version": 4, + "mode": "automatic" +} +``` + +### File-level config + +The top-level attribute `@@jsxConfig` is used to update the `jsx` config for the rest of the file (or until the next config update). Only the values mentioned are updated, the others are left unchanged. + +```rescript +@@jsxConfig({ version: 4, mode: "automatic" }) + +module Wrapper = { + module R1 = { + @react.component // V4 and new _jsx transform + let make = () => body + } + + @@jsxConfig({ version: 4, mode: "classic" }) + + module R2 = { + @react.component // V4 with `React.createElement` + let make = () => body + } +} + +@@jsxConfig({ version: 3 }) + +@react.component // V3 +let make = () => body +``` + +### Migration of V3 components that depend on the internal representation + +Some components in existing projects are written in a way that is dependent on the V3 internal representation. +Here are a few examples of how to convert them to V4. + +#### `makeProps` does not exist in V4 + +Rewrite this: + +```rescript +// V3 +module M = { + @obj external makeProps: (~msg: 'msg, ~key: string=?, unit) => {"msg": 'msg} = "" // No more makeProps + + let make = (~msg) => { +
<div> {React.string(msg)} </div>
+ } +} +``` + +To this: + +```rescript +// V4 +module M = { + type props<'msg> = {msg: 'msg} + let make = props => {
+    <div> {React.string(props.msg)} </div>
+} +``` + +#### `React.Context` + +Rewrite this: + +```rescript +module Context = { + let context = React.createContext(() => ()) + + module Provider = { + let provider = React.Context.provider(context) + + @react.component + let make = (~value, ~children) => { + React.createElement(provider, {"value": value, "children": children}) // Error + } + } +} +``` + +To this: + +```rescript +module Context = { + let context = React.createContext(() => ()) + + module Provider = { + let make = React.Context.provider(context) + } +} +``` + +#### `React.forwardRef`(Discouraged) + +`forwardRef` is discouraged, but sometimes used in existing V3 code such as this example: + +```rescript +module FancyInput = { + @react.component + let make = React.forwardRef(( + ~className=?, + ~children, + ref_, // argument + ) => +
<div>
+      <input
+        ?className
+        ref=?{ref_->Js.Nullable.toOption->Belt.Option.map(ReactDOM.Ref.domRef)}
+      />
+      children
+    </div>
+ ) +} + +@react.component +let make = () => { + let input = React.useRef(Js.Nullable.null) + +
<div>
+      <FancyInput
+        ref=input // prop
+      >
+        {React.null}
+      </FancyInput>
+    </div>
+} +``` + +In this example, there is an inconsistency between `ref` as prop and `ref_` as argument. With JSX V4, `ref` is only allowed as an argument. + +```rescript +module FancyInput = { + @react.component + let make = React.forwardRef(( + ~className=?, + ~children, + ref, // only `ref` is allowed + ) => +
<div>
+      <input
+        ?className
+        ref=?{ref->Js.Nullable.toOption->Belt.Option.map(ReactDOM.Ref.domRef)}
+      />
+      children
+    </div>
+ ) +} + +@react.component +let make = () => { + let input = React.useRef(Js.Nullable.null) + +
<div>
+      <FancyInput ref=input>
+        {React.null}
+      </FancyInput>
+    </div>
+} +``` + +## V4 Spec + +This is the specification that decribes the two JSX V4 transformations: + +- For component definition `@react.component let make = ...` +- For component application `` + +The transformations are optional in that it is possible to write the resulting code manually instead of using them. + +### Pre-transformation for component definition + +To simplify the description of component definition, a pre-transformation +is used to move `@react.component` to a place where the actual transformations operate. + +#### Normal Case + +```rescript +@react.component +let make = (~x, ~y, ~z) => body +``` + +is pre-transformed to + +```rescript +let make = @react.component (~x, ~y, ~z) => body +``` + +#### Forward Ref + +```rescript +@react.component +let make = React.forwardRef((~x, ~y, ref) => body) +``` + +is pre-transformed to + +```rescript +let make = React.forwardRef({ + let fn = + @react.component (~x, ~y) => ref => body + (props, ref) => fn(props, ref) +}) +``` + +### Transformation for Component Definition + +```rescript +@react.component (~x, ~y=3+x, ?z) => body +``` + +is transformed to + +```rescript +type props<'x, 'y, 'z> = {x: 'x, y?: 'y, z?: 'z} + +({x, y, z}: props<_>) => { + let y = switch props.y { + | None => 3 + x + | Some(y) => y + } + body +} +``` + +> Note: this implicit definition of type `props` means that there cannot be other type definitions of `props` in the same scope, or it will be a compiler error about multiple definitions of the type name. + +### Transformation for Component Application + +```rescript + +// is transformed to +React.createElement(Comp.make, {x: x}) + + +// is transformed to +React.createElement(Comp.make, {x, y: 7, ?z}) + + +// is transformed to +React.createElement(Comp.make, React.addKeyProp(~key="7", {x: x})) + + +// is transformed to +React.createElement(Comp.make, React.addKeyProp(~key=?Some("7"), {x: x})) +``` + +### New experimental automatic mode + +The V4 ppx supports [the new jsx transform](https://reactjs.org/blog/2020/09/22/introducing-the-new-jsx-transform.html) of React.js. + +The jsx transform only affects component application, but not the definition. + +```rescript + +// is transformed to +React.jsx(Comp.make, {x: x}) +``` + +```rescript +
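<div name="div" />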
+// is transformed to +ReactDOM.jsx("div", { name: "div" }) +``` + +The props type of dom elements, e.g. `div`, is inferred to `ReactDOM.domProps`. + +```rescript +type domProps = { + key?: string, + id?: string, + ... +} +``` + +### Interface And External + +```rescript +@react.component (~x: int, ~y: int=?, ~z: int=?) => React.element + +// is transformed to + +type props<'x, 'y, 'z> = {x: 'x, y?: 'y, z?: 'z} + +props => React.element +``` + +Since an external is a function declaration, it follows the same rule. + +### Component Name + +The convention for names is the same one used in V3: the generated +function has the name of the enclosing module/file. + +### Fragments + +```rescript +<> comp1 comp2 comp3 + +// is transformed to + +// v4 +ReactDOMRe.createElement(ReasonReact.fragment, [comp1, comp2, comp3]) + +// v4 @ new jsx transform +React.jsxs(React.jsxFragment, {children: [comp1, comp2, comp3]}) +``` + +### Spread props (new feature) + +V4 introduces support for the spread operator for props: `{...p}`. + +```rescript +module A = { + @react.component + let make = (~x, ~y) => body +} + +let p: A.props<_> = {x: "x", y: "y"} + + + + +// not allowed + + +``` + +### Shared props type (new feature) + +V4 introduces support to control the definition of the `props` type by passing as argument to `@react.component` the body of the type definition of `props`. The main application is sharing a single type definition across several components. Here are a few examples: + + +```rescript +type sharedprops<'x, 'y> = {x: 'x, y: 'y, z:string} + +module C1 = { + @react.component(:sharedProps<'a, 'b>) + let make = (~x, ~y) => React.string(x ++ y ++ z) +} + +module C2 = { + @react.component(:sharedProps) + let make = (~x, ~y) => React.string(x ++ y ++ z) +} + +module C3 = { + type myProps = sharedProps + @react.component(:myProps) + let make = (~x, ~y) => React.int(x + y) +} +``` + +The generated code (some details removed) looks like this: +```rescript +@@jsxConfig({version: 4, mode: "classic"}) + +type sharedprops<'x, 'y> = {x: 'x, y: 'y, z: string} + +module C1 = { + type props<'a, 'b> = sharedProps<'a, 'b> + let make = ({x, y, _}: props<_>) => React.string(x ++ y ++ z) +} + +module C2 = { + type props<'b> = sharedProps + let make = ({x, y, _}: props<_>) => React.string(x ++ y ++ z) +} + +module C3 = { + type myProps = sharedProps + type props = myProps + let make = ({x, y, _}: props) => React.int(x + y) +} +``` diff --git a/res_syntax/cli/dune b/res_syntax/cli/dune new file mode 100644 index 0000000000..a04696d6e7 --- /dev/null +++ b/res_syntax/cli/dune @@ -0,0 +1,8 @@ +(executable + (name res_cli) + (public_name rescript) + (modes byte exe) + (flags + (-open Syntax -open Compilerlibs406) + (:standard -w +a-4-42-40-9-48-70)) + (libraries syntax compilerlibs406)) diff --git a/res_syntax/cli/react_jsx_common.ml b/res_syntax/cli/react_jsx_common.ml new file mode 100644 index 0000000000..3b7f98df78 --- /dev/null +++ b/res_syntax/cli/react_jsx_common.ml @@ -0,0 +1,43 @@ +open Asttypes +open Parsetree + +type jsxConfig = { + mutable version: int; + mutable module_: string; + mutable mode: string; + mutable nestedModules: string list; + mutable hasReactComponent: bool; +} + +(* Helper method to look up the [@react.component] attribute *) +let hasAttr (loc, _) = loc.txt = "react.component" + +(* Iterate over the attributes and try to find the [@react.component] attribute *) +let hasAttrOnBinding {pvb_attributes} = + List.find_opt hasAttr pvb_attributes <> None + +let coreTypeOfAttrs attributes = + 
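(* Return the core type from a [@react.component (: typ)] payload, if present; this is the annotation used for the shared props type described in JSXV4.md. *)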
List.find_map + (fun ({txt}, payload) -> + match (txt, payload) with + | "react.component", PTyp coreType -> Some coreType + | _ -> None) + attributes + +let typVarsOfCoreType {ptyp_desc} = + match ptyp_desc with + | Ptyp_constr (_, coreTypes) -> + List.filter + (fun {ptyp_desc} -> + match ptyp_desc with + | Ptyp_var _ -> true + | _ -> false) + coreTypes + | _ -> [] + +let raiseError ~loc msg = Location.raise_errorf ~loc msg + +let raiseErrorMultipleReactComponent ~loc = + raiseError ~loc + "Only one component definition is allowed for each module. Move to a \ + submodule or other file if necessary." diff --git a/res_syntax/cli/reactjs_jsx_ppx.ml b/res_syntax/cli/reactjs_jsx_ppx.ml new file mode 100644 index 0000000000..4da2cae1e3 --- /dev/null +++ b/res_syntax/cli/reactjs_jsx_ppx.ml @@ -0,0 +1,167 @@ +open Ast_mapper +open Asttypes +open Parsetree +open Longident + +let getPayloadFields payload = + match payload with + | PStr + ({ + pstr_desc = + Pstr_eval ({pexp_desc = Pexp_record (recordFields, None)}, _); + } + :: _rest) -> + recordFields + | _ -> [] + +type configKey = Int | String + +let getJsxConfigByKey ~key ~type_ recordFields = + let values = + List.filter_map + (fun ((lid, expr) : Longident.t Location.loc * expression) -> + match (type_, lid, expr) with + | ( Int, + {txt = Lident k}, + {pexp_desc = Pexp_constant (Pconst_integer (value, None))} ) + when k = key -> + Some value + | ( String, + {txt = Lident k}, + {pexp_desc = Pexp_constant (Pconst_string (value, None))} ) + when k = key -> + Some value + | _ -> None) + recordFields + in + match values with + | [] -> None + | [v] | v :: _ -> Some v + +let getInt ~key fields = + match fields |> getJsxConfigByKey ~key ~type_:Int with + | None -> None + | Some s -> int_of_string_opt s + +let getString ~key fields = fields |> getJsxConfigByKey ~key ~type_:String + +let updateConfig config payload = + let fields = getPayloadFields payload in + (match getInt ~key:"version" fields with + | None -> () + | Some i -> config.React_jsx_common.version <- i); + (match getString ~key:"module" fields with + | None -> () + | Some s -> config.module_ <- s); + match getString ~key:"mode" fields with + | None -> () + | Some s -> config.mode <- s + +let isJsxConfigAttr ((loc, _) : attribute) = loc.txt = "jsxConfig" + +let processConfigAttribute attribute config = + if isJsxConfigAttr attribute then updateConfig config (snd attribute) + +let getMapper ~config = + let expr3, module_binding3, transformSignatureItem3, transformStructureItem3 = + Reactjs_jsx_v3.jsxMapper ~config + in + let expr4, module_binding4, transformSignatureItem4, transformStructureItem4 = + Reactjs_jsx_v4.jsxMapper ~config + in + + let expr mapper e = + match config.version with + | 3 -> expr3 mapper e + | 4 -> expr4 mapper e + | _ -> default_mapper.expr mapper e + in + let module_binding mapper mb = + match config.version with + | 3 -> module_binding3 mapper mb + | 4 -> module_binding4 mapper mb + | _ -> default_mapper.module_binding mapper mb + in + let saveConfig () = + { + config with + version = config.version; + module_ = config.module_; + mode = config.mode; + hasReactComponent = config.hasReactComponent; + } + in + let restoreConfig oldConfig = + config.version <- oldConfig.React_jsx_common.version; + config.module_ <- oldConfig.module_; + config.mode <- oldConfig.mode; + config.hasReactComponent <- oldConfig.hasReactComponent + in + let signature mapper items = + let oldConfig = saveConfig () in + config.hasReactComponent <- false; + let result = + List.map + (fun item -> + 
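(* A @@jsxConfig attribute updates the in-scope config before the item is mapped, so the items that follow it in the same signature are transformed with the new version/mode. *)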
(match item.psig_desc with + | Psig_attribute attr -> processConfigAttribute attr config + | _ -> ()); + let item = default_mapper.signature_item mapper item in + if config.version = 3 then transformSignatureItem3 mapper item + else if config.version = 4 then transformSignatureItem4 mapper item + else [item]) + items + |> List.flatten + in + restoreConfig oldConfig; + result + in + let structure mapper items = + let oldConfig = saveConfig () in + config.hasReactComponent <- false; + let result = + List.map + (fun item -> + (match item.pstr_desc with + | Pstr_attribute attr -> processConfigAttribute attr config + | _ -> ()); + let item = default_mapper.structure_item mapper item in + if config.version = 3 then transformStructureItem3 mapper item + else if config.version = 4 then transformStructureItem4 mapper item + else [item]) + items + |> List.flatten + in + restoreConfig oldConfig; + result + in + + {default_mapper with expr; module_binding; signature; structure} + +let rewrite_implementation ~jsxVersion ~jsxModule ~jsxMode + (code : Parsetree.structure) : Parsetree.structure = + let config = + { + React_jsx_common.version = jsxVersion; + module_ = jsxModule; + mode = jsxMode; + nestedModules = []; + hasReactComponent = false; + } + in + let mapper = getMapper ~config in + mapper.structure mapper code + +let rewrite_signature ~jsxVersion ~jsxModule ~jsxMode + (code : Parsetree.signature) : Parsetree.signature = + let config = + { + React_jsx_common.version = jsxVersion; + module_ = jsxModule; + mode = jsxMode; + nestedModules = []; + hasReactComponent = false; + } + in + let mapper = getMapper ~config in + mapper.signature mapper code diff --git a/res_syntax/cli/reactjs_jsx_ppx.mli b/res_syntax/cli/reactjs_jsx_ppx.mli new file mode 100644 index 0000000000..388202edba --- /dev/null +++ b/res_syntax/cli/reactjs_jsx_ppx.mli @@ -0,0 +1,23 @@ +(* + This is the module that handles turning Reason JSX' agnostic function call into + a ReasonReact-specific function call. Aka, this is a macro, using OCaml's ppx + facilities; https://whitequark.org/blog/2014/04/16/a-guide-to-extension- + points-in-ocaml/ + You wouldn't use this file directly; it's used by ReScript's + bsconfig.json. 
Specifically, there's a field called `react-jsx` inside the + field `reason`, which enables this ppx through some internal call in bsb +*) + +val rewrite_implementation : + jsxVersion:int -> + jsxModule:string -> + jsxMode:string -> + Parsetree.structure -> + Parsetree.structure + +val rewrite_signature : + jsxVersion:int -> + jsxModule:string -> + jsxMode:string -> + Parsetree.signature -> + Parsetree.signature diff --git a/res_syntax/cli/reactjs_jsx_v3.ml b/res_syntax/cli/reactjs_jsx_v3.ml new file mode 100644 index 0000000000..c55970d1b8 --- /dev/null +++ b/res_syntax/cli/reactjs_jsx_v3.ml @@ -0,0 +1,1190 @@ +open Ast_helper +open Ast_mapper +open Asttypes +open Parsetree +open Longident + +let nolabel = Nolabel + +let labelled str = Labelled str + +let optional str = Optional str + +let isOptional str = + match str with + | Optional _ -> true + | _ -> false + +let isLabelled str = + match str with + | Labelled _ -> true + | _ -> false + +let getLabel str = + match str with + | Optional str | Labelled str -> str + | Nolabel -> "" + +let optionIdent = Lident "option" + +let constantString ~loc str = + Ast_helper.Exp.constant ~loc (Pconst_string (str, None)) + +let safeTypeFromValue valueStr = + let valueStr = getLabel valueStr in + if valueStr = "" || (valueStr.[0] [@doesNotRaise]) <> '_' then valueStr + else "T" ^ valueStr + +let keyType loc = + Typ.constr ~loc {loc; txt = optionIdent} + [Typ.constr ~loc {loc; txt = Lident "string"} []] + +type 'a children = ListLiteral of 'a | Exact of 'a + +type componentConfig = {propsName: string} + +(* if children is a list, convert it to an array while mapping each element. If not, just map over it, as usual *) +let transformChildrenIfListUpper ~loc ~mapper theList = + let rec transformChildren_ theList accum = + (* not in the sense of converting a list to an array; convert the AST + reprensentation of a list to the AST reprensentation of an array *) + match theList with + | {pexp_desc = Pexp_construct ({txt = Lident "[]"}, None)} -> ( + match accum with + | [singleElement] -> Exact singleElement + | accum -> ListLiteral (Exp.array ~loc (List.rev accum))) + | { + pexp_desc = + Pexp_construct + ({txt = Lident "::"}, Some {pexp_desc = Pexp_tuple [v; acc]}); + } -> + transformChildren_ acc (mapper.expr mapper v :: accum) + | notAList -> Exact (mapper.expr mapper notAList) + in + transformChildren_ theList [] + +let transformChildrenIfList ~loc ~mapper theList = + let rec transformChildren_ theList accum = + (* not in the sense of converting a list to an array; convert the AST + reprensentation of a list to the AST reprensentation of an array *) + match theList with + | {pexp_desc = Pexp_construct ({txt = Lident "[]"}, None)} -> + Exp.array ~loc (List.rev accum) + | { + pexp_desc = + Pexp_construct + ({txt = Lident "::"}, Some {pexp_desc = Pexp_tuple [v; acc]}); + } -> + transformChildren_ acc (mapper.expr mapper v :: accum) + | notAList -> mapper.expr mapper notAList + in + transformChildren_ theList [] + +let extractChildren ?(removeLastPositionUnit = false) ~loc propsAndChildren = + let rec allButLast_ lst acc = + match lst with + | [] -> [] + | [(Nolabel, {pexp_desc = Pexp_construct ({txt = Lident "()"}, None)})] -> + acc + | (Nolabel, {pexp_loc}) :: _rest -> + React_jsx_common.raiseError ~loc:pexp_loc + "JSX: found non-labelled argument before the last position" + | arg :: rest -> allButLast_ rest (arg :: acc) + in + let allButLast lst = allButLast_ lst [] |> List.rev in + match + List.partition + (fun (label, _) -> label = labelled "children") + 
propsAndChildren + with + | [], props -> + (* no children provided? Place a placeholder list *) + ( Exp.construct ~loc {loc; txt = Lident "[]"} None, + if removeLastPositionUnit then allButLast props else props ) + | [(_, childrenExpr)], props -> + (childrenExpr, if removeLastPositionUnit then allButLast props else props) + | _ -> + React_jsx_common.raiseError ~loc + "JSX: somehow there's more than one `children` label" + +let unerasableIgnore loc = + ( {loc; txt = "warning"}, + PStr [Str.eval (Exp.constant (Pconst_string ("-16", None)))] ) + +let merlinFocus = ({loc = Location.none; txt = "merlin.focus"}, PStr []) + +(* Helper method to filter out any attribute that isn't [@react.component] *) +let otherAttrsPure (loc, _) = loc.txt <> "react.component" + +(* Finds the name of the variable the binding is assigned to, otherwise raises Invalid_argument *) +let rec getFnName binding = + match binding with + | {ppat_desc = Ppat_var {txt}} -> txt + | {ppat_desc = Ppat_constraint (pat, _)} -> getFnName pat + | {ppat_loc} -> + React_jsx_common.raiseError ~loc:ppat_loc + "react.component calls cannot be destructured." + +let makeNewBinding binding expression newName = + match binding with + | {pvb_pat = {ppat_desc = Ppat_var ppat_var} as pvb_pat} -> + { + binding with + pvb_pat = + {pvb_pat with ppat_desc = Ppat_var {ppat_var with txt = newName}}; + pvb_expr = expression; + pvb_attributes = [merlinFocus]; + } + | {pvb_loc} -> + React_jsx_common.raiseError ~loc:pvb_loc + "react.component calls cannot be destructured." + +(* Lookup the value of `props` otherwise raise Invalid_argument error *) +let getPropsNameValue _acc (loc, exp) = + match (loc, exp) with + | {txt = Lident "props"}, {pexp_desc = Pexp_ident {txt = Lident str}} -> + {propsName = str} + | {txt; loc}, _ -> + React_jsx_common.raiseError ~loc + "react.component only accepts props as an option, given: { %s }" + (Longident.last txt) + +(* Lookup the `props` record or string as part of [@react.component] and store the name for use when rewriting *) +let getPropsAttr payload = + let defaultProps = {propsName = "Props"} in + match payload with + | Some + (PStr + ({ + pstr_desc = + Pstr_eval ({pexp_desc = Pexp_record (recordFields, None)}, _); + } + :: _rest)) -> + List.fold_left getPropsNameValue defaultProps recordFields + | Some + (PStr + ({ + pstr_desc = + Pstr_eval ({pexp_desc = Pexp_ident {txt = Lident "props"}}, _); + } + :: _rest)) -> + {propsName = "props"} + | Some (PStr ({pstr_desc = Pstr_eval (_, _); pstr_loc} :: _rest)) -> + React_jsx_common.raiseError ~loc:pstr_loc + "react.component accepts a record config with props as an options." + | _ -> defaultProps + +(* Plucks the label, loc, and type_ from an AST node *) +let pluckLabelDefaultLocType (label, default, _, _, loc, type_) = + (label, default, loc, type_) + +(* Lookup the filename from the location information on the AST node and turn it into a valid module identifier *) +let filenameFromLoc (pstr_loc : Location.t) = + let fileName = + match pstr_loc.loc_start.pos_fname with + | "" -> !Location.input_name + | fileName -> fileName + in + let fileName = + try Filename.chop_extension (Filename.basename fileName) + with Invalid_argument _ -> fileName + in + let fileName = String.capitalize_ascii fileName in + fileName + +(* Build a string representation of a module name with segments separated by $ *) +let makeModuleName fileName nestedModules fnName = + let fullModuleName = + match (fileName, nestedModules, fnName) with + (* TODO: is this even reachable? 
It seems like the fileName always exists *) + | "", nestedModules, "make" -> nestedModules + | "", nestedModules, fnName -> List.rev (fnName :: nestedModules) + | fileName, nestedModules, "make" -> fileName :: List.rev nestedModules + | fileName, nestedModules, fnName -> + fileName :: List.rev (fnName :: nestedModules) + in + let fullModuleName = String.concat "$" fullModuleName in + fullModuleName + +(* + AST node builders + These functions help us build AST nodes that are needed when transforming a [@react.component] into a + constructor and a props external +*) + +(* Build an AST node representing all named args for the `external` definition for a component's props *) +let rec recursivelyMakeNamedArgsForExternal list args = + match list with + | (label, default, loc, interiorType) :: tl -> + recursivelyMakeNamedArgsForExternal tl + (Typ.arrow ~loc label + (match (label, interiorType, default) with + (* ~foo=1 *) + | label, None, Some _ -> + { + ptyp_desc = Ptyp_var (safeTypeFromValue label); + ptyp_loc = loc; + ptyp_attributes = []; + } + (* ~foo: int=1 *) + | _label, Some type_, Some _ -> type_ + (* ~foo: option(int)=? *) + | ( label, + Some {ptyp_desc = Ptyp_constr ({txt = Lident "option"}, [type_])}, + _ ) + | ( label, + Some + { + ptyp_desc = + Ptyp_constr + ({txt = Ldot (Lident "*predef*", "option")}, [type_]); + }, + _ ) + (* ~foo: int=? - note this isnt valid. but we want to get a type error *) + | label, Some type_, _ + when isOptional label -> + type_ + (* ~foo=? *) + | label, None, _ when isOptional label -> + { + ptyp_desc = Ptyp_var (safeTypeFromValue label); + ptyp_loc = loc; + ptyp_attributes = []; + } + (* ~foo *) + | label, None, _ -> + { + ptyp_desc = Ptyp_var (safeTypeFromValue label); + ptyp_loc = loc; + ptyp_attributes = []; + } + | _label, Some type_, _ -> type_) + args) + | [] -> args + +(* Build an AST node for the [@bs.obj] representing props for a component *) +let makePropsValue fnName loc namedArgListWithKeyAndRef propsType = + let propsName = fnName ^ "Props" in + { + pval_name = {txt = propsName; loc}; + pval_type = + recursivelyMakeNamedArgsForExternal namedArgListWithKeyAndRef + (Typ.arrow nolabel + { + ptyp_desc = Ptyp_constr ({txt = Lident "unit"; loc}, []); + ptyp_loc = loc; + ptyp_attributes = []; + } + propsType); + pval_prim = [""]; + pval_attributes = [({txt = "bs.obj"; loc}, PStr [])]; + pval_loc = loc; + } + +(* Build an AST node representing an `external` with the definition of the [@bs.obj] *) +let makePropsExternal fnName loc namedArgListWithKeyAndRef propsType = + { + pstr_loc = loc; + pstr_desc = + Pstr_primitive + (makePropsValue fnName loc namedArgListWithKeyAndRef propsType); + } + +(* Build an AST node for the signature of the `external` definition *) +let makePropsExternalSig fnName loc namedArgListWithKeyAndRef propsType = + { + psig_loc = loc; + psig_desc = + Psig_value (makePropsValue fnName loc namedArgListWithKeyAndRef propsType); + } + +(* Build an AST node for the props name when converted to an object inside the function signature *) +let makePropsName ~loc name = + {ppat_desc = Ppat_var {txt = name; loc}; ppat_loc = loc; ppat_attributes = []} + +let makeObjectField loc (str, attrs, type_) = + Otag ({loc; txt = str}, attrs, type_) + +(* Build an AST node representing a "closed" object representing a component's props *) +let makePropsType ~loc namedTypeList = + Typ.mk ~loc + (Ptyp_object (List.map (makeObjectField loc) namedTypeList, Closed)) + +(* Builds an AST node for the entire `external` definition of props *) +let 
makeExternalDecl fnName loc namedArgListWithKeyAndRef namedTypeList = + makePropsExternal fnName loc + (List.map pluckLabelDefaultLocType namedArgListWithKeyAndRef) + (makePropsType ~loc namedTypeList) + +let newtypeToVar newtype type_ = + let var_desc = Ptyp_var ("type-" ^ newtype) in + let typ (mapper : Ast_mapper.mapper) typ = + match typ.ptyp_desc with + | Ptyp_constr ({txt = Lident name}, _) when name = newtype -> + {typ with ptyp_desc = var_desc} + | _ -> Ast_mapper.default_mapper.typ mapper typ + in + let mapper = {Ast_mapper.default_mapper with typ} in + mapper.typ mapper type_ + +(* TODO: some line number might still be wrong *) +let jsxMapper ~config = + let transformUppercaseCall3 modulePath mapper loc attrs _ callArguments = + let children, argsWithLabels = + extractChildren ~loc ~removeLastPositionUnit:true callArguments + in + let argsForMake = argsWithLabels in + let childrenExpr = transformChildrenIfListUpper ~loc ~mapper children in + let recursivelyTransformedArgsForMake = + argsForMake + |> List.map (fun (label, expression) -> + (label, mapper.expr mapper expression)) + in + let childrenArg = ref None in + let args = + recursivelyTransformedArgsForMake + @ (match childrenExpr with + | Exact children -> [(labelled "children", children)] + | ListLiteral {pexp_desc = Pexp_array list} when list = [] -> [] + | ListLiteral expression -> + (* this is a hack to support react components that introspect into their children *) + childrenArg := Some expression; + [ + ( labelled "children", + Exp.ident ~loc {loc; txt = Ldot (Lident "React", "null")} ); + ]) + @ [(nolabel, Exp.construct ~loc {loc; txt = Lident "()"} None)] + in + let isCap str = String.capitalize_ascii str = str in + let ident = + match modulePath with + | Lident _ -> Ldot (modulePath, "make") + | Ldot (_modulePath, value) as fullPath when isCap value -> + Ldot (fullPath, "make") + | modulePath -> modulePath + in + let propsIdent = + match ident with + | Lident path -> Lident (path ^ "Props") + | Ldot (ident, path) -> Ldot (ident, path ^ "Props") + | _ -> + React_jsx_common.raiseError ~loc + "JSX name can't be the result of function applications" + in + let props = + Exp.apply ~attrs ~loc (Exp.ident ~loc {loc; txt = propsIdent}) args + in + (* handle key, ref, children *) + (* React.createElement(Component.make, props, ...children) *) + match !childrenArg with + | None -> + Exp.apply ~loc ~attrs + (Exp.ident ~loc {loc; txt = Ldot (Lident "React", "createElement")}) + [(nolabel, Exp.ident ~loc {txt = ident; loc}); (nolabel, props)] + | Some children -> + Exp.apply ~loc ~attrs + (Exp.ident ~loc + {loc; txt = Ldot (Lident "React", "createElementVariadic")}) + [ + (nolabel, Exp.ident ~loc {txt = ident; loc}); + (nolabel, props); + (nolabel, children); + ] + in + + let transformLowercaseCall3 mapper loc attrs callArguments id = + let children, nonChildrenProps = extractChildren ~loc callArguments in + let componentNameExpr = constantString ~loc id in + let childrenExpr = transformChildrenIfList ~loc ~mapper children in + let createElementCall = + match children with + (* [@JSX] div(~children=[a]), coming from
<div> a </div>
*) + | { + pexp_desc = + ( Pexp_construct ({txt = Lident "::"}, Some {pexp_desc = Pexp_tuple _}) + | Pexp_construct ({txt = Lident "[]"}, None) ); + } -> + "createDOMElementVariadic" + (* [@JSX] div(~children= value), coming from
<div> ...(value) </div>
*) + | {pexp_loc} -> + React_jsx_common.raiseError ~loc:pexp_loc + "A spread as a DOM element's children don't make sense written \ + together. You can simply remove the spread." + in + let args = + match nonChildrenProps with + | [_justTheUnitArgumentAtEnd] -> + [ + (* "div" *) + (nolabel, componentNameExpr); + (* [|moreCreateElementCallsHere|] *) + (nolabel, childrenExpr); + ] + | nonEmptyProps -> + let propsCall = + Exp.apply ~loc + (Exp.ident ~loc {loc; txt = Ldot (Lident "ReactDOMRe", "domProps")}) + (nonEmptyProps + |> List.map (fun (label, expression) -> + (label, mapper.expr mapper expression))) + in + [ + (* "div" *) + (nolabel, componentNameExpr); + (* ReactDOMRe.props(~className=blabla, ~foo=bar, ()) *) + (labelled "props", propsCall); + (* [|moreCreateElementCallsHere|] *) + (nolabel, childrenExpr); + ] + in + Exp.apply + ~loc (* throw away the [@JSX] attribute and keep the others, if any *) + ~attrs + (* ReactDOMRe.createElement *) + (Exp.ident ~loc + {loc; txt = Ldot (Lident "ReactDOMRe", createElementCall)}) + args + in + + let rec recursivelyTransformNamedArgsForMake mapper expr args newtypes = + let expr = mapper.expr mapper expr in + match expr.pexp_desc with + (* TODO: make this show up with a loc. *) + | Pexp_fun (Labelled "key", _, _, _) | Pexp_fun (Optional "key", _, _, _) -> + React_jsx_common.raiseError ~loc:expr.pexp_loc + "Key cannot be accessed inside of a component. Don't worry - you can \ + always key a component from its parent!" + | Pexp_fun (Labelled "ref", _, _, _) | Pexp_fun (Optional "ref", _, _, _) -> + React_jsx_common.raiseError ~loc:expr.pexp_loc + "Ref cannot be passed as a normal prop. Either give the prop a \ + different name or use the `forwardRef` API instead." + | Pexp_fun (arg, default, pattern, expression) + when isOptional arg || isLabelled arg -> + let () = + match (isOptional arg, pattern, default) with + | true, {ppat_desc = Ppat_constraint (_, {ptyp_desc})}, None -> ( + match ptyp_desc with + | Ptyp_constr ({txt = Lident "option"}, [_]) -> () + | _ -> + let currentType = + match ptyp_desc with + | Ptyp_constr ({txt}, []) -> + String.concat "." (Longident.flatten txt) + | Ptyp_constr ({txt}, _innerTypeArgs) -> + String.concat "." (Longident.flatten txt) ^ "(...)" + | _ -> "..." + in + Location.prerr_warning pattern.ppat_loc + (Preprocessor + (Printf.sprintf + "React: optional argument annotations must have explicit \ + `option`. Did you mean `option(%s)=?`?" + currentType))) + | _ -> () + in + let alias = + match pattern with + | {ppat_desc = Ppat_alias (_, {txt}) | Ppat_var {txt}} -> txt + | {ppat_desc = Ppat_any} -> "_" + | _ -> getLabel arg + in + let type_ = + match pattern with + | {ppat_desc = Ppat_constraint (_, type_)} -> Some type_ + | _ -> None + in + + recursivelyTransformNamedArgsForMake mapper expression + ((arg, default, pattern, alias, pattern.ppat_loc, type_) :: args) + newtypes + | Pexp_fun + ( Nolabel, + _, + {ppat_desc = Ppat_construct ({txt = Lident "()"}, _) | Ppat_any}, + _expression ) -> + (args, newtypes, None) + | Pexp_fun + ( Nolabel, + _, + { + ppat_desc = + Ppat_var {txt} | Ppat_constraint ({ppat_desc = Ppat_var {txt}}, _); + }, + _expression ) -> + (args, newtypes, Some txt) + | Pexp_fun (Nolabel, _, pattern, _expression) -> + Location.raise_errorf ~loc:pattern.ppat_loc + "React: react.component refs only support plain arguments and type \ + annotations." 
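(* A rough sketch, for illustration only, of what this traversal is expected
   to return for a typical component such as
     let make = (~name, ~age=?, ()) => ...
   Each labelled or optional argument is consed onto `args` (so the most
   recently seen one comes first) and the trailing unit ends the walk with no
   forwardRef name, giving roughly
     ([(Optional "age", None, <pat>, "age", <loc>, None);
       (Labelled "name", None, <pat>, "name", <loc>, None)],
      [], None)
   A trailing plain argument instead ends the walk with its name as the
   forwardRef result, e.g. `theRef` yields `Some "theRef"`. *)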
+ | Pexp_newtype (label, expression) -> + recursivelyTransformNamedArgsForMake mapper expression args + (label :: newtypes) + | Pexp_constraint (expression, _typ) -> + recursivelyTransformNamedArgsForMake mapper expression args newtypes + | _ -> (args, newtypes, None) + in + + let argToType types (name, default, _noLabelName, _alias, loc, type_) = + match (type_, name, default) with + | Some {ptyp_desc = Ptyp_constr ({txt = Lident "option"}, [type_])}, name, _ + when isOptional name -> + ( getLabel name, + [], + { + type_ with + ptyp_desc = + Ptyp_constr ({loc = type_.ptyp_loc; txt = optionIdent}, [type_]); + } ) + :: types + | Some type_, name, Some _default -> + ( getLabel name, + [], + { + ptyp_desc = Ptyp_constr ({loc; txt = optionIdent}, [type_]); + ptyp_loc = loc; + ptyp_attributes = []; + } ) + :: types + | Some type_, name, _ -> (getLabel name, [], type_) :: types + | None, name, _ when isOptional name -> + ( getLabel name, + [], + { + ptyp_desc = + Ptyp_constr + ( {loc; txt = optionIdent}, + [ + { + ptyp_desc = Ptyp_var (safeTypeFromValue name); + ptyp_loc = loc; + ptyp_attributes = []; + }; + ] ); + ptyp_loc = loc; + ptyp_attributes = []; + } ) + :: types + | None, name, _ when isLabelled name -> + ( getLabel name, + [], + { + ptyp_desc = Ptyp_var (safeTypeFromValue name); + ptyp_loc = loc; + ptyp_attributes = []; + } ) + :: types + | _ -> types + in + + let argToConcreteType types (name, loc, type_) = + match name with + | name when isLabelled name -> (getLabel name, [], type_) :: types + | name when isOptional name -> + (getLabel name, [], Typ.constr ~loc {loc; txt = optionIdent} [type_]) + :: types + | _ -> types + in + + let nestedModules = ref [] in + let transformStructureItem mapper item = + match item with + (* external *) + | { + pstr_loc; + pstr_desc = + Pstr_primitive + ({pval_name = {txt = fnName}; pval_attributes; pval_type} as + value_description); + } as pstr -> ( + match List.filter React_jsx_common.hasAttr pval_attributes with + | [] -> [item] + | [_] -> + let rec getPropTypes types ({ptyp_loc; ptyp_desc} as fullType) = + match ptyp_desc with + | Ptyp_arrow (name, type_, ({ptyp_desc = Ptyp_arrow _} as rest)) + when isLabelled name || isOptional name -> + getPropTypes ((name, ptyp_loc, type_) :: types) rest + | Ptyp_arrow (Nolabel, _type, rest) -> getPropTypes types rest + | Ptyp_arrow (name, type_, returnValue) + when isLabelled name || isOptional name -> + (returnValue, (name, returnValue.ptyp_loc, type_) :: types) + | _ -> (fullType, types) + in + let innerType, propTypes = getPropTypes [] pval_type in + let namedTypeList = List.fold_left argToConcreteType [] propTypes in + let pluckLabelAndLoc (label, loc, type_) = + (label, None (* default *), loc, Some type_) + in + let retPropsType = makePropsType ~loc:pstr_loc namedTypeList in + let externalPropsDecl = + makePropsExternal fnName pstr_loc + ((optional "key", None, pstr_loc, Some (keyType pstr_loc)) + :: List.map pluckLabelAndLoc propTypes) + retPropsType + in + (* can't be an arrow because it will defensively uncurry *) + let newExternalType = + Ptyp_constr + ( {loc = pstr_loc; txt = Ldot (Lident "React", "componentLike")}, + [retPropsType; innerType] ) + in + let newStructure = + { + pstr with + pstr_desc = + Pstr_primitive + { + value_description with + pval_type = {pval_type with ptyp_desc = newExternalType}; + pval_attributes = List.filter otherAttrsPure pval_attributes; + }; + } + in + [externalPropsDecl; newStructure] + | _ -> + React_jsx_common.raiseError ~loc:pstr_loc + "Only one react.component 
call can exist on a component at one time") + (* let component = ... *) + | {pstr_loc; pstr_desc = Pstr_value (recFlag, valueBindings)} -> ( + let fileName = filenameFromLoc pstr_loc in + let emptyLoc = Location.in_file fileName in + let mapBinding binding = + if React_jsx_common.hasAttrOnBinding binding then + let bindingLoc = binding.pvb_loc in + let bindingPatLoc = binding.pvb_pat.ppat_loc in + let binding = + { + binding with + pvb_pat = {binding.pvb_pat with ppat_loc = emptyLoc}; + pvb_loc = emptyLoc; + } + in + let fnName = getFnName binding.pvb_pat in + let internalFnName = fnName ^ "$Internal" in + let fullModuleName = makeModuleName fileName !nestedModules fnName in + let modifiedBindingOld binding = + let expression = binding.pvb_expr in + (* TODO: there is a long-tail of unsupported features inside of blocks - Pexp_letmodule , Pexp_letexception , Pexp_ifthenelse *) + let rec spelunkForFunExpression expression = + match expression with + (* let make = (~prop) => ... *) + | {pexp_desc = Pexp_fun _} | {pexp_desc = Pexp_newtype _} -> + expression + (* let make = {let foo = bar in (~prop) => ...} *) + | {pexp_desc = Pexp_let (_recursive, _vbs, returnExpression)} -> + (* here's where we spelunk! *) + spelunkForFunExpression returnExpression + (* let make = React.forwardRef((~prop) => ...) *) + | { + pexp_desc = + Pexp_apply + (_wrapperExpression, [(Nolabel, innerFunctionExpression)]); + } -> + spelunkForFunExpression innerFunctionExpression + | { + pexp_desc = + Pexp_sequence (_wrapperExpression, innerFunctionExpression); + } -> + spelunkForFunExpression innerFunctionExpression + | {pexp_desc = Pexp_constraint (innerFunctionExpression, _typ)} -> + spelunkForFunExpression innerFunctionExpression + | {pexp_loc} -> + React_jsx_common.raiseError ~loc:pexp_loc + "react.component calls can only be on function definitions \ + or component wrappers (forwardRef, memo)." + in + spelunkForFunExpression expression + in + let modifiedBinding binding = + let hasApplication = ref false in + let wrapExpressionWithBinding expressionFn expression = + Vb.mk ~loc:bindingLoc + ~attrs:(List.filter otherAttrsPure binding.pvb_attributes) + (Pat.var ~loc:bindingPatLoc {loc = bindingPatLoc; txt = fnName}) + (expressionFn expression) + in + let expression = binding.pvb_expr in + let unerasableIgnoreExp exp = + { + exp with + pexp_attributes = + unerasableIgnore emptyLoc :: exp.pexp_attributes; + } + in + (* TODO: there is a long-tail of unsupported features inside of blocks - Pexp_letmodule , Pexp_letexception , Pexp_ifthenelse *) + let rec spelunkForFunExpression expression = + match expression with + (* let make = (~prop) => ... with no final unit *) + | { + pexp_desc = + Pexp_fun + ( ((Labelled _ | Optional _) as label), + default, + pattern, + ({pexp_desc = Pexp_fun _} as internalExpression) ); + } -> + let wrap, hasUnit, exp = + spelunkForFunExpression internalExpression + in + ( wrap, + hasUnit, + unerasableIgnoreExp + { + expression with + pexp_desc = Pexp_fun (label, default, pattern, exp); + } ) + (* let make = (()) => ... *) + (* let make = (_) => ... *) + | { + pexp_desc = + Pexp_fun + ( Nolabel, + _default, + { + ppat_desc = + Ppat_construct ({txt = Lident "()"}, _) | Ppat_any; + }, + _internalExpression ); + } -> + ((fun a -> a), true, expression) + (* let make = (~prop) => ... *) + | { + pexp_desc = + Pexp_fun + ( (Labelled _ | Optional _), + _default, + _pattern, + _internalExpression ); + } -> + ((fun a -> a), false, unerasableIgnoreExp expression) + (* let make = (prop) => ... 
*) + | { + pexp_desc = + Pexp_fun (_nolabel, _default, pattern, _internalExpression); + } -> + if hasApplication.contents then + ((fun a -> a), false, unerasableIgnoreExp expression) + else + Location.raise_errorf ~loc:pattern.ppat_loc + "React: props need to be labelled arguments.\n\ + \ If you are working with refs be sure to wrap with \ + React.forwardRef.\n\ + \ If your component doesn't have any props use () or _ \ + instead of a name." + (* let make = {let foo = bar in (~prop) => ...} *) + | {pexp_desc = Pexp_let (recursive, vbs, internalExpression)} -> + (* here's where we spelunk! *) + let wrap, hasUnit, exp = + spelunkForFunExpression internalExpression + in + ( wrap, + hasUnit, + {expression with pexp_desc = Pexp_let (recursive, vbs, exp)} + ) + (* let make = React.forwardRef((~prop) => ...) *) + | { + pexp_desc = + Pexp_apply (wrapperExpression, [(Nolabel, internalExpression)]); + } -> + let () = hasApplication := true in + let _, hasUnit, exp = + spelunkForFunExpression internalExpression + in + ( (fun exp -> Exp.apply wrapperExpression [(nolabel, exp)]), + hasUnit, + exp ) + | { + pexp_desc = Pexp_sequence (wrapperExpression, internalExpression); + } -> + let wrap, hasUnit, exp = + spelunkForFunExpression internalExpression + in + ( wrap, + hasUnit, + { + expression with + pexp_desc = Pexp_sequence (wrapperExpression, exp); + } ) + | e -> ((fun a -> a), false, e) + in + let wrapExpression, hasUnit, expression = + spelunkForFunExpression expression + in + (wrapExpressionWithBinding wrapExpression, hasUnit, expression) + in + let bindingWrapper, hasUnit, expression = modifiedBinding binding in + let reactComponentAttribute = + try Some (List.find React_jsx_common.hasAttr binding.pvb_attributes) + with Not_found -> None + in + let _attr_loc, payload = + match reactComponentAttribute with + | Some (loc, payload) -> (loc.loc, Some payload) + | None -> (emptyLoc, None) + in + let props = getPropsAttr payload in + (* do stuff here! 
*) + let namedArgList, newtypes, forwardRef = + recursivelyTransformNamedArgsForMake mapper + (modifiedBindingOld binding) + [] [] + in + let namedArgListWithKeyAndRef = + ( optional "key", + None, + Pat.var {txt = "key"; loc = emptyLoc}, + "key", + emptyLoc, + Some (keyType emptyLoc) ) + :: namedArgList + in + let namedArgListWithKeyAndRef = + match forwardRef with + | Some _ -> + ( optional "ref", + None, + Pat.var {txt = "key"; loc = emptyLoc}, + "ref", + emptyLoc, + None ) + :: namedArgListWithKeyAndRef + | None -> namedArgListWithKeyAndRef + in + let namedArgListWithKeyAndRefForNew = + match forwardRef with + | Some txt -> + namedArgList + @ [ + ( nolabel, + None, + Pat.var {txt; loc = emptyLoc}, + txt, + emptyLoc, + None ); + ] + | None -> namedArgList + in + let pluckArg (label, _, _, alias, loc, _) = + let labelString = + match label with + | label when isOptional label || isLabelled label -> + getLabel label + | _ -> "" + in + ( label, + match labelString with + | "" -> Exp.ident ~loc {txt = Lident alias; loc} + | labelString -> + Exp.apply ~loc + (Exp.ident ~loc {txt = Lident "##"; loc}) + [ + (nolabel, Exp.ident ~loc {txt = Lident props.propsName; loc}); + (nolabel, Exp.ident ~loc {txt = Lident labelString; loc}); + ] ) + in + let namedTypeList = List.fold_left argToType [] namedArgList in + let loc = emptyLoc in + let externalArgs = + (* translate newtypes to type variables *) + List.fold_left + (fun args newtype -> + List.map + (fun (a, b, c, d, e, maybeTyp) -> + match maybeTyp with + | Some typ -> + (a, b, c, d, e, Some (newtypeToVar newtype.txt typ)) + | None -> (a, b, c, d, e, None)) + args) + namedArgListWithKeyAndRef newtypes + in + let externalTypes = + (* translate newtypes to type variables *) + List.fold_left + (fun args newtype -> + List.map + (fun (a, b, typ) -> (a, b, newtypeToVar newtype.txt typ)) + args) + namedTypeList newtypes + in + let externalDecl = + makeExternalDecl fnName loc externalArgs externalTypes + in + let innerExpressionArgs = + List.map pluckArg namedArgListWithKeyAndRefForNew + @ + if hasUnit then + [(Nolabel, Exp.construct {loc; txt = Lident "()"} None)] + else [] + in + let innerExpression = + Exp.apply + (Exp.ident + { + loc; + txt = + Lident + (match recFlag with + | Recursive -> internalFnName + | Nonrecursive -> fnName); + }) + innerExpressionArgs + in + let innerExpressionWithRef = + match forwardRef with + | Some txt -> + { + innerExpression with + pexp_desc = + Pexp_fun + ( nolabel, + None, + { + ppat_desc = Ppat_var {txt; loc = emptyLoc}; + ppat_loc = emptyLoc; + ppat_attributes = []; + }, + innerExpression ); + } + | None -> innerExpression + in + let fullExpression = + Exp.fun_ nolabel None + { + ppat_desc = + Ppat_constraint + ( makePropsName ~loc:emptyLoc props.propsName, + makePropsType ~loc:emptyLoc externalTypes ); + ppat_loc = emptyLoc; + ppat_attributes = []; + } + innerExpressionWithRef + in + let fullExpression = + match fullModuleName with + | "" -> fullExpression + | txt -> + Exp.let_ Nonrecursive + [ + Vb.mk ~loc:emptyLoc + (Pat.var ~loc:emptyLoc {loc = emptyLoc; txt}) + fullExpression; + ] + (Exp.ident ~loc:emptyLoc {loc = emptyLoc; txt = Lident txt}) + in + let bindings, newBinding = + match recFlag with + | Recursive -> + ( [ + bindingWrapper + (Exp.let_ ~loc:emptyLoc Recursive + [ + makeNewBinding binding expression internalFnName; + Vb.mk + (Pat.var {loc = emptyLoc; txt = fnName}) + fullExpression; + ] + (Exp.ident {loc = emptyLoc; txt = Lident fnName})); + ], + None ) + | Nonrecursive -> + ( [{binding with pvb_expr 
= expression}], + Some (bindingWrapper fullExpression) ) + in + (Some externalDecl, bindings, newBinding) + else (None, [binding], None) + in + let structuresAndBinding = List.map mapBinding valueBindings in + let otherStructures (extern, binding, newBinding) + (externs, bindings, newBindings) = + let externs = + match extern with + | Some extern -> extern :: externs + | None -> externs + in + let newBindings = + match newBinding with + | Some newBinding -> newBinding :: newBindings + | None -> newBindings + in + (externs, binding @ bindings, newBindings) + in + let externs, bindings, newBindings = + List.fold_right otherStructures structuresAndBinding ([], [], []) + in + externs + @ [{pstr_loc; pstr_desc = Pstr_value (recFlag, bindings)}] + @ + match newBindings with + | [] -> [] + | newBindings -> + [{pstr_loc = emptyLoc; pstr_desc = Pstr_value (recFlag, newBindings)}]) + | _ -> [item] + in + + let transformSignatureItem _mapper item = + match item with + | { + psig_loc; + psig_desc = + Psig_value + ({pval_name = {txt = fnName}; pval_attributes; pval_type} as + psig_desc); + } as psig -> ( + match List.filter React_jsx_common.hasAttr pval_attributes with + | [] -> [item] + | [_] -> + let rec getPropTypes types ({ptyp_loc; ptyp_desc} as fullType) = + match ptyp_desc with + | Ptyp_arrow (name, type_, ({ptyp_desc = Ptyp_arrow _} as rest)) + when isOptional name || isLabelled name -> + getPropTypes ((name, ptyp_loc, type_) :: types) rest + | Ptyp_arrow (Nolabel, _type, rest) -> getPropTypes types rest + | Ptyp_arrow (name, type_, returnValue) + when isOptional name || isLabelled name -> + (returnValue, (name, returnValue.ptyp_loc, type_) :: types) + | _ -> (fullType, types) + in + let innerType, propTypes = getPropTypes [] pval_type in + let namedTypeList = List.fold_left argToConcreteType [] propTypes in + let pluckLabelAndLoc (label, loc, type_) = + (label, None, loc, Some type_) + in + let retPropsType = makePropsType ~loc:psig_loc namedTypeList in + let externalPropsDecl = + makePropsExternalSig fnName psig_loc + ((optional "key", None, psig_loc, Some (keyType psig_loc)) + :: List.map pluckLabelAndLoc propTypes) + retPropsType + in + (* can't be an arrow because it will defensively uncurry *) + let newExternalType = + Ptyp_constr + ( {loc = psig_loc; txt = Ldot (Lident "React", "componentLike")}, + [retPropsType; innerType] ) + in + let newStructure = + { + psig with + psig_desc = + Psig_value + { + psig_desc with + pval_type = {pval_type with ptyp_desc = newExternalType}; + pval_attributes = List.filter otherAttrsPure pval_attributes; + }; + } + in + [externalPropsDecl; newStructure] + | _ -> + React_jsx_common.raiseError ~loc:psig_loc + "Only one react.component call can exist on a component at one time") + | _ -> [item] + in + + let transformJsxCall mapper callExpression callArguments attrs = + match callExpression.pexp_desc with + | Pexp_ident caller -> ( + match caller with + | {txt = Lident "createElement"; loc} -> + React_jsx_common.raiseError ~loc + "JSX: `createElement` should be preceeded by a module name." 
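(* A rough sketch, for illustration only, of the overall V3 rewrite handled by
   the cases below: an uppercase element such as
     <Foo prop1=foo prop2=bar />
   i.e. [@JSX] Foo.createElement(~prop1=foo, ~prop2=bar, ~children=[], ())
   is expected to become, roughly,
     React.createElement(Foo.make, Foo.makeProps(~prop1=foo, ~prop2=bar, ()))
   and when there are several children they are collected into an array and
   passed through React.createElementVariadic instead (see
   transformUppercaseCall3 above). *)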
+ (* Foo.createElement(~prop1=foo, ~prop2=bar, ~children=[], ()) *) + | {loc; txt = Ldot (modulePath, ("createElement" | "make"))} -> ( + match config.React_jsx_common.version with + | 3 -> + transformUppercaseCall3 modulePath mapper loc attrs callExpression + callArguments + | _ -> + React_jsx_common.raiseError ~loc "JSX: the JSX version must be 3") + (* div(~prop1=foo, ~prop2=bar, ~children=[bla], ()) *) + (* turn that into + ReactDOMRe.createElement(~props=ReactDOMRe.props(~props1=foo, ~props2=bar, ()), [|bla|]) *) + | {loc; txt = Lident id} -> ( + match config.version with + | 3 -> transformLowercaseCall3 mapper loc attrs callArguments id + | _ -> React_jsx_common.raiseError ~loc "JSX: the JSX version must be 3" + ) + | {txt = Ldot (_, anythingNotCreateElementOrMake); loc} -> + React_jsx_common.raiseError ~loc + "JSX: the JSX attribute should be attached to a \ + `YourModuleName.createElement` or `YourModuleName.make` call. We \ + saw `%s` instead" + anythingNotCreateElementOrMake + | {txt = Lapply _; loc} -> + (* don't think there's ever a case where this is reached *) + React_jsx_common.raiseError ~loc + "JSX: encountered a weird case while processing the code. Please \ + report this!") + | _ -> + React_jsx_common.raiseError ~loc:callExpression.pexp_loc + "JSX: `createElement` should be preceeded by a simple, direct module \ + name." + in + + let expr mapper expression = + match expression with + (* Does the function application have the @JSX attribute? *) + | {pexp_desc = Pexp_apply (callExpression, callArguments); pexp_attributes} + -> ( + let jsxAttribute, nonJSXAttributes = + List.partition + (fun (attribute, _) -> attribute.txt = "JSX") + pexp_attributes + in + match (jsxAttribute, nonJSXAttributes) with + (* no JSX attribute *) + | [], _ -> default_mapper.expr mapper expression + | _, nonJSXAttributes -> + transformJsxCall mapper callExpression callArguments nonJSXAttributes) + (* is it a list with jsx attribute? 
Reason <>foo desugars to [@JSX][foo]*) + | { + pexp_desc = + ( Pexp_construct + ({txt = Lident "::"; loc}, Some {pexp_desc = Pexp_tuple _}) + | Pexp_construct ({txt = Lident "[]"; loc}, None) ); + pexp_attributes; + } as listItems -> ( + let jsxAttribute, nonJSXAttributes = + List.partition + (fun (attribute, _) -> attribute.txt = "JSX") + pexp_attributes + in + match (jsxAttribute, nonJSXAttributes) with + (* no JSX attribute *) + | [], _ -> default_mapper.expr mapper expression + | _, nonJSXAttributes -> + let loc = {loc with loc_ghost = true} in + let fragment = + Exp.ident ~loc {loc; txt = Ldot (Lident "ReasonReact", "fragment")} + in + let childrenExpr = transformChildrenIfList ~loc ~mapper listItems in + let args = + [ + (* "div" *) + (nolabel, fragment); + (* [|moreCreateElementCallsHere|] *) + (nolabel, childrenExpr); + ] + in + Exp.apply + ~loc (* throw away the [@JSX] attribute and keep the others, if any *) + ~attrs:nonJSXAttributes + (* ReactDOMRe.createElement *) + (Exp.ident ~loc + {loc; txt = Ldot (Lident "ReactDOMRe", "createElement")}) + args) + (* Delegate to the default mapper, a deep identity traversal *) + | e -> default_mapper.expr mapper e + in + + let module_binding mapper module_binding = + let _ = nestedModules := module_binding.pmb_name.txt :: !nestedModules in + let mapped = default_mapper.module_binding mapper module_binding in + let () = + match !nestedModules with + | _ :: rest -> nestedModules := rest + | [] -> () + in + mapped + in + (expr, module_binding, transformSignatureItem, transformStructureItem) diff --git a/res_syntax/cli/reactjs_jsx_v4.ml b/res_syntax/cli/reactjs_jsx_v4.ml new file mode 100644 index 0000000000..ef478eccfe --- /dev/null +++ b/res_syntax/cli/reactjs_jsx_v4.ml @@ -0,0 +1,1436 @@ +open Ast_helper +open Ast_mapper +open Asttypes +open Parsetree +open Longident + +let nolabel = Nolabel + +let labelled str = Labelled str + +let isOptional str = + match str with + | Optional _ -> true + | _ -> false + +let isLabelled str = + match str with + | Labelled _ -> true + | _ -> false + +let isForwardRef = function + | {pexp_desc = Pexp_ident {txt = Ldot (Lident "React", "forwardRef")}} -> true + | _ -> false + +let getLabel str = + match str with + | Optional str | Labelled str -> str + | Nolabel -> "" + +let optionalAttr = ({txt = "ns.optional"; loc = Location.none}, PStr []) +let optionalAttrs = [optionalAttr] + +let constantString ~loc str = + Ast_helper.Exp.constant ~loc (Pconst_string (str, None)) + +(* {} empty record *) +let emptyRecord ~loc = Exp.record ~loc [] None + +let unitExpr ~loc = Exp.construct ~loc (Location.mkloc (Lident "()") loc) None + +let safeTypeFromValue valueStr = + let valueStr = getLabel valueStr in + if valueStr = "" || (valueStr.[0] [@doesNotRaise]) <> '_' then valueStr + else "T" ^ valueStr + +let refType loc = + Typ.constr ~loc + {loc; txt = Ldot (Ldot (Lident "ReactDOM", "Ref"), "currentDomRef")} + [] + +type 'a children = ListLiteral of 'a | Exact of 'a + +(* if children is a list, convert it to an array while mapping each element. 
If not, just map over it, as usual *) +let transformChildrenIfListUpper ~mapper theList = + let rec transformChildren_ theList accum = + (* not in the sense of converting a list to an array; convert the AST + reprensentation of a list to the AST reprensentation of an array *) + match theList with + | {pexp_desc = Pexp_construct ({txt = Lident "[]"}, None)} -> ( + match accum with + | [singleElement] -> Exact singleElement + | accum -> ListLiteral (Exp.array (List.rev accum))) + | { + pexp_desc = + Pexp_construct + ({txt = Lident "::"}, Some {pexp_desc = Pexp_tuple [v; acc]}); + } -> + transformChildren_ acc (mapper.expr mapper v :: accum) + | notAList -> Exact (mapper.expr mapper notAList) + in + transformChildren_ theList [] + +let transformChildrenIfList ~mapper theList = + let rec transformChildren_ theList accum = + (* not in the sense of converting a list to an array; convert the AST + reprensentation of a list to the AST reprensentation of an array *) + match theList with + | {pexp_desc = Pexp_construct ({txt = Lident "[]"}, None)} -> + Exp.array (List.rev accum) + | { + pexp_desc = + Pexp_construct + ({txt = Lident "::"}, Some {pexp_desc = Pexp_tuple [v; acc]}); + } -> + transformChildren_ acc (mapper.expr mapper v :: accum) + | notAList -> mapper.expr mapper notAList + in + transformChildren_ theList [] + +let extractChildren ?(removeLastPositionUnit = false) ~loc propsAndChildren = + let rec allButLast_ lst acc = + match lst with + | [] -> [] + | [(Nolabel, {pexp_desc = Pexp_construct ({txt = Lident "()"}, None)})] -> + acc + | (Nolabel, {pexp_loc}) :: _rest -> + React_jsx_common.raiseError ~loc:pexp_loc + "JSX: found non-labelled argument before the last position" + | arg :: rest -> allButLast_ rest (arg :: acc) + in + let allButLast lst = allButLast_ lst [] |> List.rev in + match + List.partition + (fun (label, _) -> label = labelled "children") + propsAndChildren + with + | [], props -> + (* no children provided? Place a placeholder list *) + ( Exp.construct {loc = Location.none; txt = Lident "[]"} None, + if removeLastPositionUnit then allButLast props else props ) + | [(_, childrenExpr)], props -> + (childrenExpr, if removeLastPositionUnit then allButLast props else props) + | _ -> + React_jsx_common.raiseError ~loc + "JSX: somehow there's more than one `children` label" + +let merlinFocus = ({loc = Location.none; txt = "merlin.focus"}, PStr []) + +(* Helper method to filter out any attribute that isn't [@react.component] *) +let otherAttrsPure (loc, _) = loc.txt <> "react.component" + +(* Finds the name of the variable the binding is assigned to, otherwise raises Invalid_argument *) +let rec getFnName binding = + match binding with + | {ppat_desc = Ppat_var {txt}} -> txt + | {ppat_desc = Ppat_constraint (pat, _)} -> getFnName pat + | {ppat_loc} -> + React_jsx_common.raiseError ~loc:ppat_loc + "react.component calls cannot be destructured." + +let makeNewBinding binding expression newName = + match binding with + | {pvb_pat = {ppat_desc = Ppat_var ppat_var} as pvb_pat} -> + { + binding with + pvb_pat = + {pvb_pat with ppat_desc = Ppat_var {ppat_var with txt = newName}}; + pvb_expr = expression; + pvb_attributes = [merlinFocus]; + } + | {pvb_loc} -> + React_jsx_common.raiseError ~loc:pvb_loc + "react.component calls cannot be destructured." 
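(* A minimal, runnable sketch, for illustration only, of the naming rule
   implemented by safeTypeFromValue above: each prop label is turned into a
   type-variable name, and labels starting with an underscore get a "T"
   prefix. These names later become the type parameters of the generated
   props record. The helpers are restated against a local label type so the
   sketch does not depend on compiler-libs. *)
type sketch_label = SketchNolabel | SketchLabelled of string | SketchOptional of string

let sketch_get_label = function
  | SketchLabelled s | SketchOptional s -> s
  | SketchNolabel -> ""

let sketch_safe_type_from_value label =
  let s = sketch_get_label label in
  if s = "" || s.[0] <> '_' then s else "T" ^ s

let () =
  (* A leading underscore gets the "T" prefix; other labels pass through. *)
  assert (sketch_safe_type_from_value (SketchLabelled "name") = "name");
  assert (sketch_safe_type_from_value (SketchOptional "_open") = "T_open")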
+ +(* Lookup the filename from the location information on the AST node and turn it into a valid module identifier *) +let filenameFromLoc (pstr_loc : Location.t) = + let fileName = + match pstr_loc.loc_start.pos_fname with + | "" -> !Location.input_name + | fileName -> fileName + in + let fileName = + try Filename.chop_extension (Filename.basename fileName) + with Invalid_argument _ -> fileName + in + let fileName = String.capitalize_ascii fileName in + fileName + +(* Build a string representation of a module name with segments separated by $ *) +let makeModuleName fileName nestedModules fnName = + let fullModuleName = + match (fileName, nestedModules, fnName) with + (* TODO: is this even reachable? It seems like the fileName always exists *) + | "", nestedModules, "make" -> nestedModules + | "", nestedModules, fnName -> List.rev (fnName :: nestedModules) + | fileName, nestedModules, "make" -> fileName :: List.rev nestedModules + | fileName, nestedModules, fnName -> + fileName :: List.rev (fnName :: nestedModules) + in + let fullModuleName = String.concat "$" fullModuleName in + fullModuleName + +(* + AST node builders + These functions help us build AST nodes that are needed when transforming a [@react.component] into a + constructor and a props external + *) + +(* make record from props and spread props if exists *) +let recordFromProps ~loc ~removeKey callArguments = + let spreadPropsLabel = "_spreadProps" in + let rec removeLastPositionUnitAux props acc = + match props with + | [] -> acc + | [(Nolabel, {pexp_desc = Pexp_construct ({txt = Lident "()"}, None)})] -> + acc + | (Nolabel, {pexp_loc}) :: _rest -> + React_jsx_common.raiseError ~loc:pexp_loc + "JSX: found non-labelled argument before the last position" + | ((Labelled txt, {pexp_loc}) as prop) :: rest + | ((Optional txt, {pexp_loc}) as prop) :: rest -> + if txt = spreadPropsLabel then + match acc with + | [] -> removeLastPositionUnitAux rest (prop :: acc) + | _ -> + React_jsx_common.raiseError ~loc:pexp_loc + "JSX: use {...p} {x: v} not {x: v} {...p} \n\ + \ multiple spreads {...p} {...p} not allowed." 
+ else removeLastPositionUnitAux rest (prop :: acc) + in + let props, propsToSpread = + removeLastPositionUnitAux callArguments [] + |> List.rev + |> List.partition (fun (label, _) -> label <> labelled "_spreadProps") + in + let props = + if removeKey then + props |> List.filter (fun (arg_label, _) -> "key" <> getLabel arg_label) + else props + in + + let processProp (arg_label, ({pexp_loc} as pexpr)) = + (* In case filed label is "key" only then change expression to option *) + let id = getLabel arg_label in + if isOptional arg_label then + ( {txt = Lident id; loc = pexp_loc}, + {pexpr with pexp_attributes = optionalAttrs} ) + else ({txt = Lident id; loc = pexp_loc}, pexpr) + in + let fields = props |> List.map processProp in + let spreadFields = + propsToSpread |> List.map (fun (_, expression) -> expression) + in + match (fields, spreadFields) with + | [], [spreadProps] | [], spreadProps :: _ -> spreadProps + | _, [] -> + { + pexp_desc = Pexp_record (fields, None); + pexp_loc = loc; + pexp_attributes = []; + } + | _, [spreadProps] + (* take the first spreadProps only *) + | _, spreadProps :: _ -> + { + pexp_desc = Pexp_record (fields, Some spreadProps); + pexp_loc = loc; + pexp_attributes = []; + } + +(* make type params for make fn arguments *) +(* let make = ({id, name, children}: props<'id, 'name, 'children>) *) +let makePropsTypeParamsTvar namedTypeList = + namedTypeList + |> List.filter_map (fun (_isOptional, label, _, _interiorType) -> + if label = "key" then None + else Some (Typ.var @@ safeTypeFromValue (Labelled label))) + +let stripOption coreType = + match coreType with + | {ptyp_desc = Ptyp_constr ({txt = Lident "option"}, coreTypes)} -> + List.nth_opt coreTypes 0 [@doesNotRaise] + | _ -> Some coreType + +let stripJsNullable coreType = + match coreType with + | { + ptyp_desc = + Ptyp_constr ({txt = Ldot (Ldot (Lident "Js", "Nullable"), "t")}, coreTypes); + } -> + List.nth_opt coreTypes 0 [@doesNotRaise] + | _ -> Some coreType + +(* Make type params of the props type *) +(* (Sig) let make: React.componentLike, React.element> *) +(* (Str) let make = ({x, _}: props<'x>) => body *) +(* (Str) external make: React.componentLike, React.element> = "default" *) +let makePropsTypeParams ?(stripExplicitOption = false) + ?(stripExplicitJsNullableOfRef = false) namedTypeList = + namedTypeList + |> List.filter_map (fun (isOptional, label, _, interiorType) -> + if label = "key" then None + (* TODO: Worth thinking how about "ref_" or "_ref" usages *) + else if label = "ref" then + (* + If ref has a type annotation then use it, else `ReactDOM.Ref.currentDomRef. + For example, if JSX ppx is used for React Native, type would be different. + *) + match interiorType with + | {ptyp_desc = Ptyp_var "ref"} -> Some (refType Location.none) + | _ -> + (* Strip explicit Js.Nullable.t in case of forwardRef *) + if stripExplicitJsNullableOfRef then stripJsNullable interiorType + else Some interiorType + (* Strip the explicit option type in implementation *) + (* let make = (~x: option=?) => ... 
*) + else if isOptional && stripExplicitOption then stripOption interiorType + else Some interiorType) + +let makeLabelDecls ~loc namedTypeList = + namedTypeList + |> List.map (fun (isOptional, label, _, interiorType) -> + if label = "key" then + Type.field ~loc ~attrs:optionalAttrs {txt = label; loc} interiorType + else if isOptional then + Type.field ~loc ~attrs:optionalAttrs {txt = label; loc} + (Typ.var @@ safeTypeFromValue @@ Labelled label) + else + Type.field ~loc {txt = label; loc} + (Typ.var @@ safeTypeFromValue @@ Labelled label)) + +let makeTypeDecls propsName loc namedTypeList = + let labelDeclList = makeLabelDecls ~loc namedTypeList in + (* 'id, 'className, ... *) + let params = + makePropsTypeParamsTvar namedTypeList + |> List.map (fun coreType -> (coreType, Invariant)) + in + [ + Type.mk ~loc ~params {txt = propsName; loc} + ~kind:(Ptype_record labelDeclList); + ] + +let makeTypeDeclsWithCoreType propsName loc coreType typVars = + [ + Type.mk ~loc {txt = propsName; loc} ~kind:Ptype_abstract + ~params:(typVars |> List.map (fun v -> (v, Invariant))) + ~manifest:coreType; + ] + +(* type props<'x, 'y, ...> = { x: 'x, y?: 'y, ... } *) +let makePropsRecordType ~coreTypeOfAttr ~typVarsOfCoreType propsName loc + namedTypeList = + Str.type_ Nonrecursive + (match coreTypeOfAttr with + | None -> makeTypeDecls propsName loc namedTypeList + | Some coreType -> + makeTypeDeclsWithCoreType propsName loc coreType typVarsOfCoreType) + +(* type props<'x, 'y, ...> = { x: 'x, y?: 'y, ... } *) +let makePropsRecordTypeSig ~coreTypeOfAttr ~typVarsOfCoreType propsName loc + namedTypeList = + Sig.type_ Nonrecursive + (match coreTypeOfAttr with + | None -> makeTypeDecls propsName loc namedTypeList + | Some coreType -> + makeTypeDeclsWithCoreType propsName loc coreType typVarsOfCoreType) + +let transformUppercaseCall3 ~config modulePath mapper jsxExprLoc callExprLoc + attrs callArguments = + let children, argsWithLabels = + extractChildren ~removeLastPositionUnit:true ~loc:jsxExprLoc callArguments + in + let argsForMake = argsWithLabels in + let childrenExpr = transformChildrenIfListUpper ~mapper children in + let recursivelyTransformedArgsForMake = + argsForMake + |> List.map (fun (label, expression) -> + (label, mapper.expr mapper expression)) + in + let childrenArg = ref None in + let args = + recursivelyTransformedArgsForMake + @ + match childrenExpr with + | Exact children -> [(labelled "children", children)] + | ListLiteral {pexp_desc = Pexp_array list} when list = [] -> [] + | ListLiteral expression -> ( + (* this is a hack to support react components that introspect into their children *) + childrenArg := Some expression; + match config.React_jsx_common.mode with + | "automatic" -> + [ + ( labelled "children", + Exp.apply + (Exp.ident + {txt = Ldot (Lident "React", "array"); loc = Location.none}) + [(Nolabel, expression)] ); + ] + | _ -> + [ + ( labelled "children", + Exp.ident {loc = Location.none; txt = Ldot (Lident "React", "null")} + ); + ]) + in + + let isCap str = String.capitalize_ascii str = str in + let ident ~suffix = + match modulePath with + | Lident _ -> Ldot (modulePath, suffix) + | Ldot (_modulePath, value) as fullPath when isCap value -> + Ldot (fullPath, suffix) + | modulePath -> modulePath + in + let isEmptyRecord {pexp_desc} = + match pexp_desc with + | Pexp_record (labelDecls, _) when List.length labelDecls = 0 -> true + | _ -> false + in + + (* handle key, ref, children *) + (* React.createElement(Component.make, props, ...children) *) + let record = recordFromProps 
~loc:jsxExprLoc ~removeKey:true args in + let props = + if isEmptyRecord record then emptyRecord ~loc:jsxExprLoc else record + in + let keyProp = + args |> List.filter (fun (arg_label, _) -> "key" = getLabel arg_label) + in + let makeID = + Exp.ident ~loc:callExprLoc {txt = ident ~suffix:"make"; loc = callExprLoc} + in + match config.mode with + (* The new jsx transform *) + | "automatic" -> + let jsxExpr, keyAndUnit = + match (!childrenArg, keyProp) with + | None, key :: _ -> + ( Exp.ident + {loc = Location.none; txt = Ldot (Lident "React", "jsxKeyed")}, + [key; (nolabel, unitExpr ~loc:Location.none)] ) + | None, [] -> + (Exp.ident {loc = Location.none; txt = Ldot (Lident "React", "jsx")}, []) + | Some _, key :: _ -> + ( Exp.ident + {loc = Location.none; txt = Ldot (Lident "React", "jsxsKeyed")}, + [key; (nolabel, unitExpr ~loc:Location.none)] ) + | Some _, [] -> + ( Exp.ident {loc = Location.none; txt = Ldot (Lident "React", "jsxs")}, + [] ) + in + Exp.apply ~attrs jsxExpr ([(nolabel, makeID); (nolabel, props)] @ keyAndUnit) + | _ -> ( + match (!childrenArg, keyProp) with + | None, key :: _ -> + Exp.apply ~attrs + (Exp.ident + { + loc = Location.none; + txt = Ldot (Lident "JsxPPXReactSupport", "createElementWithKey"); + }) + [key; (nolabel, makeID); (nolabel, props)] + | None, [] -> + Exp.apply ~attrs + (Exp.ident + {loc = Location.none; txt = Ldot (Lident "React", "createElement")}) + [(nolabel, makeID); (nolabel, props)] + | Some children, key :: _ -> + Exp.apply ~attrs + (Exp.ident + { + loc = Location.none; + txt = + Ldot (Lident "JsxPPXReactSupport", "createElementVariadicWithKey"); + }) + [key; (nolabel, makeID); (nolabel, props); (nolabel, children)] + | Some children, [] -> + Exp.apply ~attrs + (Exp.ident + { + loc = Location.none; + txt = Ldot (Lident "React", "createElementVariadic"); + }) + [(nolabel, makeID); (nolabel, props); (nolabel, children)]) + +let transformLowercaseCall3 ~config mapper jsxExprLoc callExprLoc attrs + callArguments id = + let componentNameExpr = constantString ~loc:callExprLoc id in + match config.React_jsx_common.mode with + (* the new jsx transform *) + | "automatic" -> + let children, nonChildrenProps = + extractChildren ~removeLastPositionUnit:true ~loc:jsxExprLoc callArguments + in + let argsForMake = nonChildrenProps in + let childrenExpr = transformChildrenIfListUpper ~mapper children in + let recursivelyTransformedArgsForMake = + argsForMake + |> List.map (fun (label, expression) -> + (label, mapper.expr mapper expression)) + in + let childrenArg = ref None in + let args = + recursivelyTransformedArgsForMake + @ + match childrenExpr with + | Exact children -> + [ + ( labelled "children", + Exp.apply ~attrs:optionalAttrs + (Exp.ident + { + txt = Ldot (Lident "ReactDOM", "someElement"); + loc = Location.none; + }) + [(Nolabel, children)] ); + ] + | ListLiteral {pexp_desc = Pexp_array list} when list = [] -> [] + | ListLiteral expression -> + (* this is a hack to support react components that introspect into their children *) + childrenArg := Some expression; + [ + ( labelled "children", + Exp.apply + (Exp.ident + {txt = Ldot (Lident "React", "array"); loc = Location.none}) + [(Nolabel, expression)] ); + ] + in + let isEmptyRecord {pexp_desc} = + match pexp_desc with + | Pexp_record (labelDecls, _) when List.length labelDecls = 0 -> true + | _ -> false + in + let record = recordFromProps ~loc:jsxExprLoc ~removeKey:true args in + let props = + if isEmptyRecord record then emptyRecord ~loc:jsxExprLoc else record + in + let keyProp = + args |> 
List.filter (fun (arg_label, _) -> "key" = getLabel arg_label) + in + let jsxExpr, keyAndUnit = + match (!childrenArg, keyProp) with + | None, key :: _ -> + ( Exp.ident + {loc = Location.none; txt = Ldot (Lident "ReactDOM", "jsxKeyed")}, + [key; (nolabel, unitExpr ~loc:Location.none)] ) + | None, [] -> + ( Exp.ident {loc = Location.none; txt = Ldot (Lident "ReactDOM", "jsx")}, + [] ) + | Some _, key :: _ -> + ( Exp.ident + {loc = Location.none; txt = Ldot (Lident "ReactDOM", "jsxsKeyed")}, + [key; (nolabel, unitExpr ~loc:Location.none)] ) + | Some _, [] -> + ( Exp.ident {loc = Location.none; txt = Ldot (Lident "ReactDOM", "jsxs")}, + [] ) + in + Exp.apply ~attrs jsxExpr + ([(nolabel, componentNameExpr); (nolabel, props)] @ keyAndUnit) + | _ -> + let children, nonChildrenProps = + extractChildren ~loc:jsxExprLoc callArguments + in + let childrenExpr = transformChildrenIfList ~mapper children in + let createElementCall = + match children with + (* [@JSX] div(~children=[a]), coming from
<div> a </div>
*) + | { + pexp_desc = + ( Pexp_construct ({txt = Lident "::"}, Some {pexp_desc = Pexp_tuple _}) + | Pexp_construct ({txt = Lident "[]"}, None) ); + } -> + "createDOMElementVariadic" + (* [@JSX] div(~children= value), coming from
<div> ...(value) </div>
*) + | {pexp_loc} -> + React_jsx_common.raiseError ~loc:pexp_loc + "A spread as a DOM element's children don't make sense written \ + together. You can simply remove the spread." + in + let args = + match nonChildrenProps with + | [_justTheUnitArgumentAtEnd] -> + [ + (* "div" *) + (nolabel, componentNameExpr); + (* [|moreCreateElementCallsHere|] *) + (nolabel, childrenExpr); + ] + | nonEmptyProps -> + let propsRecord = + recordFromProps ~loc:Location.none ~removeKey:false nonEmptyProps + in + [ + (* "div" *) + (nolabel, componentNameExpr); + (* ReactDOM.domProps(~className=blabla, ~foo=bar, ()) *) + (labelled "props", propsRecord); + (* [|moreCreateElementCallsHere|] *) + (nolabel, childrenExpr); + ] + in + Exp.apply ~loc:jsxExprLoc ~attrs + (* ReactDOM.createElement *) + (Exp.ident + { + loc = Location.none; + txt = Ldot (Lident "ReactDOM", createElementCall); + }) + args + +let rec recursivelyTransformNamedArgsForMake mapper expr args newtypes coreType + = + let expr = mapper.expr mapper expr in + match expr.pexp_desc with + (* TODO: make this show up with a loc. *) + | Pexp_fun (Labelled "key", _, _, _) | Pexp_fun (Optional "key", _, _, _) -> + React_jsx_common.raiseError ~loc:expr.pexp_loc + "Key cannot be accessed inside of a component. Don't worry - you can \ + always key a component from its parent!" + | Pexp_fun (Labelled "ref", _, _, _) | Pexp_fun (Optional "ref", _, _, _) -> + React_jsx_common.raiseError ~loc:expr.pexp_loc + "Ref cannot be passed as a normal prop. Please use `forwardRef` API \ + instead." + | Pexp_fun (arg, default, pattern, expression) + when isOptional arg || isLabelled arg -> + let () = + match (isOptional arg, pattern, default) with + | true, {ppat_desc = Ppat_constraint (_, {ptyp_desc})}, None -> ( + match ptyp_desc with + | Ptyp_constr ({txt = Lident "option"}, [_]) -> () + | _ -> + let currentType = + match ptyp_desc with + | Ptyp_constr ({txt}, []) -> + String.concat "." (Longident.flatten txt) + | Ptyp_constr ({txt}, _innerTypeArgs) -> + String.concat "." (Longident.flatten txt) ^ "(...)" + | _ -> "..." + in + Location.prerr_warning pattern.ppat_loc + (Preprocessor + (Printf.sprintf + "React: optional argument annotations must have explicit \ + `option`. Did you mean `option(%s)=?`?" + currentType))) + | _ -> () + in + let alias = + match pattern with + | {ppat_desc = Ppat_alias (_, {txt}) | Ppat_var {txt}} -> txt + | {ppat_desc = Ppat_any} -> "_" + | _ -> getLabel arg + in + let type_ = + match pattern with + | {ppat_desc = Ppat_constraint (_, type_)} -> Some type_ + | _ -> None + in + + recursivelyTransformNamedArgsForMake mapper expression + ((arg, default, pattern, alias, pattern.ppat_loc, type_) :: args) + newtypes coreType + | Pexp_fun + ( Nolabel, + _, + {ppat_desc = Ppat_construct ({txt = Lident "()"}, _) | Ppat_any}, + _expression ) -> + (args, newtypes, coreType) + | Pexp_fun + ( Nolabel, + _, + ({ + ppat_desc = + Ppat_var {txt} | Ppat_constraint ({ppat_desc = Ppat_var {txt}}, _); + } as pattern), + _expression ) -> + if txt = "ref" then + let type_ = + match pattern with + | {ppat_desc = Ppat_constraint (_, type_)} -> Some type_ + | _ -> None + in + (* The ref arguement of forwardRef should be optional *) + ( (Optional "ref", None, pattern, txt, pattern.ppat_loc, type_) :: args, + newtypes, + coreType ) + else (args, newtypes, coreType) + | Pexp_fun (Nolabel, _, pattern, _expression) -> + Location.raise_errorf ~loc:pattern.ppat_loc + "React: react.component refs only support plain arguments and type \ + annotations." 
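(* A rough sketch, for illustration only, of how this V4 traversal differs
   from the V3 one: a trailing plain `ref` argument (the React.forwardRef
   case) is not returned separately but is recorded as an extra
   (Optional "ref", ...) entry in `args`, and any surrounding Pexp_constraint
   is threaded through as `coreType` so an explicit type annotation on the
   function can be reused when the prop types are derived below. *)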
+ | Pexp_newtype (label, expression) -> + recursivelyTransformNamedArgsForMake mapper expression args + (label :: newtypes) coreType + | Pexp_constraint (expression, coreType) -> + recursivelyTransformNamedArgsForMake mapper expression args newtypes + (Some coreType) + | _ -> (args, newtypes, coreType) + +let newtypeToVar newtype type_ = + let var_desc = Ptyp_var ("type-" ^ newtype) in + let typ (mapper : Ast_mapper.mapper) typ = + match typ.ptyp_desc with + | Ptyp_constr ({txt = Lident name}, _) when name = newtype -> + {typ with ptyp_desc = var_desc} + | _ -> Ast_mapper.default_mapper.typ mapper typ + in + let mapper = {Ast_mapper.default_mapper with typ} in + mapper.typ mapper type_ + +let argToType ~newtypes ~(typeConstraints : core_type option) types + (name, default, _noLabelName, _alias, loc, type_) = + let rec getType name coreType = + match coreType with + | {ptyp_desc = Ptyp_arrow (arg, c1, c2)} -> + if name = arg then Some c1 else getType name c2 + | _ -> None + in + let typeConst = Option.bind typeConstraints (getType name) in + let type_ = + List.fold_left + (fun type_ newtype -> + match (type_, typeConst) with + | _, Some typ | Some typ, None -> Some (newtypeToVar newtype.txt typ) + | _ -> None) + type_ newtypes + in + match (type_, name, default) with + | Some type_, name, _ when isOptional name -> + (true, getLabel name, [], {type_ with ptyp_attributes = optionalAttrs}) + :: types + | Some type_, name, _ -> (false, getLabel name, [], type_) :: types + | None, name, _ when isOptional name -> + ( true, + getLabel name, + [], + Typ.var ~loc ~attrs:optionalAttrs (safeTypeFromValue name) ) + :: types + | None, name, _ when isLabelled name -> + (false, getLabel name, [], Typ.var ~loc (safeTypeFromValue name)) :: types + | _ -> types + +let argWithDefaultValue (name, default, _, _, _, _) = + match default with + | Some default when isOptional name -> Some (getLabel name, default) + | _ -> None + +let argToConcreteType types (name, _loc, type_) = + match name with + | name when isLabelled name -> (false, getLabel name, [], type_) :: types + | name when isOptional name -> (true, getLabel name, [], type_) :: types + | _ -> types + +let check_string_int_attribute_iter = + let attribute _ ({txt; loc}, _) = + if txt = "string" || txt = "int" then + React_jsx_common.raiseError ~loc + "@string and @int attributes not supported. 
See \ + https://github.com/rescript-lang/rescript-compiler/issues/5724" + in + + {Ast_iterator.default_iterator with attribute} + +let transformStructureItem ~config mapper item = + match item with + (* external *) + | { + pstr_loc; + pstr_desc = + Pstr_primitive ({pval_attributes; pval_type} as value_description); + } as pstr -> ( + match List.filter React_jsx_common.hasAttr pval_attributes with + | [] -> [item] + | [_] -> + (* If there is another @react.component, throw error *) + if config.React_jsx_common.hasReactComponent then + React_jsx_common.raiseErrorMultipleReactComponent ~loc:pstr_loc + else ( + config.hasReactComponent <- true; + check_string_int_attribute_iter.structure_item + check_string_int_attribute_iter item; + let coreTypeOfAttr = React_jsx_common.coreTypeOfAttrs pval_attributes in + let typVarsOfCoreType = + coreTypeOfAttr + |> Option.map React_jsx_common.typVarsOfCoreType + |> Option.value ~default:[] + in + let rec getPropTypes types ({ptyp_loc; ptyp_desc} as fullType) = + match ptyp_desc with + | Ptyp_arrow (name, type_, ({ptyp_desc = Ptyp_arrow _} as rest)) + when isLabelled name || isOptional name -> + getPropTypes ((name, ptyp_loc, type_) :: types) rest + | Ptyp_arrow (Nolabel, _type, rest) -> getPropTypes types rest + | Ptyp_arrow (name, type_, returnValue) + when isLabelled name || isOptional name -> + (returnValue, (name, returnValue.ptyp_loc, type_) :: types) + | _ -> (fullType, types) + in + let innerType, propTypes = getPropTypes [] pval_type in + let namedTypeList = List.fold_left argToConcreteType [] propTypes in + let retPropsType = + Typ.constr ~loc:pstr_loc + (Location.mkloc (Lident "props") pstr_loc) + (match coreTypeOfAttr with + | None -> makePropsTypeParams namedTypeList + | Some _ -> ( + match typVarsOfCoreType with + | [] -> [] + | _ -> [Typ.any ()])) + in + (* type props<'x, 'y> = { x: 'x, y?: 'y, ... } *) + let propsRecordType = + makePropsRecordType ~coreTypeOfAttr ~typVarsOfCoreType "props" + pstr_loc namedTypeList + in + (* can't be an arrow because it will defensively uncurry *) + let newExternalType = + Ptyp_constr + ( {loc = pstr_loc; txt = Ldot (Lident "React", "componentLike")}, + [retPropsType; innerType] ) + in + let newStructure = + { + pstr with + pstr_desc = + Pstr_primitive + { + value_description with + pval_type = {pval_type with ptyp_desc = newExternalType}; + pval_attributes = List.filter otherAttrsPure pval_attributes; + }; + } + in + [propsRecordType; newStructure]) + | _ -> + React_jsx_common.raiseError ~loc:pstr_loc + "Only one react.component call can exist on a component at one time") + (* let component = ... 
*) + | {pstr_loc; pstr_desc = Pstr_value (recFlag, valueBindings)} -> ( + let fileName = filenameFromLoc pstr_loc in + let emptyLoc = Location.in_file fileName in + let mapBinding binding = + if React_jsx_common.hasAttrOnBinding binding then + if config.hasReactComponent then + React_jsx_common.raiseErrorMultipleReactComponent ~loc:pstr_loc + else ( + config.hasReactComponent <- true; + let coreTypeOfAttr = + React_jsx_common.coreTypeOfAttrs binding.pvb_attributes + in + let typVarsOfCoreType = + coreTypeOfAttr + |> Option.map React_jsx_common.typVarsOfCoreType + |> Option.value ~default:[] + in + let bindingLoc = binding.pvb_loc in + let bindingPatLoc = binding.pvb_pat.ppat_loc in + let binding = + { + binding with + pvb_pat = {binding.pvb_pat with ppat_loc = emptyLoc}; + pvb_loc = emptyLoc; + } + in + let fnName = getFnName binding.pvb_pat in + let internalFnName = fnName ^ "$Internal" in + let fullModuleName = + makeModuleName fileName config.nestedModules fnName + in + let modifiedBindingOld binding = + let expression = binding.pvb_expr in + (* TODO: there is a long-tail of unsupported features inside of blocks - Pexp_letmodule , Pexp_letexception , Pexp_ifthenelse *) + let rec spelunkForFunExpression expression = + match expression with + (* let make = (~prop) => ... *) + | {pexp_desc = Pexp_fun _} | {pexp_desc = Pexp_newtype _} -> + expression + (* let make = {let foo = bar in (~prop) => ...} *) + | {pexp_desc = Pexp_let (_recursive, _vbs, returnExpression)} -> + (* here's where we spelunk! *) + spelunkForFunExpression returnExpression + (* let make = React.forwardRef((~prop) => ...) *) + | { + pexp_desc = + Pexp_apply + (_wrapperExpression, [(Nolabel, innerFunctionExpression)]); + } -> + spelunkForFunExpression innerFunctionExpression + | { + pexp_desc = + Pexp_sequence (_wrapperExpression, innerFunctionExpression); + } -> + spelunkForFunExpression innerFunctionExpression + | {pexp_desc = Pexp_constraint (innerFunctionExpression, _typ)} -> + spelunkForFunExpression innerFunctionExpression + | {pexp_loc} -> + React_jsx_common.raiseError ~loc:pexp_loc + "react.component calls can only be on function definitions \ + or component wrappers (forwardRef, memo)." + in + spelunkForFunExpression expression + in + let modifiedBinding binding = + let hasApplication = ref false in + let wrapExpressionWithBinding expressionFn expression = + Vb.mk ~loc:bindingLoc + ~attrs:(List.filter otherAttrsPure binding.pvb_attributes) + (Pat.var ~loc:bindingPatLoc {loc = bindingPatLoc; txt = fnName}) + (expressionFn expression) + in + let expression = binding.pvb_expr in + (* TODO: there is a long-tail of unsupported features inside of blocks - Pexp_letmodule , Pexp_letexception , Pexp_ifthenelse *) + let rec spelunkForFunExpression expression = + match expression with + (* let make = (~prop) => ... with no final unit *) + | { + pexp_desc = + Pexp_fun + ( ((Labelled _ | Optional _) as label), + default, + pattern, + ({pexp_desc = Pexp_fun _} as internalExpression) ); + } -> + let wrap, hasForwardRef, exp = + spelunkForFunExpression internalExpression + in + ( wrap, + hasForwardRef, + { + expression with + pexp_desc = Pexp_fun (label, default, pattern, exp); + } ) + (* let make = (()) => ... *) + (* let make = (_) => ... *) + | { + pexp_desc = + Pexp_fun + ( Nolabel, + _default, + { + ppat_desc = + Ppat_construct ({txt = Lident "()"}, _) | Ppat_any; + }, + _internalExpression ); + } -> + ((fun a -> a), false, expression) + (* let make = (~prop) => ... 
*) + | { + pexp_desc = + Pexp_fun + ( (Labelled _ | Optional _), + _default, + _pattern, + _internalExpression ); + } -> + ((fun a -> a), false, expression) + (* let make = (prop) => ... *) + | { + pexp_desc = + Pexp_fun (_nolabel, _default, pattern, _internalExpression); + } -> + if !hasApplication then ((fun a -> a), false, expression) + else + Location.raise_errorf ~loc:pattern.ppat_loc + "React: props need to be labelled arguments.\n\ + \ If you are working with refs be sure to wrap with \ + React.forwardRef.\n\ + \ If your component doesn't have any props use () or _ \ + instead of a name." + (* let make = {let foo = bar in (~prop) => ...} *) + | {pexp_desc = Pexp_let (recursive, vbs, internalExpression)} -> + (* here's where we spelunk! *) + let wrap, hasForwardRef, exp = + spelunkForFunExpression internalExpression + in + ( wrap, + hasForwardRef, + {expression with pexp_desc = Pexp_let (recursive, vbs, exp)} + ) + (* let make = React.forwardRef((~prop) => ...) *) + | { + pexp_desc = + Pexp_apply (wrapperExpression, [(Nolabel, internalExpression)]); + } -> + let () = hasApplication := true in + let _, _, exp = spelunkForFunExpression internalExpression in + let hasForwardRef = isForwardRef wrapperExpression in + ( (fun exp -> Exp.apply wrapperExpression [(nolabel, exp)]), + hasForwardRef, + exp ) + | { + pexp_desc = Pexp_sequence (wrapperExpression, internalExpression); + } -> + let wrap, hasForwardRef, exp = + spelunkForFunExpression internalExpression + in + ( wrap, + hasForwardRef, + { + expression with + pexp_desc = Pexp_sequence (wrapperExpression, exp); + } ) + | e -> ((fun a -> a), false, e) + in + let wrapExpression, hasForwardRef, expression = + spelunkForFunExpression expression + in + (wrapExpressionWithBinding wrapExpression, hasForwardRef, expression) + in + let bindingWrapper, hasForwardRef, expression = + modifiedBinding binding + in + (* do stuff here! *) + let namedArgList, newtypes, typeConstraints = + recursivelyTransformNamedArgsForMake mapper + (modifiedBindingOld binding) + [] [] None + in + let namedTypeList = + List.fold_left + (argToType ~newtypes ~typeConstraints) + [] namedArgList + in + let namedArgWithDefaultValueList = + List.filter_map argWithDefaultValue namedArgList + in + let vbMatch (label, default) = + Vb.mk + (Pat.var (Location.mknoloc label)) + (Exp.match_ + (Exp.ident {txt = Lident label; loc = Location.none}) + [ + Exp.case + (Pat.construct + (Location.mknoloc @@ Lident "Some") + (Some (Pat.var (Location.mknoloc label)))) + (Exp.ident (Location.mknoloc @@ Lident label)); + Exp.case + (Pat.construct (Location.mknoloc @@ Lident "None") None) + default; + ]) + in + let vbMatchList = List.map vbMatch namedArgWithDefaultValueList in + (* type props = { ... 
} *) + let propsRecordType = + makePropsRecordType ~coreTypeOfAttr ~typVarsOfCoreType "props" + pstr_loc namedTypeList + in + let innerExpression = + Exp.apply + (Exp.ident (Location.mknoloc @@ Lident fnName)) + ([(Nolabel, Exp.ident (Location.mknoloc @@ Lident "props"))] + @ + match hasForwardRef with + | true -> + [(Nolabel, Exp.ident (Location.mknoloc @@ Lident "ref"))] + | false -> []) + in + let makePropsPattern = function + | [] -> Pat.var @@ Location.mknoloc "props" + | _ -> + Pat.constraint_ + (Pat.var @@ Location.mknoloc "props") + (Typ.constr (Location.mknoloc @@ Lident "props") [Typ.any ()]) + in + let fullExpression = + (* React component name should start with uppercase letter *) + (* let make = { let \"App" = props => make(props); \"App" } *) + (* let make = React.forwardRef({ + let \"App" = (props, ref) => make({...props, ref: @optional (Js.Nullabel.toOption(ref))}) + })*) + Exp.fun_ nolabel None + (match coreTypeOfAttr with + | None -> makePropsPattern namedTypeList + | Some _ -> makePropsPattern typVarsOfCoreType) + (if hasForwardRef then + Exp.fun_ nolabel None + (Pat.var @@ Location.mknoloc "ref") + innerExpression + else innerExpression) + in + let fullExpression = + match fullModuleName with + | "" -> fullExpression + | txt -> + Exp.let_ Nonrecursive + [ + Vb.mk ~loc:emptyLoc + (Pat.var ~loc:emptyLoc {loc = emptyLoc; txt}) + fullExpression; + ] + (Exp.ident ~loc:pstr_loc {loc = emptyLoc; txt = Lident txt}) + in + let rec stripConstraintUnpack ~label pattern = + match pattern with + | {ppat_desc = Ppat_constraint (pattern, _)} -> + stripConstraintUnpack ~label pattern + | {ppat_desc = Ppat_unpack _; ppat_loc} -> + (* remove unpack e.g. model: module(T) *) + Pat.var ~loc:ppat_loc {txt = label; loc = ppat_loc} + | _ -> pattern + in + let rec returnedExpression patternsWithLabel patternsWithNolabel + ({pexp_desc} as expr) = + match pexp_desc with + | Pexp_newtype (_, expr) -> + returnedExpression patternsWithLabel patternsWithNolabel expr + | Pexp_constraint (expr, _) -> + returnedExpression patternsWithLabel patternsWithNolabel expr + | Pexp_fun + ( _arg_label, + _default, + { + ppat_desc = + Ppat_construct ({txt = Lident "()"}, _) | Ppat_any; + }, + expr ) -> + (patternsWithLabel, patternsWithNolabel, expr) + | Pexp_fun + (arg_label, _default, ({ppat_loc; ppat_desc} as pattern), expr) + -> ( + let patternWithoutConstraint = + stripConstraintUnpack ~label:(getLabel arg_label) pattern + in + if isLabelled arg_label || isOptional arg_label then + returnedExpression + (( {loc = ppat_loc; txt = Lident (getLabel arg_label)}, + { + patternWithoutConstraint with + ppat_attributes = + (if isOptional arg_label then optionalAttrs else []) + @ pattern.ppat_attributes; + } ) + :: patternsWithLabel) + patternsWithNolabel expr + else + (* Special case of nolabel arg "ref" in forwardRef fn *) + (* let make = React.forwardRef(ref => body) *) + match ppat_desc with + | Ppat_var {txt} + | Ppat_constraint ({ppat_desc = Ppat_var {txt}}, _) -> + returnedExpression patternsWithLabel + (( {loc = ppat_loc; txt = Lident txt}, + { + pattern with + ppat_attributes = + optionalAttrs @ pattern.ppat_attributes; + } ) + :: patternsWithNolabel) + expr + | _ -> + returnedExpression patternsWithLabel patternsWithNolabel expr) + | _ -> (patternsWithLabel, patternsWithNolabel, expr) + in + let patternsWithLabel, patternsWithNolabel, expression = + returnedExpression [] [] expression + in + (* add pattern matching for optional prop value *) + let expression = + if List.length vbMatchList = 0 then expression 
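+          (* A sketch of the binding vbMatch builds for a defaulted optional
+             prop, assuming a hypothetical ~count=0: the generated binding is
+             equivalent to
+               let count = match count with Some count -> count | None -> 0
+             so the body below always sees the unwrapped value. *)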
+ else Exp.let_ Nonrecursive vbMatchList expression + in + (* (ref) => expr *) + let expression = + List.fold_left + (fun expr (_, pattern) -> Exp.fun_ Nolabel None pattern expr) + expression patternsWithNolabel + in + let recordPattern = + match patternsWithLabel with + | [] -> Pat.any () + | _ -> Pat.record (List.rev patternsWithLabel) Open + in + let expression = + Exp.fun_ Nolabel None + (Pat.constraint_ recordPattern + (Typ.constr ~loc:emptyLoc + {txt = Lident "props"; loc = emptyLoc} + (match coreTypeOfAttr with + | None -> + makePropsTypeParams ~stripExplicitOption:true + ~stripExplicitJsNullableOfRef:hasForwardRef + namedTypeList + | Some _ -> ( + match typVarsOfCoreType with + | [] -> [] + | _ -> [Typ.any ()])))) + expression + in + (* let make = ({id, name, ...}: props<'id, 'name, ...>) => { ... } *) + let bindings, newBinding = + match recFlag with + | Recursive -> + ( [ + bindingWrapper + (Exp.let_ ~loc:emptyLoc Recursive + [ + makeNewBinding binding expression internalFnName; + Vb.mk + (Pat.var {loc = emptyLoc; txt = fnName}) + fullExpression; + ] + (Exp.ident {loc = emptyLoc; txt = Lident fnName})); + ], + None ) + | Nonrecursive -> + ( [ + { + binding with + pvb_expr = expression; + pvb_pat = Pat.var {txt = fnName; loc = Location.none}; + }; + ], + Some (bindingWrapper fullExpression) ) + in + (Some propsRecordType, bindings, newBinding)) + else (None, [binding], None) + in + (* END of mapBinding fn *) + let structuresAndBinding = List.map mapBinding valueBindings in + let otherStructures (type_, binding, newBinding) + (types, bindings, newBindings) = + let types = + match type_ with + | Some type_ -> type_ :: types + | None -> types + in + let newBindings = + match newBinding with + | Some newBinding -> newBinding :: newBindings + | None -> newBindings + in + (types, binding @ bindings, newBindings) + in + let types, bindings, newBindings = + List.fold_right otherStructures structuresAndBinding ([], [], []) + in + types + @ [{pstr_loc; pstr_desc = Pstr_value (recFlag, bindings)}] + @ + match newBindings with + | [] -> [] + | newBindings -> + [{pstr_loc = emptyLoc; pstr_desc = Pstr_value (recFlag, newBindings)}]) + | _ -> [item] + +let transformSignatureItem ~config _mapper item = + match item with + | { + psig_loc; + psig_desc = Psig_value ({pval_attributes; pval_type} as psig_desc); + } as psig -> ( + match List.filter React_jsx_common.hasAttr pval_attributes with + | [] -> [item] + | [_] -> + (* If there is another @react.component, throw error *) + if config.React_jsx_common.hasReactComponent then + React_jsx_common.raiseErrorMultipleReactComponent ~loc:psig_loc + else config.hasReactComponent <- true; + check_string_int_attribute_iter.signature_item + check_string_int_attribute_iter item; + let hasForwardRef = ref false in + let coreTypeOfAttr = React_jsx_common.coreTypeOfAttrs pval_attributes in + let typVarsOfCoreType = + coreTypeOfAttr + |> Option.map React_jsx_common.typVarsOfCoreType + |> Option.value ~default:[] + in + let rec getPropTypes types ({ptyp_loc; ptyp_desc} as fullType) = + match ptyp_desc with + | Ptyp_arrow (name, type_, ({ptyp_desc = Ptyp_arrow _} as rest)) + when isOptional name || isLabelled name -> + getPropTypes ((name, ptyp_loc, type_) :: types) rest + | Ptyp_arrow + (Nolabel, {ptyp_desc = Ptyp_constr ({txt = Lident "unit"}, _)}, rest) + -> + getPropTypes types rest + | Ptyp_arrow (Nolabel, _type, rest) -> + hasForwardRef := true; + getPropTypes types rest + | Ptyp_arrow (name, type_, returnValue) + when isOptional name || isLabelled name -> + 
(returnValue, (name, returnValue.ptyp_loc, type_) :: types) + | _ -> (fullType, types) + in + let innerType, propTypes = getPropTypes [] pval_type in + let namedTypeList = List.fold_left argToConcreteType [] propTypes in + let retPropsType = + Typ.constr + (Location.mkloc (Lident "props") psig_loc) + (match coreTypeOfAttr with + | None -> makePropsTypeParams namedTypeList + | Some _ -> ( + match typVarsOfCoreType with + | [] -> [] + | _ -> [Typ.any ()])) + in + let propsRecordType = + makePropsRecordTypeSig ~coreTypeOfAttr ~typVarsOfCoreType "props" + psig_loc + ((* If there is Nolabel arg, regard the type as ref in forwardRef *) + (if !hasForwardRef then [(true, "ref", [], refType Location.none)] + else []) + @ namedTypeList) + in + (* can't be an arrow because it will defensively uncurry *) + let newExternalType = + Ptyp_constr + ( {loc = psig_loc; txt = Ldot (Lident "React", "componentLike")}, + [retPropsType; innerType] ) + in + let newStructure = + { + psig with + psig_desc = + Psig_value + { + psig_desc with + pval_type = {pval_type with ptyp_desc = newExternalType}; + pval_attributes = List.filter otherAttrsPure pval_attributes; + }; + } + in + [propsRecordType; newStructure] + | _ -> + React_jsx_common.raiseError ~loc:psig_loc + "Only one react.component call can exist on a component at one time") + | _ -> [item] + +let transformJsxCall ~config mapper callExpression callArguments jsxExprLoc + attrs = + match callExpression.pexp_desc with + | Pexp_ident caller -> ( + match caller with + | {txt = Lident "createElement"; loc} -> + React_jsx_common.raiseError ~loc + "JSX: `createElement` should be preceeded by a module name." + (* Foo.createElement(~prop1=foo, ~prop2=bar, ~children=[], ()) *) + | {loc; txt = Ldot (modulePath, ("createElement" | "make"))} -> + transformUppercaseCall3 ~config modulePath mapper jsxExprLoc loc attrs + callArguments + (* div(~prop1=foo, ~prop2=bar, ~children=[bla], ()) *) + (* turn that into + ReactDOM.createElement(~props=ReactDOM.props(~props1=foo, ~props2=bar, ()), [|bla|]) *) + | {loc; txt = Lident id} -> + transformLowercaseCall3 ~config mapper jsxExprLoc loc attrs callArguments + id + | {txt = Ldot (_, anythingNotCreateElementOrMake); loc} -> + React_jsx_common.raiseError ~loc + "JSX: the JSX attribute should be attached to a \ + `YourModuleName.createElement` or `YourModuleName.make` call. We saw \ + `%s` instead" + anythingNotCreateElementOrMake + | {txt = Lapply _; loc} -> + (* don't think there's ever a case where this is reached *) + React_jsx_common.raiseError ~loc + "JSX: encountered a weird case while processing the code. Please \ + report this!") + | _ -> + React_jsx_common.raiseError ~loc:callExpression.pexp_loc + "JSX: `createElement` should be preceeded by a simple, direct module \ + name." + +let expr ~config mapper expression = + match expression with + (* Does the function application have the @JSX attribute? *) + | { + pexp_desc = Pexp_apply (callExpression, callArguments); + pexp_attributes; + pexp_loc; + } -> ( + let jsxAttribute, nonJSXAttributes = + List.partition + (fun (attribute, _) -> attribute.txt = "JSX") + pexp_attributes + in + match (jsxAttribute, nonJSXAttributes) with + (* no JSX attribute *) + | [], _ -> default_mapper.expr mapper expression + | _, nonJSXAttributes -> + transformJsxCall ~config mapper callExpression callArguments pexp_loc + nonJSXAttributes) + (* is it a list with jsx attribute? 
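+     A fragment reaches this case as a list expression carrying the JSX
+     attribute. A sketch of the rewrite for hypothetical children a and b:
+     classic mode emits ReactDOM.createElement(React.fragment, [a, b]), while
+     automatic mode emits React.jsxs(React.jsxFragment, {children: [a, b]}).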
Reason <>foo desugars to [@JSX][foo]*) + | { + pexp_desc = + ( Pexp_construct + ({txt = Lident "::"; loc}, Some {pexp_desc = Pexp_tuple _}) + | Pexp_construct ({txt = Lident "[]"; loc}, None) ); + pexp_attributes; + } as listItems -> ( + let jsxAttribute, nonJSXAttributes = + List.partition + (fun (attribute, _) -> attribute.txt = "JSX") + pexp_attributes + in + match (jsxAttribute, nonJSXAttributes) with + (* no JSX attribute *) + | [], _ -> default_mapper.expr mapper expression + | _, nonJSXAttributes -> + let loc = {loc with loc_ghost = true} in + let fragment = + match config.mode with + | "automatic" -> + Exp.ident ~loc {loc; txt = Ldot (Lident "React", "jsxFragment")} + | "classic" | _ -> + Exp.ident ~loc {loc; txt = Ldot (Lident "React", "fragment")} + in + let childrenExpr = transformChildrenIfList ~mapper listItems in + let recordOfChildren children = + Exp.record [(Location.mknoloc (Lident "children"), children)] None + in + let args = + [ + (nolabel, fragment); + (match config.mode with + | "automatic" -> ( + ( nolabel, + match childrenExpr with + | {pexp_desc = Pexp_array children} -> ( + match children with + | [] -> emptyRecord ~loc:Location.none + | [child] -> recordOfChildren child + | _ -> recordOfChildren childrenExpr) + | _ -> recordOfChildren childrenExpr )) + | "classic" | _ -> (nolabel, childrenExpr)); + ] + in + let countOfChildren = function + | {pexp_desc = Pexp_array children} -> List.length children + | _ -> 0 + in + Exp.apply + ~loc (* throw away the [@JSX] attribute and keep the others, if any *) + ~attrs:nonJSXAttributes + (* ReactDOM.createElement *) + (match config.mode with + | "automatic" -> + if countOfChildren childrenExpr > 1 then + Exp.ident ~loc {loc; txt = Ldot (Lident "React", "jsxs")} + else Exp.ident ~loc {loc; txt = Ldot (Lident "React", "jsx")} + | "classic" | _ -> + Exp.ident ~loc {loc; txt = Ldot (Lident "ReactDOM", "createElement")}) + args) + (* Delegate to the default mapper, a deep identity traversal *) + | e -> default_mapper.expr mapper e + +let module_binding ~(config : React_jsx_common.jsxConfig) mapper module_binding + = + config.nestedModules <- module_binding.pmb_name.txt :: config.nestedModules; + let mapped = default_mapper.module_binding mapper module_binding in + let () = + match config.nestedModules with + | _ :: rest -> config.nestedModules <- rest + | [] -> () + in + mapped + +(* TODO: some line number might still be wrong *) +let jsxMapper ~config = + let expr = expr ~config in + let module_binding = module_binding ~config in + let transformStructureItem = transformStructureItem ~config in + let transformSignatureItem = transformSignatureItem ~config in + (expr, module_binding, transformSignatureItem, transformStructureItem) diff --git a/res_syntax/cli/res_cli.ml b/res_syntax/cli/res_cli.ml new file mode 100644 index 0000000000..efe5e9ef88 --- /dev/null +++ b/res_syntax/cli/res_cli.ml @@ -0,0 +1,322 @@ +(* + This CLI isn't used apart for this repo's testing purposes. The syntax + itself is used by ReScript's compiler programmatically through various other apis. +*) + +(* + This is OCaml's Misc.ml's Color module. More specifically, this is + ReScript's OCaml fork's Misc.ml's Color module: + https://github.com/rescript-lang/ocaml/blob/92e58bedced8d7e3e177677800a38922327ab860/utils/misc.ml#L540 + + The syntax's printing's coloring logic depends on: + 1. a global mutable variable that's set in the compiler: Misc.Color.color_enabled + 2. the colors tags supported by Misc.Color, e.g. 
style_of_tag, which Format + tags like @{hello@} use + 3. etc. + + When this syntax is programmatically used inside ReScript, the various + Format tags like and get properly colored depending on the + above points. + + But when used by this cli file, that coloring logic doesn't render properly + because we're compiling against vanilla OCaml 4.06 instead of ReScript's + OCaml fork. For example, the vanilla compiler doesn't support the `dim` + color (grey). So we emulate the right coloring logic by copy pasting how our + forked OCaml compiler does it. +*) +module Color = struct + (* use ANSI color codes, see https://en.wikipedia.org/wiki/ANSI_escape_code *) + type color = + | Black [@live] + | Red + | Green [@live] + | Yellow + | Blue [@live] + | Magenta + | Cyan + | White [@live] + + type style = + | FG of color (* foreground *) + | BG of color [@live] (* background *) + | Bold + | Reset + | Dim + + let ansi_of_color = function + | Black -> "0" + | Red -> "1" + | Green -> "2" + | Yellow -> "3" + | Blue -> "4" + | Magenta -> "5" + | Cyan -> "6" + | White -> "7" + + let code_of_style = function + | FG c -> "3" ^ ansi_of_color c + | BG c -> "4" ^ ansi_of_color c + | Bold -> "1" + | Reset -> "0" + | Dim -> "2" + + let ansi_of_style_l l = + let s = + match l with + | [] -> code_of_style Reset + | [s] -> code_of_style s + | _ -> String.concat ";" (List.map code_of_style l) + in + "\x1b[" ^ s ^ "m" + + type styles = {error: style list; warning: style list; loc: style list} + + let default_styles = + {warning = [Bold; FG Magenta]; error = [Bold; FG Red]; loc = [Bold]} + + let cur_styles = ref default_styles + + (* let get_styles () = !cur_styles *) + (* let set_styles s = cur_styles := s *) + + (* map a tag to a style, if the tag is known. + @raise Not_found otherwise *) + let style_of_tag s = + match s with + | Format.String_tag "error" -> !cur_styles.error + | Format.String_tag "warning" -> !cur_styles.warning + | Format.String_tag "loc" -> !cur_styles.loc + | Format.String_tag "info" -> [Bold; FG Yellow] + | Format.String_tag "dim" -> [Dim] + | Format.String_tag "filename" -> [FG Cyan] + | _ -> raise Not_found + [@@raises Not_found] + + let color_enabled = ref true + + (* either prints the tag of [s] or delegates to [or_else] *) + let mark_open_tag ~or_else s = + try + let style = style_of_tag s in + if !color_enabled then ansi_of_style_l style else "" + with Not_found -> or_else s + + let mark_close_tag ~or_else s = + try + let _ = style_of_tag s in + if !color_enabled then ansi_of_style_l [Reset] else "" + with Not_found -> or_else s + + (* add color handling to formatter [ppf] *) + let set_color_tag_handling ppf = + let open Format in + let functions = pp_get_formatter_stag_functions ppf () in + let functions' = + { + functions with + mark_open_stag = mark_open_tag ~or_else:functions.mark_open_stag; + mark_close_stag = mark_close_tag ~or_else:functions.mark_close_stag; + } + in + pp_set_mark_tags ppf true; + (* enable tags *) + pp_set_formatter_stag_functions ppf functions'; + (* also setup margins *) + pp_set_margin ppf (pp_get_margin std_formatter ()); + () + + external isatty : out_channel -> bool = "caml_sys_isatty" + + (* reasonable heuristic on whether colors should be enabled *) + let should_enable_color () = + let term = try Sys.getenv "TERM" with Not_found -> "" in + term <> "dumb" && term <> "" && isatty stderr + + type setting = Auto [@live] | Always [@live] | Never [@live] + + let setup = + let first = ref true in + (* initialize only once *) + let formatter_l = + 
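+    (* A usage sketch, assuming [setup] has been called (as CliArgProcessor
+       does below with [Color.setup None]) and with [msg] a hypothetical
+       message string:
+         Format.eprintf "@{<error>Error@}: %s@." msg
+       The error tag resolves to [!cur_styles.error] via [style_of_tag]. *)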
[Format.std_formatter; Format.err_formatter; Format.str_formatter] + in + fun o -> + if !first then ( + first := false; + Format.set_mark_tags true; + List.iter set_color_tag_handling formatter_l; + color_enabled := + match o with + | Some Always -> true + | Some Auto -> should_enable_color () + | Some Never -> false + | None -> should_enable_color ()); + () +end + +(* command line flags *) +module ResClflags : sig + val recover : bool ref + val print : string ref + val width : int ref + val origin : string ref + val file : string ref + val interface : bool ref + val jsxVersion : int ref + val jsxModule : string ref + val jsxMode : string ref + val typechecker : bool ref + + val parse : unit -> unit +end = struct + let recover = ref false + let width = ref 100 + + let print = ref "res" + let origin = ref "" + let interface = ref false + let jsxVersion = ref (-1) + let jsxModule = ref "react" + let jsxMode = ref "classic" + let file = ref "" + let typechecker = ref false + + let usage = + "\n\ + **This command line is for the repo developer's testing purpose only. DO \ + NOT use it in production**!\n\n" + ^ "Usage:\n rescript \n\n" ^ "Examples:\n" + ^ " rescript myFile.res\n" ^ " rescript -parse ml -print res myFile.ml\n" + ^ " rescript -parse res -print binary -interface myFile.resi\n\n" + ^ "Options are:" + + let spec = + [ + ("-recover", Arg.Unit (fun () -> recover := true), "Emit partial ast"); + ( "-parse", + Arg.String (fun txt -> origin := txt), + "Parse reasonBinary, ml or res. Default: res" ); + ( "-print", + Arg.String (fun txt -> print := txt), + "Print either binary, ml, ast, sexp, comments or res. Default: res" ); + ( "-width", + Arg.Int (fun w -> width := w), + "Specify the line length for the printer (formatter)" ); + ( "-interface", + Arg.Unit (fun () -> interface := true), + "Parse as interface" ); + ( "-jsx-version", + Arg.Int (fun i -> jsxVersion := i), + "Apply a specific built-in ppx before parsing, none or 3, 4. Default: \ + none" ); + ( "-jsx-module", + Arg.String (fun txt -> jsxModule := txt), + "Specify the jsx module. Default: react" ); + ( "-jsx-mode", + Arg.String (fun txt -> jsxMode := txt), + "Specify the jsx mode, classic or automatic. Default: classic" ); + ( "-typechecker", + Arg.Unit (fun () -> typechecker := true), + "Parses the ast as it would be passed to the typechecker and not the \ + printer" ); + ] + + let parse () = Arg.parse spec (fun f -> file := f) usage +end + +module CliArgProcessor = struct + type backend = Parser : 'diagnostics Res_driver.parsingEngine -> backend + [@@unboxed] + + let processFile ~isInterface ~width ~recover ~origin ~target ~jsxVersion + ~jsxModule ~jsxMode ~typechecker filename = + let len = String.length filename in + let processInterface = + isInterface + || (len > 0 && (String.get [@doesNotRaise]) filename (len - 1) = 'i') + in + let parsingEngine = + match origin with + | "reasonBinary" -> Parser Res_driver_reason_binary.parsingEngine + | "ml" -> Parser Res_driver_ml_parser.parsingEngine + | "res" -> Parser Res_driver.parsingEngine + | "" -> ( + match Filename.extension filename with + | ".ml" | ".mli" -> Parser Res_driver_ml_parser.parsingEngine + | ".re" | ".rei" -> Parser Res_driver_reason_binary.parsingEngine + | _ -> Parser Res_driver.parsingEngine) + | origin -> + print_endline + ("-parse needs to be either reasonBinary, ml or res. 
You provided " + ^ origin); + exit 1 + in + let printEngine = + match target with + | "binary" -> Res_driver_binary.printEngine + | "ml" -> Res_driver_ml_parser.printEngine + | "ast" -> Res_ast_debugger.printEngine + | "sexp" -> Res_ast_debugger.sexpPrintEngine + | "comments" -> Res_ast_debugger.commentsPrintEngine + | "res" -> Res_driver.printEngine + | target -> + print_endline + ("-print needs to be either binary, ml, ast, sexp, comments or res. \ + You provided " ^ target); + exit 1 + in + + let forPrinter = + match target with + | ("res" | "sexp") when not typechecker -> true + | _ -> false + in + + let (Parser backend) = parsingEngine in + (* This is the whole purpose of the Color module above *) + Color.setup None; + if processInterface then + let parseResult = backend.parseInterface ~forPrinter ~filename in + if parseResult.invalid then ( + backend.stringOfDiagnostics ~source:parseResult.source + ~filename:parseResult.filename parseResult.diagnostics; + if recover then + printEngine.printInterface ~width ~filename + ~comments:parseResult.comments parseResult.parsetree + else exit 1) + else + let parsetree = + Reactjs_jsx_ppx.rewrite_signature ~jsxVersion ~jsxModule ~jsxMode + parseResult.parsetree + in + printEngine.printInterface ~width ~filename + ~comments:parseResult.comments parsetree + else + let parseResult = backend.parseImplementation ~forPrinter ~filename in + if parseResult.invalid then ( + backend.stringOfDiagnostics ~source:parseResult.source + ~filename:parseResult.filename parseResult.diagnostics; + if recover then + printEngine.printImplementation ~width ~filename + ~comments:parseResult.comments parseResult.parsetree + else exit 1) + else + let parsetree = + Reactjs_jsx_ppx.rewrite_implementation ~jsxVersion ~jsxModule ~jsxMode + parseResult.parsetree + in + printEngine.printImplementation ~width ~filename + ~comments:parseResult.comments parsetree + [@@raises exit] +end + +let () = + if not !Sys.interactive then ( + ResClflags.parse (); + CliArgProcessor.processFile ~isInterface:!ResClflags.interface + ~width:!ResClflags.width ~recover:!ResClflags.recover + ~target:!ResClflags.print ~origin:!ResClflags.origin + ~jsxVersion:!ResClflags.jsxVersion ~jsxModule:!ResClflags.jsxModule + ~jsxMode:!ResClflags.jsxMode ~typechecker:!ResClflags.typechecker + !ResClflags.file) + [@@raises exit] diff --git a/res_syntax/compiler-libs-406/annot.mli b/res_syntax/compiler-libs-406/annot.mli new file mode 100644 index 0000000000..3cae8f2735 --- /dev/null +++ b/res_syntax/compiler-libs-406/annot.mli @@ -0,0 +1,24 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Damien Doligez, projet Gallium, INRIA Rocquencourt *) +(* *) +(* Copyright 2007 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +(* Data types for annotations (Stypes.ml) *) + +type call = Tail | Stack | Inline;; + +type ident = + | Iref_internal of Location.t (* defining occurrence *) + | Iref_external + | Idef of Location.t (* scope *) +;; diff --git a/res_syntax/compiler-libs-406/arg_helper.ml b/res_syntax/compiler-libs-406/arg_helper.ml new file mode 100644 index 0000000000..fa80007ad4 --- /dev/null +++ b/res_syntax/compiler-libs-406/arg_helper.ml @@ -0,0 +1,127 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Pierre Chambart, OCamlPro *) +(* Mark Shinwell and Leo White, Jane Street Europe *) +(* *) +(* Copyright 2015--2016 OCamlPro SAS *) +(* Copyright 2015--2016 Jane Street Group LLC *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +let fatal err = + prerr_endline err; + exit 2 + +module Make (S : sig + module Key : sig + type t + val of_string : string -> t + module Map : Map.S with type key = t + end + + module Value : sig + type t + val of_string : string -> t + end +end) = struct + type parsed = { + base_default : S.Value.t; + base_override : S.Value.t S.Key.Map.t; + user_default : S.Value.t option; + user_override : S.Value.t S.Key.Map.t; + } + + let default v = + { base_default = v; + base_override = S.Key.Map.empty; + user_default = None; + user_override = S.Key.Map.empty; } + + let set_base_default value t = + { t with base_default = value } + + let add_base_override key value t = + { t with base_override = S.Key.Map.add key value t.base_override } + + let reset_base_overrides t = + { t with base_override = S.Key.Map.empty } + + let set_user_default value t = + { t with user_default = Some value } + + let add_user_override key value t = + { t with user_override = S.Key.Map.add key value t.user_override } + + exception Parse_failure of exn + + let parse_exn str ~update = + (* Is the removal of empty chunks really relevant here? *) + (* (It has been added to mimic the old Misc.String.split.) 
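+     As a worked example (key and value strings hypothetical): parsing
+     "50,round2=100" sets the user default to 50 and records an override of
+     100 for the key round2, while empty chunks, as in "50,,round2=100",
+     are dropped before parsing.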
*) + let values = String.split_on_char ',' str |> List.filter ((<>) "") in + let parsed = + List.fold_left (fun acc value -> + match String.index value '=' with + | exception Not_found -> + begin match S.Value.of_string value with + | value -> set_user_default value acc + | exception exn -> raise (Parse_failure exn) + end + | equals -> + let key_value_pair = value in + let length = String.length key_value_pair in + assert (equals >= 0 && equals < length); + if equals = 0 then begin + raise (Parse_failure ( + Failure "Missing key in argument specification")) + end; + let key = + let key = String.sub key_value_pair 0 equals in + try S.Key.of_string key + with exn -> raise (Parse_failure exn) + in + let value = + let value = + String.sub key_value_pair (equals + 1) (length - equals - 1) + in + try S.Value.of_string value + with exn -> raise (Parse_failure exn) + in + add_user_override key value acc) + !update + values + in + update := parsed + + let parse str help_text update = + match parse_exn str ~update with + | () -> () + | exception (Parse_failure exn) -> + fatal (Printf.sprintf "%s: %s" (Printexc.to_string exn) help_text) + + type parse_result = + | Ok + | Parse_failed of exn + + let parse_no_error str update = + match parse_exn str ~update with + | () -> Ok + | exception (Parse_failure exn) -> Parse_failed exn + + let get ~key parsed = + match S.Key.Map.find key parsed.user_override with + | value -> value + | exception Not_found -> + match parsed.user_default with + | Some value -> value + | None -> + match S.Key.Map.find key parsed.base_override with + | value -> value + | exception Not_found -> parsed.base_default + +end diff --git a/res_syntax/compiler-libs-406/arg_helper.mli b/res_syntax/compiler-libs-406/arg_helper.mli new file mode 100644 index 0000000000..fba7aa2188 --- /dev/null +++ b/res_syntax/compiler-libs-406/arg_helper.mli @@ -0,0 +1,63 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Pierre Chambart, OCamlPro *) +(* Mark Shinwell and Leo White, Jane Street Europe *) +(* *) +(* Copyright 2015--2016 OCamlPro SAS *) +(* Copyright 2015--2016 Jane Street Group LLC *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Decipher command line arguments of the form + | =[,...] + (as used for example for the specification of inlining parameters + varying by simplification round). +*) + +module Make (S : sig + module Key : sig + type t + + (** The textual representation of a key must not contain '=' or ','. *) + val of_string : string -> t + + module Map : Map.S with type key = t + end + + module Value : sig + type t + + (** The textual representation of a value must not contain ','. 
*) + val of_string : string -> t + end +end) : sig + type parsed + + val default : S.Value.t -> parsed + + val set_base_default : S.Value.t -> parsed -> parsed + + val add_base_override : S.Key.t -> S.Value.t -> parsed -> parsed + + val reset_base_overrides : parsed -> parsed + + val set_user_default : S.Value.t -> parsed -> parsed + + val add_user_override : S.Key.t -> S.Value.t -> parsed -> parsed + + val parse : string -> string -> parsed ref -> unit + + type parse_result = + | Ok + | Parse_failed of exn + + val parse_no_error : string -> parsed ref -> parse_result + + val get : key:S.Key.t -> parsed -> S.Value.t +end diff --git a/res_syntax/compiler-libs-406/ast_helper.ml b/res_syntax/compiler-libs-406/ast_helper.ml new file mode 100644 index 0000000000..2c28493395 --- /dev/null +++ b/res_syntax/compiler-libs-406/ast_helper.ml @@ -0,0 +1,561 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Alain Frisch, LexiFi *) +(* *) +(* Copyright 2012 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Helpers to produce Parsetree fragments *) + +open Asttypes +open Parsetree +open Docstrings + +type lid = Longident.t loc +type str = string loc +type loc = Location.t +type attrs = attribute list + +let default_loc = ref Location.none + +let with_default_loc l f = + let old = !default_loc in + default_loc := l; + try let r = f () in default_loc := old; r + with exn -> default_loc := old; raise exn + +module Const = struct + let integer ?suffix i = Pconst_integer (i, suffix) + let int ?suffix i = integer ?suffix (string_of_int i) + let int32 ?(suffix='l') i = integer ~suffix (Int32.to_string i) + let int64 ?(suffix='L') i = integer ~suffix (Int64.to_string i) + let nativeint ?(suffix='n') i = integer ~suffix (Nativeint.to_string i) + let float ?suffix f = Pconst_float (f, suffix) + let char c = Pconst_char c + let string ?quotation_delimiter s = Pconst_string (s, quotation_delimiter) +end + +module Typ = struct + let mk ?(loc = !default_loc) ?(attrs = []) d = + {ptyp_desc = d; ptyp_loc = loc; ptyp_attributes = attrs} + let attr d a = {d with ptyp_attributes = d.ptyp_attributes @ [a]} + + let any ?loc ?attrs () = mk ?loc ?attrs Ptyp_any + let var ?loc ?attrs a = mk ?loc ?attrs (Ptyp_var a) + let arrow ?loc ?attrs a b c = mk ?loc ?attrs (Ptyp_arrow (a, b, c)) + let tuple ?loc ?attrs a = mk ?loc ?attrs (Ptyp_tuple a) + let constr ?loc ?attrs a b = mk ?loc ?attrs (Ptyp_constr (a, b)) + let object_ ?loc ?attrs a b = mk ?loc ?attrs (Ptyp_object (a, b)) + let class_ ?loc ?attrs a b = mk ?loc ?attrs (Ptyp_class (a, b)) + let alias ?loc ?attrs a b = mk ?loc ?attrs (Ptyp_alias (a, b)) + let variant ?loc ?attrs a b c = mk ?loc ?attrs (Ptyp_variant (a, b, c)) + let poly ?loc ?attrs a b = mk ?loc ?attrs (Ptyp_poly (a, b)) + let package ?loc ?attrs a b = mk ?loc ?attrs (Ptyp_package (a, b)) + let extension ?loc ?attrs a = mk ?loc ?attrs (Ptyp_extension a) + + let force_poly t = + match t.ptyp_desc with + | Ptyp_poly _ -> t + | _ -> poly ~loc:t.ptyp_loc [] t (* -> ghost? 
*) + + let varify_constructors var_names t = + let check_variable vl loc v = + if List.mem v vl then + raise Syntaxerr.(Error(Variable_in_scope(loc,v))) in + let var_names = List.map (fun v -> v.txt) var_names in + let rec loop t = + let desc = + match t.ptyp_desc with + | Ptyp_any -> Ptyp_any + | Ptyp_var x -> + check_variable var_names t.ptyp_loc x; + Ptyp_var x + | Ptyp_arrow (label,core_type,core_type') -> + Ptyp_arrow(label, loop core_type, loop core_type') + | Ptyp_tuple lst -> Ptyp_tuple (List.map loop lst) + | Ptyp_constr( { txt = Longident.Lident s }, []) + when List.mem s var_names -> + Ptyp_var s + | Ptyp_constr(longident, lst) -> + Ptyp_constr(longident, List.map loop lst) + | Ptyp_object (lst, o) -> + Ptyp_object (List.map loop_object_field lst, o) + | Ptyp_class (longident, lst) -> + Ptyp_class (longident, List.map loop lst) + | Ptyp_alias(core_type, string) -> + check_variable var_names t.ptyp_loc string; + Ptyp_alias(loop core_type, string) + | Ptyp_variant(row_field_list, flag, lbl_lst_option) -> + Ptyp_variant(List.map loop_row_field row_field_list, + flag, lbl_lst_option) + | Ptyp_poly(string_lst, core_type) -> + List.iter (fun v -> + check_variable var_names t.ptyp_loc v.txt) string_lst; + Ptyp_poly(string_lst, loop core_type) + | Ptyp_package(longident,lst) -> + Ptyp_package(longident,List.map (fun (n,typ) -> (n,loop typ) ) lst) + | Ptyp_extension (s, arg) -> + Ptyp_extension (s, arg) + in + {t with ptyp_desc = desc} + and loop_row_field = + function + | Rtag(label,attrs,flag,lst) -> + Rtag(label,attrs,flag,List.map loop lst) + | Rinherit t -> + Rinherit (loop t) + and loop_object_field = + function + | Otag(label, attrs, t) -> + Otag(label, attrs, loop t) + | Oinherit t -> + Oinherit (loop t) + in + loop t + +end + +module Pat = struct + let mk ?(loc = !default_loc) ?(attrs = []) d = + {ppat_desc = d; ppat_loc = loc; ppat_attributes = attrs} + let attr d a = {d with ppat_attributes = d.ppat_attributes @ [a]} + + let any ?loc ?attrs () = mk ?loc ?attrs Ppat_any + let var ?loc ?attrs a = mk ?loc ?attrs (Ppat_var a) + let alias ?loc ?attrs a b = mk ?loc ?attrs (Ppat_alias (a, b)) + let constant ?loc ?attrs a = mk ?loc ?attrs (Ppat_constant a) + let interval ?loc ?attrs a b = mk ?loc ?attrs (Ppat_interval (a, b)) + let tuple ?loc ?attrs a = mk ?loc ?attrs (Ppat_tuple a) + let construct ?loc ?attrs a b = mk ?loc ?attrs (Ppat_construct (a, b)) + let variant ?loc ?attrs a b = mk ?loc ?attrs (Ppat_variant (a, b)) + let record ?loc ?attrs a b = mk ?loc ?attrs (Ppat_record (a, b)) + let array ?loc ?attrs a = mk ?loc ?attrs (Ppat_array a) + let or_ ?loc ?attrs a b = mk ?loc ?attrs (Ppat_or (a, b)) + let constraint_ ?loc ?attrs a b = mk ?loc ?attrs (Ppat_constraint (a, b)) + let type_ ?loc ?attrs a = mk ?loc ?attrs (Ppat_type a) + let lazy_ ?loc ?attrs a = mk ?loc ?attrs (Ppat_lazy a) + let unpack ?loc ?attrs a = mk ?loc ?attrs (Ppat_unpack a) + let open_ ?loc ?attrs a b = mk ?loc ?attrs (Ppat_open (a, b)) + let exception_ ?loc ?attrs a = mk ?loc ?attrs (Ppat_exception a) + let extension ?loc ?attrs a = mk ?loc ?attrs (Ppat_extension a) +end + +module Exp = struct + let mk ?(loc = !default_loc) ?(attrs = []) d = + {pexp_desc = d; pexp_loc = loc; pexp_attributes = attrs} + let attr d a = {d with pexp_attributes = d.pexp_attributes @ [a]} + + let ident ?loc ?attrs a = mk ?loc ?attrs (Pexp_ident a) + let constant ?loc ?attrs a = mk ?loc ?attrs (Pexp_constant a) + let let_ ?loc ?attrs a b c = mk ?loc ?attrs (Pexp_let (a, b, c)) + let fun_ ?loc ?attrs a b c d = mk ?loc ?attrs 
(Pexp_fun (a, b, c, d)) + let function_ ?loc ?attrs a = mk ?loc ?attrs (Pexp_function a) + let apply ?loc ?attrs a b = mk ?loc ?attrs (Pexp_apply (a, b)) + let match_ ?loc ?attrs a b = mk ?loc ?attrs (Pexp_match (a, b)) + let try_ ?loc ?attrs a b = mk ?loc ?attrs (Pexp_try (a, b)) + let tuple ?loc ?attrs a = mk ?loc ?attrs (Pexp_tuple a) + let construct ?loc ?attrs a b = mk ?loc ?attrs (Pexp_construct (a, b)) + let variant ?loc ?attrs a b = mk ?loc ?attrs (Pexp_variant (a, b)) + let record ?loc ?attrs a b = mk ?loc ?attrs (Pexp_record (a, b)) + let field ?loc ?attrs a b = mk ?loc ?attrs (Pexp_field (a, b)) + let setfield ?loc ?attrs a b c = mk ?loc ?attrs (Pexp_setfield (a, b, c)) + let array ?loc ?attrs a = mk ?loc ?attrs (Pexp_array a) + let ifthenelse ?loc ?attrs a b c = mk ?loc ?attrs (Pexp_ifthenelse (a, b, c)) + let sequence ?loc ?attrs a b = mk ?loc ?attrs (Pexp_sequence (a, b)) + let while_ ?loc ?attrs a b = mk ?loc ?attrs (Pexp_while (a, b)) + let for_ ?loc ?attrs a b c d e = mk ?loc ?attrs (Pexp_for (a, b, c, d, e)) + let constraint_ ?loc ?attrs a b = mk ?loc ?attrs (Pexp_constraint (a, b)) + let coerce ?loc ?attrs a b c = mk ?loc ?attrs (Pexp_coerce (a, b, c)) + let send ?loc ?attrs a b = mk ?loc ?attrs (Pexp_send (a, b)) + let new_ ?loc ?attrs a = mk ?loc ?attrs (Pexp_new a) + let setinstvar ?loc ?attrs a b = mk ?loc ?attrs (Pexp_setinstvar (a, b)) + let override ?loc ?attrs a = mk ?loc ?attrs (Pexp_override a) + let letmodule ?loc ?attrs a b c= mk ?loc ?attrs (Pexp_letmodule (a, b, c)) + let letexception ?loc ?attrs a b = mk ?loc ?attrs (Pexp_letexception (a, b)) + let assert_ ?loc ?attrs a = mk ?loc ?attrs (Pexp_assert a) + let lazy_ ?loc ?attrs a = mk ?loc ?attrs (Pexp_lazy a) + let poly ?loc ?attrs a b = mk ?loc ?attrs (Pexp_poly (a, b)) + let object_ ?loc ?attrs a = mk ?loc ?attrs (Pexp_object a) + let newtype ?loc ?attrs a b = mk ?loc ?attrs (Pexp_newtype (a, b)) + let pack ?loc ?attrs a = mk ?loc ?attrs (Pexp_pack a) + let open_ ?loc ?attrs a b c = mk ?loc ?attrs (Pexp_open (a, b, c)) + let extension ?loc ?attrs a = mk ?loc ?attrs (Pexp_extension a) + let unreachable ?loc ?attrs () = mk ?loc ?attrs Pexp_unreachable + + let case lhs ?guard rhs = + { + pc_lhs = lhs; + pc_guard = guard; + pc_rhs = rhs; + } +end + +module Mty = struct + let mk ?(loc = !default_loc) ?(attrs = []) d = + {pmty_desc = d; pmty_loc = loc; pmty_attributes = attrs} + let attr d a = {d with pmty_attributes = d.pmty_attributes @ [a]} + + let ident ?loc ?attrs a = mk ?loc ?attrs (Pmty_ident a) + let alias ?loc ?attrs a = mk ?loc ?attrs (Pmty_alias a) + let signature ?loc ?attrs a = mk ?loc ?attrs (Pmty_signature a) + let functor_ ?loc ?attrs a b c = mk ?loc ?attrs (Pmty_functor (a, b, c)) + let with_ ?loc ?attrs a b = mk ?loc ?attrs (Pmty_with (a, b)) + let typeof_ ?loc ?attrs a = mk ?loc ?attrs (Pmty_typeof a) + let extension ?loc ?attrs a = mk ?loc ?attrs (Pmty_extension a) +end + +module Mod = struct +let mk ?(loc = !default_loc) ?(attrs = []) d = + {pmod_desc = d; pmod_loc = loc; pmod_attributes = attrs} + let attr d a = {d with pmod_attributes = d.pmod_attributes @ [a]} + + let ident ?loc ?attrs x = mk ?loc ?attrs (Pmod_ident x) + let structure ?loc ?attrs x = mk ?loc ?attrs (Pmod_structure x) + let functor_ ?loc ?attrs arg arg_ty body = + mk ?loc ?attrs (Pmod_functor (arg, arg_ty, body)) + let apply ?loc ?attrs m1 m2 = mk ?loc ?attrs (Pmod_apply (m1, m2)) + let constraint_ ?loc ?attrs m mty = mk ?loc ?attrs (Pmod_constraint (m, mty)) + let unpack ?loc ?attrs e = mk ?loc ?attrs (Pmod_unpack e) + 
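+  (* A minimal usage sketch of the helpers in this file (names here are just
+     an example): the structure item [let x = 1] can be built with
+       Str.value Nonrecursive
+         [Vb.mk (Pat.var (Location.mknoloc "x")) (Exp.constant (Const.int 1))]
+     which is the style the JSX ppx above uses to produce Parsetree
+     fragments. *)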
let extension ?loc ?attrs a = mk ?loc ?attrs (Pmod_extension a) +end + +module Sig = struct + let mk ?(loc = !default_loc) d = {psig_desc = d; psig_loc = loc} + + let value ?loc a = mk ?loc (Psig_value a) + let type_ ?loc rec_flag a = mk ?loc (Psig_type (rec_flag, a)) + let type_extension ?loc a = mk ?loc (Psig_typext a) + let exception_ ?loc a = mk ?loc (Psig_exception a) + let module_ ?loc a = mk ?loc (Psig_module a) + let rec_module ?loc a = mk ?loc (Psig_recmodule a) + let modtype ?loc a = mk ?loc (Psig_modtype a) + let open_ ?loc a = mk ?loc (Psig_open a) + let include_ ?loc a = mk ?loc (Psig_include a) + let class_ ?loc a = mk ?loc (Psig_class a) + let class_type ?loc a = mk ?loc (Psig_class_type a) + let extension ?loc ?(attrs = []) a = mk ?loc (Psig_extension (a, attrs)) + let attribute ?loc a = mk ?loc (Psig_attribute a) + let text txt = + let f_txt = List.filter (fun ds -> docstring_body ds <> "") txt in + List.map + (fun ds -> attribute ~loc:(docstring_loc ds) (text_attr ds)) + f_txt +end + +module Str = struct + let mk ?(loc = !default_loc) d = {pstr_desc = d; pstr_loc = loc} + + let eval ?loc ?(attrs = []) a = mk ?loc (Pstr_eval (a, attrs)) + let value ?loc a b = mk ?loc (Pstr_value (a, b)) + let primitive ?loc a = mk ?loc (Pstr_primitive a) + let type_ ?loc rec_flag a = mk ?loc (Pstr_type (rec_flag, a)) + let type_extension ?loc a = mk ?loc (Pstr_typext a) + let exception_ ?loc a = mk ?loc (Pstr_exception a) + let module_ ?loc a = mk ?loc (Pstr_module a) + let rec_module ?loc a = mk ?loc (Pstr_recmodule a) + let modtype ?loc a = mk ?loc (Pstr_modtype a) + let open_ ?loc a = mk ?loc (Pstr_open a) + let class_ ?loc a = mk ?loc (Pstr_class a) + let class_type ?loc a = mk ?loc (Pstr_class_type a) + let include_ ?loc a = mk ?loc (Pstr_include a) + let extension ?loc ?(attrs = []) a = mk ?loc (Pstr_extension (a, attrs)) + let attribute ?loc a = mk ?loc (Pstr_attribute a) + let text txt = + let f_txt = List.filter (fun ds -> docstring_body ds <> "") txt in + List.map + (fun ds -> attribute ~loc:(docstring_loc ds) (text_attr ds)) + f_txt +end + +module Cl = struct + let mk ?(loc = !default_loc) ?(attrs = []) d = + { + pcl_desc = d; + pcl_loc = loc; + pcl_attributes = attrs; + } + let attr d a = {d with pcl_attributes = d.pcl_attributes @ [a]} + + let constr ?loc ?attrs a b = mk ?loc ?attrs (Pcl_constr (a, b)) + let structure ?loc ?attrs a = mk ?loc ?attrs (Pcl_structure a) + let fun_ ?loc ?attrs a b c d = mk ?loc ?attrs (Pcl_fun (a, b, c, d)) + let apply ?loc ?attrs a b = mk ?loc ?attrs (Pcl_apply (a, b)) + let let_ ?loc ?attrs a b c = mk ?loc ?attrs (Pcl_let (a, b, c)) + let constraint_ ?loc ?attrs a b = mk ?loc ?attrs (Pcl_constraint (a, b)) + let extension ?loc ?attrs a = mk ?loc ?attrs (Pcl_extension a) + let open_ ?loc ?attrs a b c = mk ?loc ?attrs (Pcl_open (a, b, c)) +end + +module Cty = struct + let mk ?(loc = !default_loc) ?(attrs = []) d = + { + pcty_desc = d; + pcty_loc = loc; + pcty_attributes = attrs; + } + let attr d a = {d with pcty_attributes = d.pcty_attributes @ [a]} + + let constr ?loc ?attrs a b = mk ?loc ?attrs (Pcty_constr (a, b)) + let signature ?loc ?attrs a = mk ?loc ?attrs (Pcty_signature a) + let arrow ?loc ?attrs a b c = mk ?loc ?attrs (Pcty_arrow (a, b, c)) + let extension ?loc ?attrs a = mk ?loc ?attrs (Pcty_extension a) + let open_ ?loc ?attrs a b c = mk ?loc ?attrs (Pcty_open (a, b, c)) +end + +module Ctf = struct + let mk ?(loc = !default_loc) ?(attrs = []) + ?(docs = empty_docs) d = + { + pctf_desc = d; + pctf_loc = loc; + pctf_attributes = 
add_docs_attrs docs attrs; + } + + let inherit_ ?loc ?attrs a = mk ?loc ?attrs (Pctf_inherit a) + let val_ ?loc ?attrs a b c d = mk ?loc ?attrs (Pctf_val (a, b, c, d)) + let method_ ?loc ?attrs a b c d = mk ?loc ?attrs (Pctf_method (a, b, c, d)) + let constraint_ ?loc ?attrs a b = mk ?loc ?attrs (Pctf_constraint (a, b)) + let extension ?loc ?attrs a = mk ?loc ?attrs (Pctf_extension a) + let attribute ?loc a = mk ?loc (Pctf_attribute a) + let text txt = + let f_txt = List.filter (fun ds -> docstring_body ds <> "") txt in + List.map + (fun ds -> attribute ~loc:(docstring_loc ds) (text_attr ds)) + f_txt + + let attr d a = {d with pctf_attributes = d.pctf_attributes @ [a]} + +end + +module Cf = struct + let mk ?(loc = !default_loc) ?(attrs = []) + ?(docs = empty_docs) d = + { + pcf_desc = d; + pcf_loc = loc; + pcf_attributes = add_docs_attrs docs attrs; + } + + let inherit_ ?loc ?attrs a b c = mk ?loc ?attrs (Pcf_inherit (a, b, c)) + let val_ ?loc ?attrs a b c = mk ?loc ?attrs (Pcf_val (a, b, c)) + let method_ ?loc ?attrs a b c = mk ?loc ?attrs (Pcf_method (a, b, c)) + let constraint_ ?loc ?attrs a b = mk ?loc ?attrs (Pcf_constraint (a, b)) + let initializer_ ?loc ?attrs a = mk ?loc ?attrs (Pcf_initializer a) + let extension ?loc ?attrs a = mk ?loc ?attrs (Pcf_extension a) + let attribute ?loc a = mk ?loc (Pcf_attribute a) + let text txt = + let f_txt = List.filter (fun ds -> docstring_body ds <> "") txt in + List.map + (fun ds -> attribute ~loc:(docstring_loc ds) (text_attr ds)) + f_txt + + let virtual_ ct = Cfk_virtual ct + let concrete o e = Cfk_concrete (o, e) + + let attr d a = {d with pcf_attributes = d.pcf_attributes @ [a]} + +end + +module Val = struct + let mk ?(loc = !default_loc) ?(attrs = []) ?(docs = empty_docs) + ?(prim = []) name typ = + { + pval_name = name; + pval_type = typ; + pval_attributes = add_docs_attrs docs attrs; + pval_loc = loc; + pval_prim = prim; + } +end + +module Md = struct + let mk ?(loc = !default_loc) ?(attrs = []) + ?(docs = empty_docs) ?(text = []) name typ = + { + pmd_name = name; + pmd_type = typ; + pmd_attributes = + add_text_attrs text (add_docs_attrs docs attrs); + pmd_loc = loc; + } +end + +module Mtd = struct + let mk ?(loc = !default_loc) ?(attrs = []) + ?(docs = empty_docs) ?(text = []) ?typ name = + { + pmtd_name = name; + pmtd_type = typ; + pmtd_attributes = + add_text_attrs text (add_docs_attrs docs attrs); + pmtd_loc = loc; + } +end + +module Mb = struct + let mk ?(loc = !default_loc) ?(attrs = []) + ?(docs = empty_docs) ?(text = []) name expr = + { + pmb_name = name; + pmb_expr = expr; + pmb_attributes = + add_text_attrs text (add_docs_attrs docs attrs); + pmb_loc = loc; + } +end + +module Opn = struct + let mk ?(loc = !default_loc) ?(attrs = []) ?(docs = empty_docs) + ?(override = Fresh) lid = + { + popen_lid = lid; + popen_override = override; + popen_loc = loc; + popen_attributes = add_docs_attrs docs attrs; + } +end + +module Incl = struct + let mk ?(loc = !default_loc) ?(attrs = []) ?(docs = empty_docs) mexpr = + { + pincl_mod = mexpr; + pincl_loc = loc; + pincl_attributes = add_docs_attrs docs attrs; + } + +end + +module Vb = struct + let mk ?(loc = !default_loc) ?(attrs = []) ?(docs = empty_docs) + ?(text = []) pat expr = + { + pvb_pat = pat; + pvb_expr = expr; + pvb_attributes = + add_text_attrs text (add_docs_attrs docs attrs); + pvb_loc = loc; + } +end + +module Ci = struct + let mk ?(loc = !default_loc) ?(attrs = []) + ?(docs = empty_docs) ?(text = []) + ?(virt = Concrete) ?(params = []) name expr = + { + pci_virt = virt; + 
pci_params = params; + pci_name = name; + pci_expr = expr; + pci_attributes = + add_text_attrs text (add_docs_attrs docs attrs); + pci_loc = loc; + } +end + +module Type = struct + let mk ?(loc = !default_loc) ?(attrs = []) + ?(docs = empty_docs) ?(text = []) + ?(params = []) + ?(cstrs = []) + ?(kind = Ptype_abstract) + ?(priv = Public) + ?manifest + name = + { + ptype_name = name; + ptype_params = params; + ptype_cstrs = cstrs; + ptype_kind = kind; + ptype_private = priv; + ptype_manifest = manifest; + ptype_attributes = + add_text_attrs text (add_docs_attrs docs attrs); + ptype_loc = loc; + } + + let constructor ?(loc = !default_loc) ?(attrs = []) ?(info = empty_info) + ?(args = Pcstr_tuple []) ?res name = + { + pcd_name = name; + pcd_args = args; + pcd_res = res; + pcd_loc = loc; + pcd_attributes = add_info_attrs info attrs; + } + + let field ?(loc = !default_loc) ?(attrs = []) ?(info = empty_info) + ?(mut = Immutable) name typ = + { + pld_name = name; + pld_mutable = mut; + pld_type = typ; + pld_loc = loc; + pld_attributes = add_info_attrs info attrs; + } + +end + +(** Type extensions *) +module Te = struct + let mk ?(attrs = []) ?(docs = empty_docs) + ?(params = []) ?(priv = Public) path constructors = + { + ptyext_path = path; + ptyext_params = params; + ptyext_constructors = constructors; + ptyext_private = priv; + ptyext_attributes = add_docs_attrs docs attrs; + } + + let constructor ?(loc = !default_loc) ?(attrs = []) + ?(docs = empty_docs) ?(info = empty_info) name kind = + { + pext_name = name; + pext_kind = kind; + pext_loc = loc; + pext_attributes = add_docs_attrs docs (add_info_attrs info attrs); + } + + let decl ?(loc = !default_loc) ?(attrs = []) ?(docs = empty_docs) + ?(info = empty_info) ?(args = Pcstr_tuple []) ?res name = + { + pext_name = name; + pext_kind = Pext_decl(args, res); + pext_loc = loc; + pext_attributes = add_docs_attrs docs (add_info_attrs info attrs); + } + + let rebind ?(loc = !default_loc) ?(attrs = []) + ?(docs = empty_docs) ?(info = empty_info) name lid = + { + pext_name = name; + pext_kind = Pext_rebind lid; + pext_loc = loc; + pext_attributes = add_docs_attrs docs (add_info_attrs info attrs); + } + +end + +module Csig = struct + let mk self fields = + { + pcsig_self = self; + pcsig_fields = fields; + } +end + +module Cstr = struct + let mk self fields = + { + pcstr_self = self; + pcstr_fields = fields; + } +end diff --git a/res_syntax/compiler-libs-406/ast_helper.mli b/res_syntax/compiler-libs-406/ast_helper.mli new file mode 100644 index 0000000000..a35d462a12 --- /dev/null +++ b/res_syntax/compiler-libs-406/ast_helper.mli @@ -0,0 +1,442 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Alain Frisch, LexiFi *) +(* *) +(* Copyright 2012 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Helpers to produce Parsetree fragments *) + +open Asttypes +open Docstrings +open Parsetree + +type lid = Longident.t loc +type str = string loc +type loc = Location.t +type attrs = attribute list + +(** {1 Default locations} *) + +val default_loc: loc ref + (** Default value for all optional location arguments. 
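+      For example (a sketch, with [lid] a placeholder identifier):
+      [Exp.ident lid] without [?loc] is located at [!default_loc], while
+      [with_default_loc loc (fun () -> Exp.ident lid)] builds the same
+      expression located at [loc].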
*) + +val with_default_loc: loc -> (unit -> 'a) -> 'a + (** Set the [default_loc] within the scope of the execution + of the provided function. *) + +(** {1 Constants} *) + +module Const : sig + val char : int -> constant + val string : ?quotation_delimiter:string -> string -> constant + val integer : ?suffix:char -> string -> constant + val int : ?suffix:char -> int -> constant + val int32 : ?suffix:char -> int32 -> constant + val int64 : ?suffix:char -> int64 -> constant + val nativeint : ?suffix:char -> nativeint -> constant + val float : ?suffix:char -> string -> constant +end + +(** {1 Core language} *) + +(** Type expressions *) +module Typ : + sig + val mk: ?loc:loc -> ?attrs:attrs -> core_type_desc -> core_type + val attr: core_type -> attribute -> core_type + + val any: ?loc:loc -> ?attrs:attrs -> unit -> core_type + val var: ?loc:loc -> ?attrs:attrs -> string -> core_type + val arrow: ?loc:loc -> ?attrs:attrs -> arg_label -> core_type -> core_type + -> core_type + val tuple: ?loc:loc -> ?attrs:attrs -> core_type list -> core_type + val constr: ?loc:loc -> ?attrs:attrs -> lid -> core_type list -> core_type + val object_: ?loc:loc -> ?attrs:attrs -> object_field list + -> closed_flag -> core_type + val class_: ?loc:loc -> ?attrs:attrs -> lid -> core_type list -> core_type + val alias: ?loc:loc -> ?attrs:attrs -> core_type -> string -> core_type + val variant: ?loc:loc -> ?attrs:attrs -> row_field list -> closed_flag + -> label list option -> core_type + val poly: ?loc:loc -> ?attrs:attrs -> str list -> core_type -> core_type + val package: ?loc:loc -> ?attrs:attrs -> lid -> (lid * core_type) list + -> core_type + val extension: ?loc:loc -> ?attrs:attrs -> extension -> core_type + + val force_poly: core_type -> core_type + + val varify_constructors: str list -> core_type -> core_type + (** [varify_constructors newtypes te] is type expression [te], of which + any of nullary type constructor [tc] is replaced by type variable of + the same name, if [tc]'s name appears in [newtypes]. + Raise [Syntaxerr.Variable_in_scope] if any type variable inside [te] + appears in [newtypes]. 
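+      For example (a sketch): with [newtypes] containing ["t"], the type
+      [t -> int] is rewritten to ['t -> int].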
+ @since 4.05 + *) + end + +(** Patterns *) +module Pat: + sig + val mk: ?loc:loc -> ?attrs:attrs -> pattern_desc -> pattern + val attr:pattern -> attribute -> pattern + + val any: ?loc:loc -> ?attrs:attrs -> unit -> pattern + val var: ?loc:loc -> ?attrs:attrs -> str -> pattern + val alias: ?loc:loc -> ?attrs:attrs -> pattern -> str -> pattern + val constant: ?loc:loc -> ?attrs:attrs -> constant -> pattern + val interval: ?loc:loc -> ?attrs:attrs -> constant -> constant -> pattern + val tuple: ?loc:loc -> ?attrs:attrs -> pattern list -> pattern + val construct: ?loc:loc -> ?attrs:attrs -> lid -> pattern option -> pattern + val variant: ?loc:loc -> ?attrs:attrs -> label -> pattern option -> pattern + val record: ?loc:loc -> ?attrs:attrs -> (lid * pattern) list -> closed_flag + -> pattern + val array: ?loc:loc -> ?attrs:attrs -> pattern list -> pattern + val or_: ?loc:loc -> ?attrs:attrs -> pattern -> pattern -> pattern + val constraint_: ?loc:loc -> ?attrs:attrs -> pattern -> core_type -> pattern + val type_: ?loc:loc -> ?attrs:attrs -> lid -> pattern + val lazy_: ?loc:loc -> ?attrs:attrs -> pattern -> pattern + val unpack: ?loc:loc -> ?attrs:attrs -> str -> pattern + val open_: ?loc:loc -> ?attrs:attrs -> lid -> pattern -> pattern + val exception_: ?loc:loc -> ?attrs:attrs -> pattern -> pattern + val extension: ?loc:loc -> ?attrs:attrs -> extension -> pattern + end + +(** Expressions *) +module Exp: + sig + val mk: ?loc:loc -> ?attrs:attrs -> expression_desc -> expression + val attr: expression -> attribute -> expression + + val ident: ?loc:loc -> ?attrs:attrs -> lid -> expression + val constant: ?loc:loc -> ?attrs:attrs -> constant -> expression + val let_: ?loc:loc -> ?attrs:attrs -> rec_flag -> value_binding list + -> expression -> expression + val fun_: ?loc:loc -> ?attrs:attrs -> arg_label -> expression option + -> pattern -> expression -> expression + val function_: ?loc:loc -> ?attrs:attrs -> case list -> expression + val apply: ?loc:loc -> ?attrs:attrs -> expression + -> (arg_label * expression) list -> expression + val match_: ?loc:loc -> ?attrs:attrs -> expression -> case list + -> expression + val try_: ?loc:loc -> ?attrs:attrs -> expression -> case list -> expression + val tuple: ?loc:loc -> ?attrs:attrs -> expression list -> expression + val construct: ?loc:loc -> ?attrs:attrs -> lid -> expression option + -> expression + val variant: ?loc:loc -> ?attrs:attrs -> label -> expression option + -> expression + val record: ?loc:loc -> ?attrs:attrs -> (lid * expression) list + -> expression option -> expression + val field: ?loc:loc -> ?attrs:attrs -> expression -> lid -> expression + val setfield: ?loc:loc -> ?attrs:attrs -> expression -> lid -> expression + -> expression + val array: ?loc:loc -> ?attrs:attrs -> expression list -> expression + val ifthenelse: ?loc:loc -> ?attrs:attrs -> expression -> expression + -> expression option -> expression + val sequence: ?loc:loc -> ?attrs:attrs -> expression -> expression + -> expression + val while_: ?loc:loc -> ?attrs:attrs -> expression -> expression + -> expression + val for_: ?loc:loc -> ?attrs:attrs -> pattern -> expression -> expression + -> direction_flag -> expression -> expression + val coerce: ?loc:loc -> ?attrs:attrs -> expression -> core_type option + -> core_type -> expression + val constraint_: ?loc:loc -> ?attrs:attrs -> expression -> core_type + -> expression + val send: ?loc:loc -> ?attrs:attrs -> expression -> str -> expression + val new_: ?loc:loc -> ?attrs:attrs -> lid -> expression + val setinstvar: ?loc:loc -> 
?attrs:attrs -> str -> expression -> expression + val override: ?loc:loc -> ?attrs:attrs -> (str * expression) list + -> expression + val letmodule: ?loc:loc -> ?attrs:attrs -> str -> module_expr -> expression + -> expression + val letexception: + ?loc:loc -> ?attrs:attrs -> extension_constructor -> expression + -> expression + val assert_: ?loc:loc -> ?attrs:attrs -> expression -> expression + val lazy_: ?loc:loc -> ?attrs:attrs -> expression -> expression + val poly: ?loc:loc -> ?attrs:attrs -> expression -> core_type option + -> expression + val object_: ?loc:loc -> ?attrs:attrs -> class_structure -> expression + val newtype: ?loc:loc -> ?attrs:attrs -> str -> expression -> expression + val pack: ?loc:loc -> ?attrs:attrs -> module_expr -> expression + val open_: ?loc:loc -> ?attrs:attrs -> override_flag -> lid -> expression + -> expression + val extension: ?loc:loc -> ?attrs:attrs -> extension -> expression + val unreachable: ?loc:loc -> ?attrs:attrs -> unit -> expression + + val case: pattern -> ?guard:expression -> expression -> case + end + +(** Value declarations *) +module Val: + sig + val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> + ?prim:string list -> str -> core_type -> value_description + end + +(** Type declarations *) +module Type: + sig + val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?text:text -> + ?params:(core_type * variance) list -> + ?cstrs:(core_type * core_type * loc) list -> + ?kind:type_kind -> ?priv:private_flag -> ?manifest:core_type -> str -> + type_declaration + + val constructor: ?loc:loc -> ?attrs:attrs -> ?info:info -> + ?args:constructor_arguments -> ?res:core_type -> str -> + constructor_declaration + val field: ?loc:loc -> ?attrs:attrs -> ?info:info -> + ?mut:mutable_flag -> str -> core_type -> label_declaration + end + +(** Type extensions *) +module Te: + sig + val mk: ?attrs:attrs -> ?docs:docs -> + ?params:(core_type * variance) list -> ?priv:private_flag -> + lid -> extension_constructor list -> type_extension + + val constructor: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?info:info -> + str -> extension_constructor_kind -> extension_constructor + + val decl: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?info:info -> + ?args:constructor_arguments -> ?res:core_type -> str -> + extension_constructor + val rebind: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?info:info -> + str -> lid -> extension_constructor + end + +(** {1 Module language} *) + +(** Module type expressions *) +module Mty: + sig + val mk: ?loc:loc -> ?attrs:attrs -> module_type_desc -> module_type + val attr: module_type -> attribute -> module_type + + val ident: ?loc:loc -> ?attrs:attrs -> lid -> module_type + val alias: ?loc:loc -> ?attrs:attrs -> lid -> module_type + val signature: ?loc:loc -> ?attrs:attrs -> signature -> module_type + val functor_: ?loc:loc -> ?attrs:attrs -> + str -> module_type option -> module_type -> module_type + val with_: ?loc:loc -> ?attrs:attrs -> module_type -> + with_constraint list -> module_type + val typeof_: ?loc:loc -> ?attrs:attrs -> module_expr -> module_type + val extension: ?loc:loc -> ?attrs:attrs -> extension -> module_type + end + +(** Module expressions *) +module Mod: + sig + val mk: ?loc:loc -> ?attrs:attrs -> module_expr_desc -> module_expr + val attr: module_expr -> attribute -> module_expr + + val ident: ?loc:loc -> ?attrs:attrs -> lid -> module_expr + val structure: ?loc:loc -> ?attrs:attrs -> structure -> module_expr + val functor_: ?loc:loc -> ?attrs:attrs -> + str -> module_type option -> module_expr -> module_expr + val apply: 
?loc:loc -> ?attrs:attrs -> module_expr -> module_expr -> + module_expr + val constraint_: ?loc:loc -> ?attrs:attrs -> module_expr -> module_type -> + module_expr + val unpack: ?loc:loc -> ?attrs:attrs -> expression -> module_expr + val extension: ?loc:loc -> ?attrs:attrs -> extension -> module_expr + end + +(** Signature items *) +module Sig: + sig + val mk: ?loc:loc -> signature_item_desc -> signature_item + + val value: ?loc:loc -> value_description -> signature_item + val type_: ?loc:loc -> rec_flag -> type_declaration list -> signature_item + val type_extension: ?loc:loc -> type_extension -> signature_item + val exception_: ?loc:loc -> extension_constructor -> signature_item + val module_: ?loc:loc -> module_declaration -> signature_item + val rec_module: ?loc:loc -> module_declaration list -> signature_item + val modtype: ?loc:loc -> module_type_declaration -> signature_item + val open_: ?loc:loc -> open_description -> signature_item + val include_: ?loc:loc -> include_description -> signature_item + val class_: ?loc:loc -> class_description list -> signature_item + val class_type: ?loc:loc -> class_type_declaration list -> signature_item + val extension: ?loc:loc -> ?attrs:attrs -> extension -> signature_item + val attribute: ?loc:loc -> attribute -> signature_item + val text: text -> signature_item list + end + +(** Structure items *) +module Str: + sig + val mk: ?loc:loc -> structure_item_desc -> structure_item + + val eval: ?loc:loc -> ?attrs:attributes -> expression -> structure_item + val value: ?loc:loc -> rec_flag -> value_binding list -> structure_item + val primitive: ?loc:loc -> value_description -> structure_item + val type_: ?loc:loc -> rec_flag -> type_declaration list -> structure_item + val type_extension: ?loc:loc -> type_extension -> structure_item + val exception_: ?loc:loc -> extension_constructor -> structure_item + val module_: ?loc:loc -> module_binding -> structure_item + val rec_module: ?loc:loc -> module_binding list -> structure_item + val modtype: ?loc:loc -> module_type_declaration -> structure_item + val open_: ?loc:loc -> open_description -> structure_item + val class_: ?loc:loc -> class_declaration list -> structure_item + val class_type: ?loc:loc -> class_type_declaration list -> structure_item + val include_: ?loc:loc -> include_declaration -> structure_item + val extension: ?loc:loc -> ?attrs:attrs -> extension -> structure_item + val attribute: ?loc:loc -> attribute -> structure_item + val text: text -> structure_item list + end + +(** Module declarations *) +module Md: + sig + val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?text:text -> + str -> module_type -> module_declaration + end + +(** Module type declarations *) +module Mtd: + sig + val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?text:text -> + ?typ:module_type -> str -> module_type_declaration + end + +(** Module bindings *) +module Mb: + sig + val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?text:text -> + str -> module_expr -> module_binding + end + +(** Opens *) +module Opn: + sig + val mk: ?loc: loc -> ?attrs:attrs -> ?docs:docs -> + ?override:override_flag -> lid -> open_description + end + +(** Includes *) +module Incl: + sig + val mk: ?loc: loc -> ?attrs:attrs -> ?docs:docs -> 'a -> 'a include_infos + end + +(** Value bindings *) +module Vb: + sig + val mk: ?loc: loc -> ?attrs:attrs -> ?docs:docs -> ?text:text -> + pattern -> expression -> value_binding + end + + +(** {1 Class language} *) + +(** Class type expressions *) +module Cty: + sig + val mk: ?loc:loc -> ?attrs:attrs 
-> class_type_desc -> class_type + val attr: class_type -> attribute -> class_type + + val constr: ?loc:loc -> ?attrs:attrs -> lid -> core_type list -> class_type + val signature: ?loc:loc -> ?attrs:attrs -> class_signature -> class_type + val arrow: ?loc:loc -> ?attrs:attrs -> arg_label -> core_type -> + class_type -> class_type + val extension: ?loc:loc -> ?attrs:attrs -> extension -> class_type + val open_: ?loc:loc -> ?attrs:attrs -> override_flag -> lid -> class_type + -> class_type + end + +(** Class type fields *) +module Ctf: + sig + val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> + class_type_field_desc -> class_type_field + val attr: class_type_field -> attribute -> class_type_field + + val inherit_: ?loc:loc -> ?attrs:attrs -> class_type -> class_type_field + val val_: ?loc:loc -> ?attrs:attrs -> str -> mutable_flag -> + virtual_flag -> core_type -> class_type_field + val method_: ?loc:loc -> ?attrs:attrs -> str -> private_flag -> + virtual_flag -> core_type -> class_type_field + val constraint_: ?loc:loc -> ?attrs:attrs -> core_type -> core_type -> + class_type_field + val extension: ?loc:loc -> ?attrs:attrs -> extension -> class_type_field + val attribute: ?loc:loc -> attribute -> class_type_field + val text: text -> class_type_field list + end + +(** Class expressions *) +module Cl: + sig + val mk: ?loc:loc -> ?attrs:attrs -> class_expr_desc -> class_expr + val attr: class_expr -> attribute -> class_expr + + val constr: ?loc:loc -> ?attrs:attrs -> lid -> core_type list -> class_expr + val structure: ?loc:loc -> ?attrs:attrs -> class_structure -> class_expr + val fun_: ?loc:loc -> ?attrs:attrs -> arg_label -> expression option -> + pattern -> class_expr -> class_expr + val apply: ?loc:loc -> ?attrs:attrs -> class_expr -> + (arg_label * expression) list -> class_expr + val let_: ?loc:loc -> ?attrs:attrs -> rec_flag -> value_binding list -> + class_expr -> class_expr + val constraint_: ?loc:loc -> ?attrs:attrs -> class_expr -> class_type -> + class_expr + val extension: ?loc:loc -> ?attrs:attrs -> extension -> class_expr + val open_: ?loc:loc -> ?attrs:attrs -> override_flag -> lid -> class_expr + -> class_expr + end + +(** Class fields *) +module Cf: + sig + val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> class_field_desc -> + class_field + val attr: class_field -> attribute -> class_field + + val inherit_: ?loc:loc -> ?attrs:attrs -> override_flag -> class_expr -> + str option -> class_field + val val_: ?loc:loc -> ?attrs:attrs -> str -> mutable_flag -> + class_field_kind -> class_field + val method_: ?loc:loc -> ?attrs:attrs -> str -> private_flag -> + class_field_kind -> class_field + val constraint_: ?loc:loc -> ?attrs:attrs -> core_type -> core_type -> + class_field + val initializer_: ?loc:loc -> ?attrs:attrs -> expression -> class_field + val extension: ?loc:loc -> ?attrs:attrs -> extension -> class_field + val attribute: ?loc:loc -> attribute -> class_field + val text: text -> class_field list + + val virtual_: core_type -> class_field_kind + val concrete: override_flag -> expression -> class_field_kind + + end + +(** Classes *) +module Ci: + sig + val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?text:text -> + ?virt:virtual_flag -> ?params:(core_type * variance) list -> + str -> 'a -> 'a class_infos + end + +(** Class signatures *) +module Csig: + sig + val mk: core_type -> class_type_field list -> class_signature + end + +(** Class structures *) +module Cstr: + sig + val mk: pattern -> class_field list -> class_structure + end diff --git 
a/res_syntax/compiler-libs-406/ast_iterator.ml b/res_syntax/compiler-libs-406/ast_iterator.ml new file mode 100755 index 0000000000..aa601e6419 --- /dev/null +++ b/res_syntax/compiler-libs-406/ast_iterator.ml @@ -0,0 +1,606 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Nicolas Ojeda Bar, LexiFi *) +(* *) +(* Copyright 2012 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* A generic Parsetree mapping class *) + +(* +[@@@ocaml.warning "+9"] + (* Ensure that record patterns don't miss any field. *) +*) + + +open Parsetree +open Location + +type iterator = { + attribute: iterator -> attribute -> unit; + attributes: iterator -> attribute list -> unit; + case: iterator -> case -> unit; + cases: iterator -> case list -> unit; + class_declaration: iterator -> class_declaration -> unit; + class_description: iterator -> class_description -> unit; + class_expr: iterator -> class_expr -> unit; + class_field: iterator -> class_field -> unit; + class_signature: iterator -> class_signature -> unit; + class_structure: iterator -> class_structure -> unit; + class_type: iterator -> class_type -> unit; + class_type_declaration: iterator -> class_type_declaration -> unit; + class_type_field: iterator -> class_type_field -> unit; + constructor_declaration: iterator -> constructor_declaration -> unit; + expr: iterator -> expression -> unit; + extension: iterator -> extension -> unit; + extension_constructor: iterator -> extension_constructor -> unit; + include_declaration: iterator -> include_declaration -> unit; + include_description: iterator -> include_description -> unit; + label_declaration: iterator -> label_declaration -> unit; + location: iterator -> Location.t -> unit; + module_binding: iterator -> module_binding -> unit; + module_declaration: iterator -> module_declaration -> unit; + module_expr: iterator -> module_expr -> unit; + module_type: iterator -> module_type -> unit; + module_type_declaration: iterator -> module_type_declaration -> unit; + open_description: iterator -> open_description -> unit; + pat: iterator -> pattern -> unit; + payload: iterator -> payload -> unit; + signature: iterator -> signature -> unit; + signature_item: iterator -> signature_item -> unit; + structure: iterator -> structure -> unit; + structure_item: iterator -> structure_item -> unit; + typ: iterator -> core_type -> unit; + type_declaration: iterator -> type_declaration -> unit; + type_extension: iterator -> type_extension -> unit; + type_kind: iterator -> type_kind -> unit; + value_binding: iterator -> value_binding -> unit; + value_description: iterator -> value_description -> unit; + with_constraint: iterator -> with_constraint -> unit; +} +(** A [iterator] record implements one "method" per syntactic category, + using an open recursion style: each method takes as its first + argument the iterator to be applied to children in the syntax + tree. 
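+
+    For example, a sketch of a hypothetical [collect_idents] helper that
+    records every identifier occurring in a structure: it overrides only
+    the [expr] field and delegates the rest of the traversal to
+    [default_iterator] (defined below).
+    {[
+      let collect_idents structure =
+        let acc = ref [] in
+        let it =
+          { default_iterator with
+            expr = (fun it e ->
+              (match e.pexp_desc with
+               | Pexp_ident {txt; _} -> acc := txt :: !acc
+               | _ -> ());
+              (* keep visiting the sub-expressions *)
+              default_iterator.expr it e) }
+        in
+        it.structure it structure;
+        List.rev !acc
+    ]}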
*) + +let iter_fst f (x, _) = f x +let iter_snd f (_, y) = f y +let iter_tuple f1 f2 (x, y) = f1 x; f2 y +let iter_tuple3 f1 f2 f3 (x, y, z) = f1 x; f2 y; f3 z +let iter_opt f = function None -> () | Some x -> f x + +let iter_loc sub {loc; txt = _} = sub.location sub loc + +module T = struct + (* Type expressions for the core language *) + + let row_field sub = function + | Rtag (_, attrs, _, tl) -> + sub.attributes sub attrs; List.iter (sub.typ sub) tl + | Rinherit t -> sub.typ sub t + + let object_field sub = function + | Otag (_, attrs, t) -> + sub.attributes sub attrs; sub.typ sub t + | Oinherit t -> sub.typ sub t + + let iter sub {ptyp_desc = desc; ptyp_loc = loc; ptyp_attributes = attrs} = + sub.location sub loc; + sub.attributes sub attrs; + match desc with + | Ptyp_any + | Ptyp_var _ -> () + | Ptyp_arrow (_lab, t1, t2) -> + sub.typ sub t1; sub.typ sub t2 + | Ptyp_tuple tyl -> List.iter (sub.typ sub) tyl + | Ptyp_constr (lid, tl) -> + iter_loc sub lid; List.iter (sub.typ sub) tl + | Ptyp_object (ol, _o) -> + List.iter (object_field sub) ol + | Ptyp_class (lid, tl) -> + iter_loc sub lid; List.iter (sub.typ sub) tl + | Ptyp_alias (t, _) -> sub.typ sub t + | Ptyp_variant (rl, _b, _ll) -> + List.iter (row_field sub) rl + | Ptyp_poly (_, t) -> sub.typ sub t + | Ptyp_package (lid, l) -> + iter_loc sub lid; + List.iter (iter_tuple (iter_loc sub) (sub.typ sub)) l + | Ptyp_extension x -> sub.extension sub x + + let iter_type_declaration sub + {ptype_name; ptype_params; ptype_cstrs; + ptype_kind; + ptype_private = _; + ptype_manifest; + ptype_attributes; + ptype_loc} = + iter_loc sub ptype_name; + List.iter (iter_fst (sub.typ sub)) ptype_params; + List.iter + (iter_tuple3 (sub.typ sub) (sub.typ sub) (sub.location sub)) + ptype_cstrs; + sub.type_kind sub ptype_kind; + iter_opt (sub.typ sub) ptype_manifest; + sub.location sub ptype_loc; + sub.attributes sub ptype_attributes + + let iter_type_kind sub = function + | Ptype_abstract -> () + | Ptype_variant l -> + List.iter (sub.constructor_declaration sub) l + | Ptype_record l -> List.iter (sub.label_declaration sub) l + | Ptype_open -> () + + let iter_constructor_arguments sub = function + | Pcstr_tuple l -> List.iter (sub.typ sub) l + | Pcstr_record l -> + List.iter (sub.label_declaration sub) l + + let iter_type_extension sub + {ptyext_path; ptyext_params; + ptyext_constructors; + ptyext_private = _; + ptyext_attributes} = + iter_loc sub ptyext_path; + List.iter (sub.extension_constructor sub) ptyext_constructors; + List.iter (iter_fst (sub.typ sub)) ptyext_params; + sub.attributes sub ptyext_attributes + + let iter_extension_constructor_kind sub = function + Pext_decl(ctl, cto) -> + iter_constructor_arguments sub ctl; iter_opt (sub.typ sub) cto + | Pext_rebind li -> + iter_loc sub li + + let iter_extension_constructor sub + {pext_name; + pext_kind; + pext_loc; + pext_attributes} = + iter_loc sub pext_name; + iter_extension_constructor_kind sub pext_kind; + sub.location sub pext_loc; + sub.attributes sub pext_attributes + +end + +module CT = struct + (* Type expressions for the class language *) + + let iter sub {pcty_loc = loc; pcty_desc = desc; pcty_attributes = attrs} = + sub.location sub loc; + sub.attributes sub attrs; + match desc with + | Pcty_constr (lid, tys) -> + iter_loc sub lid; List.iter (sub.typ sub) tys + | Pcty_signature x -> sub.class_signature sub x + | Pcty_arrow (_lab, t, ct) -> + sub.typ sub t; sub.class_type sub ct + | Pcty_extension x -> sub.extension sub x + | Pcty_open (_ovf, lid, e) -> + iter_loc sub lid; 
sub.class_type sub e + + let iter_field sub {pctf_desc = desc; pctf_loc = loc; pctf_attributes = attrs} + = + sub.location sub loc; + sub.attributes sub attrs; + match desc with + | Pctf_inherit ct -> sub.class_type sub ct + | Pctf_val (_s, _m, _v, t) -> sub.typ sub t + | Pctf_method (_s, _p, _v, t) -> sub.typ sub t + | Pctf_constraint (t1, t2) -> + sub.typ sub t1; sub.typ sub t2 + | Pctf_attribute x -> sub.attribute sub x + | Pctf_extension x -> sub.extension sub x + + let iter_signature sub {pcsig_self; pcsig_fields} = + sub.typ sub pcsig_self; + List.iter (sub.class_type_field sub) pcsig_fields +end + +module MT = struct + (* Type expressions for the module language *) + + let iter sub {pmty_desc = desc; pmty_loc = loc; pmty_attributes = attrs} = + sub.location sub loc; + sub.attributes sub attrs; + match desc with + | Pmty_ident s -> iter_loc sub s + | Pmty_alias s -> iter_loc sub s + | Pmty_signature sg -> sub.signature sub sg + | Pmty_functor (s, mt1, mt2) -> + iter_loc sub s; + iter_opt (sub.module_type sub) mt1; + sub.module_type sub mt2 + | Pmty_with (mt, l) -> + sub.module_type sub mt; + List.iter (sub.with_constraint sub) l + | Pmty_typeof me -> sub.module_expr sub me + | Pmty_extension x -> sub.extension sub x + + let iter_with_constraint sub = function + | Pwith_type (lid, d) -> + iter_loc sub lid; sub.type_declaration sub d + | Pwith_module (lid, lid2) -> + iter_loc sub lid; iter_loc sub lid2 + | Pwith_typesubst (lid, d) -> + iter_loc sub lid; sub.type_declaration sub d + | Pwith_modsubst (s, lid) -> + iter_loc sub s; iter_loc sub lid + + let iter_signature_item sub {psig_desc = desc; psig_loc = loc} = + sub.location sub loc; + match desc with + | Psig_value vd -> sub.value_description sub vd + | Psig_type (_rf, l) -> List.iter (sub.type_declaration sub) l + | Psig_typext te -> sub.type_extension sub te + | Psig_exception ed -> sub.extension_constructor sub ed + | Psig_module x -> sub.module_declaration sub x + | Psig_recmodule l -> + List.iter (sub.module_declaration sub) l + | Psig_modtype x -> sub.module_type_declaration sub x + | Psig_open x -> sub.open_description sub x + | Psig_include x -> sub.include_description sub x + | Psig_class l -> List.iter (sub.class_description sub) l + | Psig_class_type l -> + List.iter (sub.class_type_declaration sub) l + | Psig_extension (x, attrs) -> + sub.extension sub x; sub.attributes sub attrs + | Psig_attribute x -> sub.attribute sub x +end + + +module M = struct + (* Value expressions for the module language *) + + let iter sub {pmod_loc = loc; pmod_desc = desc; pmod_attributes = attrs} = + sub.location sub loc; + sub.attributes sub attrs; + match desc with + | Pmod_ident x -> iter_loc sub x + | Pmod_structure str -> sub.structure sub str + | Pmod_functor (arg, arg_ty, body) -> + iter_loc sub arg; + iter_opt (sub.module_type sub) arg_ty; + sub.module_expr sub body + | Pmod_apply (m1, m2) -> + sub.module_expr sub m1; sub.module_expr sub m2 + | Pmod_constraint (m, mty) -> + sub.module_expr sub m; sub.module_type sub mty + | Pmod_unpack e -> sub.expr sub e + | Pmod_extension x -> sub.extension sub x + + let iter_structure_item sub {pstr_loc = loc; pstr_desc = desc} = + sub.location sub loc; + match desc with + | Pstr_eval (x, attrs) -> + sub.expr sub x; sub.attributes sub attrs + | Pstr_value (_r, vbs) -> List.iter (sub.value_binding sub) vbs + | Pstr_primitive vd -> sub.value_description sub vd + | Pstr_type (_rf, l) -> List.iter (sub.type_declaration sub) l + | Pstr_typext te -> sub.type_extension sub te + | Pstr_exception ed -> 
sub.extension_constructor sub ed + | Pstr_module x -> sub.module_binding sub x + | Pstr_recmodule l -> List.iter (sub.module_binding sub) l + | Pstr_modtype x -> sub.module_type_declaration sub x + | Pstr_open x -> sub.open_description sub x + | Pstr_class l -> List.iter (sub.class_declaration sub) l + | Pstr_class_type l -> + List.iter (sub.class_type_declaration sub) l + | Pstr_include x -> sub.include_declaration sub x + | Pstr_extension (x, attrs) -> + sub.extension sub x; sub.attributes sub attrs + | Pstr_attribute x -> sub.attribute sub x +end + +module E = struct + (* Value expressions for the core language *) + + let iter sub {pexp_loc = loc; pexp_desc = desc; pexp_attributes = attrs} = + sub.location sub loc; + sub.attributes sub attrs; + match desc with + | Pexp_ident x -> iter_loc sub x + | Pexp_constant _ -> () + | Pexp_let (_r, vbs, e) -> + List.iter (sub.value_binding sub) vbs; + sub.expr sub e + | Pexp_fun (_lab, def, p, e) -> + iter_opt (sub.expr sub) def; + sub.pat sub p; + sub.expr sub e + | Pexp_function pel -> sub.cases sub pel + | Pexp_apply (e, l) -> + sub.expr sub e; List.iter (iter_snd (sub.expr sub)) l + | Pexp_match (e, pel) -> + sub.expr sub e; sub.cases sub pel + | Pexp_try (e, pel) -> sub.expr sub e; sub.cases sub pel + | Pexp_tuple el -> List.iter (sub.expr sub) el + | Pexp_construct (lid, arg) -> + iter_loc sub lid; iter_opt (sub.expr sub) arg + | Pexp_variant (_lab, eo) -> + iter_opt (sub.expr sub) eo + | Pexp_record (l, eo) -> + List.iter (iter_tuple (iter_loc sub) (sub.expr sub)) l; + iter_opt (sub.expr sub) eo + | Pexp_field (e, lid) -> + sub.expr sub e; iter_loc sub lid + | Pexp_setfield (e1, lid, e2) -> + sub.expr sub e1; iter_loc sub lid; + sub.expr sub e2 + | Pexp_array el -> List.iter (sub.expr sub) el + | Pexp_ifthenelse (e1, e2, e3) -> + sub.expr sub e1; sub.expr sub e2; + iter_opt (sub.expr sub) e3 + | Pexp_sequence (e1, e2) -> + sub.expr sub e1; sub.expr sub e2 + | Pexp_while (e1, e2) -> + sub.expr sub e1; sub.expr sub e2 + | Pexp_for (p, e1, e2, _d, e3) -> + sub.pat sub p; sub.expr sub e1; sub.expr sub e2; + sub.expr sub e3 + | Pexp_coerce (e, t1, t2) -> + sub.expr sub e; iter_opt (sub.typ sub) t1; + sub.typ sub t2 + | Pexp_constraint (e, t) -> + sub.expr sub e; sub.typ sub t + | Pexp_send (e, _s) -> sub.expr sub e + | Pexp_new lid -> iter_loc sub lid + | Pexp_setinstvar (s, e) -> + iter_loc sub s; sub.expr sub e + | Pexp_override sel -> + List.iter (iter_tuple (iter_loc sub) (sub.expr sub)) sel + | Pexp_letmodule (s, me, e) -> + iter_loc sub s; sub.module_expr sub me; + sub.expr sub e + | Pexp_letexception (cd, e) -> + sub.extension_constructor sub cd; + sub.expr sub e + | Pexp_assert e -> sub.expr sub e + | Pexp_lazy e -> sub.expr sub e + | Pexp_poly (e, t) -> + sub.expr sub e; iter_opt (sub.typ sub) t + | Pexp_object cls -> sub.class_structure sub cls + | Pexp_newtype (_s, e) -> sub.expr sub e + | Pexp_pack me -> sub.module_expr sub me + | Pexp_open (_ovf, lid, e) -> + iter_loc sub lid; sub.expr sub e + | Pexp_extension x -> sub.extension sub x + | Pexp_unreachable -> () +end + +module P = struct + (* Patterns *) + + let iter sub {ppat_desc = desc; ppat_loc = loc; ppat_attributes = attrs} = + sub.location sub loc; + sub.attributes sub attrs; + match desc with + | Ppat_any -> () + | Ppat_var s -> iter_loc sub s + | Ppat_alias (p, s) -> sub.pat sub p; iter_loc sub s + | Ppat_constant _ -> () + | Ppat_interval _ -> () + | Ppat_tuple pl -> List.iter (sub.pat sub) pl + | Ppat_construct (l, p) -> + iter_loc sub l; iter_opt (sub.pat sub) p + | 
Ppat_variant (_l, p) -> iter_opt (sub.pat sub) p + | Ppat_record (lpl, _cf) -> + List.iter (iter_tuple (iter_loc sub) (sub.pat sub)) lpl + | Ppat_array pl -> List.iter (sub.pat sub) pl + | Ppat_or (p1, p2) -> sub.pat sub p1; sub.pat sub p2 + | Ppat_constraint (p, t) -> + sub.pat sub p; sub.typ sub t + | Ppat_type s -> iter_loc sub s + | Ppat_lazy p -> sub.pat sub p + | Ppat_unpack s -> iter_loc sub s + | Ppat_exception p -> sub.pat sub p + | Ppat_extension x -> sub.extension sub x + | Ppat_open (lid, p) -> + iter_loc sub lid; sub.pat sub p + +end + +module CE = struct + (* Value expressions for the class language *) + + let iter sub {pcl_loc = loc; pcl_desc = desc; pcl_attributes = attrs} = + sub.location sub loc; + sub.attributes sub attrs; + match desc with + | Pcl_constr (lid, tys) -> + iter_loc sub lid; List.iter (sub.typ sub) tys + | Pcl_structure s -> + sub.class_structure sub s + | Pcl_fun (_lab, e, p, ce) -> + iter_opt (sub.expr sub) e; + sub.pat sub p; + sub.class_expr sub ce + | Pcl_apply (ce, l) -> + sub.class_expr sub ce; + List.iter (iter_snd (sub.expr sub)) l + | Pcl_let (_r, vbs, ce) -> + List.iter (sub.value_binding sub) vbs; + sub.class_expr sub ce + | Pcl_constraint (ce, ct) -> + sub.class_expr sub ce; sub.class_type sub ct + | Pcl_extension x -> sub.extension sub x + | Pcl_open (_ovf, lid, e) -> + iter_loc sub lid; sub.class_expr sub e + + let iter_kind sub = function + | Cfk_concrete (_o, e) -> sub.expr sub e + | Cfk_virtual t -> sub.typ sub t + + let iter_field sub {pcf_desc = desc; pcf_loc = loc; pcf_attributes = attrs} = + sub.location sub loc; + sub.attributes sub attrs; + match desc with + | Pcf_inherit (_o, ce, _s) -> sub.class_expr sub ce + | Pcf_val (s, _m, k) -> iter_loc sub s; iter_kind sub k + | Pcf_method (s, _p, k) -> + iter_loc sub s; iter_kind sub k + | Pcf_constraint (t1, t2) -> + sub.typ sub t1; sub.typ sub t2 + | Pcf_initializer e -> sub.expr sub e + | Pcf_attribute x -> sub.attribute sub x + | Pcf_extension x -> sub.extension sub x + + let iter_structure sub {pcstr_self; pcstr_fields} = + sub.pat sub pcstr_self; + List.iter (sub.class_field sub) pcstr_fields + + let class_infos sub f {pci_virt = _; pci_params = pl; pci_name; pci_expr; + pci_loc; pci_attributes} = + List.iter (iter_fst (sub.typ sub)) pl; + iter_loc sub pci_name; + f pci_expr; + sub.location sub pci_loc; + sub.attributes sub pci_attributes +end + +(* Now, a generic AST mapper, to be extended to cover all kinds and + cases of the OCaml grammar. The default behavior of the mapper is + the identity. 
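+   (For this iterator variant, "identity" means that each default method
+   simply visits every child node and performs no action of its own.)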
*) + +let default_iterator = + { + structure = (fun this l -> List.iter (this.structure_item this) l); + structure_item = M.iter_structure_item; + module_expr = M.iter; + signature = (fun this l -> List.iter (this.signature_item this) l); + signature_item = MT.iter_signature_item; + module_type = MT.iter; + with_constraint = MT.iter_with_constraint; + class_declaration = + (fun this -> CE.class_infos this (this.class_expr this)); + class_expr = CE.iter; + class_field = CE.iter_field; + class_structure = CE.iter_structure; + class_type = CT.iter; + class_type_field = CT.iter_field; + class_signature = CT.iter_signature; + class_type_declaration = + (fun this -> CE.class_infos this (this.class_type this)); + class_description = + (fun this -> CE.class_infos this (this.class_type this)); + type_declaration = T.iter_type_declaration; + type_kind = T.iter_type_kind; + typ = T.iter; + type_extension = T.iter_type_extension; + extension_constructor = T.iter_extension_constructor; + value_description = + (fun this {pval_name; pval_type; pval_prim = _; pval_loc; + pval_attributes} -> + iter_loc this pval_name; + this.typ this pval_type; + this.attributes this pval_attributes; + this.location this pval_loc + ); + + pat = P.iter; + expr = E.iter; + + module_declaration = + (fun this {pmd_name; pmd_type; pmd_attributes; pmd_loc} -> + iter_loc this pmd_name; + this.module_type this pmd_type; + this.attributes this pmd_attributes; + this.location this pmd_loc + ); + + module_type_declaration = + (fun this {pmtd_name; pmtd_type; pmtd_attributes; pmtd_loc} -> + iter_loc this pmtd_name; + iter_opt (this.module_type this) pmtd_type; + this.attributes this pmtd_attributes; + this.location this pmtd_loc + ); + + module_binding = + (fun this {pmb_name; pmb_expr; pmb_attributes; pmb_loc} -> + iter_loc this pmb_name; this.module_expr this pmb_expr; + this.attributes this pmb_attributes; + this.location this pmb_loc + ); + + + open_description = + (fun this {popen_lid; popen_override = _; popen_attributes; popen_loc} -> + iter_loc this popen_lid; + this.location this popen_loc; + this.attributes this popen_attributes + ); + + + include_description = + (fun this {pincl_mod; pincl_attributes; pincl_loc} -> + this.module_type this pincl_mod; + this.location this pincl_loc; + this.attributes this pincl_attributes + ); + + include_declaration = + (fun this {pincl_mod; pincl_attributes; pincl_loc} -> + this.module_expr this pincl_mod; + this.location this pincl_loc; + this.attributes this pincl_attributes + ); + + + value_binding = + (fun this {pvb_pat; pvb_expr; pvb_attributes; pvb_loc} -> + this.pat this pvb_pat; + this.expr this pvb_expr; + this.location this pvb_loc; + this.attributes this pvb_attributes + ); + + + constructor_declaration = + (fun this {pcd_name; pcd_args; pcd_res; pcd_loc; pcd_attributes} -> + iter_loc this pcd_name; + T.iter_constructor_arguments this pcd_args; + iter_opt (this.typ this) pcd_res; + this.location this pcd_loc; + this.attributes this pcd_attributes + ); + + label_declaration = + (fun this {pld_name; pld_type; pld_loc; pld_mutable = _; pld_attributes}-> + iter_loc this pld_name; + this.typ this pld_type; + this.location this pld_loc; + this.attributes this pld_attributes + ); + + cases = (fun this l -> List.iter (this.case this) l); + case = + (fun this {pc_lhs; pc_guard; pc_rhs} -> + this.pat this pc_lhs; + iter_opt (this.expr this) pc_guard; + this.expr this pc_rhs + ); + + location = (fun _this _l -> ()); + + extension = (fun this (s, e) -> iter_loc this s; this.payload this e); 
+ attribute = (fun this (s, e) -> iter_loc this s; this.payload this e); + attributes = (fun this l -> List.iter (this.attribute this) l); + payload = + (fun this -> function + | PStr x -> this.structure this x + | PSig x -> this.signature this x + | PTyp x -> this.typ this x + | PPat (x, g) -> this.pat this x; iter_opt (this.expr this) g + ); + } diff --git a/res_syntax/compiler-libs-406/ast_iterator.mli b/res_syntax/compiler-libs-406/ast_iterator.mli new file mode 100755 index 0000000000..bd8e081687 --- /dev/null +++ b/res_syntax/compiler-libs-406/ast_iterator.mli @@ -0,0 +1,72 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Nicolas Ojeda Bar, LexiFi *) +(* *) +(* Copyright 2012 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** {!iterator} allows to implement AST inspection using open recursion. A + typical mapper would be based on {!default_iterator}, a trivial iterator, + and will fall back on it for handling the syntax it does not modify. *) + +open Parsetree + +(** {1 A generic Parsetree iterator} *) + +type iterator = { + attribute: iterator -> attribute -> unit; + attributes: iterator -> attribute list -> unit; + case: iterator -> case -> unit; + cases: iterator -> case list -> unit; + class_declaration: iterator -> class_declaration -> unit; + class_description: iterator -> class_description -> unit; + class_expr: iterator -> class_expr -> unit; + class_field: iterator -> class_field -> unit; + class_signature: iterator -> class_signature -> unit; + class_structure: iterator -> class_structure -> unit; + class_type: iterator -> class_type -> unit; + class_type_declaration: iterator -> class_type_declaration -> unit; + class_type_field: iterator -> class_type_field -> unit; + constructor_declaration: iterator -> constructor_declaration -> unit; + expr: iterator -> expression -> unit; + extension: iterator -> extension -> unit; + extension_constructor: iterator -> extension_constructor -> unit; + include_declaration: iterator -> include_declaration -> unit; + include_description: iterator -> include_description -> unit; + label_declaration: iterator -> label_declaration -> unit; + location: iterator -> Location.t -> unit; + module_binding: iterator -> module_binding -> unit; + module_declaration: iterator -> module_declaration -> unit; + module_expr: iterator -> module_expr -> unit; + module_type: iterator -> module_type -> unit; + module_type_declaration: iterator -> module_type_declaration -> unit; + open_description: iterator -> open_description -> unit; + pat: iterator -> pattern -> unit; + payload: iterator -> payload -> unit; + signature: iterator -> signature -> unit; + signature_item: iterator -> signature_item -> unit; + structure: iterator -> structure -> unit; + structure_item: iterator -> structure_item -> unit; + typ: iterator -> core_type -> unit; + type_declaration: iterator -> type_declaration -> unit; + type_extension: iterator -> type_extension -> unit; + type_kind: iterator -> type_kind -> unit; + value_binding: iterator -> value_binding -> unit; + value_description: iterator -> value_description -> unit; + with_constraint: iterator -> with_constraint -> unit; +} +(** A 
[iterator] record implements one "method" per syntactic category, + using an open recursion style: each method takes as its first + argument the iterator to be applied to children in the syntax + tree. *) + +val default_iterator: iterator +(** A default iterator, which implements a "do not do anything" mapping. *) diff --git a/res_syntax/compiler-libs-406/ast_mapper.ml b/res_syntax/compiler-libs-406/ast_mapper.ml new file mode 100644 index 0000000000..783d0e2eea --- /dev/null +++ b/res_syntax/compiler-libs-406/ast_mapper.ml @@ -0,0 +1,944 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Alain Frisch, LexiFi *) +(* *) +(* Copyright 2012 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* A generic Parsetree mapping class *) + +(* +[@@@ocaml.warning "+9"] + (* Ensure that record patterns don't miss any field. *) +*) + + +open Parsetree +open Ast_helper +open Location + +type mapper = { + attribute: mapper -> attribute -> attribute; + attributes: mapper -> attribute list -> attribute list; + case: mapper -> case -> case; + cases: mapper -> case list -> case list; + class_declaration: mapper -> class_declaration -> class_declaration; + class_description: mapper -> class_description -> class_description; + class_expr: mapper -> class_expr -> class_expr; + class_field: mapper -> class_field -> class_field; + class_signature: mapper -> class_signature -> class_signature; + class_structure: mapper -> class_structure -> class_structure; + class_type: mapper -> class_type -> class_type; + class_type_declaration: mapper -> class_type_declaration + -> class_type_declaration; + class_type_field: mapper -> class_type_field -> class_type_field; + constructor_declaration: mapper -> constructor_declaration + -> constructor_declaration; + expr: mapper -> expression -> expression; + extension: mapper -> extension -> extension; + extension_constructor: mapper -> extension_constructor + -> extension_constructor; + include_declaration: mapper -> include_declaration -> include_declaration; + include_description: mapper -> include_description -> include_description; + label_declaration: mapper -> label_declaration -> label_declaration; + location: mapper -> Location.t -> Location.t; + module_binding: mapper -> module_binding -> module_binding; + module_declaration: mapper -> module_declaration -> module_declaration; + module_expr: mapper -> module_expr -> module_expr; + module_type: mapper -> module_type -> module_type; + module_type_declaration: mapper -> module_type_declaration + -> module_type_declaration; + open_description: mapper -> open_description -> open_description; + pat: mapper -> pattern -> pattern; + payload: mapper -> payload -> payload; + signature: mapper -> signature -> signature; + signature_item: mapper -> signature_item -> signature_item; + structure: mapper -> structure -> structure; + structure_item: mapper -> structure_item -> structure_item; + typ: mapper -> core_type -> core_type; + type_declaration: mapper -> type_declaration -> type_declaration; + type_extension: mapper -> type_extension -> type_extension; + type_kind: mapper -> type_kind -> type_kind; + value_binding: mapper -> value_binding -> 
value_binding; + value_description: mapper -> value_description -> value_description; + with_constraint: mapper -> with_constraint -> with_constraint; +} + +let map_fst f (x, y) = (f x, y) +let map_snd f (x, y) = (x, f y) +let map_tuple f1 f2 (x, y) = (f1 x, f2 y) +let map_tuple3 f1 f2 f3 (x, y, z) = (f1 x, f2 y, f3 z) +let map_opt f = function None -> None | Some x -> Some (f x) + +let map_loc sub {loc; txt} = {loc = sub.location sub loc; txt} + +module T = struct + (* Type expressions for the core language *) + + let row_field sub = function + | Rtag (l, attrs, b, tl) -> + Rtag (map_loc sub l, sub.attributes sub attrs, + b, List.map (sub.typ sub) tl) + | Rinherit t -> Rinherit (sub.typ sub t) + + let object_field sub = function + | Otag (l, attrs, t) -> + Otag (map_loc sub l, sub.attributes sub attrs, sub.typ sub t) + | Oinherit t -> Oinherit (sub.typ sub t) + + let map sub {ptyp_desc = desc; ptyp_loc = loc; ptyp_attributes = attrs} = + let open Typ in + let loc = sub.location sub loc in + let attrs = sub.attributes sub attrs in + match desc with + | Ptyp_any -> any ~loc ~attrs () + | Ptyp_var s -> var ~loc ~attrs s + | Ptyp_arrow (lab, t1, t2) -> + arrow ~loc ~attrs lab (sub.typ sub t1) (sub.typ sub t2) + | Ptyp_tuple tyl -> tuple ~loc ~attrs (List.map (sub.typ sub) tyl) + | Ptyp_constr (lid, tl) -> + constr ~loc ~attrs (map_loc sub lid) (List.map (sub.typ sub) tl) + | Ptyp_object (l, o) -> + object_ ~loc ~attrs (List.map (object_field sub) l) o + | Ptyp_class (lid, tl) -> + class_ ~loc ~attrs (map_loc sub lid) (List.map (sub.typ sub) tl) + | Ptyp_alias (t, s) -> alias ~loc ~attrs (sub.typ sub t) s + | Ptyp_variant (rl, b, ll) -> + variant ~loc ~attrs (List.map (row_field sub) rl) b ll + | Ptyp_poly (sl, t) -> poly ~loc ~attrs + (List.map (map_loc sub) sl) (sub.typ sub t) + | Ptyp_package (lid, l) -> + package ~loc ~attrs (map_loc sub lid) + (List.map (map_tuple (map_loc sub) (sub.typ sub)) l) + | Ptyp_extension x -> extension ~loc ~attrs (sub.extension sub x) + + let map_type_declaration sub + {ptype_name; ptype_params; ptype_cstrs; + ptype_kind; + ptype_private; + ptype_manifest; + ptype_attributes; + ptype_loc} = + Type.mk (map_loc sub ptype_name) + ~params:(List.map (map_fst (sub.typ sub)) ptype_params) + ~priv:ptype_private + ~cstrs:(List.map + (map_tuple3 (sub.typ sub) (sub.typ sub) (sub.location sub)) + ptype_cstrs) + ~kind:(sub.type_kind sub ptype_kind) + ?manifest:(map_opt (sub.typ sub) ptype_manifest) + ~loc:(sub.location sub ptype_loc) + ~attrs:(sub.attributes sub ptype_attributes) + + let map_type_kind sub = function + | Ptype_abstract -> Ptype_abstract + | Ptype_variant l -> + Ptype_variant (List.map (sub.constructor_declaration sub) l) + | Ptype_record l -> Ptype_record (List.map (sub.label_declaration sub) l) + | Ptype_open -> Ptype_open + + let map_constructor_arguments sub = function + | Pcstr_tuple l -> Pcstr_tuple (List.map (sub.typ sub) l) + | Pcstr_record l -> + Pcstr_record (List.map (sub.label_declaration sub) l) + + let map_type_extension sub + {ptyext_path; ptyext_params; + ptyext_constructors; + ptyext_private; + ptyext_attributes} = + Te.mk + (map_loc sub ptyext_path) + (List.map (sub.extension_constructor sub) ptyext_constructors) + ~params:(List.map (map_fst (sub.typ sub)) ptyext_params) + ~priv:ptyext_private + ~attrs:(sub.attributes sub ptyext_attributes) + + let map_extension_constructor_kind sub = function + Pext_decl(ctl, cto) -> + Pext_decl(map_constructor_arguments sub ctl, map_opt (sub.typ sub) cto) + | Pext_rebind li -> + Pext_rebind (map_loc sub 
li) + + let map_extension_constructor sub + {pext_name; + pext_kind; + pext_loc; + pext_attributes} = + Te.constructor + (map_loc sub pext_name) + (map_extension_constructor_kind sub pext_kind) + ~loc:(sub.location sub pext_loc) + ~attrs:(sub.attributes sub pext_attributes) + +end + +module CT = struct + (* Type expressions for the class language *) + + let map sub {pcty_loc = loc; pcty_desc = desc; pcty_attributes = attrs} = + let open Cty in + let loc = sub.location sub loc in + let attrs = sub.attributes sub attrs in + match desc with + | Pcty_constr (lid, tys) -> + constr ~loc ~attrs (map_loc sub lid) (List.map (sub.typ sub) tys) + | Pcty_signature x -> signature ~loc ~attrs (sub.class_signature sub x) + | Pcty_arrow (lab, t, ct) -> + arrow ~loc ~attrs lab (sub.typ sub t) (sub.class_type sub ct) + | Pcty_extension x -> extension ~loc ~attrs (sub.extension sub x) + | Pcty_open (ovf, lid, ct) -> + open_ ~loc ~attrs ovf (map_loc sub lid) (sub.class_type sub ct) + + let map_field sub {pctf_desc = desc; pctf_loc = loc; pctf_attributes = attrs} + = + let open Ctf in + let loc = sub.location sub loc in + let attrs = sub.attributes sub attrs in + match desc with + | Pctf_inherit ct -> inherit_ ~loc ~attrs (sub.class_type sub ct) + | Pctf_val (s, m, v, t) -> + val_ ~loc ~attrs (map_loc sub s) m v (sub.typ sub t) + | Pctf_method (s, p, v, t) -> + method_ ~loc ~attrs (map_loc sub s) p v (sub.typ sub t) + | Pctf_constraint (t1, t2) -> + constraint_ ~loc ~attrs (sub.typ sub t1) (sub.typ sub t2) + | Pctf_attribute x -> attribute ~loc (sub.attribute sub x) + | Pctf_extension x -> extension ~loc ~attrs (sub.extension sub x) + + let map_signature sub {pcsig_self; pcsig_fields} = + Csig.mk + (sub.typ sub pcsig_self) + (List.map (sub.class_type_field sub) pcsig_fields) +end + +module MT = struct + (* Type expressions for the module language *) + + let map sub {pmty_desc = desc; pmty_loc = loc; pmty_attributes = attrs} = + let open Mty in + let loc = sub.location sub loc in + let attrs = sub.attributes sub attrs in + match desc with + | Pmty_ident s -> ident ~loc ~attrs (map_loc sub s) + | Pmty_alias s -> alias ~loc ~attrs (map_loc sub s) + | Pmty_signature sg -> signature ~loc ~attrs (sub.signature sub sg) + | Pmty_functor (s, mt1, mt2) -> + functor_ ~loc ~attrs (map_loc sub s) + (Misc.may_map (sub.module_type sub) mt1) + (sub.module_type sub mt2) + | Pmty_with (mt, l) -> + with_ ~loc ~attrs (sub.module_type sub mt) + (List.map (sub.with_constraint sub) l) + | Pmty_typeof me -> typeof_ ~loc ~attrs (sub.module_expr sub me) + | Pmty_extension x -> extension ~loc ~attrs (sub.extension sub x) + + let map_with_constraint sub = function + | Pwith_type (lid, d) -> + Pwith_type (map_loc sub lid, sub.type_declaration sub d) + | Pwith_module (lid, lid2) -> + Pwith_module (map_loc sub lid, map_loc sub lid2) + | Pwith_typesubst (lid, d) -> + Pwith_typesubst (map_loc sub lid, sub.type_declaration sub d) + | Pwith_modsubst (s, lid) -> + Pwith_modsubst (map_loc sub s, map_loc sub lid) + + let map_signature_item sub {psig_desc = desc; psig_loc = loc} = + let open Sig in + let loc = sub.location sub loc in + match desc with + | Psig_value vd -> value ~loc (sub.value_description sub vd) + | Psig_type (rf, l) -> type_ ~loc rf (List.map (sub.type_declaration sub) l) + | Psig_typext te -> type_extension ~loc (sub.type_extension sub te) + | Psig_exception ed -> exception_ ~loc (sub.extension_constructor sub ed) + | Psig_module x -> module_ ~loc (sub.module_declaration sub x) + | Psig_recmodule l -> + rec_module ~loc (List.map 
(sub.module_declaration sub) l) + | Psig_modtype x -> modtype ~loc (sub.module_type_declaration sub x) + | Psig_open x -> open_ ~loc (sub.open_description sub x) + | Psig_include x -> include_ ~loc (sub.include_description sub x) + | Psig_class l -> class_ ~loc (List.map (sub.class_description sub) l) + | Psig_class_type l -> + class_type ~loc (List.map (sub.class_type_declaration sub) l) + | Psig_extension (x, attrs) -> + extension ~loc (sub.extension sub x) ~attrs:(sub.attributes sub attrs) + | Psig_attribute x -> attribute ~loc (sub.attribute sub x) +end + + +module M = struct + (* Value expressions for the module language *) + + let map sub {pmod_loc = loc; pmod_desc = desc; pmod_attributes = attrs} = + let open Mod in + let loc = sub.location sub loc in + let attrs = sub.attributes sub attrs in + match desc with + | Pmod_ident x -> ident ~loc ~attrs (map_loc sub x) + | Pmod_structure str -> structure ~loc ~attrs (sub.structure sub str) + | Pmod_functor (arg, arg_ty, body) -> + functor_ ~loc ~attrs (map_loc sub arg) + (Misc.may_map (sub.module_type sub) arg_ty) + (sub.module_expr sub body) + | Pmod_apply (m1, m2) -> + apply ~loc ~attrs (sub.module_expr sub m1) (sub.module_expr sub m2) + | Pmod_constraint (m, mty) -> + constraint_ ~loc ~attrs (sub.module_expr sub m) + (sub.module_type sub mty) + | Pmod_unpack e -> unpack ~loc ~attrs (sub.expr sub e) + | Pmod_extension x -> extension ~loc ~attrs (sub.extension sub x) + + let map_structure_item sub {pstr_loc = loc; pstr_desc = desc} = + let open Str in + let loc = sub.location sub loc in + match desc with + | Pstr_eval (x, attrs) -> + eval ~loc ~attrs:(sub.attributes sub attrs) (sub.expr sub x) + | Pstr_value (r, vbs) -> value ~loc r (List.map (sub.value_binding sub) vbs) + | Pstr_primitive vd -> primitive ~loc (sub.value_description sub vd) + | Pstr_type (rf, l) -> type_ ~loc rf (List.map (sub.type_declaration sub) l) + | Pstr_typext te -> type_extension ~loc (sub.type_extension sub te) + | Pstr_exception ed -> exception_ ~loc (sub.extension_constructor sub ed) + | Pstr_module x -> module_ ~loc (sub.module_binding sub x) + | Pstr_recmodule l -> rec_module ~loc (List.map (sub.module_binding sub) l) + | Pstr_modtype x -> modtype ~loc (sub.module_type_declaration sub x) + | Pstr_open x -> open_ ~loc (sub.open_description sub x) + | Pstr_class l -> class_ ~loc (List.map (sub.class_declaration sub) l) + | Pstr_class_type l -> + class_type ~loc (List.map (sub.class_type_declaration sub) l) + | Pstr_include x -> include_ ~loc (sub.include_declaration sub x) + | Pstr_extension (x, attrs) -> + extension ~loc (sub.extension sub x) ~attrs:(sub.attributes sub attrs) + | Pstr_attribute x -> attribute ~loc (sub.attribute sub x) +end + +module E = struct + (* Value expressions for the core language *) + + let map sub {pexp_loc = loc; pexp_desc = desc; pexp_attributes = attrs} = + let open Exp in + let loc = sub.location sub loc in + let attrs = sub.attributes sub attrs in + match desc with + | Pexp_ident x -> ident ~loc ~attrs (map_loc sub x) + | Pexp_constant x -> constant ~loc ~attrs x + | Pexp_let (r, vbs, e) -> + let_ ~loc ~attrs r (List.map (sub.value_binding sub) vbs) + (sub.expr sub e) + | Pexp_fun (lab, def, p, e) -> + fun_ ~loc ~attrs lab (map_opt (sub.expr sub) def) (sub.pat sub p) + (sub.expr sub e) + | Pexp_function pel -> function_ ~loc ~attrs (sub.cases sub pel) + | Pexp_apply (e, l) -> + apply ~loc ~attrs (sub.expr sub e) (List.map (map_snd (sub.expr sub)) l) + | Pexp_match (e, pel) -> + match_ ~loc ~attrs (sub.expr sub e) (sub.cases sub 
pel) + | Pexp_try (e, pel) -> try_ ~loc ~attrs (sub.expr sub e) (sub.cases sub pel) + | Pexp_tuple el -> tuple ~loc ~attrs (List.map (sub.expr sub) el) + | Pexp_construct (lid, arg) -> + construct ~loc ~attrs (map_loc sub lid) (map_opt (sub.expr sub) arg) + | Pexp_variant (lab, eo) -> + variant ~loc ~attrs lab (map_opt (sub.expr sub) eo) + | Pexp_record (l, eo) -> + record ~loc ~attrs (List.map (map_tuple (map_loc sub) (sub.expr sub)) l) + (map_opt (sub.expr sub) eo) + | Pexp_field (e, lid) -> + field ~loc ~attrs (sub.expr sub e) (map_loc sub lid) + | Pexp_setfield (e1, lid, e2) -> + setfield ~loc ~attrs (sub.expr sub e1) (map_loc sub lid) + (sub.expr sub e2) + | Pexp_array el -> array ~loc ~attrs (List.map (sub.expr sub) el) + | Pexp_ifthenelse (e1, e2, e3) -> + ifthenelse ~loc ~attrs (sub.expr sub e1) (sub.expr sub e2) + (map_opt (sub.expr sub) e3) + | Pexp_sequence (e1, e2) -> + sequence ~loc ~attrs (sub.expr sub e1) (sub.expr sub e2) + | Pexp_while (e1, e2) -> + while_ ~loc ~attrs (sub.expr sub e1) (sub.expr sub e2) + | Pexp_for (p, e1, e2, d, e3) -> + for_ ~loc ~attrs (sub.pat sub p) (sub.expr sub e1) (sub.expr sub e2) d + (sub.expr sub e3) + | Pexp_coerce (e, t1, t2) -> + coerce ~loc ~attrs (sub.expr sub e) (map_opt (sub.typ sub) t1) + (sub.typ sub t2) + | Pexp_constraint (e, t) -> + constraint_ ~loc ~attrs (sub.expr sub e) (sub.typ sub t) + | Pexp_send (e, s) -> + send ~loc ~attrs (sub.expr sub e) (map_loc sub s) + | Pexp_new lid -> new_ ~loc ~attrs (map_loc sub lid) + | Pexp_setinstvar (s, e) -> + setinstvar ~loc ~attrs (map_loc sub s) (sub.expr sub e) + | Pexp_override sel -> + override ~loc ~attrs + (List.map (map_tuple (map_loc sub) (sub.expr sub)) sel) + | Pexp_letmodule (s, me, e) -> + letmodule ~loc ~attrs (map_loc sub s) (sub.module_expr sub me) + (sub.expr sub e) + | Pexp_letexception (cd, e) -> + letexception ~loc ~attrs + (sub.extension_constructor sub cd) + (sub.expr sub e) + | Pexp_assert e -> assert_ ~loc ~attrs (sub.expr sub e) + | Pexp_lazy e -> lazy_ ~loc ~attrs (sub.expr sub e) + | Pexp_poly (e, t) -> + poly ~loc ~attrs (sub.expr sub e) (map_opt (sub.typ sub) t) + | Pexp_object cls -> object_ ~loc ~attrs (sub.class_structure sub cls) + | Pexp_newtype (s, e) -> + newtype ~loc ~attrs (map_loc sub s) (sub.expr sub e) + | Pexp_pack me -> pack ~loc ~attrs (sub.module_expr sub me) + | Pexp_open (ovf, lid, e) -> + open_ ~loc ~attrs ovf (map_loc sub lid) (sub.expr sub e) + | Pexp_extension x -> extension ~loc ~attrs (sub.extension sub x) + | Pexp_unreachable -> unreachable ~loc ~attrs () +end + +module P = struct + (* Patterns *) + + let map sub {ppat_desc = desc; ppat_loc = loc; ppat_attributes = attrs} = + let open Pat in + let loc = sub.location sub loc in + let attrs = sub.attributes sub attrs in + match desc with + | Ppat_any -> any ~loc ~attrs () + | Ppat_var s -> var ~loc ~attrs (map_loc sub s) + | Ppat_alias (p, s) -> alias ~loc ~attrs (sub.pat sub p) (map_loc sub s) + | Ppat_constant c -> constant ~loc ~attrs c + | Ppat_interval (c1, c2) -> interval ~loc ~attrs c1 c2 + | Ppat_tuple pl -> tuple ~loc ~attrs (List.map (sub.pat sub) pl) + | Ppat_construct (l, p) -> + construct ~loc ~attrs (map_loc sub l) (map_opt (sub.pat sub) p) + | Ppat_variant (l, p) -> variant ~loc ~attrs l (map_opt (sub.pat sub) p) + | Ppat_record (lpl, cf) -> + record ~loc ~attrs + (List.map (map_tuple (map_loc sub) (sub.pat sub)) lpl) cf + | Ppat_array pl -> array ~loc ~attrs (List.map (sub.pat sub) pl) + | Ppat_or (p1, p2) -> or_ ~loc ~attrs (sub.pat sub p1) (sub.pat sub p2) + | 
Ppat_constraint (p, t) -> + constraint_ ~loc ~attrs (sub.pat sub p) (sub.typ sub t) + | Ppat_type s -> type_ ~loc ~attrs (map_loc sub s) + | Ppat_lazy p -> lazy_ ~loc ~attrs (sub.pat sub p) + | Ppat_unpack s -> unpack ~loc ~attrs (map_loc sub s) + | Ppat_open (lid,p) -> open_ ~loc ~attrs (map_loc sub lid) (sub.pat sub p) + | Ppat_exception p -> exception_ ~loc ~attrs (sub.pat sub p) + | Ppat_extension x -> extension ~loc ~attrs (sub.extension sub x) +end + +module CE = struct + (* Value expressions for the class language *) + + let map sub {pcl_loc = loc; pcl_desc = desc; pcl_attributes = attrs} = + let open Cl in + let loc = sub.location sub loc in + let attrs = sub.attributes sub attrs in + match desc with + | Pcl_constr (lid, tys) -> + constr ~loc ~attrs (map_loc sub lid) (List.map (sub.typ sub) tys) + | Pcl_structure s -> + structure ~loc ~attrs (sub.class_structure sub s) + | Pcl_fun (lab, e, p, ce) -> + fun_ ~loc ~attrs lab + (map_opt (sub.expr sub) e) + (sub.pat sub p) + (sub.class_expr sub ce) + | Pcl_apply (ce, l) -> + apply ~loc ~attrs (sub.class_expr sub ce) + (List.map (map_snd (sub.expr sub)) l) + | Pcl_let (r, vbs, ce) -> + let_ ~loc ~attrs r (List.map (sub.value_binding sub) vbs) + (sub.class_expr sub ce) + | Pcl_constraint (ce, ct) -> + constraint_ ~loc ~attrs (sub.class_expr sub ce) (sub.class_type sub ct) + | Pcl_extension x -> extension ~loc ~attrs (sub.extension sub x) + | Pcl_open (ovf, lid, ce) -> + open_ ~loc ~attrs ovf (map_loc sub lid) (sub.class_expr sub ce) + + let map_kind sub = function + | Cfk_concrete (o, e) -> Cfk_concrete (o, sub.expr sub e) + | Cfk_virtual t -> Cfk_virtual (sub.typ sub t) + + let map_field sub {pcf_desc = desc; pcf_loc = loc; pcf_attributes = attrs} = + let open Cf in + let loc = sub.location sub loc in + let attrs = sub.attributes sub attrs in + match desc with + | Pcf_inherit (o, ce, s) -> + inherit_ ~loc ~attrs o (sub.class_expr sub ce) + (map_opt (map_loc sub) s) + | Pcf_val (s, m, k) -> val_ ~loc ~attrs (map_loc sub s) m (map_kind sub k) + | Pcf_method (s, p, k) -> + method_ ~loc ~attrs (map_loc sub s) p (map_kind sub k) + | Pcf_constraint (t1, t2) -> + constraint_ ~loc ~attrs (sub.typ sub t1) (sub.typ sub t2) + | Pcf_initializer e -> initializer_ ~loc ~attrs (sub.expr sub e) + | Pcf_attribute x -> attribute ~loc (sub.attribute sub x) + | Pcf_extension x -> extension ~loc ~attrs (sub.extension sub x) + + let map_structure sub {pcstr_self; pcstr_fields} = + { + pcstr_self = sub.pat sub pcstr_self; + pcstr_fields = List.map (sub.class_field sub) pcstr_fields; + } + + let class_infos sub f {pci_virt; pci_params = pl; pci_name; pci_expr; + pci_loc; pci_attributes} = + Ci.mk + ~virt:pci_virt + ~params:(List.map (map_fst (sub.typ sub)) pl) + (map_loc sub pci_name) + (f pci_expr) + ~loc:(sub.location sub pci_loc) + ~attrs:(sub.attributes sub pci_attributes) +end + +(* Now, a generic AST mapper, to be extended to cover all kinds and + cases of the OCaml grammar. The default behavior of the mapper is + the identity. 
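+
+   For example, a ppx-style rewriter typically overrides a single field
+   and delegates everything else to [default_mapper] (defined below).
+   A sketch, using a hypothetical [%hello] extension that gets replaced
+   by a string constant:
+   {[
+     let hello_mapper =
+       { default_mapper with
+         expr = (fun mapper e ->
+           match e.pexp_desc with
+           | Pexp_extension ({txt = "hello"; _}, PStr []) ->
+             Exp.constant ~loc:e.pexp_loc (Pconst_string ("Hello", None))
+           | _ -> default_mapper.expr mapper e) }
+   ]}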
*) + +let default_mapper = + { + structure = (fun this l -> List.map (this.structure_item this) l); + structure_item = M.map_structure_item; + module_expr = M.map; + signature = (fun this l -> List.map (this.signature_item this) l); + signature_item = MT.map_signature_item; + module_type = MT.map; + with_constraint = MT.map_with_constraint; + class_declaration = + (fun this -> CE.class_infos this (this.class_expr this)); + class_expr = CE.map; + class_field = CE.map_field; + class_structure = CE.map_structure; + class_type = CT.map; + class_type_field = CT.map_field; + class_signature = CT.map_signature; + class_type_declaration = + (fun this -> CE.class_infos this (this.class_type this)); + class_description = + (fun this -> CE.class_infos this (this.class_type this)); + type_declaration = T.map_type_declaration; + type_kind = T.map_type_kind; + typ = T.map; + type_extension = T.map_type_extension; + extension_constructor = T.map_extension_constructor; + value_description = + (fun this {pval_name; pval_type; pval_prim; pval_loc; + pval_attributes} -> + Val.mk + (map_loc this pval_name) + (this.typ this pval_type) + ~attrs:(this.attributes this pval_attributes) + ~loc:(this.location this pval_loc) + ~prim:pval_prim + ); + + pat = P.map; + expr = E.map; + + module_declaration = + (fun this {pmd_name; pmd_type; pmd_attributes; pmd_loc} -> + Md.mk + (map_loc this pmd_name) + (this.module_type this pmd_type) + ~attrs:(this.attributes this pmd_attributes) + ~loc:(this.location this pmd_loc) + ); + + module_type_declaration = + (fun this {pmtd_name; pmtd_type; pmtd_attributes; pmtd_loc} -> + Mtd.mk + (map_loc this pmtd_name) + ?typ:(map_opt (this.module_type this) pmtd_type) + ~attrs:(this.attributes this pmtd_attributes) + ~loc:(this.location this pmtd_loc) + ); + + module_binding = + (fun this {pmb_name; pmb_expr; pmb_attributes; pmb_loc} -> + Mb.mk (map_loc this pmb_name) (this.module_expr this pmb_expr) + ~attrs:(this.attributes this pmb_attributes) + ~loc:(this.location this pmb_loc) + ); + + + open_description = + (fun this {popen_lid; popen_override; popen_attributes; popen_loc} -> + Opn.mk (map_loc this popen_lid) + ~override:popen_override + ~loc:(this.location this popen_loc) + ~attrs:(this.attributes this popen_attributes) + ); + + + include_description = + (fun this {pincl_mod; pincl_attributes; pincl_loc} -> + Incl.mk (this.module_type this pincl_mod) + ~loc:(this.location this pincl_loc) + ~attrs:(this.attributes this pincl_attributes) + ); + + include_declaration = + (fun this {pincl_mod; pincl_attributes; pincl_loc} -> + Incl.mk (this.module_expr this pincl_mod) + ~loc:(this.location this pincl_loc) + ~attrs:(this.attributes this pincl_attributes) + ); + + + value_binding = + (fun this {pvb_pat; pvb_expr; pvb_attributes; pvb_loc} -> + Vb.mk + (this.pat this pvb_pat) + (this.expr this pvb_expr) + ~loc:(this.location this pvb_loc) + ~attrs:(this.attributes this pvb_attributes) + ); + + + constructor_declaration = + (fun this {pcd_name; pcd_args; pcd_res; pcd_loc; pcd_attributes} -> + Type.constructor + (map_loc this pcd_name) + ~args:(T.map_constructor_arguments this pcd_args) + ?res:(map_opt (this.typ this) pcd_res) + ~loc:(this.location this pcd_loc) + ~attrs:(this.attributes this pcd_attributes) + ); + + label_declaration = + (fun this {pld_name; pld_type; pld_loc; pld_mutable; pld_attributes} -> + Type.field + (map_loc this pld_name) + (this.typ this pld_type) + ~mut:pld_mutable + ~loc:(this.location this pld_loc) + ~attrs:(this.attributes this pld_attributes) + ); + + cases = 
(fun this l -> List.map (this.case this) l); + case = + (fun this {pc_lhs; pc_guard; pc_rhs} -> + { + pc_lhs = this.pat this pc_lhs; + pc_guard = map_opt (this.expr this) pc_guard; + pc_rhs = this.expr this pc_rhs; + } + ); + + + + location = (fun _this l -> l); + + extension = (fun this (s, e) -> (map_loc this s, this.payload this e)); + attribute = (fun this (s, e) -> (map_loc this s, this.payload this e)); + attributes = (fun this l -> List.map (this.attribute this) l); + payload = + (fun this -> function + | PStr x -> PStr (this.structure this x) + | PSig x -> PSig (this.signature this x) + | PTyp x -> PTyp (this.typ this x) + | PPat (x, g) -> PPat (this.pat this x, map_opt (this.expr this) g) + ); + } + +let rec extension_of_error {loc; msg; if_highlight; sub} = + { loc; txt = "ocaml.error" }, + PStr ([Str.eval (Exp.constant (Pconst_string (msg, None))); + Str.eval (Exp.constant (Pconst_string (if_highlight, None)))] @ + (List.map (fun ext -> Str.extension (extension_of_error ext)) sub)) + +let attribute_of_warning loc s = + { loc; txt = "ocaml.ppwarning" }, + PStr ([Str.eval ~loc (Exp.constant (Pconst_string (s, None)))]) + +module StringMap = Map.Make(struct + type t = string + let compare = compare +end) + +let cookies = ref StringMap.empty + +let get_cookie k = + try Some (StringMap.find k !cookies) + with Not_found -> None + +let set_cookie k v = + cookies := StringMap.add k v !cookies + +let tool_name_ref = ref "_none_" + +let tool_name () = !tool_name_ref + + +module PpxContext = struct + open Longident + open Asttypes + open Ast_helper + + let lid name = { txt = Lident name; loc = Location.none } + + let make_string x = Exp.constant (Pconst_string (x, None)) + + let make_bool x = + if x + then Exp.construct (lid "true") None + else Exp.construct (lid "false") None + + let rec make_list f lst = + match lst with + | x :: rest -> + Exp.construct (lid "::") (Some (Exp.tuple [f x; make_list f rest])) + | [] -> + Exp.construct (lid "[]") None + + let make_pair f1 f2 (x1, x2) = + Exp.tuple [f1 x1; f2 x2] + + let make_option f opt = + match opt with + | Some x -> Exp.construct (lid "Some") (Some (f x)) + | None -> Exp.construct (lid "None") None + + let get_cookies () = + lid "cookies", + make_list (make_pair make_string (fun x -> x)) + (StringMap.bindings !cookies) + + let mk fields = + { txt = "ocaml.ppx.context"; loc = Location.none }, + Parsetree.PStr [Str.eval (Exp.record fields None)] + + let make ~tool_name () = + let fields = + [ + lid "tool_name", make_string tool_name; + lid "include_dirs", make_list make_string !Clflags.include_dirs; + lid "load_path", make_list make_string !Config.load_path; + lid "open_modules", make_list make_string !Clflags.open_modules; + lid "for_package", make_option make_string !Clflags.for_package; + lid "debug", make_bool !Clflags.debug; + lid "use_threads", make_bool !Clflags.use_threads; + lid "use_vmthreads", make_bool !Clflags.use_vmthreads; + get_cookies () + ] + in + mk fields + + let get_fields = function + | PStr [{pstr_desc = Pstr_eval + ({ pexp_desc = Pexp_record (fields, None) }, [])}] -> + fields + | _ -> + raise_errorf "Internal error: invalid [@@@ocaml.ppx.context] syntax" + + let restore fields = + let field name payload = + let rec get_string = function + | { pexp_desc = Pexp_constant (Pconst_string (str, None)) } -> str + | _ -> raise_errorf "Internal error: invalid [@@@ocaml.ppx.context \ + { %s }] string syntax" name + and get_bool pexp = + match pexp with + | {pexp_desc = Pexp_construct ({txt = Longident.Lident "true"}, + 
None)} -> + true + | {pexp_desc = Pexp_construct ({txt = Longident.Lident "false"}, + None)} -> + false + | _ -> raise_errorf "Internal error: invalid [@@@ocaml.ppx.context \ + { %s }] bool syntax" name + and get_list elem = function + | {pexp_desc = + Pexp_construct ({txt = Longident.Lident "::"}, + Some {pexp_desc = Pexp_tuple [exp; rest]}) } -> + elem exp :: get_list elem rest + | {pexp_desc = + Pexp_construct ({txt = Longident.Lident "[]"}, None)} -> + [] + | _ -> raise_errorf "Internal error: invalid [@@@ocaml.ppx.context \ + { %s }] list syntax" name + and get_pair f1 f2 = function + | {pexp_desc = Pexp_tuple [e1; e2]} -> + (f1 e1, f2 e2) + | _ -> raise_errorf "Internal error: invalid [@@@ocaml.ppx.context \ + { %s }] pair syntax" name + and get_option elem = function + | { pexp_desc = + Pexp_construct ({ txt = Longident.Lident "Some" }, Some exp) } -> + Some (elem exp) + | { pexp_desc = + Pexp_construct ({ txt = Longident.Lident "None" }, None) } -> + None + | _ -> raise_errorf "Internal error: invalid [@@@ocaml.ppx.context \ + { %s }] option syntax" name + in + match name with + | "tool_name" -> + tool_name_ref := get_string payload + | "include_dirs" -> + Clflags.include_dirs := get_list get_string payload + | "load_path" -> + Config.load_path := get_list get_string payload + | "open_modules" -> + Clflags.open_modules := get_list get_string payload + | "for_package" -> + Clflags.for_package := get_option get_string payload + | "debug" -> + Clflags.debug := get_bool payload + | "use_threads" -> + Clflags.use_threads := get_bool payload + | "use_vmthreads" -> + Clflags.use_vmthreads := get_bool payload + | "cookies" -> + let l = get_list (get_pair get_string (fun x -> x)) payload in + cookies := + List.fold_left + (fun s (k, v) -> StringMap.add k v s) StringMap.empty + l + | _ -> + () + in + List.iter (function ({txt=Lident name}, x) -> field name x | _ -> ()) fields + + let update_cookies fields = + let fields = + List.filter + (function ({txt=Lident "cookies"}, _) -> false | _ -> true) + fields + in + fields @ [get_cookies ()] +end + +let ppx_context = PpxContext.make + +let extension_of_exn exn = + match error_of_exn exn with + | Some (`Ok error) -> extension_of_error error + | Some `Already_displayed -> { loc = Location.none; txt = "ocaml.error" }, PStr [] + | None -> raise exn + + +let apply_lazy ~source ~target mapper = + let implem ast = + let fields, ast = + match ast with + | {pstr_desc = Pstr_attribute ({txt = "ocaml.ppx.context"}, x)} :: l -> + PpxContext.get_fields x, l + | _ -> [], ast + in + PpxContext.restore fields; + let ast = + try + let mapper = mapper () in + mapper.structure mapper ast + with exn -> + [{pstr_desc = Pstr_extension (extension_of_exn exn, []); + pstr_loc = Location.none}] + in + let fields = PpxContext.update_cookies fields in + Str.attribute (PpxContext.mk fields) :: ast + in + let iface ast = + let fields, ast = + match ast with + | {psig_desc = Psig_attribute ({txt = "ocaml.ppx.context"}, x)} :: l -> + PpxContext.get_fields x, l + | _ -> [], ast + in + PpxContext.restore fields; + let ast = + try + let mapper = mapper () in + mapper.signature mapper ast + with exn -> + [{psig_desc = Psig_extension (extension_of_exn exn, []); + psig_loc = Location.none}] + in + let fields = PpxContext.update_cookies fields in + Sig.attribute (PpxContext.mk fields) :: ast + in + + let ic = open_in_bin source in + let magic = + really_input_string ic (String.length Config.ast_impl_magic_number) + in + + let rewrite transform = + Location.input_name := input_value 
ic; + let ast = input_value ic in + close_in ic; + let ast = transform ast in + let oc = open_out_bin target in + output_string oc magic; + output_value oc !Location.input_name; + output_value oc ast; + close_out oc + and fail () = + close_in ic; + failwith "Ast_mapper: OCaml version mismatch or malformed input"; + in + + if magic = Config.ast_impl_magic_number then + rewrite (implem : structure -> structure) + else if magic = Config.ast_intf_magic_number then + rewrite (iface : signature -> signature) + else fail () + +let drop_ppx_context_str ~restore = function + | {pstr_desc = Pstr_attribute({Location.txt = "ocaml.ppx.context"}, a)} + :: items -> + if restore then + PpxContext.restore (PpxContext.get_fields a); + items + | items -> items + +let drop_ppx_context_sig ~restore = function + | {psig_desc = Psig_attribute({Location.txt = "ocaml.ppx.context"}, a)} + :: items -> + if restore then + PpxContext.restore (PpxContext.get_fields a); + items + | items -> items + +let add_ppx_context_str ~tool_name ast = + Ast_helper.Str.attribute (ppx_context ~tool_name ()) :: ast + +let add_ppx_context_sig ~tool_name ast = + Ast_helper.Sig.attribute (ppx_context ~tool_name ()) :: ast + + +let apply ~source ~target mapper = + apply_lazy ~source ~target (fun () -> mapper) + +let run_main mapper = + try + let a = Sys.argv in + let n = Array.length a in + if n > 2 then + let mapper () = + try mapper (Array.to_list (Array.sub a 1 (n - 3))) + with exn -> + (* PR#6463 *) + let f _ _ = raise exn in + {default_mapper with structure = f; signature = f} + in + apply_lazy ~source:a.(n - 2) ~target:a.(n - 1) mapper + else begin + Printf.eprintf "Usage: %s [extra_args] \n%!" + Sys.executable_name; + exit 2 + end + with exn -> + prerr_endline (Printexc.to_string exn); + exit 2 + +let register_function = ref (fun _name f -> run_main f) +let register name f = !register_function name f diff --git a/res_syntax/compiler-libs-406/ast_mapper.mli b/res_syntax/compiler-libs-406/ast_mapper.mli new file mode 100644 index 0000000000..85b59e9c37 --- /dev/null +++ b/res_syntax/compiler-libs-406/ast_mapper.mli @@ -0,0 +1,200 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Alain Frisch, LexiFi *) +(* *) +(* Copyright 2012 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** The interface of a -ppx rewriter + + A -ppx rewriter is a program that accepts a serialized abstract syntax + tree and outputs another, possibly modified, abstract syntax tree. + This module encapsulates the interface between the compiler and + the -ppx rewriters, handling such details as the serialization format, + forwarding of command-line flags, and storing state. + + {!mapper} allows to implement AST rewriting using open recursion. + A typical mapper would be based on {!default_mapper}, a deep + identity mapper, and will fall back on it for handling the syntax it + does not modify. 
For example: + + {[ +open Asttypes +open Parsetree +open Ast_mapper + +let test_mapper argv = + { default_mapper with + expr = fun mapper expr -> + match expr with + | { pexp_desc = Pexp_extension ({ txt = "test" }, PStr [])} -> + Ast_helper.Exp.constant (Const_int 42) + | other -> default_mapper.expr mapper other; } + +let () = + register "ppx_test" test_mapper]} + + This -ppx rewriter, which replaces [[%test]] in expressions with + the constant [42], can be compiled using + [ocamlc -o ppx_test -I +compiler-libs ocamlcommon.cma ppx_test.ml]. + + *) + +open Parsetree + +(** {1 A generic Parsetree mapper} *) + +type mapper = { + attribute: mapper -> attribute -> attribute; + attributes: mapper -> attribute list -> attribute list; + case: mapper -> case -> case; + cases: mapper -> case list -> case list; + class_declaration: mapper -> class_declaration -> class_declaration; + class_description: mapper -> class_description -> class_description; + class_expr: mapper -> class_expr -> class_expr; + class_field: mapper -> class_field -> class_field; + class_signature: mapper -> class_signature -> class_signature; + class_structure: mapper -> class_structure -> class_structure; + class_type: mapper -> class_type -> class_type; + class_type_declaration: mapper -> class_type_declaration + -> class_type_declaration; + class_type_field: mapper -> class_type_field -> class_type_field; + constructor_declaration: mapper -> constructor_declaration + -> constructor_declaration; + expr: mapper -> expression -> expression; + extension: mapper -> extension -> extension; + extension_constructor: mapper -> extension_constructor + -> extension_constructor; + include_declaration: mapper -> include_declaration -> include_declaration; + include_description: mapper -> include_description -> include_description; + label_declaration: mapper -> label_declaration -> label_declaration; + location: mapper -> Location.t -> Location.t; + module_binding: mapper -> module_binding -> module_binding; + module_declaration: mapper -> module_declaration -> module_declaration; + module_expr: mapper -> module_expr -> module_expr; + module_type: mapper -> module_type -> module_type; + module_type_declaration: mapper -> module_type_declaration + -> module_type_declaration; + open_description: mapper -> open_description -> open_description; + pat: mapper -> pattern -> pattern; + payload: mapper -> payload -> payload; + signature: mapper -> signature -> signature; + signature_item: mapper -> signature_item -> signature_item; + structure: mapper -> structure -> structure; + structure_item: mapper -> structure_item -> structure_item; + typ: mapper -> core_type -> core_type; + type_declaration: mapper -> type_declaration -> type_declaration; + type_extension: mapper -> type_extension -> type_extension; + type_kind: mapper -> type_kind -> type_kind; + value_binding: mapper -> value_binding -> value_binding; + value_description: mapper -> value_description -> value_description; + with_constraint: mapper -> with_constraint -> with_constraint; +} +(** A mapper record implements one "method" per syntactic category, + using an open recursion style: each method takes as its first + argument the mapper to be applied to children in the syntax + tree. *) + +val default_mapper: mapper +(** A default mapper, which implements a "deep identity" mapping. 
*) + +(** {1 Apply mappers to compilation units} *) + +val tool_name: unit -> string +(** Can be used within a ppx preprocessor to know which tool is + calling it ["ocamlc"], ["ocamlopt"], ["ocamldoc"], ["ocamldep"], + ["ocaml"], ... Some global variables that reflect command-line + options are automatically synchronized between the calling tool + and the ppx preprocessor: {!Clflags.include_dirs}, + {!Config.load_path}, {!Clflags.open_modules}, {!Clflags.for_package}, + {!Clflags.debug}. *) + + +val apply: source:string -> target:string -> mapper -> unit +(** Apply a mapper (parametrized by the unit name) to a dumped + parsetree found in the [source] file and put the result in the + [target] file. The [structure] or [signature] field of the mapper + is applied to the implementation or interface. *) + +val run_main: (string list -> mapper) -> unit +(** Entry point to call to implement a standalone -ppx rewriter from a + mapper, parametrized by the command line arguments. The current + unit name can be obtained from {!Location.input_name}. This + function implements proper error reporting for uncaught + exceptions. *) + +(** {1 Registration API} *) + +val register_function: (string -> (string list -> mapper) -> unit) ref + +val register: string -> (string list -> mapper) -> unit +(** Apply the [register_function]. The default behavior is to run the + mapper immediately, taking arguments from the process command + line. This is to support a scenario where a mapper is linked as a + stand-alone executable. + + It is possible to overwrite the [register_function] to define + "-ppx drivers", which combine several mappers in a single process. + Typically, a driver starts by defining [register_function] to a + custom implementation, then lets ppx rewriters (linked statically + or dynamically) register themselves, and then run all or some of + them. It is also possible to have -ppx drivers apply rewriters to + only specific parts of an AST. + + The first argument to [register] is a symbolic name to be used by + the ppx driver. *) + + +(** {1 Convenience functions to write mappers} *) + +val map_opt: ('a -> 'b) -> 'a option -> 'b option + +val extension_of_error: Location.error -> extension +(** Encode an error into an 'ocaml.error' extension node which can be + inserted in a generated Parsetree. The compiler will be + responsible for reporting the error. *) + +val attribute_of_warning: Location.t -> string -> attribute +(** Encode a warning message into an 'ocaml.ppwarning' attribute which can be + inserted in a generated Parsetree. The compiler will be + responsible for reporting the warning. *) + +(** {1 Helper functions to call external mappers} *) + +val add_ppx_context_str: + tool_name:string -> Parsetree.structure -> Parsetree.structure +(** Extract information from the current environment and encode it + into an attribute which is prepended to the list of structure + items in order to pass the information to an external + processor. *) + +val add_ppx_context_sig: + tool_name:string -> Parsetree.signature -> Parsetree.signature +(** Same as [add_ppx_context_str], but for signatures. *) + +val drop_ppx_context_str: + restore:bool -> Parsetree.structure -> Parsetree.structure +(** Drop the ocaml.ppx.context attribute from a structure. If + [restore] is true, also restore the associated data in the current + process. *) + +val drop_ppx_context_sig: + restore:bool -> Parsetree.signature -> Parsetree.signature +(** Same as [drop_ppx_context_str], but for signatures. 
*) + +(** {1 Cookies} *) + +(** Cookies are used to pass information from a ppx processor to + a further invocation of itself, when called from the OCaml + toplevel (or other tools that support cookies). *) + +val set_cookie: string -> Parsetree.expression -> unit +val get_cookie: string -> Parsetree.expression option diff --git a/res_syntax/compiler-libs-406/asttypes.mli b/res_syntax/compiler-libs-406/asttypes.mli new file mode 100644 index 0000000000..2e3aa099e0 --- /dev/null +++ b/res_syntax/compiler-libs-406/asttypes.mli @@ -0,0 +1,58 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Auxiliary AST types used by parsetree and typedtree. *) + +type constant = + Const_int of int + | Const_char of int + | Const_string of string * string option + | Const_float of string + | Const_int32 of int32 + | Const_int64 of int64 + | Const_nativeint of nativeint + +type rec_flag = Nonrecursive | Recursive + +type direction_flag = Upto | Downto + +(* Order matters, used in polymorphic comparison *) +type private_flag = Private | Public + +type mutable_flag = Immutable | Mutable + +type virtual_flag = Virtual | Concrete + +type override_flag = Override | Fresh + +type closed_flag = Closed | Open + +type label = string + +type arg_label = + Nolabel + | Labelled of string (* label:T -> ... *) + | Optional of string (* ?label:T -> ... *) + +type 'a loc = 'a Location.loc = { + txt : 'a; + loc : Location.t; +} + + +type variance = + | Covariant + | Contravariant + | Invariant diff --git a/res_syntax/compiler-libs-406/attr_helper.ml b/res_syntax/compiler-libs-406/attr_helper.ml new file mode 100644 index 0000000000..ecf87787ad --- /dev/null +++ b/res_syntax/compiler-libs-406/attr_helper.ml @@ -0,0 +1,54 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Jeremie Dimino, Jane Street Europe *) +(* *) +(* Copyright 2015 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +open Asttypes +open Parsetree + +type error = + | Multiple_attributes of string + | No_payload_expected of string + +exception Error of Location.t * error + +let get_no_payload_attribute alt_names attrs = + match List.filter (fun (n, _) -> List.mem n.txt alt_names) attrs with + | [] -> None + | [ (name, PStr []) ] -> Some name + | [ (name, _) ] -> + raise (Error (name.loc, No_payload_expected name.txt)) + | _ :: (name, _) :: _ -> + raise (Error (name.loc, Multiple_attributes name.txt)) + +let has_no_payload_attribute alt_names attrs = + match get_no_payload_attribute alt_names attrs with + | None -> false + | Some _ -> true + +open Format + +let report_error ppf = function + | Multiple_attributes name -> + fprintf ppf "Too many `%s' attributes" name + | No_payload_expected name -> + fprintf ppf "Attribute `%s' does not accept a payload" name + +let () = + Location.register_error_of_exn + (function + | Error (loc, err) -> + Some (Location.error_of_printer loc report_error err) + | _ -> + None + ) diff --git a/res_syntax/compiler-libs-406/attr_helper.mli b/res_syntax/compiler-libs-406/attr_helper.mli new file mode 100644 index 0000000000..3d7145c3cd --- /dev/null +++ b/res_syntax/compiler-libs-406/attr_helper.mli @@ -0,0 +1,36 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Jeremie Dimino, Jane Street Europe *) +(* *) +(* Copyright 2015 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Helpers for attributes *) + +open Asttypes +open Parsetree + +type error = + | Multiple_attributes of string + | No_payload_expected of string + +(** The [string list] argument of the following functions is a list of + alternative names for the attribute we are looking for. For instance: + + {[ + ["foo"; "ocaml.foo"] + ]} *) +val get_no_payload_attribute : string list -> attributes -> string loc option +val has_no_payload_attribute : string list -> attributes -> bool + +exception Error of Location.t * error + +val report_error: Format.formatter -> error -> unit diff --git a/res_syntax/compiler-libs-406/bsc_warnings.ml b/res_syntax/compiler-libs-406/bsc_warnings.ml new file mode 100644 index 0000000000..1dc1fe285d --- /dev/null +++ b/res_syntax/compiler-libs-406/bsc_warnings.ml @@ -0,0 +1,77 @@ +(* Copyright (C) 2020- Hongbo Zhang, Authors of ReScript + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * In addition to the permissions granted to you by the LGPL, you may combine + * or link a "work that uses the Library" with a publicly distributed version + * of this file to produce a combined library or application, then distribute + * that combined work under the terms of your choosing, with no requirement + * to comply with the obligations normally placed on you by section 4 of the + * LGPL version 3 (or the corresponding section of a later version of the LGPL + * should you choose to use a later version). 
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) + + + +(** + See the meanings of the warning codes here: https://caml.inria.fr/pub/docs/manual-ocaml/comp.html#sec281 + + - 30 Two labels or constructors of the same name are defined in two mutually recursive types. + - 40 Constructor or label name used out of scope. + + - 6 Label omitted in function application. + - 7 Method overridden. + - 9 Missing fields in a record pattern. (*Not always desired, in some cases need [@@@warning "+9"] *) + - 27 Innocuous unused variable: unused variable that is not bound with let nor as, and doesn’t start with an underscore (_) character. + - 29 Unescaped end-of-line in a string constant (non-portable code). + - 32 .. 39 Unused blabla + - 44 Open statement shadows an already defined identifier. + - 45 Open statement shadows an already defined label or constructor. + - 48 Implicit elimination of optional arguments. https://caml.inria.fr/mantis/view.php?id=6352 + - 101 (bsb-specific) unsafe polymorphic comparison. +*) + + +(* + The purpose of default warning set is to make it strict while + not annoy user too much + + -4 Fragile pattern matching: matching that will remain complete even if additional con- structors are added to one of the variant types matched. + We turn it off since common pattern + {[ + match x with | A -> .. | _ -> false + ]} + + -9 Missing fields in a record pattern. + only in some special cases that we need all fields being listed + + We encourage people to write code based on type based disambigution + 40,41,42 are enabled for compatiblity reasons + -40 Constructor or label name used out of scope + This is intentional, we should never warn it + - 41 Ambiguous constructor or label name. + It is turned off since it prevents such cases below: + {[ + type a = A |B + type b = A | B | C + ]} + - 42 Disambiguated constructor or label name (compatibility warning). + + - 50 Unexpected documentation comment. + + - 102 Bs_polymorphic_comparison +*) +let defaults_w = "+a-4-9-20-40-41-42-50-61-102" +let defaults_warn_error = "-a+5+6+101+109";; +(*TODO: add +10*) \ No newline at end of file diff --git a/res_syntax/compiler-libs-406/btype.ml b/res_syntax/compiler-libs-406/btype.ml new file mode 100644 index 0000000000..d94693b1ea --- /dev/null +++ b/res_syntax/compiler-libs-406/btype.ml @@ -0,0 +1,737 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy and Jerome Vouillon, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +(* Basic operations on core types *) + +open Misc +open Asttypes +open Types + +(**** Sets, maps and hashtables of types ****) + +module TypeSet = Set.Make(TypeOps) +module TypeMap = Map.Make (TypeOps) +module TypeHash = Hashtbl.Make(TypeOps) + +(**** Forward declarations ****) + +let print_raw = + ref (fun _ -> assert false : Format.formatter -> type_expr -> unit) + +(**** Type level management ****) + +let generic_level = 100000000 + +(* Used to mark a type during a traversal. *) +let lowest_level = 0 +let pivot_level = 2 * lowest_level - 1 + (* pivot_level - lowest_level < lowest_level *) + +(**** Some type creators ****) + +let new_id = ref (-1) + +let newty2 level desc = + incr new_id; { desc; level; id = !new_id } +let newgenty desc = newty2 generic_level desc +let newgenvar ?name () = newgenty (Tvar name) +(* +let newmarkedvar level = + incr new_id; { desc = Tvar; level = pivot_level - level; id = !new_id } +let newmarkedgenvar () = + incr new_id; + { desc = Tvar; level = pivot_level - generic_level; id = !new_id } +*) + +(**** Check some types ****) + +let is_Tvar = function {desc=Tvar _} -> true | _ -> false +let is_Tunivar = function {desc=Tunivar _} -> true | _ -> false +let is_Tconstr = function {desc=Tconstr _} -> true | _ -> false + +let dummy_method = "*dummy method*" +let default_mty = function + Some mty -> mty + | None -> Mty_signature [] + +(**** Definitions for backtracking ****) + +type change = + Ctype of type_expr * type_desc + | Ccompress of type_expr * type_desc * type_desc + | Clevel of type_expr * int + | Cname of + (Path.t * type_expr list) option ref * (Path.t * type_expr list) option + | Crow of row_field option ref * row_field option + | Ckind of field_kind option ref * field_kind option + | Ccommu of commutable ref * commutable + | Cuniv of type_expr option ref * type_expr option + | Ctypeset of TypeSet.t ref * TypeSet.t + +type changes = + Change of change * changes ref + | Unchanged + | Invalid + +let trail = Weak.create 1 + +let log_change ch = + match Weak.get trail 0 with None -> () + | Some r -> + let r' = ref Unchanged in + r := Change (ch, r'); + Weak.set trail 0 (Some r') + +(**** Representative of a type ****) + +let rec field_kind_repr = + function + Fvar {contents = Some kind} -> field_kind_repr kind + | kind -> kind + +let rec repr_link compress t d = + function + {desc = Tlink t' as d'} -> + repr_link true t d' t' + | {desc = Tfield (_, k, _, t') as d'} when field_kind_repr k = Fabsent -> + repr_link true t d' t' + | t' -> + if compress then begin + log_change (Ccompress (t, t.desc, d)); t.desc <- d + end; + t' + +let repr t = + match t.desc with + Tlink t' as d -> + repr_link false t d t' + | Tfield (_, k, _, t') as d when field_kind_repr k = Fabsent -> + repr_link false t d t' + | _ -> t + +let rec commu_repr = function + Clink r when !r <> Cunknown -> commu_repr !r + | c -> c + +let rec row_field_repr_aux tl = function + Reither(_, tl', _, {contents = Some fi}) -> + row_field_repr_aux (tl@tl') fi + | Reither(c, tl', m, r) -> + Reither(c, tl@tl', m, r) + | Rpresent (Some _) when tl <> [] -> + Rpresent (Some (List.hd tl)) + | fi -> fi + +let row_field_repr fi = row_field_repr_aux [] fi + +let rec rev_concat l ll = + match ll with + [] -> l + | l'::ll -> rev_concat (l'@l) ll + +let rec row_repr_aux ll row = + match (repr row.row_more).desc with + | Tvariant row' -> + let f = row.row_fields in + row_repr_aux (if f = [] then ll else f::ll) row' + | _ -> + 
if ll = [] then row else + {row with row_fields = rev_concat row.row_fields ll} + +let row_repr row = row_repr_aux [] row + +let rec row_field tag row = + let rec find = function + | (tag',f) :: fields -> + if tag = tag' then row_field_repr f else find fields + | [] -> + match repr row.row_more with + | {desc=Tvariant row'} -> row_field tag row' + | _ -> Rabsent + in find row.row_fields + +let rec row_more row = + match repr row.row_more with + | {desc=Tvariant row'} -> row_more row' + | ty -> ty + +let row_fixed row = + let row = row_repr row in + row.row_fixed || + match (repr row.row_more).desc with + Tvar _ | Tnil -> false + | Tunivar _ | Tconstr _ -> true + | _ -> assert false + +let static_row row = + let row = row_repr row in + row.row_closed && + List.for_all + (fun (_,f) -> match row_field_repr f with Reither _ -> false | _ -> true) + row.row_fields + +let hash_variant s = + let accu = ref 0 in + for i = 0 to String.length s - 1 do + accu := 223 * !accu + Char.code s.[i] + done; + (* reduce to 31 bits *) + accu := !accu land (1 lsl 31 - 1); + (* make it signed for 64 bits architectures *) + if !accu > 0x3FFFFFFF then !accu - (1 lsl 31) else !accu + +let proxy ty = + let ty0 = repr ty in + match ty0.desc with + | Tvariant row when not (static_row row) -> + row_more row + | Tobject (ty, _) -> + let rec proxy_obj ty = + match ty.desc with + Tfield (_, _, _, ty) | Tlink ty -> proxy_obj ty + | Tvar _ | Tunivar _ | Tconstr _ -> ty + | Tnil -> ty0 + | _ -> assert false + in proxy_obj ty + | _ -> ty0 + +(**** Utilities for fixed row private types ****) + +let row_of_type t = + match (repr t).desc with + Tobject(t,_) -> + let rec get_row t = + let t = repr t in + match t.desc with + Tfield(_,_,_,t) -> get_row t + | _ -> t + in get_row t + | Tvariant row -> + row_more row + | _ -> + t + +let has_constr_row t = + not (is_Tconstr t) && is_Tconstr (row_of_type t) + +let is_row_name s = + let l = String.length s in + if l < 4 then false else String.sub s (l-4) 4 = "#row" + +let is_constr_row ~allow_ident t = + match t.desc with + Tconstr (Path.Pident id, _, _) when allow_ident -> + is_row_name (Ident.name id) + | Tconstr (Path.Pdot (_, s, _), _, _) -> is_row_name s + | _ -> false + + + (**********************************) + (* Utilities for type traversal *) + (**********************************) + +let rec iter_row f row = + List.iter + (fun (_, fi) -> + match row_field_repr fi with + | Rpresent(Some ty) -> f ty + | Reither(_, tl, _, _) -> List.iter f tl + | _ -> ()) + row.row_fields; + match (repr row.row_more).desc with + Tvariant row -> iter_row f row + | Tvar _ | Tunivar _ | Tsubst _ | Tconstr _ | Tnil -> + Misc.may (fun (_,l) -> List.iter f l) row.row_name + | _ -> assert false + +let iter_type_expr f ty = + match ty.desc with + Tvar _ -> () + | Tarrow (_, ty1, ty2, _) -> f ty1; f ty2 + | Ttuple l -> List.iter f l + | Tconstr (_, l, _) -> List.iter f l + | Tobject(ty, {contents = Some (_, p)}) + -> f ty; List.iter f p + | Tobject (ty, _) -> f ty + | Tvariant row -> iter_row f row; f (row_more row) + | Tfield (_, _, ty1, ty2) -> f ty1; f ty2 + | Tnil -> () + | Tlink ty -> f ty + | Tsubst ty -> f ty + | Tunivar _ -> () + | Tpoly (ty, tyl) -> f ty; List.iter f tyl + | Tpackage (_, _, l) -> List.iter f l + +let rec iter_abbrev f = function + Mnil -> () + | Mcons(_, _, ty, ty', rem) -> f ty; f ty'; iter_abbrev f rem + | Mlink rem -> iter_abbrev f !rem + +type type_iterators = + { it_signature: type_iterators -> signature -> unit; + it_signature_item: type_iterators -> signature_item -> unit; + 
it_value_description: type_iterators -> value_description -> unit; + it_type_declaration: type_iterators -> type_declaration -> unit; + it_extension_constructor: type_iterators -> extension_constructor -> unit; + it_module_declaration: type_iterators -> module_declaration -> unit; + it_modtype_declaration: type_iterators -> modtype_declaration -> unit; + it_class_declaration: type_iterators -> class_declaration -> unit; + it_class_type_declaration: type_iterators -> class_type_declaration -> unit; + it_module_type: type_iterators -> module_type -> unit; + it_class_type: type_iterators -> class_type -> unit; + it_type_kind: type_iterators -> type_kind -> unit; + it_do_type_expr: type_iterators -> type_expr -> unit; + it_type_expr: type_iterators -> type_expr -> unit; + it_path: Path.t -> unit; } + +let iter_type_expr_cstr_args f = function + | Cstr_tuple tl -> List.iter f tl + | Cstr_record lbls -> List.iter (fun d -> f d.ld_type) lbls + +let map_type_expr_cstr_args f = function + | Cstr_tuple tl -> Cstr_tuple (List.map f tl) + | Cstr_record lbls -> + Cstr_record (List.map (fun d -> {d with ld_type=f d.ld_type}) lbls) + +let iter_type_expr_kind f = function + | Type_abstract -> () + | Type_variant cstrs -> + List.iter + (fun cd -> + iter_type_expr_cstr_args f cd.cd_args; + Misc.may f cd.cd_res + ) + cstrs + | Type_record(lbls, _) -> + List.iter (fun d -> f d.ld_type) lbls + | Type_open -> + () + + +let type_iterators = + let it_signature it = + List.iter (it.it_signature_item it) + and it_signature_item it = function + Sig_value (_, vd) -> it.it_value_description it vd + | Sig_type (_, td, _) -> it.it_type_declaration it td + | Sig_typext (_, td, _) -> it.it_extension_constructor it td + | Sig_module (_, md, _) -> it.it_module_declaration it md + | Sig_modtype (_, mtd) -> it.it_modtype_declaration it mtd + | Sig_class (_, cd, _) -> it.it_class_declaration it cd + | Sig_class_type (_, ctd, _) -> it.it_class_type_declaration it ctd + and it_value_description it vd = + it.it_type_expr it vd.val_type + and it_type_declaration it td = + List.iter (it.it_type_expr it) td.type_params; + may (it.it_type_expr it) td.type_manifest; + it.it_type_kind it td.type_kind + and it_extension_constructor it td = + it.it_path td.ext_type_path; + List.iter (it.it_type_expr it) td.ext_type_params; + iter_type_expr_cstr_args (it.it_type_expr it) td.ext_args; + may (it.it_type_expr it) td.ext_ret_type + and it_module_declaration it md = + it.it_module_type it md.md_type + and it_modtype_declaration it mtd = + may (it.it_module_type it) mtd.mtd_type + and it_class_declaration it cd = + List.iter (it.it_type_expr it) cd.cty_params; + it.it_class_type it cd.cty_type; + may (it.it_type_expr it) cd.cty_new; + it.it_path cd.cty_path + and it_class_type_declaration it ctd = + List.iter (it.it_type_expr it) ctd.clty_params; + it.it_class_type it ctd.clty_type; + it.it_path ctd.clty_path + and it_module_type it = function + Mty_ident p + | Mty_alias(_, p) -> it.it_path p + | Mty_signature sg -> it.it_signature it sg + | Mty_functor (_, mto, mt) -> + may (it.it_module_type it) mto; + it.it_module_type it mt + and it_class_type it = function + Cty_constr (p, tyl, cty) -> + it.it_path p; + List.iter (it.it_type_expr it) tyl; + it.it_class_type it cty + | Cty_signature cs -> + it.it_type_expr it cs.csig_self; + Vars.iter (fun _ (_,_,ty) -> it.it_type_expr it ty) cs.csig_vars; + List.iter + (fun (p, tl) -> it.it_path p; List.iter (it.it_type_expr it) tl) + cs.csig_inher + | Cty_arrow (_, ty, cty) -> + it.it_type_expr it ty; + 
it.it_class_type it cty + and it_type_kind it kind = + iter_type_expr_kind (it.it_type_expr it) kind + and it_do_type_expr it ty = + iter_type_expr (it.it_type_expr it) ty; + match ty.desc with + Tconstr (p, _, _) + | Tobject (_, {contents=Some (p, _)}) + | Tpackage (p, _, _) -> + it.it_path p + | Tvariant row -> + may (fun (p,_) -> it.it_path p) (row_repr row).row_name + | _ -> () + and it_path _p = () + in + { it_path; it_type_expr = it_do_type_expr; it_do_type_expr; + it_type_kind; it_class_type; it_module_type; + it_signature; it_class_type_declaration; it_class_declaration; + it_modtype_declaration; it_module_declaration; it_extension_constructor; + it_type_declaration; it_value_description; it_signature_item; } + +let copy_row f fixed row keep more = + let fields = List.map + (fun (l, fi) -> l, + match row_field_repr fi with + | Rpresent(Some ty) -> Rpresent(Some(f ty)) + | Reither(c, tl, m, e) -> + let e = if keep then e else ref None in + let m = if row.row_fixed then fixed else m in + let tl = List.map f tl in + Reither(c, tl, m, e) + | _ -> fi) + row.row_fields in + let name = + match row.row_name with None -> None + | Some (path, tl) -> Some (path, List.map f tl) in + { row_fields = fields; row_more = more; + row_bound = (); row_fixed = row.row_fixed && fixed; + row_closed = row.row_closed; row_name = name; } + +let rec copy_kind = function + Fvar{contents = Some k} -> copy_kind k + | Fvar _ -> Fvar (ref None) + | Fpresent -> Fpresent + | Fabsent -> assert false + +let copy_commu c = + if commu_repr c = Cok then Cok else Clink (ref Cunknown) + +(* Since univars may be used as row variables, we need to do some + encoding during substitution *) +let rec norm_univar ty = + match ty.desc with + Tunivar _ | Tsubst _ -> ty + | Tlink ty -> norm_univar ty + | Ttuple (ty :: _) -> norm_univar ty + | _ -> assert false + +let rec copy_type_desc ?(keep_names=false) f = function + Tvar _ as ty -> if keep_names then ty else Tvar None + | Tarrow (p, ty1, ty2, c)-> Tarrow (p, f ty1, f ty2, copy_commu c) + | Ttuple l -> Ttuple (List.map f l) + | Tconstr (p, l, _) -> Tconstr (p, List.map f l, ref Mnil) + | Tobject(ty, {contents = Some (p, tl)}) + -> Tobject (f ty, ref (Some(p, List.map f tl))) + | Tobject (ty, _) -> Tobject (f ty, ref None) + | Tvariant _ -> assert false (* too ambiguous *) + | Tfield (p, k, ty1, ty2) -> (* the kind is kept shared *) + Tfield (p, field_kind_repr k, f ty1, f ty2) + | Tnil -> Tnil + | Tlink ty -> copy_type_desc f ty.desc + | Tsubst _ -> assert false + | Tunivar _ as ty -> ty (* always keep the name *) + | Tpoly (ty, tyl) -> + let tyl = List.map (fun x -> norm_univar (f x)) tyl in + Tpoly (f ty, tyl) + | Tpackage (p, n, l) -> Tpackage (p, n, List.map f l) + +(* Utilities for copying *) + +let saved_desc = ref [] + (* Saved association of generic nodes with their description. *) + +let save_desc ty desc = + saved_desc := (ty, desc)::!saved_desc + +let saved_kinds = ref [] (* duplicated kind variables *) +let new_kinds = ref [] (* new kind variables *) +let dup_kind r = + (match !r with None -> () | Some _ -> assert false); + if not (List.memq r !new_kinds) then begin + saved_kinds := r :: !saved_kinds; + let r' = ref None in + new_kinds := r' :: !new_kinds; + r := Some (Fvar r') + end + +(* Restored type descriptions. *) +let cleanup_types () = + List.iter (fun (ty, desc) -> ty.desc <- desc) !saved_desc; + List.iter (fun r -> r := None) !saved_kinds; + saved_desc := []; saved_kinds := []; new_kinds := [] + +(* Mark a type. 
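+   Marking flips [level] to [pivot_level - level], so a marked node
+   satisfies [level < lowest_level]; [unmark_type] applies the same flip
+   again to restore the original level.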
*) +let rec mark_type ty = + let ty = repr ty in + if ty.level >= lowest_level then begin + ty.level <- pivot_level - ty.level; + iter_type_expr mark_type ty + end + +let mark_type_node ty = + let ty = repr ty in + if ty.level >= lowest_level then begin + ty.level <- pivot_level - ty.level; + end + +let mark_type_params ty = + iter_type_expr mark_type ty + +let type_iterators = + let it_type_expr it ty = + let ty = repr ty in + if ty.level >= lowest_level then begin + mark_type_node ty; + it.it_do_type_expr it ty; + end + in + {type_iterators with it_type_expr} + + +(* Remove marks from a type. *) +let rec unmark_type ty = + let ty = repr ty in + if ty.level < lowest_level then begin + ty.level <- pivot_level - ty.level; + iter_type_expr unmark_type ty + end + +let unmark_iterators = + let it_type_expr _it ty = unmark_type ty in + {type_iterators with it_type_expr} + +let unmark_type_decl decl = + unmark_iterators.it_type_declaration unmark_iterators decl + +let unmark_extension_constructor ext = + List.iter unmark_type ext.ext_type_params; + iter_type_expr_cstr_args unmark_type ext.ext_args; + Misc.may unmark_type ext.ext_ret_type + +let unmark_class_signature sign = + unmark_type sign.csig_self; + Vars.iter (fun _l (_m, _v, t) -> unmark_type t) sign.csig_vars + +let unmark_class_type cty = + unmark_iterators.it_class_type unmark_iterators cty + + + (*******************************************) + (* Memorization of abbreviation expansion *) + (*******************************************) + +(* Search whether the expansion has been memorized. *) + +let lte_public p1 p2 = (* Private <= Public *) + match p1, p2 with + | Private, _ | _, Public -> true + | Public, Private -> false + +let rec find_expans priv p1 = function + Mnil -> None + | Mcons (priv', p2, _ty0, ty, _) + when lte_public priv priv' && Path.same p1 p2 -> Some ty + | Mcons (_, _, _, _, rem) -> find_expans priv p1 rem + | Mlink {contents = rem} -> find_expans priv p1 rem + +(* debug: check for cycles in abbreviation. only works with -principal +let rec check_expans visited ty = + let ty = repr ty in + assert (not (List.memq ty visited)); + match ty.desc with + Tconstr (path, args, abbrev) -> + begin match find_expans path !abbrev with + Some ty' -> check_expans (ty :: visited) ty' + | None -> () + end + | _ -> () +*) + +let memo = ref [] + (* Contains the list of saved abbreviation expansions. *) + +let cleanup_abbrev () = + (* Remove all memorized abbreviation expansions. *) + List.iter (fun abbr -> abbr := Mnil) !memo; + memo := [] + +let memorize_abbrev mem priv path v v' = + (* Memorize the expansion of an abbreviation. 
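+     The updated cell is also pushed onto the global [memo] list so that
+     [cleanup_abbrev] can later reset every memorized expansion to [Mnil].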
*) + mem := Mcons (priv, path, v, v', !mem); + (* check_expans [] v; *) + memo := mem :: !memo + +let rec forget_abbrev_rec mem path = + match mem with + Mnil -> + assert false + | Mcons (_, path', _, _, rem) when Path.same path path' -> + rem + | Mcons (priv, path', v, v', rem) -> + Mcons (priv, path', v, v', forget_abbrev_rec rem path) + | Mlink mem' -> + mem' := forget_abbrev_rec !mem' path; + raise Exit + +let forget_abbrev mem path = + try mem := forget_abbrev_rec !mem path with Exit -> () + +(* debug: check for invalid abbreviations +let rec check_abbrev_rec = function + Mnil -> true + | Mcons (_, ty1, ty2, rem) -> + repr ty1 != repr ty2 + | Mlink mem' -> + check_abbrev_rec !mem' + +let check_memorized_abbrevs () = + List.for_all (fun mem -> check_abbrev_rec !mem) !memo +*) + + (**********************************) + (* Utilities for labels *) + (**********************************) + +let is_optional = function Optional _ -> true | _ -> false + +let label_name = function + Nolabel -> "" + | Labelled s + | Optional s -> s + +let prefixed_label_name = function + Nolabel -> "" + | Labelled s -> "~" ^ s + | Optional s -> "?" ^ s + +let rec extract_label_aux hd l = function + [] -> raise Not_found + | (l',t as p) :: ls -> + if label_name l' = l then (l', t, List.rev hd, ls) + else extract_label_aux (p::hd) l ls + +let extract_label l ls = extract_label_aux [] l ls + + + (**********************************) + (* Utilities for backtracking *) + (**********************************) + +let undo_change = function + Ctype (ty, desc) -> ty.desc <- desc + | Ccompress (ty, desc, _) -> ty.desc <- desc + | Clevel (ty, level) -> ty.level <- level + | Cname (r, v) -> r := v + | Crow (r, v) -> r := v + | Ckind (r, v) -> r := v + | Ccommu (r, v) -> r := v + | Cuniv (r, v) -> r := v + | Ctypeset (r, v) -> r := v + +type snapshot = changes ref * int +let last_snapshot = ref 0 + +let log_type ty = + if ty.id <= !last_snapshot then log_change (Ctype (ty, ty.desc)) +let link_type ty ty' = + log_type ty; + let desc = ty.desc in + ty.desc <- Tlink ty'; + (* Name is a user-supplied name for this unification variable (obtained + * through a type annotation for instance). 
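+   * When both variables carry a name, the name of [ty] replaces the one on
+   * [ty'] only if [ty] sits at a strictly lower level; otherwise [ty']
+   * keeps its own name.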
*) + match desc, ty'.desc with + Tvar name, Tvar name' -> + begin match name, name' with + | Some _, None -> log_type ty'; ty'.desc <- Tvar name + | None, Some _ -> () + | Some _, Some _ -> + if ty.level < ty'.level then (log_type ty'; ty'.desc <- Tvar name) + | None, None -> () + end + | _ -> () + (* ; assert (check_memorized_abbrevs ()) *) + (* ; check_expans [] ty' *) +let set_level ty level = + if ty.id <= !last_snapshot then log_change (Clevel (ty, ty.level)); + ty.level <- level +let set_univar rty ty = + log_change (Cuniv (rty, !rty)); rty := Some ty +let set_name nm v = + log_change (Cname (nm, !nm)); nm := v +let set_row_field e v = + log_change (Crow (e, !e)); e := Some v +let set_kind rk k = + log_change (Ckind (rk, !rk)); rk := Some k +let set_commu rc c = + log_change (Ccommu (rc, !rc)); rc := c +let set_typeset rs s = + log_change (Ctypeset (rs, !rs)); rs := s + +let snapshot () = + let old = !last_snapshot in + last_snapshot := !new_id; + match Weak.get trail 0 with Some r -> (r, old) + | None -> + let r = ref Unchanged in + Weak.set trail 0 (Some r); + (r, old) + +let rec rev_log accu = function + Unchanged -> accu + | Invalid -> assert false + | Change (ch, next) -> + let d = !next in + next := Invalid; + rev_log (ch::accu) d + +let backtrack (changes, old) = + match !changes with + Unchanged -> last_snapshot := old + | Invalid -> failwith "Btype.backtrack" + | Change _ as change -> + cleanup_abbrev (); + let backlog = rev_log [] change in + List.iter undo_change backlog; + changes := Unchanged; + last_snapshot := old; + Weak.set trail 0 (Some changes) + +let rec rev_compress_log log r = + match !r with + Unchanged | Invalid -> + log + | Change (Ccompress _, next) -> + rev_compress_log (r::log) next + | Change (_, next) -> + rev_compress_log log next + +let undo_compress (changes, _old) = + match !changes with + Unchanged + | Invalid -> () + | Change _ -> + let log = rev_compress_log [] changes in + List.iter + (fun r -> match !r with + Change (Ccompress (ty, desc, d), next) when ty.desc == d -> + ty.desc <- desc; r := !next + | _ -> ()) + log diff --git a/res_syntax/compiler-libs-406/btype.mli b/res_syntax/compiler-libs-406/btype.mli new file mode 100644 index 0000000000..aaa426a8ab --- /dev/null +++ b/res_syntax/compiler-libs-406/btype.mli @@ -0,0 +1,221 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +(* Basic operations on core types *) + +open Asttypes +open Types + +(**** Sets, maps and hashtables of types ****) + +module TypeSet : Set.S with type elt = type_expr +module TypeMap : Map.S with type key = type_expr +module TypeHash : Hashtbl.S with type key = type_expr + +(**** Levels ****) + +val generic_level: int + +val newty2: int -> type_desc -> type_expr + (* Create a type *) +val newgenty: type_desc -> type_expr + (* Create a generic type *) +val newgenvar: ?name:string -> unit -> type_expr + (* Return a fresh generic variable *) + +(* Use Tsubst instead +val newmarkedvar: int -> type_expr + (* Return a fresh marked variable *) +val newmarkedgenvar: unit -> type_expr + (* Return a fresh marked generic variable *) +*) + +(**** Types ****) + +val is_Tvar: type_expr -> bool +val is_Tunivar: type_expr -> bool +val is_Tconstr: type_expr -> bool +val dummy_method: label +val default_mty: module_type option -> module_type + +val repr: type_expr -> type_expr + (* Return the canonical representative of a type. *) + +val field_kind_repr: field_kind -> field_kind + (* Return the canonical representative of an object field + kind. *) + +val commu_repr: commutable -> commutable + (* Return the canonical representative of a commutation lock *) + +(**** polymorphic variants ****) + +val row_repr: row_desc -> row_desc + (* Return the canonical representative of a row description *) +val row_field_repr: row_field -> row_field +val row_field: label -> row_desc -> row_field + (* Return the canonical representative of a row field *) +val row_more: row_desc -> type_expr + (* Return the extension variable of the row *) +val row_fixed: row_desc -> bool + (* Return whether the row should be treated as fixed or not *) +val static_row: row_desc -> bool + (* Return whether the row is static or not *) +val hash_variant: label -> int + (* Hash function for variant tags *) + +val proxy: type_expr -> type_expr + (* Return the proxy representative of the type: either itself + or a row variable *) + +(**** Utilities for private abbreviations with fixed rows ****) +val row_of_type: type_expr -> type_expr +val has_constr_row: type_expr -> bool +val is_row_name: string -> bool +val is_constr_row: allow_ident:bool -> type_expr -> bool + +(**** Utilities for type traversal ****) + +val iter_type_expr: (type_expr -> unit) -> type_expr -> unit + (* Iteration on types *) +val iter_row: (type_expr -> unit) -> row_desc -> unit + (* Iteration on types in a row *) +val iter_abbrev: (type_expr -> unit) -> abbrev_memo -> unit + (* Iteration on types in an abbreviation list *) + +type type_iterators = + { it_signature: type_iterators -> signature -> unit; + it_signature_item: type_iterators -> signature_item -> unit; + it_value_description: type_iterators -> value_description -> unit; + it_type_declaration: type_iterators -> type_declaration -> unit; + it_extension_constructor: type_iterators -> extension_constructor -> unit; + it_module_declaration: type_iterators -> module_declaration -> unit; + it_modtype_declaration: type_iterators -> modtype_declaration -> unit; + it_class_declaration: type_iterators -> class_declaration -> unit; + it_class_type_declaration: type_iterators -> class_type_declaration -> unit; + it_module_type: type_iterators -> module_type -> unit; + it_class_type: type_iterators -> class_type -> unit; + it_type_kind: type_iterators -> type_kind -> unit; + it_do_type_expr: type_iterators -> type_expr -> unit; + 
it_type_expr: type_iterators -> type_expr -> unit; + it_path: Path.t -> unit; } +val type_iterators: type_iterators + (* Iteration on arbitrary type information. + [it_type_expr] calls [mark_type_node] to avoid loops. *) +val unmark_iterators: type_iterators + (* Unmark any structure containing types. See [unmark_type] below. *) + +val copy_type_desc: + ?keep_names:bool -> (type_expr -> type_expr) -> type_desc -> type_desc + (* Copy on types *) +val copy_row: + (type_expr -> type_expr) -> + bool -> row_desc -> bool -> type_expr -> row_desc +val copy_kind: field_kind -> field_kind + +val save_desc: type_expr -> type_desc -> unit + (* Save a type description *) +val dup_kind: field_kind option ref -> unit + (* Save a None field_kind, and make it point to a fresh Fvar *) +val cleanup_types: unit -> unit + (* Restore type descriptions *) + +val lowest_level: int + (* Marked type: ty.level < lowest_level *) +val pivot_level: int + (* Type marking: ty.level <- pivot_level - ty.level *) +val mark_type: type_expr -> unit + (* Mark a type *) +val mark_type_node: type_expr -> unit + (* Mark a type node (but not its sons) *) +val mark_type_params: type_expr -> unit + (* Mark the sons of a type node *) +val unmark_type: type_expr -> unit +val unmark_type_decl: type_declaration -> unit +val unmark_extension_constructor: extension_constructor -> unit +val unmark_class_type: class_type -> unit +val unmark_class_signature: class_signature -> unit + (* Remove marks from a type *) + +(**** Memorization of abbreviation expansion ****) + +val find_expans: private_flag -> Path.t -> abbrev_memo -> type_expr option + (* Look up a memorized abbreviation *) +val cleanup_abbrev: unit -> unit + (* Flush the cache of abbreviation expansions. + When some types are saved (using [output_value]), this + function MUST be called just before. *) +val memorize_abbrev: + abbrev_memo ref -> + private_flag -> Path.t -> type_expr -> type_expr -> unit + (* Add an expansion in the cache *) +val forget_abbrev: + abbrev_memo ref -> Path.t -> unit + (* Remove an abbreviation from the cache *) + +(**** Utilities for labels ****) + +val is_optional : arg_label -> bool +val label_name : arg_label -> label + +(* Returns the label name with first character '?' or '~' as appropriate. *) +val prefixed_label_name : arg_label -> label + +val extract_label : + label -> (arg_label * 'a) list -> + arg_label * 'a * (arg_label * 'a) list * (arg_label * 'a) list + (* actual label, value, before list, after list *) + +(**** Utilities for backtracking ****) + +type snapshot + (* A snapshot for backtracking *) +val snapshot: unit -> snapshot + (* Make a snapshot for later backtracking. Costs nothing *) +val backtrack: snapshot -> unit + (* Backtrack to a given snapshot. Only possible if you have + not already backtracked to a previous snapshot. + Calls [cleanup_abbrev] internally *) +val undo_compress: snapshot -> unit + (* Backtrack only path compression. Only meaningful if you have + not already backtracked to a previous snapshot. + Does not call [cleanup_abbrev] *) + +(* Functions to use when modifying a type (only Ctype?) 
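+   An illustrative sketch (not from the original sources; [t1], [t2] and
+   [acceptable] are placeholders) of how these setters combine with the
+   snapshot/backtrack API above:
+   {[
+     let snap = Btype.snapshot () in
+     Btype.link_type t1 t2;                 (* speculative modification *)
+     if not acceptable then Btype.backtrack snap
+   ]}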
*) +val link_type: type_expr -> type_expr -> unit + (* Set the desc field of [t1] to [Tlink t2], logging the old + value if there is an active snapshot *) +val set_level: type_expr -> int -> unit +val set_name: + (Path.t * type_expr list) option ref -> + (Path.t * type_expr list) option -> unit +val set_row_field: row_field option ref -> row_field -> unit +val set_univar: type_expr option ref -> type_expr -> unit +val set_kind: field_kind option ref -> field_kind -> unit +val set_commu: commutable ref -> commutable -> unit +val set_typeset: TypeSet.t ref -> TypeSet.t -> unit + (* Set references, logging the old value *) +val log_type: type_expr -> unit + (* Log the old value of a type, before modifying it by hand *) + +(**** Forward declarations ****) +val print_raw: (Format.formatter -> type_expr -> unit) ref + +val iter_type_expr_kind: (type_expr -> unit) -> (type_kind -> unit) + +val iter_type_expr_cstr_args: (type_expr -> unit) -> + (constructor_arguments -> unit) +val map_type_expr_cstr_args: (type_expr -> type_expr) -> + (constructor_arguments -> constructor_arguments) diff --git a/res_syntax/compiler-libs-406/builtin_attributes.ml b/res_syntax/compiler-libs-406/builtin_attributes.ml new file mode 100755 index 0000000000..84503189e3 --- /dev/null +++ b/res_syntax/compiler-libs-406/builtin_attributes.ml @@ -0,0 +1,206 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Alain Frisch, LexiFi *) +(* *) +(* Copyright 2012 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +open Asttypes +open Parsetree + +let string_of_cst = function + | Pconst_string(s, _) -> Some s + | _ -> None + +let string_of_payload = function + | PStr[{pstr_desc=Pstr_eval({pexp_desc=Pexp_constant c},_)}] -> + string_of_cst c + | _ -> None + +let string_of_opt_payload p = + match string_of_payload p with + | Some s -> s + | None -> "" + +let rec error_of_extension ext = + match ext with + | ({txt = ("ocaml.error"|"error") as txt; loc}, p) -> + let rec sub_from inner = + match inner with + | {pstr_desc=Pstr_extension (ext, _)} :: rest -> + error_of_extension ext :: sub_from rest + | _ :: rest -> + (Location.errorf ~loc + "Invalid syntax for sub-error of extension '%s'." txt) :: + sub_from rest + | [] -> [] + in + begin match p with + | PStr [] -> raise Location.Already_displayed_error + | PStr({pstr_desc=Pstr_eval + ({pexp_desc=Pexp_constant(Pconst_string(msg,_))}, _)}:: + {pstr_desc=Pstr_eval + ({pexp_desc=Pexp_constant(Pconst_string(if_highlight,_))}, _)}:: + inner) -> + Location.error ~loc ~if_highlight ~sub:(sub_from inner) msg + | PStr({pstr_desc=Pstr_eval + ({pexp_desc=Pexp_constant(Pconst_string(msg,_))}, _)}::inner) -> + Location.error ~loc ~sub:(sub_from inner) msg + | _ -> Location.errorf ~loc "Invalid syntax for extension '%s'." txt + end + | ({txt; loc}, _) -> + Location.errorf ~loc "Uninterpreted extension '%s'." 
txt + +let cat s1 s2 = + if s2 = "" then s1 else + s1 ^ "\n" ^ s2 + +let rec deprecated_of_attrs = function + | [] -> None + | ({txt = "ocaml.deprecated"|"deprecated"; _}, p) :: _ -> + Some (string_of_opt_payload p) + | _ :: tl -> deprecated_of_attrs tl + +let check_deprecated loc attrs s = + match deprecated_of_attrs attrs with + | None -> () + | Some txt -> Location.deprecated loc (cat s txt) + +let check_deprecated_inclusion ~def ~use loc attrs1 attrs2 s = + match deprecated_of_attrs attrs1, deprecated_of_attrs attrs2 with + | None, _ | Some _, Some _ -> () + | Some txt, None -> Location.deprecated ~def ~use loc (cat s txt) + +let rec deprecated_mutable_of_attrs = function + | [] -> None + | ({txt = "ocaml.deprecated_mutable"|"deprecated_mutable"; _}, p) :: _ -> + Some (string_of_opt_payload p) + | _ :: tl -> deprecated_mutable_of_attrs tl + +let check_deprecated_mutable loc attrs s = + match deprecated_mutable_of_attrs attrs with + | None -> () + | Some txt -> + Location.deprecated loc (Printf.sprintf "mutating field %s" (cat s txt)) + +let check_deprecated_mutable_inclusion ~def ~use loc attrs1 attrs2 s = + match deprecated_mutable_of_attrs attrs1, + deprecated_mutable_of_attrs attrs2 + with + | None, _ | Some _, Some _ -> () + | Some txt, None -> + Location.deprecated ~def ~use loc + (Printf.sprintf "mutating field %s" (cat s txt)) + +let check_bs_attributes_inclusion = + ref (fun _attrs1 _attrs2 _s -> + None + ) + +let check_duplicated_labels : (_ -> _ option ) ref = ref (fun _lbls -> + None +) + +let rec deprecated_of_sig = function + | {psig_desc = Psig_attribute a} :: tl -> + begin match deprecated_of_attrs [a] with + | None -> deprecated_of_sig tl + | Some _ as r -> r + end + | _ -> None + + +let rec deprecated_of_str = function + | {pstr_desc = Pstr_attribute a} :: tl -> + begin match deprecated_of_attrs [a] with + | None -> deprecated_of_str tl + | Some _ as r -> r + end + | _ -> None + + +let warning_attribute ?(ppwarning = true) = + let process loc txt errflag payload = + match string_of_payload payload with + | Some s -> + begin try Warnings.parse_options errflag s + with Arg.Bad _ -> + Location.prerr_warning loc + (Warnings.Attribute_payload + (txt, "Ill-formed list of warnings")) + end + | None -> + Location.prerr_warning loc + (Warnings.Attribute_payload + (txt, "A single string literal is expected")) + in + function + | ({txt = ("ocaml.warning"|"warning") as txt; loc}, payload) -> + process loc txt false payload + | ({txt = ("ocaml.warnerror"|"warnerror") as txt; loc}, payload) -> + process loc txt true payload + | {txt="ocaml.ppwarning"|"ppwarning"}, + PStr[{pstr_desc=Pstr_eval({pexp_desc=Pexp_constant + (Pconst_string (s, _))},_); + pstr_loc}] when ppwarning -> + Location.prerr_warning pstr_loc (Warnings.Preprocessor s) + | _ -> + () + +let warning_scope ?ppwarning attrs f = + let prev = Warnings.backup () in + try + List.iter (warning_attribute ?ppwarning) (List.rev attrs); + let ret = f () in + Warnings.restore prev; + ret + with exn -> + Warnings.restore prev; + raise exn + + +let warn_on_literal_pattern = + List.exists + (function + | ({txt="ocaml.warn_on_literal_pattern"|"warn_on_literal_pattern"; _}, _) + -> true + | _ -> false + ) + +let explicit_arity = + List.exists + (function + | ({txt="ocaml.explicit_arity"|"explicit_arity"; _}, _) -> true + | _ -> false + ) + +let immediate = + List.exists + (function + | ({txt="ocaml.immediate"|"immediate"; _}, _) -> true + | _ -> false + ) + +(* The "ocaml.boxed (default)" and "ocaml.unboxed (default)" + attributes 
cannot be input by the user, they are added by the + compiler when applying the default setting. This is done to record + in the .cmi the default used by the compiler when compiling the + source file because the default can change between compiler + invocations. *) + +let check l (x, _) = List.mem x.txt l + +let has_unboxed attr = + List.exists (check ["ocaml.unboxed"; "unboxed"]) + attr + +let has_boxed attr = + List.exists (check ["ocaml.boxed"; "boxed"]) attr diff --git a/res_syntax/compiler-libs-406/builtin_attributes.mli b/res_syntax/compiler-libs-406/builtin_attributes.mli new file mode 100755 index 0000000000..7282dbbe2e --- /dev/null +++ b/res_syntax/compiler-libs-406/builtin_attributes.mli @@ -0,0 +1,84 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Alain Frisch, LexiFi *) +(* *) +(* Copyright 2012 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Support for some of the builtin attributes: + + ocaml.deprecated + ocaml.error + ocaml.ppwarning + ocaml.warning + ocaml.warnerror + ocaml.explicit_arity (for camlp4/camlp5) + ocaml.warn_on_literal_pattern + ocaml.deprecated_mutable + ocaml.immediate + ocaml.boxed / ocaml.unboxed +*) + + +val check_deprecated: Location.t -> Parsetree.attributes -> string -> unit +val check_deprecated_inclusion: + def:Location.t -> use:Location.t -> Location.t -> Parsetree.attributes -> + Parsetree.attributes -> string -> unit +val deprecated_of_attrs: Parsetree.attributes -> string option +val deprecated_of_sig: Parsetree.signature -> string option +val deprecated_of_str: Parsetree.structure -> string option + +val check_deprecated_mutable: + Location.t -> Parsetree.attributes -> string -> unit +val check_deprecated_mutable_inclusion: + def:Location.t -> use:Location.t -> Location.t -> Parsetree.attributes -> + Parsetree.attributes -> string -> unit + +val check_bs_attributes_inclusion: + (Parsetree.attributes -> + Parsetree.attributes -> string -> (string*string) option ) ref + +val check_duplicated_labels: + (Parsetree.label_declaration list -> + string Asttypes.loc option + ) ref +val error_of_extension: Parsetree.extension -> Location.error + +val warning_attribute: ?ppwarning:bool -> Parsetree.attribute -> unit + (** Apply warning settings from the specified attribute. + "ocaml.warning"/"ocaml.warnerror" (and variants without the prefix) + are processed and other attributes are ignored. + + Also implement ocaml.ppwarning (unless ~ppwarning:false is + passed). + *) + +val warning_scope: + ?ppwarning:bool -> + Parsetree.attributes -> (unit -> 'a) -> 'a + (** Execute a function in a new scope for warning settings. This + means that the effect of any call to [warning_attribute] during + the execution of this function will be discarded after + execution. + + The function also takes a list of attributes which are processed + with [warning_attribute] in the fresh scope before the function + is executed. 
+ *) + +val warn_on_literal_pattern: Parsetree.attributes -> bool +val explicit_arity: Parsetree.attributes -> bool + + +val immediate: Parsetree.attributes -> bool + +val has_unboxed: Parsetree.attributes -> bool +val has_boxed: Parsetree.attributes -> bool diff --git a/res_syntax/compiler-libs-406/callback.ml b/res_syntax/compiler-libs-406/callback.ml new file mode 100644 index 0000000000..e0cfb6553a --- /dev/null +++ b/res_syntax/compiler-libs-406/callback.ml @@ -0,0 +1,27 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Registering OCaml values with the C runtime for later callbacks *) + +external register_named_value : string -> Obj.t -> unit + = "caml_register_named_value" + +let register name v = + register_named_value name (Obj.repr v) + +let register_exception name (exn : exn) = + let exn = Obj.repr exn in + let slot = if Obj.tag exn = Obj.object_tag then exn else Obj.field exn 0 in + register_named_value name slot diff --git a/res_syntax/compiler-libs-406/callback.mli b/res_syntax/compiler-libs-406/callback.mli new file mode 100644 index 0000000000..27c8b5004b --- /dev/null +++ b/res_syntax/compiler-libs-406/callback.mli @@ -0,0 +1,34 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Registering OCaml values with the C runtime. + + This module allows OCaml values to be registered with the C runtime + under a symbolic name, so that C code can later call back registered + OCaml functions, or raise registered OCaml exceptions. +*) + +val register : string -> 'a -> unit +(** [Callback.register n v] registers the value [v] under + the name [n]. C code can later retrieve a handle to [v] + by calling [caml_named_value(n)]. *) + +val register_exception : string -> exn -> unit +(** [Callback.register_exception n exn] registers the + exception contained in the exception value [exn] + under the name [n]. C code can later retrieve a handle to + the exception by calling [caml_named_value(n)]. The exception + value thus obtained is suitable for passing as first argument + to [raise_constant] or [raise_with_arg]. 
*) diff --git a/res_syntax/compiler-libs-406/camlinternalBigarray.ml b/res_syntax/compiler-libs-406/camlinternalBigarray.ml new file mode 100644 index 0000000000..04d908beab --- /dev/null +++ b/res_syntax/compiler-libs-406/camlinternalBigarray.ml @@ -0,0 +1,54 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Manuel Serrano and Xavier Leroy, INRIA Rocquencourt *) +(* *) +(* Copyright 2000 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Bigarray types. These must be kept in sync with the tables in + ../typing/typeopt.ml *) + +type float32_elt = Float32_elt +type float64_elt = Float64_elt +type int8_signed_elt = Int8_signed_elt +type int8_unsigned_elt = Int8_unsigned_elt +type int16_signed_elt = Int16_signed_elt +type int16_unsigned_elt = Int16_unsigned_elt +type int32_elt = Int32_elt +type int64_elt = Int64_elt +type int_elt = Int_elt +type nativeint_elt = Nativeint_elt +type complex32_elt = Complex32_elt +type complex64_elt = Complex64_elt + +type ('a, 'b) kind = + Float32 : (float, float32_elt) kind + | Float64 : (float, float64_elt) kind + | Int8_signed : (int, int8_signed_elt) kind + | Int8_unsigned : (int, int8_unsigned_elt) kind + | Int16_signed : (int, int16_signed_elt) kind + | Int16_unsigned : (int, int16_unsigned_elt) kind + | Int32 : (int32, int32_elt) kind + | Int64 : (int64, int64_elt) kind + | Int : (int, int_elt) kind + | Nativeint : (nativeint, nativeint_elt) kind + | Complex32 : (Complex.t, complex32_elt) kind + | Complex64 : (Complex.t, complex64_elt) kind + | Char : (char, int8_unsigned_elt) kind + +type c_layout = C_layout_typ +type fortran_layout = Fortran_layout_typ + +type 'a layout = + C_layout: c_layout layout + | Fortran_layout: fortran_layout layout + +type ('a, 'b, 'c) genarray diff --git a/res_syntax/compiler-libs-406/camlinternalLazy.ml b/res_syntax/compiler-libs-406/camlinternalLazy.ml new file mode 100644 index 0000000000..f64be62e30 --- /dev/null +++ b/res_syntax/compiler-libs-406/camlinternalLazy.ml @@ -0,0 +1,65 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Damien Doligez, projet Para, INRIA Rocquencourt *) +(* *) +(* Copyright 1997 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Internals of forcing lazy values. 
*) + +exception Undefined + +let raise_undefined = Obj.repr (fun () -> raise Undefined) + +(* Assume [blk] is a block with tag lazy *) +let force_lazy_block (blk : 'arg lazy_t) = + let closure = (Obj.obj (Obj.field (Obj.repr blk) 0) : unit -> 'arg) in + Obj.set_field (Obj.repr blk) 0 raise_undefined; + try + let result = closure () in + (* do set_field BEFORE set_tag *) + Obj.set_field (Obj.repr blk) 0 (Obj.repr result); + Obj.set_tag (Obj.repr blk) Obj.forward_tag; + result + with e -> + Obj.set_field (Obj.repr blk) 0 (Obj.repr (fun () -> raise e)); + raise e + + +(* Assume [blk] is a block with tag lazy *) +let force_val_lazy_block (blk : 'arg lazy_t) = + let closure = (Obj.obj (Obj.field (Obj.repr blk) 0) : unit -> 'arg) in + Obj.set_field (Obj.repr blk) 0 raise_undefined; + let result = closure () in + (* do set_field BEFORE set_tag *) + Obj.set_field (Obj.repr blk) 0 (Obj.repr result); + Obj.set_tag (Obj.repr blk) (Obj.forward_tag); + result + + +(* [force] is not used, since [Lazy.force] is declared as a primitive + whose code inlines the tag tests of its argument. This function is + here for the sake of completeness, and for debugging purpose. *) + +let force (lzv : 'arg lazy_t) = + let x = Obj.repr lzv in + let t = Obj.tag x in + if t = Obj.forward_tag then (Obj.obj (Obj.field x 0) : 'arg) else + if t <> Obj.lazy_tag then (Obj.obj x : 'arg) + else force_lazy_block lzv + + +let force_val (lzv : 'arg lazy_t) = + let x = Obj.repr lzv in + let t = Obj.tag x in + if t = Obj.forward_tag then (Obj.obj (Obj.field x 0) : 'arg) else + if t <> Obj.lazy_tag then (Obj.obj x : 'arg) + else force_val_lazy_block lzv diff --git a/res_syntax/compiler-libs-406/camlinternalLazy.mli b/res_syntax/compiler-libs-406/camlinternalLazy.mli new file mode 100644 index 0000000000..101535cd45 --- /dev/null +++ b/res_syntax/compiler-libs-406/camlinternalLazy.mli @@ -0,0 +1,27 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Damien Doligez, projet Para, INRIA Rocquencourt *) +(* *) +(* Copyright 1997 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Run-time support for lazy values. + All functions in this module are for system use only, not for the + casual user. *) + +exception Undefined + +val force_lazy_block : 'a lazy_t -> 'a + +val force_val_lazy_block : 'a lazy_t -> 'a + +val force : 'a lazy_t -> 'a +val force_val : 'a lazy_t -> 'a diff --git a/res_syntax/compiler-libs-406/camlinternalMod.ml b/res_syntax/compiler-libs-406/camlinternalMod.ml new file mode 100644 index 0000000000..5ccf92893b --- /dev/null +++ b/res_syntax/compiler-libs-406/camlinternalMod.ml @@ -0,0 +1,74 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 2004 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +type shape = + | Function + | Lazy + | Class + | Module of shape array + | Value of Obj.t + +let overwrite o n = + assert (Obj.size o >= Obj.size n); + for i = 0 to Obj.size n - 1 do + Obj.set_field o i (Obj.field n i) + done + +let rec init_mod loc shape = + match shape with + | Function -> + (* Two code pointer words (curried and full application), arity + and eight environment entries makes 11 words. *) + let closure = Obj.new_block Obj.closure_tag 11 in + let template = + Obj.repr (fun _ -> raise (Undefined_recursive_module loc)) + in + overwrite closure template; + closure + | Lazy -> + Obj.repr (lazy (raise (Undefined_recursive_module loc))) + | Class -> + Obj.repr (CamlinternalOO.dummy_class loc) + | Module comps -> + Obj.repr (Array.map (init_mod loc) comps) + | Value v -> + v + +let rec update_mod shape o n = + match shape with + | Function -> + if Obj.tag n = Obj.closure_tag && Obj.size n <= Obj.size o + then begin overwrite o n; Obj.truncate o (Obj.size n) (* PR#4008 *) end + else overwrite o (Obj.repr (fun x -> (Obj.obj n : _ -> _) x)) + | Lazy -> + if Obj.tag n = Obj.lazy_tag then + Obj.set_field o 0 (Obj.field n 0) + else if Obj.tag n = Obj.forward_tag then begin (* PR#4316 *) + Obj.set_tag o Obj.forward_tag; + Obj.set_field o 0 (Obj.field n 0) + end else begin + (* forwarding pointer was shortcut by GC *) + Obj.set_tag o Obj.forward_tag; + Obj.set_field o 0 n + end + | Class -> + assert (Obj.tag n = 0 && Obj.size n = 4); + overwrite o n + | Module comps -> + assert (Obj.tag n = 0 && Obj.size n >= Array.length comps); + for i = 0 to Array.length comps - 1 do + update_mod comps.(i) (Obj.field o i) (Obj.field n i) + done + | Value _ -> () (* the value is already there *) diff --git a/res_syntax/compiler-libs-406/camlinternalMod.mli b/res_syntax/compiler-libs-406/camlinternalMod.mli new file mode 100644 index 0000000000..cf7ffb09db --- /dev/null +++ b/res_syntax/compiler-libs-406/camlinternalMod.mli @@ -0,0 +1,28 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 2004 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Run-time support for recursive modules. + All functions in this module are for system use only, not for the + casual user. *) + +type shape = + | Function + | Lazy + | Class + | Module of shape array + | Value of Obj.t + +val init_mod: string * int * int -> shape -> Obj.t +val update_mod: shape -> Obj.t -> Obj.t -> unit diff --git a/res_syntax/compiler-libs-406/camlinternalOO.ml b/res_syntax/compiler-libs-406/camlinternalOO.ml new file mode 100644 index 0000000000..0188c148cd --- /dev/null +++ b/res_syntax/compiler-libs-406/camlinternalOO.ml @@ -0,0 +1,613 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Jerome Vouillon, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 2002 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. 
This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +open Obj + +(**** Object representation ****) + +external set_id: 'a -> 'a = "caml_set_oo_id" [@@noalloc] + +(**** Object copy ****) + +let copy o = + let o = (Obj.obj (Obj.dup (Obj.repr o))) in + set_id o + +(**** Compression options ****) +(* Parameters *) +type params = { + mutable compact_table : bool; + mutable copy_parent : bool; + mutable clean_when_copying : bool; + mutable retry_count : int; + mutable bucket_small_size : int + } + +let params = { + compact_table = true; + copy_parent = true; + clean_when_copying = true; + retry_count = 3; + bucket_small_size = 16 +} + +(**** Parameters ****) + +let initial_object_size = 2 + +(**** Items ****) + +type item = DummyA | DummyB | DummyC of int +let _ = [DummyA; DummyB; DummyC 0] (* to avoid warnings *) + +let dummy_item = (magic () : item) + +(**** Types ****) + +type tag +type label = int +type closure = item +type t = DummyA | DummyB | DummyC of int +let _ = [DummyA; DummyB; DummyC 0] (* to avoid warnings *) + +type obj = t array +external ret : (obj -> 'a) -> closure = "%identity" + +(**** Labels ****) + +let public_method_label s : tag = + let accu = ref 0 in + for i = 0 to String.length s - 1 do + accu := 223 * !accu + Char.code s.[i] + done; + (* reduce to 31 bits *) + accu := !accu land (1 lsl 31 - 1); + (* make it signed for 64 bits architectures *) + let tag = if !accu > 0x3FFFFFFF then !accu - (1 lsl 31) else !accu in + (* Printf.eprintf "%s = %d\n" s tag; flush stderr; *) + magic tag + +(**** Sparse array ****) + +module Vars = + Map.Make(struct type t = string let compare (x:t) y = compare x y end) +type vars = int Vars.t + +module Meths = + Map.Make(struct type t = string let compare (x:t) y = compare x y end) +type meths = label Meths.t +module Labs = + Map.Make(struct type t = label let compare (x:t) y = compare x y end) +type labs = bool Labs.t + +(* The compiler assumes that the first field of this structure is [size]. 
*) +type table = + { mutable size: int; + mutable methods: closure array; + mutable methods_by_name: meths; + mutable methods_by_label: labs; + mutable previous_states: + (meths * labs * (label * item) list * vars * + label list * string list) list; + mutable hidden_meths: (label * item) list; + mutable vars: vars; + mutable initializers: (obj -> unit) list } + +let dummy_table = + { methods = [| dummy_item |]; + methods_by_name = Meths.empty; + methods_by_label = Labs.empty; + previous_states = []; + hidden_meths = []; + vars = Vars.empty; + initializers = []; + size = 0 } + +let table_count = ref 0 + +(* dummy_met should be a pointer, so use an atom *) +let dummy_met : item = obj (Obj.new_block 0 0) +(* if debugging is needed, this could be a good idea: *) +(* let dummy_met () = failwith "Undefined method" *) + +let rec fit_size n = + if n <= 2 then n else + fit_size ((n+1)/2) * 2 + +let new_table pub_labels = + incr table_count; + let len = Array.length pub_labels in + let methods = Array.make (len*2+2) dummy_met in + methods.(0) <- magic len; + methods.(1) <- magic (fit_size len * Sys.word_size / 8 - 1); + for i = 0 to len - 1 do methods.(i*2+3) <- magic pub_labels.(i) done; + { methods = methods; + methods_by_name = Meths.empty; + methods_by_label = Labs.empty; + previous_states = []; + hidden_meths = []; + vars = Vars.empty; + initializers = []; + size = initial_object_size } + +let resize array new_size = + let old_size = Array.length array.methods in + if new_size > old_size then begin + let new_buck = Array.make new_size dummy_met in + Array.blit array.methods 0 new_buck 0 old_size; + array.methods <- new_buck + end + +let put array label element = + resize array (label + 1); + array.methods.(label) <- element + +(**** Classes ****) + +let method_count = ref 0 +let inst_var_count = ref 0 + +(* type t *) +type meth = item + +let new_method table = + let index = Array.length table.methods in + resize table (index + 1); + index + +let get_method_label table name = + try + Meths.find name table.methods_by_name + with Not_found -> + let label = new_method table in + table.methods_by_name <- Meths.add name label table.methods_by_name; + table.methods_by_label <- Labs.add label true table.methods_by_label; + label + +let get_method_labels table names = + Array.map (get_method_label table) names + +let set_method table label element = + incr method_count; + if Labs.find label table.methods_by_label then + put table label element + else + table.hidden_meths <- (label, element) :: table.hidden_meths + +let get_method table label = + try List.assoc label table.hidden_meths + with Not_found -> table.methods.(label) + +let to_list arr = + if arr == magic 0 then [] else Array.to_list arr + +let narrow table vars virt_meths concr_meths = + let vars = to_list vars + and virt_meths = to_list virt_meths + and concr_meths = to_list concr_meths in + let virt_meth_labs = List.map (get_method_label table) virt_meths in + let concr_meth_labs = List.map (get_method_label table) concr_meths in + table.previous_states <- + (table.methods_by_name, table.methods_by_label, table.hidden_meths, + table.vars, virt_meth_labs, vars) + :: table.previous_states; + table.vars <- + Vars.fold + (fun lab info tvars -> + if List.mem lab vars then Vars.add lab info tvars else tvars) + table.vars Vars.empty; + let by_name = ref Meths.empty in + let by_label = ref Labs.empty in + List.iter2 + (fun met label -> + by_name := Meths.add met label !by_name; + by_label := + Labs.add label + (try Labs.find label 
table.methods_by_label with Not_found -> true) + !by_label) + concr_meths concr_meth_labs; + List.iter2 + (fun met label -> + by_name := Meths.add met label !by_name; + by_label := Labs.add label false !by_label) + virt_meths virt_meth_labs; + table.methods_by_name <- !by_name; + table.methods_by_label <- !by_label; + table.hidden_meths <- + List.fold_right + (fun ((lab, _) as met) hm -> + if List.mem lab virt_meth_labs then hm else met::hm) + table.hidden_meths + [] + +let widen table = + let (by_name, by_label, saved_hidden_meths, saved_vars, virt_meths, vars) = + List.hd table.previous_states + in + table.previous_states <- List.tl table.previous_states; + table.vars <- + List.fold_left + (fun s v -> Vars.add v (Vars.find v table.vars) s) + saved_vars vars; + table.methods_by_name <- by_name; + table.methods_by_label <- by_label; + table.hidden_meths <- + List.fold_right + (fun ((lab, _) as met) hm -> + if List.mem lab virt_meths then hm else met::hm) + table.hidden_meths + saved_hidden_meths + +let new_slot table = + let index = table.size in + table.size <- index + 1; + index + +let new_variable table name = + try Vars.find name table.vars + with Not_found -> + let index = new_slot table in + if name <> "" then table.vars <- Vars.add name index table.vars; + index + +let to_array arr = + if arr = Obj.magic 0 then [||] else arr + +let new_methods_variables table meths vals = + let meths = to_array meths in + let nmeths = Array.length meths and nvals = Array.length vals in + let res = Array.make (nmeths + nvals) 0 in + for i = 0 to nmeths - 1 do + res.(i) <- get_method_label table meths.(i) + done; + for i = 0 to nvals - 1 do + res.(i+nmeths) <- new_variable table vals.(i) + done; + res + +let get_variable table name = + try Vars.find name table.vars with Not_found -> assert false + +let get_variables table names = + Array.map (get_variable table) names + +let add_initializer table f = + table.initializers <- f::table.initializers + +(* +module Keys = + Map.Make(struct type t = tag array let compare (x:t) y = compare x y end) +let key_map = ref Keys.empty +let get_key tags : item = + try magic (Keys.find tags !key_map : tag array) + with Not_found -> + key_map := Keys.add tags tags !key_map; + magic tags +*) + +let create_table public_methods = + if public_methods == magic 0 then new_table [||] else + (* [public_methods] must be in ascending order for bytecode *) + let tags = Array.map public_method_label public_methods in + let table = new_table tags in + Array.iteri + (fun i met -> + let lab = i*2+2 in + table.methods_by_name <- Meths.add met lab table.methods_by_name; + table.methods_by_label <- Labs.add lab true table.methods_by_label) + public_methods; + table + +let init_class table = + inst_var_count := !inst_var_count + table.size - 1; + table.initializers <- List.rev table.initializers; + resize table (3 + magic table.methods.(1) * 16 / Sys.word_size) + +let inherits cla vals virt_meths concr_meths (_, super, _, env) top = + narrow cla vals virt_meths concr_meths; + let init = + if top then super cla env else Obj.repr (super cla) in + widen cla; + Array.concat + [[| repr init |]; + magic (Array.map (get_variable cla) (to_array vals) : int array); + Array.map + (fun nm -> repr (get_method cla (get_method_label cla nm) : closure)) + (to_array concr_meths) ] + +let make_class pub_meths class_init = + let table = create_table pub_meths in + let env_init = class_init table in + init_class table; + (env_init (Obj.repr 0), class_init, env_init, Obj.repr 0) + +type init_table = { 
mutable env_init: t; mutable class_init: table -> t } + +let make_class_store pub_meths class_init init_table = + let table = create_table pub_meths in + let env_init = class_init table in + init_class table; + init_table.class_init <- class_init; + init_table.env_init <- env_init + +let dummy_class loc = + let undef = fun _ -> raise (Undefined_recursive_module loc) in + (Obj.magic undef, undef, undef, Obj.repr 0) + +(**** Objects ****) + +let create_object table = + (* XXX Appel de [obj_block] | Call to [obj_block] *) + let obj = Obj.new_block Obj.object_tag table.size in + (* XXX Appel de [caml_modify] | Call to [caml_modify] *) + Obj.set_field obj 0 (Obj.repr table.methods); + Obj.obj (set_id obj) + +let create_object_opt obj_0 table = + if (Obj.magic obj_0 : bool) then obj_0 else begin + (* XXX Appel de [obj_block] | Call to [obj_block] *) + let obj = Obj.new_block Obj.object_tag table.size in + (* XXX Appel de [caml_modify] | Call to [caml_modify] *) + Obj.set_field obj 0 (Obj.repr table.methods); + Obj.obj (set_id obj) + end + +let rec iter_f obj = + function + [] -> () + | f::l -> f obj; iter_f obj l + +let run_initializers obj table = + let inits = table.initializers in + if inits <> [] then + iter_f obj inits + +let run_initializers_opt obj_0 obj table = + if (Obj.magic obj_0 : bool) then obj else begin + let inits = table.initializers in + if inits <> [] then iter_f obj inits; + obj + end + +let create_object_and_run_initializers obj_0 table = + if (Obj.magic obj_0 : bool) then obj_0 else begin + let obj = create_object table in + run_initializers obj table; + obj + end + +(* Equivalent primitive below +let sendself obj lab = + (magic obj : (obj -> t) array array).(0).(lab) obj +*) +external send : obj -> tag -> 'a = "%send" +external sendcache : obj -> tag -> t -> int -> 'a = "%sendcache" +external sendself : obj -> label -> 'a = "%sendself" +external get_public_method : obj -> tag -> closure + = "caml_get_public_method" [@@noalloc] + +(**** table collection access ****) + +type tables = + | Empty + | Cons of {key : closure; mutable data: tables; mutable next: tables} + +let set_data tables v = match tables with + | Empty -> assert false + | Cons tables -> tables.data <- v +let set_next tables v = match tables with + | Empty -> assert false + | Cons tables -> tables.next <- v +let get_key = function + | Empty -> assert false + | Cons tables -> tables.key +let get_data = function + | Empty -> assert false + | Cons tables -> tables.data +let get_next = function + | Empty -> assert false + | Cons tables -> tables.next + +let build_path n keys tables = + let res = Cons {key = Obj.magic 0; data = Empty; next = Empty} in + let r = ref res in + for i = 0 to n do + r := Cons {key = keys.(i); data = !r; next = Empty} + done; + set_data tables !r; + res + +let rec lookup_keys i keys tables = + if i < 0 then tables else + let key = keys.(i) in + let rec lookup_key (tables:tables) = + if get_key tables == key then + match get_data tables with + | Empty -> assert false + | Cons _ as tables_data -> + lookup_keys (i-1) keys tables_data + else + match get_next tables with + | Cons _ as next -> lookup_key next + | Empty -> + let next : tables = Cons {key; data = Empty; next = Empty} in + set_next tables next; + build_path (i-1) keys next + in + lookup_key tables + +let lookup_tables root keys = + match get_data root with + | Cons _ as root_data -> + lookup_keys (Array.length keys - 1) keys root_data + | Empty -> + build_path (Array.length keys - 1) keys root + +(**** builtin methods ****) + +let 
get_const x = ret (fun _obj -> x) +let get_var n = ret (fun obj -> Array.unsafe_get obj n) +let get_env e n = + ret (fun obj -> + Array.unsafe_get (Obj.magic (Array.unsafe_get obj e) : obj) n) +let get_meth n = ret (fun obj -> sendself obj n) +let set_var n = ret (fun obj x -> Array.unsafe_set obj n x) +let app_const f x = ret (fun _obj -> f x) +let app_var f n = ret (fun obj -> f (Array.unsafe_get obj n)) +let app_env f e n = + ret (fun obj -> + f (Array.unsafe_get (Obj.magic (Array.unsafe_get obj e) : obj) n)) +let app_meth f n = ret (fun obj -> f (sendself obj n)) +let app_const_const f x y = ret (fun _obj -> f x y) +let app_const_var f x n = ret (fun obj -> f x (Array.unsafe_get obj n)) +let app_const_meth f x n = ret (fun obj -> f x (sendself obj n)) +let app_var_const f n x = ret (fun obj -> f (Array.unsafe_get obj n) x) +let app_meth_const f n x = ret (fun obj -> f (sendself obj n) x) +let app_const_env f x e n = + ret (fun obj -> + f x (Array.unsafe_get (Obj.magic (Array.unsafe_get obj e) : obj) n)) +let app_env_const f e n x = + ret (fun obj -> + f (Array.unsafe_get (Obj.magic (Array.unsafe_get obj e) : obj) n) x) +let meth_app_const n x = ret (fun obj -> (sendself obj n : _ -> _) x) +let meth_app_var n m = + ret (fun obj -> (sendself obj n : _ -> _) (Array.unsafe_get obj m)) +let meth_app_env n e m = + ret (fun obj -> (sendself obj n : _ -> _) + (Array.unsafe_get (Obj.magic (Array.unsafe_get obj e) : obj) m)) +let meth_app_meth n m = + ret (fun obj -> (sendself obj n : _ -> _) (sendself obj m)) +let send_const m x c = + ret (fun obj -> sendcache x m (Array.unsafe_get obj 0) c) +let send_var m n c = + ret (fun obj -> + sendcache (Obj.magic (Array.unsafe_get obj n) : obj) m + (Array.unsafe_get obj 0) c) +let send_env m e n c = + ret (fun obj -> + sendcache + (Obj.magic (Array.unsafe_get + (Obj.magic (Array.unsafe_get obj e) : obj) n) : obj) + m (Array.unsafe_get obj 0) c) +let send_meth m n c = + ret (fun obj -> + sendcache (sendself obj n) m (Array.unsafe_get obj 0) c) +let new_cache table = + let n = new_method table in + let n = + if n mod 2 = 0 || n > 2 + magic table.methods.(1) * 16 / Sys.word_size + then n else new_method table + in + table.methods.(n) <- Obj.magic 0; + n + +type impl = + GetConst + | GetVar + | GetEnv + | GetMeth + | SetVar + | AppConst + | AppVar + | AppEnv + | AppMeth + | AppConstConst + | AppConstVar + | AppConstEnv + | AppConstMeth + | AppVarConst + | AppEnvConst + | AppMethConst + | MethAppConst + | MethAppVar + | MethAppEnv + | MethAppMeth + | SendConst + | SendVar + | SendEnv + | SendMeth + | Closure of closure + +let method_impl table i arr = + let next () = incr i; magic arr.(!i) in + match next() with + GetConst -> let x : t = next() in get_const x + | GetVar -> let n = next() in get_var n + | GetEnv -> let e = next() in let n = next() in get_env e n + | GetMeth -> let n = next() in get_meth n + | SetVar -> let n = next() in set_var n + | AppConst -> let f = next() in let x = next() in app_const f x + | AppVar -> let f = next() in let n = next () in app_var f n + | AppEnv -> + let f = next() in let e = next() in let n = next() in + app_env f e n + | AppMeth -> let f = next() in let n = next () in app_meth f n + | AppConstConst -> + let f = next() in let x = next() in let y = next() in + app_const_const f x y + | AppConstVar -> + let f = next() in let x = next() in let n = next() in + app_const_var f x n + | AppConstEnv -> + let f = next() in let x = next() in let e = next () in let n = next() in + app_const_env f x e n + | AppConstMeth -> + let f = 
next() in let x = next() in let n = next() in + app_const_meth f x n + | AppVarConst -> + let f = next() in let n = next() in let x = next() in + app_var_const f n x + | AppEnvConst -> + let f = next() in let e = next () in let n = next() in let x = next() in + app_env_const f e n x + | AppMethConst -> + let f = next() in let n = next() in let x = next() in + app_meth_const f n x + | MethAppConst -> + let n = next() in let x = next() in meth_app_const n x + | MethAppVar -> + let n = next() in let m = next() in meth_app_var n m + | MethAppEnv -> + let n = next() in let e = next() in let m = next() in + meth_app_env n e m + | MethAppMeth -> + let n = next() in let m = next() in meth_app_meth n m + | SendConst -> + let m = next() in let x = next() in send_const m x (new_cache table) + | SendVar -> + let m = next() in let n = next () in send_var m n (new_cache table) + | SendEnv -> + let m = next() in let e = next() in let n = next() in + send_env m e n (new_cache table) + | SendMeth -> + let m = next() in let n = next () in send_meth m n (new_cache table) + | Closure _ as clo -> magic clo + +let set_methods table methods = + let len = Array.length methods in let i = ref 0 in + while !i < len do + let label = methods.(!i) in let clo = method_impl table i methods in + set_method table label clo; + incr i + done + +(**** Statistics ****) + +type stats = + { classes: int; methods: int; inst_vars: int; } + +let stats () = + { classes = !table_count; + methods = !method_count; inst_vars = !inst_var_count; } diff --git a/res_syntax/compiler-libs-406/camlinternalOO.mli b/res_syntax/compiler-libs-406/camlinternalOO.mli new file mode 100644 index 0000000000..b6ffc70d7f --- /dev/null +++ b/res_syntax/compiler-libs-406/camlinternalOO.mli @@ -0,0 +1,153 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Jerome Vouillon, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Run-time support for objects and classes. + All functions in this module are for system use only, not for the + casual user. 
*) + +(** {1 Classes} *) + +type tag +type label +type table +type meth +type t +type obj +type closure +val public_method_label : string -> tag +val new_method : table -> label +val new_variable : table -> string -> int +val new_methods_variables : + table -> string array -> string array -> label array +val get_variable : table -> string -> int +val get_variables : table -> string array -> int array +val get_method_label : table -> string -> label +val get_method_labels : table -> string array -> label array +val get_method : table -> label -> meth +val set_method : table -> label -> meth -> unit +val set_methods : table -> label array -> unit +val narrow : table -> string array -> string array -> string array -> unit +val widen : table -> unit +val add_initializer : table -> (obj -> unit) -> unit +val dummy_table : table +val create_table : string array -> table +val init_class : table -> unit +val inherits : + table -> string array -> string array -> string array -> + (t * (table -> obj -> Obj.t) * t * obj) -> bool -> Obj.t array +val make_class : + string array -> (table -> Obj.t -> t) -> + (t * (table -> Obj.t -> t) * (Obj.t -> t) * Obj.t) +type init_table +val make_class_store : + string array -> (table -> t) -> init_table -> unit +val dummy_class : + string * int * int -> + (t * (table -> Obj.t -> t) * (Obj.t -> t) * Obj.t) + +(** {1 Objects} *) + +val copy : (< .. > as 'a) -> 'a +val create_object : table -> obj +val create_object_opt : obj -> table -> obj +val run_initializers : obj -> table -> unit +val run_initializers_opt : obj -> obj -> table -> obj +val create_object_and_run_initializers : obj -> table -> obj +external send : obj -> tag -> t = "%send" +external sendcache : obj -> tag -> t -> int -> t = "%sendcache" +external sendself : obj -> label -> t = "%sendself" +external get_public_method : obj -> tag -> closure + = "caml_get_public_method" [@@noalloc] + +(** {1 Table cache} *) + +type tables +val lookup_tables : tables -> closure array -> tables + +(** {1 Builtins to reduce code size} *) + +(* +val get_const : t -> closure +val get_var : int -> closure +val get_env : int -> int -> closure +val get_meth : label -> closure +val set_var : int -> closure +val app_const : (t -> t) -> t -> closure +val app_var : (t -> t) -> int -> closure +val app_env : (t -> t) -> int -> int -> closure +val app_meth : (t -> t) -> label -> closure +val app_const_const : (t -> t -> t) -> t -> t -> closure +val app_const_var : (t -> t -> t) -> t -> int -> closure +val app_const_env : (t -> t -> t) -> t -> int -> int -> closure +val app_const_meth : (t -> t -> t) -> t -> label -> closure +val app_var_const : (t -> t -> t) -> int -> t -> closure +val app_env_const : (t -> t -> t) -> int -> int -> t -> closure +val app_meth_const : (t -> t -> t) -> label -> t -> closure +val meth_app_const : label -> t -> closure +val meth_app_var : label -> int -> closure +val meth_app_env : label -> int -> int -> closure +val meth_app_meth : label -> label -> closure +val send_const : tag -> obj -> int -> closure +val send_var : tag -> int -> int -> closure +val send_env : tag -> int -> int -> int -> closure +val send_meth : tag -> label -> int -> closure +*) + +type impl = + GetConst + | GetVar + | GetEnv + | GetMeth + | SetVar + | AppConst + | AppVar + | AppEnv + | AppMeth + | AppConstConst + | AppConstVar + | AppConstEnv + | AppConstMeth + | AppVarConst + | AppEnvConst + | AppMethConst + | MethAppConst + | MethAppVar + | MethAppEnv + | MethAppMeth + | SendConst + | SendVar + | SendEnv + | SendMeth + | 
Closure of closure + +(** {1 Parameters} *) + +(* currently disabled *) +type params = + { mutable compact_table : bool; + mutable copy_parent : bool; + mutable clean_when_copying : bool; + mutable retry_count : int; + mutable bucket_small_size : int } + +val params : params + +(** {1 Statistics} *) + +type stats = + { classes : int; + methods : int; + inst_vars : int } +val stats : unit -> stats diff --git a/res_syntax/compiler-libs-406/ccomp.ml b/res_syntax/compiler-libs-406/ccomp.ml new file mode 100644 index 0000000000..3f186b3bb0 --- /dev/null +++ b/res_syntax/compiler-libs-406/ccomp.ml @@ -0,0 +1,204 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Compiling C files and building C libraries *) + +let command cmdline = + if !Clflags.verbose then begin + prerr_string "+ "; + prerr_string cmdline; + prerr_newline() + end; + Sys.command cmdline + +let run_command cmdline = ignore(command cmdline) + +(* Build @responsefile to work around Windows limitations on + command-line length *) +let build_diversion lst = + let (responsefile, oc) = Filename.open_temp_file "camlresp" "" in + List.iter (fun f -> Printf.fprintf oc "%s\n" f) lst; + close_out oc; + at_exit (fun () -> Misc.remove_file responsefile); + "@" ^ responsefile + +let quote_files lst = + let lst = List.filter (fun f -> f <> "") lst in + let quoted = List.map Filename.quote lst in + let s = String.concat " " quoted in + if String.length s >= 4096 && Sys.os_type = "Win32" + then build_diversion quoted + else s + +let quote_prefixed pr lst = + let lst = List.filter (fun f -> f <> "") lst in + let lst = List.map (fun f -> pr ^ f) lst in + quote_files lst + +let quote_optfile = function + | None -> "" + | Some f -> Filename.quote f + +let display_msvc_output file name = + let c = open_in file in + try + let first = input_line c in + if first <> Filename.basename name then + print_string first; + while true do + print_string (input_line c) + done + with _ -> + close_in c; + Sys.remove file + +let compile_file ?output ?(opt="") name = + let (pipe, file) = + if Config.ccomp_type = "msvc" && not !Clflags.verbose then + try + let (t, c) = Filename.open_temp_file "msvc" "stdout" in + close_out c; + (Printf.sprintf " > %s" (Filename.quote t), t) + with _ -> + ("", "") + else + ("", "") in + let exit = + command + (Printf.sprintf + "%s %s %s -c %s %s %s %s %s%s" + (match !Clflags.c_compiler with + | Some cc -> cc + | None -> + let (cflags, cppflags) = + if !Clflags.native_code + then (Config.ocamlopt_cflags, Config.ocamlopt_cppflags) + else (Config.ocamlc_cflags, Config.ocamlc_cppflags) in + (String.concat " " [Config.c_compiler; cflags; cppflags])) + (match output with + | None -> "" + | Some o -> Printf.sprintf "%s%s" Config.c_output_obj o) + opt + (if !Clflags.debug && Config.ccomp_type <> "msvc" then "-g" else "") + (String.concat " " (List.rev !Clflags.all_ccopts)) + (quote_prefixed "-I" (List.rev !Clflags.include_dirs)) + (Clflags.std_include_flag "-I") + (Filename.quote name) + (* cl tediously includes the name of the C file 
as the first thing it + outputs (in fairness, the tedious thing is that there's no switch to + disable this behaviour). In the absence of the Unix module, use + a temporary file to filter the output (cannot pipe the output to a + filter because this removes the exit status of cl, which is wanted. + *) + pipe) in + if pipe <> "" + then display_msvc_output file name; + exit + +let macos_create_empty_archive ~quoted_archive = + let result = + command (Printf.sprintf "%s rc %s /dev/null" Config.ar quoted_archive) + in + if result <> 0 then result + else + let result = + command (Printf.sprintf "%s %s 2> /dev/null" Config.ranlib quoted_archive) + in + if result <> 0 then result + else + command (Printf.sprintf "%s d %s /dev/null" Config.ar quoted_archive) + +let create_archive archive file_list = + Misc.remove_file archive; + let quoted_archive = Filename.quote archive in + match Config.ccomp_type with + "msvc" -> + command(Printf.sprintf "link /lib /nologo /out:%s %s" + quoted_archive (quote_files file_list)) + | _ -> + assert(String.length Config.ar > 0); + let is_macosx = + match Config.system with + | "macosx" -> true + | _ -> false + in + if is_macosx && file_list = [] then (* PR#6550 *) + macos_create_empty_archive ~quoted_archive + else + let r1 = + command(Printf.sprintf "%s rc %s %s" + Config.ar quoted_archive (quote_files file_list)) in + if r1 <> 0 || String.length Config.ranlib = 0 + then r1 + else command(Config.ranlib ^ " " ^ quoted_archive) + +let expand_libname name = + if String.length name < 2 || String.sub name 0 2 <> "-l" + then name + else begin + let libname = + "lib" ^ String.sub name 2 (String.length name - 2) ^ Config.ext_lib in + try + Misc.find_in_path !Config.load_path libname + with Not_found -> + libname + end + +type link_mode = + | Exe + | Dll + | MainDll + | Partial + +let remove_Wl cclibs = + cclibs |> List.map (fun cclib -> + (* -Wl,-foo,bar -> -foo bar *) + if String.length cclib >= 4 && "-Wl," = String.sub cclib 0 4 then + String.map (function ',' -> ' ' | c -> c) + (String.sub cclib 4 (String.length cclib - 4)) + else cclib) + +let call_linker mode output_name files extra = + let cmd = + if mode = Partial then + let l_prefix = + match Config.ccomp_type with + | "msvc" -> "/libpath:" + | _ -> "-L" + in + Printf.sprintf "%s%s %s %s %s" + Config.native_pack_linker + (Filename.quote output_name) + (quote_prefixed l_prefix !Config.load_path) + (quote_files (remove_Wl files)) + extra + else + Printf.sprintf "%s -o %s %s %s %s %s %s %s" + (match !Clflags.c_compiler, mode with + | Some cc, _ -> cc + | None, Exe -> Config.mkexe + | None, Dll -> Config.mkdll + | None, MainDll -> Config.mkmaindll + | None, Partial -> assert false + ) + (Filename.quote output_name) + (if !Clflags.gprofile then Config.cc_profile else "") + "" (*(Clflags.std_include_flag "-I")*) + (quote_prefixed "-L" !Config.load_path) + (String.concat " " (List.rev !Clflags.all_ccopts)) + (quote_files files) + extra + in + command cmd = 0 diff --git a/res_syntax/compiler-libs-406/ccomp.mli b/res_syntax/compiler-libs-406/ccomp.mli new file mode 100644 index 0000000000..17094ba213 --- /dev/null +++ b/res_syntax/compiler-libs-406/ccomp.mli @@ -0,0 +1,33 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. 
This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Compiling C files and building C libraries *) + +val command: string -> int +val run_command: string -> unit +val compile_file: ?output:string -> ?opt:string -> string -> int +val create_archive: string -> string list -> int +val expand_libname: string -> string +val quote_files: string list -> string +val quote_optfile: string option -> string +(*val make_link_options: string list -> string*) + +type link_mode = + | Exe + | Dll + | MainDll + | Partial + +val call_linker: link_mode -> string -> string list -> string -> bool diff --git a/res_syntax/compiler-libs-406/char.ml b/res_syntax/compiler-libs-406/char.ml new file mode 100644 index 0000000000..fb7660d077 --- /dev/null +++ b/res_syntax/compiler-libs-406/char.ml @@ -0,0 +1,76 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Character operations *) + +external code: char -> int = "%identity" +external unsafe_chr: int -> char = "%identity" + +let chr n = + if n < 0 || n > 255 then invalid_arg "Char.chr" else unsafe_chr n + +external bytes_create: int -> bytes = "caml_create_bytes" +external bytes_unsafe_set : bytes -> int -> char -> unit + = "%bytes_unsafe_set" +external unsafe_to_string : bytes -> string = "%bytes_to_string" + +let escaped = function + | '\'' -> "\\'" + | '\\' -> "\\\\" + | '\n' -> "\\n" + | '\t' -> "\\t" + | '\r' -> "\\r" + | '\b' -> "\\b" + | ' ' .. 
'~' as c -> + let s = bytes_create 1 in + bytes_unsafe_set s 0 c; + unsafe_to_string s + | c -> + let n = code c in + let s = bytes_create 4 in + bytes_unsafe_set s 0 '\\'; + bytes_unsafe_set s 1 (unsafe_chr (48 + n / 100)); + bytes_unsafe_set s 2 (unsafe_chr (48 + (n / 10) mod 10)); + bytes_unsafe_set s 3 (unsafe_chr (48 + n mod 10)); + unsafe_to_string s + +let lowercase c = + if (c >= 'A' && c <= 'Z') + || (c >= '\192' && c <= '\214') + || (c >= '\216' && c <= '\222') + then unsafe_chr(code c + 32) + else c + +let uppercase c = + if (c >= 'a' && c <= 'z') + || (c >= '\224' && c <= '\246') + || (c >= '\248' && c <= '\254') + then unsafe_chr(code c - 32) + else c + +let lowercase_ascii c = + if (c >= 'A' && c <= 'Z') + then unsafe_chr(code c + 32) + else c + +let uppercase_ascii c = + if (c >= 'a' && c <= 'z') + then unsafe_chr(code c - 32) + else c + +type t = char + +let compare c1 c2 = code c1 - code c2 +let equal (c1: t) (c2: t) = compare c1 c2 = 0 diff --git a/res_syntax/compiler-libs-406/char.mli b/res_syntax/compiler-libs-406/char.mli new file mode 100644 index 0000000000..9c62700565 --- /dev/null +++ b/res_syntax/compiler-libs-406/char.mli @@ -0,0 +1,73 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Character operations. *) + +external code : char -> int = "%identity" +(** Return the ASCII code of the argument. *) + +val chr : int -> char +(** Return the character with the given ASCII code. + Raise [Invalid_argument "Char.chr"] if the argument is + outside the range 0--255. *) + +val escaped : char -> string +(** Return a string representing the given character, + with special characters escaped following the lexical conventions + of OCaml. + All characters outside the ASCII printable range (32..126) are + escaped, as well as backslash, double-quote, and single-quote. *) + +val lowercase : char -> char + [@@ocaml.deprecated "Use Char.lowercase_ascii instead."] +(** Convert the given character to its equivalent lowercase character, + using the ISO Latin-1 (8859-1) character set. + @deprecated Functions operating on Latin-1 character set are deprecated. *) + +val uppercase : char -> char + [@@ocaml.deprecated "Use Char.uppercase_ascii instead."] +(** Convert the given character to its equivalent uppercase character, + using the ISO Latin-1 (8859-1) character set. + @deprecated Functions operating on Latin-1 character set are deprecated. *) + +val lowercase_ascii : char -> char +(** Convert the given character to its equivalent lowercase character, + using the US-ASCII character set. + @since 4.03.0 *) + +val uppercase_ascii : char -> char +(** Convert the given character to its equivalent uppercase character, + using the US-ASCII character set. + @since 4.03.0 *) + +type t = char +(** An alias for the type of characters. *) + +val compare: t -> t -> int +(** The comparison function for characters, with the same specification as + {!Stdlib.compare}. 
Along with the type [t], this function [compare] + allows the module [Char] to be passed as argument to the functors + {!Set.Make} and {!Map.Make}. *) + +val equal: t -> t -> bool +(** The equal function for chars. + @since 4.03.0 *) + +(**/**) + +(* The following is for system use only. Do not call directly. *) + +external unsafe_chr : int -> char = "%identity" + diff --git a/res_syntax/compiler-libs-406/clflags.ml b/res_syntax/compiler-libs-406/clflags.ml new file mode 100644 index 0000000000..c502c41cdd --- /dev/null +++ b/res_syntax/compiler-libs-406/clflags.ml @@ -0,0 +1,405 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Command-line parameters *) + +module Int_arg_helper = Arg_helper.Make (struct + module Key = struct + include Numbers.Int + let of_string = int_of_string + end + + module Value = struct + include Numbers.Int + let of_string = int_of_string + end +end) +module Float_arg_helper = Arg_helper.Make (struct + module Key = struct + include Numbers.Int + let of_string = int_of_string + end + + module Value = struct + include Numbers.Float + let of_string = float_of_string + end +end) + +let objfiles = ref ([] : string list) (* .cmo and .cma files *) +and ccobjs = ref ([] : string list) (* .o, .a, .so and -cclib -lxxx *) +and dllibs = ref ([] : string list) (* .so and -dllib -lxxx *) + +let compile_only = ref false (* -c *) +and output_name = ref (None : string option) (* -o *) +and include_dirs = ref ([] : string list)(* -I *) +and no_std_include = ref false (* -nostdlib *) +and print_types = ref false (* -i *) +and make_archive = ref false (* -a *) +and debug = ref false (* -g *) +and fast = ref false (* -unsafe *) +and use_linscan = ref false (* -linscan *) +and link_everything = ref false (* -linkall *) +and custom_runtime = ref false (* -custom *) +and no_check_prims = ref false (* -no-check-prims *) +and bytecode_compatible_32 = ref false (* -compat-32 *) +and output_c_object = ref false (* -output-obj *) +and output_complete_object = ref false (* -output-complete-obj *) +and all_ccopts = ref ([] : string list) (* -ccopt *) +and classic = ref false (* -nolabels *) +and nopervasives = ref false (* -nopervasives *) +and preprocessor = ref(None : string option) (* -pp *) +and all_ppx = ref ([] : string list) (* -ppx *) +let annotations = ref false (* -annot *) +let binary_annotations = ref false (* -annot *) +and use_threads = ref false (* -thread *) +and use_vmthreads = ref false (* -vmthread *) +and noassert = ref false (* -noassert *) +and verbose = ref false (* -verbose *) +and noversion = ref false (* -no-version *) +and noprompt = ref false (* -noprompt *) +and nopromptcont = ref false (* -nopromptcont *) +and init_file = ref (None : string option) (* -init *) +and noinit = ref false (* -noinit *) +and open_modules = ref [] (* -open *) +and use_prims = ref "" (* -use-prims ... *) +and use_runtime = ref "" (* -use-runtime ... 
*) +and principal = ref false (* -principal *) +and real_paths = ref true (* -short-paths *) +and recursive_types = ref false (* -rectypes *) +and strict_sequence = ref false (* -strict-sequence *) +and strict_formats = ref false (* -strict-formats *) +and applicative_functors = ref true (* -no-app-funct *) +and make_runtime = ref false (* -make-runtime *) +and gprofile = ref false (* -p *) +and c_compiler = ref (None: string option) (* -cc *) +and no_auto_link = ref false (* -noautolink *) +and dllpaths = ref ([] : string list) (* -dllpath *) +and make_package = ref false (* -pack *) +and for_package = ref (None: string option) (* -for-pack *) +and error_size = ref 500 (* -error-size *) +and float_const_prop = ref true (* -no-float-const-prop *) +and transparent_modules = ref false (* -trans-mod *) +let dump_source = ref false (* -dsource *) +let dump_parsetree = ref false (* -dparsetree *) +and dump_typedtree = ref false (* -dtypedtree *) +and dump_rawlambda = ref false (* -drawlambda *) +and dump_lambda = ref false (* -dlambda *) +and dump_rawclambda = ref false (* -drawclambda *) +and dump_clambda = ref false (* -dclambda *) +and dump_rawflambda = ref false (* -drawflambda *) +and dump_flambda = ref false (* -dflambda *) +and dump_flambda_let = ref (None : int option) (* -dflambda-let=... *) +and dump_flambda_verbose = ref false (* -dflambda-verbose *) +and dump_instr = ref false (* -dinstr *) + +let keep_asm_file = ref false (* -S *) +let optimize_for_speed = ref true (* -compact *) +and opaque = ref false (* -opaque *) + +and dump_cmm = ref false (* -dcmm *) +let dump_selection = ref false (* -dsel *) +let dump_cse = ref false (* -dcse *) +let dump_live = ref false (* -dlive *) +let dump_avail = ref false (* -davail *) +let dump_spill = ref false (* -dspill *) +let dump_split = ref false (* -dsplit *) +let dump_interf = ref false (* -dinterf *) +let dump_prefer = ref false (* -dprefer *) +let dump_regalloc = ref false (* -dalloc *) +let dump_reload = ref false (* -dreload *) +let dump_scheduling = ref false (* -dscheduling *) +let dump_linear = ref false (* -dlinear *) +let dump_interval = ref false (* -dinterval *) +let keep_startup_file = ref false (* -dstartup *) +let dump_combine = ref false (* -dcombine *) +let profile_columns : Profile.column list ref = ref [] (* -dprofile/-dtimings *) + +let debug_runavail = ref false (* -drunavail *) + +let native_code = ref false (* set to true under ocamlopt *) + +let force_slash = ref false (* for ocamldep *) +let clambda_checks = ref false (* -clambda-checks *) + +let flambda_invariant_checks = ref true (* -flambda-invariants *) + +let dont_write_files = ref false (* set to true under ocamldoc *) + +let std_include_flag prefix = + if !no_std_include then "" + else (prefix ^ (Filename.quote Config.standard_library)) +;; + +let std_include_dir () = + if !no_std_include then [] else [Config.standard_library] +;; + +let shared = ref false (* -shared *) +let dlcode = ref true (* not -nodynlink *) + +let pic_code = ref (match Config.architecture with (* -fPIC *) + | "amd64" -> true + | _ -> false) + +let runtime_variant = ref "";; (* -runtime-variant *) + +let keep_docs = ref false (* -keep-docs *) +let keep_locs = ref true (* -keep-locs *) +let unsafe_string = + if Config.safe_string then ref false + else ref (not Config.default_safe_string) + (* -safe-string / -unsafe-string *) + +let classic_inlining = ref false (* -Oclassic *) +let inlining_report = ref false (* -inlining-report *) + +let afl_instrument = ref Config.afl_instrument (* 
-afl-instrument *) +let afl_inst_ratio = ref 100 (* -afl-inst-ratio *) + +let simplify_rounds = ref None (* -rounds *) +let default_simplify_rounds = ref 1 (* -rounds *) +let rounds () = + match !simplify_rounds with + | None -> !default_simplify_rounds + | Some r -> r + +let default_inline_threshold = if Config.flambda then 10. else 10. /. 8. +let inline_toplevel_multiplier = 16 +let default_inline_toplevel_threshold = + int_of_float ((float inline_toplevel_multiplier) *. default_inline_threshold) +let default_inline_call_cost = 5 +let default_inline_alloc_cost = 7 +let default_inline_prim_cost = 3 +let default_inline_branch_cost = 5 +let default_inline_indirect_cost = 4 +let default_inline_branch_factor = 0.1 +let default_inline_lifting_benefit = 1300 +let default_inline_max_unroll = 0 +let default_inline_max_depth = 1 + +let inline_threshold = ref (Float_arg_helper.default default_inline_threshold) +let inline_toplevel_threshold = + ref (Int_arg_helper.default default_inline_toplevel_threshold) +let inline_call_cost = ref (Int_arg_helper.default default_inline_call_cost) +let inline_alloc_cost = ref (Int_arg_helper.default default_inline_alloc_cost) +let inline_prim_cost = ref (Int_arg_helper.default default_inline_prim_cost) +let inline_branch_cost = + ref (Int_arg_helper.default default_inline_branch_cost) +let inline_indirect_cost = + ref (Int_arg_helper.default default_inline_indirect_cost) +let inline_branch_factor = + ref (Float_arg_helper.default default_inline_branch_factor) +let inline_lifting_benefit = + ref (Int_arg_helper.default default_inline_lifting_benefit) +let inline_max_unroll = + ref (Int_arg_helper.default default_inline_max_unroll) +let inline_max_depth = + ref (Int_arg_helper.default default_inline_max_depth) + + +let unbox_specialised_args = ref true (* -no-unbox-specialised-args *) +let unbox_free_vars_of_closures = ref true +let unbox_closures = ref false (* -unbox-closures *) +let default_unbox_closures_factor = 10 +let unbox_closures_factor = + ref default_unbox_closures_factor (* -unbox-closures-factor *) +let remove_unused_arguments = ref false (* -remove-unused-arguments *) + +type inlining_arguments = { + inline_call_cost : int option; + inline_alloc_cost : int option; + inline_prim_cost : int option; + inline_branch_cost : int option; + inline_indirect_cost : int option; + inline_lifting_benefit : int option; + inline_branch_factor : float option; + inline_max_depth : int option; + inline_max_unroll : int option; + inline_threshold : float option; + inline_toplevel_threshold : int option; +} + +let set_int_arg round (arg:Int_arg_helper.parsed ref) default value = + let value : int = + match value with + | None -> default + | Some value -> value + in + match round with + | None -> + arg := Int_arg_helper.set_base_default value + (Int_arg_helper.reset_base_overrides !arg) + | Some round -> + arg := Int_arg_helper.add_base_override round value !arg + +let set_float_arg round (arg:Float_arg_helper.parsed ref) default value = + let value = + match value with + | None -> default + | Some value -> value + in + match round with + | None -> + arg := Float_arg_helper.set_base_default value + (Float_arg_helper.reset_base_overrides !arg) + | Some round -> + arg := Float_arg_helper.add_base_override round value !arg + +let use_inlining_arguments_set ?round (arg:inlining_arguments) = + let set_int = set_int_arg round in + let set_float = set_float_arg round in + set_int inline_call_cost default_inline_call_cost arg.inline_call_cost; + set_int inline_alloc_cost 
default_inline_alloc_cost arg.inline_alloc_cost; + set_int inline_prim_cost default_inline_prim_cost arg.inline_prim_cost; + set_int inline_branch_cost + default_inline_branch_cost arg.inline_branch_cost; + set_int inline_indirect_cost + default_inline_indirect_cost arg.inline_indirect_cost; + set_int inline_lifting_benefit + default_inline_lifting_benefit arg.inline_lifting_benefit; + set_float inline_branch_factor + default_inline_branch_factor arg.inline_branch_factor; + set_int inline_max_depth + default_inline_max_depth arg.inline_max_depth; + set_int inline_max_unroll + default_inline_max_unroll arg.inline_max_unroll; + set_float inline_threshold + default_inline_threshold arg.inline_threshold; + set_int inline_toplevel_threshold + default_inline_toplevel_threshold arg.inline_toplevel_threshold + +(* o1 is the default *) +let o1_arguments = { + inline_call_cost = None; + inline_alloc_cost = None; + inline_prim_cost = None; + inline_branch_cost = None; + inline_indirect_cost = None; + inline_lifting_benefit = None; + inline_branch_factor = None; + inline_max_depth = None; + inline_max_unroll = None; + inline_threshold = None; + inline_toplevel_threshold = None; +} + +let classic_arguments = { + inline_call_cost = None; + inline_alloc_cost = None; + inline_prim_cost = None; + inline_branch_cost = None; + inline_indirect_cost = None; + inline_lifting_benefit = None; + inline_branch_factor = None; + inline_max_depth = None; + inline_max_unroll = None; + (* [inline_threshold] matches the current compiler's default. + Note that this particular fraction can be expressed exactly in + floating point. *) + inline_threshold = Some (10. /. 8.); + (* [inline_toplevel_threshold] is not used in classic mode. *) + inline_toplevel_threshold = Some 1; +} + +let o2_arguments = { + inline_call_cost = Some (2 * default_inline_call_cost); + inline_alloc_cost = Some (2 * default_inline_alloc_cost); + inline_prim_cost = Some (2 * default_inline_prim_cost); + inline_branch_cost = Some (2 * default_inline_branch_cost); + inline_indirect_cost = Some (2 * default_inline_indirect_cost); + inline_lifting_benefit = None; + inline_branch_factor = None; + inline_max_depth = Some 2; + inline_max_unroll = None; + inline_threshold = Some 25.; + inline_toplevel_threshold = Some (25 * inline_toplevel_multiplier); +} + +let o3_arguments = { + inline_call_cost = Some (3 * default_inline_call_cost); + inline_alloc_cost = Some (3 * default_inline_alloc_cost); + inline_prim_cost = Some (3 * default_inline_prim_cost); + inline_branch_cost = Some (3 * default_inline_branch_cost); + inline_indirect_cost = Some (3 * default_inline_indirect_cost); + inline_lifting_benefit = None; + inline_branch_factor = Some 0.; + inline_max_depth = Some 3; + inline_max_unroll = Some 1; + inline_threshold = Some 50.; + inline_toplevel_threshold = Some (50 * inline_toplevel_multiplier); +} + +let all_passes = ref [] +let dumped_passes_list = ref [] +let dumped_pass s = + assert(List.mem s !all_passes); + List.mem s !dumped_passes_list + +let set_dumped_pass s enabled = + if (List.mem s !all_passes) then begin + let passes_without_s = List.filter ((<>) s) !dumped_passes_list in + let dumped_passes = + if enabled then + s :: passes_without_s + else + passes_without_s + in + dumped_passes_list := dumped_passes + end + +let parse_color_setting = function + | "auto" -> Some Misc.Color.Auto + | "always" -> Some Misc.Color.Always + | "never" -> Some Misc.Color.Never + | _ -> None +let color = ref None ;; (* -color *) + +let unboxed_types = ref false + 
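The `o1_arguments`/`o2_arguments`/`o3_arguments` sets above are not applied directly; callers compose them per simplification round through `use_inlining_arguments_set`, with higher optimization levels configuring the later rounds and reusing the cheaper sets for the earlier ones. A minimal sketch of that composition, mirroring the `"O3"` branch of `read_one_param` in `compenv.ml` further down in this diff (the name `apply_o3` is illustrative only, not part of the vendored sources):

```ocaml
(* Illustrative only: compose the per-round argument sets the same way
   Compenv.read_one_param does when OCAMLPARAM contains "O3=1". *)
let apply_o3 () =
  Clflags.default_simplify_rounds := 3;
  Clflags.use_inlining_arguments_set Clflags.o3_arguments;
  Clflags.use_inlining_arguments_set ~round:1 Clflags.o2_arguments;
  Clflags.use_inlining_arguments_set ~round:0 Clflags.o1_arguments
```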
+let arg_spec = ref [] +let arg_names = ref Misc.StringMap.empty + +let reset_arguments () = + arg_spec := []; + arg_names := Misc.StringMap.empty + +let add_arguments loc args = + List.iter (function (arg_name, _, _) as arg -> + try + let loc2 = Misc.StringMap.find arg_name !arg_names in + Printf.eprintf + "Warning: plugin argument %s is already defined:\n" arg_name; + Printf.eprintf " First definition: %s\n" loc2; + Printf.eprintf " New definition: %s\n" loc; + with Not_found -> + arg_spec := !arg_spec @ [ arg ]; + arg_names := Misc.StringMap.add arg_name loc !arg_names + ) args + +let print_arguments usage = + Arg.usage !arg_spec usage + +(* This function is almost the same as [Arg.parse_expand], except + that [Arg.parse_expand] could not be used because it does not take a + reference for [arg_spec].*) +let parse_arguments f msg = + try + let argv = ref Sys.argv in + let current = ref (!Arg.current) in + Arg.parse_and_expand_argv_dynamic current argv arg_spec f msg + with + | Arg.Bad msg -> Printf.eprintf "%s" msg; exit 2 + | Arg.Help msg -> Printf.printf "%s" msg; exit 0 diff --git a/res_syntax/compiler-libs-406/clflags.mli b/res_syntax/compiler-libs-406/clflags.mli new file mode 100644 index 0000000000..9a15649fef --- /dev/null +++ b/res_syntax/compiler-libs-406/clflags.mli @@ -0,0 +1,233 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 2005 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Command line flags *) + +(** Optimization parameters represented as ints indexed by round number. *) +module Int_arg_helper : sig + type parsed + + val parse : string -> string -> parsed ref -> unit + + type parse_result = + | Ok + | Parse_failed of exn + val parse_no_error : string -> parsed ref -> parse_result + + val get : key:int -> parsed -> int +end + +(** Optimization parameters represented as floats indexed by round number. *) +module Float_arg_helper : sig + type parsed + + val parse : string -> string -> parsed ref -> unit + + type parse_result = + | Ok + | Parse_failed of exn + val parse_no_error : string -> parsed ref -> parse_result + + val get : key:int -> parsed -> float +end + +type inlining_arguments = { + inline_call_cost : int option; + inline_alloc_cost : int option; + inline_prim_cost : int option; + inline_branch_cost : int option; + inline_indirect_cost : int option; + inline_lifting_benefit : int option; + inline_branch_factor : float option; + inline_max_depth : int option; + inline_max_unroll : int option; + inline_threshold : float option; + inline_toplevel_threshold : int option; +} + +val classic_arguments : inlining_arguments +val o1_arguments : inlining_arguments +val o2_arguments : inlining_arguments +val o3_arguments : inlining_arguments + +(** Set all the inlining arguments for a round. + The default is set if no round is provided. 
*) +val use_inlining_arguments_set : ?round:int -> inlining_arguments -> unit + +val objfiles : string list ref +val ccobjs : string list ref +val dllibs : string list ref +val compile_only : bool ref +val output_name : string option ref +val include_dirs : string list ref +val no_std_include : bool ref +val print_types : bool ref +val make_archive : bool ref +val debug : bool ref +val fast : bool ref +val use_linscan : bool ref +val link_everything : bool ref +val custom_runtime : bool ref +val no_check_prims : bool ref +val bytecode_compatible_32 : bool ref +val output_c_object : bool ref +val output_complete_object : bool ref +val all_ccopts : string list ref +val classic : bool ref +val nopervasives : bool ref +val open_modules : string list ref +val preprocessor : string option ref +val all_ppx : string list ref +val annotations : bool ref +val binary_annotations : bool ref +val use_threads : bool ref +val use_vmthreads : bool ref +val noassert : bool ref +val verbose : bool ref +val noprompt : bool ref +val nopromptcont : bool ref +val init_file : string option ref +val noinit : bool ref +val noversion : bool ref +val use_prims : string ref +val use_runtime : string ref +val principal : bool ref +val real_paths : bool ref +val recursive_types : bool ref +val strict_sequence : bool ref +val strict_formats : bool ref +val applicative_functors : bool ref +val make_runtime : bool ref +val gprofile : bool ref +val c_compiler : string option ref +val no_auto_link : bool ref +val dllpaths : string list ref +val make_package : bool ref +val for_package : string option ref +val error_size : int ref +val float_const_prop : bool ref +val transparent_modules : bool ref +val dump_source : bool ref +val dump_parsetree : bool ref +val dump_typedtree : bool ref +val dump_rawlambda : bool ref +val dump_lambda : bool ref +val dump_rawclambda : bool ref +val dump_clambda : bool ref +val dump_rawflambda : bool ref +val dump_flambda : bool ref +val dump_flambda_let : int option ref +val dump_instr : bool ref +val keep_asm_file : bool ref +val optimize_for_speed : bool ref +val dump_cmm : bool ref +val dump_selection : bool ref +val dump_cse : bool ref +val dump_live : bool ref +val dump_avail : bool ref +val debug_runavail : bool ref +val dump_spill : bool ref +val dump_split : bool ref +val dump_interf : bool ref +val dump_prefer : bool ref +val dump_regalloc : bool ref +val dump_reload : bool ref +val dump_scheduling : bool ref +val dump_linear : bool ref +val dump_interval : bool ref +val keep_startup_file : bool ref +val dump_combine : bool ref +val native_code : bool ref +val default_inline_threshold : float +val inline_threshold : Float_arg_helper.parsed ref +val inlining_report : bool ref +val simplify_rounds : int option ref +val default_simplify_rounds : int ref +val rounds : unit -> int +val default_inline_max_unroll : int +val inline_max_unroll : Int_arg_helper.parsed ref +val default_inline_toplevel_threshold : int +val inline_toplevel_threshold : Int_arg_helper.parsed ref +val default_inline_call_cost : int +val default_inline_alloc_cost : int +val default_inline_prim_cost : int +val default_inline_branch_cost : int +val default_inline_indirect_cost : int +val default_inline_lifting_benefit : int +val inline_call_cost : Int_arg_helper.parsed ref +val inline_alloc_cost : Int_arg_helper.parsed ref +val inline_prim_cost : Int_arg_helper.parsed ref +val inline_branch_cost : Int_arg_helper.parsed ref +val inline_indirect_cost : Int_arg_helper.parsed ref +val inline_lifting_benefit : 
Int_arg_helper.parsed ref +val default_inline_branch_factor : float +val inline_branch_factor : Float_arg_helper.parsed ref +val dont_write_files : bool ref +val std_include_flag : string -> string +val std_include_dir : unit -> string list +val shared : bool ref +val dlcode : bool ref +val pic_code : bool ref +val runtime_variant : string ref +val force_slash : bool ref +val keep_docs : bool ref +val keep_locs : bool ref +val unsafe_string : bool ref +val opaque : bool ref +val profile_columns : Profile.column list ref +val flambda_invariant_checks : bool ref +val unbox_closures : bool ref +val unbox_closures_factor : int ref +val default_unbox_closures_factor : int +val unbox_free_vars_of_closures : bool ref +val unbox_specialised_args : bool ref +val clambda_checks : bool ref +val default_inline_max_depth : int +val inline_max_depth : Int_arg_helper.parsed ref +val remove_unused_arguments : bool ref +val dump_flambda_verbose : bool ref +val classic_inlining : bool ref +val afl_instrument : bool ref +val afl_inst_ratio : int ref + +val all_passes : string list ref +val dumped_pass : string -> bool +val set_dumped_pass : string -> bool -> unit + +val parse_color_setting : string -> Misc.Color.setting option +val color : Misc.Color.setting option ref + +val unboxed_types : bool ref + +val arg_spec : (string * Arg.spec * string) list ref + +(* [add_arguments __LOC__ args] will add the arguments from [args] at + the end of [arg_spec], checking that they have not already been + added by [add_arguments] before. A warning is printed showing the + locations of the function from which the argument was previously + added. *) +val add_arguments : string -> (string * Arg.spec * string) list -> unit + +(* [parse_arguments anon_arg usage] will parse the arguments, using + the arguments provided in [Clflags.arg_spec]. It allows plugins to + provide their own arguments. +*) +val parse_arguments : Arg.anon_fun -> string -> unit + +(* [print_arguments usage] print the standard usage message *) +val print_arguments : string -> unit + +(* [reset_arguments ()] clear all declared arguments *) +val reset_arguments : unit -> unit diff --git a/res_syntax/compiler-libs-406/cmi_format.ml b/res_syntax/compiler-libs-406/cmi_format.ml new file mode 100644 index 0000000000..1b21d3621b --- /dev/null +++ b/res_syntax/compiler-libs-406/cmi_format.ml @@ -0,0 +1,141 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Fabrice Le Fessant, INRIA Saclay *) +(* *) +(* Copyright 2012 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +type pers_flags = + | Rectypes + | Deprecated of string + | Opaque + | Unsafe_string + +type error = + Not_an_interface of string + | Wrong_version_interface of string * string + | Corrupted_interface of string + +exception Error of error + +type cmi_infos = { + cmi_name : string; + cmi_sign : Types.signature_item list; + cmi_crcs : (string * Digest.t option) list; + cmi_flags : pers_flags list; +} + +let input_cmi ic = + let (name, sign) = input_value ic in + let crcs = input_value ic in + let flags = input_value ic in + { + cmi_name = name; + cmi_sign = sign; + cmi_crcs = crcs; + cmi_flags = flags; + } + +let read_cmi filename = + let ic = open_in_bin filename in + try + let buffer = + really_input_string ic (String.length Config.cmi_magic_number) + in + if buffer <> Config.cmi_magic_number then begin + close_in ic; + let pre_len = String.length Config.cmi_magic_number - 3 in + if String.sub buffer 0 pre_len + = String.sub Config.cmi_magic_number 0 pre_len then + begin + let msg = + if buffer < Config.cmi_magic_number then "an older" else "a newer" in + raise (Error (Wrong_version_interface (filename, msg))) + end else begin + raise(Error(Not_an_interface filename)) + end + end; + let cmi = input_cmi ic in + close_in ic; + cmi + with End_of_file | Failure _ -> + close_in ic; + raise(Error(Corrupted_interface(filename))) + | Error e -> + close_in ic; + raise (Error e) + +let output_cmi filename oc cmi = +(* beware: the provided signature must have been substituted for saving *) + output_string oc Config.cmi_magic_number; + output_value oc (cmi.cmi_name, cmi.cmi_sign); + flush oc; + let crc = Digest.file filename in + let crcs = (cmi.cmi_name, Some crc) :: cmi.cmi_crcs in + output_value oc crcs; + output_value oc cmi.cmi_flags; + crc + +(* This function is also called by [save_cmt] as cmi_format is subset of + cmt_format, so dont close the channel yet +*) +let create_cmi ?check_exists filename (cmi : cmi_infos) = + (* beware: the provided signature must have been substituted for saving *) + let content = + Config.cmi_magic_number ^ Marshal.to_string (cmi.cmi_name, cmi.cmi_sign) [] + (* checkout [output_value] in {!Pervasives} module *) + in + let crc = Digest.string content in + let cmi_infos = + if check_exists <> None && Sys.file_exists filename then + Some (read_cmi filename) + else None in + match cmi_infos with + | Some {cmi_name = _; cmi_sign = _; cmi_crcs = (old_name, Some old_crc)::rest ; cmi_flags} + (* TODO: design the cmi format so that we don't need read the whole cmi *) + when + cmi.cmi_name = old_name && + crc = old_crc && + cmi.cmi_crcs = rest && + cmi_flags = cmi.cmi_flags -> + crc + | _ -> + let crcs = (cmi.cmi_name, Some crc) :: cmi.cmi_crcs in + let oc = open_out_bin filename in + output_string oc content; + output_value oc crcs; + output_value oc cmi.cmi_flags; + close_out oc; + crc + +(* Error report *) + +open Format + +let report_error ppf = function + | Not_an_interface filename -> + fprintf ppf "%a@ is not a compiled interface" + Location.print_filename filename + | Wrong_version_interface (filename, older_newer) -> + fprintf ppf + "%a@ is not a compiled interface for this version of OCaml.@.\ + It seems to be for %s version of OCaml." 
+ Location.print_filename filename older_newer + | Corrupted_interface filename -> + fprintf ppf "Corrupted compiled interface@ %a" + Location.print_filename filename + +let () = + Location.register_error_of_exn + (function + | Error err -> Some (Location.error_of_printer_file report_error err) + | _ -> None + ) diff --git a/res_syntax/compiler-libs-406/cmi_format.mli b/res_syntax/compiler-libs-406/cmi_format.mli new file mode 100644 index 0000000000..9385deb5b9 --- /dev/null +++ b/res_syntax/compiler-libs-406/cmi_format.mli @@ -0,0 +1,51 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Fabrice Le Fessant, INRIA Saclay *) +(* *) +(* Copyright 2012 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +type pers_flags = + | Rectypes + | Deprecated of string + | Opaque + | Unsafe_string + +type cmi_infos = { + cmi_name : string; + cmi_sign : Types.signature_item list; + cmi_crcs : (string * Digest.t option) list; + cmi_flags : pers_flags list; +} + +(* write the magic + the cmi information *) +val output_cmi : string -> out_channel -> cmi_infos -> Digest.t + +val create_cmi : ?check_exists:unit -> string -> cmi_infos -> Digest.t + +(* read the cmi information (the magic is supposed to have already been read) *) +val input_cmi : in_channel -> cmi_infos + +(* read a cmi from a filename, checking the magic *) +val read_cmi : string -> cmi_infos + +(* Error report *) + +type error = + Not_an_interface of string + | Wrong_version_interface of string * string + | Corrupted_interface of string + +exception Error of error + +open Format + +val report_error: formatter -> error -> unit diff --git a/res_syntax/compiler-libs-406/cmt_format.ml b/res_syntax/compiler-libs-406/cmt_format.ml new file mode 100644 index 0000000000..fda1a4a56f --- /dev/null +++ b/res_syntax/compiler-libs-406/cmt_format.ml @@ -0,0 +1,196 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Fabrice Le Fessant, INRIA Saclay *) +(* *) +(* Copyright 2012 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +open Cmi_format +open Typedtree + +(* Note that in Typerex, there is an awful hack to save a cmt file + together with the interface file that was generated by ocaml (this + is because the installed version of ocaml might differ from the one + integrated in Typerex). 
+*) + + + +let read_magic_number ic = + let len_magic_number = String.length Config.cmt_magic_number in + really_input_string ic len_magic_number + +type binary_annots = + | Packed of Types.signature * string list + | Implementation of structure + | Interface of signature + | Partial_implementation of binary_part array + | Partial_interface of binary_part array + +and binary_part = +| Partial_structure of structure +| Partial_structure_item of structure_item +| Partial_expression of expression +| Partial_pattern of pattern +| Partial_class_expr of class_expr +| Partial_signature of signature +| Partial_signature_item of signature_item +| Partial_module_type of module_type + +type cmt_infos = { + cmt_modname : string; + cmt_annots : binary_annots; + cmt_value_dependencies : + (Types.value_description * Types.value_description) list; + cmt_comments : (string * Location.t) list; + cmt_args : string array; + cmt_sourcefile : string option; + cmt_builddir : string; + cmt_loadpath : string list; + cmt_source_digest : Digest.t option; + cmt_initial_env : Env.t; + cmt_imports : (string * Digest.t option) list; + cmt_interface_digest : Digest.t option; + cmt_use_summaries : bool; +} + +type error = + Not_a_typedtree of string + +let need_to_clear_env = + try ignore (Sys.getenv "OCAML_BINANNOT_WITHENV"); false + with Not_found -> true + +let keep_only_summary = Env.keep_only_summary + +open Tast_mapper + +let cenv = + {Tast_mapper.default with env = fun _sub env -> keep_only_summary env} + +let clear_part = function + | Partial_structure s -> Partial_structure (cenv.structure cenv s) + | Partial_structure_item s -> + Partial_structure_item (cenv.structure_item cenv s) + | Partial_expression e -> Partial_expression (cenv.expr cenv e) + | Partial_pattern p -> Partial_pattern (cenv.pat cenv p) + | Partial_class_expr ce -> Partial_class_expr (cenv.class_expr cenv ce) + | Partial_signature s -> Partial_signature (cenv.signature cenv s) + | Partial_signature_item s -> + Partial_signature_item (cenv.signature_item cenv s) + | Partial_module_type s -> Partial_module_type (cenv.module_type cenv s) + +let clear_env binary_annots = + if need_to_clear_env then + match binary_annots with + | Implementation s -> Implementation (cenv.structure cenv s) + | Interface s -> Interface (cenv.signature cenv s) + | Packed _ -> binary_annots + | Partial_implementation array -> + Partial_implementation (Array.map clear_part array) + | Partial_interface array -> + Partial_interface (Array.map clear_part array) + + else binary_annots + +exception Error of error + +let input_cmt ic = (input_value ic : cmt_infos) + +let output_cmt oc cmt = + output_string oc Config.cmt_magic_number; + output_value oc (cmt : cmt_infos) + +let read filename = +(* Printf.fprintf stderr "Cmt_format.read %s\n%!" 
filename; *) + let ic = open_in_bin filename in + try + let magic_number = read_magic_number ic in + let cmi, cmt = + if magic_number = Config.cmt_magic_number then + None, Some (input_cmt ic) + else if magic_number = Config.cmi_magic_number then + let cmi = Cmi_format.input_cmi ic in + let cmt = try + let magic_number = read_magic_number ic in + if magic_number = Config.cmt_magic_number then + let cmt = input_cmt ic in + Some cmt + else None + with _ -> None + in + Some cmi, cmt + else + raise(Cmi_format.Error(Cmi_format.Not_an_interface filename)) + in + close_in ic; +(* Printf.fprintf stderr "Cmt_format.read done\n%!"; *) + cmi, cmt + with e -> + close_in ic; + raise e + +let read_cmt filename = + match read filename with + _, None -> raise (Error (Not_a_typedtree filename)) + | _, Some cmt -> cmt + +let read_cmi filename = + match read filename with + None, _ -> + raise (Cmi_format.Error (Cmi_format.Not_an_interface filename)) + | Some cmi, _ -> cmi + +let saved_types = ref [] +let value_deps = ref [] + +let clear () = + saved_types := []; + value_deps := [] + +let add_saved_type b = saved_types := b :: !saved_types +let get_saved_types () = !saved_types +let set_saved_types l = saved_types := l + +let record_value_dependency vd1 vd2 = + if vd1.Types.val_loc <> vd2.Types.val_loc then + value_deps := (vd1, vd2) :: !value_deps + +let save_cmt filename modname binary_annots sourcefile initial_env cmi = + if !Clflags.binary_annotations && not !Clflags.print_types then begin + Misc.output_to_file_via_temporary + ~mode:[Open_binary] filename + (fun temp_file_name oc -> + let this_crc = + match cmi with + | None -> None + | Some cmi -> Some (output_cmi temp_file_name oc cmi) + in + let source_digest = Misc.may_map Digest.file sourcefile in + let cmt = { + cmt_modname = modname; + cmt_annots = clear_env binary_annots; + cmt_value_dependencies = !value_deps; + cmt_comments = Lexer.comments (); + cmt_args = Sys.argv; + cmt_sourcefile = sourcefile; + cmt_builddir = Sys.getcwd (); + cmt_loadpath = !Config.load_path; + cmt_source_digest = source_digest; + cmt_initial_env = if need_to_clear_env then + keep_only_summary initial_env else initial_env; + cmt_imports = List.sort compare (Env.imports ()); + cmt_interface_digest = this_crc; + cmt_use_summaries = need_to_clear_env; + } in + output_cmt oc cmt) + end; + clear () diff --git a/res_syntax/compiler-libs-406/cmt_format.mli b/res_syntax/compiler-libs-406/cmt_format.mli new file mode 100644 index 0000000000..617bc1ed85 --- /dev/null +++ b/res_syntax/compiler-libs-406/cmt_format.mli @@ -0,0 +1,121 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Fabrice Le Fessant, INRIA Saclay *) +(* *) +(* Copyright 2012 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** cmt and cmti files format. *) + +(** The layout of a cmt file is as follows: + := \{\} \{cmt infos\} \{\} + where is the cmi file format: + := . + More precisely, the optional part must be present if and only if + the file is: + - a cmti, or + - a cmt, for a ml file which has no corresponding mli (hence no + corresponding cmti). 
+ + Thus, we provide a common reading function for cmi and cmt(i) + files which returns an option for each of the three parts: cmi + info, cmt info, source info. *) + +open Typedtree + +type binary_annots = + | Packed of Types.signature * string list + | Implementation of structure + | Interface of signature + | Partial_implementation of binary_part array + | Partial_interface of binary_part array + +and binary_part = + | Partial_structure of structure + | Partial_structure_item of structure_item + | Partial_expression of expression + | Partial_pattern of pattern + | Partial_class_expr of class_expr + | Partial_signature of signature + | Partial_signature_item of signature_item + | Partial_module_type of module_type + +type cmt_infos = { + cmt_modname : string; + cmt_annots : binary_annots; + cmt_value_dependencies : + (Types.value_description * Types.value_description) list; + cmt_comments : (string * Location.t) list; + cmt_args : string array; + cmt_sourcefile : string option; + cmt_builddir : string; + cmt_loadpath : string list; + cmt_source_digest : string option; + cmt_initial_env : Env.t; + cmt_imports : (string * Digest.t option) list; + cmt_interface_digest : Digest.t option; + cmt_use_summaries : bool; +} + +type error = + Not_a_typedtree of string + +exception Error of error + +(** [read filename] opens filename, and extract both the cmi_infos, if + it exists, and the cmt_infos, if it exists. Thus, it can be used + with .cmi, .cmt and .cmti files. + + .cmti files always contain a cmi_infos at the beginning. .cmt files + only contain a cmi_infos at the beginning if there is no associated + .cmti file. +*) +val read : string -> Cmi_format.cmi_infos option * cmt_infos option + +val read_cmt : string -> cmt_infos +val read_cmi : string -> Cmi_format.cmi_infos + +(** [save_cmt filename modname binary_annots sourcefile initial_env cmi] + writes a cmt(i) file. *) +val save_cmt : + string -> (* filename.cmt to generate *) + string -> (* module name *) + binary_annots -> + string option -> (* source file *) + Env.t -> (* initial env *) + Cmi_format.cmi_infos option -> (* if a .cmi was generated *) + unit + +(* Miscellaneous functions *) + +val read_magic_number : in_channel -> string + +val clear: unit -> unit + +val add_saved_type : binary_part -> unit +val get_saved_types : unit -> binary_part list +val set_saved_types : binary_part list -> unit + +val record_value_dependency: + Types.value_description -> Types.value_description -> unit + + +(* + + val is_magic_number : string -> bool + val read : in_channel -> Env.cmi_infos option * t + val write_magic_number : out_channel -> unit + val write : out_channel -> t -> unit + + val find : string list -> string -> string + val read_signature : 'a -> string -> Types.signature * 'b list * 'c list + +*) diff --git a/res_syntax/compiler-libs-406/compenv.ml b/res_syntax/compiler-libs-406/compenv.ml new file mode 100644 index 0000000000..194fa617c8 --- /dev/null +++ b/res_syntax/compiler-libs-406/compenv.ml @@ -0,0 +1,640 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Fabrice Le Fessant, EPI Gallium, INRIA Paris-Rocquencourt *) +(* *) +(* Copyright 2013 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +open Clflags + +let output_prefix name = + let oname = + match !output_name with + | None -> name + | Some n -> if !compile_only then (output_name := None; n) else name in + Filename.remove_extension oname + +let print_version_and_library compiler = + Printf.printf "The OCaml %s, version " compiler; + print_string Config.version; print_newline(); + print_string "Standard library directory: "; + print_string Config.standard_library; print_newline(); + exit 0 + +let print_version_string () = + print_string Config.version; print_newline(); exit 0 + +let print_standard_library () = + print_string Config.standard_library; print_newline(); exit 0 + +let fatal err = + prerr_endline err; + exit 2 + +let extract_output = function + | Some s -> s + | None -> + fatal "Please specify the name of the output file, using option -o" + +let default_output = function + | Some s -> s + | None -> Config.default_executable_name + +let implicit_modules = ref [] +let first_include_dirs = ref [] +let last_include_dirs = ref [] +let first_ccopts = ref [] +let last_ccopts = ref [] +let first_ppx = ref [] +let last_ppx = ref [] +let first_objfiles = ref [] +let last_objfiles = ref [] + +(* Check validity of module name *) +let is_unit_name name = + try + if name = "" then raise Exit; + begin match name.[0] with + | 'A'..'Z' -> () + | _ -> + raise Exit; + end; + for i = 1 to String.length name - 1 do + match name.[i] with + | 'A'..'Z' | 'a'..'z' | '0'..'9' | '_' | '\'' -> () + | _ -> + raise Exit; + done; + true + with Exit -> false +;; + +let check_unit_name ppf filename name = + if not (is_unit_name name) then + Location.print_warning (Location.in_file filename) ppf + (Warnings.Bad_module_name name);; + +(* Compute name of module from output file name *) +let module_of_filename ppf inputfile outputprefix = + let basename = Filename.basename outputprefix in + let name = + try + let pos = String.index basename '.' 
in + String.sub basename 0 pos + with Not_found -> basename + in + let name = String.capitalize_ascii name in + check_unit_name ppf inputfile name; + name +;; + +type filename = string + +type readenv_position = + Before_args | Before_compile of filename | Before_link + +(* Syntax of OCAMLPARAM: (name=VALUE,)* _ (,name=VALUE)* + where VALUE should not contain ',' *) +exception SyntaxError of string + +let parse_args s = + let args = String.split_on_char ',' s in + let rec iter is_after args before after = + match args with + [] -> + if not is_after then + raise (SyntaxError "no '_' separator found") + else + (List.rev before, List.rev after) + | "_" :: _ when is_after -> raise (SyntaxError "too many '_' separators") + | "_" :: tail -> iter true tail before after + | arg :: tail -> + let binding = try + Misc.cut_at arg '=' + with Not_found -> + raise (SyntaxError ("missing '=' in " ^ arg)) + in + if is_after then + iter is_after tail before (binding :: after) + else + iter is_after tail (binding :: before) after + in + iter false args [] [] + +let setter ppf f name options s = + try + let bool = match s with + | "0" -> false + | "1" -> true + | _ -> raise Not_found + in + List.iter (fun b -> b := f bool) options + with Not_found -> + Location.print_warning Location.none ppf + (Warnings.Bad_env_variable ("OCAMLPARAM", + Printf.sprintf "bad value for %s" name)) + +let int_setter ppf name option s = + try + option := int_of_string s + with _ -> + Location.print_warning Location.none ppf + (Warnings.Bad_env_variable + ("OCAMLPARAM", Printf.sprintf "non-integer parameter for \"%s\"" name)) + +let int_option_setter ppf name option s = + try + option := Some (int_of_string s) + with _ -> + Location.print_warning Location.none ppf + (Warnings.Bad_env_variable + ("OCAMLPARAM", Printf.sprintf "non-integer parameter for \"%s\"" name)) + +(* +let float_setter ppf name option s = + try + option := float_of_string s + with _ -> + Location.print_warning Location.none ppf + (Warnings.Bad_env_variable + ("OCAMLPARAM", Printf.sprintf "non-float parameter for \"%s\"" name)) +*) + +let load_plugin = ref (fun _ -> ()) + +let check_bool ppf name s = + match s with + | "0" -> false + | "1" -> true + | _ -> + Location.print_warning Location.none ppf + (Warnings.Bad_env_variable ("OCAMLPARAM", + Printf.sprintf "bad value for %s" name)); + false + +(* 'can-discard=' specifies which arguments can be discarded without warning + because they are not understood by some versions of OCaml. 
*) +let can_discard = ref [] + +let read_one_param ppf position name v = + let set name options s = setter ppf (fun b -> b) name options s in + let clear name options s = setter ppf (fun b -> not b) name options s in + match name with + | "g" -> set "g" [ Clflags.debug ] v + | "p" -> set "p" [ Clflags.gprofile ] v + | "bin-annot" -> set "bin-annot" [ Clflags.binary_annotations ] v + | "afl-instrument" -> set "afl-instrument" [ Clflags.afl_instrument ] v + | "afl-inst-ratio" -> + int_setter ppf "afl-inst-ratio" afl_inst_ratio v + | "annot" -> set "annot" [ Clflags.annotations ] v + | "absname" -> set "absname" [ Location.absname ] v + | "compat-32" -> set "compat-32" [ bytecode_compatible_32 ] v + | "noassert" -> set "noassert" [ noassert ] v + | "noautolink" -> set "noautolink" [ no_auto_link ] v + | "nostdlib" -> set "nostdlib" [ no_std_include ] v + | "linkall" -> set "linkall" [ link_everything ] v + | "nolabels" -> set "nolabels" [ classic ] v + | "principal" -> set "principal" [ principal ] v + | "rectypes" -> set "rectypes" [ recursive_types ] v + | "safe-string" -> clear "safe-string" [ unsafe_string ] v + | "strict-sequence" -> set "strict-sequence" [ strict_sequence ] v + | "strict-formats" -> set "strict-formats" [ strict_formats ] v + | "thread" -> set "thread" [ use_threads ] v + | "unboxed-types" -> set "unboxed-types" [ unboxed_types ] v + | "unsafe" -> set "unsafe" [ fast ] v + | "verbose" -> set "verbose" [ verbose ] v + | "nopervasives" -> set "nopervasives" [ nopervasives ] v + | "slash" -> set "slash" [ force_slash ] v (* for ocamldep *) + | "keep-docs" -> set "keep-docs" [ Clflags.keep_docs ] v + | "keep-locs" -> set "keep-locs" [ Clflags.keep_locs ] v + + | "compact" -> clear "compact" [ optimize_for_speed ] v + | "no-app-funct" -> clear "no-app-funct" [ applicative_functors ] v + | "nodynlink" -> clear "nodynlink" [ dlcode ] v + | "short-paths" -> clear "short-paths" [ real_paths ] v + | "trans-mod" -> set "trans-mod" [ transparent_modules ] v + | "opaque" -> set "opaque" [ opaque ] v + + | "pp" -> preprocessor := Some v + | "runtime-variant" -> runtime_variant := v + | "cc" -> c_compiler := Some v + + | "clambda-checks" -> set "clambda-checks" [ clambda_checks ] v + + (* assembly sources *) + | "s" -> + set "s" [ Clflags.keep_asm_file ; Clflags.keep_startup_file ] v + | "S" -> set "S" [ Clflags.keep_asm_file ] v + | "dstartup" -> set "dstartup" [ Clflags.keep_startup_file ] v + + (* warn-errors *) + | "we" | "warn-error" -> Warnings.parse_options true v + (* warnings *) + | "w" -> Warnings.parse_options false v + (* warn-errors *) + | "wwe" -> Warnings.parse_options false v + + (* inlining *) + | "inline" -> + let module F = Float_arg_helper in + begin match F.parse_no_error v inline_threshold with + | F.Ok -> () + | F.Parse_failed exn -> + let error = + Printf.sprintf "bad syntax for \"inline\": %s" + (Printexc.to_string exn) + in + Location.print_warning Location.none ppf + (Warnings.Bad_env_variable ("OCAMLPARAM", error)) + end + + | "inline-toplevel" -> + Int_arg_helper.parse v + "Bad syntax in OCAMLPARAM for 'inline-toplevel'" + inline_toplevel_threshold + + | "rounds" -> int_option_setter ppf "rounds" simplify_rounds v + | "inline-max-unroll" -> + Int_arg_helper.parse v "Bad syntax in OCAMLPARAM for 'inline-max-unroll'" + inline_max_unroll + | "inline-call-cost" -> + Int_arg_helper.parse v + "Bad syntax in OCAMLPARAM for 'inline-call-cost'" + inline_call_cost + | "inline-alloc-cost" -> + Int_arg_helper.parse v + "Bad syntax in OCAMLPARAM for 
'inline-alloc-cost'" + inline_alloc_cost + | "inline-prim-cost" -> + Int_arg_helper.parse v + "Bad syntax in OCAMLPARAM for 'inline-prim-cost'" + inline_prim_cost + | "inline-branch-cost" -> + Int_arg_helper.parse v + "Bad syntax in OCAMLPARAM for 'inline-branch-cost'" + inline_branch_cost + | "inline-indirect-cost" -> + Int_arg_helper.parse v + "Bad syntax in OCAMLPARAM for 'inline-indirect-cost'" + inline_indirect_cost + | "inline-lifting-benefit" -> + Int_arg_helper.parse v + "Bad syntax in OCAMLPARAM for 'inline-lifting-benefit'" + inline_lifting_benefit + | "inline-branch-factor" -> + Float_arg_helper.parse v + "Bad syntax in OCAMLPARAM for 'inline-branch-factor'" + inline_branch_factor + | "inline-max-depth" -> + Int_arg_helper.parse v + "Bad syntax in OCAMLPARAM for 'inline-max-depth'" + inline_max_depth + + | "Oclassic" -> + set "Oclassic" [ classic_inlining ] v + | "O2" -> + if check_bool ppf "O2" v then begin + default_simplify_rounds := 2; + use_inlining_arguments_set o2_arguments; + use_inlining_arguments_set ~round:0 o1_arguments + end + + | "O3" -> + if check_bool ppf "O3" v then begin + default_simplify_rounds := 3; + use_inlining_arguments_set o3_arguments; + use_inlining_arguments_set ~round:1 o2_arguments; + use_inlining_arguments_set ~round:0 o1_arguments + end + | "unbox-closures" -> + set "unbox-closures" [ unbox_closures ] v + | "unbox-closures-factor" -> + int_setter ppf "unbox-closures-factor" unbox_closures_factor v + | "remove-unused-arguments" -> + set "remove-unused-arguments" [ remove_unused_arguments ] v + + | "inlining-report" -> + if !native_code then + set "inlining-report" [ inlining_report ] v + + | "flambda-verbose" -> + set "flambda-verbose" [ dump_flambda_verbose ] v + | "flambda-invariants" -> + set "flambda-invariants" [ flambda_invariant_checks ] v + + (* color output *) + | "color" -> + begin match parse_color_setting v with + | None -> + Location.print_warning Location.none ppf + (Warnings.Bad_env_variable ("OCAMLPARAM", + "bad value for \"color\", \ + (expected \"auto\", \"always\" or \"never\")")) + | Some setting -> color := Some setting + end + + | "intf-suffix" -> Config.interface_suffix := v + + | "I" -> begin + match position with + | Before_args -> first_include_dirs := v :: !first_include_dirs + | Before_link | Before_compile _ -> + last_include_dirs := v :: !last_include_dirs + end + + | "cclib" -> + begin + match position with + | Before_compile _ -> () + | Before_link | Before_args -> + ccobjs := Misc.rev_split_words v @ !ccobjs + end + + | "ccopts" -> + begin + match position with + | Before_link | Before_compile _ -> + last_ccopts := v :: !last_ccopts + | Before_args -> + first_ccopts := v :: !first_ccopts + end + + | "ppx" -> + begin + match position with + | Before_link | Before_compile _ -> + last_ppx := v :: !last_ppx + | Before_args -> + first_ppx := v :: !first_ppx + end + + + | "cmo" | "cma" -> + if not !native_code then + begin + match position with + | Before_link | Before_compile _ -> + last_objfiles := v ::! last_objfiles + | Before_args -> + first_objfiles := v :: !first_objfiles + end + + | "cmx" | "cmxa" -> + if !native_code then + begin + match position with + | Before_link | Before_compile _ -> + last_objfiles := v ::! 
last_objfiles + | Before_args -> + first_objfiles := v :: !first_objfiles + end + + | "pic" -> + if !native_code then + set "pic" [ pic_code ] v + + | "can-discard" -> + can_discard := v ::!can_discard + + | "timings" | "profile" -> + let if_on = if name = "timings" then [ `Time ] else Profile.all_columns in + profile_columns := if check_bool ppf name v then if_on else [] + + | "plugin" -> !load_plugin v + + | _ -> + if not (List.mem name !can_discard) then begin + can_discard := name :: !can_discard; + Printf.eprintf + "Warning: discarding value of variable %S in OCAMLPARAM\n%!" + name + end + +let read_OCAMLPARAM ppf position = + try + let s = Sys.getenv "OCAMLPARAM" in + let (before, after) = + try + parse_args s + with SyntaxError s -> + Location.print_warning Location.none ppf + (Warnings.Bad_env_variable ("OCAMLPARAM", s)); + [],[] + in + List.iter (fun (name, v) -> read_one_param ppf position name v) + (match position with + Before_args -> before + | Before_compile _ | Before_link -> after) + with Not_found -> () + +(* OCAMLPARAM passed as file *) + +type pattern = + | Filename of string + | Any + +type file_option = { + pattern : pattern; + name : string; + value : string; +} + +let scan_line ic = + Scanf.bscanf ic "%[0-9a-zA-Z_.*] : %[a-zA-Z_-] = %s " + (fun pattern name value -> + let pattern = + match pattern with + | "*" -> Any + | _ -> Filename pattern + in + { pattern; name; value }) + +let load_config ppf filename = + match open_in_bin filename with + | exception e -> + Location.print_error ppf (Location.in_file filename); + Format.fprintf ppf "Cannot open file %s@." (Printexc.to_string e); + raise Exit + | ic -> + let sic = Scanf.Scanning.from_channel ic in + let rec read line_number line_start acc = + match scan_line sic with + | exception End_of_file -> + close_in ic; + acc + | exception Scanf.Scan_failure error -> + let position = Lexing.{ + pos_fname = filename; + pos_lnum = line_number; + pos_bol = line_start; + pos_cnum = pos_in ic; + } + in + let loc = Location.{ + loc_start = position; + loc_end = position; + loc_ghost = false; + } + in + Location.print_error ppf loc; + Format.fprintf ppf "Configuration file error %s@." 
error; + close_in ic; + raise Exit + | line -> + read (line_number + 1) (pos_in ic) (line :: acc) + in + let lines = read 0 0 [] in + lines + +let matching_filename filename { pattern } = + match pattern with + | Any -> true + | Filename pattern -> + let filename = String.lowercase_ascii filename in + let pattern = String.lowercase_ascii pattern in + filename = pattern + +let apply_config_file ppf position = + let config_file = + Filename.concat Config.standard_library "ocaml_compiler_internal_params" + in + let config = + if Sys.file_exists config_file then + load_config ppf config_file + else + [] + in + let config = + match position with + | Before_compile filename -> + List.filter (matching_filename filename) config + | Before_args | Before_link -> + List.filter (fun { pattern } -> pattern = Any) config + in + List.iter (fun { name; value } -> read_one_param ppf position name value) + config + +let readenv ppf position = + last_include_dirs := []; + last_ccopts := []; + last_ppx := []; + last_objfiles := []; + apply_config_file ppf position; + read_OCAMLPARAM ppf position; + all_ccopts := !last_ccopts @ !first_ccopts; + all_ppx := !last_ppx @ !first_ppx + +let get_objfiles ~with_ocamlparam = + if with_ocamlparam then + List.rev (!last_objfiles @ !objfiles @ !first_objfiles) + else + List.rev !objfiles + + + + + + +type deferred_action = + | ProcessImplementation of string + | ProcessInterface of string + | ProcessCFile of string + | ProcessOtherFile of string + | ProcessObjects of string list + | ProcessDLLs of string list + +let c_object_of_filename name = + Filename.chop_suffix (Filename.basename name) ".c" ^ Config.ext_obj + +let process_action + (ppf, implementation, interface, ocaml_mod_ext, ocaml_lib_ext) action = + match action with + | ProcessImplementation name -> + readenv ppf (Before_compile name); + let opref = output_prefix name in + implementation ppf name opref; + objfiles := (opref ^ ocaml_mod_ext) :: !objfiles + | ProcessInterface name -> + readenv ppf (Before_compile name); + let opref = output_prefix name in + interface ppf name opref; + if !make_package then objfiles := (opref ^ ".cmi") :: !objfiles + | ProcessCFile name -> + readenv ppf (Before_compile name); + Location.input_name := name; + if Ccomp.compile_file name <> 0 then exit 2; + ccobjs := c_object_of_filename name :: !ccobjs + | ProcessObjects names -> + ccobjs := names @ !ccobjs + | ProcessDLLs names -> + dllibs := names @ !dllibs + | ProcessOtherFile name -> + if Filename.check_suffix name ocaml_mod_ext + || Filename.check_suffix name ocaml_lib_ext then + objfiles := name :: !objfiles + else if Filename.check_suffix name ".cmi" && !make_package then + objfiles := name :: !objfiles + else if Filename.check_suffix name Config.ext_obj + || Filename.check_suffix name Config.ext_lib then + ccobjs := name :: !ccobjs + else if not !native_code && Filename.check_suffix name Config.ext_dll then + dllibs := name :: !dllibs + else + raise(Arg.Bad("don't know what to do with " ^ name)) + + +let action_of_file name = + if Filename.check_suffix name ".ml" + || Filename.check_suffix name ".mlt" then + ProcessImplementation name + else if Filename.check_suffix name !Config.interface_suffix then + ProcessInterface name + else if Filename.check_suffix name ".c" then + ProcessCFile name + else + ProcessOtherFile name + +let deferred_actions = ref [] +let defer action = + deferred_actions := action :: !deferred_actions + +let anonymous filename = defer (action_of_file filename) +let impl filename = defer 
(ProcessImplementation filename) +let intf filename = defer (ProcessInterface filename) + +let process_deferred_actions env = + let final_output_name = !output_name in + (* Make sure the intermediate products don't clash with the final one + when we're invoked like: ocamlopt -o foo bar.c baz.ml. *) + if not !compile_only then output_name := None; + begin + match final_output_name with + | None -> () + | Some output_name -> + if !compile_only then begin + if List.filter (function + | ProcessCFile name -> c_object_of_filename name <> output_name + | _ -> false) !deferred_actions <> [] then + fatal "Options -c and -o are incompatible when compiling C files"; + + if List.length (List.filter (function + | ProcessImplementation _ + | ProcessInterface _ + | _ -> false) !deferred_actions) > 1 then + fatal "Options -c -o are incompatible with compiling multiple files" + end; + end; + if !make_archive && List.exists (function + | ProcessOtherFile name -> Filename.check_suffix name ".cmxa" + | _ -> false) !deferred_actions then + fatal "Option -a cannot be used with .cmxa input files."; + List.iter (process_action env) (List.rev !deferred_actions); + output_name := final_output_name; diff --git a/res_syntax/compiler-libs-406/compenv.mli b/res_syntax/compiler-libs-406/compenv.mli new file mode 100644 index 0000000000..0ee9871a6c --- /dev/null +++ b/res_syntax/compiler-libs-406/compenv.mli @@ -0,0 +1,78 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Fabrice Le Fessant, EPI Gallium, INRIA Paris-Rocquencourt *) +(* *) +(* Copyright 2013 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +val module_of_filename : Format.formatter -> string -> string -> string + +val output_prefix : string -> string +val extract_output : string option -> string +val default_output : string option -> string + +val print_version_and_library : string -> 'a +val print_version_string : unit -> 'a +val print_standard_library : unit -> 'a +val fatal : string -> 'a + +val first_ccopts : string list ref +val first_ppx : string list ref +val first_include_dirs : string list ref +val last_include_dirs : string list ref +val implicit_modules : string list ref + +(* function to call on plugin=XXX *) +val load_plugin : (string -> unit) ref + +(* return the list of objfiles, after OCAMLPARAM and List.rev *) +val get_objfiles : with_ocamlparam:bool -> string list +val last_objfiles : string list ref +val first_objfiles : string list ref + +type filename = string + +type readenv_position = + Before_args | Before_compile of filename | Before_link + +val readenv : Format.formatter -> readenv_position -> unit + +(* [is_unit_name name] returns true only if [name] can be used as a + correct module name *) +val is_unit_name : string -> bool +(* [check_unit_name ppf filename name] prints a warning in [filename] + on [ppf] if [name] should not be used as a module name. 
*) +val check_unit_name : Format.formatter -> string -> string -> unit + +(* Deferred actions of the compiler, while parsing arguments *) + +type deferred_action = + | ProcessImplementation of string + | ProcessInterface of string + | ProcessCFile of string + | ProcessOtherFile of string + | ProcessObjects of string list + | ProcessDLLs of string list + +val c_object_of_filename : string -> string + +val defer : deferred_action -> unit +val anonymous : string -> unit +val impl : string -> unit +val intf : string -> unit + +val process_deferred_actions : + Format.formatter * + (Format.formatter -> string -> string -> unit) * (* compile implementation *) + (Format.formatter -> string -> string -> unit) * (* compile interface *) + string * (* ocaml module extension *) + string -> (* ocaml library extension *) + unit diff --git a/res_syntax/compiler-libs-406/complex.ml b/res_syntax/compiler-libs-406/complex.ml new file mode 100644 index 0000000000..4df53cbadc --- /dev/null +++ b/res_syntax/compiler-libs-406/complex.ml @@ -0,0 +1,87 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 2002 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Complex numbers *) + +type t = { re: float; im: float } + +let zero = { re = 0.0; im = 0.0 } +let one = { re = 1.0; im = 0.0 } +let i = { re = 0.0; im = 1.0 } + +let add x y = { re = x.re +. y.re; im = x.im +. y.im } + +let sub x y = { re = x.re -. y.re; im = x.im -. y.im } + +let neg x = { re = -. x.re; im = -. x.im } + +let conj x = { re = x.re; im = -. x.im } + +let mul x y = { re = x.re *. y.re -. x.im *. y.im; + im = x.re *. y.im +. x.im *. y.re } + +let div x y = + if abs_float y.re >= abs_float y.im then + let r = y.im /. y.re in + let d = y.re +. r *. y.im in + { re = (x.re +. r *. x.im) /. d; + im = (x.im -. r *. x.re) /. d } + else + let r = y.re /. y.im in + let d = y.im +. r *. y.re in + { re = (r *. x.re +. x.im) /. d; + im = (r *. x.im -. x.re) /. d } + +let inv x = div one x + +let norm2 x = x.re *. x.re +. x.im *. x.im + +let norm x = + (* Watch out for overflow in computing re^2 + im^2 *) + let r = abs_float x.re and i = abs_float x.im in + if r = 0.0 then i + else if i = 0.0 then r + else if r >= i then + let q = i /. r in r *. sqrt(1.0 +. q *. q) + else + let q = r /. i in i *. sqrt(1.0 +. q *. q) + +let arg x = atan2 x.im x.re + +let polar n a = { re = cos a *. n; im = sin a *. n } + +let sqrt x = + if x.re = 0.0 && x.im = 0.0 then { re = 0.0; im = 0.0 } + else begin + let r = abs_float x.re and i = abs_float x.im in + let w = + if r >= i then begin + let q = i /. r in + sqrt(r) *. sqrt(0.5 *. (1.0 +. sqrt(1.0 +. q *. q))) + end else begin + let q = r /. i in + sqrt(i) *. sqrt(0.5 *. (q +. sqrt(1.0 +. q *. q))) + end in + if x.re >= 0.0 + then { re = w; im = 0.5 *. x.im /. w } + else { re = 0.5 *. i /. w; im = if x.im >= 0.0 then w else -. w } + end + +let exp x = + let e = exp x.re in { re = e *. cos x.im; im = e *. 
sin x.im } + +let log x = { re = log (norm x); im = atan2 x.im x.re } + +let pow x y = exp (mul y (log x)) diff --git a/res_syntax/compiler-libs-406/complex.mli b/res_syntax/compiler-libs-406/complex.mli new file mode 100644 index 0000000000..2080eccc28 --- /dev/null +++ b/res_syntax/compiler-libs-406/complex.mli @@ -0,0 +1,86 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 2002 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Complex numbers. + + This module provides arithmetic operations on complex numbers. + Complex numbers are represented by their real and imaginary parts + (cartesian representation). Each part is represented by a + double-precision floating-point number (type [float]). *) + +type t = { re: float; im: float } +(** The type of complex numbers. [re] is the real part and [im] the + imaginary part. *) + +val zero: t +(** The complex number [0]. *) + +val one: t +(** The complex number [1]. *) + +val i: t +(** The complex number [i]. *) + +val neg: t -> t +(** Unary negation. *) + +val conj: t -> t +(** Conjugate: given the complex [x + i.y], returns [x - i.y]. *) + +val add: t -> t -> t +(** Addition *) + +val sub: t -> t -> t +(** Subtraction *) + +val mul: t -> t -> t +(** Multiplication *) + +val inv: t -> t +(** Multiplicative inverse ([1/z]). *) + +val div: t -> t -> t +(** Division *) + +val sqrt: t -> t +(** Square root. The result [x + i.y] is such that [x > 0] or + [x = 0] and [y >= 0]. + This function has a discontinuity along the negative real axis. *) + +val norm2: t -> float +(** Norm squared: given [x + i.y], returns [x^2 + y^2]. *) + +val norm: t -> float +(** Norm: given [x + i.y], returns [sqrt(x^2 + y^2)]. *) + +val arg: t -> float +(** Argument. The argument of a complex number is the angle + in the complex plane between the positive real axis and a line + passing through zero and the number. This angle ranges from + [-pi] to [pi]. This function has a discontinuity along the + negative real axis. *) + +val polar: float -> float -> t +(** [polar norm arg] returns the complex having norm [norm] + and argument [arg]. *) + +val exp: t -> t +(** Exponentiation. [exp z] returns [e] to the [z] power. *) + +val log: t -> t +(** Natural logarithm (in base [e]). *) + +val pow: t -> t -> t +(** Power function. [pow z1 z2] returns [z1] to the [z2] power. *) diff --git a/res_syntax/compiler-libs-406/compmisc.ml b/res_syntax/compiler-libs-406/compmisc.ml new file mode 100644 index 0000000000..a0839f34ca --- /dev/null +++ b/res_syntax/compiler-libs-406/compmisc.ml @@ -0,0 +1,80 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Fabrice Le Fessant, EPI Gallium, INRIA Paris-Rocquencourt *) +(* *) +(* Copyright 2013 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +open Compenv + +(* Initialize the search path. + [dir] is always searched first (default: the current directory), + then the directories specified with the -I option (in command-line order), + then the standard library directory (unless the -nostdlib option is given). + *) + +let init_path ?(dir="") native = + let dirs = + if !Clflags.use_threads then "+threads" :: !Clflags.include_dirs + else if !Clflags.use_vmthreads && not native then + "+vmthreads" :: !Clflags.include_dirs + else + !Clflags.include_dirs + in + let dirs = + !last_include_dirs @ dirs @ Config.flexdll_dirs @ !first_include_dirs + in + let exp_dirs = + List.map (Misc.expand_directory Config.standard_library) dirs in + Config.load_path := dir :: + List.rev_append exp_dirs (Clflags.std_include_dir ()); + Env.reset_cache () + +(* Return the initial environment in which compilation proceeds. *) + +(* Note: do not do init_path() in initial_env, this breaks + toplevel initialization (PR#1775) *) + +let open_implicit_module m env = + let open Asttypes in + let lid = {loc = Location.in_file "command line"; + txt = Longident.parse m } in + snd (Typemod.type_open_ Override env lid.loc lid) + +let initial_env () = + Ident.reinit(); + let initial = + if Config.safe_string then Env.initial_safe_string + else if !Clflags.unsafe_string then Env.initial_unsafe_string + else Env.initial_safe_string + in + let env = + if !Clflags.nopervasives then initial else + open_implicit_module "Pervasives" initial + in + List.fold_left (fun env m -> + open_implicit_module m env + ) env (!implicit_modules @ List.rev !Clflags.open_modules) + + +let read_color_env ppf = + try + match Clflags.parse_color_setting (Sys.getenv "OCAML_COLOR") with + | None -> + Location.print_warning Location.none ppf + (Warnings.Bad_env_variable + ("OCAML_COLOR", + "expected \"auto\", \"always\" or \"never\"")); + | Some x -> match !Clflags.color with + | None -> Clflags.color := Some x + | Some _ -> () + with + Not_found -> () diff --git a/res_syntax/compiler-libs-406/config.ml b/res_syntax/compiler-libs-406/config.ml new file mode 100644 index 0000000000..faa66e8579 --- /dev/null +++ b/res_syntax/compiler-libs-406/config.ml @@ -0,0 +1,212 @@ +#2 "utils/config.mlp" +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +(* The main OCaml version string has moved to ../VERSION *) +let version = Sys.ocaml_version + +let standard_library_default = "/usr/local/lib/ocaml" + +let standard_library = + try + Sys.getenv "OCAMLLIB" + with Not_found -> + try + Sys.getenv "CAMLLIB" + with Not_found -> + standard_library_default + +let standard_runtime = "/usr/local/bin/ocamlrun" +let ccomp_type = "cc" +let c_compiler = "gcc" +let c_output_obj = "-o " +let ocamlc_cflags = "-O2 -fno-strict-aliasing -fwrapv " +let ocamlc_cppflags = "-D_FILE_OFFSET_BITS=64 -D_REENTRANT" +let ocamlopt_cflags = "-O2 -fno-strict-aliasing -fwrapv" +let ocamlopt_cppflags = "-D_FILE_OFFSET_BITS=64 -D_REENTRANT" +let bytecomp_c_libraries = "-lpthread " +(* bytecomp_c_compiler and native_c_compiler have been supported for a + long time and are retained for backwards compatibility. + For programs that don't need compatibility with older OCaml releases + the recommended approach is to use the constituent variables + c_compiler, ocamlc_cflags, ocamlc_cppflags etc., directly. +*) +let bytecomp_c_compiler = + c_compiler ^ " " ^ ocamlc_cflags ^ " " ^ ocamlc_cppflags +let native_c_compiler = + c_compiler ^ " " ^ ocamlopt_cflags ^ " " ^ ocamlopt_cppflags +let native_c_libraries = "" +let native_pack_linker = "ld -r -arch x86_64 -o\ " +let ranlib = "ranlib" +let ar = "ar" +let cc_profile = "-pg" +let mkdll, mkexe, mkmaindll = + (* @@DRA Cygwin - but only if shared libraries are enabled, which we + should be able to detect? *) + if Sys.os_type = "Win32" then + try + let flexlink = + let flexlink = Sys.getenv "OCAML_FLEXLINK" in + let f i = + let c = flexlink.[i] in + if c = '/' then '\\' else c in + (String.init (String.length flexlink) f) ^ " " in + flexlink, + flexlink ^ " -exe", + flexlink ^ " -maindll" + with Not_found -> + "gcc -shared -flat_namespace -undefined suppress -Wl,-no_compact_unwind", "gcc -O2 -fno-strict-aliasing -fwrapv -Wall -Werror -D_FILE_OFFSET_BITS=64 -D_REENTRANT -DCAML_NAME_SPACE -Wl,-no_compact_unwind", "gcc -shared -flat_namespace -undefined suppress -Wl,-no_compact_unwind" + else + "gcc -shared -flat_namespace -undefined suppress -Wl,-no_compact_unwind", "gcc -O2 -fno-strict-aliasing -fwrapv -Wall -Werror -D_FILE_OFFSET_BITS=64 -D_REENTRANT -DCAML_NAME_SPACE -Wl,-no_compact_unwind", "gcc -shared -flat_namespace -undefined suppress -Wl,-no_compact_unwind" + +let profiling = true +let flambda = false +let safe_string = false +let default_safe_string = true +let windows_unicode = 0 != 0 + +let flat_float_array = true + +let afl_instrument = false + +let exec_magic_number = "Caml1999X011" +and cmi_magic_number = "Caml1999I022" +and cmo_magic_number = "Caml1999O022" +and cma_magic_number = "Caml1999A022" +and cmx_magic_number = + if flambda then + "Caml1999y022" + else + "Caml1999Y022" +and cmxa_magic_number = + if flambda then + "Caml1999z022" + else + "Caml1999Z022" +and ast_impl_magic_number = "Caml1999M022" +and ast_intf_magic_number = "Caml1999N022" +and cmxs_magic_number = "Caml1999D022" + (* cmxs_magic_number is duplicated in otherlibs/dynlink/natdynlink.ml *) +and cmt_magic_number = "Caml1999T022" + +let load_path = ref ([] : string list) + +let interface_suffix = ref ".mli" + +let max_tag = 245 +(* This is normally the same as in obj.ml, but we have to define it + separately because it can differ when we're in the middle of a + bootstrapping phase. 
*) +let lazy_tag = 246 + +let max_young_wosize = 256 +let stack_threshold = 256 (* see byterun/config.h *) +let stack_safety_margin = 60 + +let architecture = "amd64" +let model = "default" +let system = "macosx" + +let asm = "clang -arch x86_64 -Wno-trigraphs -c" +let asm_cfi_supported = true +let with_frame_pointers = false +let spacetime = false +let enable_call_counts = true +let libunwind_available = false +let libunwind_link_flags = "" +let profinfo = false +let profinfo_width = 0 + +let ext_exe = "" +let ext_obj = ".o" +let ext_asm = ".s" +let ext_lib = ".a" +let ext_dll = ".so" + +let host = "x86_64-apple-darwin21.4.0" +let target = "x86_64-apple-darwin21.4.0" + +let default_executable_name = + match Sys.os_type with + "Unix" -> "a.out" + | "Win32" | "Cygwin" -> "camlprog.exe" + | _ -> "camlprog" + +let systhread_supported = true;; + +let flexdll_dirs = [];; + +let print_config oc = + let p name valu = Printf.fprintf oc "%s: %s\n" name valu in + let p_int name valu = Printf.fprintf oc "%s: %d\n" name valu in + let p_bool name valu = Printf.fprintf oc "%s: %B\n" name valu in + p "version" version; + p "standard_library_default" standard_library_default; + p "standard_library" standard_library; + p "standard_runtime" standard_runtime; + p "ccomp_type" ccomp_type; + p "c_compiler" c_compiler; + p "ocamlc_cflags" ocamlc_cflags; + p "ocamlc_cppflags" ocamlc_cppflags; + p "ocamlopt_cflags" ocamlopt_cflags; + p "ocamlopt_cppflags" ocamlopt_cppflags; + p "bytecomp_c_compiler" bytecomp_c_compiler; + p "native_c_compiler" native_c_compiler; + p "bytecomp_c_libraries" bytecomp_c_libraries; + p "native_c_libraries" native_c_libraries; + p "native_pack_linker" native_pack_linker; + p "ranlib" ranlib; + p "cc_profile" cc_profile; + p "architecture" architecture; + p "model" model; + p_int "int_size" Sys.int_size; + p_int "word_size" Sys.word_size; + p "system" system; + p "asm" asm; + p_bool "asm_cfi_supported" asm_cfi_supported; + p_bool "with_frame_pointers" with_frame_pointers; + p "ext_exe" ext_exe; + p "ext_obj" ext_obj; + p "ext_asm" ext_asm; + p "ext_lib" ext_lib; + p "ext_dll" ext_dll; + p "os_type" Sys.os_type; + p "default_executable_name" default_executable_name; + p_bool "systhread_supported" systhread_supported; + p "host" host; + p "target" target; + p_bool "profiling" profiling; + p_bool "flambda" flambda; + p_bool "spacetime" spacetime; + p_bool "safe_string" safe_string; + p_bool "default_safe_string" default_safe_string; + p_bool "flat_float_array" flat_float_array; + p_bool "afl_instrument" afl_instrument; + p_bool "windows_unicode" windows_unicode; + + (* print the magic number *) + p "exec_magic_number" exec_magic_number; + p "cmi_magic_number" cmi_magic_number; + p "cmo_magic_number" cmo_magic_number; + p "cma_magic_number" cma_magic_number; + p "cmx_magic_number" cmx_magic_number; + p "cmxa_magic_number" cmxa_magic_number; + p "ast_impl_magic_number" ast_impl_magic_number; + p "ast_intf_magic_number" ast_intf_magic_number; + p "cmxs_magic_number" cmxs_magic_number; + p "cmt_magic_number" cmt_magic_number; + + flush oc; +;; diff --git a/res_syntax/compiler-libs-406/config.mli b/res_syntax/compiler-libs-406/config.mli new file mode 100644 index 0000000000..2e0bd86901 --- /dev/null +++ b/res_syntax/compiler-libs-406/config.mli @@ -0,0 +1,184 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique 
et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* System configuration *) + +val version: string + (* The current version number of the system *) + +val standard_library: string + (* The directory containing the standard libraries *) +val standard_runtime: string + (* The full path to the standard bytecode interpreter ocamlrun *) +val ccomp_type: string + (* The "kind" of the C compiler, assembler and linker used: one of + "cc" (for Unix-style C compilers) + "msvc" (for Microsoft Visual C++ and MASM) *) +val c_compiler: string + (* The compiler to use for compiling C files *) +val c_output_obj: string + (* Name of the option of the C compiler for specifying the output file *) +val ocamlc_cflags : string + (* The flags ocamlc should pass to the C compiler *) +val ocamlc_cppflags : string + (* The flags ocamlc should pass to the C preprocessor *) +val ocamlopt_cflags : string + (* The flags ocamlopt should pass to the C compiler *) +val ocamlopt_cppflags : string + (* The flags ocamlopt should pass to the C preprocessor *) +val bytecomp_c_libraries: string + (* The C libraries to link with custom runtimes *) +val native_c_libraries: string + (* The C libraries to link with native-code programs *) +val native_pack_linker: string + (* The linker to use for packaging (ocamlopt -pack) and for partial + links (ocamlopt -output-obj). *) +val mkdll: string + (* The linker command line to build dynamic libraries. *) +val mkexe: string + (* The linker command line to build executables. *) +val mkmaindll: string + (* The linker command line to build main programs as dlls. *) +val ranlib: string + (* Command to randomize a library, or "" if not needed *) +val ar: string + (* Name of the ar command, or "" if not needed (MSVC) *) +val cc_profile : string + (* The command line option to the C compiler to enable profiling. *) + +val load_path: string list ref + (* Directories in the search path for .cmi and .cmo files *) + +val interface_suffix: string ref + (* Suffix for interface file names *) + +val exec_magic_number: string + (* Magic number for bytecode executable files *) +val cmi_magic_number: string + (* Magic number for compiled interface files *) +val cmo_magic_number: string + (* Magic number for object bytecode files *) +val cma_magic_number: string + (* Magic number for archive files *) +val cmx_magic_number: string + (* Magic number for compilation unit descriptions *) +val cmxa_magic_number: string + (* Magic number for libraries of compilation unit descriptions *) +val ast_intf_magic_number: string + (* Magic number for file holding an interface syntax tree *) +val ast_impl_magic_number: string + (* Magic number for file holding an implementation syntax tree *) +val cmxs_magic_number: string + (* Magic number for dynamically-loadable plugins *) +val cmt_magic_number: string + (* Magic number for compiled interface files *) + +val max_tag: int + (* Biggest tag that can be stored in the header of a regular block. *) +val lazy_tag : int + (* Normally the same as Obj.lazy_tag. Separate definition because + of technical reasons for bootstrapping. 
*) +val max_young_wosize: int + (* Maximal size of arrays that are directly allocated in the + minor heap *) +val stack_threshold: int + (* Size in words of safe area at bottom of VM stack, + see byterun/config.h *) +val stack_safety_margin: int + (* Size in words of the safety margin between the bottom of + the stack and the stack pointer. This margin can be used by + intermediate computations of some instructions, or the event + handler. *) + +val architecture: string + (* Name of processor type for the native-code compiler *) +val model: string + (* Name of processor submodel for the native-code compiler *) +val system: string + (* Name of operating system for the native-code compiler *) + +val asm: string + (* The assembler (and flags) to use for assembling + ocamlopt-generated code. *) + +val asm_cfi_supported: bool + (* Whether assembler understands CFI directives *) +val with_frame_pointers : bool + (* Whether assembler should maintain frame pointers *) + +val ext_obj: string + (* Extension for object files, e.g. [.o] under Unix. *) +val ext_asm: string + (* Extension for assembler files, e.g. [.s] under Unix. *) +val ext_lib: string + (* Extension for library files, e.g. [.a] under Unix. *) +val ext_dll: string + (* Extension for dynamically-loaded libraries, e.g. [.so] under Unix.*) + +val default_executable_name: string + (* Name of executable produced by linking if none is given with -o, + e.g. [a.out] under Unix. *) + +val systhread_supported : bool + (* Whether the system thread library is implemented *) + +val flexdll_dirs : string list + (* Directories needed for the FlexDLL objects *) + +val host : string + (* Whether the compiler is a cross-compiler *) + +val target : string + (* Whether the compiler is a cross-compiler *) + +val print_config : out_channel -> unit;; + +val profiling : bool + (* Whether profiling with gprof is supported on this platform *) + +val flambda : bool + (* Whether the compiler was configured for flambda *) + +val spacetime : bool + (* Whether the compiler was configured for Spacetime profiling *) +val enable_call_counts : bool + (* Whether call counts are to be available when Spacetime profiling *) +val profinfo : bool + (* Whether the compiler was configured for profiling *) +val profinfo_width : int + (* How many bits are to be used in values' headers for profiling + information *) +val libunwind_available : bool + (* Whether the libunwind library is available on the target *) +val libunwind_link_flags : string + (* Linker flags to use libunwind *) + +val safe_string: bool + (* Whether the compiler was configured with -force-safe-string; + in that case, the -unsafe-string compile-time option is unavailable + + @since 4.05.0 *) +val default_safe_string: bool + (* Whether the compiler was configured to use the -safe-string + or -unsafe-string compile-time option by default. 
+ + @since 4.06.0 *) +val flat_float_array : bool + (* Whether the compiler and runtime automagically flatten float + arrays *) +val windows_unicode: bool + (* Whether Windows Unicode runtime is enabled *) +val afl_instrument : bool + (* Whether afl-fuzz instrumentation is generated by default *) diff --git a/res_syntax/compiler-libs-406/consistbl.ml b/res_syntax/compiler-libs-406/consistbl.ml new file mode 100644 index 0000000000..dbba5d1f5a --- /dev/null +++ b/res_syntax/compiler-libs-406/consistbl.ml @@ -0,0 +1,66 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 2002 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Consistency tables: for checking consistency of module CRCs *) + +type t = (string, Digest.t * string) Hashtbl.t + +let create () = Hashtbl.create 13 + +let clear = Hashtbl.clear + +exception Inconsistency of string * string * string + +exception Not_available of string + +let check tbl name crc source = + try + let (old_crc, old_source) = Hashtbl.find tbl name in + if crc <> old_crc then raise(Inconsistency(name, source, old_source)) + with Not_found -> + Hashtbl.add tbl name (crc, source) + +let check_noadd tbl name crc source = + try + let (old_crc, old_source) = Hashtbl.find tbl name in + if crc <> old_crc then raise(Inconsistency(name, source, old_source)) + with Not_found -> + raise (Not_available name) + +let set tbl name crc source = Hashtbl.add tbl name (crc, source) + +let source tbl name = snd (Hashtbl.find tbl name) + +let extract l tbl = + let l = List.sort_uniq String.compare l in + List.fold_left + (fun assc name -> + try + let (crc, _) = Hashtbl.find tbl name in + (name, Some crc) :: assc + with Not_found -> + (name, None) :: assc) + [] l + +let filter p tbl = + let to_remove = ref [] in + Hashtbl.iter + (fun name _ -> + if not (p name) then to_remove := name :: !to_remove) + tbl; + List.iter + (fun name -> + while Hashtbl.mem tbl name do Hashtbl.remove tbl name done) + !to_remove diff --git a/res_syntax/compiler-libs-406/consistbl.mli b/res_syntax/compiler-libs-406/consistbl.mli new file mode 100644 index 0000000000..c532bddfe8 --- /dev/null +++ b/res_syntax/compiler-libs-406/consistbl.mli @@ -0,0 +1,62 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 2002 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Consistency tables: for checking consistency of module CRCs *) + +type t + +val create: unit -> t + +val clear: t -> unit + +val check: t -> string -> Digest.t -> string -> unit + (* [check tbl name crc source] + checks consistency of ([name], [crc]) with infos previously + stored in [tbl]. 
If no CRC was previously associated with + [name], record ([name], [crc]) in [tbl]. + [source] is the name of the file from which the information + comes from. This is used for error reporting. *) + +val check_noadd: t -> string -> Digest.t -> string -> unit + (* Same as [check], but raise [Not_available] if no CRC was previously + associated with [name]. *) + +val set: t -> string -> Digest.t -> string -> unit + (* [set tbl name crc source] forcefully associates [name] with + [crc] in [tbl], even if [name] already had a different CRC + associated with [name] in [tbl]. *) + +val source: t -> string -> string + (* [source tbl name] returns the file name associated with [name] + if the latter has an associated CRC in [tbl]. + Raise [Not_found] otherwise. *) + +val extract: string list -> t -> (string * Digest.t option) list + (* [extract tbl names] returns an associative list mapping each string + in [names] to the CRC associated with it in [tbl]. If no CRC is + associated with a name then it is mapped to [None]. *) + +val filter: (string -> bool) -> t -> unit + (* [filter pred tbl] removes from [tbl] table all (name, CRC) pairs + such that [pred name] is [false]. *) + +exception Inconsistency of string * string * string + (* Raised by [check] when a CRC mismatch is detected. + First string is the name of the compilation unit. + Second string is the source that caused the inconsistency. + Third string is the source that set the CRC. *) + +exception Not_available of string + (* Raised by [check_noadd] when a name doesn't have an associated CRC. *) diff --git a/res_syntax/compiler-libs-406/ctype.ml b/res_syntax/compiler-libs-406/ctype.ml new file mode 100644 index 0000000000..df46de1f67 --- /dev/null +++ b/res_syntax/compiler-libs-406/ctype.ml @@ -0,0 +1,4576 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy and Jerome Vouillon, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Operations on core types *) + +open Misc +open Asttypes +open Types +open Btype + +(* + Type manipulation after type inference + ====================================== + If one wants to manipulate a type after type inference (for + instance, during code generation or in the debugger), one must + first make sure that the type levels are correct, using the + function [correct_levels]. Then, this type can be correctly + manipulated by [apply], [expand_head] and [moregeneral]. +*) + +(* + General notes + ============= + - As much sharing as possible should be kept : it makes types + smaller and better abbreviated. + When necessary, some sharing can be lost. Types will still be + printed correctly (+++ TO DO...), and abbreviations defined by a + class do not depend on sharing thanks to constrained + abbreviations. (Of course, even if some sharing is lost, typing + will still be correct.) + - All nodes of a type have a level : that way, one know whether a + node need to be duplicated or not when instantiating a type. + - Levels of a type are decreasing (generic level being considered + as greatest). 
+ - The level of a type constructor is superior to the binding + time of its path. + - Recursive types without limitation should be handled (even if + there is still an occur check). This avoid treating specially the + case for objects, for instance. Furthermore, the occur check + policy can then be easily changed. +*) + +(**** Errors ****) + +exception Unify of (type_expr * type_expr) list + +exception Tags of label * label + +let () = + Location.register_error_of_exn + (function + | Tags (l, l') -> + Some + Location. + (errorf ~loc:(in_file !input_name) + "In this program,@ variant constructors@ `%s and `%s@ \ + have the same hash value.@ Change one of them." l l' + ) + | _ -> None + ) + +exception Subtype of + (type_expr * type_expr) list * (type_expr * type_expr) list + +exception Cannot_expand + +exception Cannot_apply + +exception Recursive_abbrev + +(* GADT: recursive abbrevs can appear as a result of local constraints *) +exception Unification_recursive_abbrev of (type_expr * type_expr) list + +(**** Type level management ****) + +let current_level = ref 0 +let nongen_level = ref 0 +let global_level = ref 1 +let saved_level = ref [] + +type levels = + { current_level: int; nongen_level: int; global_level: int; + saved_level: (int * int) list; } +let save_levels () = + { current_level = !current_level; + nongen_level = !nongen_level; + global_level = !global_level; + saved_level = !saved_level } +let set_levels l = + current_level := l.current_level; + nongen_level := l.nongen_level; + global_level := l.global_level; + saved_level := l.saved_level + +let get_current_level () = !current_level +let init_def level = current_level := level; nongen_level := level +let begin_def () = + saved_level := (!current_level, !nongen_level) :: !saved_level; + incr current_level; nongen_level := !current_level +let begin_class_def () = + saved_level := (!current_level, !nongen_level) :: !saved_level; + incr current_level +let raise_nongen_level () = + saved_level := (!current_level, !nongen_level) :: !saved_level; + nongen_level := !current_level +let end_def () = + let (cl, nl) = List.hd !saved_level in + saved_level := List.tl !saved_level; + current_level := cl; nongen_level := nl + +let reset_global_level () = + global_level := !current_level + 1 +let increase_global_level () = + let gl = !global_level in + global_level := !current_level; + gl +let restore_global_level gl = + global_level := gl + +(**** Whether a path points to an object type (with hidden row variable) ****) +let is_object_type path = + let name = + match path with Path.Pident id -> Ident.name id + | Path.Pdot(_, s,_) -> s + | Path.Papply _ -> assert false + in name.[0] = '#' + +(**** Control tracing of GADT instances *) + +let trace_gadt_instances = ref false +let check_trace_gadt_instances env = + not !trace_gadt_instances && Env.has_local_constraints env && + (trace_gadt_instances := true; cleanup_abbrev (); true) + +let reset_trace_gadt_instances b = + if b then trace_gadt_instances := false + +let wrap_trace_gadt_instances env f x = + let b = check_trace_gadt_instances env in + let y = f x in + reset_trace_gadt_instances b; + y + +(**** Abbreviations without parameters ****) +(* Shall reset after generalizing *) + +let simple_abbrevs = ref Mnil + +let proper_abbrevs path tl abbrev = + if tl <> [] || !trace_gadt_instances || !Clflags.principal || + is_object_type path + then abbrev + else simple_abbrevs + +(**** Some type creators ****) + +(* Re-export generic type creators *) + +let newty2 = Btype.newty2 +let newty desc = 
newty2 !current_level desc + +let newvar ?name () = newty2 !current_level (Tvar name) +let newvar2 ?name level = newty2 level (Tvar name) +let new_global_var ?name () = newty2 !global_level (Tvar name) + +let newobj fields = newty (Tobject (fields, ref None)) + +let newconstr path tyl = newty (Tconstr (path, tyl, ref Mnil)) + +let none = newty (Ttuple []) (* Clearly ill-formed type *) + +(**** Representative of a type ****) + +(* Re-export repr *) +let repr = repr + +(**** Type maps ****) + +module TypePairs = + Hashtbl.Make (struct + type t = type_expr * type_expr + let equal (t1, t1') (t2, t2') = (t1 == t2) && (t1' == t2') + let hash (t, t') = t.id + 93 * t'.id + end) + + +(**** unification mode ****) + +type unification_mode = + | Expression (* unification in expression *) + | Pattern (* unification in pattern which may add local constraints *) + +let umode = ref Expression +let generate_equations = ref false +let assume_injective = ref false + +let set_mode_pattern ~generate ~injective f = + let old_unification_mode = !umode + and old_gen = !generate_equations + and old_inj = !assume_injective in + try + umode := Pattern; + generate_equations := generate; + assume_injective := injective; + let ret = f () in + umode := old_unification_mode; + generate_equations := old_gen; + assume_injective := old_inj; + ret + with e -> + umode := old_unification_mode; + generate_equations := old_gen; + assume_injective := old_inj; + raise e + +(*** Checks for type definitions ***) + +let in_current_module = function + | Path.Pident _ -> true + | Path.Pdot _ | Path.Papply _ -> false + +let in_pervasives p = + in_current_module p && + try ignore (Env.find_type p Env.initial_safe_string); true + with Not_found -> false + +let is_datatype decl= + match decl.type_kind with + Type_record _ | Type_variant _ | Type_open -> true + | Type_abstract -> false + + + (**********************************************) + (* Miscellaneous operations on object types *) + (**********************************************) + +(* Note: + We need to maintain some invariants: + * cty_self must be a Tobject + * ... +*) + +(**** Object field manipulation. 
****) + +let object_fields ty = + match (repr ty).desc with + Tobject (fields, _) -> fields + | _ -> assert false + +let flatten_fields ty = + let rec flatten l ty = + let ty = repr ty in + match ty.desc with + Tfield(s, k, ty1, ty2) -> + flatten ((s, k, ty1)::l) ty2 + | _ -> + (l, ty) + in + let (l, r) = flatten [] ty in + (List.sort (fun (n, _, _) (n', _, _) -> compare n n') l, r) + +let build_fields level = + List.fold_right + (fun (s, k, ty1) ty2 -> newty2 level (Tfield(s, k, ty1, ty2))) + +let associate_fields fields1 fields2 = + let rec associate p s s' = + function + (l, []) -> + (List.rev p, (List.rev s) @ l, List.rev s') + | ([], l') -> + (List.rev p, List.rev s, (List.rev s') @ l') + | ((n, k, t)::r, (n', k', t')::r') when n = n' -> + associate ((n, k, t, k', t')::p) s s' (r, r') + | ((n, k, t)::r, ((n', _k', _t')::_ as l')) when n < n' -> + associate p ((n, k, t)::s) s' (r, l') + | (((_n, _k, _t)::_ as l), (n', k', t')::r') (* when n > n' *) -> + associate p s ((n', k', t')::s') (l, r') + in + associate [] [] [] (fields1, fields2) + +(**** Check whether an object is open ****) + +(* +++ The abbreviation should eventually be expanded *) +let rec object_row ty = + let ty = repr ty in + match ty.desc with + Tobject (t, _) -> object_row t + | Tfield(_, _, _, t) -> object_row t + | _ -> ty + +let opened_object ty = + match (object_row ty).desc with + | Tvar _ | Tunivar _ | Tconstr _ -> true + | _ -> false + +let concrete_object ty = + match (object_row ty).desc with + | Tvar _ -> false + | _ -> true + +(**** Close an object ****) + +let close_object ty = + let rec close ty = + let ty = repr ty in + match ty.desc with + Tvar _ -> + link_type ty (newty2 ty.level Tnil) + | Tfield(_, _, _, ty') -> close ty' + | _ -> assert false + in + match (repr ty).desc with + Tobject (ty, _) -> close ty + | _ -> assert false + +(**** Row variable of an object type ****) + +let row_variable ty = + let rec find ty = + let ty = repr ty in + match ty.desc with + Tfield (_, _, _, ty) -> find ty + | Tvar _ -> ty + | _ -> assert false + in + match (repr ty).desc with + Tobject (fi, _) -> find fi + | _ -> assert false + +(**** Object name manipulation ****) +(* +++ Bientot obsolete *) + +let set_object_name id rv params ty = + match (repr ty).desc with + Tobject (_fi, nm) -> + set_name nm (Some (Path.Pident id, rv::params)) + | _ -> + assert false + +let remove_object_name ty = + match (repr ty).desc with + Tobject (_, nm) -> set_name nm None + | Tconstr (_, _, _) -> () + | _ -> fatal_error "Ctype.remove_object_name" + +(**** Hiding of private methods ****) + +let hide_private_methods ty = + match (repr ty).desc with + Tobject (fi, nm) -> + nm := None; + let (fl, _) = flatten_fields fi in + List.iter + (function (_, k, _) -> + match field_kind_repr k with + Fvar r -> set_kind r Fabsent + | _ -> ()) + fl + | _ -> + assert false + + + (*******************************) + (* Operations on class types *) + (*******************************) + + +let rec signature_of_class_type = + function + Cty_constr (_, _, cty) -> signature_of_class_type cty + | Cty_signature sign -> sign + | Cty_arrow (_, _, cty) -> signature_of_class_type cty + +let self_type cty = + repr (signature_of_class_type cty).csig_self + +let rec class_type_arity = + function + Cty_constr (_, _, cty) -> class_type_arity cty + | Cty_signature _ -> 0 + | Cty_arrow (_, _, cty) -> 1 + class_type_arity cty + + + (*******************************************) + (* Miscellaneous operations on row types *) + (*******************************************) + 
+let sort_row_fields = List.sort (fun (p,_) (q,_) -> compare p q) + +let rec merge_rf r1 r2 pairs fi1 fi2 = + match fi1, fi2 with + (l1,f1 as p1)::fi1', (l2,f2 as p2)::fi2' -> + if l1 = l2 then merge_rf r1 r2 ((l1,f1,f2)::pairs) fi1' fi2' else + if l1 < l2 then merge_rf (p1::r1) r2 pairs fi1' fi2 else + merge_rf r1 (p2::r2) pairs fi1 fi2' + | [], _ -> (List.rev r1, List.rev_append r2 fi2, pairs) + | _, [] -> (List.rev_append r1 fi1, List.rev r2, pairs) + +let merge_row_fields fi1 fi2 = + match fi1, fi2 with + [], _ | _, [] -> (fi1, fi2, []) + | [p1], _ when not (List.mem_assoc (fst p1) fi2) -> (fi1, fi2, []) + | _, [p2] when not (List.mem_assoc (fst p2) fi1) -> (fi1, fi2, []) + | _ -> merge_rf [] [] [] (sort_row_fields fi1) (sort_row_fields fi2) + +let rec filter_row_fields erase = function + [] -> [] + | (_l,f as p)::fi -> + let fi = filter_row_fields erase fi in + match row_field_repr f with + Rabsent -> fi + | Reither(_,_,false,e) when erase -> set_row_field e Rabsent; fi + | _ -> p :: fi + + (**************************************) + (* Check genericity of type schemes *) + (**************************************) + + +exception Non_closed of type_expr * bool + +let free_variables = ref [] +let really_closed = ref None + +let rec free_vars_rec real ty = + let ty = repr ty in + if ty.level >= lowest_level then begin + ty.level <- pivot_level - ty.level; + begin match ty.desc, !really_closed with + Tvar _, _ -> + free_variables := (ty, real) :: !free_variables + | Tconstr (path, tl, _), Some env -> + begin try + let (_, body, _) = Env.find_type_expansion path env in + if (repr body).level <> generic_level then + free_variables := (ty, real) :: !free_variables + with Not_found -> () + end; + List.iter (free_vars_rec true) tl +(* Do not count "virtual" free variables + | Tobject(ty, {contents = Some (_, p)}) -> + free_vars_rec false ty; List.iter (free_vars_rec true) p +*) + | Tobject (ty, _), _ -> + free_vars_rec false ty + | Tfield (_, _, ty1, ty2), _ -> + free_vars_rec true ty1; free_vars_rec false ty2 + | Tvariant row, _ -> + let row = row_repr row in + iter_row (free_vars_rec true) row; + if not (static_row row) then free_vars_rec false row.row_more + | _ -> + iter_type_expr (free_vars_rec true) ty + end; + end + +let free_vars ?env ty = + free_variables := []; + really_closed := env; + free_vars_rec true ty; + let res = !free_variables in + free_variables := []; + really_closed := None; + res + +let free_variables ?env ty = + let tl = List.map fst (free_vars ?env ty) in + unmark_type ty; + tl + +let closed_type ty = + match free_vars ty with + [] -> () + | (v, real) :: _ -> raise (Non_closed (v, real)) + +let closed_parameterized_type params ty = + List.iter mark_type params; + let ok = + try closed_type ty; true with Non_closed _ -> false in + List.iter unmark_type params; + unmark_type ty; + ok + +let closed_type_decl decl = + try + List.iter mark_type decl.type_params; + begin match decl.type_kind with + Type_abstract -> + () + | Type_variant v -> + List.iter + (fun {cd_args; cd_res; _} -> + match cd_res with + | Some _ -> () + | None -> + match cd_args with + | Cstr_tuple l -> List.iter closed_type l + | Cstr_record l -> List.iter (fun l -> closed_type l.ld_type) l + ) + v + | Type_record(r, _rep) -> + List.iter (fun l -> closed_type l.ld_type) r + | Type_open -> () + end; + begin match decl.type_manifest with + None -> () + | Some ty -> closed_type ty + end; + unmark_type_decl decl; + None + with Non_closed (ty, _) -> + unmark_type_decl decl; + Some ty + +let 
closed_extension_constructor ext = + try + List.iter mark_type ext.ext_type_params; + begin match ext.ext_ret_type with + | Some _ -> () + | None -> iter_type_expr_cstr_args closed_type ext.ext_args + end; + unmark_extension_constructor ext; + None + with Non_closed (ty, _) -> + unmark_extension_constructor ext; + Some ty + +type closed_class_failure = + CC_Method of type_expr * bool * string * type_expr + | CC_Value of type_expr * bool * string * type_expr + +exception CCFailure of closed_class_failure + +let closed_class params sign = + let ty = object_fields (repr sign.csig_self) in + let (fields, rest) = flatten_fields ty in + List.iter mark_type params; + mark_type rest; + List.iter + (fun (lab, _, ty) -> if lab = dummy_method then mark_type ty) + fields; + try + mark_type_node (repr sign.csig_self); + List.iter + (fun (lab, kind, ty) -> + if field_kind_repr kind = Fpresent then + try closed_type ty with Non_closed (ty0, real) -> + raise (CCFailure (CC_Method (ty0, real, lab, ty)))) + fields; + mark_type_params (repr sign.csig_self); + List.iter unmark_type params; + unmark_class_signature sign; + None + with CCFailure reason -> + mark_type_params (repr sign.csig_self); + List.iter unmark_type params; + unmark_class_signature sign; + Some reason + + + (**********************) + (* Type duplication *) + (**********************) + + +(* Duplicate a type, preserving only type variables *) +let duplicate_type ty = + Subst.type_expr Subst.identity ty + +(* Same, for class types *) +let duplicate_class_type ty = + Subst.class_type Subst.identity ty + + + (*****************************) + (* Type level manipulation *) + (*****************************) + +(* + It would be a bit more efficient to remove abbreviation expansions + rather than generalizing them: these expansions will usually not be + used anymore. However, this is not possible in the general case, as + [expand_abbrev] (via [subst]) requires these expansions to be + preserved. Does it worth duplicating this code ? 
+*) +let rec generalize ty = + let ty = repr ty in + if (ty.level > !current_level) && (ty.level <> generic_level) then begin + set_level ty generic_level; + begin match ty.desc with + Tconstr (_, _, abbrev) -> + iter_abbrev generalize !abbrev + | _ -> () + end; + iter_type_expr generalize ty + end + +let generalize ty = + simple_abbrevs := Mnil; + generalize ty + +(* Generalize the structure and lower the variables *) + +let rec generalize_structure var_level ty = + let ty = repr ty in + if ty.level <> generic_level then begin + if is_Tvar ty && ty.level > var_level then + set_level ty var_level + else if + ty.level > !current_level && + match ty.desc with + Tconstr (p, _, abbrev) -> + not (is_object_type p) && (abbrev := Mnil; true) + | _ -> true + then begin + set_level ty generic_level; + iter_type_expr (generalize_structure var_level) ty + end + end + +let generalize_structure var_level ty = + simple_abbrevs := Mnil; + generalize_structure var_level ty + +(* Generalize the spine of a function, if the level >= !current_level *) + +let rec generalize_spine ty = + let ty = repr ty in + if ty.level < !current_level || ty.level = generic_level then () else + match ty.desc with + Tarrow (_, ty1, ty2, _) -> + set_level ty generic_level; + generalize_spine ty1; + generalize_spine ty2; + | Tpoly (ty', _) -> + set_level ty generic_level; + generalize_spine ty' + | Ttuple tyl + | Tpackage (_, _, tyl) -> + set_level ty generic_level; + List.iter generalize_spine tyl + | Tconstr (p, tyl, memo) when not (is_object_type p) -> + set_level ty generic_level; + memo := Mnil; + List.iter generalize_spine tyl + | _ -> () + +let forward_try_expand_once = (* Forward declaration *) + ref (fun _env _ty -> raise Cannot_expand) + +(* + Lower the levels of a type (assume [level] is not + [generic_level]). +*) +(* + The level of a type constructor must be greater than its binding + time. That way, a type constructor cannot escape the scope of its + definition, as would be the case in + let x = ref [] + module M = struct type t let _ = (x : t list ref) end + (without this constraint, the type system would actually be unsound.) +*) +let get_level env p = + try + match (Env.find_type p env).type_newtype_level with + | None -> Path.binding_time p + | Some (x, _) -> x + with + | Not_found -> + (* no newtypes in predef *) + Path.binding_time p + +let rec normalize_package_path env p = + let t = + try (Env.find_modtype p env).mtd_type + with Not_found -> None + in + match t with + | Some (Mty_ident p) -> normalize_package_path env p + | Some (Mty_signature _ | Mty_functor _ | Mty_alias _) | None -> + match p with + Path.Pdot (p1, s, n) -> + (* For module aliases *) + let p1' = Env.normalize_path None env p1 in + if Path.same p1 p1' then p else + normalize_package_path env (Path.Pdot (p1', s, n)) + | _ -> p + +let rec update_level env level expand ty = + let ty = repr ty in + if ty.level > level then begin + begin match Env.gadt_instance_level env ty with + Some lv -> if level < lv then raise (Unify [(ty, newvar2 level)]) + | None -> () + end; + match ty.desc with + Tconstr(p, _tl, _abbrev) when level < get_level env p -> + (* Try first to replace an abbreviation by its expansion. *) + begin try + (* if is_newtype env p then raise Cannot_expand; *) + link_type ty (!forward_try_expand_once env ty); + update_level env level expand ty + with Cannot_expand -> + (* +++ Levels should be restored... *) + (* Format.printf "update_level: %i < %i@." 
level (get_level env p); *) + if level < get_level env p then raise (Unify [(ty, newvar2 level)]); + iter_type_expr (update_level env level expand) ty + end + | Tconstr(_, _ :: _, _) when expand -> + begin try + link_type ty (!forward_try_expand_once env ty); + update_level env level expand ty + with Cannot_expand -> + set_level ty level; + iter_type_expr (update_level env level expand) ty + end + | Tpackage (p, nl, tl) when level < Path.binding_time p -> + let p' = normalize_package_path env p in + if Path.same p p' then raise (Unify [(ty, newvar2 level)]); + log_type ty; ty.desc <- Tpackage (p', nl, tl); + update_level env level expand ty + | Tobject(_, ({contents=Some(p, _tl)} as nm)) + when level < get_level env p -> + set_name nm None; + update_level env level expand ty + | Tvariant row -> + let row = row_repr row in + begin match row.row_name with + | Some (p, _tl) when level < get_level env p -> + log_type ty; + ty.desc <- Tvariant {row with row_name = None} + | _ -> () + end; + set_level ty level; + iter_type_expr (update_level env level expand) ty + | Tfield(lab, _, ty1, _) + when lab = dummy_method && (repr ty1).level > level -> + raise (Unify [(ty1, newvar2 level)]) + | _ -> + set_level ty level; + (* XXX what about abbreviations in Tconstr ? *) + iter_type_expr (update_level env level expand) ty + end + +(* First try without expanding, then expand everything, + to avoid combinatorial blow-up *) +let update_level env level ty = + let ty = repr ty in + if ty.level > level then begin + let snap = snapshot () in + try + update_level env level false ty + with Unify _ -> + backtrack snap; + update_level env level true ty + end + +(* Generalize and lower levels of contravariant branches simultaneously *) + +let rec generalize_expansive env var_level visited ty = + let ty = repr ty in + if ty.level = generic_level || ty.level <= var_level then () else + if not (Hashtbl.mem visited ty.id) then begin + Hashtbl.add visited ty.id (); + match ty.desc with + Tconstr (path, tyl, abbrev) -> + let variance = + try (Env.find_type path env).type_variance + with Not_found -> List.map (fun _ -> Variance.may_inv) tyl in + abbrev := Mnil; + List.iter2 + (fun v t -> + if Variance.(mem May_weak v) + then generalize_structure var_level t + else generalize_expansive env var_level visited t) + variance tyl + | Tpackage (_, _, tyl) -> + List.iter (generalize_structure var_level) tyl + | Tarrow (_, t1, t2, _) -> + generalize_structure var_level t1; + generalize_expansive env var_level visited t2 + | _ -> + iter_type_expr (generalize_expansive env var_level visited) ty + end + +let generalize_expansive env ty = + simple_abbrevs := Mnil; + try + generalize_expansive env !nongen_level (Hashtbl.create 7) ty + with Unify ([_, ty'] as tr) -> + raise (Unify ((ty, ty') :: tr)) + +let generalize_global ty = generalize_structure !global_level ty +let generalize_structure ty = generalize_structure !current_level ty + +(* Correct the levels of type [ty]. 
*) +let correct_levels ty = + duplicate_type ty + +(* Only generalize the type ty0 in ty *) +let limited_generalize ty0 ty = + let ty0 = repr ty0 in + + let graph = Hashtbl.create 17 in + let idx = ref lowest_level in + let roots = ref [] in + + let rec inverse pty ty = + let ty = repr ty in + if (ty.level > !current_level) || (ty.level = generic_level) then begin + decr idx; + Hashtbl.add graph !idx (ty, ref pty); + if (ty.level = generic_level) || (ty == ty0) then + roots := ty :: !roots; + set_level ty !idx; + iter_type_expr (inverse [ty]) ty + end else if ty.level < lowest_level then begin + let (_, parents) = Hashtbl.find graph ty.level in + parents := pty @ !parents + end + + and generalize_parents ty = + let idx = ty.level in + if idx <> generic_level then begin + set_level ty generic_level; + List.iter generalize_parents !(snd (Hashtbl.find graph idx)); + (* Special case for rows: must generalize the row variable *) + match ty.desc with + Tvariant row -> + let more = row_more row in + let lv = more.level in + if (lv < lowest_level || lv > !current_level) + && lv <> generic_level then set_level more generic_level + | _ -> () + end + in + + inverse [] ty; + if ty0.level < lowest_level then + iter_type_expr (inverse []) ty0; + List.iter generalize_parents !roots; + Hashtbl.iter + (fun _ (ty, _) -> + if ty.level <> generic_level then set_level ty !current_level) + graph + + +(* Compute statically the free univars of all nodes in a type *) +(* This avoids doing it repeatedly during instantiation *) + +type inv_type_expr = + { inv_type : type_expr; + mutable inv_parents : inv_type_expr list } + +let rec inv_type hash pty ty = + let ty = repr ty in + try + let inv = TypeHash.find hash ty in + inv.inv_parents <- pty @ inv.inv_parents + with Not_found -> + let inv = { inv_type = ty; inv_parents = pty } in + TypeHash.add hash ty inv; + iter_type_expr (inv_type hash [inv]) ty + +let compute_univars ty = + let inverted = TypeHash.create 17 in + inv_type inverted [] ty; + let node_univars = TypeHash.create 17 in + let rec add_univar univ inv = + match inv.inv_type.desc with + Tpoly (_ty, tl) when List.memq univ (List.map repr tl) -> () + | _ -> + try + let univs = TypeHash.find node_univars inv.inv_type in + if not (TypeSet.mem univ !univs) then begin + univs := TypeSet.add univ !univs; + List.iter (add_univar univ) inv.inv_parents + end + with Not_found -> + TypeHash.add node_univars inv.inv_type (ref(TypeSet.singleton univ)); + List.iter (add_univar univ) inv.inv_parents + in + TypeHash.iter (fun ty inv -> if is_Tunivar ty then add_univar ty inv) + inverted; + fun ty -> + try !(TypeHash.find node_univars ty) with Not_found -> TypeSet.empty + + + (*******************) + (* Instantiation *) + (*******************) + + +let rec find_repr p1 = + function + Mnil -> + None + | Mcons (Public, p2, ty, _, _) when Path.same p1 p2 -> + Some ty + | Mcons (_, _, _, _, rem) -> + find_repr p1 rem + | Mlink {contents = rem} -> + find_repr p1 rem + +(* + Generic nodes are duplicated, while non-generic nodes are left + as-is. + During instantiation, the description of a generic node is first + replaced by a link to a stub ([Tsubst (newvar ())]). Once the + copy is made, it replaces the stub. + After instantiation, the description of generic node, which was + stored by [save_desc], must be put back, using [cleanup_types]. +*) + +let abbreviations = ref (ref Mnil) + (* Abbreviation memorized. 
*) + +(* partial: we may not wish to copy the non generic types + before we call type_pat *) +let rec copy ?env ?partial ?keep_names ty = + let copy = copy ?env ?partial ?keep_names in + let ty = repr ty in + match ty.desc with + Tsubst ty -> ty + | _ -> + if ty.level <> generic_level && partial = None then ty else + (* We only forget types that are non generic and do not contain + free univars *) + let forget = + if ty.level = generic_level then generic_level else + match partial with + None -> assert false + | Some (free_univars, keep) -> + if TypeSet.is_empty (free_univars ty) then + if keep then ty.level else !current_level + else generic_level + in + if forget <> generic_level then newty2 forget (Tvar None) else + let desc = ty.desc in + save_desc ty desc; + let t = newvar() in (* Stub *) + begin match env with + Some env when Env.has_local_constraints env -> + begin match Env.gadt_instance_level env ty with + Some lv -> Env.add_gadt_instances env lv [t] + | None -> () + end + | _ -> () + end; + ty.desc <- Tsubst t; + t.desc <- + begin match desc with + | Tconstr (p, tl, _) -> + let abbrevs = proper_abbrevs p tl !abbreviations in + begin match find_repr p !abbrevs with + Some ty when repr ty != t -> + Tlink ty + | _ -> + (* + One must allocate a new reference, so that abbrevia- + tions belonging to different branches of a type are + independent. + Moreover, a reference containing a [Mcons] must be + shared, so that the memorized expansion of an abbrevi- + ation can be released by changing the content of just + one reference. + *) + Tconstr (p, List.map copy tl, + ref (match !(!abbreviations) with + Mcons _ -> Mlink !abbreviations + | abbrev -> abbrev)) + end + | Tvariant row0 -> + let row = row_repr row0 in + let more = repr row.row_more in + (* We must substitute in a subtle way *) + (* Tsubst takes a tuple containing the row var and the variant *) + begin match more.desc with + Tsubst {desc = Ttuple [_;ty2]} -> + (* This variant type has been already copied *) + ty.desc <- Tsubst ty2; (* avoid Tlink in the new type *) + Tlink ty2 + | _ -> + (* If the row variable is not generic, we must keep it *) + let keep = more.level <> generic_level in + let more' = + match more.desc with + Tsubst ty -> ty + | Tconstr _ | Tnil -> + if keep then save_desc more more.desc; + copy more + | Tvar _ | Tunivar _ -> + save_desc more more.desc; + if keep then more else newty more.desc + | _ -> assert false + in + let row = + match repr more' with (* PR#6163 *) + {desc=Tconstr _} when not row.row_fixed -> + {row with row_fixed = true} + | _ -> row + in + (* Open row if partial for pattern and contains Reither *) + let more', row = + match partial with + Some (free_univars, false) -> + let more' = + if more.id != more'.id then more' else + let lv = if keep then more.level else !current_level in + newty2 lv (Tvar None) + in + let not_reither (_, f) = + match row_field_repr f with + Reither _ -> false + | _ -> true + in + if row.row_closed && not row.row_fixed + && TypeSet.is_empty (free_univars ty) + && not (List.for_all not_reither row.row_fields) then + (more', + {row_fields = List.filter not_reither row.row_fields; + row_more = more'; row_bound = (); + row_closed = false; row_fixed = false; row_name = None}) + else (more', row) + | _ -> (more', row) + in + (* Register new type first for recursion *) + more.desc <- Tsubst(newgenty(Ttuple[more';t])); + (* Return a new copy *) + Tvariant (copy_row copy true row keep more') + end + | Tfield (_p, k, _ty1, ty2) -> + begin match field_kind_repr k with + Fabsent 
-> Tlink (copy ty2) + | Fpresent -> copy_type_desc copy desc + | Fvar r -> + dup_kind r; + copy_type_desc copy desc + end + | Tobject (ty1, _) when partial <> None -> + Tobject (copy ty1, ref None) + | _ -> copy_type_desc ?keep_names copy desc + end; + t + +let simple_copy t = copy t + +(**** Variants of instantiations ****) + +let gadt_env env = + if Env.has_local_constraints env + then Some env + else None + +let instance ?partial env sch = + let env = gadt_env env in + let partial = + match partial with + None -> None + | Some keep -> Some (compute_univars sch, keep) + in + let ty = copy ?env ?partial sch in + cleanup_types (); + ty + +let instance_def sch = + let ty = copy sch in + cleanup_types (); + ty + +let generic_instance env sch = + let old = !current_level in + current_level := generic_level; + let ty = instance env sch in + current_level := old; + ty + +let instance_list env schl = + let env = gadt_env env in + let tyl = List.map (fun t -> copy ?env t) schl in + cleanup_types (); + tyl + +let reified_var_counter = ref Vars.empty +let reset_reified_var_counter () = + reified_var_counter := Vars.empty + +(* names given to new type constructors. + Used for existential types and + local constraints *) +let get_new_abstract_name s = + let index = + try Vars.find s !reified_var_counter + 1 + with Not_found -> 0 in + reified_var_counter := Vars.add s index !reified_var_counter; + if index = 0 && s <> "" && s.[String.length s - 1] <> '$' then s else + Printf.sprintf "%s%d" s index + +let new_declaration newtype manifest = + { + type_params = []; + type_arity = 0; + type_kind = Type_abstract; + type_private = Public; + type_manifest = manifest; + type_variance = []; + type_newtype_level = newtype; + type_loc = Location.none; + type_attributes = []; + type_immediate = false; + type_unboxed = unboxed_false_default_false; + } + +let instance_constructor ?in_pattern cstr = + begin match in_pattern with + | None -> () + | Some (env, newtype_lev) -> + let process existential = + let decl = new_declaration (Some (newtype_lev, newtype_lev)) None in + let name = + match repr existential with + {desc = Tvar (Some name)} -> "$" ^ cstr.cstr_name ^ "_'" ^ name + | _ -> "$" ^ cstr.cstr_name + in + let path = Path.Pident (Ident.create (get_new_abstract_name name)) in + let new_env = Env.add_local_type path decl !env in + env := new_env; + let to_unify = newty (Tconstr (path,[],ref Mnil)) in + let tv = copy existential in + assert (is_Tvar tv); + link_type tv to_unify + in + List.iter process cstr.cstr_existentials + end; + let ty_res = copy cstr.cstr_res in + let ty_args = List.map simple_copy cstr.cstr_args in + cleanup_types (); + (ty_args, ty_res) + +let instance_parameterized_type ?keep_names sch_args sch = + let ty_args = List.map (fun t -> copy ?keep_names t) sch_args in + let ty = copy sch in + cleanup_types (); + (ty_args, ty) + +let instance_parameterized_type_2 sch_args sch_lst sch = + let ty_args = List.map simple_copy sch_args in + let ty_lst = List.map simple_copy sch_lst in + let ty = copy sch in + cleanup_types (); + (ty_args, ty_lst, ty) + +let map_kind f = function + | Type_abstract -> Type_abstract + | Type_open -> Type_open + | Type_variant cl -> + Type_variant ( + List.map + (fun c -> + {c with + cd_args = map_type_expr_cstr_args f c.cd_args; + cd_res = may_map f c.cd_res + }) + cl) + | Type_record (fl, rr) -> + Type_record ( + List.map + (fun l -> + {l with ld_type = f l.ld_type} + ) fl, rr) + + +let instance_declaration decl = + let decl = + {decl with type_params = List.map 
simple_copy decl.type_params; + type_manifest = may_map simple_copy decl.type_manifest; + type_kind = map_kind simple_copy decl.type_kind; + } + in + cleanup_types (); + decl + +let instance_class params cty = + let rec copy_class_type = + function + Cty_constr (path, tyl, cty) -> + Cty_constr (path, List.map simple_copy tyl, copy_class_type cty) + | Cty_signature sign -> + Cty_signature + {csig_self = copy sign.csig_self; + csig_vars = + Vars.map (function (m, v, ty) -> (m, v, copy ty)) sign.csig_vars; + csig_concr = sign.csig_concr; + csig_inher = + List.map (fun (p,tl) -> (p, List.map simple_copy tl)) + sign.csig_inher} + | Cty_arrow (l, ty, cty) -> + Cty_arrow (l, copy ty, copy_class_type cty) + in + let params' = List.map simple_copy params in + let cty' = copy_class_type cty in + cleanup_types (); + (params', cty') + +(**** Instantiation for types with free universal variables ****) + +let rec diff_list l1 l2 = + if l1 == l2 then [] else + match l1 with [] -> invalid_arg "Ctype.diff_list" + | a :: l1 -> a :: diff_list l1 l2 + +let conflicts free bound = + let bound = List.map repr bound in + TypeSet.exists (fun t -> List.memq (repr t) bound) free + +let delayed_copy = ref [] + (* copying to do later *) + +(* Copy without sharing until there are no free univars left *) +(* all free univars must be included in [visited] *) +let rec copy_sep fixed free bound visited ty = + let ty = repr ty in + let univars = free ty in + if TypeSet.is_empty univars then + if ty.level <> generic_level then ty else + let t = newvar () in + delayed_copy := + lazy (t.desc <- Tlink (copy ty)) + :: !delayed_copy; + t + else try + let t, bound_t = List.assq ty visited in + let dl = if is_Tunivar ty then [] else diff_list bound bound_t in + if dl <> [] && conflicts univars dl then raise Not_found; + t + with Not_found -> begin + let t = newvar() in (* Stub *) + let visited = + match ty.desc with + Tarrow _ | Ttuple _ | Tvariant _ | Tconstr _ | Tobject _ | Tpackage _ -> + (ty,(t,bound)) :: visited + | _ -> visited in + let copy_rec = copy_sep fixed free bound visited in + t.desc <- + begin match ty.desc with + | Tvariant row0 -> + let row = row_repr row0 in + let more = repr row.row_more in + (* We shall really check the level on the row variable *) + let keep = is_Tvar more && more.level <> generic_level in + let more' = copy_rec more in + let fixed' = fixed && is_Tvar (repr more') in + let row = copy_row copy_rec fixed' row keep more' in + Tvariant row + | Tpoly (t1, tl) -> + let tl = List.map repr tl in + let tl' = List.map (fun t -> newty t.desc) tl in + let bound = tl @ bound in + let visited = + List.map2 (fun ty t -> ty,(t,bound)) tl tl' @ visited in + Tpoly (copy_sep fixed free bound visited t1, tl') + | _ -> copy_type_desc copy_rec ty.desc + end; + t + end + +let instance_poly ?(keep_names=false) fixed univars sch = + let univars = List.map repr univars in + let copy_var ty = + match ty.desc with + Tunivar name -> if keep_names then newty (Tvar name) else newvar () + | _ -> assert false + in + let vars = List.map copy_var univars in + let pairs = List.map2 (fun u v -> u, (v, [])) univars vars in + delayed_copy := []; + let ty = copy_sep fixed (compute_univars sch) [] pairs sch in + List.iter Lazy.force !delayed_copy; + delayed_copy := []; + cleanup_types (); + vars, ty + +let instance_label fixed lbl = + let ty_res = copy lbl.lbl_res in + let vars, ty_arg = + match repr lbl.lbl_arg with + {desc = Tpoly (ty, tl)} -> + instance_poly fixed tl ty + | _ -> + [], copy lbl.lbl_arg + in + cleanup_types (); + 
(vars, ty_arg, ty_res) + +(**** Instantiation with parameter substitution ****) + +let unify' = (* Forward declaration *) + ref (fun _env _ty1 _ty2 -> raise (Unify [])) + +let subst env level priv abbrev ty params args body = + if List.length params <> List.length args then raise (Unify []); + let old_level = !current_level in + current_level := level; + try + let body0 = newvar () in (* Stub *) + begin match ty with + None -> () + | Some ({desc = Tconstr (path, tl, _)} as ty) -> + let abbrev = proper_abbrevs path tl abbrev in + memorize_abbrev abbrev priv path ty body0 + | _ -> + assert false + end; + abbreviations := abbrev; + let (params', body') = instance_parameterized_type params body in + abbreviations := ref Mnil; + !unify' env body0 body'; + List.iter2 (!unify' env) params' args; + current_level := old_level; + body' + with Unify _ as exn -> + current_level := old_level; + raise exn + +(* + Only the shape of the type matters, not whether it is generic or + not. [generic_level] might be somewhat slower, but it ensures + invariants on types are enforced (decreasing levels), and we don't + care about efficiency here. +*) +let apply env params body args = + try + subst env generic_level Public (ref Mnil) None params args body + with + Unify _ -> raise Cannot_apply + +let () = Subst.ctype_apply_env_empty := apply Env.empty + + (****************************) + (* Abbreviation expansion *) + (****************************) + +(* + If the environment has changed, memorized expansions might not + be correct anymore, and so we flush the cache. This is safe but + quite pessimistic: it would be enough to flush the cache when a + type or module definition is overridden in the environment. +*) +let previous_env = ref Env.empty +(*let string_of_kind = function Public -> "public" | Private -> "private"*) +let check_abbrev_env env = + if env != !previous_env then begin + (* prerr_endline "cleanup expansion cache"; *) + cleanup_abbrev (); + previous_env := env + end + + +(* Expand an abbreviation. The expansion is memorized. *) +(* + Assume the level is greater than the path binding time of the + expanded abbreviation. +*) +(* + An abbreviation expansion will fail in either of these cases: + 1. The type constructor does not correspond to a manifest type. + 2. The type constructor is defined in an external file, and this + file is not in the path (missing -I options). + 3. The type constructor is not in the "local" environment. This can + happens when a non-generic type variable has been instantiated + afterwards to the not yet defined type constructor. (Actually, + this cannot happen at the moment due to the strong constraints + between type levels and constructor binding time.) + 4. The expansion requires the expansion of another abbreviation, + and this other expansion fails. +*) +let expand_abbrev_gen kind find_type_expansion env ty = + check_abbrev_env env; + match ty with + {desc = Tconstr (path, args, abbrev); level = level} -> + let lookup_abbrev = proper_abbrevs path args abbrev in + begin match find_expans kind path !lookup_abbrev with + Some ty' -> + (* prerr_endline + ("found a "^string_of_kind kind^" expansion for "^Path.name path);*) + if level <> generic_level then + begin try + update_level env level ty' + with Unify _ -> + (* XXX This should not happen. 
+ However, levels are not correctly restored after a + typing error *) + () + end; + let ty' = repr ty' in + (* assert (ty != ty'); *) (* PR#7324 *) + ty' + | None -> + match find_type_expansion path env with + | exception Not_found -> + (* another way to expand is to normalize the path itself *) + let path' = Env.normalize_path None env path in + if Path.same path path' then raise Cannot_expand + else newty2 level (Tconstr (path', args, abbrev)) + | (params, body, lv) -> + (* prerr_endline + ("add a "^string_of_kind kind^" expansion for "^Path.name path);*) + let ty' = subst env level kind abbrev (Some ty) params args body in + (* For gadts, remember type as non exportable *) + (* The ambiguous level registered for ty' should be the highest *) + if !trace_gadt_instances then begin + match max lv (Env.gadt_instance_level env ty) with + None -> () + | Some lv -> + if level < lv then raise (Unify [(ty, newvar2 level)]); + Env.add_gadt_instances env lv [ty; ty'] + end; + ty' + end + | _ -> + assert false + +(* Expand respecting privacy *) +let expand_abbrev env ty = + expand_abbrev_gen Public Env.find_type_expansion env ty + +(* Expand once the head of a type *) +let expand_head_once env ty = + try expand_abbrev env (repr ty) with Cannot_expand -> assert false + +(* Check whether a type can be expanded *) +let safe_abbrev env ty = + let snap = Btype.snapshot () in + try ignore (expand_abbrev env ty); true + with Cannot_expand | Unify _ -> + Btype.backtrack snap; + false + +(* Expand the head of a type once. + Raise Cannot_expand if the type cannot be expanded. + May raise Unify, if a recursion was hidden in the type. *) +let try_expand_once env ty = + let ty = repr ty in + match ty.desc with + Tconstr _ -> repr (expand_abbrev env ty) + | _ -> raise Cannot_expand + +(* This one only raises Cannot_expand *) +let try_expand_safe env ty = + let snap = Btype.snapshot () in + try try_expand_once env ty + with Unify _ -> + Btype.backtrack snap; raise Cannot_expand + +(* Fully expand the head of a type. *) +let rec try_expand_head try_once env ty = + let ty' = try_once env ty in + try try_expand_head try_once env ty' + with Cannot_expand -> ty' + +let try_expand_head try_once env ty = + let ty' = try_expand_head try_once env ty in + begin match Env.gadt_instance_level env ty' with + None -> () + | Some lv -> Env.add_gadt_instance_chain env lv ty + end; + ty' + +(* Unsafe full expansion, may raise Unify. *) +let expand_head_unif env ty = + try try_expand_head try_expand_once env ty with Cannot_expand -> repr ty + +(* Safe version of expand_head, never fails *) +let expand_head env ty = + try try_expand_head try_expand_safe env ty with Cannot_expand -> repr ty + +let _ = forward_try_expand_once := try_expand_safe + + +(* Expand until we find a non-abstract type declaration *) + +let rec extract_concrete_typedecl env ty = + let ty = repr ty in + match ty.desc with + Tconstr (p, _, _) -> + let decl = Env.find_type p env in + if decl.type_kind <> Type_abstract then (p, p, decl) else + let ty = + try try_expand_once env ty with Cannot_expand -> raise Not_found + in + let (_, p', decl) = extract_concrete_typedecl env ty in + (p, p', decl) + | _ -> raise Not_found + +(* Implementing function [expand_head_opt], the compiler's own version of + [expand_head] used for type-based optimisations. 
+ [expand_head_opt] uses [Env.find_type_expansion_opt] to access the + manifest type information of private abstract data types which is + normally hidden to the type-checker out of the implementation module of + the private abbreviation. *) + +let expand_abbrev_opt = + expand_abbrev_gen Private Env.find_type_expansion_opt + +let try_expand_once_opt env ty = + let ty = repr ty in + match ty.desc with + Tconstr _ -> repr (expand_abbrev_opt env ty) + | _ -> raise Cannot_expand + +let rec try_expand_head_opt env ty = + let ty' = try_expand_once_opt env ty in + begin try + try_expand_head_opt env ty' + with Cannot_expand -> + ty' + end + +let expand_head_opt env ty = + let snap = Btype.snapshot () in + try try_expand_head_opt env ty + with Cannot_expand | Unify _ -> (* expand_head shall never fail *) + Btype.backtrack snap; + repr ty + +(* Make sure that the type parameters of the type constructor [ty] + respect the type constraints *) +let enforce_constraints env ty = + match ty with + {desc = Tconstr (path, args, _abbrev); level = level} -> + begin try + let decl = Env.find_type path env in + ignore + (subst env level Public (ref Mnil) None decl.type_params args + (newvar2 level)) + with Not_found -> () + end + | _ -> + assert false + +(* Recursively expand the head of a type. + Also expand #-types. *) +let full_expand env ty = + let ty = repr (expand_head env ty) in + match ty.desc with + Tobject (fi, {contents = Some (_, v::_)}) when is_Tvar (repr v) -> + newty2 ty.level (Tobject (fi, ref None)) + | _ -> + ty + +(* + Check whether the abbreviation expands to a well-defined type. + During the typing of a class, abbreviations for correspondings + types expand to non-generic types. +*) +let generic_abbrev env path = + try + let (_, body, _) = Env.find_type_expansion path env in + (repr body).level = generic_level + with + Not_found -> + false + +let generic_private_abbrev env path = + try + match Env.find_type path env with + {type_kind = Type_abstract; + type_private = Private; + type_manifest = Some body} -> + (repr body).level = generic_level + | _ -> false + with Not_found -> false + +let is_contractive env p = + try + let decl = Env.find_type p env in + in_pervasives p && decl.type_manifest = None || is_datatype decl + with Not_found -> false + + + (*****************) + (* Occur check *) + (*****************) + + +exception Occur + +let rec occur_rec env allow_recursive visited ty0 = function + | {desc=Tlink ty} -> + occur_rec env allow_recursive visited ty0 ty + | ty -> + if ty == ty0 then raise Occur; + match ty.desc with + Tconstr(p, _tl, _abbrev) -> + if allow_recursive && is_contractive env p then () else + begin try + if TypeSet.mem ty visited then raise Occur; + let visited = TypeSet.add ty visited in + iter_type_expr (occur_rec env allow_recursive visited ty0) ty + with Occur -> try + let ty' = try_expand_head try_expand_once env ty in + (* This call used to be inlined, but there seems no reason for it. + Message was referring to change in rev. 1.58 of the CVS repo. 
*) + occur_rec env allow_recursive visited ty0 ty' + with Cannot_expand -> + raise Occur + end + | Tobject _ | Tvariant _ -> + () + | _ -> + if allow_recursive || TypeSet.mem ty visited then () else begin + let visited = TypeSet.add ty visited in + iter_type_expr (occur_rec env allow_recursive visited ty0) ty + end + +let type_changed = ref false (* trace possible changes to the studied type *) + +let merge r b = if b then r := true + +let occur env ty0 ty = + let allow_recursive = !Clflags.recursive_types || !umode = Pattern in + let old = !type_changed in + try + while + type_changed := false; + occur_rec env allow_recursive TypeSet.empty ty0 ty; + !type_changed + do () (* prerr_endline "changed" *) done; + merge type_changed old + with exn -> + merge type_changed old; + raise (match exn with Occur -> Unify [] | _ -> exn) + +let occur_in env ty0 t = + try occur env ty0 t; false with Unify _ -> true + +(* Check that a local constraint is well-founded *) +(* PR#6405: not needed since we allow recursion and work on normalized types *) +(* PR#6992: we actually need it for contractiveness *) +(* This is a simplified version of occur, only for the rectypes case *) + +let rec local_non_recursive_abbrev strict visited env p ty = + (*Format.eprintf "@[Check %s =@ %a@]@." (Path.name p) !Btype.print_raw ty;*) + let ty = repr ty in + if not (List.memq ty visited) then begin + match ty.desc with + Tconstr(p', args, _abbrev) -> + if Path.same p p' then raise Occur; + if not strict && is_contractive env p' then () else + let visited = ty :: visited in + begin try + (* try expanding, since [p] could be hidden *) + local_non_recursive_abbrev strict visited env p + (try_expand_head try_expand_once env ty) + with Cannot_expand -> + let params = + try (Env.find_type p' env).type_params + with Not_found -> args + in + List.iter2 + (fun tv ty -> + let strict = strict || not (is_Tvar (repr tv)) in + local_non_recursive_abbrev strict visited env p ty) + params args + end + | _ -> + if strict then (* PR#7374 *) + let visited = ty :: visited in + iter_type_expr (local_non_recursive_abbrev true visited env p) ty + end + +let local_non_recursive_abbrev env p ty = + try (* PR#7397: need to check trace_gadt_instances *) + wrap_trace_gadt_instances env + (local_non_recursive_abbrev false [] env p) ty; + true + with Occur -> false + + + (*****************************) + (* Polymorphic Unification *) + (*****************************) + +(* Since we cannot duplicate universal variables, unification must + be done at meta-level, using bindings in univar_pairs *) +let rec unify_univar t1 t2 = function + (cl1, cl2) :: rem -> + let find_univ t cl = + try + let (_, r) = List.find (fun (t',_) -> t == repr t') cl in + Some r + with Not_found -> None + in + begin match find_univ t1 cl1, find_univ t2 cl2 with + Some {contents=Some t'2}, Some _ when t2 == repr t'2 -> + () + | Some({contents=None} as r1), Some({contents=None} as r2) -> + set_univar r1 t2; set_univar r2 t1 + | None, None -> + unify_univar t1 t2 rem + | _ -> + raise (Unify []) + end + | [] -> raise (Unify []) + +(* Test the occurrence of free univars in a type *) +(* that's way too expensive. 
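A per-call visited map bounds the traversal, but nothing is cached across calls.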
Must do some kind of caching *) +let occur_univar env ty = + let visited = ref TypeMap.empty in + let rec occur_rec bound ty = + let ty = repr ty in + if ty.level >= lowest_level && + if TypeSet.is_empty bound then + (ty.level <- pivot_level - ty.level; true) + else try + let bound' = TypeMap.find ty !visited in + if TypeSet.exists (fun x -> not (TypeSet.mem x bound)) bound' then + (visited := TypeMap.add ty (TypeSet.inter bound bound') !visited; + true) + else false + with Not_found -> + visited := TypeMap.add ty bound !visited; + true + then + match ty.desc with + Tunivar _ -> + if not (TypeSet.mem ty bound) then raise (Unify [ty, newgenvar ()]) + | Tpoly (ty, tyl) -> + let bound = List.fold_right TypeSet.add (List.map repr tyl) bound in + occur_rec bound ty + | Tconstr (_, [], _) -> () + | Tconstr (p, tl, _) -> + begin try + let td = Env.find_type p env in + List.iter2 + (fun t v -> + if Variance.(mem May_pos v || mem May_neg v) + then occur_rec bound t) + tl td.type_variance + with Not_found -> + List.iter (occur_rec bound) tl + end + | _ -> iter_type_expr (occur_rec bound) ty + in + try + occur_rec TypeSet.empty ty; unmark_type ty + with exn -> + unmark_type ty; raise exn + +(* Grouping univars by families according to their binders *) +let add_univars = + List.fold_left (fun s (t,_) -> TypeSet.add (repr t) s) + +let get_univar_family univar_pairs univars = + if univars = [] then TypeSet.empty else + let insert s = function + cl1, (_::_ as cl2) -> + if List.exists (fun (t1,_) -> TypeSet.mem (repr t1) s) cl1 then + add_univars s cl2 + else s + | _ -> s + in + let s = List.fold_right TypeSet.add univars TypeSet.empty in + List.fold_left insert s univar_pairs + +(* Whether a family of univars escapes from a type *) +let univars_escape env univar_pairs vl ty = + let family = get_univar_family univar_pairs vl in + let visited = ref TypeSet.empty in + let rec occur t = + let t = repr t in + if TypeSet.mem t !visited then () else begin + visited := TypeSet.add t !visited; + match t.desc with + Tpoly (t, tl) -> + if List.exists (fun t -> TypeSet.mem (repr t) family) tl then () + else occur t + | Tunivar _ -> + if TypeSet.mem t family then raise Occur + | Tconstr (_, [], _) -> () + | Tconstr (p, tl, _) -> + begin try + let td = Env.find_type p env in + List.iter2 + (fun t v -> + if Variance.(mem May_pos v || mem May_neg v) then occur t) + tl td.type_variance + with Not_found -> + List.iter occur tl + end + | _ -> + iter_type_expr occur t + end + in + try occur ty; false with Occur -> true + +(* Wrapper checking that no variable escapes and updating univar_pairs *) +let enter_poly env univar_pairs t1 tl1 t2 tl2 f = + let old_univars = !univar_pairs in + let known_univars = + List.fold_left (fun s (cl,_) -> add_univars s cl) + TypeSet.empty old_univars + in + let tl1 = List.map repr tl1 and tl2 = List.map repr tl2 in + if List.exists (fun t -> TypeSet.mem t known_univars) tl1 && + univars_escape env old_univars tl1 (newty(Tpoly(t2,tl2))) + || List.exists (fun t -> TypeSet.mem t known_univars) tl2 && + univars_escape env old_univars tl2 (newty(Tpoly(t1,tl1))) + then raise (Unify []); + let cl1 = List.map (fun t -> t, ref None) tl1 + and cl2 = List.map (fun t -> t, ref None) tl2 in + univar_pairs := (cl1,cl2) :: (cl2,cl1) :: old_univars; + try let res = f t1 t2 in univar_pairs := old_univars; res + with exn -> univar_pairs := old_univars; raise exn + +let univar_pairs = ref [] + + + (*****************) + (* Unification *) + (*****************) + + + +let rec has_cached_expansion p abbrev = + match 
abbrev with + Mnil -> false + | Mcons(_, p', _, _, rem) -> Path.same p p' || has_cached_expansion p rem + | Mlink rem -> has_cached_expansion p !rem + +(**** Transform error trace ****) +(* +++ Move it to some other place ? *) + +let expand_trace env trace = + List.fold_right + (fun (t1, t2) rem -> + (repr t1, full_expand env t1)::(repr t2, full_expand env t2)::rem) + trace [] + +(* build a dummy variant type *) +let mkvariant fields closed = + newgenty + (Tvariant + {row_fields = fields; row_closed = closed; row_more = newvar(); + row_bound = (); row_fixed = false; row_name = None }) + +(**** Unification ****) + +(* Return whether [t0] occurs in [ty]. Objects are also traversed. *) +let deep_occur t0 ty = + let rec occur_rec ty = + let ty = repr ty in + if ty.level >= lowest_level then begin + if ty == t0 then raise Occur; + ty.level <- pivot_level - ty.level; + iter_type_expr occur_rec ty + end + in + try + occur_rec ty; unmark_type ty; false + with Occur -> + unmark_type ty; true + +(* + 1. When unifying two non-abbreviated types, one type is made a link + to the other. When unifying an abbreviated type with a + non-abbreviated type, the non-abbreviated type is made a link to + the other one. When unifying to abbreviated types, these two + types are kept distincts, but they are made to (temporally) + expand to the same type. + 2. Abbreviations with at least one parameter are systematically + expanded. The overhead does not seem too high, and that way + abbreviations where some parameters does not appear in the + expansion, such as ['a t = int], are correctly handled. In + particular, for this example, unifying ['a t] with ['b t] keeps + ['a] and ['b] distincts. (Is it really important ?) + 3. Unifying an abbreviation ['a t = 'a] with ['a] should not yield + ['a t as 'a]. Indeed, the type variable would otherwise be lost. + This problem occurs for abbreviations expanding to a type + variable, but also to many other constrained abbreviations (for + instance, [(< x : 'a > -> unit) t = ]). The solution is + that, if an abbreviation is unified with some subpart of its + parameters, then the parameter actually does not get + abbreviated. It would be possible to check whether some + information is indeed lost, but it probably does not worth it. +*) + +let newtype_level = ref None + +let get_newtype_level () = + match !newtype_level with + | None -> assert false + | Some x -> x + +(* a local constraint can be added only if the rhs + of the constraint does not contain any Tvars. 
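This includes the row variables of open polymorphic variants.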
+ They need to be removed using this function *) +let reify env t = + let newtype_level = get_newtype_level () in + let create_fresh_constr lev name = + let decl = new_declaration (Some (newtype_level, newtype_level)) None in + let name = match name with Some s -> "$'"^s | _ -> "$" in + let path = Path.Pident (Ident.create (get_new_abstract_name name)) in + let new_env = Env.add_local_type path decl !env in + let t = newty2 lev (Tconstr (path,[],ref Mnil)) in + env := new_env; + t + in + let visited = ref TypeSet.empty in + let rec iterator ty = + let ty = repr ty in + if TypeSet.mem ty !visited then () else begin + visited := TypeSet.add ty !visited; + match ty.desc with + Tvar o -> + let t = create_fresh_constr ty.level o in + link_type ty t; + if ty.level < newtype_level then + raise (Unify [t, newvar2 ty.level]) + | Tvariant r -> + let r = row_repr r in + if not (static_row r) then begin + if r.row_fixed then iterator (row_more r) else + let m = r.row_more in + match m.desc with + Tvar o -> + let t = create_fresh_constr m.level o in + let row = + {r with row_fields=[]; row_fixed=true; row_more = t} in + link_type m (newty2 m.level (Tvariant row)); + if m.level < newtype_level then + raise (Unify [t, newvar2 m.level]) + | _ -> assert false + end; + iter_row iterator r + | Tconstr (p, _, _) when is_object_type p -> + iter_type_expr iterator (full_expand !env ty) + | _ -> + iter_type_expr iterator ty + end + in + iterator t + +let is_newtype env p = + try + let decl = Env.find_type p env in + decl.type_newtype_level <> None && + decl.type_kind = Type_abstract && + decl.type_private = Public + with Not_found -> false + +let non_aliasable p decl = + (* in_pervasives p || (subsumed by in_current_module) *) + in_current_module p && decl.type_newtype_level = None + +let is_instantiable env p = + try + let decl = Env.find_type p env in + decl.type_kind = Type_abstract && + decl.type_private = Public && + decl.type_arity = 0 && + decl.type_manifest = None && + not (non_aliasable p decl) + with Not_found -> false + + +(* PR#7113: -safe-string should be a global property *) +let compatible_paths p1 p2 = + let open Predef in + Path.same p1 p2 || + Path.same p1 path_bytes && Path.same p2 path_string || + Path.same p1 path_string && Path.same p2 path_bytes + +(* Check for datatypes carefully; see PR#6348 *) +let rec expands_to_datatype env ty = + let ty = repr ty in + match ty.desc with + Tconstr (p, _, _) -> + begin try + is_datatype (Env.find_type p env) || + expands_to_datatype env (try_expand_once env ty) + with Not_found | Cannot_expand -> false + end + | _ -> false + +(* mcomp type_pairs subst env t1 t2 does not raise an + exception if it is possible that t1 and t2 are actually + equal, assuming the types in type_pairs are equal and + that the mapping subst holds. + Assumes that both t1 and t2 do not contain any tvars + and that both their objects and variants are closed + *) + +let rec mcomp type_pairs env t1 t2 = + if t1 == t2 then () else + let t1 = repr t1 in + let t2 = repr t2 in + if t1 == t2 then () else + match (t1.desc, t2.desc) with + | (Tvar _, _) + | (_, Tvar _) -> + () + | (Tconstr (p1, [], _), Tconstr (p2, [], _)) when Path.same p1 p2 -> + () + | _ -> + let t1' = expand_head_opt env t1 in + let t2' = expand_head_opt env t2 in + (* Expansion may have changed the representative of the types... 
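so take [repr] again before the physical equality check just below.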
*) + let t1' = repr t1' and t2' = repr t2' in + if t1' == t2' then () else + begin try TypePairs.find type_pairs (t1', t2') + with Not_found -> + TypePairs.add type_pairs (t1', t2') (); + match (t1'.desc, t2'.desc) with + (Tvar _, Tvar _) -> assert false + | (Tarrow (l1, t1, u1, _), Tarrow (l2, t2, u2, _)) + when l1 = l2 || not (is_optional l1 || is_optional l2) -> + mcomp type_pairs env t1 t2; + mcomp type_pairs env u1 u2; + | (Ttuple tl1, Ttuple tl2) -> + mcomp_list type_pairs env tl1 tl2 + | (Tconstr (p1, tl1, _), Tconstr (p2, tl2, _)) -> + mcomp_type_decl type_pairs env p1 p2 tl1 tl2 + | (Tconstr (p, _, _), _) | (_, Tconstr (p, _, _)) -> + begin try + let decl = Env.find_type p env in + if non_aliasable p decl || is_datatype decl then raise (Unify []) + with Not_found -> () + end + (* + | (Tpackage (p1, n1, tl1), Tpackage (p2, n2, tl2)) when n1 = n2 -> + mcomp_list type_pairs env tl1 tl2 + *) + | (Tpackage _, Tpackage _) -> () + | (Tvariant row1, Tvariant row2) -> + mcomp_row type_pairs env row1 row2 + | (Tobject (fi1, _), Tobject (fi2, _)) -> + mcomp_fields type_pairs env fi1 fi2 + | (Tfield _, Tfield _) -> (* Actually unused *) + mcomp_fields type_pairs env t1' t2' + | (Tnil, Tnil) -> + () + | (Tpoly (t1, []), Tpoly (t2, [])) -> + mcomp type_pairs env t1 t2 + | (Tpoly (t1, tl1), Tpoly (t2, tl2)) -> + enter_poly env univar_pairs t1 tl1 t2 tl2 + (mcomp type_pairs env) + | (Tunivar _, Tunivar _) -> + unify_univar t1' t2' !univar_pairs + | (_, _) -> + raise (Unify []) + end + +and mcomp_list type_pairs env tl1 tl2 = + if List.length tl1 <> List.length tl2 then + raise (Unify []); + List.iter2 (mcomp type_pairs env) tl1 tl2 + +and mcomp_fields type_pairs env ty1 ty2 = + if not (concrete_object ty1 && concrete_object ty2) then assert false; + let (fields2, rest2) = flatten_fields ty2 in + let (fields1, rest1) = flatten_fields ty1 in + let (pairs, miss1, miss2) = associate_fields fields1 fields2 in + let has_present = + List.exists (fun (_, k, _) -> field_kind_repr k = Fpresent) in + mcomp type_pairs env rest1 rest2; + if has_present miss1 && (object_row ty2).desc = Tnil + || has_present miss2 && (object_row ty1).desc = Tnil then raise (Unify []); + List.iter + (function (_n, k1, t1, k2, t2) -> + mcomp_kind k1 k2; + mcomp type_pairs env t1 t2) + pairs + +and mcomp_kind k1 k2 = + let k1 = field_kind_repr k1 in + let k2 = field_kind_repr k2 in + match k1, k2 with + (Fpresent, Fabsent) + | (Fabsent, Fpresent) -> raise (Unify []) + | _ -> () + +and mcomp_row type_pairs env row1 row2 = + let row1 = row_repr row1 and row2 = row_repr row2 in + let r1, r2, pairs = merge_row_fields row1.row_fields row2.row_fields in + let cannot_erase (_,f) = + match row_field_repr f with + Rpresent _ -> true + | Rabsent | Reither _ -> false + in + if row1.row_closed && List.exists cannot_erase r2 + || row2.row_closed && List.exists cannot_erase r1 then raise (Unify []); + List.iter + (fun (_,f1,f2) -> + match row_field_repr f1, row_field_repr f2 with + | Rpresent None, (Rpresent (Some _) | Reither (_, _::_, _, _) | Rabsent) + | Rpresent (Some _), (Rpresent None | Reither (true, _, _, _) | Rabsent) + | (Reither (_, _::_, _, _) | Rabsent), Rpresent None + | (Reither (true, _, _, _) | Rabsent), Rpresent (Some _) -> + raise (Unify []) + | Rpresent(Some t1), Rpresent(Some t2) -> + mcomp type_pairs env t1 t2 + | Rpresent(Some t1), Reither(false, tl2, _, _) -> + List.iter (mcomp type_pairs env t1) tl2 + | Reither(false, tl1, _, _), Rpresent(Some t2) -> + List.iter (mcomp type_pairs env t2) tl1 + | _ -> ()) + pairs + +and 
mcomp_type_decl type_pairs env p1 p2 tl1 tl2 = + try + let decl = Env.find_type p1 env in + let decl' = Env.find_type p2 env in + if compatible_paths p1 p2 then begin + let inj = + try List.map Variance.(mem Inj) (Env.find_type p1 env).type_variance + with Not_found -> List.map (fun _ -> false) tl1 + in + List.iter2 + (fun i (t1,t2) -> if i then mcomp type_pairs env t1 t2) + inj (List.combine tl1 tl2) + end else if non_aliasable p1 decl && non_aliasable p2 decl' then + raise (Unify []) + else + match decl.type_kind, decl'.type_kind with + | Type_record (lst,r), Type_record (lst',r') when r = r' -> + mcomp_list type_pairs env tl1 tl2; + mcomp_record_description type_pairs env lst lst' + | Type_variant v1, Type_variant v2 -> + mcomp_list type_pairs env tl1 tl2; + mcomp_variant_description type_pairs env v1 v2 + | Type_open, Type_open -> + mcomp_list type_pairs env tl1 tl2 + | Type_abstract, Type_abstract -> () + | Type_abstract, _ when not (non_aliasable p1 decl)-> () + | _, Type_abstract when not (non_aliasable p2 decl') -> () + | _ -> raise (Unify []) + with Not_found -> () + +and mcomp_type_option type_pairs env t t' = + match t, t' with + None, None -> () + | Some t, Some t' -> mcomp type_pairs env t t' + | _ -> raise (Unify []) + +and mcomp_variant_description type_pairs env xs ys = + let rec iter = fun x y -> + match x, y with + | c1 :: xs, c2 :: ys -> + mcomp_type_option type_pairs env c1.cd_res c2.cd_res; + begin match c1.cd_args, c2.cd_args with + | Cstr_tuple l1, Cstr_tuple l2 -> mcomp_list type_pairs env l1 l2 + | Cstr_record l1, Cstr_record l2 -> + mcomp_record_description type_pairs env l1 l2 + | _ -> raise (Unify []) + end; + if Ident.name c1.cd_id = Ident.name c2.cd_id + then iter xs ys + else raise (Unify []) + | [],[] -> () + | _ -> raise (Unify []) + in + iter xs ys + +and mcomp_record_description type_pairs env = + let rec iter x y = + match x, y with + | l1 :: xs, l2 :: ys -> + mcomp type_pairs env l1.ld_type l2.ld_type; + if Ident.name l1.ld_id = Ident.name l2.ld_id && + l1.ld_mutable = l2.ld_mutable + then iter xs ys + else raise (Unify []) + | [], [] -> () + | _ -> raise (Unify []) + in + iter + +let mcomp env t1 t2 = + mcomp (TypePairs.create 4) env t1 t2 + +(* Real unification *) + +let find_lowest_level ty = + let lowest = ref generic_level in + let rec find ty = + let ty = repr ty in + if ty.level >= lowest_level then begin + if ty.level < !lowest then lowest := ty.level; + ty.level <- pivot_level - ty.level; + iter_type_expr find ty + end + in find ty; unmark_type ty; !lowest + +let find_newtype_level env path = + try match (Env.find_type path env).type_newtype_level with + Some x -> x + | None -> raise Not_found + with Not_found -> let lev = Path.binding_time path in (lev, lev) + +let add_gadt_equation env source destination = + if local_non_recursive_abbrev !env source destination then begin + let destination = duplicate_type destination in + let source_lev = find_newtype_level !env source in + let decl = new_declaration (Some source_lev) (Some destination) in + let newtype_level = get_newtype_level () in + env := Env.add_local_constraint source decl newtype_level !env; + cleanup_abbrev () + end + +let unify_eq_set = TypePairs.create 11 + +let order_type_pair t1 t2 = + if t1.id <= t2.id then (t1, t2) else (t2, t1) + +let add_type_equality t1 t2 = + TypePairs.add unify_eq_set (order_type_pair t1 t2) () + +let eq_package_path env p1 p2 = + Path.same p1 p2 || + Path.same (normalize_package_path env p1) (normalize_package_path env p2) + +let nondep_type' = ref (fun 
_ _ _ -> assert false) +let package_subtype = ref (fun _ _ _ _ _ _ _ -> assert false) + +let rec concat_longident lid1 = + let open Longident in + function + Lident s -> Ldot (lid1, s) + | Ldot (lid2, s) -> Ldot (concat_longident lid1 lid2, s) + | Lapply (lid2, lid) -> Lapply (concat_longident lid1 lid2, lid) + +let nondep_instance env level id ty = + let ty = !nondep_type' env id ty in + if level = generic_level then duplicate_type ty else + let old = !current_level in + current_level := level; + let ty = instance env ty in + current_level := old; + ty + +(* Find the type paths nl1 in the module type mty2, and add them to the + list (nl2, tl2). raise Not_found if impossible *) +let complete_type_list ?(allow_absent=false) env nl1 lv2 mty2 nl2 tl2 = + let id2 = Ident.create "Pkg" in + let env' = Env.add_module id2 mty2 env in + let rec complete nl1 ntl2 = + match nl1, ntl2 with + [], _ -> ntl2 + | n :: nl, (n2, _ as nt2) :: ntl' when n >= n2 -> + nt2 :: complete (if n = n2 then nl else nl1) ntl' + | n :: nl, _ -> + try + let path = + Env.lookup_type (concat_longident (Longident.Lident "Pkg") n) env' + in + match Env.find_type path env' with + {type_arity = 0; type_kind = Type_abstract; + type_private = Public; type_manifest = Some t2} -> + (n, nondep_instance env' lv2 id2 t2) :: complete nl ntl2 + | {type_arity = 0; type_kind = Type_abstract; + type_private = Public; type_manifest = None} when allow_absent -> + complete nl ntl2 + | _ -> raise Exit + with + | Not_found when allow_absent -> complete nl ntl2 + | Exit -> raise Not_found + in + complete nl1 (List.combine nl2 tl2) + +(* raise Not_found rather than Unify if the module types are incompatible *) +let unify_package env unify_list lv1 p1 n1 tl1 lv2 p2 n2 tl2 = + let ntl2 = complete_type_list env n1 lv2 (Mty_ident p2) n2 tl2 + and ntl1 = complete_type_list env n2 lv1 (Mty_ident p1) n1 tl1 in + unify_list (List.map snd ntl1) (List.map snd ntl2); + if eq_package_path env p1 p2 + || !package_subtype env p1 n1 tl1 p2 n2 tl2 + && !package_subtype env p2 n2 tl2 p1 n1 tl1 then () else raise Not_found + + +(* force unification in Reither when one side has a non-conjunctive type *) +let rigid_variants = ref false + +(* drop not force unification in Reither, even in fixed case + (not sound, only use it when checking exhaustiveness) *) +let passive_variants = ref false +let with_passive_variants f x = + if !passive_variants then f x else + match passive_variants := true; f x with + | r -> passive_variants := false; r + | exception e -> passive_variants := false; raise e + +let unify_eq t1 t2 = + t1 == t2 || + match !umode with + | Expression -> false + | Pattern -> + try TypePairs.find unify_eq_set (order_type_pair t1 t2); true + with Not_found -> false + +let unify1_var env t1 t2 = + assert (is_Tvar t1); + occur env t1 t2; + occur_univar env t2; + let d1 = t1.desc in + link_type t1 t2; + try + update_level env t1.level t2 + with Unify _ as e -> + t1.desc <- d1; + raise e + +let rec unify (env:Env.t ref) t1 t2 = + (* First step: special cases (optimizations) *) + if t1 == t2 then () else + let t1 = repr t1 in + let t2 = repr t2 in + if unify_eq t1 t2 then () else + let reset_tracing = check_trace_gadt_instances !env in + + try + type_changed := true; + begin match (t1.desc, t2.desc) with + (Tvar _, Tconstr _) when deep_occur t1 t2 -> + unify2 env t1 t2 + | (Tconstr _, Tvar _) when deep_occur t2 t1 -> + unify2 env t1 t2 + | (Tvar _, _) -> + unify1_var !env t1 t2 + | (_, Tvar _) -> + unify1_var !env t2 t1 + | (Tunivar _, Tunivar _) -> + 
unify_univar t1 t2 !univar_pairs; + update_level !env t1.level t2; + link_type t1 t2 + | (Tconstr (p1, [], a1), Tconstr (p2, [], a2)) + when Path.same p1 p2 (* && actual_mode !env = Old *) + (* This optimization assumes that t1 does not expand to t2 + (and conversely), so we fall back to the general case + when any of the types has a cached expansion. *) + && not (has_cached_expansion p1 !a1 + || has_cached_expansion p2 !a2) -> + update_level !env t1.level t2; + link_type t1 t2 + | (Tconstr (p1, [], _), Tconstr (p2, [], _)) + when Env.has_local_constraints !env + && is_newtype !env p1 && is_newtype !env p2 -> + (* Do not use local constraints more than necessary *) + begin try + if find_newtype_level !env p1 < find_newtype_level !env p2 then + unify env t1 (try_expand_once !env t2) + else + unify env (try_expand_once !env t1) t2 + with Cannot_expand -> + unify2 env t1 t2 + end + | _ -> + unify2 env t1 t2 + end; + reset_trace_gadt_instances reset_tracing; + with Unify trace -> + reset_trace_gadt_instances reset_tracing; + raise (Unify ((t1, t2)::trace)) + +and unify2 env t1 t2 = + (* Second step: expansion of abbreviations *) + (* Expansion may change the representative of the types. *) + ignore (expand_head_unif !env t1); + ignore (expand_head_unif !env t2); + let t1' = expand_head_unif !env t1 in + let t2' = expand_head_unif !env t2 in + let lv = min t1'.level t2'.level in + update_level !env lv t2; + update_level !env lv t1; + if unify_eq t1' t2' then () else + + let t1 = repr t1 and t2 = repr t2 in + if !trace_gadt_instances then begin + (* All types in chains already have the same ambiguity levels *) + let ilevel t = + match Env.gadt_instance_level !env t with None -> 0 | Some lv -> lv in + let lv1 = ilevel t1 and lv2 = ilevel t2 in + if lv1 > lv2 then Env.add_gadt_instance_chain !env lv1 t2 else + if lv2 > lv1 then Env.add_gadt_instance_chain !env lv2 t1 + end; + let t1, t2 = + if !Clflags.principal + && (find_lowest_level t1' < lv || find_lowest_level t2' < lv) then + (* Expand abbreviations hiding a lower level *) + (* Should also do it for parameterized types, after unification... 
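for now, only abbreviations without type parameters are replaced by their expansion here.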
*) + (match t1.desc with Tconstr (_, [], _) -> t1' | _ -> t1), + (match t2.desc with Tconstr (_, [], _) -> t2' | _ -> t2) + else (t1, t2) + in + if unify_eq t1 t1' || not (unify_eq t2 t2') then + unify3 env t1 t1' t2 t2' + else + try unify3 env t2 t2' t1 t1' with Unify trace -> + raise (Unify (List.map (fun (x, y) -> (y, x)) trace)) + +and unify3 env t1 t1' t2 t2' = + (* Third step: truly unification *) + (* Assumes either [t1 == t1'] or [t2 != t2'] *) + let d1 = t1'.desc and d2 = t2'.desc in + let create_recursion = (t2 != t2') && (deep_occur t1' t2) in + + begin match (d1, d2) with (* handle vars and univars specially *) + (Tunivar _, Tunivar _) -> + unify_univar t1' t2' !univar_pairs; + link_type t1' t2' + | (Tvar _, _) -> + occur !env t1' t2; + occur_univar !env t2; + link_type t1' t2; + | (_, Tvar _) -> + occur !env t2' t1; + occur_univar !env t1; + link_type t2' t1; + | (Tfield _, Tfield _) -> (* special case for GADTs *) + unify_fields env t1' t2' + | _ -> + begin match !umode with + | Expression -> + occur !env t1' t2'; + link_type t1' t2 + | Pattern -> + add_type_equality t1' t2' + end; + try + begin match (d1, d2) with + (Tarrow (l1, t1, u1, c1), Tarrow (l2, t2, u2, c2)) when l1 = l2 || + (!Clflags.classic || !umode = Pattern) && + not (is_optional l1 || is_optional l2) -> + unify env t1 t2; unify env u1 u2; + begin match commu_repr c1, commu_repr c2 with + Clink r, c2 -> set_commu r c2 + | c1, Clink r -> set_commu r c1 + | _ -> () + end + | (Ttuple tl1, Ttuple tl2) -> + unify_list env tl1 tl2 + | (Tconstr (p1, tl1, _), Tconstr (p2, tl2, _)) when Path.same p1 p2 -> + if !umode = Expression || not !generate_equations then + unify_list env tl1 tl2 + else if !assume_injective then + set_mode_pattern ~generate:true ~injective:false + (fun () -> unify_list env tl1 tl2) + else if in_current_module p1 (* || in_pervasives p1 *) + || List.exists (expands_to_datatype !env) [t1'; t1; t2] then + unify_list env tl1 tl2 + else + let inj = + try List.map Variance.(mem Inj) + (Env.find_type p1 !env).type_variance + with Not_found -> List.map (fun _ -> false) tl1 + in + List.iter2 + (fun i (t1, t2) -> + if i then unify env t1 t2 else + set_mode_pattern ~generate:false ~injective:false + begin fun () -> + let snap = snapshot () in + try unify env t1 t2 with Unify _ -> + backtrack snap; + reify env t1; reify env t2 + end) + inj (List.combine tl1 tl2) + | (Tconstr (path,[],_), + Tconstr (path',[],_)) + when is_instantiable !env path && is_instantiable !env path' + && !generate_equations -> + let source, destination = + if find_newtype_level !env path > find_newtype_level !env path' + then path , t2' + else path', t1' + in + add_gadt_equation env source destination + | (Tconstr (path,[],_), _) + when is_instantiable !env path && !generate_equations -> + reify env t2'; + add_gadt_equation env path t2' + | (_, Tconstr (path,[],_)) + when is_instantiable !env path && !generate_equations -> + reify env t1'; + add_gadt_equation env path t1' + | (Tconstr (_,_,_), _) | (_, Tconstr (_,_,_)) when !umode = Pattern -> + reify env t1'; + reify env t2'; + if !generate_equations then mcomp !env t1' t2' + | (Tobject (fi1, nm1), Tobject (fi2, _)) -> + unify_fields env fi1 fi2; + (* Type [t2'] may have been instantiated by [unify_fields] *) + (* XXX One should do some kind of unification... 
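instead, the nominal name of the first object is simply copied to the second one in the cases below.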
*) + begin match (repr t2').desc with + Tobject (_, {contents = Some (_, va::_)}) when + (match (repr va).desc with + Tvar _|Tunivar _|Tnil -> true | _ -> false) -> () + | Tobject (_, nm2) -> set_name nm2 !nm1 + | _ -> () + end + | (Tvariant row1, Tvariant row2) -> + if !umode = Expression then + unify_row env row1 row2 + else begin + let snap = snapshot () in + try unify_row env row1 row2 + with Unify _ -> + backtrack snap; + reify env t1'; + reify env t2'; + if !generate_equations then mcomp !env t1' t2' + end + | (Tfield(f,kind,_,rem), Tnil) | (Tnil, Tfield(f,kind,_,rem)) -> + begin match field_kind_repr kind with + Fvar r when f <> dummy_method -> + set_kind r Fabsent; + if d2 = Tnil then unify env rem t2' + else unify env (newty2 rem.level Tnil) rem + | _ -> raise (Unify []) + end + | (Tnil, Tnil) -> + () + | (Tpoly (t1, []), Tpoly (t2, [])) -> + unify env t1 t2 + | (Tpoly (t1, tl1), Tpoly (t2, tl2)) -> + enter_poly !env univar_pairs t1 tl1 t2 tl2 (unify env) + | (Tpackage (p1, n1, tl1), Tpackage (p2, n2, tl2)) -> + begin try + unify_package !env (unify_list env) + t1.level p1 n1 tl1 t2.level p2 n2 tl2 + with Not_found -> + if !umode = Expression then raise (Unify []); + List.iter (reify env) (tl1 @ tl2); + (* if !generate_equations then List.iter2 (mcomp !env) tl1 tl2 *) + end + | (_, _) -> + raise (Unify []) + end; + (* XXX Commentaires + changer "create_recursion" + ||| Comments + change "create_recursion" *) + if create_recursion then + match t2.desc with + Tconstr (p, tl, abbrev) -> + forget_abbrev abbrev p; + let t2'' = expand_head_unif !env t2 in + if not (closed_parameterized_type tl t2'') then + link_type (repr t2) (repr t2') + | _ -> + () (* t2 has already been expanded by update_level *) + with Unify trace -> + t1'.desc <- d1; + raise (Unify trace) + end + +and unify_list env tl1 tl2 = + if List.length tl1 <> List.length tl2 then + raise (Unify []); + List.iter2 (unify env) tl1 tl2 + +(* Build a fresh row variable for unification *) +and make_rowvar level use1 rest1 use2 rest2 = + let set_name ty name = + match ty.desc with + Tvar None -> log_type ty; ty.desc <- Tvar name + | _ -> () + in + let name = + match rest1.desc, rest2.desc with + Tvar (Some _ as name1), Tvar (Some _ as name2) -> + if rest1.level <= rest2.level then name1 else name2 + | Tvar (Some _ as name), _ -> + if use2 then set_name rest2 name; name + | _, Tvar (Some _ as name) -> + if use1 then set_name rest2 name; name + | _ -> None + in + if use1 then rest1 else + if use2 then rest2 else newvar2 ?name level + +and unify_fields env ty1 ty2 = (* Optimization *) + let (fields1, rest1) = flatten_fields ty1 + and (fields2, rest2) = flatten_fields ty2 in + let (pairs, miss1, miss2) = associate_fields fields1 fields2 in + let l1 = (repr ty1).level and l2 = (repr ty2).level in + let va = make_rowvar (min l1 l2) (miss2=[]) rest1 (miss1=[]) rest2 in + let d1 = rest1.desc and d2 = rest2.desc in + try + unify env (build_fields l1 miss1 va) rest2; + unify env rest1 (build_fields l2 miss2 va); + List.iter + (fun (n, k1, t1, k2, t2) -> + unify_kind k1 k2; + try + if !trace_gadt_instances then update_level !env va.level t1; + unify env t1 t2 + with Unify trace -> + raise (Unify ((newty (Tfield(n, k1, t1, newty Tnil)), + newty (Tfield(n, k2, t2, newty Tnil)))::trace))) + pairs + with exn -> + log_type rest1; rest1.desc <- d1; + log_type rest2; rest2.desc <- d2; + raise exn + +and unify_kind k1 k2 = + let k1 = field_kind_repr k1 in + let k2 = field_kind_repr k2 in + if k1 == k2 then () else + match k1, k2 with + (Fvar r, 
(Fvar _ | Fpresent)) -> set_kind r k2 + | (Fpresent, Fvar r) -> set_kind r k1 + | (Fpresent, Fpresent) -> () + | _ -> assert false + +and unify_row env row1 row2 = + let row1 = row_repr row1 and row2 = row_repr row2 in + let rm1 = row_more row1 and rm2 = row_more row2 in + if unify_eq rm1 rm2 then () else + let r1, r2, pairs = merge_row_fields row1.row_fields row2.row_fields in + if r1 <> [] && r2 <> [] then begin + let ht = Hashtbl.create (List.length r1) in + List.iter (fun (l,_) -> Hashtbl.add ht (hash_variant l) l) r1; + List.iter + (fun (l,_) -> + try raise (Tags(l, Hashtbl.find ht (hash_variant l))) + with Not_found -> ()) + r2 + end; + let fixed1 = row_fixed row1 and fixed2 = row_fixed row2 in + let more = + if fixed1 then rm1 else + if fixed2 then rm2 else + newty2 (min rm1.level rm2.level) (Tvar None) in + let fixed = fixed1 || fixed2 + and closed = row1.row_closed || row2.row_closed in + let keep switch = + List.for_all + (fun (_,f1,f2) -> + let f1, f2 = switch f1 f2 in + row_field_repr f1 = Rabsent || row_field_repr f2 <> Rabsent) + pairs + in + let empty fields = + List.for_all (fun (_,f) -> row_field_repr f = Rabsent) fields in + (* Check whether we are going to build an empty type *) + if closed && (empty r1 || row2.row_closed) && (empty r2 || row1.row_closed) + && List.for_all + (fun (_,f1,f2) -> + row_field_repr f1 = Rabsent || row_field_repr f2 = Rabsent) + pairs + then raise (Unify [mkvariant [] true, mkvariant [] true]); + let name = + if row1.row_name <> None && (row1.row_closed || empty r2) && + (not row2.row_closed || keep (fun f1 f2 -> f1, f2) && empty r1) + then row1.row_name + else if row2.row_name <> None && (row2.row_closed || empty r1) && + (not row1.row_closed || keep (fun f1 f2 -> f2, f1) && empty r2) + then row2.row_name + else None + in + let row0 = {row_fields = []; row_more = more; row_bound = (); + row_closed = closed; row_fixed = fixed; row_name = name} in + let set_more row rest = + let rest = + if closed then + filter_row_fields row.row_closed rest + else rest in + if rest <> [] && (row.row_closed || row_fixed row) + || closed && row_fixed row && not row.row_closed then begin + let t1 = mkvariant [] true and t2 = mkvariant rest false in + raise (Unify [if row == row1 then (t1,t2) else (t2,t1)]) + end; + (* The following test is not principal... 
should rather use Tnil *) + let rm = row_more row in + (*if !trace_gadt_instances && rm.desc = Tnil then () else*) + if !trace_gadt_instances then + update_level !env rm.level (newgenty (Tvariant row)); + if row_fixed row then + if more == rm then () else + if is_Tvar rm then link_type rm more else unify env rm more + else + let ty = newgenty (Tvariant {row0 with row_fields = rest}) in + update_level !env rm.level ty; + link_type rm ty + in + let md1 = rm1.desc and md2 = rm2.desc in + begin try + set_more row2 r1; + set_more row1 r2; + List.iter + (fun (l,f1,f2) -> + try unify_row_field env fixed1 fixed2 more l f1 f2 + with Unify trace -> + raise (Unify ((mkvariant [l,f1] true, + mkvariant [l,f2] true) :: trace))) + pairs; + if static_row row1 then begin + let rm = row_more row1 in + if is_Tvar rm then link_type rm (newty2 rm.level Tnil) + end + with exn -> + log_type rm1; rm1.desc <- md1; log_type rm2; rm2.desc <- md2; raise exn + end + +and unify_row_field env fixed1 fixed2 more l f1 f2 = + let f1 = row_field_repr f1 and f2 = row_field_repr f2 in + if f1 == f2 then () else + match f1, f2 with + Rpresent(Some t1), Rpresent(Some t2) -> unify env t1 t2 + | Rpresent None, Rpresent None -> () + | Reither(c1, tl1, m1, e1), Reither(c2, tl2, m2, e2) -> + if e1 == e2 then () else + if (fixed1 || fixed2) && not (c1 || c2) + && List.length tl1 = List.length tl2 then begin + (* PR#7496 *) + let f = Reither (c1 || c2, [], m1 || m2, ref None) in + set_row_field e1 f; set_row_field e2 f; + List.iter2 (unify env) tl1 tl2 + end + else let redo = + not !passive_variants && + (m1 || m2 || fixed1 || fixed2 || + !rigid_variants && (List.length tl1 = 1 || List.length tl2 = 1)) && + begin match tl1 @ tl2 with [] -> false + | t1 :: tl -> + if c1 || c2 then raise (Unify []); + List.iter (unify env t1) tl; + !e1 <> None || !e2 <> None + end in + if redo then unify_row_field env fixed1 fixed2 more l f1 f2 else + let tl1 = List.map repr tl1 and tl2 = List.map repr tl2 in + let rec remq tl = function [] -> [] + | ty :: tl' -> + if List.memq ty tl then remq tl tl' else ty :: remq tl tl' + in + let tl2' = remq tl2 tl1 and tl1' = remq tl1 tl2 in + (* PR#6744 *) + let split_univars = + List.partition + (fun ty -> try occur_univar !env ty; true with Unify _ -> false) in + let (tl1',tlu1) = split_univars tl1' + and (tl2',tlu2) = split_univars tl2' in + begin match tlu1, tlu2 with + [], [] -> () + | (tu1::tlu1), _ :: _ -> + (* Attempt to merge all the types containing univars *) + if not !passive_variants then + List.iter (unify env tu1) (tlu1@tlu2) + | (tu::_, []) | ([], tu::_) -> occur_univar !env tu + end; + (* Is this handling of levels really principal? 
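The argument types kept in the new [Reither] fields are lowered to the level of the row variable.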
*) + List.iter (update_level !env (repr more).level) (tl1' @ tl2'); + let e = ref None in + let f1' = Reither(c1 || c2, tl1', m1 || m2, e) + and f2' = Reither(c1 || c2, tl2', m1 || m2, e) in + set_row_field e1 f1'; set_row_field e2 f2'; + | Reither(_, _, false, e1), Rabsent when not fixed1 -> set_row_field e1 f2 + | Rabsent, Reither(_, _, false, e2) when not fixed2 -> set_row_field e2 f1 + | Rabsent, Rabsent -> () + | Reither(false, tl, _, e1), Rpresent(Some t2) when not fixed1 -> + set_row_field e1 f2; + update_level !env (repr more).level t2; + (try List.iter (fun t1 -> unify env t1 t2) tl + with exn -> e1 := None; raise exn) + | Rpresent(Some t1), Reither(false, tl, _, e2) when not fixed2 -> + set_row_field e2 f1; + update_level !env (repr more).level t1; + (try List.iter (unify env t1) tl + with exn -> e2 := None; raise exn) + | Reither(true, [], _, e1), Rpresent None when not fixed1 -> + set_row_field e1 f2 + | Rpresent None, Reither(true, [], _, e2) when not fixed2 -> + set_row_field e2 f1 + | _ -> raise (Unify []) + + +let unify env ty1 ty2 = + let snap = Btype.snapshot () in + try + unify env ty1 ty2 + with + Unify trace -> + undo_compress snap; + raise (Unify (expand_trace !env trace)) + | Recursive_abbrev -> + undo_compress snap; + raise (Unification_recursive_abbrev (expand_trace !env [(ty1,ty2)])) + +let unify_gadt ~newtype_level:lev (env:Env.t ref) ty1 ty2 = + try + univar_pairs := []; + newtype_level := Some lev; + set_mode_pattern ~generate:true ~injective:true + (fun () -> unify env ty1 ty2); + newtype_level := None; + TypePairs.clear unify_eq_set; + with e -> + newtype_level := None; + TypePairs.clear unify_eq_set; + raise e + +let unify_var env t1 t2 = + let t1 = repr t1 and t2 = repr t2 in + if t1 == t2 then () else + match t1.desc, t2.desc with + Tvar _, Tconstr _ when deep_occur t1 t2 -> + unify (ref env) t1 t2 + | Tvar _, _ -> + let reset_tracing = check_trace_gadt_instances env in + begin try + occur env t1 t2; + update_level env t1.level t2; + link_type t1 t2; + reset_trace_gadt_instances reset_tracing; + with Unify trace -> + reset_trace_gadt_instances reset_tracing; + let expanded_trace = expand_trace env ((t1,t2)::trace) in + raise (Unify expanded_trace) + end + | _ -> + unify (ref env) t1 t2 + +let _ = unify' := unify_var + +let unify_pairs env ty1 ty2 pairs = + univar_pairs := pairs; + unify env ty1 ty2 + +let unify env ty1 ty2 = + unify_pairs (ref env) ty1 ty2 [] + + + +(**** Special cases of unification ****) + +let expand_head_trace env t = + let reset_tracing = check_trace_gadt_instances env in + let t = expand_head_unif env t in + reset_trace_gadt_instances reset_tracing; + t + +(* + Unify [t] and [l:'a -> 'b]. Return ['a] and ['b]. + In label mode, label mismatch is accepted when + (1) the requested label is "" + (2) the original label is not optional +*) + +let filter_arrow env t l = + let t = expand_head_trace env t in + match t.desc with + Tvar _ -> + let lv = t.level in + let t1 = newvar2 lv and t2 = newvar2 lv in + let t' = newty2 lv (Tarrow (l, t1, t2, Cok)) in + link_type t t'; + (t1, t2) + | Tarrow(l', t1, t2, _) + when l = l' || !Clflags.classic && l = Nolabel && not (is_optional l') -> + (t1, t2) + | _ -> + raise (Unify []) + +(* Used by [filter_method]. 
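Looks up the method [name] in a list of object fields, inserting a fresh field (with a fresh tail) when the row ends in a type variable, and returns the method's type.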
*) +let rec filter_method_field env name priv ty = + let ty = expand_head_trace env ty in + match ty.desc with + Tvar _ -> + let level = ty.level in + let ty1 = newvar2 level and ty2 = newvar2 level in + let ty' = newty2 level (Tfield (name, + begin match priv with + Private -> Fvar (ref None) + | Public -> Fpresent + end, + ty1, ty2)) + in + link_type ty ty'; + ty1 + | Tfield(n, kind, ty1, ty2) -> + let kind = field_kind_repr kind in + if (n = name) && (kind <> Fabsent) then begin + if priv = Public then + unify_kind kind Fpresent; + ty1 + end else + filter_method_field env name priv ty2 + | _ -> + raise (Unify []) + +(* Unify [ty] and [< name : 'a; .. >]. Return ['a]. *) +let filter_method env name priv ty = + let ty = expand_head_trace env ty in + match ty.desc with + Tvar _ -> + let ty1 = newvar () in + let ty' = newobj ty1 in + update_level env ty.level ty'; + link_type ty ty'; + filter_method_field env name priv ty1 + | Tobject(f, _) -> + filter_method_field env name priv f + | _ -> + raise (Unify []) + +let check_filter_method env name priv ty = + ignore(filter_method env name priv ty) + +let filter_self_method env lab priv meths ty = + let ty' = filter_method env lab priv ty in + try + Meths.find lab !meths + with Not_found -> + let pair = (Ident.create lab, ty') in + meths := Meths.add lab pair !meths; + pair + + + (***********************************) + (* Matching between type schemes *) + (***********************************) + +(* + Update the level of [ty]. First check that the levels of generic + variables from the subject are not lowered. +*) +let moregen_occur env level ty = + let rec occur ty = + let ty = repr ty in + if ty.level > level then begin + if is_Tvar ty && ty.level >= generic_level - 1 then raise Occur; + ty.level <- pivot_level - ty.level; + match ty.desc with + Tvariant row when static_row row -> + iter_row occur row + | _ -> + iter_type_expr occur ty + end + in + begin try + occur ty; unmark_type ty + with Occur -> + unmark_type ty; raise (Unify []) + end; + (* also check for free univars *) + occur_univar env ty; + update_level env level ty + +let may_instantiate inst_nongen t1 = + if inst_nongen then t1.level <> generic_level - 1 + else t1.level = generic_level + +let rec moregen inst_nongen type_pairs env t1 t2 = + if t1 == t2 then () else + let t1 = repr t1 in + let t2 = repr t2 in + if t1 == t2 then () else + + try + match (t1.desc, t2.desc) with + (Tvar _, _) when may_instantiate inst_nongen t1 -> + moregen_occur env t1.level t2; + occur env t1 t2; + link_type t1 t2 + | (Tconstr (p1, [], _), Tconstr (p2, [], _)) when Path.same p1 p2 -> + () + | _ -> + let t1' = expand_head env t1 in + let t2' = expand_head env t2 in + (* Expansion may have changed the representative of the types... 
*)
*) + let t1' = repr t1' and t2' = repr t2' in + if t1' == t2' then () else + begin try + TypePairs.find type_pairs (t1', t2') + with Not_found -> + TypePairs.add type_pairs (t1', t2') (); + match (t1'.desc, t2'.desc) with + (Tvar _, _) when may_instantiate inst_nongen t1' -> + moregen_occur env t1'.level t2; + link_type t1' t2 + | (Tarrow (l1, t1, u1, _), Tarrow (l2, t2, u2, _)) when l1 = l2 + || !Clflags.classic && not (is_optional l1 || is_optional l2) -> + moregen inst_nongen type_pairs env t1 t2; + moregen inst_nongen type_pairs env u1 u2 + | (Ttuple tl1, Ttuple tl2) -> + moregen_list inst_nongen type_pairs env tl1 tl2 + | (Tconstr (p1, tl1, _), Tconstr (p2, tl2, _)) + when Path.same p1 p2 -> + moregen_list inst_nongen type_pairs env tl1 tl2 + | (Tpackage (p1, n1, tl1), Tpackage (p2, n2, tl2)) -> + begin try + unify_package env (moregen_list inst_nongen type_pairs env) + t1'.level p1 n1 tl1 t2'.level p2 n2 tl2 + with Not_found -> raise (Unify []) + end + | (Tvariant row1, Tvariant row2) -> + moregen_row inst_nongen type_pairs env row1 row2 + | (Tobject (fi1, _nm1), Tobject (fi2, _nm2)) -> + moregen_fields inst_nongen type_pairs env fi1 fi2 + | (Tfield _, Tfield _) -> (* Actually unused *) + moregen_fields inst_nongen type_pairs env t1' t2' + | (Tnil, Tnil) -> + () + | (Tpoly (t1, []), Tpoly (t2, [])) -> + moregen inst_nongen type_pairs env t1 t2 + | (Tpoly (t1, tl1), Tpoly (t2, tl2)) -> + enter_poly env univar_pairs t1 tl1 t2 tl2 + (moregen inst_nongen type_pairs env) + | (Tunivar _, Tunivar _) -> + unify_univar t1' t2' !univar_pairs + | (_, _) -> + raise (Unify []) + end + with Unify trace -> + raise (Unify ((t1, t2)::trace)) + +and moregen_list inst_nongen type_pairs env tl1 tl2 = + if List.length tl1 <> List.length tl2 then + raise (Unify []); + List.iter2 (moregen inst_nongen type_pairs env) tl1 tl2 + +and moregen_fields inst_nongen type_pairs env ty1 ty2 = + let (fields1, rest1) = flatten_fields ty1 + and (fields2, rest2) = flatten_fields ty2 in + let (pairs, miss1, miss2) = associate_fields fields1 fields2 in + if miss1 <> [] then raise (Unify []); + moregen inst_nongen type_pairs env rest1 + (build_fields (repr ty2).level miss2 rest2); + List.iter + (fun (n, k1, t1, k2, t2) -> + moregen_kind k1 k2; + try moregen inst_nongen type_pairs env t1 t2 with Unify trace -> + raise (Unify ((newty (Tfield(n, k1, t1, rest2)), + newty (Tfield(n, k2, t2, rest2)))::trace))) + pairs + +and moregen_kind k1 k2 = + let k1 = field_kind_repr k1 in + let k2 = field_kind_repr k2 in + if k1 == k2 then () else + match k1, k2 with + (Fvar r, (Fvar _ | Fpresent)) -> set_kind r k2 + | (Fpresent, Fpresent) -> () + | _ -> raise (Unify []) + +and moregen_row inst_nongen type_pairs env row1 row2 = + let row1 = row_repr row1 and row2 = row_repr row2 in + let rm1 = repr row1.row_more and rm2 = repr row2.row_more in + if rm1 == rm2 then () else + let may_inst = + is_Tvar rm1 && may_instantiate inst_nongen rm1 || rm1.desc = Tnil in + let r1, r2, pairs = merge_row_fields row1.row_fields row2.row_fields in + let r1, r2 = + if row2.row_closed then + filter_row_fields may_inst r1, filter_row_fields false r2 + else r1, r2 + in + if r1 <> [] || row1.row_closed && (not row2.row_closed || r2 <> []) + then raise (Unify []); + begin match rm1.desc, rm2.desc with + Tunivar _, Tunivar _ -> + unify_univar rm1 rm2 !univar_pairs + | Tunivar _, _ | _, Tunivar _ -> + raise (Unify []) + | _ when static_row row1 -> () + | _ when may_inst -> + let ext = + newgenty (Tvariant {row2 with row_fields = r2; row_name = None}) + in + 
moregen_occur env rm1.level ext; + link_type rm1 ext + | Tconstr _, Tconstr _ -> + moregen inst_nongen type_pairs env rm1 rm2 + | _ -> raise (Unify []) + end; + List.iter + (fun (_l,f1,f2) -> + let f1 = row_field_repr f1 and f2 = row_field_repr f2 in + if f1 == f2 then () else + match f1, f2 with + Rpresent(Some t1), Rpresent(Some t2) -> + moregen inst_nongen type_pairs env t1 t2 + | Rpresent None, Rpresent None -> () + | Reither(false, tl1, _, e1), Rpresent(Some t2) when may_inst -> + set_row_field e1 f2; + List.iter (fun t1 -> moregen inst_nongen type_pairs env t1 t2) tl1 + | Reither(c1, tl1, _, e1), Reither(c2, tl2, m2, e2) -> + if e1 != e2 then begin + if c1 && not c2 then raise(Unify []); + set_row_field e1 (Reither (c2, [], m2, e2)); + if List.length tl1 = List.length tl2 then + List.iter2 (moregen inst_nongen type_pairs env) tl1 tl2 + else match tl2 with + t2 :: _ -> + List.iter (fun t1 -> moregen inst_nongen type_pairs env t1 t2) + tl1 + | [] -> + if tl1 <> [] then raise (Unify []) + end + | Reither(true, [], _, e1), Rpresent None when may_inst -> + set_row_field e1 f2 + | Reither(_, _, _, e1), Rabsent when may_inst -> + set_row_field e1 f2 + | Rabsent, Rabsent -> () + | _ -> raise (Unify [])) + pairs + +(* Must empty univar_pairs first *) +let moregen inst_nongen type_pairs env patt subj = + univar_pairs := []; + moregen inst_nongen type_pairs env patt subj + +(* + Non-generic variable can be instantiated only if [inst_nongen] is + true. So, [inst_nongen] should be set to false if the subject might + contain non-generic variables (and we do not want them to be + instantiated). + Usually, the subject is given by the user, and the pattern + is unimportant. So, no need to propagate abbreviations. +*) +let moregeneral env inst_nongen pat_sch subj_sch = + let old_level = !current_level in + current_level := generic_level - 1; + (* + Generic variables are first duplicated with [instance]. So, + their levels are lowered to [generic_level - 1]. The subject is + then copied with [duplicate_type]. That way, its levels won't be + changed. + *) + let subj = duplicate_type (instance env subj_sch) in + current_level := generic_level; + (* Duplicate generic variables *) + let patt = instance env pat_sch in + let res = + try moregen inst_nongen (TypePairs.create 13) env patt subj; true with + Unify _ -> false + in + current_level := old_level; + res + + +(* Alternative approach: "rigidify" a type scheme, + and check validity after unification *) +(* Simpler, no? 
*) + +let rec rigidify_rec vars ty = + let ty = repr ty in + if ty.level >= lowest_level then begin + ty.level <- pivot_level - ty.level; + match ty.desc with + | Tvar _ -> + if not (List.memq ty !vars) then vars := ty :: !vars + | Tvariant row -> + let row = row_repr row in + let more = repr row.row_more in + if is_Tvar more && not (row_fixed row) then begin + let more' = newty2 more.level more.desc in + let row' = {row with row_fixed=true; row_fields=[]; row_more=more'} + in link_type more (newty2 ty.level (Tvariant row')) + end; + iter_row (rigidify_rec vars) row; + (* only consider the row variable if the variant is not static *) + if not (static_row row) then rigidify_rec vars (row_more row) + | _ -> + iter_type_expr (rigidify_rec vars) ty + end + +let rigidify ty = + let vars = ref [] in + rigidify_rec vars ty; + unmark_type ty; + !vars + +let all_distinct_vars env vars = + let tyl = ref [] in + List.for_all + (fun ty -> + let ty = expand_head env ty in + if List.memq ty !tyl then false else + (tyl := ty :: !tyl; is_Tvar ty)) + vars + +let matches env ty ty' = + let snap = snapshot () in + let vars = rigidify ty in + cleanup_abbrev (); + let ok = + try unify env ty ty'; all_distinct_vars env vars + with Unify _ -> false + in + backtrack snap; + ok + + + (*********************************************) + (* Equivalence between parameterized types *) + (*********************************************) + +let expand_head_rigid env ty = + let old = !rigid_variants in + rigid_variants := true; + let ty' = expand_head env ty in + rigid_variants := old; ty' + +let normalize_subst subst = + if List.exists + (function {desc=Tlink _}, _ | _, {desc=Tlink _} -> true | _ -> false) + !subst + then subst := List.map (fun (t1,t2) -> repr t1, repr t2) !subst + +let rec eqtype rename type_pairs subst env t1 t2 = + if t1 == t2 then () else + let t1 = repr t1 in + let t2 = repr t2 in + if t1 == t2 then () else + + try + match (t1.desc, t2.desc) with + (Tvar _, Tvar _) when rename -> + begin try + normalize_subst subst; + if List.assq t1 !subst != t2 then raise (Unify []) + with Not_found -> + if List.exists (fun (_, t) -> t == t2) !subst then raise (Unify []); + subst := (t1, t2) :: !subst + end + | (Tconstr (p1, [], _), Tconstr (p2, [], _)) when Path.same p1 p2 -> + () + | _ -> + let t1' = expand_head_rigid env t1 in + let t2' = expand_head_rigid env t2 in + (* Expansion may have changed the representative of the types... 
*) + let t1' = repr t1' and t2' = repr t2' in + if t1' == t2' then () else + begin try + TypePairs.find type_pairs (t1', t2') + with Not_found -> + TypePairs.add type_pairs (t1', t2') (); + match (t1'.desc, t2'.desc) with + (Tvar _, Tvar _) when rename -> + begin try + normalize_subst subst; + if List.assq t1' !subst != t2' then raise (Unify []) + with Not_found -> + if List.exists (fun (_, t) -> t == t2') !subst + then raise (Unify []); + subst := (t1', t2') :: !subst + end + | (Tarrow (l1, t1, u1, _), Tarrow (l2, t2, u2, _)) when l1 = l2 + || !Clflags.classic && not (is_optional l1 || is_optional l2) -> + eqtype rename type_pairs subst env t1 t2; + eqtype rename type_pairs subst env u1 u2; + | (Ttuple tl1, Ttuple tl2) -> + eqtype_list rename type_pairs subst env tl1 tl2 + | (Tconstr (p1, tl1, _), Tconstr (p2, tl2, _)) + when Path.same p1 p2 -> + eqtype_list rename type_pairs subst env tl1 tl2 + | (Tpackage (p1, n1, tl1), Tpackage (p2, n2, tl2)) -> + begin try + unify_package env (eqtype_list rename type_pairs subst env) + t1'.level p1 n1 tl1 t2'.level p2 n2 tl2 + with Not_found -> raise (Unify []) + end + | (Tvariant row1, Tvariant row2) -> + eqtype_row rename type_pairs subst env row1 row2 + | (Tobject (fi1, _nm1), Tobject (fi2, _nm2)) -> + eqtype_fields rename type_pairs subst env fi1 fi2 + | (Tfield _, Tfield _) -> (* Actually unused *) + eqtype_fields rename type_pairs subst env t1' t2' + | (Tnil, Tnil) -> + () + | (Tpoly (t1, []), Tpoly (t2, [])) -> + eqtype rename type_pairs subst env t1 t2 + | (Tpoly (t1, tl1), Tpoly (t2, tl2)) -> + enter_poly env univar_pairs t1 tl1 t2 tl2 + (eqtype rename type_pairs subst env) + | (Tunivar _, Tunivar _) -> + unify_univar t1' t2' !univar_pairs + | (_, _) -> + raise (Unify []) + end + with Unify trace -> + raise (Unify ((t1, t2)::trace)) + +and eqtype_list rename type_pairs subst env tl1 tl2 = + if List.length tl1 <> List.length tl2 then + raise (Unify []); + List.iter2 (eqtype rename type_pairs subst env) tl1 tl2 + +and eqtype_fields rename type_pairs subst env ty1 ty2 = + let (fields1, rest1) = flatten_fields ty1 in + let (fields2, rest2) = flatten_fields ty2 in + (* First check if same row => already equal *) + let same_row = + rest1 == rest2 || TypePairs.mem type_pairs (rest1,rest2) || + (rename && List.mem (rest1, rest2) !subst) + in + if same_row then () else + (* Try expansion, needed when called from Includecore.type_manifest *) + match expand_head_rigid env rest2 with + {desc=Tobject(ty2,_)} -> eqtype_fields rename type_pairs subst env ty1 ty2 + | _ -> + let (pairs, miss1, miss2) = associate_fields fields1 fields2 in + eqtype rename type_pairs subst env rest1 rest2; + if (miss1 <> []) || (miss2 <> []) then raise (Unify []); + List.iter + (function (n, k1, t1, k2, t2) -> + eqtype_kind k1 k2; + try eqtype rename type_pairs subst env t1 t2 with Unify trace -> + raise (Unify ((newty (Tfield(n, k1, t1, rest2)), + newty (Tfield(n, k2, t2, rest2)))::trace))) + pairs + +and eqtype_kind k1 k2 = + let k1 = field_kind_repr k1 in + let k2 = field_kind_repr k2 in + match k1, k2 with + (Fvar _, Fvar _) + | (Fpresent, Fpresent) -> () + | _ -> raise (Unify []) + +and eqtype_row rename type_pairs subst env row1 row2 = + (* Try expansion, needed when called from Includecore.type_manifest *) + match expand_head_rigid env (row_more row2) with + {desc=Tvariant row2} -> eqtype_row rename type_pairs subst env row1 row2 + | _ -> + let row1 = row_repr row1 and row2 = row_repr row2 in + let r1, r2, pairs = merge_row_fields row1.row_fields row2.row_fields in + if 
row1.row_closed <> row2.row_closed + || not row1.row_closed && (r1 <> [] || r2 <> []) + || filter_row_fields false (r1 @ r2) <> [] + then raise (Unify []); + if not (static_row row1) then + eqtype rename type_pairs subst env row1.row_more row2.row_more; + List.iter + (fun (_,f1,f2) -> + match row_field_repr f1, row_field_repr f2 with + Rpresent(Some t1), Rpresent(Some t2) -> + eqtype rename type_pairs subst env t1 t2 + | Reither(c1, [], _, _), Reither(c2, [], _, _) when c1 = c2 -> + () + | Reither(c1, t1::tl1, _, _), Reither(c2, t2::tl2, _, _) when c1 = c2 -> + eqtype rename type_pairs subst env t1 t2; + if List.length tl1 = List.length tl2 then + (* if same length allow different types (meaning?) *) + List.iter2 (eqtype rename type_pairs subst env) tl1 tl2 + else begin + (* otherwise everything must be equal *) + List.iter (eqtype rename type_pairs subst env t1) tl2; + List.iter (fun t1 -> eqtype rename type_pairs subst env t1 t2) tl1 + end + | Rpresent None, Rpresent None -> () + | Rabsent, Rabsent -> () + | _ -> raise (Unify [])) + pairs + +(* Must empty univar_pairs first *) +let eqtype_list rename type_pairs subst env tl1 tl2 = + univar_pairs := []; + let snap = Btype.snapshot () in + try eqtype_list rename type_pairs subst env tl1 tl2; backtrack snap + with exn -> backtrack snap; raise exn + +let eqtype rename type_pairs subst env t1 t2 = + eqtype_list rename type_pairs subst env [t1] [t2] + +(* Two modes: with or without renaming of variables *) +let equal env rename tyl1 tyl2 = + try + eqtype_list rename (TypePairs.create 11) (ref []) env tyl1 tyl2; true + with + Unify _ -> false + + + (*************************) + (* Class type matching *) + (*************************) + + +type class_match_failure = + CM_Virtual_class + | CM_Parameter_arity_mismatch of int * int + | CM_Type_parameter_mismatch of Env.t * (type_expr * type_expr) list + | CM_Class_type_mismatch of Env.t * class_type * class_type + | CM_Parameter_mismatch of Env.t * (type_expr * type_expr) list + | CM_Val_type_mismatch of string * Env.t * (type_expr * type_expr) list + | CM_Meth_type_mismatch of string * Env.t * (type_expr * type_expr) list + | CM_Non_mutable_value of string + | CM_Non_concrete_value of string + | CM_Missing_value of string + | CM_Missing_method of string + | CM_Hide_public of string + | CM_Hide_virtual of string * string + | CM_Public_method of string + | CM_Private_method of string + | CM_Virtual_method of string + +exception Failure of class_match_failure list + +let rec moregen_clty trace type_pairs env cty1 cty2 = + try + match cty1, cty2 with + Cty_constr (_, _, cty1), _ -> + moregen_clty true type_pairs env cty1 cty2 + | _, Cty_constr (_, _, cty2) -> + moregen_clty true type_pairs env cty1 cty2 + | Cty_arrow (l1, ty1, cty1'), Cty_arrow (l2, ty2, cty2') when l1 = l2 -> + begin try moregen true type_pairs env ty1 ty2 with Unify trace -> + raise (Failure [CM_Parameter_mismatch (env, expand_trace env trace)]) + end; + moregen_clty false type_pairs env cty1' cty2' + | Cty_signature sign1, Cty_signature sign2 -> + let ty1 = object_fields (repr sign1.csig_self) in + let ty2 = object_fields (repr sign2.csig_self) in + let (fields1, _rest1) = flatten_fields ty1 + and (fields2, _rest2) = flatten_fields ty2 in + let (pairs, _miss1, _miss2) = associate_fields fields1 fields2 in + List.iter + (fun (lab, _k1, t1, _k2, t2) -> + begin try moregen true type_pairs env t1 t2 with Unify trace -> + raise (Failure [CM_Meth_type_mismatch + (lab, env, expand_trace env trace)]) + end) + pairs; + Vars.iter + (fun lab 
(_mut, _v, ty) -> + let (_mut', _v', ty') = Vars.find lab sign1.csig_vars in + try moregen true type_pairs env ty' ty with Unify trace -> + raise (Failure [CM_Val_type_mismatch + (lab, env, expand_trace env trace)])) + sign2.csig_vars + | _ -> + raise (Failure []) + with + Failure error when trace || error = [] -> + raise (Failure (CM_Class_type_mismatch (env, cty1, cty2)::error)) + +let match_class_types ?(trace=true) env pat_sch subj_sch = + let type_pairs = TypePairs.create 53 in + let old_level = !current_level in + current_level := generic_level - 1; + (* + Generic variables are first duplicated with [instance]. So, + their levels are lowered to [generic_level - 1]. The subject is + then copied with [duplicate_type]. That way, its levels won't be + changed. + *) + let (_, subj_inst) = instance_class [] subj_sch in + let subj = duplicate_class_type subj_inst in + current_level := generic_level; + (* Duplicate generic variables *) + let (_, patt) = instance_class [] pat_sch in + let res = + let sign1 = signature_of_class_type patt in + let sign2 = signature_of_class_type subj in + let t1 = repr sign1.csig_self in + let t2 = repr sign2.csig_self in + TypePairs.add type_pairs (t1, t2) (); + let (fields1, rest1) = flatten_fields (object_fields t1) + and (fields2, rest2) = flatten_fields (object_fields t2) in + let (pairs, miss1, miss2) = associate_fields fields1 fields2 in + let error = + List.fold_right + (fun (lab, k, _) err -> + let err = + let k = field_kind_repr k in + begin match k with + Fvar r -> set_kind r Fabsent; err + | _ -> CM_Hide_public lab::err + end + in + if Concr.mem lab sign1.csig_concr then err + else CM_Hide_virtual ("method", lab) :: err) + miss1 [] + in + let missing_method = List.map (fun (m, _, _) -> m) miss2 in + let error = + (List.map (fun m -> CM_Missing_method m) missing_method) @ error + in + (* Always succeeds *) + moregen true type_pairs env rest1 rest2; + let error = + List.fold_right + (fun (lab, k1, _t1, k2, _t2) err -> + try moregen_kind k1 k2; err with + Unify _ -> CM_Public_method lab::err) + pairs error + in + let error = + Vars.fold + (fun lab (mut, vr, _ty) err -> + try + let (mut', vr', _ty') = Vars.find lab sign1.csig_vars in + if mut = Mutable && mut' <> Mutable then + CM_Non_mutable_value lab::err + else if vr = Concrete && vr' <> Concrete then + CM_Non_concrete_value lab::err + else + err + with Not_found -> + CM_Missing_value lab::err) + sign2.csig_vars error + in + let error = + Vars.fold + (fun lab (_,vr,_) err -> + if vr = Virtual && not (Vars.mem lab sign2.csig_vars) then + CM_Hide_virtual ("instance variable", lab) :: err + else err) + sign1.csig_vars error + in + let error = + List.fold_right + (fun e l -> + if List.mem e missing_method then l else CM_Virtual_method e::l) + (Concr.elements (Concr.diff sign2.csig_concr sign1.csig_concr)) + error + in + match error with + [] -> + begin try + moregen_clty trace type_pairs env patt subj; + [] + with + Failure r -> r + end + | error -> + CM_Class_type_mismatch (env, patt, subj)::error + in + current_level := old_level; + res + +let rec equal_clty trace type_pairs subst env cty1 cty2 = + try + match cty1, cty2 with + Cty_constr (_, _, cty1), Cty_constr (_, _, cty2) -> + equal_clty true type_pairs subst env cty1 cty2 + | Cty_constr (_, _, cty1), _ -> + equal_clty true type_pairs subst env cty1 cty2 + | _, Cty_constr (_, _, cty2) -> + equal_clty true type_pairs subst env cty1 cty2 + | Cty_arrow (l1, ty1, cty1'), Cty_arrow (l2, ty2, cty2') when l1 = l2 -> + begin try eqtype true type_pairs 
subst env ty1 ty2 with Unify trace -> + raise (Failure [CM_Parameter_mismatch (env, expand_trace env trace)]) + end; + equal_clty false type_pairs subst env cty1' cty2' + | Cty_signature sign1, Cty_signature sign2 -> + let ty1 = object_fields (repr sign1.csig_self) in + let ty2 = object_fields (repr sign2.csig_self) in + let (fields1, _rest1) = flatten_fields ty1 + and (fields2, _rest2) = flatten_fields ty2 in + let (pairs, _miss1, _miss2) = associate_fields fields1 fields2 in + List.iter + (fun (lab, _k1, t1, _k2, t2) -> + begin try eqtype true type_pairs subst env t1 t2 with + Unify trace -> + raise (Failure [CM_Meth_type_mismatch + (lab, env, expand_trace env trace)]) + end) + pairs; + Vars.iter + (fun lab (_, _, ty) -> + let (_, _, ty') = Vars.find lab sign1.csig_vars in + try eqtype true type_pairs subst env ty' ty with Unify trace -> + raise (Failure [CM_Val_type_mismatch + (lab, env, expand_trace env trace)])) + sign2.csig_vars + | _ -> + raise + (Failure (if trace then [] + else [CM_Class_type_mismatch (env, cty1, cty2)])) + with + Failure error when trace -> + raise (Failure (CM_Class_type_mismatch (env, cty1, cty2)::error)) + +let match_class_declarations env patt_params patt_type subj_params subj_type = + let type_pairs = TypePairs.create 53 in + let subst = ref [] in + let sign1 = signature_of_class_type patt_type in + let sign2 = signature_of_class_type subj_type in + let t1 = repr sign1.csig_self in + let t2 = repr sign2.csig_self in + TypePairs.add type_pairs (t1, t2) (); + let (fields1, rest1) = flatten_fields (object_fields t1) + and (fields2, rest2) = flatten_fields (object_fields t2) in + let (pairs, miss1, miss2) = associate_fields fields1 fields2 in + let error = + List.fold_right + (fun (lab, k, _) err -> + let err = + let k = field_kind_repr k in + begin match k with + Fvar _ -> err + | _ -> CM_Hide_public lab::err + end + in + if Concr.mem lab sign1.csig_concr then err + else CM_Hide_virtual ("method", lab) :: err) + miss1 [] + in + let missing_method = List.map (fun (m, _, _) -> m) miss2 in + let error = + (List.map (fun m -> CM_Missing_method m) missing_method) @ error + in + (* Always succeeds *) + eqtype true type_pairs subst env rest1 rest2; + let error = + List.fold_right + (fun (lab, k1, _t1, k2, _t2) err -> + let k1 = field_kind_repr k1 in + let k2 = field_kind_repr k2 in + match k1, k2 with + (Fvar _, Fvar _) + | (Fpresent, Fpresent) -> err + | (Fvar _, Fpresent) -> CM_Private_method lab::err + | (Fpresent, Fvar _) -> CM_Public_method lab::err + | _ -> assert false) + pairs error + in + let error = + Vars.fold + (fun lab (mut, vr, _ty) err -> + try + let (mut', vr', _ty') = Vars.find lab sign1.csig_vars in + if mut = Mutable && mut' <> Mutable then + CM_Non_mutable_value lab::err + else if vr = Concrete && vr' <> Concrete then + CM_Non_concrete_value lab::err + else + err + with Not_found -> + CM_Missing_value lab::err) + sign2.csig_vars error + in + let error = + Vars.fold + (fun lab (_,vr,_) err -> + if vr = Virtual && not (Vars.mem lab sign2.csig_vars) then + CM_Hide_virtual ("instance variable", lab) :: err + else err) + sign1.csig_vars error + in + let error = + List.fold_right + (fun e l -> + if List.mem e missing_method then l else CM_Virtual_method e::l) + (Concr.elements (Concr.diff sign2.csig_concr sign1.csig_concr)) + error + in + match error with + [] -> + begin try + let lp = List.length patt_params in + let ls = List.length subj_params in + if lp <> ls then + raise (Failure [CM_Parameter_arity_mismatch (lp, ls)]); + List.iter2 (fun p s -> + 
try eqtype true type_pairs subst env p s with Unify trace -> + raise (Failure [CM_Type_parameter_mismatch + (env, expand_trace env trace)])) + patt_params subj_params; + (* old code: equal_clty false type_pairs subst env patt_type subj_type; *) + equal_clty false type_pairs subst env + (Cty_signature sign1) (Cty_signature sign2); + (* Use moregeneral for class parameters, need to recheck everything to + keeps relationships (PR#4824) *) + let clty_params = + List.fold_right (fun ty cty -> Cty_arrow (Labelled "*",ty,cty)) in + match_class_types ~trace:false env + (clty_params patt_params patt_type) + (clty_params subj_params subj_type) + with + Failure r -> r + end + | error -> + error + + + (***************) + (* Subtyping *) + (***************) + + +(**** Build a subtype of a given type. ****) + +(* build_subtype: + [visited] traces traversed object and variant types + [loops] is a mapping from variables to variables, to reproduce + positive loops in a class type + [posi] true if the current variance is positive + [level] number of expansions/enlargement allowed on this branch *) + +let warn = ref false (* whether double coercion might do better *) +let pred_expand n = if n mod 2 = 0 && n > 0 then pred n else n +let pred_enlarge n = if n mod 2 = 1 then pred n else n + +type change = Unchanged | Equiv | Changed +let collect l = List.fold_left (fun c1 (_, c2) -> max c1 c2) Unchanged l + +let rec filter_visited = function + [] -> [] + | {desc=Tobject _|Tvariant _} :: _ as l -> l + | _ :: l -> filter_visited l + +let memq_warn t visited = + if List.memq t visited then (warn := true; true) else false + +let rec lid_of_path ?(hash="") = function + Path.Pident id -> + Longident.Lident (hash ^ Ident.name id) + | Path.Pdot (p1, s, _) -> + Longident.Ldot (lid_of_path p1, hash ^ s) + | Path.Papply (p1, p2) -> + Longident.Lapply (lid_of_path ~hash p1, lid_of_path p2) + +let find_cltype_for_path env p = + let cl_path = Env.lookup_type (lid_of_path ~hash:"#" p) env in + let cl_abbr = Env.find_type cl_path env in + + match cl_abbr.type_manifest with + Some ty -> + begin match (repr ty).desc with + Tobject(_,{contents=Some(p',_)}) when Path.same p p' -> cl_abbr, ty + | _ -> raise Not_found + end + | None -> assert false + +let has_constr_row' env t = + has_constr_row (expand_abbrev env t) + +let rec build_subtype env visited loops posi level t = + let t = repr t in + match t.desc with + Tvar _ -> + if posi then + try + let t' = List.assq t loops in + warn := true; + (t', Equiv) + with Not_found -> + (t, Unchanged) + else + (t, Unchanged) + | Tarrow(l, t1, t2, _) -> + if memq_warn t visited then (t, Unchanged) else + let visited = t :: visited in + let (t1', c1) = build_subtype env visited loops (not posi) level t1 in + let (t2', c2) = build_subtype env visited loops posi level t2 in + let c = max c1 c2 in + if c > Unchanged then (newty (Tarrow(l, t1', t2', Cok)), c) + else (t, Unchanged) + | Ttuple tlist -> + if memq_warn t visited then (t, Unchanged) else + let visited = t :: visited in + let tlist' = + List.map (build_subtype env visited loops posi level) tlist + in + let c = collect tlist' in + if c > Unchanged then (newty (Ttuple (List.map fst tlist')), c) + else (t, Unchanged) + | Tconstr(p, tl, abbrev) + when level > 0 && generic_abbrev env p && safe_abbrev env t + && not (has_constr_row' env t) -> + let t' = repr (expand_abbrev env t) in + let level' = pred_expand level in + begin try match t'.desc with + Tobject _ when posi && not (opened_object t') -> + let cl_abbr, body = find_cltype_for_path 
env p in + let ty = + subst env !current_level Public abbrev None + cl_abbr.type_params tl body in + let ty = repr ty in + let ty1, tl1 = + match ty.desc with + Tobject(ty1,{contents=Some(p',tl1)}) when Path.same p p' -> + ty1, tl1 + | _ -> raise Not_found + in + (* Fix PR#4505: do not set ty to Tvar when it appears in tl1, + as this occurrence might break the occur check. + XXX not clear whether this correct anyway... *) + if List.exists (deep_occur ty) tl1 then raise Not_found; + ty.desc <- Tvar None; + let t'' = newvar () in + let loops = (ty, t'') :: loops in + (* May discard [visited] as level is going down *) + let (ty1', c) = + build_subtype env [t'] loops posi (pred_enlarge level') ty1 in + assert (is_Tvar t''); + let nm = + if c > Equiv || deep_occur ty ty1' then None else Some(p,tl1) in + t''.desc <- Tobject (ty1', ref nm); + (try unify_var env ty t with Unify _ -> assert false); + (t'', Changed) + | _ -> raise Not_found + with Not_found -> + let (t'',c) = build_subtype env visited loops posi level' t' in + if c > Unchanged then (t'',c) + else (t, Unchanged) + end + | Tconstr(p, tl, _abbrev) -> + (* Must check recursion on constructors, since we do not always + expand them *) + if memq_warn t visited then (t, Unchanged) else + let visited = t :: visited in + begin try + let decl = Env.find_type p env in + if level = 0 && generic_abbrev env p && safe_abbrev env t + && not (has_constr_row' env t) + then warn := true; + let tl' = + List.map2 + (fun v t -> + let (co,cn) = Variance.get_upper v in + if cn then + if co then (t, Unchanged) + else build_subtype env visited loops (not posi) level t + else + if co then build_subtype env visited loops posi level t + else (newvar(), Changed)) + decl.type_variance tl + in + let c = collect tl' in + if c > Unchanged then (newconstr p (List.map fst tl'), c) + else (t, Unchanged) + with Not_found -> + (t, Unchanged) + end + | Tvariant row -> + let row = row_repr row in + if memq_warn t visited || not (static_row row) then (t, Unchanged) else + let level' = pred_enlarge level in + let visited = + t :: if level' < level then [] else filter_visited visited in + let fields = filter_row_fields false row.row_fields in + let fields = + List.map + (fun (l,f as orig) -> match row_field_repr f with + Rpresent None -> + if posi then + (l, Reither(true, [], false, ref None)), Unchanged + else + orig, Unchanged + | Rpresent(Some t) -> + let (t', c) = build_subtype env visited loops posi level' t in + let f = + if posi && level > 0 + then Reither(false, [t'], false, ref None) + else Rpresent(Some t') + in (l, f), c + | _ -> assert false) + fields + in + let c = collect fields in + let row = + { row_fields = List.map fst fields; row_more = newvar(); + row_bound = (); row_closed = posi; row_fixed = false; + row_name = if c > Unchanged then None else row.row_name } + in + (newty (Tvariant row), Changed) + | Tobject (t1, _) -> + if memq_warn t visited || opened_object t1 then (t, Unchanged) else + let level' = pred_enlarge level in + let visited = + t :: if level' < level then [] else filter_visited visited in + let (t1', c) = build_subtype env visited loops posi level' t1 in + if c > Unchanged then (newty (Tobject (t1', ref None)), c) + else (t, Unchanged) + | Tfield(s, _, t1, t2) (* Always present *) -> + let (t1', c1) = build_subtype env visited loops posi level t1 in + let (t2', c2) = build_subtype env visited loops posi level t2 in + let c = max c1 c2 in + if c > Unchanged then (newty (Tfield(s, Fpresent, t1', t2')), c) + else (t, Unchanged) + | Tnil -> + if 
posi then + let v = newvar () in + (v, Changed) + else begin + warn := true; + (t, Unchanged) + end + | Tsubst _ | Tlink _ -> + assert false + | Tpoly(t1, tl) -> + let (t1', c) = build_subtype env visited loops posi level t1 in + if c > Unchanged then (newty (Tpoly(t1', tl)), c) + else (t, Unchanged) + | Tunivar _ | Tpackage _ -> + (t, Unchanged) + +let enlarge_type env ty = + warn := false; + (* [level = 4] allows 2 expansions involving objects/variants *) + let (ty', _) = build_subtype env [] [] true 4 ty in + (ty', !warn) + +(**** Check whether a type is a subtype of another type. ****) + +(* + During the traversal, a trace of visited types is maintained. It + is printed in case of error. + Constraints (pairs of types that must be equals) are accumulated + rather than being enforced straight. Indeed, the result would + otherwise depend on the order in which these constraints are + enforced. + A function enforcing these constraints is returned. That way, type + variables can be bound to their actual values before this function + is called (see Typecore). + Only well-defined abbreviations are expanded (hence the tests + [generic_abbrev ...]). +*) + +let subtypes = TypePairs.create 17 + +let subtype_error env trace = + raise (Subtype (expand_trace env (List.rev trace), [])) + +let rec subtype_rec env trace t1 t2 cstrs = + let t1 = repr t1 in + let t2 = repr t2 in + if t1 == t2 then cstrs else + + begin try + TypePairs.find subtypes (t1, t2); + cstrs + with Not_found -> + TypePairs.add subtypes (t1, t2) (); + match (t1.desc, t2.desc) with + (Tvar _, _) | (_, Tvar _) -> + (trace, t1, t2, !univar_pairs)::cstrs + | (Tarrow(l1, t1, u1, _), Tarrow(l2, t2, u2, _)) when l1 = l2 + || !Clflags.classic && not (is_optional l1 || is_optional l2) -> + let cstrs = subtype_rec env ((t2, t1)::trace) t2 t1 cstrs in + subtype_rec env ((u1, u2)::trace) u1 u2 cstrs + | (Ttuple tl1, Ttuple tl2) -> + subtype_list env trace tl1 tl2 cstrs + | (Tconstr(p1, [], _), Tconstr(p2, [], _)) when Path.same p1 p2 -> + cstrs + | (Tconstr(p1, _tl1, _abbrev1), _) + when generic_abbrev env p1 && safe_abbrev env t1 -> + subtype_rec env trace (expand_abbrev env t1) t2 cstrs + | (_, Tconstr(p2, _tl2, _abbrev2)) + when generic_abbrev env p2 && safe_abbrev env t2 -> + subtype_rec env trace t1 (expand_abbrev env t2) cstrs + | (Tconstr(p1, tl1, _), Tconstr(p2, tl2, _)) when Path.same p1 p2 -> + begin try + let decl = Env.find_type p1 env in + List.fold_left2 + (fun cstrs v (t1, t2) -> + let (co, cn) = Variance.get_upper v in + if co then + if cn then + (trace, newty2 t1.level (Ttuple[t1]), + newty2 t2.level (Ttuple[t2]), !univar_pairs) :: cstrs + else subtype_rec env ((t1, t2)::trace) t1 t2 cstrs + else + if cn then subtype_rec env ((t2, t1)::trace) t2 t1 cstrs + else cstrs) + cstrs decl.type_variance (List.combine tl1 tl2) + with Not_found -> + (trace, t1, t2, !univar_pairs)::cstrs + end + | (Tconstr(p1, _, _), _) when generic_private_abbrev env p1 -> + subtype_rec env trace (expand_abbrev_opt env t1) t2 cstrs +(* | (_, Tconstr(p2, _, _)) when generic_private_abbrev false env p2 -> + subtype_rec env trace t1 (expand_abbrev_opt env t2) cstrs *) + | (Tobject (f1, _), Tobject (f2, _)) + when is_Tvar (object_row f1) && is_Tvar (object_row f2) -> + (* Same row variable implies same object. 
*) + (trace, t1, t2, !univar_pairs)::cstrs + | (Tobject (f1, _), Tobject (f2, _)) -> + subtype_fields env trace f1 f2 cstrs + | (Tvariant row1, Tvariant row2) -> + begin try + subtype_row env trace row1 row2 cstrs + with Exit -> + (trace, t1, t2, !univar_pairs)::cstrs + end + | (Tpoly (u1, []), Tpoly (u2, [])) -> + subtype_rec env trace u1 u2 cstrs + | (Tpoly (u1, tl1), Tpoly (u2, [])) -> + let _, u1' = instance_poly false tl1 u1 in + subtype_rec env trace u1' u2 cstrs + | (Tpoly (u1, tl1), Tpoly (u2,tl2)) -> + begin try + enter_poly env univar_pairs u1 tl1 u2 tl2 + (fun t1 t2 -> subtype_rec env trace t1 t2 cstrs) + with Unify _ -> + (trace, t1, t2, !univar_pairs)::cstrs + end + | (Tpackage (p1, nl1, tl1), Tpackage (p2, nl2, tl2)) -> + begin try + let ntl1 = complete_type_list env nl2 t1.level (Mty_ident p1) nl1 tl1 + and ntl2 = complete_type_list env nl1 t2.level (Mty_ident p2) nl2 tl2 + ~allow_absent:true in + let cstrs' = + List.map + (fun (n2,t2) -> (trace, List.assoc n2 ntl1, t2, !univar_pairs)) + ntl2 + in + if eq_package_path env p1 p2 then cstrs' @ cstrs + else begin + (* need to check module subtyping *) + let snap = Btype.snapshot () in + try + List.iter (fun (_, t1, t2, _) -> unify env t1 t2) cstrs'; + if !package_subtype env p1 nl1 tl1 p2 nl2 tl2 + then (Btype.backtrack snap; cstrs' @ cstrs) + else raise (Unify []) + with Unify _ -> + Btype.backtrack snap; raise Not_found + end + with Not_found -> + (trace, t1, t2, !univar_pairs)::cstrs + end + | (_, _) -> + (trace, t1, t2, !univar_pairs)::cstrs + end + +and subtype_list env trace tl1 tl2 cstrs = + if List.length tl1 <> List.length tl2 then + subtype_error env trace; + List.fold_left2 + (fun cstrs t1 t2 -> subtype_rec env ((t1, t2)::trace) t1 t2 cstrs) + cstrs tl1 tl2 + +and subtype_fields env trace ty1 ty2 cstrs = + (* Assume that either rest1 or rest2 is not Tvar *) + let (fields1, rest1) = flatten_fields ty1 in + let (fields2, rest2) = flatten_fields ty2 in + let (pairs, miss1, miss2) = associate_fields fields1 fields2 in + let cstrs = + if rest2.desc = Tnil then cstrs else + if miss1 = [] then + subtype_rec env ((rest1, rest2)::trace) rest1 rest2 cstrs + else + (trace, build_fields (repr ty1).level miss1 rest1, rest2, + !univar_pairs) :: cstrs + in + let cstrs = + if miss2 = [] then cstrs else + (trace, rest1, build_fields (repr ty2).level miss2 (newvar ()), + !univar_pairs) :: cstrs + in + List.fold_left + (fun cstrs (_, _k1, t1, _k2, t2) -> + (* These fields are always present *) + subtype_rec env ((t1, t2)::trace) t1 t2 cstrs) + cstrs pairs + +and subtype_row env trace row1 row2 cstrs = + let row1 = row_repr row1 and row2 = row_repr row2 in + let r1, r2, pairs = + merge_row_fields row1.row_fields row2.row_fields in + let more1 = repr row1.row_more + and more2 = repr row2.row_more in + match more1.desc, more2.desc with + Tconstr(p1,_,_), Tconstr(p2,_,_) when Path.same p1 p2 -> + subtype_rec env ((more1,more2)::trace) more1 more2 cstrs + | (Tvar _|Tconstr _|Tnil), (Tvar _|Tconstr _|Tnil) + when row1.row_closed && r1 = [] -> + List.fold_left + (fun cstrs (_,f1,f2) -> + match row_field_repr f1, row_field_repr f2 with + (Rpresent None|Reither(true,_,_,_)), Rpresent None -> + cstrs + | Rpresent(Some t1), Rpresent(Some t2) -> + subtype_rec env ((t1, t2)::trace) t1 t2 cstrs + | Reither(false, t1::_, _, _), Rpresent(Some t2) -> + subtype_rec env ((t1, t2)::trace) t1 t2 cstrs + | Rabsent, _ -> cstrs + | _ -> raise Exit) + cstrs pairs + | Tunivar _, Tunivar _ + when row1.row_closed = row2.row_closed && r1 = [] && r2 = [] -> + let 
cstrs = + subtype_rec env ((more1,more2)::trace) more1 more2 cstrs in + List.fold_left + (fun cstrs (_,f1,f2) -> + match row_field_repr f1, row_field_repr f2 with + Rpresent None, Rpresent None + | Reither(true,[],_,_), Reither(true,[],_,_) + | Rabsent, Rabsent -> + cstrs + | Rpresent(Some t1), Rpresent(Some t2) + | Reither(false,[t1],_,_), Reither(false,[t2],_,_) -> + subtype_rec env ((t1, t2)::trace) t1 t2 cstrs + | _ -> raise Exit) + cstrs pairs + | _ -> + raise Exit + +let subtype env ty1 ty2 = + TypePairs.clear subtypes; + univar_pairs := []; + (* Build constraint set. *) + let cstrs = subtype_rec env [(ty1, ty2)] ty1 ty2 [] in + TypePairs.clear subtypes; + (* Enforce constraints. *) + function () -> + List.iter + (function (trace0, t1, t2, pairs) -> + try unify_pairs (ref env) t1 t2 pairs with Unify trace -> + raise (Subtype (expand_trace env (List.rev trace0), + List.tl (List.tl trace)))) + (List.rev cstrs) + + (*******************) + (* Miscellaneous *) + (*******************) + +(* Utility for printing. The resulting type is not used in computation. *) +let rec unalias_object ty = + let ty = repr ty in + match ty.desc with + Tfield (s, k, t1, t2) -> + newty2 ty.level (Tfield (s, k, t1, unalias_object t2)) + | Tvar _ | Tnil -> + newty2 ty.level ty.desc + | Tunivar _ -> + ty + | Tconstr _ -> + newvar2 ty.level + | _ -> + assert false + +let unalias ty = + let ty = repr ty in + match ty.desc with + Tvar _ | Tunivar _ -> + ty + | Tvariant row -> + let row = row_repr row in + let more = row.row_more in + newty2 ty.level + (Tvariant {row with row_more = newty2 more.level more.desc}) + | Tobject (ty, nm) -> + newty2 ty.level (Tobject (unalias_object ty, nm)) + | _ -> + newty2 ty.level ty.desc + +(* Return the arity (as for curried functions) of the given type. *) +let rec arity ty = + match (repr ty).desc with + Tarrow(_, _t1, t2, _) -> 1 + arity t2 + | _ -> 0 + +(* Check whether an abbreviation expands to itself. *) +let cyclic_abbrev env id ty = + let rec check_cycle seen ty = + let ty = repr ty in + match ty.desc with + Tconstr (p, _tl, _abbrev) -> + p = Path.Pident id || List.memq ty seen || + begin try + check_cycle (ty :: seen) (expand_abbrev_opt env ty) + with + Cannot_expand -> false + | Unify _ -> true + end + | _ -> + false + in check_cycle [] ty + +(* Check for non-generalizable type variables *) +exception Non_closed0 +let visited = ref TypeSet.empty + +let rec closed_schema_rec env ty = + let ty = repr ty in + if TypeSet.mem ty !visited then () else begin + visited := TypeSet.add ty !visited; + match ty.desc with + Tvar _ when ty.level <> generic_level -> + raise Non_closed0 + | Tconstr _ -> + let old = !visited in + begin try iter_type_expr (closed_schema_rec env) ty + with Non_closed0 -> try + visited := old; + closed_schema_rec env (try_expand_head try_expand_safe env ty) + with Cannot_expand -> + raise Non_closed0 + end + | Tfield(_, kind, t1, t2) -> + if field_kind_repr kind = Fpresent then + closed_schema_rec env t1; + closed_schema_rec env t2 + | Tvariant row -> + let row = row_repr row in + iter_row (closed_schema_rec env) row; + if not (static_row row) then closed_schema_rec env row.row_more + | _ -> + iter_type_expr (closed_schema_rec env) ty + end + +(* Return whether all variables of type [ty] are generic. *) +let closed_schema env ty = + visited := TypeSet.empty; + try + closed_schema_rec env ty; + visited := TypeSet.empty; + true + with Non_closed0 -> + visited := TypeSet.empty; + false + +(* Normalize a type before printing, saving... 
*) +(* Cannot use mark_type because deep_occur uses it too *) +let rec normalize_type_rec env visited ty = + let ty = repr ty in + if not (TypeSet.mem ty !visited) then begin + visited := TypeSet.add ty !visited; + let tm = row_of_type ty in + begin if not (is_Tconstr ty) && is_constr_row ~allow_ident:false tm then + match tm.desc with (* PR#7348 *) + Tconstr (Path.Pdot(m,i,pos), tl, _abbrev) -> + let i' = String.sub i 0 (String.length i - 4) in + log_type ty; + ty.desc <- Tconstr(Path.Pdot(m,i',pos), tl, ref Mnil) + | _ -> assert false + else match ty.desc with + | Tvariant row -> + let row = row_repr row in + let fields = List.map + (fun (l,f0) -> + let f = row_field_repr f0 in l, + match f with Reither(b, ty::(_::_ as tyl), m, e) -> + let tyl' = + List.fold_left + (fun tyl ty -> + if List.exists (fun ty' -> equal env false [ty] [ty']) tyl + then tyl else ty::tyl) + [ty] tyl + in + if f != f0 || List.length tyl' < List.length tyl then + Reither(b, List.rev tyl', m, e) + else f + | _ -> f) + row.row_fields in + let fields = + List.sort (fun (p,_) (q,_) -> compare p q) + (List.filter (fun (_,fi) -> fi <> Rabsent) fields) in + log_type ty; + ty.desc <- Tvariant {row with row_fields = fields} + | Tobject (fi, nm) -> + begin match !nm with + | None -> () + | Some (n, v :: l) -> + if deep_occur ty (newgenty (Ttuple l)) then + (* The abbreviation may be hiding something, so remove it *) + set_name nm None + else let v' = repr v in + begin match v'.desc with + | Tvar _ | Tunivar _ -> + if v' != v then set_name nm (Some (n, v' :: l)) + | Tnil -> + log_type ty; ty.desc <- Tconstr (n, l, ref Mnil) + | _ -> set_name nm None + end + | _ -> + fatal_error "Ctype.normalize_type_rec" + end; + let fi = repr fi in + if fi.level < lowest_level then () else + let fields, row = flatten_fields fi in + let fi' = build_fields fi.level fields row in + log_type ty; fi.desc <- fi'.desc + | _ -> () + end; + iter_type_expr (normalize_type_rec env visited) ty + end + +let normalize_type env ty = + normalize_type_rec env (ref TypeSet.empty) ty + + + (*************************) + (* Remove dependencies *) + (*************************) + + +(* + Variables are left unchanged. Other type nodes are duplicated, with + levels set to generic level. + We cannot use Tsubst here, because unification may be called by + expand_abbrev. +*) + +let nondep_hash = TypeHash.create 47 +let nondep_variants = TypeHash.create 17 +let clear_hash () = + TypeHash.clear nondep_hash; TypeHash.clear nondep_variants + +let rec nondep_type_rec env id ty = + match ty.desc with + Tvar _ | Tunivar _ -> ty + | Tlink ty -> nondep_type_rec env id ty + | _ -> try TypeHash.find nondep_hash ty + with Not_found -> + let ty' = newgenvar () in (* Stub *) + TypeHash.add nondep_hash ty ty'; + ty'.desc <- + begin match ty.desc with + | Tconstr(p, tl, _abbrev) -> + if Path.isfree id p then + begin try + Tlink (nondep_type_rec env id + (expand_abbrev env (newty2 ty.level ty.desc))) + (* + The [Tlink] is important. The expanded type may be a + variable, or may not be completely copied yet + (recursive type), so one cannot just take its + description. 
+ *) + with Cannot_expand | Unify _ -> + raise Not_found + end + else + Tconstr(p, List.map (nondep_type_rec env id) tl, ref Mnil) + | Tpackage(p, nl, tl) when Path.isfree id p -> + let p' = normalize_package_path env p in + if Path.isfree id p' then raise Not_found; + Tpackage (p', nl, List.map (nondep_type_rec env id) tl) + | Tobject (t1, name) -> + Tobject (nondep_type_rec env id t1, + ref (match !name with + None -> None + | Some (p, tl) -> + if Path.isfree id p then None + else Some (p, List.map (nondep_type_rec env id) tl))) + | Tvariant row -> + let row = row_repr row in + let more = repr row.row_more in + (* We must keep sharing according to the row variable *) + begin try + let ty2 = TypeHash.find nondep_variants more in + (* This variant type has been already copied *) + TypeHash.add nondep_hash ty ty2; + Tlink ty2 + with Not_found -> + (* Register new type first for recursion *) + TypeHash.add nondep_variants more ty'; + let static = static_row row in + let more' = if static then newgenty Tnil else more in + (* Return a new copy *) + let row = + copy_row (nondep_type_rec env id) true row true more' in + match row.row_name with + Some (p, _tl) when Path.isfree id p -> + Tvariant {row with row_name = None} + | _ -> Tvariant row + end + | _ -> copy_type_desc (nondep_type_rec env id) ty.desc + end; + ty' + +let nondep_type env id ty = + try + let ty' = nondep_type_rec env id ty in + clear_hash (); + ty' + with Not_found -> + clear_hash (); + raise Not_found + +let () = nondep_type' := nondep_type + +let unroll_abbrev id tl ty = + let ty = repr ty and path = Path.Pident id in + if is_Tvar ty || (List.exists (deep_occur ty) tl) + || is_object_type path then + ty + else + let ty' = newty2 ty.level ty.desc in + link_type ty (newty2 ty.level (Tconstr (path, tl, ref Mnil))); + ty' + +(* Preserve sharing inside type declarations. *) +let nondep_type_decl env mid id is_covariant decl = + try + let params = List.map (nondep_type_rec env mid) decl.type_params in + let tk = + try map_kind (nondep_type_rec env mid) decl.type_kind + with Not_found when is_covariant -> Type_abstract + and tm = + try match decl.type_manifest with + None -> None + | Some ty -> + Some (unroll_abbrev id params (nondep_type_rec env mid ty)) + with Not_found when is_covariant -> + None + in + clear_hash (); + let priv = + match tm with + | Some ty when Btype.has_constr_row ty -> Private + | _ -> decl.type_private + in + { type_params = params; + type_arity = decl.type_arity; + type_kind = tk; + type_manifest = tm; + type_private = priv; + type_variance = decl.type_variance; + type_newtype_level = None; + type_loc = decl.type_loc; + type_attributes = decl.type_attributes; + type_immediate = decl.type_immediate; + type_unboxed = decl.type_unboxed; + } + with Not_found -> + clear_hash (); + raise Not_found + +(* Preserve sharing inside extension constructors. 
*) +let nondep_extension_constructor env mid ext = + try + let type_path, type_params = + if Path.isfree mid ext.ext_type_path then + begin + let ty = + newgenty (Tconstr(ext.ext_type_path, ext.ext_type_params, ref Mnil)) + in + let ty' = nondep_type_rec env mid ty in + match (repr ty').desc with + Tconstr(p, tl, _) -> p, tl + | _ -> raise Not_found + end + else + let type_params = + List.map (nondep_type_rec env mid) ext.ext_type_params + in + ext.ext_type_path, type_params + in + let args = map_type_expr_cstr_args (nondep_type_rec env mid) ext.ext_args in + let ret_type = may_map (nondep_type_rec env mid) ext.ext_ret_type in + clear_hash (); + { ext_type_path = type_path; + ext_type_params = type_params; + ext_args = args; + ext_ret_type = ret_type; + ext_private = ext.ext_private; + ext_attributes = ext.ext_attributes; + ext_loc = ext.ext_loc; + } + with Not_found -> + clear_hash (); + raise Not_found + + +(* Preserve sharing inside class types. *) +let nondep_class_signature env id sign = + { csig_self = nondep_type_rec env id sign.csig_self; + csig_vars = + Vars.map (function (m, v, t) -> (m, v, nondep_type_rec env id t)) + sign.csig_vars; + csig_concr = sign.csig_concr; + csig_inher = + List.map (fun (p,tl) -> (p, List.map (nondep_type_rec env id) tl)) + sign.csig_inher } + +let rec nondep_class_type env id = + function + Cty_constr (p, _, cty) when Path.isfree id p -> + nondep_class_type env id cty + | Cty_constr (p, tyl, cty) -> + Cty_constr (p, List.map (nondep_type_rec env id) tyl, + nondep_class_type env id cty) + | Cty_signature sign -> + Cty_signature (nondep_class_signature env id sign) + | Cty_arrow (l, ty, cty) -> + Cty_arrow (l, nondep_type_rec env id ty, nondep_class_type env id cty) + +let nondep_class_declaration env id decl = + assert (not (Path.isfree id decl.cty_path)); + let decl = + { cty_params = List.map (nondep_type_rec env id) decl.cty_params; + cty_variance = decl.cty_variance; + cty_type = nondep_class_type env id decl.cty_type; + cty_path = decl.cty_path; + cty_new = + begin match decl.cty_new with + None -> None + | Some ty -> Some (nondep_type_rec env id ty) + end; + cty_loc = decl.cty_loc; + cty_attributes = decl.cty_attributes; + } + in + clear_hash (); + decl + +let nondep_cltype_declaration env id decl = + assert (not (Path.isfree id decl.clty_path)); + let decl = + { clty_params = List.map (nondep_type_rec env id) decl.clty_params; + clty_variance = decl.clty_variance; + clty_type = nondep_class_type env id decl.clty_type; + clty_path = decl.clty_path; + clty_loc = decl.clty_loc; + clty_attributes = decl.clty_attributes; + } + in + clear_hash (); + decl + +(* collapse conjunctive types in class parameters *) +let rec collapse_conj env visited ty = + let ty = repr ty in + if List.memq ty visited then () else + let visited = ty :: visited in + match ty.desc with + Tvariant row -> + let row = row_repr row in + List.iter + (fun (_l,fi) -> + match row_field_repr fi with + Reither (c, t1::(_::_ as tl), m, e) -> + List.iter (unify env t1) tl; + set_row_field e (Reither (c, [t1], m, ref None)) + | _ -> + ()) + row.row_fields; + iter_row (collapse_conj env visited) row + | _ -> + iter_type_expr (collapse_conj env visited) ty + +let collapse_conj_params env params = + List.iter (collapse_conj env []) params + +let same_constr env t1 t2 = + let t1 = expand_head env t1 in + let t2 = expand_head env t2 in + match t1.desc, t2.desc with + | Tconstr (p1, _, _), Tconstr (p2, _, _) -> Path.same p1 p2 + | _ -> false + +let () = + Env.same_constr := same_constr + +let 
maybe_pointer_type env typ = + match (repr typ).desc with + | Tconstr(p, _args, _abbrev) -> + begin try + let type_decl = Env.find_type p env in + not type_decl.type_immediate + with Not_found -> true + (* This can happen due to e.g. missing -I options, + causing some .cmi files to be unavailable. + Maybe we should emit a warning. *) + end + | Tvariant row -> + let row = Btype.row_repr row in + (* if all labels are devoid of arguments, not a pointer *) + not row.row_closed + || List.exists + (function + | _, (Rpresent (Some _) | Reither (false, _, _, _)) -> true + | _ -> false) + row.row_fields + | _ -> true diff --git a/res_syntax/compiler-libs-406/ctype.mli b/res_syntax/compiler-libs-406/ctype.mli new file mode 100644 index 0000000000..00daacd53d --- /dev/null +++ b/res_syntax/compiler-libs-406/ctype.mli @@ -0,0 +1,292 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Operations on core types *) + +open Asttypes +open Types + +exception Unify of (type_expr * type_expr) list +exception Tags of label * label +exception Subtype of + (type_expr * type_expr) list * (type_expr * type_expr) list +exception Cannot_expand +exception Cannot_apply +exception Recursive_abbrev +exception Unification_recursive_abbrev of (type_expr * type_expr) list + +val init_def: int -> unit + (* Set the initial variable level *) +val begin_def: unit -> unit + (* Raise the variable level by one at the beginning of a definition. *) +val end_def: unit -> unit + (* Lower the variable level by one at the end of a definition *) +val begin_class_def: unit -> unit +val raise_nongen_level: unit -> unit +val reset_global_level: unit -> unit + (* Reset the global level before typing an expression *) +val increase_global_level: unit -> int +val restore_global_level: int -> unit + (* This pair of functions is only used in Typetexp *) +type levels = + { current_level: int; nongen_level: int; global_level: int; + saved_level: (int * int) list; } +val save_levels: unit -> levels +val set_levels: levels -> unit + +val newty: type_desc -> type_expr +val newvar: ?name:string -> unit -> type_expr +val newvar2: ?name:string -> int -> type_expr + (* Return a fresh variable *) +val new_global_var: ?name:string -> unit -> type_expr + (* Return a fresh variable, bound at toplevel + (as type variables ['a] in type constraints). *) +val newobj: type_expr -> type_expr +val newconstr: Path.t -> type_expr list -> type_expr +val none: type_expr + (* A dummy type expression *) + +val repr: type_expr -> type_expr + (* Return the canonical representative of a type. 
*) + +val object_fields: type_expr -> type_expr +val flatten_fields: + type_expr -> (string * field_kind * type_expr) list * type_expr + (* Transform a field type into a list of pairs label-type *) + (* The fields are sorted *) +val associate_fields: + (string * field_kind * type_expr) list -> + (string * field_kind * type_expr) list -> + (string * field_kind * type_expr * field_kind * type_expr) list * + (string * field_kind * type_expr) list * + (string * field_kind * type_expr) list +val opened_object: type_expr -> bool +val close_object: type_expr -> unit +val row_variable: type_expr -> type_expr + (* Return the row variable of an open object type *) +val set_object_name: + Ident.t -> type_expr -> type_expr list -> type_expr -> unit +val remove_object_name: type_expr -> unit +val hide_private_methods: type_expr -> unit +val find_cltype_for_path: Env.t -> Path.t -> type_declaration * type_expr +val lid_of_path: ?hash:string -> Path.t -> Longident.t + +val sort_row_fields: (label * row_field) list -> (label * row_field) list +val merge_row_fields: + (label * row_field) list -> (label * row_field) list -> + (label * row_field) list * (label * row_field) list * + (label * row_field * row_field) list +val filter_row_fields: + bool -> (label * row_field) list -> (label * row_field) list + +val generalize: type_expr -> unit + (* Generalize in-place the given type *) +val generalize_expansive: Env.t -> type_expr -> unit + (* Generalize the covariant part of a type, making + contravariant branches non-generalizable *) +val generalize_global: type_expr -> unit + (* Generalize the structure of a type, lowering variables + to !global_level *) +val generalize_structure: type_expr -> unit + (* Same, but variables are only lowered to !current_level *) +val generalize_spine: type_expr -> unit + (* Special function to generalize a method during inference *) +val correct_levels: type_expr -> type_expr + (* Returns a copy with decreasing levels *) +val limited_generalize: type_expr -> type_expr -> unit + (* Only generalize some part of the type + Make the remaining of the type non-generalizable *) + +val instance: ?partial:bool -> Env.t -> type_expr -> type_expr + (* Take an instance of a type scheme *) + (* partial=None -> normal + partial=false -> newvar() for non generic subterms + partial=true -> newty2 ty.level Tvar for non generic subterms *) +val instance_def: type_expr -> type_expr + (* use defaults *) +val generic_instance: Env.t -> type_expr -> type_expr + (* Same as instance, but new nodes at generic_level *) +val instance_list: Env.t -> type_expr list -> type_expr list + (* Take an instance of a list of type schemes *) +val instance_constructor: + ?in_pattern:Env.t ref * int -> + constructor_description -> type_expr list * type_expr + (* Same, for a constructor *) +val instance_parameterized_type: + ?keep_names:bool -> + type_expr list -> type_expr -> type_expr list * type_expr +val instance_parameterized_type_2: + type_expr list -> type_expr list -> type_expr -> + type_expr list * type_expr list * type_expr +val instance_declaration: type_declaration -> type_declaration +val instance_class: + type_expr list -> class_type -> type_expr list * class_type +val instance_poly: + ?keep_names:bool -> + bool -> type_expr list -> type_expr -> type_expr list * type_expr + (* Take an instance of a type scheme containing free univars *) +val instance_label: + bool -> label_description -> type_expr list * type_expr * type_expr + (* Same, for a label *) +val apply: + Env.t -> type_expr list -> type_expr -> 
type_expr list -> type_expr + (* [apply [p1...pN] t [a1...aN]] match the arguments [ai] to + the parameters [pi] and returns the corresponding instance of + [t]. Exception [Cannot_apply] is raised in case of failure. *) + +val expand_head_once: Env.t -> type_expr -> type_expr +val expand_head: Env.t -> type_expr -> type_expr +val try_expand_once_opt: Env.t -> type_expr -> type_expr +val expand_head_opt: Env.t -> type_expr -> type_expr +(** The compiler's own version of [expand_head] necessary for type-based + optimisations. *) + +val full_expand: Env.t -> type_expr -> type_expr +val extract_concrete_typedecl: + Env.t -> type_expr -> Path.t * Path.t * type_declaration + (* Return the original path of the types, and the first concrete + type declaration found expanding it. + Raise [Not_found] if none appears or not a type constructor. *) + +val enforce_constraints: Env.t -> type_expr -> unit + +val unify: Env.t -> type_expr -> type_expr -> unit + (* Unify the two types given. Raise [Unify] if not possible. *) +val unify_gadt: newtype_level:int -> Env.t ref -> type_expr -> type_expr -> unit + (* Unify the two types given and update the environment with the + local constraints. Raise [Unify] if not possible. *) +val unify_var: Env.t -> type_expr -> type_expr -> unit + (* Same as [unify], but allow free univars when first type + is a variable. *) +val with_passive_variants: ('a -> 'b) -> ('a -> 'b) + (* Call [f] in passive_variants mode, for exhaustiveness check. *) +val filter_arrow: Env.t -> type_expr -> arg_label -> type_expr * type_expr + (* A special case of unification (with l:'a -> 'b). *) +val filter_method: Env.t -> string -> private_flag -> type_expr -> type_expr + (* A special case of unification (with {m : 'a; 'b}). *) +val check_filter_method: Env.t -> string -> private_flag -> type_expr -> unit + (* A special case of unification (with {m : 'a; 'b}), returning unit. *) +val occur_in: Env.t -> type_expr -> type_expr -> bool +val deep_occur: type_expr -> type_expr -> bool +val filter_self_method: + Env.t -> string -> private_flag -> (Ident.t * type_expr) Meths.t ref -> + type_expr -> Ident.t * type_expr +val moregeneral: Env.t -> bool -> type_expr -> type_expr -> bool + (* Check if the first type scheme is more general than the second. *) + +val rigidify: type_expr -> type_expr list + (* "Rigidify" a type and return its type variable *) +val all_distinct_vars: Env.t -> type_expr list -> bool + (* Check those types are all distinct type variables *) +val matches: Env.t -> type_expr -> type_expr -> bool + (* Same as [moregeneral false], implemented using the two above + functions and backtracking. 
Ignore levels *) + +type class_match_failure = + CM_Virtual_class + | CM_Parameter_arity_mismatch of int * int + | CM_Type_parameter_mismatch of Env.t * (type_expr * type_expr) list + | CM_Class_type_mismatch of Env.t * class_type * class_type + | CM_Parameter_mismatch of Env.t * (type_expr * type_expr) list + | CM_Val_type_mismatch of string * Env.t * (type_expr * type_expr) list + | CM_Meth_type_mismatch of string * Env.t * (type_expr * type_expr) list + | CM_Non_mutable_value of string + | CM_Non_concrete_value of string + | CM_Missing_value of string + | CM_Missing_method of string + | CM_Hide_public of string + | CM_Hide_virtual of string * string + | CM_Public_method of string + | CM_Private_method of string + | CM_Virtual_method of string +val match_class_types: + ?trace:bool -> Env.t -> class_type -> class_type -> class_match_failure list + (* Check if the first class type is more general than the second. *) +val equal: Env.t -> bool -> type_expr list -> type_expr list -> bool + (* [equal env [x1...xn] tau [y1...yn] sigma] + checks whether the parameterized types + [/\x1.../\xn.tau] and [/\y1.../\yn.sigma] are equivalent. *) +val match_class_declarations: + Env.t -> type_expr list -> class_type -> type_expr list -> + class_type -> class_match_failure list + (* Check if the first class type is more general than the second. *) + +val enlarge_type: Env.t -> type_expr -> type_expr * bool + (* Make a type larger, flag is true if some pruning had to be done *) +val subtype: Env.t -> type_expr -> type_expr -> unit -> unit + (* [subtype env t1 t2] checks that [t1] is a subtype of [t2]. + It accumulates the constraints the type variables must + enforce and returns a function that enforces this + constraints. *) + +val nondep_type: Env.t -> Ident.t -> type_expr -> type_expr + (* Return a type equivalent to the given type but without + references to the given module identifier. Raise [Not_found] + if no such type exists. *) +val nondep_type_decl: + Env.t -> Ident.t -> Ident.t -> bool -> type_declaration -> + type_declaration + (* Same for type declarations. *) +val nondep_extension_constructor: + Env.t -> Ident.t -> extension_constructor -> + extension_constructor + (* Same for extension constructor *) +val nondep_class_declaration: + Env.t -> Ident.t -> class_declaration -> class_declaration + (* Same for class declarations. *) +val nondep_cltype_declaration: + Env.t -> Ident.t -> class_type_declaration -> class_type_declaration + (* Same for class type declarations. 
*) +(*val correct_abbrev: Env.t -> Path.t -> type_expr list -> type_expr -> unit*) +val cyclic_abbrev: Env.t -> Ident.t -> type_expr -> bool +val is_contractive: Env.t -> Path.t -> bool +val normalize_type: Env.t -> type_expr -> unit + +val closed_schema: Env.t -> type_expr -> bool + (* Check whether the given type scheme contains no non-generic + type variables *) + +val free_variables: ?env:Env.t -> type_expr -> type_expr list + (* If env present, then check for incomplete definitions too *) +val closed_type_decl: type_declaration -> type_expr option +val closed_extension_constructor: extension_constructor -> type_expr option +type closed_class_failure = + CC_Method of type_expr * bool * string * type_expr + | CC_Value of type_expr * bool * string * type_expr +val closed_class: + type_expr list -> class_signature -> closed_class_failure option + (* Check whether all type variables are bound *) + +val unalias: type_expr -> type_expr +val signature_of_class_type: class_type -> class_signature +val self_type: class_type -> type_expr +val class_type_arity: class_type -> int +val arity: type_expr -> int + (* Return the arity (as for curried functions) of the given type. *) + +val collapse_conj_params: Env.t -> type_expr list -> unit + (* Collapse conjunctive types in class parameters *) + +val get_current_level: unit -> int +val wrap_trace_gadt_instances: Env.t -> ('a -> 'b) -> 'a -> 'b +val reset_reified_var_counter: unit -> unit + +val maybe_pointer_type : Env.t -> type_expr -> bool + (* True if type is possibly pointer, false if definitely not a pointer *) + +(* Stubs *) +val package_subtype : + (Env.t -> Path.t -> Longident.t list -> type_expr list -> + Path.t -> Longident.t list -> type_expr list -> bool) ref diff --git a/res_syntax/compiler-libs-406/datarepr.ml b/res_syntax/compiler-libs-406/datarepr.ml new file mode 100644 index 0000000000..bce6ff212f --- /dev/null +++ b/res_syntax/compiler-libs-406/datarepr.ml @@ -0,0 +1,250 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Compute constructor and label descriptions from type declarations, + determining their representation. *) + +open Asttypes +open Types +open Btype + +(* Simplified version of Ctype.free_vars *) +let free_vars ?(param=false) ty = + let ret = ref TypeSet.empty in + let rec loop ty = + let ty = repr ty in + if ty.level >= lowest_level then begin + ty.level <- pivot_level - ty.level; + match ty.desc with + | Tvar _ -> + ret := TypeSet.add ty !ret + | Tvariant row -> + let row = row_repr row in + iter_row loop row; + if not (static_row row) then begin + match row.row_more.desc with + | Tvar _ when param -> ret := TypeSet.add ty !ret + | _ -> loop row.row_more + end + (* XXX: What about Tobject ? 
*) + | _ -> + iter_type_expr loop ty + end + in + loop ty; + unmark_type ty; + !ret + +let newgenconstr path tyl = newgenty (Tconstr (path, tyl, ref Mnil)) + +let constructor_existentials cd_args cd_res = + let tyl = + match cd_args with + | Cstr_tuple l -> l + | Cstr_record l -> List.map (fun l -> l.ld_type) l + in + let existentials = + match cd_res with + | None -> [] + | Some type_ret -> + let arg_vars_set = free_vars (newgenty (Ttuple tyl)) in + let res_vars = free_vars type_ret in + TypeSet.elements (TypeSet.diff arg_vars_set res_vars) + in + (tyl, existentials) + +let constructor_args priv cd_args cd_res path rep = + let tyl, existentials = constructor_existentials cd_args cd_res in + match cd_args with + | Cstr_tuple l -> existentials, l, None + | Cstr_record lbls -> + let arg_vars_set = free_vars ~param:true (newgenty (Ttuple tyl)) in + let type_params = TypeSet.elements arg_vars_set in + let type_unboxed = + match rep with + | Record_unboxed _ -> unboxed_true_default_false + | _ -> unboxed_false_default_false + in + let tdecl = + { + type_params; + type_arity = List.length type_params; + type_kind = Type_record (lbls, rep); + type_private = priv; + type_manifest = None; + type_variance = List.map (fun _ -> Variance.full) type_params; + type_newtype_level = None; + type_loc = Location.none; + type_attributes = []; + type_immediate = false; + type_unboxed; + } + in + existentials, + [ newgenconstr path type_params ], + Some tdecl + +let constructor_descrs ty_path decl cstrs = + let ty_res = newgenconstr ty_path decl.type_params in + let num_consts = ref 0 and num_nonconsts = ref 0 and num_normal = ref 0 in + List.iter + (fun {cd_args; cd_res; _} -> + if cd_args = Cstr_tuple [] then incr num_consts else incr num_nonconsts; + if cd_res = None then incr num_normal) + cstrs; + let rec describe_constructors idx_const idx_nonconst = function + [] -> [] + | {cd_id; cd_args; cd_res; cd_loc; cd_attributes} :: rem -> + let ty_res = + match cd_res with + | Some ty_res' -> ty_res' + | None -> ty_res + in + let (tag, descr_rem) = + match cd_args with + | _ when decl.type_unboxed.unboxed -> + assert (rem = []); + (Cstr_unboxed, []) + | Cstr_tuple [] -> (Cstr_constant idx_const, + describe_constructors (idx_const+1) idx_nonconst rem) + | _ -> (Cstr_block idx_nonconst, + describe_constructors idx_const (idx_nonconst+1) rem) in + let cstr_name = Ident.name cd_id in + let existentials, cstr_args, cstr_inlined = + let representation = + if decl.type_unboxed.unboxed + then Record_unboxed true + else Record_inlined idx_nonconst + in + constructor_args decl.type_private cd_args cd_res + (Path.Pdot (ty_path, cstr_name, Path.nopos)) representation + in + let cstr = + { cstr_name; + cstr_res = ty_res; + cstr_existentials = existentials; + cstr_args; + cstr_arity = List.length cstr_args; + cstr_tag = tag; + cstr_consts = !num_consts; + cstr_nonconsts = !num_nonconsts; + cstr_normal = !num_normal; + cstr_private = decl.type_private; + cstr_generalized = cd_res <> None; + cstr_loc = cd_loc; + cstr_attributes = cd_attributes; + cstr_inlined; + } in + (cd_id, cstr) :: descr_rem in + describe_constructors 0 0 cstrs + +let extension_descr path_ext ext = + let ty_res = + match ext.ext_ret_type with + Some type_ret -> type_ret + | None -> newgenconstr ext.ext_type_path ext.ext_type_params + in + let existentials, cstr_args, cstr_inlined = + constructor_args ext.ext_private ext.ext_args ext.ext_ret_type + path_ext Record_extension + in + { cstr_name = Path.last path_ext; + cstr_res = ty_res; + cstr_existentials = 
existentials; + cstr_args; + cstr_arity = List.length cstr_args; + cstr_tag = Cstr_extension(path_ext, cstr_args = []); + cstr_consts = -1; + cstr_nonconsts = -1; + cstr_private = ext.ext_private; + cstr_normal = -1; + cstr_generalized = ext.ext_ret_type <> None; + cstr_loc = ext.ext_loc; + cstr_attributes = ext.ext_attributes; + cstr_inlined; + } + +let none = {desc = Ttuple []; level = -1; id = -1} + (* Clearly ill-formed type *) +let dummy_label = + { lbl_name = ""; lbl_res = none; lbl_arg = none; lbl_mut = Immutable; + lbl_pos = (-1); lbl_all = [||]; lbl_repres = Record_regular; + lbl_private = Public; + lbl_loc = Location.none; + lbl_attributes = []; + } + +let label_descrs ty_res lbls repres priv = + let all_labels = Array.make (List.length lbls) dummy_label in + let rec describe_labels num = function + [] -> [] + | l :: rest -> + let lbl = + { lbl_name = Ident.name l.ld_id; + lbl_res = ty_res; + lbl_arg = l.ld_type; + lbl_mut = l.ld_mutable; + lbl_pos = num; + lbl_all = all_labels; + lbl_repres = repres; + lbl_private = priv; + lbl_loc = l.ld_loc; + lbl_attributes = l.ld_attributes; + } in + all_labels.(num) <- lbl; + (l.ld_id, lbl) :: describe_labels (num+1) rest in + describe_labels 0 lbls + +exception Constr_not_found + +let rec find_constr tag num_const num_nonconst = function + [] -> + raise Constr_not_found + | {cd_args = Cstr_tuple []; _} as c :: rem -> + if tag = Cstr_constant num_const + then c + else find_constr tag (num_const + 1) num_nonconst rem + | c :: rem -> + if tag = Cstr_block num_nonconst || tag = Cstr_unboxed + then c + else find_constr tag num_const (num_nonconst + 1) rem + +let find_constr_by_tag tag cstrlist = + find_constr tag 0 0 cstrlist + +let constructors_of_type ty_path decl = + match decl.type_kind with + | Type_variant cstrs -> constructor_descrs ty_path decl cstrs + | Type_record _ | Type_abstract | Type_open -> [] + +let labels_of_type ty_path decl = + match decl.type_kind with + | Type_record(labels, rep) -> + label_descrs (newgenconstr ty_path decl.type_params) + labels rep decl.type_private + | Type_variant _ | Type_abstract | Type_open -> [] + +(* Set row_name in Env, cf. GPR#1204/1329 *) +let set_row_name decl path = + match decl.type_manifest with + None -> () + | Some ty -> + let ty = repr ty in + match ty.desc with + Tvariant row when static_row row -> + let row = {(row_repr row) with + row_name = Some (path, decl.type_params)} in + ty.desc <- Tvariant row + | _ -> () diff --git a/res_syntax/compiler-libs-406/datarepr.mli b/res_syntax/compiler-libs-406/datarepr.mli new file mode 100644 index 0000000000..30dc1f1f6c --- /dev/null +++ b/res_syntax/compiler-libs-406/datarepr.mli @@ -0,0 +1,48 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Compute constructor and label descriptions from type declarations, + determining their representation. 
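(* Editor's note: an illustrative sketch, not part of the vendored sources. *)
type t = A | B of int | C | D of string
(* [constructor_descrs] above numbers constant and non-constant constructors
   independently, so the resulting tags are
   A = Cstr_constant 0, C = Cstr_constant 1, B = Cstr_block 0, D = Cstr_block 1. *)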
*) + +open Types + +val extension_descr: + Path.t -> extension_constructor -> constructor_description + +val labels_of_type: + Path.t -> type_declaration -> + (Ident.t * label_description) list +val constructors_of_type: + Path.t -> type_declaration -> + (Ident.t * constructor_description) list + + +exception Constr_not_found + +val find_constr_by_tag: + constructor_tag -> constructor_declaration list -> + constructor_declaration + +val constructor_existentials : + constructor_arguments -> type_expr option -> type_expr list * type_expr list +(** Takes [cd_args] and [cd_res] from a [constructor_declaration] and + returns: + - the types of the constructor's arguments + - the existential variables introduced by the constructor + *) + + +(* Set the polymorphic variant row_name field *) +val set_row_name : type_declaration -> Path.t -> unit diff --git a/res_syntax/compiler-libs-406/delayed_checks.ml b/res_syntax/compiler-libs-406/delayed_checks.ml new file mode 100644 index 0000000000..029831a745 --- /dev/null +++ b/res_syntax/compiler-libs-406/delayed_checks.ml @@ -0,0 +1,15 @@ +let delayed_checks = ref [] +let reset_delayed_checks () = delayed_checks := [] +let add_delayed_check f = + delayed_checks := (f, Warnings.backup ()) :: !delayed_checks + +let force_delayed_checks () = + (* checks may change type levels *) + let snap = Btype.snapshot () in + let w_old = Warnings.backup () in + List.iter + (fun (f, w) -> Warnings.restore w; f ()) + (List.rev !delayed_checks); + Warnings.restore w_old; + reset_delayed_checks (); + Btype.backtrack snap \ No newline at end of file diff --git a/res_syntax/compiler-libs-406/delayed_checks.mli b/res_syntax/compiler-libs-406/delayed_checks.mli new file mode 100644 index 0000000000..df0a34692b --- /dev/null +++ b/res_syntax/compiler-libs-406/delayed_checks.mli @@ -0,0 +1,6 @@ + + + +val reset_delayed_checks : unit -> unit +val add_delayed_check : (unit -> unit) -> unit +val force_delayed_checks : unit -> unit diff --git a/res_syntax/compiler-libs-406/digest.ml b/res_syntax/compiler-libs-406/digest.ml new file mode 100644 index 0000000000..408194b501 --- /dev/null +++ b/res_syntax/compiler-libs-406/digest.ml @@ -0,0 +1,76 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
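(* Editor's note: a minimal usage sketch, not part of the vendored sources.
   It assumes the [Delayed_checks] module above is linked in. Checks are
   registered while type-checking and only run (with the warning settings
   captured at registration time) when [force_delayed_checks] is called. *)
let () =
  Delayed_checks.add_delayed_check (fun () ->
      prerr_endline "runs when force_delayed_checks is called");
  (* ... type-checking would happen here ... *)
  Delayed_checks.force_delayed_checks ()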
*) +(* *) +(**************************************************************************) + +(* Message digest (MD5) *) + +type t = string + +let compare = String.compare +let equal = String.equal + +external unsafe_string: string -> int -> int -> t = "caml_md5_string" +external channel: in_channel -> int -> t = "caml_md5_chan" + +let string str = + unsafe_string str 0 (String.length str) + +let bytes b = string (Bytes.unsafe_to_string b) + +let substring str ofs len = + if ofs < 0 || len < 0 || ofs > String.length str - len + then invalid_arg "Digest.substring" + else unsafe_string str ofs len + +let subbytes b ofs len = substring (Bytes.unsafe_to_string b) ofs len + +let file filename = + let ic = open_in_bin filename in + match channel ic (-1) with + | d -> close_in ic; d + | exception e -> close_in ic; raise e + +let output chan digest = + output_string chan digest + +let input chan = really_input_string chan 16 + +let char_hex n = + Char.unsafe_chr (n + if n < 10 then Char.code '0' else (Char.code 'a' - 10)) + +let to_hex d = + if String.length d <> 16 then invalid_arg "Digest.to_hex"; + let result = Bytes.create 32 in + for i = 0 to 15 do + let x = Char.code d.[i] in + Bytes.unsafe_set result (i*2) (char_hex (x lsr 4)); + Bytes.unsafe_set result (i*2+1) (char_hex (x land 0x0f)); + done; + Bytes.unsafe_to_string result + +let from_hex s = + if String.length s <> 32 then invalid_arg "Digest.from_hex"; + let digit c = + match c with + | '0'..'9' -> Char.code c - Char.code '0' + | 'A'..'F' -> Char.code c - Char.code 'A' + 10 + | 'a'..'f' -> Char.code c - Char.code 'a' + 10 + | _ -> raise (Invalid_argument "Digest.from_hex") + in + let byte i = digit s.[i] lsl 4 + digit s.[i+1] in + let result = Bytes.create 16 in + for i = 0 to 15 do + Bytes.set result i (Char.chr (byte (2 * i))); + done; + Bytes.unsafe_to_string result diff --git a/res_syntax/compiler-libs-406/digest.mli b/res_syntax/compiler-libs-406/digest.mli new file mode 100644 index 0000000000..9f9fd53426 --- /dev/null +++ b/res_syntax/compiler-libs-406/digest.mli @@ -0,0 +1,84 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** MD5 message digest. + + This module provides functions to compute 128-bit 'digests' of + arbitrary-length strings or files. The digests are of cryptographic + quality: it is very hard, given a digest, to forge a string having + that digest. The algorithm used is MD5. This module should not be + used for secure and sensitive cryptographic applications. For these + kind of applications more recent and stronger cryptographic + primitives should be used instead. +*) + +type t = string +(** The type of digests: 16-character strings. *) + +val compare : t -> t -> int +(** The comparison function for 16-character digest, with the same + specification as {!Stdlib.compare} and the implementation + shared with {!String.compare}. Along with the type [t], this + function [compare] allows the module [Digest] to be passed as + argument to the functors {!Set.Make} and {!Map.Make}. 
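(* Editor's note: a minimal usage sketch of this interface, not part of the
   vendored sources. [Digest.string] returns the raw 16-byte MD5 digest and
   [Digest.to_hex] its 32-character hexadecimal rendering. *)
let () =
  let d = Digest.string "hello" in
  assert (String.length d = 16);
  print_endline (Digest.to_hex d)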
+ @since 4.00.0 *) + +val equal : t -> t -> bool +(** The equal function for 16-character digest. + @since 4.03.0 *) + +val string : string -> t +(** Return the digest of the given string. *) + +val bytes : bytes -> t +(** Return the digest of the given byte sequence. + @since 4.02.0 *) + +val substring : string -> int -> int -> t +(** [Digest.substring s ofs len] returns the digest of the substring + of [s] starting at index [ofs] and containing [len] characters. *) + +val subbytes : bytes -> int -> int -> t +(** [Digest.subbytes s ofs len] returns the digest of the subsequence + of [s] starting at index [ofs] and containing [len] bytes. + @since 4.02.0 *) + +external channel : in_channel -> int -> t = "caml_md5_chan" +(** If [len] is nonnegative, [Digest.channel ic len] reads [len] + characters from channel [ic] and returns their digest, or raises + [End_of_file] if end-of-file is reached before [len] characters + are read. If [len] is negative, [Digest.channel ic len] reads + all characters from [ic] until end-of-file is reached and return + their digest. *) + +val file : string -> t +(** Return the digest of the file whose name is given. *) + +val output : out_channel -> t -> unit +(** Write a digest on the given output channel. *) + +val input : in_channel -> t +(** Read a digest from the given input channel. *) + +val to_hex : t -> string +(** Return the printable hexadecimal representation of the given digest. + Raise [Invalid_argument] if the argument is not exactly 16 bytes. + *) + +val from_hex : string -> t +(** Convert a hexadecimal representation back into the corresponding digest. + Raise [Invalid_argument] if the argument is not exactly 32 hexadecimal + characters. + @since 4.00.0 *) diff --git a/res_syntax/compiler-libs-406/docstrings.ml b/res_syntax/compiler-libs-406/docstrings.ml new file mode 100644 index 0000000000..5de6d4d4fa --- /dev/null +++ b/res_syntax/compiler-libs-406/docstrings.ml @@ -0,0 +1,343 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Leo White *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +open Location + +(* Docstrings *) + +(* A docstring is "attached" if it has been inserted in the AST. This + is used for generating unexpected docstring warnings. *) +type ds_attached = + | Unattached (* Not yet attached anything.*) + | Info (* Attached to a field or constructor. *) + | Docs (* Attached to an item or as floating text. *) + +(* A docstring is "associated" with an item if there are no blank lines between + them. This is used for generating docstring ambiguity warnings. 
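(* Editor's note: an illustrative sketch, not part of the vendored sources.
   A doc comment with no blank line on either side is "associated" with both
   neighbouring items, which is exactly the ambiguity the warning described
   above reports: *)
type a = A
(** ambiguous: does this document [a] or [b]? *)
type b = B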
*) +type ds_associated = + | Zero (* Not associated with an item *) + | One (* Associated with one item *) + | Many (* Associated with multiple items (ambiguity) *) + +type docstring = + { ds_body: string; + ds_loc: Location.t; + mutable ds_attached: ds_attached; + mutable ds_associated: ds_associated; } + +(* List of docstrings *) + +let docstrings : docstring list ref = ref [] + +(* Warn for unused and ambiguous docstrings *) + +let warn_bad_docstrings () = + if Warnings.is_active (Warnings.Bad_docstring true) then begin + List.iter + (fun ds -> + match ds.ds_attached with + | Info -> () + | Unattached -> + prerr_warning ds.ds_loc (Warnings.Bad_docstring true) + | Docs -> + match ds.ds_associated with + | Zero | One -> () + | Many -> + prerr_warning ds.ds_loc (Warnings.Bad_docstring false)) + (List.rev !docstrings) +end + +(* Docstring constructors and destructors *) + +let docstring body loc = + let ds = + { ds_body = body; + ds_loc = loc; + ds_attached = Unattached; + ds_associated = Zero; } + in + ds + +let register ds = + docstrings := ds :: !docstrings + +let docstring_body ds = ds.ds_body + +let docstring_loc ds = ds.ds_loc + +(* Docstrings attached to items *) + +type docs = + { docs_pre: docstring option; + docs_post: docstring option; } + +let empty_docs = { docs_pre = None; docs_post = None } + +let doc_loc = {txt = "ocaml.doc"; loc = Location.none} + +let docs_attr ds = + let open Parsetree in + let exp = + { pexp_desc = Pexp_constant (Pconst_string(ds.ds_body, None)); + pexp_loc = ds.ds_loc; + pexp_attributes = []; } + in + let item = + { pstr_desc = Pstr_eval (exp, []); pstr_loc = exp.pexp_loc } + in + (doc_loc, PStr [item]) + +let add_docs_attrs docs attrs = + let attrs = + match docs.docs_pre with + | None | Some { ds_body=""; _ } -> attrs + | Some ds -> docs_attr ds :: attrs + in + let attrs = + match docs.docs_post with + | None | Some { ds_body=""; _ } -> attrs + | Some ds -> attrs @ [docs_attr ds] + in + attrs + +(* Docstrings attached to constructors or fields *) + +type info = docstring option + +let empty_info = None + +let info_attr = docs_attr + +let add_info_attrs info attrs = + match info with + | None | Some {ds_body=""; _} -> attrs + | Some ds -> attrs @ [info_attr ds] + +(* Docstrings not attached to a specific item *) + +type text = docstring list + +let empty_text = [] +let empty_text_lazy = lazy [] + +let text_loc = {txt = "ocaml.text"; loc = Location.none} + +let text_attr ds = + let open Parsetree in + let exp = + { pexp_desc = Pexp_constant (Pconst_string(ds.ds_body, None)); + pexp_loc = ds.ds_loc; + pexp_attributes = []; } + in + let item = + { pstr_desc = Pstr_eval (exp, []); pstr_loc = exp.pexp_loc } + in + (text_loc, PStr [item]) + +let add_text_attrs dsl attrs = + let fdsl = List.filter (function {ds_body=""} -> false| _ ->true) dsl in + (List.map text_attr fdsl) @ attrs + +(* Find the first non-info docstring in a list, attach it and return it *) +let get_docstring ~info dsl = + let rec loop = function + | [] -> None + | {ds_attached = Info; _} :: rest -> loop rest + | ds :: _ -> + ds.ds_attached <- if info then Info else Docs; + Some ds + in + loop dsl + +(* Find all the non-info docstrings in a list, attach them and return them *) +let get_docstrings dsl = + let rec loop acc = function + | [] -> List.rev acc + | {ds_attached = Info; _} :: rest -> loop acc rest + | ds :: rest -> + ds.ds_attached <- Docs; + loop (ds :: acc) rest + in + loop [] dsl + +(* "Associate" all the docstrings in a list *) +let associate_docstrings dsl = + List.iter + (fun 
ds -> + match ds.ds_associated with + | Zero -> ds.ds_associated <- One + | (One | Many) -> ds.ds_associated <- Many) + dsl + +(* Map from positions to pre docstrings *) + +let pre_table : (Lexing.position, docstring list) Hashtbl.t = + Hashtbl.create 50 + +let set_pre_docstrings pos dsl = + if dsl <> [] then Hashtbl.add pre_table pos dsl + +let get_pre_docs pos = + try + let dsl = Hashtbl.find pre_table pos in + associate_docstrings dsl; + get_docstring ~info:false dsl + with Not_found -> None + +let mark_pre_docs pos = + try + let dsl = Hashtbl.find pre_table pos in + associate_docstrings dsl + with Not_found -> () + +(* Map from positions to post docstrings *) + +let post_table : (Lexing.position, docstring list) Hashtbl.t = + Hashtbl.create 50 + +let set_post_docstrings pos dsl = + if dsl <> [] then Hashtbl.add post_table pos dsl + +let get_post_docs pos = + try + let dsl = Hashtbl.find post_table pos in + associate_docstrings dsl; + get_docstring ~info:false dsl + with Not_found -> None + +let mark_post_docs pos = + try + let dsl = Hashtbl.find post_table pos in + associate_docstrings dsl + with Not_found -> () + +let get_info pos = + try + let dsl = Hashtbl.find post_table pos in + get_docstring ~info:true dsl + with Not_found -> None + +(* Map from positions to floating docstrings *) + +let floating_table : (Lexing.position, docstring list) Hashtbl.t = + Hashtbl.create 50 + +let set_floating_docstrings pos dsl = + if dsl <> [] then Hashtbl.add floating_table pos dsl + +let get_text pos = + try + let dsl = Hashtbl.find floating_table pos in + get_docstrings dsl + with Not_found -> [] + +(* Maps from positions to extra docstrings *) + +let pre_extra_table : (Lexing.position, docstring list) Hashtbl.t = + Hashtbl.create 50 + +let set_pre_extra_docstrings pos dsl = + if dsl <> [] then Hashtbl.add pre_extra_table pos dsl + +let get_pre_extra_text pos = + try + let dsl = Hashtbl.find pre_extra_table pos in + get_docstrings dsl + with Not_found -> [] + +let post_extra_table : (Lexing.position, docstring list) Hashtbl.t = + Hashtbl.create 50 + +let set_post_extra_docstrings pos dsl = + if dsl <> [] then Hashtbl.add post_extra_table pos dsl + +let get_post_extra_text pos = + try + let dsl = Hashtbl.find post_extra_table pos in + get_docstrings dsl + with Not_found -> [] + +(* Docstrings from parser actions *) + +let symbol_docs () = + { docs_pre = get_pre_docs (Parsing.symbol_start_pos ()); + docs_post = get_post_docs (Parsing.symbol_end_pos ()); } + +let symbol_docs_lazy () = + let p1 = Parsing.symbol_start_pos () in + let p2 = Parsing.symbol_end_pos () in + lazy { docs_pre = get_pre_docs p1; + docs_post = get_post_docs p2; } + +let rhs_docs pos1 pos2 = + { docs_pre = get_pre_docs (Parsing.rhs_start_pos pos1); + docs_post = get_post_docs (Parsing.rhs_end_pos pos2); } + +let rhs_docs_lazy pos1 pos2 = + let p1 = Parsing.rhs_start_pos pos1 in + let p2 = Parsing.rhs_end_pos pos2 in + lazy { docs_pre = get_pre_docs p1; + docs_post = get_post_docs p2; } + +let mark_symbol_docs () = + mark_pre_docs (Parsing.symbol_start_pos ()); + mark_post_docs (Parsing.symbol_end_pos ()) + +let mark_rhs_docs pos1 pos2 = + mark_pre_docs (Parsing.rhs_start_pos pos1); + mark_post_docs (Parsing.rhs_end_pos pos2) + +let symbol_info () = + get_info (Parsing.symbol_end_pos ()) + +let rhs_info pos = + get_info (Parsing.rhs_end_pos pos) + +let symbol_text () = + get_text (Parsing.symbol_start_pos ()) + +let symbol_text_lazy () = + let pos = Parsing.symbol_start_pos () in + lazy (get_text pos) + +let rhs_text pos = + 
get_text (Parsing.rhs_start_pos pos) + +let rhs_text_lazy pos = + let pos = Parsing.rhs_start_pos pos in + lazy (get_text pos) + +let symbol_pre_extra_text () = + get_pre_extra_text (Parsing.symbol_start_pos ()) + +let symbol_post_extra_text () = + get_post_extra_text (Parsing.symbol_end_pos ()) + +let rhs_pre_extra_text pos = + get_pre_extra_text (Parsing.rhs_start_pos pos) + +let rhs_post_extra_text pos = + get_post_extra_text (Parsing.rhs_end_pos pos) + + +(* (Re)Initialise all comment state *) + +let init () = + docstrings := []; + Hashtbl.reset pre_table; + Hashtbl.reset post_table; + Hashtbl.reset floating_table; + Hashtbl.reset pre_extra_table; + Hashtbl.reset post_extra_table diff --git a/res_syntax/compiler-libs-406/docstrings.mli b/res_syntax/compiler-libs-406/docstrings.mli new file mode 100644 index 0000000000..892a80e278 --- /dev/null +++ b/res_syntax/compiler-libs-406/docstrings.mli @@ -0,0 +1,157 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Leo White *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Documentation comments *) + +(** (Re)Initialise all docstring state *) +val init : unit -> unit + +(** Emit warnings for unattached and ambiguous docstrings *) +val warn_bad_docstrings : unit -> unit + +(** {2 Docstrings} *) + +(** Documentation comments *) +type docstring + +(** Create a docstring *) +val docstring : string -> Location.t -> docstring + +(** Register a docstring *) +val register : docstring -> unit + +(** Get the text of a docstring *) +val docstring_body : docstring -> string + +(** Get the location of a docstring *) +val docstring_loc : docstring -> Location.t + +(** {2 Set functions} + + These functions are used by the lexer to associate docstrings to + the locations of tokens. *) + +(** Docstrings immediately preceding a token *) +val set_pre_docstrings : Lexing.position -> docstring list -> unit + +(** Docstrings immediately following a token *) +val set_post_docstrings : Lexing.position -> docstring list -> unit + +(** Docstrings not immediately adjacent to a token *) +val set_floating_docstrings : Lexing.position -> docstring list -> unit + +(** Docstrings immediately following the token which precedes this one *) +val set_pre_extra_docstrings : Lexing.position -> docstring list -> unit + +(** Docstrings immediately preceding the token which follows this one *) +val set_post_extra_docstrings : Lexing.position -> docstring list -> unit + +(** {2 Items} + + The {!docs} type represents documentation attached to an item. *) + +type docs = + { docs_pre: docstring option; + docs_post: docstring option; } + +val empty_docs : docs + +val docs_attr : docstring -> Parsetree.attribute + +(** Convert item documentation to attributes and add them to an + attribute list *) +val add_docs_attrs : docs -> Parsetree.attributes -> Parsetree.attributes + +(** Fetch the item documentation for the current symbol. This also + marks this documentation (for ambiguity warnings). *) +val symbol_docs : unit -> docs +val symbol_docs_lazy : unit -> docs Lazy.t + +(** Fetch the item documentation for the symbols between two + positions. 
This also marks this documentation (for ambiguity + warnings). *) +val rhs_docs : int -> int -> docs +val rhs_docs_lazy : int -> int -> docs Lazy.t + +(** Mark the item documentation for the current symbol (for ambiguity + warnings). *) +val mark_symbol_docs : unit -> unit + +(** Mark as associated the item documentation for the symbols between + two positions (for ambiguity warnings) *) +val mark_rhs_docs : int -> int -> unit + +(** {2 Fields and constructors} + + The {!info} type represents documentation attached to a field or + constructor. *) + +type info = docstring option + +val empty_info : info + +val info_attr : docstring -> Parsetree.attribute + +(** Convert field info to attributes and add them to an + attribute list *) +val add_info_attrs : info -> Parsetree.attributes -> Parsetree.attributes + +(** Fetch the field info for the current symbol. *) +val symbol_info : unit -> info + +(** Fetch the field info following the symbol at a given position. *) +val rhs_info : int -> info + +(** {2 Unattached comments} + + The {!text} type represents documentation which is not attached to + anything. *) + +type text = docstring list + +val empty_text : text +val empty_text_lazy : text Lazy.t + +val text_attr : docstring -> Parsetree.attribute + +(** Convert text to attributes and add them to an attribute list *) +val add_text_attrs : text -> Parsetree.attributes -> Parsetree.attributes + +(** Fetch the text preceding the current symbol. *) +val symbol_text : unit -> text +val symbol_text_lazy : unit -> text Lazy.t + +(** Fetch the text preceding the symbol at the given position. *) +val rhs_text : int -> text +val rhs_text_lazy : int -> text Lazy.t + +(** {2 Extra text} + + There may be additional text attached to the delimiters of a block + (e.g. [struct] and [end]). This is fetched by the following + functions, which are applied to the contents of the block rather + than the delimiters. *) + +(** Fetch additional text preceding the current symbol *) +val symbol_pre_extra_text : unit -> text + +(** Fetch additional text following the current symbol *) +val symbol_post_extra_text : unit -> text + +(** Fetch additional text preceding the symbol at the given position *) +val rhs_pre_extra_text : int -> text + +(** Fetch additional text following the symbol at the given position *) +val rhs_post_extra_text : int -> text diff --git a/res_syntax/compiler-libs-406/dune b/res_syntax/compiler-libs-406/dune new file mode 100644 index 0000000000..cd83c554fa --- /dev/null +++ b/res_syntax/compiler-libs-406/dune @@ -0,0 +1,4 @@ +(library + (name compilerlibs406) + (flags -w -8-9) + (modules_without_implementation parsetree asttypes outcometree annot)) diff --git a/res_syntax/compiler-libs-406/env.ml b/res_syntax/compiler-libs-406/env.ml new file mode 100644 index 0000000000..4d51aa9b6d --- /dev/null +++ b/res_syntax/compiler-libs-406/env.ml @@ -0,0 +1,2373 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +(* Environment handling *) + +open Cmi_format +open Config +open Misc +open Asttypes +open Longident +open Path +open Types +open Btype + +let add_delayed_check_forward = ref (fun _ -> assert false) + +let value_declarations : ((string * Location.t), (unit -> unit)) Hashtbl.t = + Hashtbl.create 16 + (* This table is used to usage of value declarations. A declaration is + identified with its name and location. The callback attached to a + declaration is called whenever the value is used explicitly + (lookup_value) or implicitly (inclusion test between signatures, + cf Includemod.value_descriptions). *) + +let type_declarations = Hashtbl.create 16 +let module_declarations = Hashtbl.create 16 + +type constructor_usage = Positive | Pattern | Privatize +type constructor_usages = + { + mutable cu_positive: bool; + mutable cu_pattern: bool; + mutable cu_privatize: bool; + } +let add_constructor_usage cu = function + | Positive -> cu.cu_positive <- true + | Pattern -> cu.cu_pattern <- true + | Privatize -> cu.cu_privatize <- true +let constructor_usages () = + {cu_positive = false; cu_pattern = false; cu_privatize = false} + +let used_constructors : + (string * Location.t * string, (constructor_usage -> unit)) Hashtbl.t + = Hashtbl.create 16 + +let prefixed_sg = Hashtbl.create 113 + +type error = + | Illegal_renaming of string * string * string + | Inconsistent_import of string * string * string + | Need_recursive_types of string * string + | Depend_on_unsafe_string_unit of string * string + | Missing_module of Location.t * Path.t * Path.t + | Illegal_value_name of Location.t * string + +exception Error of error + +let error err = raise (Error err) + +module EnvLazy : sig + type ('a,'b) t + + type log + + val force : ('a -> 'b) -> ('a,'b) t -> 'b + val create : 'a -> ('a,'b) t + val get_arg : ('a,'b) t -> 'a option + + (* [force_logged log f t] is equivalent to [force f t] but if [f] returns [None] then + [t] is recorded in [log]. [backtrack log] will then reset all the recorded [t]s back + to their original state. 
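(* Editor's note: an illustrative sketch of the [EnvLazy] contract described
   above, not part of the vendored sources (the module is local to [Env]). *)
let () =
  let t = EnvLazy.create 21 in
  let v1 = EnvLazy.force (fun x -> x * 2) t in
  (* memoised: the second function is never called *)
  let v2 = EnvLazy.force (fun _ -> assert false) t in
  assert (v1 = 42 && v2 = 42)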
*) + val log : unit -> log + val force_logged : log -> ('a -> 'b option) -> ('a,'b option) t -> 'b option + val backtrack : log -> unit + +end = struct + + type ('a,'b) t = ('a,'b) eval ref + + and ('a,'b) eval = + | Done of 'b + | Raise of exn + | Thunk of 'a + + type undo = + | Nil + | Cons : ('a, 'b) t * 'a * undo -> undo + + type log = undo ref + + let force f x = + match !x with + | Done x -> x + | Raise e -> raise e + | Thunk e -> + match f e with + | y -> + x := Done y; + y + | exception e -> + x := Raise e; + raise e + + let get_arg x = + match !x with Thunk a -> Some a | _ -> None + + let create x = + ref (Thunk x) + + let log () = + ref Nil + + let force_logged log f x = + match !x with + | Done x -> x + | Raise e -> raise e + | Thunk e -> + match f e with + | None -> + x := Done None; + log := Cons(x, e, !log); + None + | Some _ as y -> + x := Done y; + y + | exception e -> + x := Raise e; + raise e + + let backtrack log = + let rec loop = function + | Nil -> () + | Cons(x, e, rest) -> + x := Thunk e; + loop rest + in + loop !log + +end + +module PathMap = Map.Make(Path) + +type summary = + Env_empty + | Env_value of summary * Ident.t * value_description + | Env_type of summary * Ident.t * type_declaration + | Env_extension of summary * Ident.t * extension_constructor + | Env_module of summary * Ident.t * module_declaration + | Env_modtype of summary * Ident.t * modtype_declaration + | Env_class of summary * Ident.t * class_declaration + | Env_cltype of summary * Ident.t * class_type_declaration + | Env_open of summary * Path.t + | Env_functor_arg of summary * Ident.t + | Env_constraints of summary * type_declaration PathMap.t + | Env_copy_types of summary * string list + +module TycompTbl = + struct + (** This module is used to store components of types (i.e. labels + and constructors). We keep a representation of each nested + "open" and the set of local bindings between each of them. *) + + type 'a t = { + current: 'a Ident.tbl; + (** Local bindings since the last open. *) + + opened: 'a opened option; + (** Symbolic representation of the last (innermost) open, if any. *) + } + + and 'a opened = { + components: (string, 'a list) Tbl.t; + (** Components from the opened module. We keep a list of + bindings for each name, as in comp_labels and + comp_constrs. *) + + using: (string -> ('a * 'a) option -> unit) option; + (** A callback to be applied when a component is used from this + "open". This is used to detect unused "opens". The + arguments are used to detect shadowing. *) + + next: 'a t; + (** The table before opening the module. 
*) + } + + let empty = { current = Ident.empty; opened = None } + + let add id x tbl = + {tbl with current = Ident.add id x tbl.current} + + let add_open slot wrap components next = + let using = + match slot with + | None -> None + | Some f -> Some (fun s x -> f s (wrap x)) + in + { + current = Ident.empty; + opened = Some {using; components; next}; + } + + let rec find_same id tbl = + try Ident.find_same id tbl.current + with Not_found as exn -> + begin match tbl.opened with + | Some {next; _} -> find_same id next + | None -> raise exn + end + + let nothing = fun () -> () + + let mk_callback rest name desc = function + | None -> nothing + | Some f -> + (fun () -> + match rest with + | [] -> f name None + | (hidden, _) :: _ -> f name (Some (desc, hidden)) + ) + + let rec find_all name tbl = + List.map (fun (_id, desc) -> desc, nothing) + (Ident.find_all name tbl.current) @ + match tbl.opened with + | None -> [] + | Some {using; next; components} -> + let rest = find_all name next in + match Tbl.find_str name components with + | exception Not_found -> rest + | opened -> + List.map + (fun desc -> desc, mk_callback rest name desc using) + opened + @ rest + + let rec fold_name f tbl acc = + let acc = Ident.fold_name (fun _id d -> f d) tbl.current acc in + match tbl.opened with + | Some {using = _; next; components} -> + acc + |> Tbl.fold + (fun _name -> List.fold_right (fun desc -> f desc)) + components + |> fold_name f next + | None -> + acc + + let rec local_keys tbl acc = + let acc = Ident.fold_all (fun k _ accu -> k::accu) tbl.current acc in + match tbl.opened with + | Some o -> local_keys o.next acc + | None -> acc + + let diff_keys is_local tbl1 tbl2 = + let keys2 = local_keys tbl2 [] in + List.filter + (fun id -> + is_local (find_same id tbl2) && + try ignore (find_same id tbl1); false + with Not_found -> true) + keys2 + + end + + +module IdTbl = + struct + (** This module is used to store all kinds of components except + (labels and constructors) in environments. We keep a + representation of each nested "open" and the set of local + bindings between each of them. *) + + + type 'a t = { + current: 'a Ident.tbl; + (** Local bindings since the last open *) + + opened: 'a opened option; + (** Symbolic representation of the last (innermost) open, if any. *) + } + + and 'a opened = { + root: Path.t; + (** The path of the opened module, to be prefixed in front of + its local names to produce a valid path in the current + environment. *) + + components: (string, 'a * int) Tbl.t; + (** Components from the opened module. *) + + using: (string -> ('a * 'a) option -> unit) option; + (** A callback to be applied when a component is used from this + "open". This is used to detect unused "opens". The + arguments are used to detect shadowing. *) + + next: 'a t; + (** The table before opening the module. 
*) + } + + let empty = { current = Ident.empty; opened = None } + + let add id x tbl = + {tbl with current = Ident.add id x tbl.current} + + let add_open slot wrap root components next = + let using = + match slot with + | None -> None + | Some f -> Some (fun s x -> f s (wrap x)) + in + { + current = Ident.empty; + opened = Some {using; root; components; next}; + } + + let rec find_same id tbl = + try Ident.find_same id tbl.current + with Not_found as exn -> + begin match tbl.opened with + | Some {next; _} -> find_same id next + | None -> raise exn + end + + let rec find_name mark name tbl = + try + let (id, desc) = Ident.find_name name tbl.current in + Pident id, desc + with Not_found as exn -> + begin match tbl.opened with + | Some {using; root; next; components} -> + begin try + let (descr, pos) = Tbl.find_str name components in + let res = Pdot (root, name, pos), descr in + if mark then begin match using with + | None -> () + | Some f -> + begin try f name (Some (snd (find_name false name next), snd res)) + with Not_found -> f name None + end + end; + res + with Not_found -> + find_name mark name next + end + | None -> + raise exn + end + + let find_name name tbl = find_name true name tbl + + let rec update name f tbl = + try + let (id, desc) = Ident.find_name name tbl.current in + let new_desc = f desc in + {tbl with current = Ident.add id new_desc tbl.current} + with Not_found -> + begin match tbl.opened with + | Some {root; using; next; components} -> + begin try + let (desc, pos) = Tbl.find_str name components in + let new_desc = f desc in + let components = Tbl.add name (new_desc, pos) components in + {tbl with opened = Some {root; using; next; components}} + with Not_found -> + let next = update name f next in + {tbl with opened = Some {root; using; next; components}} + end + | None -> + tbl + end + + + + let rec find_all name tbl = + List.map (fun (id, desc) -> Pident id, desc) (Ident.find_all name tbl.current) @ + match tbl.opened with + | None -> [] + | Some {root; using = _; next; components} -> + try + let (desc, pos) = Tbl.find_str name components in + (Pdot (root, name, pos), desc) :: find_all name next + with Not_found -> + find_all name next + + let rec fold_name f tbl acc = + let acc = Ident.fold_name (fun id d -> f (Ident.name id) (Pident id, d)) tbl.current acc in + match tbl.opened with + | Some {root; using = _; next; components} -> + acc + |> Tbl.fold + (fun name (desc, pos) -> f name (Pdot (root, name, pos), desc)) + components + |> fold_name f next + | None -> + acc + + let rec local_keys tbl acc = + let acc = Ident.fold_all (fun k _ accu -> k::accu) tbl.current acc in + match tbl.opened with + | Some o -> local_keys o.next acc + | None -> acc + + + let rec iter f tbl = + Ident.iter (fun id desc -> f id (Pident id, desc)) tbl.current; + match tbl.opened with + | Some {root; using = _; next; components} -> + Tbl.iter + (fun s (x, pos) -> f (Ident.hide (Ident.create s) (* ??? 
*)) (Pdot (root, s, pos), x)) + components; + iter f next + | None -> () + + let diff_keys tbl1 tbl2 = + let keys2 = local_keys tbl2 [] in + List.filter + (fun id -> + try ignore (find_same id tbl1); false + with Not_found -> true) + keys2 + + + end + +type type_descriptions = + constructor_description list * label_description list + +let in_signature_flag = 0x01 +let implicit_coercion_flag = 0x02 + +type t = { + values: value_description IdTbl.t; + constrs: constructor_description TycompTbl.t; + labels: label_description TycompTbl.t; + types: (type_declaration * type_descriptions) IdTbl.t; + modules: (Subst.t * module_declaration, module_declaration) EnvLazy.t IdTbl.t; + modtypes: modtype_declaration IdTbl.t; + components: module_components IdTbl.t; + classes: class_declaration IdTbl.t; + cltypes: class_type_declaration IdTbl.t; + functor_args: unit Ident.tbl; + summary: summary; + local_constraints: type_declaration PathMap.t; + gadt_instances: (int * TypeSet.t ref) list; + flags: int; +} + +and module_components = + { + deprecated: string option; + loc: Location.t; + comps: + (t * Subst.t * Path.t * Types.module_type, module_components_repr option) + EnvLazy.t; + } + +and module_components_repr = + Structure_comps of structure_components + | Functor_comps of functor_components + +and 'a comp_tbl = (string, ('a * int)) Tbl.t + +and structure_components = { + mutable comp_values: value_description comp_tbl; + mutable comp_constrs: (string, constructor_description list) Tbl.t; + mutable comp_labels: (string, label_description list) Tbl.t; + mutable comp_types: (type_declaration * type_descriptions) comp_tbl; + mutable comp_modules: + (Subst.t * module_declaration, module_declaration) EnvLazy.t comp_tbl; + mutable comp_modtypes: modtype_declaration comp_tbl; + mutable comp_components: module_components comp_tbl; + mutable comp_classes: class_declaration comp_tbl; + mutable comp_cltypes: class_type_declaration comp_tbl; +} + +and functor_components = { + fcomp_param: Ident.t; (* Formal parameter *) + fcomp_arg: module_type option; (* Argument signature *) + fcomp_res: module_type; (* Result signature *) + fcomp_cache: (Path.t, module_components) Hashtbl.t; (* For memoization *) + fcomp_subst_cache: (Path.t, module_type) Hashtbl.t +} + +let copy_local ~from env = + { env with + local_constraints = from.local_constraints; + gadt_instances = from.gadt_instances; + flags = from.flags } + +let same_constr = ref (fun _ _ _ -> assert false) + +(* Helper to decide whether to report an identifier shadowing + by some 'open'. For labels and constructors, we do not report + if the two elements are from the same re-exported declaration. + + Later, one could also interpret some attributes on value and + type declarations to silence the shadowing warnings. 
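(* Editor's note: an illustrative sketch, not part of the vendored sources.
   Re-exported constructors are deliberately not reported: the second [open]
   below shadows [Red] and [Blue], but both names come from the same original
   declaration, so [check_shadowing] returns [None] for them. *)
module A = struct type t = Red | Blue end
module B = struct include A end
open A
open B
let _ = Red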
*) + +let check_shadowing env = function + | `Constructor (Some (c1, c2)) + when not (!same_constr env c1.cstr_res c2.cstr_res) -> + Some "constructor" + | `Label (Some (l1, l2)) + when not (!same_constr env l1.lbl_res l2.lbl_res) -> + Some "label" + | `Value (Some _) -> Some "value" + | `Type (Some _) -> Some "type" + | `Module (Some _) | `Component (Some _) -> Some "module" + | `Module_type (Some _) -> Some "module type" + | `Class (Some _) -> Some "class" + | `Class_type (Some _) -> Some "class type" + | `Constructor _ | `Label _ + | `Value None | `Type None | `Module None | `Module_type None + | `Class None | `Class_type None | `Component None -> + None + +let subst_modtype_maker (subst, md) = + if subst == Subst.identity then md + else {md with md_type = Subst.modtype subst md.md_type} + +let empty = { + values = IdTbl.empty; constrs = TycompTbl.empty; + labels = TycompTbl.empty; types = IdTbl.empty; + modules = IdTbl.empty; modtypes = IdTbl.empty; + components = IdTbl.empty; classes = IdTbl.empty; + cltypes = IdTbl.empty; + summary = Env_empty; local_constraints = PathMap.empty; gadt_instances = []; + flags = 0; + functor_args = Ident.empty; + } + +let in_signature b env = + let flags = + if b then env.flags lor in_signature_flag + else env.flags land (lnot in_signature_flag) + in + {env with flags} + +let implicit_coercion env = + {env with flags = env.flags lor implicit_coercion_flag} + +let is_in_signature env = env.flags land in_signature_flag <> 0 +let is_implicit_coercion env = env.flags land implicit_coercion_flag <> 0 + +let is_ident = function + Pident _ -> true + | Pdot _ | Papply _ -> false + +let is_local_ext = function + | {cstr_tag = Cstr_extension(p, _)} -> is_ident p + | _ -> false + +let diff env1 env2 = + IdTbl.diff_keys env1.values env2.values @ + TycompTbl.diff_keys is_local_ext env1.constrs env2.constrs @ + IdTbl.diff_keys env1.modules env2.modules @ + IdTbl.diff_keys env1.classes env2.classes + +type can_load_cmis = + | Can_load_cmis + | Cannot_load_cmis of EnvLazy.log + +let can_load_cmis = ref Can_load_cmis + +let without_cmis f x = + let log = EnvLazy.log () in + let res = + Misc.(protect_refs + [R (can_load_cmis, Cannot_load_cmis log)] + (fun () -> f x)) + in + EnvLazy.backtrack log; + res + +(* Forward declarations *) + +let components_of_module' = + ref ((fun ~deprecated:_ ~loc:_ _env _sub _path _mty -> assert false) : + deprecated:string option -> loc:Location.t -> t -> Subst.t -> + Path.t -> module_type -> + module_components) +let components_of_module_maker' = + ref ((fun (_env, _sub, _path, _mty) -> assert false) : + t * Subst.t * Path.t * module_type -> module_components_repr option) +let components_of_functor_appl' = + ref ((fun _f _env _p1 _p2 -> assert false) : + functor_components -> t -> Path.t -> Path.t -> module_components) +let check_modtype_inclusion = + (* to be filled with Includemod.check_modtype_inclusion *) + ref ((fun ~loc:_ _env _mty1 _path1 _mty2 -> assert false) : + loc:Location.t -> t -> module_type -> Path.t -> module_type -> unit) +let strengthen = + (* to be filled with Mtype.strengthen *) + ref ((fun ~aliasable:_ _env _mty _path -> assert false) : + aliasable:bool -> t -> module_type -> Path.t -> module_type) + +let md md_type = + {md_type; md_attributes=[]; md_loc=Location.none} + +let get_components_opt c = + match !can_load_cmis with + | Can_load_cmis -> + EnvLazy.force !components_of_module_maker' c.comps + | Cannot_load_cmis log -> + EnvLazy.force_logged log !components_of_module_maker' c.comps + +let empty_structure = + 
Structure_comps { + comp_values = Tbl.empty; + comp_constrs = Tbl.empty; + comp_labels = Tbl.empty; + comp_types = Tbl.empty; + comp_modules = Tbl.empty; comp_modtypes = Tbl.empty; + comp_components = Tbl.empty; comp_classes = Tbl.empty; + comp_cltypes = Tbl.empty } + +let get_components c = + match get_components_opt c with + | None -> empty_structure + | Some c -> c + +(* The name of the compilation unit currently compiled. + "" if outside a compilation unit. *) + +let current_unit = ref "" + +(* Persistent structure descriptions *) + +type pers_struct = + { ps_name: string; + ps_sig: signature Lazy.t; + ps_comps: module_components; + ps_crcs: (string * Digest.t option) list; + ps_filename: string; + ps_flags: pers_flags list } + +let persistent_structures = + (Hashtbl.create 17 : (string, pers_struct option) Hashtbl.t) + +(* Consistency between persistent structures *) + +let crc_units = Consistbl.create() + +module StringSet = + Set.Make(struct type t = string let compare = String.compare end) + +let imported_units = ref StringSet.empty + +let add_import s = + imported_units := StringSet.add s !imported_units + +let imported_opaque_units = ref StringSet.empty + +let add_imported_opaque s = + imported_opaque_units := StringSet.add s !imported_opaque_units + +let clear_imports () = + Consistbl.clear crc_units; + imported_units := StringSet.empty; + imported_opaque_units := StringSet.empty + +let check_consistency ps = + try + List.iter + (fun (name, crco) -> + match crco with + None -> () + | Some crc -> + add_import name; + Consistbl.check crc_units name crc ps.ps_filename) + ps.ps_crcs; + with Consistbl.Inconsistency(name, source, auth) -> + error (Inconsistent_import(name, auth, source)) + +(* Reading persistent structures from .cmi files *) + +let save_pers_struct crc ps = + let modname = ps.ps_name in + Hashtbl.add persistent_structures modname (Some ps); + List.iter + (function + | Rectypes -> () + | Deprecated _ -> () + | Unsafe_string -> () + | Opaque -> add_imported_opaque modname) + ps.ps_flags; + Consistbl.set crc_units modname crc ps.ps_filename; + add_import modname + +module Persistent_signature = struct + type t = + { filename : string; + cmi : Cmi_format.cmi_infos } + + let load = ref (fun ~unit_name -> + match find_in_path_uncap !load_path (unit_name ^ ".cmi") with + | filename -> Some { filename; cmi = read_cmi filename } + | exception Not_found -> None) +end + +let acknowledge_pers_struct check modname + { Persistent_signature.filename; cmi } = + let name = cmi.cmi_name in + let sign = cmi.cmi_sign in + let crcs = cmi.cmi_crcs in + let flags = cmi.cmi_flags in + let deprecated = + List.fold_left (fun acc -> function Deprecated s -> Some s | _ -> acc) None + flags + in + let comps = + !components_of_module' ~deprecated ~loc:Location.none + empty Subst.identity + (Pident(Ident.create_persistent name)) + (Mty_signature sign) + in + let ps = { ps_name = name; + ps_sig = lazy (Subst.signature Subst.identity sign); + ps_comps = comps; + ps_crcs = crcs; + ps_filename = filename; + ps_flags = flags; + } in + if ps.ps_name <> modname then + error (Illegal_renaming(modname, ps.ps_name, filename)); + + List.iter + (function + | Rectypes -> + if not !Clflags.recursive_types then + error (Need_recursive_types(ps.ps_name, !current_unit)) + | Unsafe_string -> + if Config.safe_string then + error (Depend_on_unsafe_string_unit (ps.ps_name, !current_unit)); + | Deprecated _ -> () + | Opaque -> add_imported_opaque modname) + ps.ps_flags; + if check then check_consistency ps; + 
Hashtbl.add persistent_structures modname (Some ps); + ps + +let read_pers_struct check modname filename = + add_import modname; + let cmi = read_cmi filename in + acknowledge_pers_struct check modname + { Persistent_signature.filename; cmi } + +let find_pers_struct check name = + if name = "*predef*" then raise Not_found; + match Hashtbl.find persistent_structures name with + | Some ps -> ps + | None -> raise Not_found + | exception Not_found -> + match !can_load_cmis with + | Cannot_load_cmis _ -> raise Not_found + | Can_load_cmis -> + let ps = + match !Persistent_signature.load ~unit_name:name with + | Some ps -> ps + | None -> + Hashtbl.add persistent_structures name None; + raise Not_found + in + add_import name; + acknowledge_pers_struct check name ps + +(* Emits a warning if there is no valid cmi for name *) +let check_pers_struct name = + try + ignore (find_pers_struct false name) + with + | Not_found -> + let warn = Warnings.No_cmi_file(name, None) in + Location.prerr_warning Location.none warn + | Cmi_format.Error err -> + let msg = Format.asprintf "%a" Cmi_format.report_error err in + let warn = Warnings.No_cmi_file(name, Some msg) in + Location.prerr_warning Location.none warn + | Error err -> + let msg = + match err with + | Illegal_renaming(name, ps_name, filename) -> + Format.asprintf + " %a@ contains the compiled interface for @ \ + %s when %s was expected" + Location.print_filename filename ps_name name + | Inconsistent_import _ -> assert false + | Need_recursive_types(name, _) -> + Format.sprintf + "%s uses recursive types" + name + | Depend_on_unsafe_string_unit (name, _) -> + Printf.sprintf "%s uses -unsafe-string" + name + | Missing_module _ -> assert false + | Illegal_value_name _ -> assert false + in + let warn = Warnings.No_cmi_file(name, Some msg) in + Location.prerr_warning Location.none warn + +let read_pers_struct modname filename = + read_pers_struct true modname filename + +let find_pers_struct name = + find_pers_struct true name + +let check_pers_struct name = + if not (Hashtbl.mem persistent_structures name) then begin + (* PR#6843: record the weak dependency ([add_import]) regardless of + whether the check succeeds, to help make builds more + deterministic. *) + add_import name; + if (Warnings.is_active (Warnings.No_cmi_file("", None))) then + !add_delayed_check_forward + (fun () -> check_pers_struct name) + end + +let reset_cache () = + current_unit := ""; + Hashtbl.clear persistent_structures; + clear_imports (); + Hashtbl.clear value_declarations; + Hashtbl.clear type_declarations; + Hashtbl.clear module_declarations; + Hashtbl.clear used_constructors; + Hashtbl.clear prefixed_sg + +let reset_cache_toplevel () = + (* Delete 'missing cmi' entries from the cache. 
*) + let l = + Hashtbl.fold + (fun name r acc -> if r = None then name :: acc else acc) + persistent_structures [] + in + List.iter (Hashtbl.remove persistent_structures) l; + Hashtbl.clear value_declarations; + Hashtbl.clear type_declarations; + Hashtbl.clear module_declarations; + Hashtbl.clear used_constructors; + Hashtbl.clear prefixed_sg + + +let set_unit_name name = + current_unit := name + +let get_unit_name () = + !current_unit + +(* Lookup by identifier *) + +let rec find_module_descr path env = + match path with + Pident id -> + begin try + IdTbl.find_same id env.components + with Not_found -> + if Ident.persistent id && not (Ident.name id = !current_unit) + then (find_pers_struct (Ident.name id)).ps_comps + else raise Not_found + end + | Pdot(p, s, _pos) -> + begin match get_components (find_module_descr p env) with + Structure_comps c -> + let (descr, _pos) = Tbl.find_str s c.comp_components in + descr + | Functor_comps _ -> + raise Not_found + end + | Papply(p1, p2) -> + begin match get_components (find_module_descr p1 env) with + Functor_comps f -> + !components_of_functor_appl' f env p1 p2 + | Structure_comps _ -> + raise Not_found + end + +let find proj1 proj2 path env = + match path with + Pident id -> + IdTbl.find_same id (proj1 env) + | Pdot(p, s, _pos) -> + begin match get_components (find_module_descr p env) with + Structure_comps c -> + let (data, _pos) = Tbl.find_str s (proj2 c) in data + | Functor_comps _ -> + raise Not_found + end + | Papply _ -> + raise Not_found + +let find_value = + find (fun env -> env.values) (fun sc -> sc.comp_values) +and find_type_full = + find (fun env -> env.types) (fun sc -> sc.comp_types) +and find_modtype = + find (fun env -> env.modtypes) (fun sc -> sc.comp_modtypes) +and find_class = + find (fun env -> env.classes) (fun sc -> sc.comp_classes) +and find_cltype = + find (fun env -> env.cltypes) (fun sc -> sc.comp_cltypes) + +let type_of_cstr path = function + | {cstr_inlined = Some d; _} -> + (d, ([], List.map snd (Datarepr.labels_of_type path d))) + | _ -> + assert false + +let find_type_full path env = + match Path.constructor_typath path with + | Regular p -> + (try (PathMap.find p env.local_constraints, ([], [])) + with Not_found -> find_type_full p env) + | Cstr (ty_path, s) -> + let (_, (cstrs, _)) = + try find_type_full ty_path env + with Not_found -> assert false + in + let cstr = + try List.find (fun cstr -> cstr.cstr_name = s) cstrs + with Not_found -> assert false + in + type_of_cstr path cstr + | LocalExt id -> + let cstr = + try TycompTbl.find_same id env.constrs + with Not_found -> assert false + in + type_of_cstr path cstr + | Ext (mod_path, s) -> + let comps = + try find_module_descr mod_path env + with Not_found -> assert false + in + let comps = + match get_components comps with + | Structure_comps c -> c + | Functor_comps _ -> assert false + in + let exts = + List.filter + (function {cstr_tag=Cstr_extension _} -> true | _ -> false) + (try Tbl.find_str s comps.comp_constrs + with Not_found -> assert false) + in + match exts with + | [cstr] -> type_of_cstr path cstr + | _ -> assert false + +let find_type p env = + fst (find_type_full p env) +let find_type_descrs p env = + snd (find_type_full p env) + +let find_module ~alias path env = + match path with + Pident id -> + begin try + let data = IdTbl.find_same id env.modules in + EnvLazy.force subst_modtype_maker data + with Not_found -> + if Ident.persistent id && not (Ident.name id = !current_unit) then + let ps = find_pers_struct (Ident.name id) in + md 
(Mty_signature(Lazy.force ps.ps_sig)) + else raise Not_found + end + | Pdot(p, s, _pos) -> + begin match get_components (find_module_descr p env) with + Structure_comps c -> + let (data, _pos) = Tbl.find_str s c.comp_modules in + EnvLazy.force subst_modtype_maker data + | Functor_comps _ -> + raise Not_found + end + | Papply(p1, p2) -> + let desc1 = find_module_descr p1 env in + begin match get_components desc1 with + Functor_comps f -> + md begin match f.fcomp_res with + | Mty_alias _ as mty -> mty + | mty -> + if alias then mty else + try + Hashtbl.find f.fcomp_subst_cache p2 + with Not_found -> + let mty = + Subst.modtype + (Subst.add_module f.fcomp_param p2 Subst.identity) + f.fcomp_res in + Hashtbl.add f.fcomp_subst_cache p2 mty; + mty + end + | Structure_comps _ -> + raise Not_found + end + +let required_globals = ref [] +let reset_required_globals () = required_globals := [] +let get_required_globals () = !required_globals +let add_required_global id = + if Ident.global id && not !Clflags.transparent_modules + && not (List.exists (Ident.same id) !required_globals) + then required_globals := id :: !required_globals + +let rec normalize_path lax env path = + let path = + match path with + Pdot(p, s, pos) -> + Pdot(normalize_path lax env p, s, pos) + | Papply(p1, p2) -> + Papply(normalize_path lax env p1, normalize_path true env p2) + | _ -> path + in + try match find_module ~alias:true path env with + {md_type=Mty_alias(_, path1)} -> + let path' = normalize_path lax env path1 in + if lax || !Clflags.transparent_modules then path' else + let id = Path.head path in + if Ident.global id && not (Ident.same id (Path.head path')) + then add_required_global id; + path' + | _ -> path + with Not_found when lax + || (match path with Pident id -> not (Ident.persistent id) | _ -> true) -> + path + +let normalize_path oloc env path = + try normalize_path (oloc = None) env path + with Not_found -> + match oloc with None -> assert false + | Some loc -> + raise (Error(Missing_module(loc, path, normalize_path true env path))) + +let normalize_path_prefix oloc env path = + match path with + Pdot(p, s, pos) -> + Pdot(normalize_path oloc env p, s, pos) + | Pident _ -> + path + | Papply _ -> + assert false + + +let find_module = find_module ~alias:false + +(* Find the manifest type associated to a type when appropriate: + - the type should be public or should have a private row, + - the type should have an associated manifest type. *) +let find_type_expansion path env = + let decl = find_type path env in + match decl.type_manifest with + | Some body when decl.type_private = Public + || decl.type_kind <> Type_abstract + || Btype.has_constr_row body -> + (decl.type_params, body, may_map snd decl.type_newtype_level) + (* The manifest type of Private abstract data types without + private row are still considered unknown to the type system. + Hence, this case is caught by the following clause that also handles + purely abstract data types without manifest type definition. *) + | _ -> raise Not_found + +(* Find the manifest type information associated to a type, i.e. + the necessary information for the compiler's type-based optimisations. + In particular, the manifest type associated to a private abstract type + is revealed for the sake of compiler's type-based optimisations. *) +let find_type_expansion_opt path env = + let decl = find_type path env in + match decl.type_manifest with + (* The manifest type of Private abstract data types can still get + an approximation using their manifest type. 
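This is in contrast with [find_type_expansion] above, where such types remain unknown to the type system.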
*) + | Some body -> (decl.type_params, body, may_map snd decl.type_newtype_level) + | _ -> raise Not_found + +let find_modtype_expansion path env = + match (find_modtype path env).mtd_type with + | None -> raise Not_found + | Some mty -> mty + +let rec is_functor_arg path env = + match path with + Pident id -> + begin try Ident.find_same id env.functor_args; true + with Not_found -> false + end + | Pdot (p, _s, _) -> is_functor_arg p env + | Papply _ -> true + +(* Lookup by name *) + +exception Recmodule + +let report_deprecated ?loc p deprecated = + match loc, deprecated with + | Some loc, Some txt -> + let txt = if txt = "" then "" else "\n" ^ txt in + Location.deprecated loc (Printf.sprintf "module %s%s" (Path.name p) txt) + | _ -> () + +let mark_module_used env name loc = + if not (is_implicit_coercion env) then + try Hashtbl.find module_declarations (name, loc) () + with Not_found -> () + +let rec lookup_module_descr_aux ?loc lid env = + match lid with + Lident s -> + begin try + IdTbl.find_name s env.components + with Not_found -> + if s = !current_unit then raise Not_found; + let ps = find_pers_struct s in + (Pident(Ident.create_persistent s), ps.ps_comps) + end + | Ldot(l, s) -> + let (p, descr) = lookup_module_descr ?loc l env in + begin match get_components descr with + Structure_comps c -> + let (descr, pos) = Tbl.find_str s c.comp_components in + (Pdot(p, s, pos), descr) + | Functor_comps _ -> + raise Not_found + end + | Lapply(l1, l2) -> + let (p1, desc1) = lookup_module_descr ?loc l1 env in + let p2 = lookup_module ~load:true ?loc l2 env in + let {md_type=mty2} = find_module p2 env in + begin match get_components desc1 with + Functor_comps f -> + let loc = match loc with Some l -> l | None -> Location.none in + Misc.may (!check_modtype_inclusion ~loc env mty2 p2) f.fcomp_arg; + (Papply(p1, p2), !components_of_functor_appl' f env p1 p2) + | Structure_comps _ -> + raise Not_found + end + +and lookup_module_descr ?loc lid env = + let (p, comps) as res = lookup_module_descr_aux ?loc lid env in + mark_module_used env (Path.last p) comps.loc; +(* + Format.printf "USE module %s at %a@." 
(Path.last p) + Location.print comps.loc; +*) + report_deprecated ?loc p comps.deprecated; + res + +and lookup_module ~load ?loc lid env : Path.t = + match lid with + Lident s -> + begin try + let (p, data) = IdTbl.find_name s env.modules in + let {md_loc; md_attributes; md_type} = + EnvLazy.force subst_modtype_maker data + in + mark_module_used env s md_loc; + begin match md_type with + | Mty_ident (Path.Pident id) when Ident.name id = "#recmod#" -> + (* see #5965 *) + raise Recmodule + | _ -> () + end; + report_deprecated ?loc p + (Builtin_attributes.deprecated_of_attrs md_attributes); + p + with Not_found -> + if s = !current_unit then raise Not_found; + let p = Pident(Ident.create_persistent s) in + if !Clflags.transparent_modules && not load then check_pers_struct s + else begin + let ps = find_pers_struct s in + report_deprecated ?loc p ps.ps_comps.deprecated + end; + p + end + | Ldot(l, s) -> + let (p, descr) = lookup_module_descr ?loc l env in + begin match get_components descr with + Structure_comps c -> + let (_data, pos) = Tbl.find_str s c.comp_modules in + let (comps, _) = Tbl.find_str s c.comp_components in + mark_module_used env s comps.loc; + let p = Pdot(p, s, pos) in + report_deprecated ?loc p comps.deprecated; + p + | Functor_comps _ -> + raise Not_found + end + | Lapply(l1, l2) -> + let (p1, desc1) = lookup_module_descr ?loc l1 env in + let p2 = lookup_module ~load:true ?loc l2 env in + let {md_type=mty2} = find_module p2 env in + let p = Papply(p1, p2) in + begin match get_components desc1 with + Functor_comps f -> + let loc = match loc with Some l -> l | None -> Location.none in + Misc.may (!check_modtype_inclusion ~loc env mty2 p2) f.fcomp_arg; + p + | Structure_comps _ -> + raise Not_found + end + +let lookup proj1 proj2 ?loc lid env = + match lid with + Lident s -> + IdTbl.find_name s (proj1 env) + | Ldot(l, s) -> + let (p, desc) = lookup_module_descr ?loc l env in + begin match get_components desc with + Structure_comps c -> + let (data, pos) = Tbl.find_str s (proj2 c) in + (Pdot(p, s, pos), data) + | Functor_comps _ -> + raise Not_found + end + | Lapply _ -> + raise Not_found + +let lookup_all_simple proj1 proj2 shadow ?loc lid env = + match lid with + Lident s -> + let xl = TycompTbl.find_all s (proj1 env) in + let rec do_shadow = + function + | [] -> [] + | ((x, f) :: xs) -> + (x, f) :: + (do_shadow (List.filter (fun (y, _) -> not (shadow x y)) xs)) + in + do_shadow xl + | Ldot(l, s) -> + let (_p, desc) = lookup_module_descr ?loc l env in + begin match get_components desc with + Structure_comps c -> + let comps = + try Tbl.find_str s (proj2 c) with Not_found -> [] + in + List.map + (fun data -> (data, (fun () -> ()))) + comps + | Functor_comps _ -> + raise Not_found + end + | Lapply _ -> + raise Not_found + +let has_local_constraints env = not (PathMap.is_empty env.local_constraints) + +let cstr_shadow cstr1 cstr2 = + match cstr1.cstr_tag, cstr2.cstr_tag with + | Cstr_extension _, Cstr_extension _ -> true + | _ -> false + +let lbl_shadow _lbl1 _lbl2 = false + +let lookup_value = + lookup (fun env -> env.values) (fun sc -> sc.comp_values) +let lookup_all_constructors = + lookup_all_simple (fun env -> env.constrs) (fun sc -> sc.comp_constrs) + cstr_shadow +let lookup_all_labels = + lookup_all_simple (fun env -> env.labels) (fun sc -> sc.comp_labels) + lbl_shadow +let lookup_type = + lookup (fun env -> env.types) (fun sc -> sc.comp_types) +let lookup_modtype = + lookup (fun env -> env.modtypes) (fun sc -> sc.comp_modtypes) +let lookup_class = + lookup (fun env -> 
env.classes) (fun sc -> sc.comp_classes) +let lookup_cltype = + lookup (fun env -> env.cltypes) (fun sc -> sc.comp_cltypes) + +let copy_types l env = + let f desc = {desc with val_type = Subst.type_expr Subst.identity desc.val_type} in + let values = List.fold_left (fun env s -> IdTbl.update s f env) env.values l in + {env with values; summary = Env_copy_types (env.summary, l)} + +let mark_value_used env name vd = + if not (is_implicit_coercion env) then + try Hashtbl.find value_declarations (name, vd.val_loc) () + with Not_found -> () + +let mark_type_used env name vd = + if not (is_implicit_coercion env) then + try Hashtbl.find type_declarations (name, vd.type_loc) () + with Not_found -> () + +let mark_constructor_used usage env name vd constr = + if not (is_implicit_coercion env) then + try Hashtbl.find used_constructors (name, vd.type_loc, constr) usage + with Not_found -> () + +let mark_extension_used usage env ext name = + if not (is_implicit_coercion env) then + let ty_name = Path.last ext.ext_type_path in + try Hashtbl.find used_constructors (ty_name, ext.ext_loc, name) usage + with Not_found -> () + +let set_value_used_callback name vd callback = + let key = (name, vd.val_loc) in + try + let old = Hashtbl.find value_declarations key in + Hashtbl.replace value_declarations key (fun () -> old (); callback ()) + (* this is to support cases like: + let x = let x = 1 in x in x + where the two declarations have the same location + (e.g. resulting from Camlp4 expansion of grammar entries) *) + with Not_found -> + Hashtbl.add value_declarations key callback + +let set_type_used_callback name td callback = + let loc = td.type_loc in + if loc.Location.loc_ghost then () + else let key = (name, loc) in + let old = + try Hashtbl.find type_declarations key + with Not_found -> assert false + in + Hashtbl.replace type_declarations key (fun () -> callback old) + +let lookup_value ?loc lid env = + let (_, desc) as r = lookup_value ?loc lid env in + mark_value_used env (Longident.last lid) desc; + r + +let lookup_type ?loc lid env = + let (path, (decl, _)) = lookup_type ?loc lid env in + mark_type_used env (Longident.last lid) decl; + path + +let mark_type_path env path = + try + let decl = find_type path env in + mark_type_used env (Path.last path) decl + with Not_found -> () + +let ty_path t = + match repr t with + | {desc=Tconstr(path, _, _)} -> path + | _ -> assert false + +let lookup_constructor ?loc lid env = + match lookup_all_constructors ?loc lid env with + [] -> raise Not_found + | (desc, use) :: _ -> + mark_type_path env (ty_path desc.cstr_res); + use (); + desc + +let is_lident = function + Lident _ -> true + | _ -> false + +let lookup_all_constructors ?loc lid env = + try + let cstrs = lookup_all_constructors ?loc lid env in + let wrap_use desc use () = + mark_type_path env (ty_path desc.cstr_res); + use () + in + List.map (fun (cstr, use) -> (cstr, wrap_use cstr use)) cstrs + with + Not_found when is_lident lid -> [] + +let mark_constructor usage env name desc = + if not (is_implicit_coercion env) + then match desc.cstr_tag with + | Cstr_extension _ -> + begin + let ty_path = ty_path desc.cstr_res in + let ty_name = Path.last ty_path in + try Hashtbl.find used_constructors (ty_name, desc.cstr_loc, name) usage + with Not_found -> () + end + | _ -> + let ty_path = ty_path desc.cstr_res in + let ty_decl = try find_type ty_path env with Not_found -> assert false in + let ty_name = Path.last ty_path in + mark_constructor_used usage env ty_name ty_decl name + +let lookup_label ?loc lid env = 
+ match lookup_all_labels ?loc lid env with + [] -> raise Not_found + | (desc, use) :: _ -> + mark_type_path env (ty_path desc.lbl_res); + use (); + desc + +let lookup_all_labels ?loc lid env = + try + let lbls = lookup_all_labels ?loc lid env in + let wrap_use desc use () = + mark_type_path env (ty_path desc.lbl_res); + use () + in + List.map (fun (lbl, use) -> (lbl, wrap_use lbl use)) lbls + with + Not_found when is_lident lid -> [] + +let lookup_class ?loc lid env = + let (_, desc) as r = lookup_class ?loc lid env in + (* special support for Typeclass.unbound_class *) + if Path.name desc.cty_path = "" then ignore (lookup_type ?loc lid env) + else mark_type_path env desc.cty_path; + r + +let lookup_cltype ?loc lid env = + let (_, desc) as r = lookup_cltype ?loc lid env in + if Path.name desc.clty_path = "" then ignore (lookup_type ?loc lid env) + else mark_type_path env desc.clty_path; + mark_type_path env desc.clty_path; + r + +(* Iter on an environment (ignoring the body of functors and + not yet evaluated structures) *) + +type iter_cont = unit -> unit +let iter_env_cont = ref [] + +let rec scrape_alias_for_visit env mty = + match mty with + | Mty_alias(_, Pident id) + when Ident.persistent id + && not (Hashtbl.mem persistent_structures (Ident.name id)) -> false + | Mty_alias(_, path) -> (* PR#6600: find_module may raise Not_found *) + begin try scrape_alias_for_visit env (find_module path env).md_type + with Not_found -> false + end + | _ -> true + +let iter_env proj1 proj2 f env () = + IdTbl.iter (fun id x -> f (Pident id) x) (proj1 env); + let rec iter_components path path' mcomps = + let cont () = + let visit = + match EnvLazy.get_arg mcomps.comps with + | None -> true + | Some (env, _sub, _path, mty) -> scrape_alias_for_visit env mty + in + if not visit then () else + match get_components mcomps with + Structure_comps comps -> + Tbl.iter + (fun s (d, n) -> f (Pdot (path, s, n)) (Pdot (path', s, n), d)) + (proj2 comps); + Tbl.iter + (fun s (c, n) -> + iter_components (Pdot (path, s, n)) (Pdot (path', s, n)) c) + comps.comp_components + | Functor_comps _ -> () + in iter_env_cont := (path, cont) :: !iter_env_cont + in + Hashtbl.iter + (fun s pso -> + match pso with None -> () + | Some ps -> + let id = Pident (Ident.create_persistent s) in + iter_components id id ps.ps_comps) + persistent_structures; + IdTbl.iter + (fun id (path, comps) -> iter_components (Pident id) path comps) + env.components + +let run_iter_cont l = + iter_env_cont := []; + List.iter (fun c -> c ()) l; + let cont = List.rev !iter_env_cont in + iter_env_cont := []; + cont + +let iter_types f = iter_env (fun env -> env.types) (fun sc -> sc.comp_types) f + +let same_types env1 env2 = + env1.types == env2.types && env1.components == env2.components + +let used_persistent () = + let r = ref Concr.empty in + Hashtbl.iter (fun s pso -> if pso != None then r := Concr.add s !r) + persistent_structures; + !r + +let find_all_comps proj s (p,mcomps) = + match get_components mcomps with + Functor_comps _ -> [] + | Structure_comps comps -> + try let (c,n) = Tbl.find_str s (proj comps) in [Pdot(p,s,n), c] + with Not_found -> [] + +let rec find_shadowed_comps path env = + match path with + Pident id -> + IdTbl.find_all (Ident.name id) env.components + | Pdot (p, s, _) -> + let l = find_shadowed_comps p env in + let l' = + List.map (find_all_comps (fun comps -> comps.comp_components) s) l in + List.flatten l' + | Papply _ -> [] + +let find_shadowed proj1 proj2 path env = + match path with + Pident id -> + IdTbl.find_all 
(Ident.name id) (proj1 env) + | Pdot (p, s, _) -> + let l = find_shadowed_comps p env in + let l' = List.map (find_all_comps proj2 s) l in + List.flatten l' + | Papply _ -> [] + +let find_shadowed_types path env = + List.map fst + (find_shadowed + (fun env -> env.types) (fun comps -> comps.comp_types) path env) + + +(* GADT instance tracking *) + +let add_gadt_instance_level lv env = + {env with + gadt_instances = (lv, ref TypeSet.empty) :: env.gadt_instances} + +let is_Tlink = function {desc = Tlink _} -> true | _ -> false + +let gadt_instance_level env t = + let rec find_instance = function + [] -> None + | (lv, r) :: rem -> + if TypeSet.exists is_Tlink !r then + (* Should we use set_typeset ? *) + r := TypeSet.fold (fun ty -> TypeSet.add (repr ty)) !r TypeSet.empty; + if TypeSet.mem t !r then Some lv else find_instance rem + in find_instance env.gadt_instances + +let add_gadt_instances env lv tl = + let r = + try List.assoc lv env.gadt_instances with Not_found -> assert false in + (* Format.eprintf "Added"; + List.iter (fun ty -> Format.eprintf "@ %a" !Btype.print_raw ty) tl; + Format.eprintf "@."; *) + set_typeset r (List.fold_right TypeSet.add tl !r) + +(* Only use this after expand_head! *) +let add_gadt_instance_chain env lv t = + let r = + try List.assoc lv env.gadt_instances with Not_found -> assert false in + let rec add_instance t = + let t = repr t in + if not (TypeSet.mem t !r) then begin + (* Format.eprintf "@ %a" !Btype.print_raw t; *) + set_typeset r (TypeSet.add t !r); + match t.desc with + Tconstr (p, _, memo) -> + may add_instance (find_expans Private p !memo) + | _ -> () + end + in + (* Format.eprintf "Added chain"; *) + add_instance t + (* Format.eprintf "@." *) + +(* Expand manifest module type names at the top of the given module type *) + +let rec scrape_alias env ?path mty = + match mty, path with + Mty_ident p, _ -> + begin try + scrape_alias env (find_modtype_expansion p env) ?path + with Not_found -> + mty + end + | Mty_alias(_, path), _ -> + begin try + scrape_alias env (find_module path env).md_type ~path + with Not_found -> + (*Location.prerr_warning Location.none + (Warnings.No_cmi_file (Path.name path));*) + mty + end + | mty, Some path -> + !strengthen ~aliasable:true env mty path + | _ -> mty + +let scrape_alias env mty = scrape_alias env mty + +(* Given a signature and a root path, prefix all idents in the signature + by the root path and build the corresponding substitution. 
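For example, under a root [M], a value item [x] is given the path [M.x] and leaves the substitution unchanged, whereas a type item [t] is given the path [M.t] and also extends the substitution, so that later items mentioning [t] are rewritten to refer to [M.t].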
*) + +let rec prefix_idents root pos sub = function + [] -> ([], sub) + | Sig_value(id, decl) :: rem -> + let p = Pdot(root, Ident.name id, pos) in + let nextpos = match decl.val_kind with Val_prim _ -> pos | _ -> pos+1 in + let (pl, final_sub) = prefix_idents root nextpos sub rem in + (p::pl, final_sub) + | Sig_type(id, _, _) :: rem -> + let p = Pdot(root, Ident.name id, nopos) in + let (pl, final_sub) = + prefix_idents root pos (Subst.add_type id p sub) rem in + (p::pl, final_sub) + | Sig_typext(id, _, _) :: rem -> + let p = Pdot(root, Ident.name id, pos) in + (* we extend the substitution in case of an inlined record *) + let (pl, final_sub) = + prefix_idents root (pos+1) (Subst.add_type id p sub) rem in + (p::pl, final_sub) + | Sig_module(id, _, _) :: rem -> + let p = Pdot(root, Ident.name id, pos) in + let (pl, final_sub) = + prefix_idents root (pos+1) (Subst.add_module id p sub) rem in + (p::pl, final_sub) + | Sig_modtype(id, _) :: rem -> + let p = Pdot(root, Ident.name id, nopos) in + let (pl, final_sub) = + prefix_idents root pos + (Subst.add_modtype id (Mty_ident p) sub) rem in + (p::pl, final_sub) + | Sig_class(id, _, _) :: rem -> + (* pretend this is a type, cf. PR#6650 *) + let p = Pdot(root, Ident.name id, pos) in + let (pl, final_sub) = + prefix_idents root (pos + 1) (Subst.add_type id p sub) rem in + (p::pl, final_sub) + | Sig_class_type(id, _, _) :: rem -> + let p = Pdot(root, Ident.name id, nopos) in + let (pl, final_sub) = + prefix_idents root pos (Subst.add_type id p sub) rem in + (p::pl, final_sub) + +let prefix_idents root sub sg = + if sub = Subst.identity then + let sgs = + try + Hashtbl.find prefixed_sg root + with Not_found -> + let sgs = ref [] in + Hashtbl.add prefixed_sg root sgs; + sgs + in + try + List.assq sg !sgs + with Not_found -> + let r = prefix_idents root 0 sub sg in + sgs := (sg, r) :: !sgs; + r + else + prefix_idents root 0 sub sg + +(* Compute structure descriptions *) + +let add_to_tbl id decl tbl = + let decls = + try Tbl.find_str id tbl with Not_found -> [] in + Tbl.add id (decl :: decls) tbl + +let rec components_of_module ~deprecated ~loc env sub path mty = + { + deprecated; + loc; + comps = EnvLazy.create (env, sub, path, mty) + } + +and components_of_module_maker (env, sub, path, mty) = + match scrape_alias env mty with + Mty_signature sg -> + let c = + { comp_values = Tbl.empty; + comp_constrs = Tbl.empty; + comp_labels = Tbl.empty; comp_types = Tbl.empty; + comp_modules = Tbl.empty; comp_modtypes = Tbl.empty; + comp_components = Tbl.empty; comp_classes = Tbl.empty; + comp_cltypes = Tbl.empty } in + let pl, sub = prefix_idents path sub sg in + let env = ref env in + let pos = ref 0 in + List.iter2 (fun item path -> + match item with + Sig_value(id, decl) -> + let decl' = Subst.value_description sub decl in + c.comp_values <- + Tbl.add (Ident.name id) (decl', !pos) c.comp_values; + begin match decl.val_kind with + Val_prim _ -> () | _ -> incr pos + end + | Sig_type(id, decl, _) -> + let decl' = Subst.type_declaration sub decl in + Datarepr.set_row_name decl' (Subst.type_path sub (Path.Pident id)); + let constructors = + List.map snd (Datarepr.constructors_of_type path decl') in + let labels = + List.map snd (Datarepr.labels_of_type path decl') in + c.comp_types <- + Tbl.add (Ident.name id) + ((decl', (constructors, labels)), nopos) + c.comp_types; + List.iter + (fun descr -> + c.comp_constrs <- + add_to_tbl descr.cstr_name descr c.comp_constrs) + constructors; + List.iter + (fun descr -> + c.comp_labels <- + add_to_tbl descr.lbl_name descr 
c.comp_labels) + labels; + env := store_type_infos id decl !env + | Sig_typext(id, ext, _) -> + let ext' = Subst.extension_constructor sub ext in + let descr = Datarepr.extension_descr path ext' in + c.comp_constrs <- + add_to_tbl (Ident.name id) descr c.comp_constrs; + incr pos + | Sig_module(id, md, _) -> + let md' = EnvLazy.create (sub, md) in + c.comp_modules <- + Tbl.add (Ident.name id) (md', !pos) c.comp_modules; + let deprecated = + Builtin_attributes.deprecated_of_attrs md.md_attributes + in + let comps = + components_of_module ~deprecated ~loc:md.md_loc !env sub path + md.md_type + in + c.comp_components <- + Tbl.add (Ident.name id) (comps, !pos) c.comp_components; + env := store_module ~check:false id md !env; + incr pos + | Sig_modtype(id, decl) -> + let decl' = Subst.modtype_declaration sub decl in + c.comp_modtypes <- + Tbl.add (Ident.name id) (decl', nopos) c.comp_modtypes; + env := store_modtype id decl !env + | Sig_class(id, decl, _) -> + let decl' = Subst.class_declaration sub decl in + c.comp_classes <- + Tbl.add (Ident.name id) (decl', !pos) c.comp_classes; + incr pos + | Sig_class_type(id, decl, _) -> + let decl' = Subst.cltype_declaration sub decl in + c.comp_cltypes <- + Tbl.add (Ident.name id) (decl', !pos) c.comp_cltypes) + sg pl; + Some (Structure_comps c) + | Mty_functor(param, ty_arg, ty_res) -> + Some (Functor_comps { + fcomp_param = param; + (* fcomp_arg and fcomp_res must be prefixed eagerly, because + they are interpreted in the outer environment *) + fcomp_arg = may_map (Subst.modtype sub) ty_arg; + fcomp_res = Subst.modtype sub ty_res; + fcomp_cache = Hashtbl.create 17; + fcomp_subst_cache = Hashtbl.create 17 }) + | Mty_ident _ + | Mty_alias _ -> None + +(* Insertion of bindings by identifier + path *) + +and check_usage loc id warn tbl = + if not loc.Location.loc_ghost && Warnings.is_active (warn "") then begin + let name = Ident.name id in + let key = (name, loc) in + if Hashtbl.mem tbl key then () + else let used = ref false in + Hashtbl.add tbl key (fun () -> used := true); + if not (name = "" || name.[0] = '_' || name.[0] = '#') + then + !add_delayed_check_forward + (fun () -> if not !used then Location.prerr_warning loc (warn name)) + end; + +and check_value_name name loc = + (* Note: we could also check here general validity of the + identifier, to protect against bad identifiers forged by -pp or + -ppx preprocessors. 
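As written, the check only rejects names that start with ['#'] and contain a further ['#'].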
*) + + if String.length name > 0 && (name.[0] = '#') then + for i = 1 to String.length name - 1 do + if name.[i] = '#' then + raise (Error(Illegal_value_name(loc, name))) + done + + +and store_value ?check id decl env = + check_value_name (Ident.name id) decl.val_loc; + may (fun f -> check_usage decl.val_loc id f value_declarations) check; + { env with + values = IdTbl.add id decl env.values; + summary = Env_value(env.summary, id, decl) } + +and store_type ~check id info env = + let loc = info.type_loc in + if check then + check_usage loc id (fun s -> Warnings.Unused_type_declaration s) + type_declarations; + let path = Pident id in + let constructors = Datarepr.constructors_of_type path info in + let labels = Datarepr.labels_of_type path info in + let descrs = (List.map snd constructors, List.map snd labels) in + + if check && not loc.Location.loc_ghost && + Warnings.is_active (Warnings.Unused_constructor ("", false, false)) + then begin + let ty = Ident.name id in + List.iter + begin fun (_, {cstr_name = c; _}) -> + let k = (ty, loc, c) in + if not (Hashtbl.mem used_constructors k) then + let used = constructor_usages () in + Hashtbl.add used_constructors k (add_constructor_usage used); + if not (ty = "" || ty.[0] = '_') + then !add_delayed_check_forward + (fun () -> + if not (is_in_signature env) && not used.cu_positive then + Location.prerr_warning loc + (Warnings.Unused_constructor + (c, used.cu_pattern, used.cu_privatize))) + end + constructors + end; + { env with + constrs = + List.fold_right + (fun (id, descr) constrs -> TycompTbl.add id descr constrs) + constructors + env.constrs; + labels = + List.fold_right + (fun (id, descr) labels -> TycompTbl.add id descr labels) + labels + env.labels; + types = + IdTbl.add id (info, descrs) env.types; + summary = Env_type(env.summary, id, info) } + +and store_type_infos id info env = + (* Simplified version of store_type that doesn't compute and store + constructor and label infos, but simply record the arity and + manifest-ness of the type. Used in components_of_module to + keep track of type abbreviations (e.g. type t = float) in the + computation of label representations. 
*) + { env with + types = IdTbl.add id (info,([],[])) + env.types; + summary = Env_type(env.summary, id, info) } + +and store_extension ~check id ext env = + let loc = ext.ext_loc in + if check && not loc.Location.loc_ghost && + Warnings.is_active (Warnings.Unused_extension ("", false, false, false)) + then begin + let is_exception = Path.same ext.ext_type_path Predef.path_exn in + let ty = Path.last ext.ext_type_path in + let n = Ident.name id in + let k = (ty, loc, n) in + if not (Hashtbl.mem used_constructors k) then begin + let used = constructor_usages () in + Hashtbl.add used_constructors k (add_constructor_usage used); + !add_delayed_check_forward + (fun () -> + if not (is_in_signature env) && not used.cu_positive then + Location.prerr_warning loc + (Warnings.Unused_extension + (n, is_exception, used.cu_pattern, used.cu_privatize) + ) + ) + end; + end; + { env with + constrs = TycompTbl.add id + (Datarepr.extension_descr (Pident id) ext) + env.constrs; + summary = Env_extension(env.summary, id, ext) } + +and store_module ~check id md env = + let loc = md.md_loc in + if check then + check_usage loc id (fun s -> Warnings.Unused_module s) + module_declarations; + + let deprecated = Builtin_attributes.deprecated_of_attrs md.md_attributes in + { env with + modules = IdTbl.add id (EnvLazy.create (Subst.identity, md)) env.modules; + components = + IdTbl.add id + (components_of_module ~deprecated ~loc:md.md_loc + env Subst.identity (Pident id) md.md_type) + env.components; + summary = Env_module(env.summary, id, md) } + +and store_modtype id info env = + { env with + modtypes = IdTbl.add id info env.modtypes; + summary = Env_modtype(env.summary, id, info) } + +and store_class id desc env = + { env with + classes = IdTbl.add id desc env.classes; + summary = Env_class(env.summary, id, desc) } + +and store_cltype id desc env = + { env with + cltypes = IdTbl.add id desc env.cltypes; + summary = Env_cltype(env.summary, id, desc) } + +(* Compute the components of a functor application in a path. 
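The result is memoised in [fcomp_cache], keyed by the argument path, so repeated applications of the same functor to the same argument reuse the computed components.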
*) + +let components_of_functor_appl f env p1 p2 = + try + Hashtbl.find f.fcomp_cache p2 + with Not_found -> + let p = Papply(p1, p2) in + let sub = Subst.add_module f.fcomp_param p2 Subst.identity in + let mty = Subst.modtype sub f.fcomp_res in + let comps = components_of_module ~deprecated:None ~loc:Location.none + (*???*) + env Subst.identity p mty in + Hashtbl.add f.fcomp_cache p2 comps; + comps + +(* Define forward functions *) + +let _ = + components_of_module' := components_of_module; + components_of_functor_appl' := components_of_functor_appl; + components_of_module_maker' := components_of_module_maker + +(* Insertion of bindings by identifier *) + +let add_functor_arg id env = + {env with + functor_args = Ident.add id () env.functor_args; + summary = Env_functor_arg (env.summary, id)} + +let add_value ?check id desc env = + store_value ?check id desc env + +let add_type ~check id info env = + store_type ~check id info env + +and add_extension ~check id ext env = + store_extension ~check id ext env + +and add_module_declaration ?(arg=false) ~check id md env = + let env = store_module ~check id md env in + if arg then add_functor_arg id env else env + +and add_modtype id info env = + store_modtype id info env + +and add_class id ty env = + store_class id ty env + +and add_cltype id ty env = + store_cltype id ty env + +let add_module ?arg id mty env = + add_module_declaration ~check:false ?arg id (md mty) env + +let add_local_type path info env = + { env with + local_constraints = PathMap.add path info env.local_constraints } + +let add_local_constraint path info elv env = + match info with + {type_manifest = Some _; type_newtype_level = Some (lv, _)} -> + (* elv is the expansion level, lv is the definition level *) + let info = {info with type_newtype_level = Some (lv, elv)} in + add_local_type path info env + | _ -> assert false + + +(* Insertion of bindings by name *) + +let enter store_fun name data env = + let id = Ident.create name in (id, store_fun id data env) + +let enter_value ?check = enter (store_value ?check) +and enter_type = enter (store_type ~check:true) +and enter_extension = enter (store_extension ~check:true) +and enter_module_declaration ?arg id md env = + add_module_declaration ?arg ~check:true id md env + (* let (id, env) = enter store_module name md env in + (id, add_functor_arg ?arg id env) *) +and enter_modtype = enter store_modtype +and enter_class = enter store_class +and enter_cltype = enter store_cltype + +let enter_module ?arg s mty env = + let id = Ident.create s in + (id, enter_module_declaration ?arg id (md mty) env) + +(* Insertion of all components of a signature *) + +let add_item comp env = + match comp with + Sig_value(id, decl) -> add_value id decl env + | Sig_type(id, decl, _) -> add_type ~check:false id decl env + | Sig_typext(id, ext, _) -> add_extension ~check:false id ext env + | Sig_module(id, md, _) -> add_module_declaration ~check:false id md env + | Sig_modtype(id, decl) -> add_modtype id decl env + | Sig_class(id, decl, _) -> add_class id decl env + | Sig_class_type(id, decl, _) -> add_cltype id decl env + +let rec add_signature sg env = + match sg with + [] -> env + | comp :: rem -> add_signature rem (add_item comp env) + +(* Open a signature path *) + +let add_components slot root env0 comps = + let add_l w comps env0 = + TycompTbl.add_open slot w comps env0 + in + + let add w comps env0 = IdTbl.add_open slot w root comps env0 in + + let constrs = + add_l (fun x -> `Constructor x) comps.comp_constrs env0.constrs + in + let labels = + 
add_l (fun x -> `Label x) comps.comp_labels env0.labels + in + + let values = + add (fun x -> `Value x) comps.comp_values env0.values + in + let types = + add (fun x -> `Type x) comps.comp_types env0.types + in + let modtypes = + add (fun x -> `Module_type x) comps.comp_modtypes env0.modtypes + in + let classes = + add (fun x -> `Class x) comps.comp_classes env0.classes + in + let cltypes = + add (fun x -> `Class_type x) comps.comp_cltypes env0.cltypes + in + let components = + add (fun x -> `Component x) comps.comp_components env0.components + in + + let modules = + add (fun x -> `Module x) comps.comp_modules env0.modules + in + + { env0 with + summary = Env_open(env0.summary, root); + constrs; + labels; + values; + types; + modtypes; + classes; + cltypes; + components; + modules; + } + +let open_signature slot root env0 = + match get_components (find_module_descr root env0) with + | Functor_comps _ -> None + | Structure_comps comps -> Some (add_components slot root env0 comps) + + +(* Open a signature from a file *) + +let open_pers_signature name env = + match open_signature None (Pident(Ident.create_persistent name)) env with + | Some env -> env + | None -> assert false (* a compilation unit cannot refer to a functor *) + +let open_signature + ?(used_slot = ref false) + ?(loc = Location.none) ?(toplevel = false) ovf root env = + if not toplevel && ovf = Asttypes.Fresh && not loc.Location.loc_ghost + && (Warnings.is_active (Warnings.Unused_open "") + || Warnings.is_active (Warnings.Open_shadow_identifier ("", "")) + || Warnings.is_active (Warnings.Open_shadow_label_constructor ("",""))) + then begin + let used = used_slot in + !add_delayed_check_forward + (fun () -> + if not !used then begin + used := true; + Location.prerr_warning loc (Warnings.Unused_open (Path.name root)) + end + ); + let shadowed = ref [] in + let slot s b = + begin match check_shadowing env b with + | Some kind when not (List.mem (kind, s) !shadowed) -> + shadowed := (kind, s) :: !shadowed; + let w = + match kind with + | "label" | "constructor" -> + Warnings.Open_shadow_label_constructor (kind, s) + | _ -> Warnings.Open_shadow_identifier (kind, s) + in + Location.prerr_warning loc w + | _ -> () + end; + used := true + in + open_signature (Some slot) root env + end + else open_signature None root env + +(* Read a signature from a file *) + +let read_signature modname filename = + let ps = read_pers_struct modname filename in + Lazy.force ps.ps_sig + +(* Return the CRC of the interface of the given compilation unit *) + +let crc_of_unit name = + let ps = find_pers_struct name in + let crco = + try + List.assoc name ps.ps_crcs + with Not_found -> + assert false + in + match crco with + None -> assert false + | Some crc -> crc + +(* Return the list of imported interfaces with their CRCs *) + +let imports () = + Consistbl.extract (StringSet.elements !imported_units) crc_units + +(* Returns true if [s] is an opaque imported module *) +let is_imported_opaque s = + StringSet.mem s !imported_opaque_units + +(* Save a signature to a file *) + +let save_signature_with_imports ~deprecated sg modname filename imports = + (*prerr_endline filename; + List.iter (fun (name, crc) -> prerr_endline name) imports;*) + Btype.cleanup_abbrev (); + Subst.reset_for_saving (); + let sg = Subst.signature (Subst.for_saving Subst.identity) sg in + let flags = + List.concat [ + if !Clflags.recursive_types then [Cmi_format.Rectypes] else []; + if !Clflags.opaque then [Cmi_format.Opaque] else []; + (if !Clflags.unsafe_string then 
[Cmi_format.Unsafe_string] else []); + (match deprecated with Some s -> [Deprecated s] | None -> []); + ] + in + try + let cmi = { + cmi_name = modname; + cmi_sign = sg; + cmi_crcs = imports; + cmi_flags = flags; + } in + let crc = + output_to_file_via_temporary (* see MPR#7472, MPR#4991 *) + ~mode: [Open_binary] filename + (fun temp_filename oc -> output_cmi temp_filename oc cmi) in + (* Enter signature in persistent table so that imported_unit() + will also return its crc *) + let comps = + components_of_module ~deprecated ~loc:Location.none + empty Subst.identity + (Pident(Ident.create_persistent modname)) (Mty_signature sg) in + let ps = + { ps_name = modname; + ps_sig = lazy (Subst.signature Subst.identity sg); + ps_comps = comps; + ps_crcs = (cmi.cmi_name, Some crc) :: imports; + ps_filename = filename; + ps_flags = cmi.cmi_flags; + } in + save_pers_struct crc ps; + cmi + with exn -> + remove_file filename; + raise exn + +let save_signature ~deprecated sg modname filename = + save_signature_with_imports ~deprecated sg modname filename (imports()) + +(* Folding on environments *) + +let find_all proj1 proj2 f lid env acc = + match lid with + | None -> + IdTbl.fold_name + (fun name (p, data) acc -> f name p data acc) + (proj1 env) acc + | Some l -> + let p, desc = lookup_module_descr l env in + begin match get_components desc with + Structure_comps c -> + Tbl.fold + (fun s (data, pos) acc -> f s (Pdot (p, s, pos)) data acc) + (proj2 c) acc + | Functor_comps _ -> + acc + end + +let find_all_simple_list proj1 proj2 f lid env acc = + match lid with + | None -> + TycompTbl.fold_name + (fun data acc -> f data acc) + (proj1 env) acc + | Some l -> + let (_p, desc) = lookup_module_descr l env in + begin match get_components desc with + Structure_comps c -> + Tbl.fold + (fun _s comps acc -> + match comps with + [] -> acc + | data :: _ -> + f data acc) + (proj2 c) acc + | Functor_comps _ -> + acc + end + +let fold_modules f lid env acc = + match lid with + | None -> + let acc = + IdTbl.fold_name + (fun name (p, data) acc -> + let data = EnvLazy.force subst_modtype_maker data in + f name p data acc + ) + env.modules + acc + in + Hashtbl.fold + (fun name ps acc -> + match ps with + None -> acc + | Some ps -> + f name (Pident(Ident.create_persistent name)) + (md (Mty_signature (Lazy.force ps.ps_sig))) acc) + persistent_structures + acc + | Some l -> + let p, desc = lookup_module_descr l env in + begin match get_components desc with + Structure_comps c -> + Tbl.fold + (fun s (data, pos) acc -> + f s (Pdot (p, s, pos)) + (EnvLazy.force subst_modtype_maker data) acc) + c.comp_modules + acc + | Functor_comps _ -> + acc + end + +let fold_values f = + find_all (fun env -> env.values) (fun sc -> sc.comp_values) f +and fold_constructors f = + find_all_simple_list (fun env -> env.constrs) (fun sc -> sc.comp_constrs) f +and fold_labels f = + find_all_simple_list (fun env -> env.labels) (fun sc -> sc.comp_labels) f +and fold_types f = + find_all (fun env -> env.types) (fun sc -> sc.comp_types) f +and fold_modtypes f = + find_all (fun env -> env.modtypes) (fun sc -> sc.comp_modtypes) f +and fold_classs f = + find_all (fun env -> env.classes) (fun sc -> sc.comp_classes) f +and fold_cltypes f = + find_all (fun env -> env.cltypes) (fun sc -> sc.comp_cltypes) f + + +(* Make the initial environment *) +let (initial_safe_string, initial_unsafe_string) = + Predef.build_initial_env + (add_type ~check:false) + (add_extension ~check:false) + empty + +(* Return the environment summary *) + +let summary env = + if 
PathMap.is_empty env.local_constraints then env.summary + else Env_constraints (env.summary, env.local_constraints) + +let last_env = ref empty +let last_reduced_env = ref empty + +let keep_only_summary env = + if !last_env == env then !last_reduced_env + else begin + let new_env = + { + empty with + summary = env.summary; + local_constraints = env.local_constraints; + flags = env.flags; + } + in + last_env := env; + last_reduced_env := new_env; + new_env + end + + +let env_of_only_summary env_from_summary env = + let new_env = env_from_summary env.summary Subst.identity in + { new_env with + local_constraints = env.local_constraints; + flags = env.flags; + } + +(* Error report *) + +open Format + +let report_error ppf = function + | Illegal_renaming(modname, ps_name, filename) -> fprintf ppf + "Wrong file naming: %a@ contains the compiled interface for @ \ + %s when %s was expected" + Location.print_filename filename ps_name modname + | Inconsistent_import(name, source1, source2) -> fprintf ppf + "@[The files %a@ and %a@ \ + make inconsistent assumptions@ over interface %s@]" + Location.print_filename source1 Location.print_filename source2 name + | Need_recursive_types(import, export) -> + fprintf ppf + "@[Unit %s imports from %s, which uses recursive types.@ %s@]" + export import "The compilation flag -rectypes is required" + | Depend_on_unsafe_string_unit(import, export) -> + fprintf ppf + "@[Unit %s imports from %s, compiled with -unsafe-string.@ %s@]" + export import "This compiler has been configured in strict \ + safe-string mode (-force-safe-string)" + | Missing_module(_, path1, path2) -> + fprintf ppf "@[@["; + if Path.same path1 path2 then + fprintf ppf "Internal path@ %s@ is dangling." (Path.name path1) + else + fprintf ppf "Internal path@ %s@ expands to@ %s@ which is dangling." + (Path.name path1) (Path.name path2); + fprintf ppf "@]@ @[%s@ %s@ %s.@]@]" + "The compiled interface for module" (Ident.name (Path.head path2)) + "was not found" + | Illegal_value_name(_loc, name) -> + fprintf ppf "'%s' is not a valid value identifier." + name + +let () = + Location.register_error_of_exn + (function + | Error (Missing_module (loc, _, _) + | Illegal_value_name (loc, _) + as err) when loc <> Location.none -> + Some (Location.error_of_printer loc report_error err) + | Error err -> Some (Location.error_of_printer_file report_error err) + | _ -> None + ) diff --git a/res_syntax/compiler-libs-406/env.mli b/res_syntax/compiler-libs-406/env.mli new file mode 100644 index 0000000000..f96c76b7c1 --- /dev/null +++ b/res_syntax/compiler-libs-406/env.mli @@ -0,0 +1,324 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +(* Environment handling *) + +open Types + +module PathMap : Map.S with type key = Path.t + and type 'a t = 'a Map.Make(Path).t + +type summary = + Env_empty + | Env_value of summary * Ident.t * value_description + | Env_type of summary * Ident.t * type_declaration + | Env_extension of summary * Ident.t * extension_constructor + | Env_module of summary * Ident.t * module_declaration + | Env_modtype of summary * Ident.t * modtype_declaration + | Env_class of summary * Ident.t * class_declaration + | Env_cltype of summary * Ident.t * class_type_declaration + | Env_open of summary * Path.t + | Env_functor_arg of summary * Ident.t + | Env_constraints of summary * type_declaration PathMap.t + | Env_copy_types of summary * string list + +type t + +val empty: t +val initial_safe_string: t +val initial_unsafe_string: t +val diff: t -> t -> Ident.t list +val copy_local: from:t -> t -> t + +type type_descriptions = + constructor_description list * label_description list + +(* For short-paths *) +type iter_cont +val iter_types: + (Path.t -> Path.t * (type_declaration * type_descriptions) -> unit) -> + t -> iter_cont +val run_iter_cont: iter_cont list -> (Path.t * iter_cont) list +val same_types: t -> t -> bool +val used_persistent: unit -> Concr.t +val find_shadowed_types: Path.t -> t -> Path.t list +val without_cmis: ('a -> 'b) -> 'a -> 'b + (* [without_cmis f arg] applies [f] to [arg], but does not + allow opening cmis during its execution *) + +(* Lookup by paths *) + +val find_value: Path.t -> t -> value_description +val find_type: Path.t -> t -> type_declaration +val find_type_descrs: Path.t -> t -> type_descriptions +val find_module: Path.t -> t -> module_declaration +val find_modtype: Path.t -> t -> modtype_declaration +val find_class: Path.t -> t -> class_declaration +val find_cltype: Path.t -> t -> class_type_declaration + +val find_type_expansion: + Path.t -> t -> type_expr list * type_expr * int option +val find_type_expansion_opt: + Path.t -> t -> type_expr list * type_expr * int option +(* Find the manifest type information associated to a type for the sake + of the compiler's type-based optimisations. *) +val find_modtype_expansion: Path.t -> t -> module_type +val add_functor_arg: Ident.t -> t -> t +val is_functor_arg: Path.t -> t -> bool +val normalize_path: Location.t option -> t -> Path.t -> Path.t +(* Normalize the path to a concrete value or module. + If the option is None, allow returning dangling paths. + Otherwise raise a Missing_module error, and may add forgotten + head as required global. 
*) +val normalize_path_prefix: Location.t option -> t -> Path.t -> Path.t +(* Only normalize the prefix part of the path *) +val reset_required_globals: unit -> unit +val get_required_globals: unit -> Ident.t list +val add_required_global: Ident.t -> unit + +val has_local_constraints: t -> bool +val add_gadt_instance_level: int -> t -> t +val gadt_instance_level: t -> type_expr -> int option +val add_gadt_instances: t -> int -> type_expr list -> unit +val add_gadt_instance_chain: t -> int -> type_expr -> unit + +(* Lookup by long identifiers *) + +(* ?loc is used to report 'deprecated module' warnings *) + +val lookup_value: + ?loc:Location.t -> Longident.t -> t -> Path.t * value_description +val lookup_constructor: + ?loc:Location.t -> Longident.t -> t -> constructor_description +val lookup_all_constructors: + ?loc:Location.t -> + Longident.t -> t -> (constructor_description * (unit -> unit)) list +val lookup_label: + ?loc:Location.t -> Longident.t -> t -> label_description +val lookup_all_labels: + ?loc:Location.t -> + Longident.t -> t -> (label_description * (unit -> unit)) list +val lookup_type: + ?loc:Location.t -> Longident.t -> t -> Path.t + (* Since 4.04, this function no longer returns [type_description]. + To obtain it, you should either call [Env.find_type], or replace + it by [Typetexp.find_type] *) +val lookup_module: + load:bool -> ?loc:Location.t -> Longident.t -> t -> Path.t +val lookup_modtype: + ?loc:Location.t -> Longident.t -> t -> Path.t * modtype_declaration +val lookup_class: + ?loc:Location.t -> Longident.t -> t -> Path.t * class_declaration +val lookup_cltype: + ?loc:Location.t -> Longident.t -> t -> Path.t * class_type_declaration + +val copy_types: string list -> t -> t + (* Used only in Typecore.duplicate_ident_types. *) + +exception Recmodule + (* Raise by lookup_module when the identifier refers + to one of the modules of a recursive definition + during the computation of its approximation (see #5965). *) + +(* Insertion by identifier *) + +val add_value: + ?check:(string -> Warnings.t) -> Ident.t -> value_description -> t -> t +val add_type: check:bool -> Ident.t -> type_declaration -> t -> t +val add_extension: check:bool -> Ident.t -> extension_constructor -> t -> t +val add_module: ?arg:bool -> Ident.t -> module_type -> t -> t +val add_module_declaration: ?arg:bool -> check:bool -> Ident.t -> + module_declaration -> t -> t +val add_modtype: Ident.t -> modtype_declaration -> t -> t +val add_class: Ident.t -> class_declaration -> t -> t +val add_cltype: Ident.t -> class_type_declaration -> t -> t +val add_local_constraint: Path.t -> type_declaration -> int -> t -> t +val add_local_type: Path.t -> type_declaration -> t -> t + +(* Insertion of all fields of a signature. *) + +val add_item: signature_item -> t -> t +val add_signature: signature -> t -> t + +(* Insertion of all fields of a signature, relative to the given path. + Used to implement open. Returns None if the path refers to a functor, + not a structure. 
*) +val open_signature: + ?used_slot:bool ref -> + ?loc:Location.t -> ?toplevel:bool -> Asttypes.override_flag -> Path.t -> + t -> t option + +val open_pers_signature: string -> t -> t + +(* Insertion by name *) + +val enter_value: + ?check:(string -> Warnings.t) -> + string -> value_description -> t -> Ident.t * t +val enter_type: string -> type_declaration -> t -> Ident.t * t +val enter_extension: string -> extension_constructor -> t -> Ident.t * t +val enter_module: ?arg:bool -> string -> module_type -> t -> Ident.t * t +val enter_module_declaration: + ?arg:bool -> Ident.t -> module_declaration -> t -> t +val enter_modtype: string -> modtype_declaration -> t -> Ident.t * t +val enter_class: string -> class_declaration -> t -> Ident.t * t +val enter_cltype: string -> class_type_declaration -> t -> Ident.t * t + +(* Initialize the cache of in-core module interfaces. *) +val reset_cache: unit -> unit + +(* To be called before each toplevel phrase. *) +val reset_cache_toplevel: unit -> unit + +(* Remember the name of the current compilation unit. *) +val set_unit_name: string -> unit +val get_unit_name: unit -> string + +(* Read, save a signature to/from a file *) + +val read_signature: string -> string -> signature + (* Arguments: module name, file name. Results: signature. *) +val save_signature: + deprecated:string option -> signature -> string -> string -> Cmi_format.cmi_infos + (* Arguments: signature, module name, file name. *) +val save_signature_with_imports: + deprecated:string option -> + signature -> string -> string -> (string * Digest.t option) list + -> Cmi_format.cmi_infos + (* Arguments: signature, module name, file name, + imported units with their CRCs. *) + +(* Return the CRC of the interface of the given compilation unit *) + +val crc_of_unit: string -> Digest.t + +(* Return the set of compilation units imported, with their CRC *) + +val imports: unit -> (string * Digest.t option) list + +(* [is_imported_opaque md] returns true if [md] is an opaque imported module *) +val is_imported_opaque: string -> bool + +(* Direct access to the table of imported compilation units with their CRC *) + +val crc_units: Consistbl.t +val add_import: string -> unit + +(* Summaries -- compact representation of an environment, to be + exported in debugging information. *) + +val summary: t -> summary + +(* Return an equivalent environment where all fields have been reset, + except the summary. The initial environment can be rebuilt from the + summary, using Envaux.env_of_only_summary. 
*) + +val keep_only_summary : t -> t +val env_of_only_summary : (summary -> Subst.t -> t) -> t -> t + +(* Error report *) + +type error = + | Illegal_renaming of string * string * string + | Inconsistent_import of string * string * string + | Need_recursive_types of string * string + | Depend_on_unsafe_string_unit of string * string + | Missing_module of Location.t * Path.t * Path.t + | Illegal_value_name of Location.t * string + +exception Error of error + +open Format + +val report_error: formatter -> error -> unit + + +val mark_value_used: t -> string -> value_description -> unit +val mark_module_used: t -> string -> Location.t -> unit +val mark_type_used: t -> string -> type_declaration -> unit + +type constructor_usage = Positive | Pattern | Privatize +val mark_constructor_used: + constructor_usage -> t -> string -> type_declaration -> string -> unit +val mark_constructor: + constructor_usage -> t -> string -> constructor_description -> unit +val mark_extension_used: + constructor_usage -> t -> extension_constructor -> string -> unit + +val in_signature: bool -> t -> t +val implicit_coercion: t -> t + +val is_in_signature: t -> bool + +val set_value_used_callback: + string -> value_description -> (unit -> unit) -> unit +val set_type_used_callback: + string -> type_declaration -> ((unit -> unit) -> unit) -> unit + +(* Forward declaration to break mutual recursion with Includemod. *) +val check_modtype_inclusion: + (loc:Location.t -> t -> module_type -> Path.t -> module_type -> unit) ref +(* Forward declaration to break mutual recursion with Typecore. *) +val add_delayed_check_forward: ((unit -> unit) -> unit) ref +(* Forward declaration to break mutual recursion with Mtype. *) +val strengthen: + (aliasable:bool -> t -> module_type -> Path.t -> module_type) ref +(* Forward declaration to break mutual recursion with Ctype. *) +val same_constr: (t -> type_expr -> type_expr -> bool) ref + +(** Folding over all identifiers (for analysis purpose) *) + +val fold_values: + (string -> Path.t -> value_description -> 'a -> 'a) -> + Longident.t option -> t -> 'a -> 'a +val fold_types: + (string -> Path.t -> type_declaration * type_descriptions -> 'a -> 'a) -> + Longident.t option -> t -> 'a -> 'a +val fold_constructors: + (constructor_description -> 'a -> 'a) -> + Longident.t option -> t -> 'a -> 'a +val fold_labels: + (label_description -> 'a -> 'a) -> + Longident.t option -> t -> 'a -> 'a + +(** Persistent structures are only traversed if they are already loaded. *) +val fold_modules: + (string -> Path.t -> module_declaration -> 'a -> 'a) -> + Longident.t option -> t -> 'a -> 'a + +val fold_modtypes: + (string -> Path.t -> modtype_declaration -> 'a -> 'a) -> + Longident.t option -> t -> 'a -> 'a +val fold_classs: + (string -> Path.t -> class_declaration -> 'a -> 'a) -> + Longident.t option -> t -> 'a -> 'a +val fold_cltypes: + (string -> Path.t -> class_type_declaration -> 'a -> 'a) -> + Longident.t option -> t -> 'a -> 'a + +(** Utilities *) +val scrape_alias: t -> module_type -> module_type +val check_value_name: string -> Location.t -> unit + +module Persistent_signature : sig + type t = + { filename : string; (** Name of the file containing the signature. *) + cmi : Cmi_format.cmi_infos } + + (** Function used to load a persistent signature. The default is to look for + the .cmi file in the load path. This function can be overridden to load + it from memory, for instance to build a self-contained toplevel. 
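      A possible override is sketched below; [in_memory_cmi] is a hypothetical
      lookup returning a [Cmi_format.cmi_infos] for units held in memory, and
      the sketch falls back to the default loader otherwise:
      {[
        (* [in_memory_cmi : string -> Cmi_format.cmi_infos option] is hypothetical. *)
        let default = !Persistent_signature.load
        let () =
          Persistent_signature.load := fun ~unit_name ->
            match in_memory_cmi unit_name with
            | Some cmi -> Some { Persistent_signature.filename = unit_name ^ ".cmi"; cmi }
            | None -> default ~unit_name
      ]}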
*) + val load : (unit_name:string -> t option) ref +end diff --git a/res_syntax/compiler-libs-406/genlex.ml b/res_syntax/compiler-libs-406/genlex.ml new file mode 100644 index 0000000000..b015bb95aa --- /dev/null +++ b/res_syntax/compiler-libs-406/genlex.ml @@ -0,0 +1,201 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +type token = + Kwd of string + | Ident of string + | Int of int + | Float of float + | String of string + | Char of char + +(* The string buffering machinery *) + +let initial_buffer = Bytes.create 32 + +let buffer = ref initial_buffer +let bufpos = ref 0 + +let reset_buffer () = buffer := initial_buffer; bufpos := 0 + +let store c = + if !bufpos >= Bytes.length !buffer then begin + let newbuffer = Bytes.create (2 * !bufpos) in + Bytes.blit !buffer 0 newbuffer 0 !bufpos; + buffer := newbuffer + end; + Bytes.set !buffer !bufpos c; + incr bufpos + +let get_string () = + let s = Bytes.sub_string !buffer 0 !bufpos in buffer := initial_buffer; s + +(* The lexer *) + +let make_lexer keywords = + let kwd_table = Hashtbl.create 17 in + List.iter (fun s -> Hashtbl.add kwd_table s (Kwd s)) keywords; + let ident_or_keyword id = + try Hashtbl.find kwd_table id with + Not_found -> Ident id + and keyword_or_error c = + let s = String.make 1 c in + try Hashtbl.find kwd_table s with + Not_found -> raise (Stream.Error ("Illegal character " ^ s)) + in + let rec next_token (strm__ : _ Stream.t) = + match Stream.peek strm__ with + Some (' ' | '\010' | '\013' | '\009' | '\026' | '\012') -> + Stream.junk strm__; next_token strm__ + | Some ('A'..'Z' | 'a'..'z' | '_' | '\192'..'\255' as c) -> + Stream.junk strm__; + let s = strm__ in reset_buffer (); store c; ident s + | Some + ('!' | '%' | '&' | '$' | '#' | '+' | '/' | ':' | '<' | '=' | '>' | + '?' | '@' | '\\' | '~' | '^' | '|' | '*' as c) -> + Stream.junk strm__; + let s = strm__ in reset_buffer (); store c; ident2 s + | Some ('0'..'9' as c) -> + Stream.junk strm__; + let s = strm__ in reset_buffer (); store c; number s + | Some '\'' -> + Stream.junk strm__; + let c = + try char strm__ with + Stream.Failure -> raise (Stream.Error "") + in + begin match Stream.peek strm__ with + Some '\'' -> Stream.junk strm__; Some (Char c) + | _ -> raise (Stream.Error "") + end + | Some '\"' -> + Stream.junk strm__; + let s = strm__ in reset_buffer (); Some (String (string s)) + | Some '-' -> Stream.junk strm__; neg_number strm__ + | Some '(' -> Stream.junk strm__; maybe_comment strm__ + | Some c -> Stream.junk strm__; Some (keyword_or_error c) + | _ -> None + and ident (strm__ : _ Stream.t) = + match Stream.peek strm__ with + Some + ('A'..'Z' | 'a'..'z' | '\192'..'\255' | '0'..'9' | '_' | '\'' as c) -> + Stream.junk strm__; let s = strm__ in store c; ident s + | _ -> Some (ident_or_keyword (get_string ())) + and ident2 (strm__ : _ Stream.t) = + match Stream.peek strm__ with + Some + ('!' | '%' | '&' | '$' | '#' | '+' | '-' | '/' | ':' | '<' | '=' | + '>' | '?' 
| '@' | '\\' | '~' | '^' | '|' | '*' as c) -> + Stream.junk strm__; let s = strm__ in store c; ident2 s + | _ -> Some (ident_or_keyword (get_string ())) + and neg_number (strm__ : _ Stream.t) = + match Stream.peek strm__ with + Some ('0'..'9' as c) -> + Stream.junk strm__; + let s = strm__ in reset_buffer (); store '-'; store c; number s + | _ -> let s = strm__ in reset_buffer (); store '-'; ident2 s + and number (strm__ : _ Stream.t) = + match Stream.peek strm__ with + Some ('0'..'9' as c) -> + Stream.junk strm__; let s = strm__ in store c; number s + | Some '.' -> + Stream.junk strm__; let s = strm__ in store '.'; decimal_part s + | Some ('e' | 'E') -> + Stream.junk strm__; let s = strm__ in store 'E'; exponent_part s + | _ -> Some (Int (int_of_string (get_string ()))) + and decimal_part (strm__ : _ Stream.t) = + match Stream.peek strm__ with + Some ('0'..'9' as c) -> + Stream.junk strm__; let s = strm__ in store c; decimal_part s + | Some ('e' | 'E') -> + Stream.junk strm__; let s = strm__ in store 'E'; exponent_part s + | _ -> Some (Float (float_of_string (get_string ()))) + and exponent_part (strm__ : _ Stream.t) = + match Stream.peek strm__ with + Some ('+' | '-' as c) -> + Stream.junk strm__; let s = strm__ in store c; end_exponent_part s + | _ -> end_exponent_part strm__ + and end_exponent_part (strm__ : _ Stream.t) = + match Stream.peek strm__ with + Some ('0'..'9' as c) -> + Stream.junk strm__; let s = strm__ in store c; end_exponent_part s + | _ -> Some (Float (float_of_string (get_string ()))) + and string (strm__ : _ Stream.t) = + match Stream.peek strm__ with + Some '\"' -> Stream.junk strm__; get_string () + | Some '\\' -> + Stream.junk strm__; + let c = + try escape strm__ with + Stream.Failure -> raise (Stream.Error "") + in + let s = strm__ in store c; string s + | Some c -> Stream.junk strm__; let s = strm__ in store c; string s + | _ -> raise Stream.Failure + and char (strm__ : _ Stream.t) = + match Stream.peek strm__ with + Some '\\' -> + Stream.junk strm__; + begin try escape strm__ with + Stream.Failure -> raise (Stream.Error "") + end + | Some c -> Stream.junk strm__; c + | _ -> raise Stream.Failure + and escape (strm__ : _ Stream.t) = + match Stream.peek strm__ with + Some 'n' -> Stream.junk strm__; '\n' + | Some 'r' -> Stream.junk strm__; '\r' + | Some 't' -> Stream.junk strm__; '\t' + | Some ('0'..'9' as c1) -> + Stream.junk strm__; + begin match Stream.peek strm__ with + Some ('0'..'9' as c2) -> + Stream.junk strm__; + begin match Stream.peek strm__ with + Some ('0'..'9' as c3) -> + Stream.junk strm__; + Char.chr + ((Char.code c1 - 48) * 100 + (Char.code c2 - 48) * 10 + + (Char.code c3 - 48)) + | _ -> raise (Stream.Error "") + end + | _ -> raise (Stream.Error "") + end + | Some c -> Stream.junk strm__; c + | _ -> raise Stream.Failure + and maybe_comment (strm__ : _ Stream.t) = + match Stream.peek strm__ with + Some '*' -> + Stream.junk strm__; let s = strm__ in comment s; next_token s + | _ -> Some (keyword_or_error '(') + and comment (strm__ : _ Stream.t) = + match Stream.peek strm__ with + Some '(' -> Stream.junk strm__; maybe_nested_comment strm__ + | Some '*' -> Stream.junk strm__; maybe_end_comment strm__ + | Some _ -> Stream.junk strm__; comment strm__ + | _ -> raise Stream.Failure + and maybe_nested_comment (strm__ : _ Stream.t) = + match Stream.peek strm__ with + Some '*' -> Stream.junk strm__; let s = strm__ in comment s; comment s + | Some _ -> Stream.junk strm__; comment strm__ + | _ -> raise Stream.Failure + and maybe_end_comment (strm__ : _ 
Stream.t) = + match Stream.peek strm__ with + Some ')' -> Stream.junk strm__; () + | Some '*' -> Stream.junk strm__; maybe_end_comment strm__ + | Some _ -> Stream.junk strm__; comment strm__ + | _ -> raise Stream.Failure + in + fun input -> Stream.from (fun _count -> next_token input) diff --git a/res_syntax/compiler-libs-406/genlex.mli b/res_syntax/compiler-libs-406/genlex.mli new file mode 100644 index 0000000000..473949269a --- /dev/null +++ b/res_syntax/compiler-libs-406/genlex.mli @@ -0,0 +1,73 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** A generic lexical analyzer. + + + This module implements a simple 'standard' lexical analyzer, presented + as a function from character streams to token streams. It implements + roughly the lexical conventions of OCaml, but is parameterized by the + set of keywords of your language. + + + Example: a lexer suitable for a desk calculator is obtained by + {[ let lexer = make_lexer ["+";"-";"*";"/";"let";"="; "("; ")"] ]} + + The associated parser would be a function from [token stream] + to, for instance, [int], and would have rules such as: + + {[ + let rec parse_expr = parser + | [< n1 = parse_atom; n2 = parse_remainder n1 >] -> n2 + and parse_atom = parser + | [< 'Int n >] -> n + | [< 'Kwd "("; n = parse_expr; 'Kwd ")" >] -> n + and parse_remainder n1 = parser + | [< 'Kwd "+"; n2 = parse_expr >] -> n1+n2 + | [< >] -> n1 + ]} + + One should notice that the use of the [parser] keyword and associated + notation for streams are only available through camlp4 extensions. This + means that one has to preprocess its sources {i e. g.} by using the + ["-pp"] command-line switch of the compilers. +*) + +(** The type of tokens. The lexical classes are: [Int] and [Float] + for integer and floating-point numbers; [String] for + string literals, enclosed in double quotes; [Char] for + character literals, enclosed in single quotes; [Ident] for + identifiers (either sequences of letters, digits, underscores + and quotes, or sequences of 'operator characters' such as + [+], [*], etc); and [Kwd] for keywords (either identifiers or + single 'special characters' such as [(], [}], etc). *) +type token = + Kwd of string + | Ident of string + | Int of int + | Float of float + | String of string + | Char of char + +val make_lexer : string list -> char Stream.t -> token Stream.t +(** Construct the lexer function. The first argument is the list of + keywords. An identifier [s] is returned as [Kwd s] if [s] + belongs to this list, and as [Ident s] otherwise. + A special character [s] is returned as [Kwd s] if [s] + belongs to this list, and cause a lexical error (exception + {!Stream.Error} with the offending lexeme as its parameter) otherwise. + Blanks and newlines are skipped. Comments delimited by [(*] and [*)] + are skipped as well, and can be nested. 
A {!Stream.Failure} exception + is raised if end of stream is unexpectedly reached.*) diff --git a/res_syntax/compiler-libs-406/ident.ml b/res_syntax/compiler-libs-406/ident.ml new file mode 100644 index 0000000000..1041fb51c7 --- /dev/null +++ b/res_syntax/compiler-libs-406/ident.ml @@ -0,0 +1,249 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +open Format + +type t = { stamp: int; name: string; mutable flags: int } + +let global_flag = 1 +let predef_exn_flag = 2 + +(* A stamp of 0 denotes a persistent identifier *) + +let currentstamp = ref 0 + +let create s = + incr currentstamp; + { name = s; stamp = !currentstamp; flags = 0 } + +let create_predef_exn s = + incr currentstamp; + { name = s; stamp = !currentstamp; flags = predef_exn_flag } + +let create_persistent s = + { name = s; stamp = 0; flags = global_flag } + +let rename i = + incr currentstamp; + { i with stamp = !currentstamp } + +let name i = i.name + +let unique_name i = i.name ^ "_" ^ string_of_int i.stamp + +let unique_toplevel_name i = i.name ^ "/" ^ string_of_int i.stamp + +let persistent i = (i.stamp = 0) + +let equal i1 i2 = i1.name = i2.name + +let same i1 i2 = i1 = i2 + (* Possibly more efficient version (with a real compiler, at least): + if i1.stamp <> 0 + then i1.stamp = i2.stamp + else i2.stamp = 0 && i1.name = i2.name *) + +let compare i1 i2 = Stdlib.compare i1 i2 + +let binding_time i = i.stamp + +let current_time() = !currentstamp +let set_current_time t = currentstamp := max !currentstamp t + +let reinit_level = ref (-1) + +let reinit () = + if !reinit_level < 0 + then reinit_level := !currentstamp + else currentstamp := !reinit_level + +let hide i = + { i with stamp = -1 } + +let make_global i = + i.flags <- i.flags lor global_flag + +let global i = + (i.flags land global_flag) <> 0 + +let is_predef_exn i = + (i.flags land predef_exn_flag) <> 0 + +let print ppf i = + match i.stamp with + | 0 -> fprintf ppf "%s!" 
i.name + | -1 -> fprintf ppf "%s#" i.name + | n -> fprintf ppf "%s/%i%s" i.name n (if global i then "g" else "") + +type 'a tbl = + Empty + | Node of 'a tbl * 'a data * 'a tbl * int + +and 'a data = + { ident: t; + data: 'a; + previous: 'a data option } + +let empty = Empty + +(* Inline expansion of height for better speed + * let height = function + * Empty -> 0 + * | Node(_,_,_,h) -> h + *) + +let mknode l d r = + let hl = match l with Empty -> 0 | Node(_,_,_,h) -> h + and hr = match r with Empty -> 0 | Node(_,_,_,h) -> h in + Node(l, d, r, (if hl >= hr then hl + 1 else hr + 1)) + +let balance l d r = + let hl = match l with Empty -> 0 | Node(_,_,_,h) -> h + and hr = match r with Empty -> 0 | Node(_,_,_,h) -> h in + if hl > hr + 1 then + match l with + | Node (ll, ld, lr, _) + when (match ll with Empty -> 0 | Node(_,_,_,h) -> h) >= + (match lr with Empty -> 0 | Node(_,_,_,h) -> h) -> + mknode ll ld (mknode lr d r) + | Node (ll, ld, Node(lrl, lrd, lrr, _), _) -> + mknode (mknode ll ld lrl) lrd (mknode lrr d r) + | _ -> assert false + else if hr > hl + 1 then + match r with + | Node (rl, rd, rr, _) + when (match rr with Empty -> 0 | Node(_,_,_,h) -> h) >= + (match rl with Empty -> 0 | Node(_,_,_,h) -> h) -> + mknode (mknode l d rl) rd rr + | Node (Node (rll, rld, rlr, _), rd, rr, _) -> + mknode (mknode l d rll) rld (mknode rlr rd rr) + | _ -> assert false + else + mknode l d r + +let rec add id data = function + Empty -> + Node(Empty, {ident = id; data = data; previous = None}, Empty, 1) + | Node(l, k, r, h) -> + let c = compare id.name k.ident.name in + if c = 0 then + Node(l, {ident = id; data = data; previous = Some k}, r, h) + else if c < 0 then + balance (add id data l) k r + else + balance l k (add id data r) + +let rec find_stamp s = function + None -> + raise Not_found + | Some k -> + if k.ident.stamp = s then k.data else find_stamp s k.previous + +let rec find_same id = function + Empty -> + raise Not_found + | Node(l, k, r, _) -> + let c = compare id.name k.ident.name in + if c = 0 then + if id.stamp = k.ident.stamp + then k.data + else find_stamp id.stamp k.previous + else + find_same id (if c < 0 then l else r) + +let rec find_name name = function + Empty -> + raise Not_found + | Node(l, k, r, _) -> + let c = compare name k.ident.name in + if c = 0 then + k.ident, k.data + else + find_name name (if c < 0 then l else r) + +let rec get_all = function + | None -> [] + | Some k -> (k.ident, k.data) :: get_all k.previous + +let rec find_all name = function + Empty -> + [] + | Node(l, k, r, _) -> + let c = compare name k.ident.name in + if c = 0 then + (k.ident, k.data) :: get_all k.previous + else + find_all name (if c < 0 then l else r) + +let rec fold_aux f stack accu = function + Empty -> + begin match stack with + [] -> accu + | a :: l -> fold_aux f l accu a + end + | Node(l, k, r, _) -> + fold_aux f (l :: stack) (f k accu) r + +let fold_name f tbl accu = fold_aux (fun k -> f k.ident k.data) [] accu tbl + +let rec fold_data f d accu = + match d with + None -> accu + | Some k -> f k.ident k.data (fold_data f k.previous accu) + +let fold_all f tbl accu = + fold_aux (fun k -> fold_data f (Some k)) [] accu tbl + +(* let keys tbl = fold_name (fun k _ accu -> k::accu) tbl [] *) + +let rec iter f = function + Empty -> () + | Node(l, k, r, _) -> + iter f l; f k.ident k.data; iter f r + +(* Idents for sharing keys *) + +(* They should be 'totally fresh' -> neg numbers *) +let key_name = "" + +let make_key_generator () = + let c = ref 1 in + fun id -> + let stamp = !c in + decr c ; + { id 
with name = key_name; stamp = stamp; } + +let compare x y = + let c = x.stamp - y.stamp in + if c <> 0 then c + else + let c = compare x.name y.name in + if c <> 0 then c + else + compare x.flags y.flags + +let output oc id = output_string oc (unique_name id) +let hash i = (Char.code i.name.[0]) lxor i.stamp + +let original_equal = equal +include Identifiable.Make (struct + type nonrec t = t + let compare = compare + let output = output + let print = print + let hash = hash + let equal = same +end) +let equal = original_equal diff --git a/res_syntax/compiler-libs-406/ident.mli b/res_syntax/compiler-libs-406/ident.mli new file mode 100644 index 0000000000..c2983edbed --- /dev/null +++ b/res_syntax/compiler-libs-406/ident.mli @@ -0,0 +1,73 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Identifiers (unique names) *) + +type t = { stamp: int; name: string; mutable flags: int } + +include Identifiable.S with type t := t +(* Notes: + - [equal] compares identifiers by name + - [compare x y] is 0 if [same x y] is true. + - [compare] compares identifiers by binding location +*) + + +val create: string -> t +val create_persistent: string -> t +val create_predef_exn: string -> t +val rename: t -> t +val name: t -> string +val unique_name: t -> string +val unique_toplevel_name: t -> string +val persistent: t -> bool +val same: t -> t -> bool + (* Compare identifiers by binding location. + Two identifiers are the same either if they are both + non-persistent and have been created by the same call to + [new], or if they are both persistent and have the same + name. *) +val compare: t -> t -> int +val hide: t -> t + (* Return an identifier with same name as the given identifier, + but stamp different from any stamp returned by new. + When put in a 'a tbl, this identifier can only be looked + up by name. *) + +val make_global: t -> unit +val global: t -> bool +val is_predef_exn: t -> bool + +val binding_time: t -> int +val current_time: unit -> int +val set_current_time: int -> unit +val reinit: unit -> unit + +type 'a tbl + (* Association tables from identifiers to type 'a. 
*) + +val empty: 'a tbl +val add: t -> 'a -> 'a tbl -> 'a tbl +val find_same: t -> 'a tbl -> 'a +val find_name: string -> 'a tbl -> t * 'a +val find_all: string -> 'a tbl -> (t * 'a) list +val fold_name: (t -> 'a -> 'b -> 'b) -> 'a tbl -> 'b -> 'b +val fold_all: (t -> 'a -> 'b -> 'b) -> 'a tbl -> 'b -> 'b +val iter: (t -> 'a -> unit) -> 'a tbl -> unit + + +(* Idents for sharing keys *) + +val make_key_generator : unit -> (t -> t) diff --git a/res_syntax/compiler-libs-406/identifiable.ml b/res_syntax/compiler-libs-406/identifiable.ml new file mode 100644 index 0000000000..b0b706f833 --- /dev/null +++ b/res_syntax/compiler-libs-406/identifiable.ml @@ -0,0 +1,254 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Pierre Chambart, OCamlPro *) +(* Mark Shinwell and Leo White, Jane Street Europe *) +(* *) +(* Copyright 2013--2016 OCamlPro SAS *) +(* Copyright 2014--2016 Jane Street Group LLC *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +module type Thing = sig + type t + + include Hashtbl.HashedType with type t := t + include Map.OrderedType with type t := t + + val output : out_channel -> t -> unit + val print : Format.formatter -> t -> unit +end + +module type Set = sig + module T : Set.OrderedType + include Set.S + with type elt = T.t + and type t = Set.Make (T).t + + val output : out_channel -> t -> unit + val print : Format.formatter -> t -> unit + val to_string : t -> string + val of_list : elt list -> t + val map : (elt -> elt) -> t -> t +end + +module type Map = sig + module T : Map.OrderedType + include Map.S + with type key = T.t + and type 'a t = 'a Map.Make (T).t + + val filter_map : 'a t -> f:(key -> 'a -> 'b option) -> 'b t + val of_list : (key * 'a) list -> 'a t + + val disjoint_union : ?eq:('a -> 'a -> bool) -> ?print:(Format.formatter -> 'a -> unit) -> 'a t -> 'a t -> 'a t + + val union_right : 'a t -> 'a t -> 'a t + + val union_left : 'a t -> 'a t -> 'a t + + val union_merge : ('a -> 'a -> 'a) -> 'a t -> 'a t -> 'a t + val rename : key t -> key -> key + val map_keys : (key -> key) -> 'a t -> 'a t + val keys : 'a t -> Set.Make(T).t + val data : 'a t -> 'a list + val of_set : (key -> 'a) -> Set.Make(T).t -> 'a t + val transpose_keys_and_data : key t -> key t + val transpose_keys_and_data_set : key t -> Set.Make(T).t t + val print : + (Format.formatter -> 'a -> unit) -> Format.formatter -> 'a t -> unit +end + +module type Tbl = sig + module T : sig + type t + include Map.OrderedType with type t := t + include Hashtbl.HashedType with type t := t + end + include Hashtbl.S + with type key = T.t + and type 'a t = 'a Hashtbl.Make (T).t + + val to_list : 'a t -> (T.t * 'a) list + val of_list : (T.t * 'a) list -> 'a t + + val to_map : 'a t -> 'a Map.Make(T).t + val of_map : 'a Map.Make(T).t -> 'a t + val memoize : 'a t -> (key -> 'a) -> key -> 'a + val map : 'a t -> ('a -> 'b) -> 'b t +end + +module Pair (A : Thing) (B : Thing) : Thing with type t = A.t * B.t = struct + type t = A.t * B.t + + let compare (a1, b1) (a2, b2) = + let c = A.compare a1 a2 in + if c <> 0 then c + else B.compare b1 b2 + + let output oc (a, b) = Printf.fprintf oc " (%a, %a)" A.output a B.output b + let hash (a, b) = Hashtbl.hash (A.hash a, B.hash b) + let equal (a1, b1) (a2, b2) = A.equal a1 a2 && 
B.equal b1 b2 + let print ppf (a, b) = Format.fprintf ppf " (%a, @ %a)" A.print a B.print b +end + +module Make_map (T : Thing) = struct + include Map.Make (T) + + let filter_map t ~f = + fold (fun id v map -> + match f id v with + | None -> map + | Some r -> add id r map) t empty + + let of_list l = + List.fold_left (fun map (id, v) -> add id v map) empty l + + let disjoint_union ?eq ?print m1 m2 = + union (fun id v1 v2 -> + let ok = match eq with + | None -> false + | Some eq -> eq v1 v2 + in + if not ok then + let err = + match print with + | None -> + Format.asprintf "Map.disjoint_union %a" T.print id + | Some print -> + Format.asprintf "Map.disjoint_union %a => %a <> %a" + T.print id print v1 print v2 + in + Misc.fatal_error err + else Some v1) + m1 m2 + + let union_right m1 m2 = + merge (fun _id x y -> match x, y with + | None, None -> None + | None, Some v + | Some v, None + | Some _, Some v -> Some v) + m1 m2 + + let union_left m1 m2 = union_right m2 m1 + + let union_merge f m1 m2 = + let aux _ m1 m2 = + match m1, m2 with + | None, m | m, None -> m + | Some m1, Some m2 -> Some (f m1 m2) + in + merge aux m1 m2 + + let rename m v = + try find v m + with Not_found -> v + + let map_keys f m = + of_list (List.map (fun (k, v) -> f k, v) (bindings m)) + + let print f ppf s = + let elts ppf s = iter (fun id v -> + Format.fprintf ppf "@ (@[%a@ %a@])" T.print id f v) s in + Format.fprintf ppf "@[<1>{@[%a@ @]}@]" elts s + + module T_set = Set.Make (T) + + let keys map = fold (fun k _ set -> T_set.add k set) map T_set.empty + + let data t = List.map snd (bindings t) + + let of_set f set = T_set.fold (fun e map -> add e (f e) map) set empty + + let transpose_keys_and_data map = fold (fun k v m -> add v k m) map empty + let transpose_keys_and_data_set map = + fold (fun k v m -> + let set = + match find v m with + | exception Not_found -> + T_set.singleton k + | set -> + T_set.add k set + in + add v set m) + map empty +end + +module Make_set (T : Thing) = struct + include Set.Make (T) + + let output oc s = + Printf.fprintf oc " ( "; + iter (fun v -> Printf.fprintf oc "%a " T.output v) s; + Printf.fprintf oc ")" + + let print ppf s = + let elts ppf s = iter (fun e -> Format.fprintf ppf "@ %a" T.print e) s in + Format.fprintf ppf "@[<1>{@[%a@ @]}@]" elts s + + let to_string s = Format.asprintf "%a" print s + + let of_list l = match l with + | [] -> empty + | [t] -> singleton t + | t :: q -> List.fold_left (fun acc e -> add e acc) (singleton t) q + + let map f s = of_list (List.map f (elements s)) +end + +module Make_tbl (T : Thing) = struct + include Hashtbl.Make (T) + + module T_map = Make_map (T) + + let to_list t = + fold (fun key datum elts -> (key, datum)::elts) t [] + + let of_list elts = + let t = create 42 in + List.iter (fun (key, datum) -> add t key datum) elts; + t + + let to_map v = fold T_map.add v T_map.empty + + let of_map m = + let t = create (T_map.cardinal m) in + T_map.iter (fun k v -> add t k v) m; + t + + let memoize t f = fun key -> + try find t key with + | Not_found -> + let r = f key in + add t key r; + r + + let map t f = + of_map (T_map.map f (to_map t)) +end + +module type S = sig + type t + + module T : Thing with type t = t + include Thing with type t := T.t + + module Set : Set with module T := T + module Map : Map with module T := T + module Tbl : Tbl with module T := T +end + +module Make (T : Thing) = struct + module T = T + include T + + module Set = Make_set (T) + module Map = Make_map (T) + module Tbl = Make_tbl (T) +end diff --git 
a/res_syntax/compiler-libs-406/identifiable.mli b/res_syntax/compiler-libs-406/identifiable.mli new file mode 100644 index 0000000000..50e3ac577a --- /dev/null +++ b/res_syntax/compiler-libs-406/identifiable.mli @@ -0,0 +1,107 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Pierre Chambart, OCamlPro *) +(* Mark Shinwell and Leo White, Jane Street Europe *) +(* *) +(* Copyright 2013--2016 OCamlPro SAS *) +(* Copyright 2014--2016 Jane Street Group LLC *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Uniform interface for common data structures over various things. *) + +module type Thing = sig + type t + + include Hashtbl.HashedType with type t := t + include Map.OrderedType with type t := t + + val output : out_channel -> t -> unit + val print : Format.formatter -> t -> unit +end + +module Pair : functor (A : Thing) (B : Thing) -> Thing with type t = A.t * B.t + +module type Set = sig + module T : Set.OrderedType + include Set.S + with type elt = T.t + and type t = Set.Make (T).t + + val output : out_channel -> t -> unit + val print : Format.formatter -> t -> unit + val to_string : t -> string + val of_list : elt list -> t + val map : (elt -> elt) -> t -> t +end + +module type Map = sig + module T : Map.OrderedType + include Map.S + with type key = T.t + and type 'a t = 'a Map.Make (T).t + + val filter_map : 'a t -> f:(key -> 'a -> 'b option) -> 'b t + val of_list : (key * 'a) list -> 'a t + + (** [disjoint_union m1 m2] contains all bindings from [m1] and + [m2]. If some binding is present in both and the associated + value is not equal, a Fatal_error is raised *) + val disjoint_union : ?eq:('a -> 'a -> bool) -> ?print:(Format.formatter -> 'a -> unit) -> 'a t -> 'a t -> 'a t + + (** [union_right m1 m2] contains all bindings from [m1] and [m2]. 
If + some binding is present in both, the one from [m2] is taken *) + val union_right : 'a t -> 'a t -> 'a t + + (** [union_left m1 m2 = union_right m2 m1] *) + val union_left : 'a t -> 'a t -> 'a t + + val union_merge : ('a -> 'a -> 'a) -> 'a t -> 'a t -> 'a t + val rename : key t -> key -> key + val map_keys : (key -> key) -> 'a t -> 'a t + val keys : 'a t -> Set.Make(T).t + val data : 'a t -> 'a list + val of_set : (key -> 'a) -> Set.Make(T).t -> 'a t + val transpose_keys_and_data : key t -> key t + val transpose_keys_and_data_set : key t -> Set.Make(T).t t + val print : + (Format.formatter -> 'a -> unit) -> Format.formatter -> 'a t -> unit +end + +module type Tbl = sig + module T : sig + type t + include Map.OrderedType with type t := t + include Hashtbl.HashedType with type t := t + end + include Hashtbl.S + with type key = T.t + and type 'a t = 'a Hashtbl.Make (T).t + + val to_list : 'a t -> (T.t * 'a) list + val of_list : (T.t * 'a) list -> 'a t + + val to_map : 'a t -> 'a Map.Make(T).t + val of_map : 'a Map.Make(T).t -> 'a t + val memoize : 'a t -> (key -> 'a) -> key -> 'a + val map : 'a t -> ('a -> 'b) -> 'b t +end + +module type S = sig + type t + + module T : Thing with type t = t + include Thing with type t := T.t + + module Set : Set with module T := T + module Map : Map with module T := T + module Tbl : Tbl with module T := T +end + +module Make (T : Thing) : S with type t := T.t diff --git a/res_syntax/compiler-libs-406/includeclass.ml b/res_syntax/compiler-libs-406/includeclass.ml new file mode 100644 index 0000000000..59e363ca3d --- /dev/null +++ b/res_syntax/compiler-libs-406/includeclass.ml @@ -0,0 +1,116 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Jerome Vouillon, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1997 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +(* Inclusion checks for the class language *) + +open Types + +let class_types env cty1 cty2 = + Ctype.match_class_types env cty1 cty2 + +let class_type_declarations ~loc env cty1 cty2 = + Builtin_attributes.check_deprecated_inclusion + ~def:cty1.clty_loc + ~use:cty2.clty_loc + loc + cty1.clty_attributes cty2.clty_attributes + (Path.last cty1.clty_path); + Ctype.match_class_declarations env + cty1.clty_params cty1.clty_type + cty2.clty_params cty2.clty_type + +let class_declarations env cty1 cty2 = + match cty1.cty_new, cty2.cty_new with + None, Some _ -> + [Ctype.CM_Virtual_class] + | _ -> + Ctype.match_class_declarations env + cty1.cty_params cty1.cty_type + cty2.cty_params cty2.cty_type + +open Format +open Ctype + +(* +let rec hide_params = function + Tcty_arrow ("*", _, cty) -> hide_params cty + | cty -> cty +*) + +let include_err ppf = + function + | CM_Virtual_class -> + fprintf ppf "A class cannot be changed from virtual to concrete" + | CM_Parameter_arity_mismatch _ -> + fprintf ppf + "The classes do not have the same number of type parameters" + | CM_Type_parameter_mismatch (env, trace) -> + Printtyp.report_unification_error ppf env ~unif:false trace + (function ppf -> + fprintf ppf "A type parameter has type") + (function ppf -> + fprintf ppf "but is expected to have type") + | CM_Class_type_mismatch (env, cty1, cty2) -> + Printtyp.wrap_printing_env env (fun () -> + fprintf ppf + "@[The class type@;<1 2>%a@ %s@;<1 2>%a@]" + Printtyp.class_type cty1 + "is not matched by the class type" + Printtyp.class_type cty2) + | CM_Parameter_mismatch (env, trace) -> + Printtyp.report_unification_error ppf env ~unif:false trace + (function ppf -> + fprintf ppf "A parameter has type") + (function ppf -> + fprintf ppf "but is expected to have type") + | CM_Val_type_mismatch (lab, env, trace) -> + Printtyp.report_unification_error ppf env ~unif:false trace + (function ppf -> + fprintf ppf "The instance variable %s@ has type" lab) + (function ppf -> + fprintf ppf "but is expected to have type") + | CM_Meth_type_mismatch (lab, env, trace) -> + Printtyp.report_unification_error ppf env ~unif:false trace + (function ppf -> + fprintf ppf "The method %s@ has type" lab) + (function ppf -> + fprintf ppf "but is expected to have type") + | CM_Non_mutable_value lab -> + fprintf ppf + "@[The non-mutable instance variable %s cannot become mutable@]" lab + | CM_Non_concrete_value lab -> + fprintf ppf + "@[The virtual instance variable %s cannot become concrete@]" lab + | CM_Missing_value lab -> + fprintf ppf "@[The first class type has no instance variable %s@]" lab + | CM_Missing_method lab -> + fprintf ppf "@[The first class type has no method %s@]" lab + | CM_Hide_public lab -> + fprintf ppf "@[The public method %s cannot be hidden@]" lab + | CM_Hide_virtual (k, lab) -> + fprintf ppf "@[The virtual %s %s cannot be hidden@]" k lab + | CM_Public_method lab -> + fprintf ppf "@[The public method %s cannot become private" lab + | CM_Virtual_method lab -> + fprintf ppf "@[The virtual method %s cannot become concrete" lab + | CM_Private_method lab -> + fprintf ppf "The private method %s cannot become public" lab + +let report_error ppf = function + | [] -> () + | err :: errs -> + let print_errs ppf errs = + List.iter (fun err -> fprintf ppf "@ %a" include_err err) errs in + fprintf ppf "@[%a%a@]" include_err err print_errs errs diff --git a/res_syntax/compiler-libs-406/includeclass.mli 
b/res_syntax/compiler-libs-406/includeclass.mli new file mode 100644 index 0000000000..ebfa97897f --- /dev/null +++ b/res_syntax/compiler-libs-406/includeclass.mli @@ -0,0 +1,32 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Jerome Vouillon, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1997 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Inclusion checks for the class language *) + +open Types +open Ctype +open Format + +val class_types: + Env.t -> class_type -> class_type -> class_match_failure list +val class_type_declarations: + loc:Location.t -> + Env.t -> class_type_declaration -> class_type_declaration -> + class_match_failure list +val class_declarations: + Env.t -> class_declaration -> class_declaration -> + class_match_failure list + +val report_error: formatter -> class_match_failure list -> unit diff --git a/res_syntax/compiler-libs-406/includecore.ml b/res_syntax/compiler-libs-406/includecore.ml new file mode 100644 index 0000000000..4982a8a5c2 --- /dev/null +++ b/res_syntax/compiler-libs-406/includecore.ml @@ -0,0 +1,375 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +(* Inclusion checks for the core language *) + +open Asttypes +open Path +open Types +open Typedtree + +(* Inclusion between value descriptions *) + +exception Dont_match + +let value_descriptions ~loc env name + (vd1 : Types.value_description) + (vd2 : Types.value_description) = + Builtin_attributes.check_deprecated_inclusion + ~def:vd1.val_loc + ~use:vd2.val_loc + loc + vd1.val_attributes vd2.val_attributes + name; + if Ctype.moregeneral env true vd1.val_type vd2.val_type then begin + match (vd1.val_kind, vd2.val_kind) with + (Val_prim p1, Val_prim p2) -> + if p1 = p2 then Tcoerce_none else raise Dont_match + | (Val_prim p, _) -> + let pc = {pc_desc = p; pc_type = vd2.Types.val_type; + pc_env = env; pc_loc = vd1.Types.val_loc; } in + Tcoerce_primitive pc + | (_, Val_prim _) -> raise Dont_match + | (_, _) -> Tcoerce_none + end else + raise Dont_match + +(* Inclusion between "private" annotations *) + +let private_flags decl1 decl2 = + match decl1.type_private, decl2.type_private with + | Private, Public -> + decl2.type_kind = Type_abstract && + (decl2.type_manifest = None || decl1.type_kind <> Type_abstract) + | _, _ -> true + +(* Inclusion between manifest types (particularly for private row types) *) + +let is_absrow env ty = + match ty.desc with + Tconstr(Pident _, _, _) -> + begin match Ctype.expand_head env ty with + {desc=Tobject _|Tvariant _} -> true + | _ -> false + end + | _ -> false + +let type_manifest env ty1 params1 ty2 params2 priv2 = + let ty1' = Ctype.expand_head env ty1 and ty2' = Ctype.expand_head env ty2 in + match ty1'.desc, ty2'.desc with + Tvariant row1, Tvariant row2 when is_absrow env (Btype.row_more row2) -> + let row1 = Btype.row_repr row1 and row2 = Btype.row_repr row2 in + Ctype.equal env true (ty1::params1) (row2.row_more::params2) && + begin match row1.row_more with + {desc=Tvar _|Tconstr _|Tnil} -> true + | _ -> false + end && + let r1, r2, pairs = + Ctype.merge_row_fields row1.row_fields row2.row_fields in + (not row2.row_closed || + row1.row_closed && Ctype.filter_row_fields false r1 = []) && + List.for_all + (fun (_,f) -> match Btype.row_field_repr f with + Rabsent | Reither _ -> true | Rpresent _ -> false) + r2 && + let to_equal = ref (List.combine params1 params2) in + List.for_all + (fun (_, f1, f2) -> + match Btype.row_field_repr f1, Btype.row_field_repr f2 with + Rpresent(Some t1), + (Rpresent(Some t2) | Reither(false, [t2], _, _)) -> + to_equal := (t1,t2) :: !to_equal; true + | Rpresent None, (Rpresent None | Reither(true, [], _, _)) -> true + | Reither(c1,tl1,_,_), Reither(c2,tl2,_,_) + when List.length tl1 = List.length tl2 && c1 = c2 -> + to_equal := List.combine tl1 tl2 @ !to_equal; true + | Rabsent, (Reither _ | Rabsent) -> true + | _ -> false) + pairs && + let tl1, tl2 = List.split !to_equal in + Ctype.equal env true tl1 tl2 + | Tobject (fi1, _), Tobject (fi2, _) + when is_absrow env (snd(Ctype.flatten_fields fi2)) -> + let (fields2,rest2) = Ctype.flatten_fields fi2 in + Ctype.equal env true (ty1::params1) (rest2::params2) && + let (fields1,rest1) = Ctype.flatten_fields fi1 in + (match rest1 with {desc=Tnil|Tvar _|Tconstr _} -> true | _ -> false) && + let pairs, _miss1, miss2 = Ctype.associate_fields fields1 fields2 in + miss2 = [] && + let tl1, tl2 = + List.split (List.map (fun (_,_,t1,_,t2) -> t1, t2) pairs) in + Ctype.equal env true (params1 @ tl1) (params2 @ tl2) + | _ -> + let rec check_super ty1 = + Ctype.equal env true (ty1 :: params1) (ty2 :: 
params2) || + priv2 = Private && + try check_super + (Ctype.try_expand_once_opt env (Ctype.expand_head env ty1)) + with Ctype.Cannot_expand -> false + in check_super ty1 + +(* Inclusion between type declarations *) + +type type_mismatch = + Arity + | Privacy + | Kind + | Constraint + | Manifest + | Variance + | Field_type of Ident.t + | Field_mutable of Ident.t + | Field_arity of Ident.t + | Field_names of int * Ident.t * Ident.t + | Field_missing of bool * Ident.t + | Record_representation of bool (* true means second one is unboxed float *) + | Unboxed_representation of bool (* true means second one is unboxed *) + | Immediate + +let report_type_mismatch0 first second decl ppf err = + let pr fmt = Format.fprintf ppf fmt in + match err with + Arity -> pr "They have different arities" + | Privacy -> pr "A private type would be revealed" + | Kind -> pr "Their kinds differ" + | Constraint -> pr "Their constraints differ" + | Manifest -> () + | Variance -> pr "Their variances do not agree" + | Field_type s -> + pr "The types for field %s are not equal" (Ident.name s) + | Field_mutable s -> + pr "The mutability of field %s is different" (Ident.name s) + | Field_arity s -> + pr "The arities for field %s differ" (Ident.name s) + | Field_names (n, name1, name2) -> + pr "Fields number %i have different names, %s and %s" + n (Ident.name name1) (Ident.name name2) + | Field_missing (b, s) -> + pr "The field %s is only present in %s %s" + (Ident.name s) (if b then second else first) decl + | Record_representation b -> + pr "Their internal representations differ:@ %s %s %s" + (if b then second else first) decl + "uses unboxed float representation" + | Unboxed_representation b -> + pr "Their internal representations differ:@ %s %s %s" + (if b then second else first) decl + "uses unboxed representation" + | Immediate -> pr "%s is not an immediate type" first + +let report_type_mismatch first second decl ppf = + List.iter + (fun err -> + if err = Manifest then () else + Format.fprintf ppf "@ %a." (report_type_mismatch0 first second decl) err) + +let rec compare_constructor_arguments ~loc env cstr params1 params2 arg1 arg2 = + match arg1, arg2 with + | Types.Cstr_tuple arg1, Types.Cstr_tuple arg2 -> + if List.length arg1 <> List.length arg2 then [Field_arity cstr] + else if + (* Ctype.equal must be called on all arguments at once, cf. 
PR#7378 *) + Ctype.equal env true (params1 @ arg1) (params2 @ arg2) + then [] else [Field_type cstr] + | Types.Cstr_record l1, Types.Cstr_record l2 -> + compare_records env ~loc params1 params2 0 l1 l2 + | _ -> [Field_type cstr] + +and compare_variants ~loc env params1 params2 n + (cstrs1 : Types.constructor_declaration list) + (cstrs2 : Types.constructor_declaration list) = + match cstrs1, cstrs2 with + [], [] -> [] + | [], c::_ -> [Field_missing (true, c.Types.cd_id)] + | c::_, [] -> [Field_missing (false, c.Types.cd_id)] + | cd1::rem1, cd2::rem2 -> + if Ident.name cd1.cd_id <> Ident.name cd2.cd_id then + [Field_names (n, cd1.cd_id, cd2.cd_id)] + else begin + Builtin_attributes.check_deprecated_inclusion + ~def:cd1.cd_loc + ~use:cd2.cd_loc + loc + cd1.cd_attributes cd2.cd_attributes + (Ident.name cd1.cd_id); + let r = + match cd1.cd_res, cd2.cd_res with + | Some r1, Some r2 -> + if Ctype.equal env true [r1] [r2] then + compare_constructor_arguments ~loc env cd1.cd_id [r1] [r2] + cd1.cd_args cd2.cd_args + else [Field_type cd1.cd_id] + | Some _, None | None, Some _ -> + [Field_type cd1.cd_id] + | _ -> + compare_constructor_arguments ~loc env cd1.cd_id + params1 params2 cd1.cd_args cd2.cd_args + in + if r <> [] then r + else compare_variants ~loc env params1 params2 (n+1) rem1 rem2 + end + + +and compare_records ~loc env params1 params2 n + (labels1 : Types.label_declaration list) + (labels2 : Types.label_declaration list) = + match labels1, labels2 with + [], [] -> [] + | [], l::_ -> [Field_missing (true, l.Types.ld_id)] + | l::_, [] -> [Field_missing (false, l.Types.ld_id)] + | ld1::rem1, ld2::rem2 -> + if Ident.name ld1.ld_id <> Ident.name ld2.ld_id + then [Field_names (n, ld1.ld_id, ld2.ld_id)] + else if ld1.ld_mutable <> ld2.ld_mutable then [Field_mutable ld1.ld_id] else begin + Builtin_attributes.check_deprecated_mutable_inclusion + ~def:ld1.ld_loc + ~use:ld2.ld_loc + loc + ld1.ld_attributes ld2.ld_attributes + (Ident.name ld1.ld_id); + if Ctype.equal env true (ld1.ld_type::params1)(ld2.ld_type::params2) + then (* add arguments to the parameters, cf. 
PR#7378 *) + compare_records ~loc env + (ld1.ld_type::params1) (ld2.ld_type::params2) + (n+1) + rem1 rem2 + else + [Field_type ld1.ld_id] + end + +let type_declarations ?(equality = false) ~loc env name decl1 id decl2 = + Builtin_attributes.check_deprecated_inclusion + ~def:decl1.type_loc + ~use:decl2.type_loc + loc + decl1.type_attributes decl2.type_attributes + name; + if decl1.type_arity <> decl2.type_arity then [Arity] else + if not (private_flags decl1 decl2) then [Privacy] else + let err = match (decl1.type_manifest, decl2.type_manifest) with + (_, None) -> + if Ctype.equal env true decl1.type_params decl2.type_params + then [] else [Constraint] + | (Some ty1, Some ty2) -> + if type_manifest env ty1 decl1.type_params ty2 decl2.type_params + decl2.type_private + then [] else [Manifest] + | (None, Some ty2) -> + let ty1 = + Btype.newgenty (Tconstr(Pident id, decl2.type_params, ref Mnil)) + in + if Ctype.equal env true decl1.type_params decl2.type_params then + if Ctype.equal env false [ty1] [ty2] then [] + else [Manifest] + else [Constraint] + in + if err <> [] then err else + let err = + match (decl2.type_kind, decl1.type_unboxed.unboxed, + decl2.type_unboxed.unboxed) with + | Type_abstract, _, _ -> [] + | _, true, false -> [Unboxed_representation false] + | _, false, true -> [Unboxed_representation true] + | _ -> [] + in + if err <> [] then err else + let err = match (decl1.type_kind, decl2.type_kind) with + (_, Type_abstract) -> [] + | (Type_variant cstrs1, Type_variant cstrs2) -> + let mark cstrs usage name decl = + List.iter + (fun c -> + Env.mark_constructor_used usage env name decl + (Ident.name c.Types.cd_id)) + cstrs + in + let usage = + if decl1.type_private = Private || decl2.type_private = Public + then Env.Positive else Env.Privatize + in + mark cstrs1 usage name decl1; + if equality then mark cstrs2 Env.Positive (Ident.name id) decl2; + compare_variants ~loc env decl1.type_params decl2.type_params 1 cstrs1 cstrs2 + | (Type_record(labels1,rep1), Type_record(labels2,rep2)) -> + let err = compare_records ~loc env decl1.type_params decl2.type_params + 1 labels1 labels2 in + if err <> [] || rep1 = rep2 then err else + [Record_representation (rep2 = Record_float)] + | (Type_open, Type_open) -> [] + | (_, _) -> [Kind] + in + if err <> [] then err else + let abstr = decl2.type_kind = Type_abstract && decl2.type_manifest = None in + (* If attempt to assign a non-immediate type (e.g. 
string) to a type that + * must be immediate, then we error *) + let err = + if abstr && + not decl1.type_immediate && + decl2.type_immediate then + [Immediate] + else [] + in + if err <> [] then err else + let need_variance = + abstr || decl1.type_private = Private || decl1.type_kind = Type_open in + if not need_variance then [] else + let abstr = abstr || decl2.type_private = Private in + let opn = decl2.type_kind = Type_open && decl2.type_manifest = None in + let constrained ty = not (Btype.(is_Tvar (repr ty))) in + if List.for_all2 + (fun ty (v1,v2) -> + let open Variance in + let imp a b = not a || b in + let (co1,cn1) = get_upper v1 and (co2,cn2) = get_upper v2 in + (if abstr then (imp co1 co2 && imp cn1 cn2) + else if opn || constrained ty then (co1 = co2 && cn1 = cn2) + else true) && + let (p1,n1,i1,j1) = get_lower v1 and (p2,n2,i2,j2) = get_lower v2 in + imp abstr (imp p2 p1 && imp n2 n1 && imp i2 i1 && imp j2 j1)) + decl2.type_params (List.combine decl1.type_variance decl2.type_variance) + then [] else [Variance] + +(* Inclusion between extension constructors *) + +let extension_constructors ~loc env id ext1 ext2 = + let usage = + if ext1.ext_private = Private || ext2.ext_private = Public + then Env.Positive else Env.Privatize + in + Env.mark_extension_used usage env ext1 (Ident.name id); + let ty1 = + Btype.newgenty (Tconstr(ext1.ext_type_path, ext1.ext_type_params, ref Mnil)) + in + let ty2 = + Btype.newgenty (Tconstr(ext2.ext_type_path, ext2.ext_type_params, ref Mnil)) + in + if Ctype.equal env true + (ty1 :: ext1.ext_type_params) + (ty2 :: ext2.ext_type_params) + then + if compare_constructor_arguments ~loc env (Ident.create "") + ext1.ext_type_params ext2.ext_type_params + ext1.ext_args ext2.ext_args = [] then + if match ext1.ext_ret_type, ext2.ext_ret_type with + Some r1, Some r2 when not (Ctype.equal env true [r1] [r2]) -> false + | Some _, None | None, Some _ -> false + | _ -> true + then + match ext1.ext_private, ext2.ext_private with + Private, Public -> false + | _, _ -> true + else false + else false + else false diff --git a/res_syntax/compiler-libs-406/includecore.mli b/res_syntax/compiler-libs-406/includecore.mli new file mode 100644 index 0000000000..e3b8cac106 --- /dev/null +++ b/res_syntax/compiler-libs-406/includecore.mli @@ -0,0 +1,59 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +(* Inclusion checks for the core language *) + +open Typedtree +open Types + +exception Dont_match + +type type_mismatch = + Arity + | Privacy + | Kind + | Constraint + | Manifest + | Variance + | Field_type of Ident.t + | Field_mutable of Ident.t + | Field_arity of Ident.t + | Field_names of int * Ident.t * Ident.t + | Field_missing of bool * Ident.t + | Record_representation of bool + | Unboxed_representation of bool + | Immediate + +val value_descriptions: + loc:Location.t -> Env.t -> string -> + value_description -> value_description -> module_coercion + +val type_declarations: + ?equality:bool -> + loc:Location.t -> + Env.t -> string -> + type_declaration -> Ident.t -> type_declaration -> type_mismatch list + +val extension_constructors: + loc:Location.t -> + Env.t -> Ident.t -> + extension_constructor -> extension_constructor -> bool +(* +val class_types: + Env.t -> class_type -> class_type -> bool +*) + +val report_type_mismatch: + string -> string -> string -> Format.formatter -> type_mismatch list -> unit diff --git a/res_syntax/compiler-libs-406/includemod.ml b/res_syntax/compiler-libs-406/includemod.ml new file mode 100644 index 0000000000..9b12e77855 --- /dev/null +++ b/res_syntax/compiler-libs-406/includemod.ml @@ -0,0 +1,674 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Inclusion checks for the module language *) + +open Misc +open Path +open Typedtree +open Types + +type symptom = + Missing_field of Ident.t * Location.t * string (* kind *) + | Value_descriptions of Ident.t * value_description * value_description + | Type_declarations of Ident.t * type_declaration + * type_declaration * Includecore.type_mismatch list + | Extension_constructors of + Ident.t * extension_constructor * extension_constructor + | Module_types of module_type * module_type + | Modtype_infos of Ident.t * modtype_declaration * modtype_declaration + | Modtype_permutation + | Interface_mismatch of string * string + | Class_type_declarations of + Ident.t * class_type_declaration * class_type_declaration * + Ctype.class_match_failure list + | Class_declarations of + Ident.t * class_declaration * class_declaration * + Ctype.class_match_failure list + | Unbound_modtype_path of Path.t + | Unbound_module_path of Path.t + | Invalid_module_alias of Path.t + +type pos = + Module of Ident.t | Modtype of Ident.t | Arg of Ident.t | Body of Ident.t +type error = pos list * Env.t * symptom + +exception Error of error list + +(* All functions "blah env x1 x2" check that x1 is included in x2, + i.e. that x1 is the type of an implementation that fulfills the + specification x2. If not, Error is raised with a backtrace of the error. 
*) + +(* Inclusion between value descriptions *) + +let value_descriptions ~loc env cxt subst id vd1 vd2 = + Cmt_format.record_value_dependency vd1 vd2; + Env.mark_value_used env (Ident.name id) vd1; + let vd2 = Subst.value_description subst vd2 in + try + Includecore.value_descriptions ~loc env (Ident.name id) vd1 vd2 + with Includecore.Dont_match -> + raise(Error[cxt, env, Value_descriptions(id, vd1, vd2)]) + +(* Inclusion between type declarations *) + +let type_declarations ~loc env ?(old_env=env) cxt subst id decl1 decl2 = + Env.mark_type_used env (Ident.name id) decl1; + let decl2 = Subst.type_declaration subst decl2 in + let err = + Includecore.type_declarations ~loc env (Ident.name id) decl1 id decl2 + in + if err <> [] then + raise(Error[cxt, old_env, Type_declarations(id, decl1, decl2, err)]) + +(* Inclusion between extension constructors *) + +let extension_constructors ~loc env cxt subst id ext1 ext2 = + let ext2 = Subst.extension_constructor subst ext2 in + if Includecore.extension_constructors ~loc env id ext1 ext2 + then () + else raise(Error[cxt, env, Extension_constructors(id, ext1, ext2)]) + +(* Inclusion between class declarations *) + +let class_type_declarations ~loc ~old_env env cxt subst id decl1 decl2 = + let decl2 = Subst.cltype_declaration subst decl2 in + match Includeclass.class_type_declarations ~loc env decl1 decl2 with + [] -> () + | reason -> + raise(Error[cxt, old_env, + Class_type_declarations(id, decl1, decl2, reason)]) + +let class_declarations ~old_env env cxt subst id decl1 decl2 = + let decl2 = Subst.class_declaration subst decl2 in + match Includeclass.class_declarations env decl1 decl2 with + [] -> () + | reason -> + raise(Error[cxt, old_env, Class_declarations(id, decl1, decl2, reason)]) + +(* Expand a module type identifier when possible *) + +exception Dont_match + +let may_expand_module_path env path = + try ignore (Env.find_modtype_expansion path env); true + with Not_found -> false + +let expand_module_path env cxt path = + try + Env.find_modtype_expansion path env + with Not_found -> + raise(Error[cxt, env, Unbound_modtype_path path]) + +let expand_module_alias env cxt path = + try (Env.find_module path env).md_type + with Not_found -> + raise(Error[cxt, env, Unbound_module_path path]) + +(* +let rec normalize_module_path env cxt path = + match expand_module_alias env cxt path with + Mty_alias path' -> normalize_module_path env cxt path' + | _ -> path +*) + +(* Extract name, kind and ident from a signature item *) + +type field_desc = + Field_value of string + | Field_type of string + | Field_typext of string + | Field_module of string + | Field_modtype of string + | Field_class of string + | Field_classtype of string + +let kind_of_field_desc = function + | Field_value _ -> "value" + | Field_type _ -> "type" + | Field_typext _ -> "extension constructor" + | Field_module _ -> "module" + | Field_modtype _ -> "module type" + | Field_class _ -> "class" + | Field_classtype _ -> "class type" + +let item_ident_name = function + Sig_value(id, d) -> (id, d.val_loc, Field_value(Ident.name id)) + | Sig_type(id, d, _) -> (id, d.type_loc, Field_type(Ident.name id)) + | Sig_typext(id, d, _) -> (id, d.ext_loc, Field_typext(Ident.name id)) + | Sig_module(id, d, _) -> (id, d.md_loc, Field_module(Ident.name id)) + | Sig_modtype(id, d) -> (id, d.mtd_loc, Field_modtype(Ident.name id)) + | Sig_class(id, d, _) -> (id, d.cty_loc, Field_class(Ident.name id)) + | Sig_class_type(id, d, _) -> (id, d.clty_loc, Field_classtype(Ident.name id)) + +let is_runtime_component 
= function + | Sig_value(_,{val_kind = Val_prim _}) + | Sig_type(_,_,_) + | Sig_modtype(_,_) + | Sig_class_type(_,_,_) -> false + | Sig_value(_,_) + | Sig_typext(_,_,_) + | Sig_module(_,_,_) + | Sig_class(_, _,_) -> true + +(* Print a coercion *) + +let rec print_list pr ppf = function + [] -> () + | [a] -> pr ppf a + | a :: l -> pr ppf a; Format.fprintf ppf ";@ "; print_list pr ppf l +let print_list pr ppf l = + Format.fprintf ppf "[@[%a@]]" (print_list pr) l + +let rec print_coercion ppf c = + let pr fmt = Format.fprintf ppf fmt in + match c with + Tcoerce_none -> pr "id" + | Tcoerce_structure (fl, nl) -> + pr "@[<2>struct@ %a@ %a@]" + (print_list print_coercion2) fl + (print_list print_coercion3) nl + | Tcoerce_functor (inp, out) -> + pr "@[<2>functor@ (%a)@ (%a)@]" + print_coercion inp + print_coercion out + | Tcoerce_primitive {pc_desc; pc_env = _; pc_type} -> + pr "prim %s@ (%a)" pc_desc.Primitive.prim_name + Printtyp.raw_type_expr pc_type + | Tcoerce_alias (p, c) -> + pr "@[<2>alias %a@ (%a)@]" + Printtyp.path p + print_coercion c +and print_coercion2 ppf (n, c) = + Format.fprintf ppf "@[%d,@ %a@]" n print_coercion c +and print_coercion3 ppf (i, n, c) = + Format.fprintf ppf "@[%s, %d,@ %a@]" + (Ident.unique_name i) n print_coercion c + +(* Simplify a structure coercion *) + +let simplify_structure_coercion cc id_pos_list = + let rec is_identity_coercion pos = function + | [] -> + true + | (n, c) :: rem -> + n = pos && c = Tcoerce_none && is_identity_coercion (pos + 1) rem in + if is_identity_coercion 0 cc + then Tcoerce_none + else Tcoerce_structure (cc, id_pos_list) + +(* Inclusion between module types. + Return the restriction that transforms a value of the smaller type + into a value of the bigger type. *) + +let rec modtypes ~loc env cxt subst mty1 mty2 = + try + try_modtypes ~loc env cxt subst mty1 mty2 + with + Dont_match -> + raise(Error[cxt, env, Module_types(mty1, Subst.modtype subst mty2)]) + | Error reasons as err -> + match mty1, mty2 with + Mty_alias _, _ + | _, Mty_alias _ -> raise err + | _ -> + raise(Error((cxt, env, Module_types(mty1, Subst.modtype subst mty2)) + :: reasons)) + +and try_modtypes ~loc env cxt subst mty1 mty2 = + match (mty1, mty2) with + | (Mty_alias(pres1, p1), Mty_alias(pres2, p2)) -> begin + if Env.is_functor_arg p2 env then + raise (Error[cxt, env, Invalid_module_alias p2]); + if not (Path.same p1 p2) then begin + let p1 = Env.normalize_path None env p1 + and p2 = Env.normalize_path None env (Subst.module_path subst p2) in + if not (Path.same p1 p2) then raise Dont_match + end; + match pres1, pres2 with + | Mta_present, Mta_present -> Tcoerce_none + (* Should really be Tcoerce_ignore if it existed *) + | Mta_absent, Mta_absent -> Tcoerce_none + (* Should really be Tcoerce_empty if it existed *) + | Mta_present, Mta_absent -> Tcoerce_none + | Mta_absent, Mta_present -> + let p1 = try + Env.normalize_path (Some Location.none) env p1 + with Env.Error (Env.Missing_module (_, _, path)) -> + raise (Error[cxt, env, Unbound_module_path path]) + in + Tcoerce_alias (p1, Tcoerce_none) + end + | (Mty_alias(pres1, p1), _) -> begin + let p1 = try + Env.normalize_path (Some Location.none) env p1 + with Env.Error (Env.Missing_module (_, _, path)) -> + raise (Error[cxt, env, Unbound_module_path path]) + in + let mty1 = + Mtype.strengthen ~aliasable:true env + (expand_module_alias env cxt p1) p1 + in + let cc = modtypes ~loc env cxt subst mty1 mty2 in + match pres1 with + | Mta_present -> cc + | Mta_absent -> Tcoerce_alias (p1, cc) + end + | (Mty_ident p1, _) when 
may_expand_module_path env p1 -> + try_modtypes ~loc env cxt subst (expand_module_path env cxt p1) mty2 + | (_, Mty_ident _) -> + try_modtypes2 ~loc env cxt mty1 (Subst.modtype subst mty2) + | (Mty_signature sig1, Mty_signature sig2) -> + signatures ~loc env cxt subst sig1 sig2 + | (Mty_functor(param1, None, res1), Mty_functor(_param2, None, res2)) -> + begin match modtypes ~loc env (Body param1::cxt) subst res1 res2 with + Tcoerce_none -> Tcoerce_none + | cc -> Tcoerce_functor (Tcoerce_none, cc) + end + | (Mty_functor(param1, Some arg1, res1), + Mty_functor(param2, Some arg2, res2)) -> + let arg2' = Subst.modtype subst arg2 in + let cc_arg = modtypes ~loc env (Arg param1::cxt) Subst.identity arg2' arg1 in + let cc_res = + modtypes ~loc (Env.add_module param1 arg2' env) (Body param1::cxt) + (Subst.add_module param2 (Pident param1) subst) res1 res2 in + begin match (cc_arg, cc_res) with + (Tcoerce_none, Tcoerce_none) -> Tcoerce_none + | _ -> Tcoerce_functor(cc_arg, cc_res) + end + | (_, _) -> + raise Dont_match + +and try_modtypes2 ~loc env cxt mty1 mty2 = + (* mty2 is an identifier *) + match (mty1, mty2) with + (Mty_ident p1, Mty_ident p2) + when Path.same (Env.normalize_path_prefix None env p1) + (Env.normalize_path_prefix None env p2) -> + Tcoerce_none + | (_, Mty_ident p2) when may_expand_module_path env p2 -> + try_modtypes ~loc env cxt Subst.identity mty1 (expand_module_path env cxt p2) + | (_, _) -> + raise Dont_match + +(* Inclusion between signatures *) + +and signatures ~loc env cxt subst sig1 sig2 = + (* Environment used to check inclusion of components *) + let new_env = + Env.add_signature sig1 (Env.in_signature true env) in + (* Keep ids for module aliases *) + let (id_pos_list,_) = + List.fold_left + (fun (l,pos) -> function + Sig_module (id, _, _) -> + ((id,pos,Tcoerce_none)::l , pos+1) + | item -> (l, if is_runtime_component item then pos+1 else pos)) + ([], 0) sig1 in + (* Build a table of the components of sig1, along with their positions. + The table is indexed by kind and name of component *) + let rec build_component_table pos tbl = function + [] -> pos, tbl + | item :: rem -> + let (id, _loc, name) = item_ident_name item in + let nextpos = if is_runtime_component item then pos + 1 else pos in + build_component_table nextpos + (Tbl.add name (id, item, pos) tbl) rem in + let len1, comps1 = + build_component_table 0 Tbl.empty sig1 in + let len2 = + List.fold_left + (fun n i -> if is_runtime_component i then n + 1 else n) + 0 + sig2 + in + (* Pair each component of sig2 with a component of sig1, + identifying the names along the way. + Return a coercion list indicating, for all run-time components + of sig2, the position of the matching run-time components of sig1 + and the coercion to be applied to it. 
*) + let rec pair_components subst paired unpaired = function + [] -> + begin match unpaired with + [] -> + let cc = + signature_components ~loc env new_env cxt subst + (List.rev paired) + in + if len1 = len2 then (* see PR#5098 *) + simplify_structure_coercion cc id_pos_list + else + Tcoerce_structure (cc, id_pos_list) + | _ -> raise(Error unpaired) + end + | item2 :: rem -> + let (id2, loc, name2) = item_ident_name item2 in + let name2, report = + match item2, name2 with + Sig_type (_, {type_manifest=None}, _), Field_type s + when Btype.is_row_name s -> + (* Do not report in case of failure, + as the main type will generate an error *) + Field_type (String.sub s 0 (String.length s - 4)), false + | _ -> name2, true + in + begin try + let (id1, item1, pos1) = Tbl.find name2 comps1 in + let new_subst = + match item2 with + Sig_type _ -> + Subst.add_type id2 (Pident id1) subst + | Sig_module _ -> + Subst.add_module id2 (Pident id1) subst + | Sig_modtype _ -> + Subst.add_modtype id2 (Mty_ident (Pident id1)) subst + | Sig_value _ | Sig_typext _ + | Sig_class _ | Sig_class_type _ -> + subst + in + pair_components new_subst + ((item1, item2, pos1) :: paired) unpaired rem + with Not_found -> + let unpaired = + if report then + (cxt, env, Missing_field (id2, loc, kind_of_field_desc name2)) :: + unpaired + else unpaired in + pair_components subst paired unpaired rem + end in + (* Do the pairing and checking, and return the final coercion *) + pair_components subst [] [] sig2 + +(* Inclusion between signature components *) + +and signature_components ~loc old_env env cxt subst paired = + let comps_rec rem = signature_components ~loc old_env env cxt subst rem in + match paired with + [] -> [] + | (Sig_value(id1, valdecl1), Sig_value(_id2, valdecl2), pos) :: rem -> + let cc = value_descriptions ~loc env cxt subst id1 valdecl1 valdecl2 in + begin match valdecl2.val_kind with + Val_prim _ -> comps_rec rem + | _ -> (pos, cc) :: comps_rec rem + end + | (Sig_type(id1, tydecl1, _), Sig_type(_id2, tydecl2, _), _pos) :: rem -> + type_declarations ~loc ~old_env env cxt subst id1 tydecl1 tydecl2; + comps_rec rem + | (Sig_typext(id1, ext1, _), Sig_typext(_id2, ext2, _), pos) + :: rem -> + extension_constructors ~loc env cxt subst id1 ext1 ext2; + (pos, Tcoerce_none) :: comps_rec rem + | (Sig_module(id1, mty1, _), Sig_module(_id2, mty2, _), pos) :: rem -> + let cc = module_declarations ~loc env cxt subst id1 mty1 mty2 in + (pos, cc) :: comps_rec rem + | (Sig_modtype(id1, info1), Sig_modtype(_id2, info2), _pos) :: rem -> + modtype_infos ~loc env cxt subst id1 info1 info2; + comps_rec rem + | (Sig_class(id1, decl1, _), Sig_class(_id2, decl2, _), pos) :: rem -> + class_declarations ~old_env env cxt subst id1 decl1 decl2; + (pos, Tcoerce_none) :: comps_rec rem + | (Sig_class_type(id1, info1, _), + Sig_class_type(_id2, info2, _), _pos) :: rem -> + class_type_declarations ~loc ~old_env env cxt subst id1 info1 info2; + comps_rec rem + | _ -> + assert false + +and module_declarations ~loc env cxt subst id1 md1 md2 = + Builtin_attributes.check_deprecated_inclusion + ~def:md1.md_loc + ~use:md2.md_loc + loc + md1.md_attributes md2.md_attributes + (Ident.name id1); + let p1 = Pident id1 in + Env.mark_module_used env (Ident.name id1) md1.md_loc; + modtypes ~loc env (Module id1::cxt) subst + (Mtype.strengthen ~aliasable:true env md1.md_type p1) md2.md_type + +(* Inclusion between module type specifications *) + +and modtype_infos ~loc env cxt subst id info1 info2 = + Builtin_attributes.check_deprecated_inclusion + 
~def:info1.mtd_loc + ~use:info2.mtd_loc + loc + info1.mtd_attributes info2.mtd_attributes + (Ident.name id); + let info2 = Subst.modtype_declaration subst info2 in + let cxt' = Modtype id :: cxt in + try + match (info1.mtd_type, info2.mtd_type) with + (None, None) -> () + | (Some _, None) -> () + | (Some mty1, Some mty2) -> + check_modtype_equiv ~loc env cxt' mty1 mty2 + | (None, Some mty2) -> + check_modtype_equiv ~loc env cxt' (Mty_ident(Pident id)) mty2 + with Error reasons -> + raise(Error((cxt, env, Modtype_infos(id, info1, info2)) :: reasons)) + +and check_modtype_equiv ~loc env cxt mty1 mty2 = + match + (modtypes ~loc env cxt Subst.identity mty1 mty2, + modtypes ~loc env cxt Subst.identity mty2 mty1) + with + (Tcoerce_none, Tcoerce_none) -> () + | (_c1, _c2) -> + (* Format.eprintf "@[c1 = %a@ c2 = %a@]@." + print_coercion _c1 print_coercion _c2; *) + raise(Error [cxt, env, Modtype_permutation]) + +(* Simplified inclusion check between module types (for Env) *) + +let can_alias env path = + let rec no_apply = function + | Pident _ -> true + | Pdot(p, _, _) -> no_apply p + | Papply _ -> false + in + no_apply path && not (Env.is_functor_arg path env) + +let check_modtype_inclusion ~loc env mty1 path1 mty2 = + try + let aliasable = can_alias env path1 in + ignore(modtypes ~loc env [] Subst.identity + (Mtype.strengthen ~aliasable env mty1 path1) mty2) + with Error _ -> + raise Not_found + +let _ = Env.check_modtype_inclusion := check_modtype_inclusion + +(* Check that an implementation of a compilation unit meets its + interface. *) + +let compunit env impl_name impl_sig intf_name intf_sig = + try + signatures ~loc:(Location.in_file impl_name) env [] Subst.identity + impl_sig intf_sig + with Error reasons -> + raise(Error(([], Env.empty,Interface_mismatch(impl_name, intf_name)) + :: reasons)) + +(* Hide the context and substitution parameters to the outside world *) + +let modtypes ~loc env mty1 mty2 = modtypes ~loc env [] Subst.identity mty1 mty2 +let signatures env sig1 sig2 = + signatures ~loc:Location.none env [] Subst.identity sig1 sig2 +let type_declarations ~loc env id decl1 decl2 = + type_declarations ~loc env [] Subst.identity id decl1 decl2 + +(* +let modtypes env m1 m2 = + let c = modtypes env m1 m2 in + Format.eprintf "@[<2>modtypes@ %a@ %a =@ %a@]@." 
+ Printtyp.modtype m1 Printtyp.modtype m2 + print_coercion c; + c +*) + +(* Error report *) + +open Format +open Printtyp + +let show_loc msg ppf loc = + let pos = loc.Location.loc_start in + if List.mem pos.Lexing.pos_fname [""; "_none_"; "//toplevel//"] then () + else fprintf ppf "@\n@[<2>%a:@ %s@]" Location.print_loc loc msg + +let show_locs ppf (loc1, loc2) = + show_loc "Expected declaration" ppf loc2; + show_loc "Actual declaration" ppf loc1 + +let include_err ppf = function + | Missing_field (id, loc, kind) -> + fprintf ppf "The %s `%a' is required but not provided" kind ident id; + show_loc "Expected declaration" ppf loc + | Value_descriptions(id, d1, d2) -> + fprintf ppf + "@[Values do not match:@ %a@;<1 -2>is not included in@ %a@]" + (value_description id) d1 (value_description id) d2; + show_locs ppf (d1.val_loc, d2.val_loc); + | Type_declarations(id, d1, d2, errs) -> + fprintf ppf "@[@[%s:@;<1 2>%a@ %s@;<1 2>%a@]%a%a@]" + "Type declarations do not match" + (type_declaration id) d1 + "is not included in" + (type_declaration id) d2 + show_locs (d1.type_loc, d2.type_loc) + (Includecore.report_type_mismatch + "the first" "the second" "declaration") errs + | Extension_constructors(id, x1, x2) -> + fprintf ppf + "@[Extension declarations do not match:@ \ + %a@;<1 -2>is not included in@ %a@]" + (extension_constructor id) x1 + (extension_constructor id) x2; + show_locs ppf (x1.ext_loc, x2.ext_loc) + | Module_types(mty1, mty2)-> + fprintf ppf + "@[Modules do not match:@ \ + %a@;<1 -2>is not included in@ %a@]" + modtype mty1 + modtype mty2 + | Modtype_infos(id, d1, d2) -> + fprintf ppf + "@[Module type declarations do not match:@ \ + %a@;<1 -2>does not match@ %a@]" + (modtype_declaration id) d1 + (modtype_declaration id) d2 + | Modtype_permutation -> + fprintf ppf "Illegal permutation of structure fields" + | Interface_mismatch(impl_name, intf_name) -> + fprintf ppf "@[The implementation %s@ does not match the interface %s:" + impl_name intf_name + | Class_type_declarations(id, d1, d2, reason) -> + fprintf ppf + "@[Class type declarations do not match:@ \ + %a@;<1 -2>does not match@ %a@]@ %a" + (Printtyp.cltype_declaration id) d1 + (Printtyp.cltype_declaration id) d2 + Includeclass.report_error reason + | Class_declarations(id, d1, d2, reason) -> + fprintf ppf + "@[Class declarations do not match:@ \ + %a@;<1 -2>does not match@ %a@]@ %a" + (Printtyp.class_declaration id) d1 + (Printtyp.class_declaration id) d2 + Includeclass.report_error reason + | Unbound_modtype_path path -> + fprintf ppf "Unbound module type %a" Printtyp.path path + | Unbound_module_path path -> + fprintf ppf "Unbound module %a" Printtyp.path path + | Invalid_module_alias path -> + fprintf ppf "Module %a cannot be aliased" Printtyp.path path + +let rec context ppf = function + Module id :: rem -> + fprintf ppf "@[<2>module %a%a@]" ident id args rem + | Modtype id :: rem -> + fprintf ppf "@[<2>module type %a =@ %a@]" ident id context_mty rem + | Body x :: rem -> + fprintf ppf "functor (%s) ->@ %a" (argname x) context_mty rem + | Arg x :: rem -> + fprintf ppf "functor (%a : %a) -> ..." ident x context_mty rem + | [] -> + fprintf ppf "" +and context_mty ppf = function + (Module _ | Modtype _) :: _ as rem -> + fprintf ppf "@[<2>sig@ %a@;<1 -2>end@]" context rem + | cxt -> context ppf cxt +and args ppf = function + Body x :: rem -> + fprintf ppf "(%s)%a" (argname x) args rem + | Arg x :: rem -> + fprintf ppf "(%a :@ %a) : ..." 
ident x context_mty rem + | cxt -> + fprintf ppf " :@ %a" context_mty cxt +and argname x = + let s = Ident.name x in + if s = "*" then "" else s + +let path_of_context = function + Module id :: rem -> + let rec subm path = function + [] -> path + | Module id :: rem -> subm (Pdot (path, Ident.name id, -1)) rem + | _ -> assert false + in subm (Pident id) rem + | _ -> assert false + +let context ppf cxt = + if cxt = [] then () else + if List.for_all (function Module _ -> true | _ -> false) cxt then + fprintf ppf "In module %a:@ " path (path_of_context cxt) + else + fprintf ppf "@[At position@ %a@]@ " context cxt + +let include_err ppf (cxt, env, err) = + Printtyp.wrap_printing_env env (fun () -> + fprintf ppf "@[%a%a@]" context (List.rev cxt) include_err err) + +let buffer = ref Bytes.empty +let is_big obj = + let size = !Clflags.error_size in + size > 0 && + begin + if Bytes.length !buffer < size then buffer := Bytes.create size; + try ignore (Marshal.to_buffer !buffer 0 size obj []); false + with _ -> true + end + +let report_error ppf errs = + if errs = [] then () else + let (errs , err) = split_last errs in + let pe = ref true in + let include_err' ppf (_,_,obj as err) = + if not (is_big obj) then fprintf ppf "%a@ " include_err err + else if !pe then (fprintf ppf "...@ "; pe := false) + in + let print_errs ppf = List.iter (include_err' ppf) in + fprintf ppf "@[%a%a@]" print_errs errs include_err err + + +(* We could do a better job to split the individual error items + as sub-messages of the main interface mismatch on the whole unit. *) +let () = + Location.register_error_of_exn + (function + | Error err -> Some (Location.error_of_printer_file report_error err) + | _ -> None + ) diff --git a/res_syntax/compiler-libs-406/includemod.mli b/res_syntax/compiler-libs-406/includemod.mli new file mode 100644 index 0000000000..d5d3cbfc48 --- /dev/null +++ b/res_syntax/compiler-libs-406/includemod.mli @@ -0,0 +1,65 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +(* Inclusion checks for the module language *) + +open Typedtree +open Types +open Format + +val modtypes: + loc:Location.t -> Env.t -> + module_type -> module_type -> module_coercion + +val signatures: Env.t -> signature -> signature -> module_coercion + +val compunit: + Env.t -> string -> signature -> string -> signature -> module_coercion + +val type_declarations: + loc:Location.t -> Env.t -> + Ident.t -> type_declaration -> type_declaration -> unit + +val print_coercion: formatter -> module_coercion -> unit + +type symptom = + Missing_field of Ident.t * Location.t * string (* kind *) + | Value_descriptions of Ident.t * value_description * value_description + | Type_declarations of Ident.t * type_declaration + * type_declaration * Includecore.type_mismatch list + | Extension_constructors of + Ident.t * extension_constructor * extension_constructor + | Module_types of module_type * module_type + | Modtype_infos of Ident.t * modtype_declaration * modtype_declaration + | Modtype_permutation + | Interface_mismatch of string * string + | Class_type_declarations of + Ident.t * class_type_declaration * class_type_declaration * + Ctype.class_match_failure list + | Class_declarations of + Ident.t * class_declaration * class_declaration * + Ctype.class_match_failure list + | Unbound_modtype_path of Path.t + | Unbound_module_path of Path.t + | Invalid_module_alias of Path.t + +type pos = + Module of Ident.t | Modtype of Ident.t | Arg of Ident.t | Body of Ident.t +type error = pos list * Env.t * symptom + +exception Error of error list + +val report_error: formatter -> error list -> unit +val expand_module_alias: Env.t -> pos list -> Path.t -> Types.module_type diff --git a/res_syntax/compiler-libs-406/int32.ml b/res_syntax/compiler-libs-406/int32.ml new file mode 100644 index 0000000000..9e1eabf5e4 --- /dev/null +++ b/res_syntax/compiler-libs-406/int32.ml @@ -0,0 +1,68 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +(* Module [Int32]: 32-bit integers *) + +external neg : int32 -> int32 = "%int32_neg" +external add : int32 -> int32 -> int32 = "%int32_add" +external sub : int32 -> int32 -> int32 = "%int32_sub" +external mul : int32 -> int32 -> int32 = "%int32_mul" +external div : int32 -> int32 -> int32 = "%int32_div" +external rem : int32 -> int32 -> int32 = "%int32_mod" +external logand : int32 -> int32 -> int32 = "%int32_and" +external logor : int32 -> int32 -> int32 = "%int32_or" +external logxor : int32 -> int32 -> int32 = "%int32_xor" +external shift_left : int32 -> int -> int32 = "%int32_lsl" +external shift_right : int32 -> int -> int32 = "%int32_asr" +external shift_right_logical : int32 -> int -> int32 = "%int32_lsr" +external of_int : int -> int32 = "%int32_of_int" +external to_int : int32 -> int = "%int32_to_int" +external of_float : float -> int32 + = "caml_int32_of_float" "caml_int32_of_float_unboxed" + [@@unboxed] [@@noalloc] +external to_float : int32 -> float + = "caml_int32_to_float" "caml_int32_to_float_unboxed" + [@@unboxed] [@@noalloc] +external bits_of_float : float -> int32 + = "caml_int32_bits_of_float" "caml_int32_bits_of_float_unboxed" + [@@unboxed] [@@noalloc] +external float_of_bits : int32 -> float + = "caml_int32_float_of_bits" "caml_int32_float_of_bits_unboxed" + [@@unboxed] [@@noalloc] + +let zero = 0l +let one = 1l +let minus_one = -1l +let succ n = add n 1l +let pred n = sub n 1l +let abs n = if n >= 0l then n else neg n +let min_int = 0x80000000l +let max_int = 0x7FFFFFFFl +let lognot n = logxor n (-1l) + +external format : string -> int32 -> string = "caml_int32_format" +let to_string n = format "%d" n + +external of_string : string -> int32 = "caml_int32_of_string" + +let of_string_opt s = + (* TODO: expose a non-raising primitive directly. *) + try Some (of_string s) + with Failure _ -> None + +type t = int32 + +let compare (x: t) (y: t) = Stdlib.compare x y +let equal (x: t) (y: t) = compare x y = 0 diff --git a/res_syntax/compiler-libs-406/int32.mli b/res_syntax/compiler-libs-406/int32.mli new file mode 100644 index 0000000000..3b2e7b0552 --- /dev/null +++ b/res_syntax/compiler-libs-406/int32.mli @@ -0,0 +1,189 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** 32-bit integers. + + This module provides operations on the type [int32] + of signed 32-bit integers. Unlike the built-in [int] type, + the type [int32] is guaranteed to be exactly 32-bit wide on all + platforms. All arithmetic operations over [int32] are taken + modulo 2{^32}. + + Performance notice: values of type [int32] occupy more memory + space than values of type [int], and arithmetic operations on + [int32] are generally slower than those on [int]. Use [int32] + only when the application requires exact 32-bit arithmetic. *) + +val zero : int32 +(** The 32-bit integer 0. *) + +val one : int32 +(** The 32-bit integer 1. *) + +val minus_one : int32 +(** The 32-bit integer -1. 
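   An illustrative usage sketch, not part of the upstream interface; it only
   restates behaviour already documented here and implemented in int32.ml
   above: arithmetic wraps modulo 2^32, of_string_opt is the non-raising
   variant of of_string, and bits_of_float exposes the IEEE 754
   single-precision layout.

     let () =
       assert (Int32.add Int32.max_int 1l = Int32.min_int);
       assert (Int32.of_string_opt "0xFF" = Some 255l);
       assert (Int32.of_string_opt "not an int" = None);
       assert (Int32.bits_of_float 1.0 = 0x3F800000l)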
*) + +external neg : int32 -> int32 = "%int32_neg" +(** Unary negation. *) + +external add : int32 -> int32 -> int32 = "%int32_add" +(** Addition. *) + +external sub : int32 -> int32 -> int32 = "%int32_sub" +(** Subtraction. *) + +external mul : int32 -> int32 -> int32 = "%int32_mul" +(** Multiplication. *) + +external div : int32 -> int32 -> int32 = "%int32_div" +(** Integer division. Raise [Division_by_zero] if the second + argument is zero. This division rounds the real quotient of + its arguments towards zero, as specified for {!Pervasives.(/)}. *) + +external rem : int32 -> int32 -> int32 = "%int32_mod" +(** Integer remainder. If [y] is not zero, the result + of [Int32.rem x y] satisfies the following property: + [x = Int32.add (Int32.mul (Int32.div x y) y) (Int32.rem x y)]. + If [y = 0], [Int32.rem x y] raises [Division_by_zero]. *) + +val succ : int32 -> int32 +(** Successor. [Int32.succ x] is [Int32.add x Int32.one]. *) + +val pred : int32 -> int32 +(** Predecessor. [Int32.pred x] is [Int32.sub x Int32.one]. *) + +val abs : int32 -> int32 +(** Return the absolute value of its argument. *) + +val max_int : int32 +(** The greatest representable 32-bit integer, 2{^31} - 1. *) + +val min_int : int32 +(** The smallest representable 32-bit integer, -2{^31}. *) + + +external logand : int32 -> int32 -> int32 = "%int32_and" +(** Bitwise logical and. *) + +external logor : int32 -> int32 -> int32 = "%int32_or" +(** Bitwise logical or. *) + +external logxor : int32 -> int32 -> int32 = "%int32_xor" +(** Bitwise logical exclusive or. *) + +val lognot : int32 -> int32 +(** Bitwise logical negation. *) + +external shift_left : int32 -> int -> int32 = "%int32_lsl" +(** [Int32.shift_left x y] shifts [x] to the left by [y] bits. + The result is unspecified if [y < 0] or [y >= 32]. *) + +external shift_right : int32 -> int -> int32 = "%int32_asr" +(** [Int32.shift_right x y] shifts [x] to the right by [y] bits. + This is an arithmetic shift: the sign bit of [x] is replicated + and inserted in the vacated bits. + The result is unspecified if [y < 0] or [y >= 32]. *) + +external shift_right_logical : int32 -> int -> int32 = "%int32_lsr" +(** [Int32.shift_right_logical x y] shifts [x] to the right by [y] bits. + This is a logical shift: zeroes are inserted in the vacated bits + regardless of the sign of [x]. + The result is unspecified if [y < 0] or [y >= 32]. *) + +external of_int : int -> int32 = "%int32_of_int" +(** Convert the given integer (type [int]) to a 32-bit integer + (type [int32]). *) + +external to_int : int32 -> int = "%int32_to_int" +(** Convert the given 32-bit integer (type [int32]) to an + integer (type [int]). On 32-bit platforms, the 32-bit integer + is taken modulo 2{^31}, i.e. the high-order bit is lost + during the conversion. On 64-bit platforms, the conversion + is exact. *) + +external of_float : float -> int32 + = "caml_int32_of_float" "caml_int32_of_float_unboxed" + [@@unboxed] [@@noalloc] +(** Convert the given floating-point number to a 32-bit integer, + discarding the fractional part (truncate towards 0). + The result of the conversion is undefined if, after truncation, + the number is outside the range \[{!Int32.min_int}, {!Int32.max_int}\]. *) + +external to_float : int32 -> float + = "caml_int32_to_float" "caml_int32_to_float_unboxed" + [@@unboxed] [@@noalloc] +(** Convert the given 32-bit integer to a floating-point number. *) + +external of_string : string -> int32 = "caml_int32_of_string" +(** Convert the given string to a 32-bit integer. 
+ The string is read in decimal (by default, or if the string + begins with [0u]) or in hexadecimal, octal or binary if the + string begins with [0x], [0o] or [0b] respectively. + + The [0u] prefix reads the input as an unsigned integer in the range + [[0, 2*Int32.max_int+1]]. If the input exceeds {!Int32.max_int} + it is converted to the signed integer + [Int32.min_int + input - Int32.max_int - 1]. + + The [_] (underscore) character can appear anywhere in the string + and is ignored. + Raise [Failure "Int32.of_string"] if the given string is not + a valid representation of an integer, or if the integer represented + exceeds the range of integers representable in type [int32]. *) + +val of_string_opt: string -> int32 option +(** Same as [of_string], but return [None] instead of raising. + @since 4.05 *) + + +val to_string : int32 -> string +(** Return the string representation of its argument, in signed decimal. *) + +external bits_of_float : float -> int32 + = "caml_int32_bits_of_float" "caml_int32_bits_of_float_unboxed" + [@@unboxed] [@@noalloc] +(** Return the internal representation of the given float according + to the IEEE 754 floating-point 'single format' bit layout. + Bit 31 of the result represents the sign of the float; + bits 30 to 23 represent the (biased) exponent; bits 22 to 0 + represent the mantissa. *) + +external float_of_bits : int32 -> float + = "caml_int32_float_of_bits" "caml_int32_float_of_bits_unboxed" + [@@unboxed] [@@noalloc] +(** Return the floating-point number whose internal representation, + according to the IEEE 754 floating-point 'single format' bit layout, + is the given [int32]. *) + +type t = int32 +(** An alias for the type of 32-bit integers. *) + +val compare: t -> t -> int +(** The comparison function for 32-bit integers, with the same specification as + {!Stdlib.compare}. Along with the type [t], this function [compare] + allows the module [Int32] to be passed as argument to the functors + {!Set.Make} and {!Map.Make}. *) + +val equal: t -> t -> bool +(** The equal function for int32s. + @since 4.03.0 *) + +(**/**) + +(** {1 Deprecated functions} *) + +external format : string -> int32 -> string = "caml_int32_format" +(** Do not use this deprecated function. Instead, + used {!Printf.sprintf} with a [%l...] format. *) diff --git a/res_syntax/compiler-libs-406/int64.ml b/res_syntax/compiler-libs-406/int64.ml new file mode 100644 index 0000000000..59e6164816 --- /dev/null +++ b/res_syntax/compiler-libs-406/int64.ml @@ -0,0 +1,75 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +(* Module [Int64]: 64-bit integers *) + +external neg : int64 -> int64 = "%int64_neg" +external add : int64 -> int64 -> int64 = "%int64_add" +external sub : int64 -> int64 -> int64 = "%int64_sub" +external mul : int64 -> int64 -> int64 = "%int64_mul" +external div : int64 -> int64 -> int64 = "%int64_div" +external rem : int64 -> int64 -> int64 = "%int64_mod" +external logand : int64 -> int64 -> int64 = "%int64_and" +external logor : int64 -> int64 -> int64 = "%int64_or" +external logxor : int64 -> int64 -> int64 = "%int64_xor" +external shift_left : int64 -> int -> int64 = "%int64_lsl" +external shift_right : int64 -> int -> int64 = "%int64_asr" +external shift_right_logical : int64 -> int -> int64 = "%int64_lsr" +external of_int : int -> int64 = "%int64_of_int" +external to_int : int64 -> int = "%int64_to_int" +external of_float : float -> int64 + = "caml_int64_of_float" "caml_int64_of_float_unboxed" + [@@unboxed] [@@noalloc] +external to_float : int64 -> float + = "caml_int64_to_float" "caml_int64_to_float_unboxed" + [@@unboxed] [@@noalloc] +external of_int32 : int32 -> int64 = "%int64_of_int32" +external to_int32 : int64 -> int32 = "%int64_to_int32" +external of_nativeint : nativeint -> int64 = "%int64_of_nativeint" +external to_nativeint : int64 -> nativeint = "%int64_to_nativeint" + +let zero = 0L +let one = 1L +let minus_one = -1L +let succ n = add n 1L +let pred n = sub n 1L +let abs n = if n >= 0L then n else neg n +let min_int = 0x8000000000000000L +let max_int = 0x7FFFFFFFFFFFFFFFL +let lognot n = logxor n (-1L) + +external format : string -> int64 -> string = "caml_int64_format" +let to_string n = format "%d" n + +external of_string : string -> int64 = "caml_int64_of_string" + +let of_string_opt s = + (* TODO: expose a non-raising primitive directly. *) + try Some (of_string s) + with Failure _ -> None + + + +external bits_of_float : float -> int64 + = "caml_int64_bits_of_float" "caml_int64_bits_of_float_unboxed" + [@@unboxed] [@@noalloc] +external float_of_bits : int64 -> float + = "caml_int64_float_of_bits" "caml_int64_float_of_bits_unboxed" + [@@unboxed] [@@noalloc] + +type t = int64 + +let compare (x: t) (y: t) = Stdlib.compare x y +let equal (x: t) (y: t) = compare x y = 0 diff --git a/res_syntax/compiler-libs-406/int64.mli b/res_syntax/compiler-libs-406/int64.mli new file mode 100644 index 0000000000..3f970c262f --- /dev/null +++ b/res_syntax/compiler-libs-406/int64.mli @@ -0,0 +1,210 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** 64-bit integers. + + This module provides operations on the type [int64] of + signed 64-bit integers. Unlike the built-in [int] type, + the type [int64] is guaranteed to be exactly 64-bit wide on all + platforms. 
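   As an illustrative usage sketch (not part of the upstream documentation),
   the operations defined in int64.ml above behave as ordinary two's-complement
   64-bit arithmetic: addition wraps modulo 2^64, to_int32 keeps only the low
   32 bits, and float_of_bits reinterprets an IEEE 754 double bit pattern.

     let () =
       assert (Int64.add Int64.max_int 1L = Int64.min_int);
       assert (Int64.to_int32 0x1_0000_0001L = 1l);
       assert (Int64.float_of_bits 0x3FF0_0000_0000_0000L = 1.0)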
All arithmetic operations over [int64] are taken + modulo 2{^64} + + Performance notice: values of type [int64] occupy more memory + space than values of type [int], and arithmetic operations on + [int64] are generally slower than those on [int]. Use [int64] + only when the application requires exact 64-bit arithmetic. +*) + +val zero : int64 +(** The 64-bit integer 0. *) + +val one : int64 +(** The 64-bit integer 1. *) + +val minus_one : int64 +(** The 64-bit integer -1. *) + +external neg : int64 -> int64 = "%int64_neg" +(** Unary negation. *) + +external add : int64 -> int64 -> int64 = "%int64_add" +(** Addition. *) + +external sub : int64 -> int64 -> int64 = "%int64_sub" +(** Subtraction. *) + +external mul : int64 -> int64 -> int64 = "%int64_mul" +(** Multiplication. *) + +external div : int64 -> int64 -> int64 = "%int64_div" +(** Integer division. Raise [Division_by_zero] if the second + argument is zero. This division rounds the real quotient of + its arguments towards zero, as specified for {!Pervasives.(/)}. *) + +external rem : int64 -> int64 -> int64 = "%int64_mod" +(** Integer remainder. If [y] is not zero, the result + of [Int64.rem x y] satisfies the following property: + [x = Int64.add (Int64.mul (Int64.div x y) y) (Int64.rem x y)]. + If [y = 0], [Int64.rem x y] raises [Division_by_zero]. *) + +val succ : int64 -> int64 +(** Successor. [Int64.succ x] is [Int64.add x Int64.one]. *) + +val pred : int64 -> int64 +(** Predecessor. [Int64.pred x] is [Int64.sub x Int64.one]. *) + +val abs : int64 -> int64 +(** Return the absolute value of its argument. *) + +val max_int : int64 +(** The greatest representable 64-bit integer, 2{^63} - 1. *) + +val min_int : int64 +(** The smallest representable 64-bit integer, -2{^63}. *) + +external logand : int64 -> int64 -> int64 = "%int64_and" +(** Bitwise logical and. *) + +external logor : int64 -> int64 -> int64 = "%int64_or" +(** Bitwise logical or. *) + +external logxor : int64 -> int64 -> int64 = "%int64_xor" +(** Bitwise logical exclusive or. *) + +val lognot : int64 -> int64 +(** Bitwise logical negation. *) + +external shift_left : int64 -> int -> int64 = "%int64_lsl" +(** [Int64.shift_left x y] shifts [x] to the left by [y] bits. + The result is unspecified if [y < 0] or [y >= 64]. *) + +external shift_right : int64 -> int -> int64 = "%int64_asr" +(** [Int64.shift_right x y] shifts [x] to the right by [y] bits. + This is an arithmetic shift: the sign bit of [x] is replicated + and inserted in the vacated bits. + The result is unspecified if [y < 0] or [y >= 64]. *) + +external shift_right_logical : int64 -> int -> int64 = "%int64_lsr" +(** [Int64.shift_right_logical x y] shifts [x] to the right by [y] bits. + This is a logical shift: zeroes are inserted in the vacated bits + regardless of the sign of [x]. + The result is unspecified if [y < 0] or [y >= 64]. *) + +external of_int : int -> int64 = "%int64_of_int" +(** Convert the given integer (type [int]) to a 64-bit integer + (type [int64]). *) + +external to_int : int64 -> int = "%int64_to_int" +(** Convert the given 64-bit integer (type [int64]) to an + integer (type [int]). On 64-bit platforms, the 64-bit integer + is taken modulo 2{^63}, i.e. the high-order bit is lost + during the conversion. On 32-bit platforms, the 64-bit integer + is taken modulo 2{^31}, i.e. the top 33 bits are lost + during the conversion. 
*) + +external of_float : float -> int64 + = "caml_int64_of_float" "caml_int64_of_float_unboxed" + [@@unboxed] [@@noalloc] +(** Convert the given floating-point number to a 64-bit integer, + discarding the fractional part (truncate towards 0). + The result of the conversion is undefined if, after truncation, + the number is outside the range \[{!Int64.min_int}, {!Int64.max_int}\]. *) + +external to_float : int64 -> float + = "caml_int64_to_float" "caml_int64_to_float_unboxed" + [@@unboxed] [@@noalloc] +(** Convert the given 64-bit integer to a floating-point number. *) + + +external of_int32 : int32 -> int64 = "%int64_of_int32" +(** Convert the given 32-bit integer (type [int32]) + to a 64-bit integer (type [int64]). *) + +external to_int32 : int64 -> int32 = "%int64_to_int32" +(** Convert the given 64-bit integer (type [int64]) to a + 32-bit integer (type [int32]). The 64-bit integer + is taken modulo 2{^32}, i.e. the top 32 bits are lost + during the conversion. *) + +external of_nativeint : nativeint -> int64 = "%int64_of_nativeint" +(** Convert the given native integer (type [nativeint]) + to a 64-bit integer (type [int64]). *) + +external to_nativeint : int64 -> nativeint = "%int64_to_nativeint" +(** Convert the given 64-bit integer (type [int64]) to a + native integer. On 32-bit platforms, the 64-bit integer + is taken modulo 2{^32}. On 64-bit platforms, + the conversion is exact. *) + +external of_string : string -> int64 = "caml_int64_of_string" +(** Convert the given string to a 64-bit integer. + The string is read in decimal (by default, or if the string + begins with [0u]) or in hexadecimal, octal or binary if the + string begins with [0x], [0o] or [0b] respectively. + + The [0u] prefix reads the input as an unsigned integer in the range + [[0, 2*Int64.max_int+1]]. If the input exceeds {!Int64.max_int} + it is converted to the signed integer + [Int64.min_int + input - Int64.max_int - 1]. + + The [_] (underscore) character can appear anywhere in the string + and is ignored. + Raise [Failure "Int64.of_string"] if the given string is not + a valid representation of an integer, or if the integer represented + exceeds the range of integers representable in type [int64]. *) + +val of_string_opt: string -> int64 option +(** Same as [of_string], but return [None] instead of raising. + @since 4.05 *) + +val to_string : int64 -> string +(** Return the string representation of its argument, in decimal. *) + +external bits_of_float : float -> int64 + = "caml_int64_bits_of_float" "caml_int64_bits_of_float_unboxed" + [@@unboxed] [@@noalloc] +(** Return the internal representation of the given float according + to the IEEE 754 floating-point 'double format' bit layout. + Bit 63 of the result represents the sign of the float; + bits 62 to 52 represent the (biased) exponent; bits 51 to 0 + represent the mantissa. *) + +external float_of_bits : int64 -> float + = "caml_int64_float_of_bits" "caml_int64_float_of_bits_unboxed" + [@@unboxed] [@@noalloc] +(** Return the floating-point number whose internal representation, + according to the IEEE 754 floating-point 'double format' bit layout, + is the given [int64]. *) + +type t = int64 +(** An alias for the type of 64-bit integers. *) + +val compare: t -> t -> int +(** The comparison function for 64-bit integers, with the same specification as + {!Stdlib.compare}. Along with the type [t], this function [compare] + allows the module [Int64] to be passed as argument to the functors + {!Set.Make} and {!Map.Make}. 
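   A minimal sketch of that use (illustrative only, assuming nothing beyond the
   standard library functors):

     module Int64Set = Set.Make (Int64)
     let _ = Int64Set.mem 2L (Int64Set.add 2L Int64Set.empty)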
*) + +val equal: t -> t -> bool +(** The equal function for int64s. + @since 4.03.0 *) + +(**/**) + +(** {1 Deprecated functions} *) + +external format : string -> int64 -> string = "caml_int64_format" +(** Do not use this deprecated function. Instead, + used {!Printf.sprintf} with a [%L...] format. *) diff --git a/res_syntax/compiler-libs-406/lambda.ml b/res_syntax/compiler-libs-406/lambda.ml new file mode 100644 index 0000000000..94570d60a6 --- /dev/null +++ b/res_syntax/compiler-libs-406/lambda.ml @@ -0,0 +1,724 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +open Misc +open Path +open Asttypes + +type compile_time_constant = + | Big_endian + | Word_size + | Int_size + | Max_wosize + | Ostype_unix + | Ostype_win32 + | Ostype_cygwin + | Backend_type + +type loc_kind = + | Loc_FILE + | Loc_LINE + | Loc_MODULE + | Loc_LOC + | Loc_POS + +type immediate_or_pointer = + | Immediate + | Pointer + +type initialization_or_assignment = + | Assignment + | Heap_initialization + | Root_initialization + +type is_safe = + | Safe + | Unsafe + +type primitive = + | Pidentity + | Pbytes_to_string + | Pbytes_of_string + | Pignore + | Prevapply + | Pdirapply + | Ploc of loc_kind + (* Globals *) + | Pgetglobal of Ident.t + | Psetglobal of Ident.t + (* Operations on heap blocks *) + | Pmakeblock of int * mutable_flag * block_shape + | Pfield of int + | Pfield_computed + | Psetfield of int * immediate_or_pointer * initialization_or_assignment + | Psetfield_computed of immediate_or_pointer * initialization_or_assignment + | Pfloatfield of int + | Psetfloatfield of int * initialization_or_assignment + | Pduprecord of Types.record_representation * int + (* Force lazy values *) + | Plazyforce + (* External call *) + | Pccall of Primitive.description + (* Exceptions *) + | Praise of raise_kind + (* Boolean operations *) + | Psequand | Psequor | Pnot + (* Integer operations *) + | Pnegint | Paddint | Psubint | Pmulint + | Pdivint of is_safe | Pmodint of is_safe + | Pandint | Porint | Pxorint + | Plslint | Plsrint | Pasrint + | Pintcomp of comparison + | Poffsetint of int + | Poffsetref of int + (* Float operations *) + | Pintoffloat | Pfloatofint + | Pnegfloat | Pabsfloat + | Paddfloat | Psubfloat | Pmulfloat | Pdivfloat + | Pfloatcomp of comparison + (* String operations *) + | Pstringlength | Pstringrefu | Pstringrefs + | Pbyteslength | Pbytesrefu | Pbytessetu | Pbytesrefs | Pbytessets + (* Array operations *) + | Pmakearray of array_kind * mutable_flag + | Pduparray of array_kind * mutable_flag + | Parraylength of array_kind + | Parrayrefu of array_kind + | Parraysetu of array_kind + | Parrayrefs of array_kind + | Parraysets of array_kind + (* Test if the argument is a block or an immediate integer *) + | Pisint + (* Test if the (integer) argument is outside an interval *) + | Pisout + (* Bitvect operations *) + | Pbittest + (* Operations on boxed integers (Nativeint.t, Int32.t, Int64.t) *) + | Pbintofint of boxed_integer + | Pintofbint of boxed_integer + | Pcvtbint of boxed_integer (*source*) * 
boxed_integer (*destination*) + | Pnegbint of boxed_integer + | Paddbint of boxed_integer + | Psubbint of boxed_integer + | Pmulbint of boxed_integer + | Pdivbint of { size : boxed_integer; is_safe : is_safe } + | Pmodbint of { size : boxed_integer; is_safe : is_safe } + | Pandbint of boxed_integer + | Porbint of boxed_integer + | Pxorbint of boxed_integer + | Plslbint of boxed_integer + | Plsrbint of boxed_integer + | Pasrbint of boxed_integer + | Pbintcomp of boxed_integer * comparison + (* Operations on big arrays: (unsafe, #dimensions, kind, layout) *) + | Pbigarrayref of bool * int * bigarray_kind * bigarray_layout + | Pbigarrayset of bool * int * bigarray_kind * bigarray_layout + (* size of the nth dimension of a big array *) + | Pbigarraydim of int + (* load/set 16,32,64 bits from a string: (unsafe)*) + | Pstring_load_16 of bool + | Pstring_load_32 of bool + | Pstring_load_64 of bool + | Pstring_set_16 of bool + | Pstring_set_32 of bool + | Pstring_set_64 of bool + (* load/set 16,32,64 bits from a + (char, int8_unsigned_elt, c_layout) Bigarray.Array1.t : (unsafe) *) + | Pbigstring_load_16 of bool + | Pbigstring_load_32 of bool + | Pbigstring_load_64 of bool + | Pbigstring_set_16 of bool + | Pbigstring_set_32 of bool + | Pbigstring_set_64 of bool + (* Compile time constants *) + | Pctconst of compile_time_constant + (* byte swap *) + | Pbswap16 + | Pbbswap of boxed_integer + (* Integer to external pointer *) + | Pint_as_pointer + (* Inhibition of optimisation *) + | Popaque + +and comparison = + Ceq | Cneq | Clt | Cgt | Cle | Cge + +and value_kind = + Pgenval | Pfloatval | Pboxedintval of boxed_integer | Pintval + +and block_shape = + value_kind list option + +and array_kind = + Pgenarray | Paddrarray | Pintarray | Pfloatarray + +and boxed_integer = Primitive.boxed_integer = + Pnativeint | Pint32 | Pint64 + +and bigarray_kind = + Pbigarray_unknown + | Pbigarray_float32 | Pbigarray_float64 + | Pbigarray_sint8 | Pbigarray_uint8 + | Pbigarray_sint16 | Pbigarray_uint16 + | Pbigarray_int32 | Pbigarray_int64 + | Pbigarray_caml_int | Pbigarray_native_int + | Pbigarray_complex32 | Pbigarray_complex64 + +and bigarray_layout = + Pbigarray_unknown_layout + | Pbigarray_c_layout + | Pbigarray_fortran_layout + +and raise_kind = + | Raise_regular + | Raise_reraise + | Raise_notrace + +type structured_constant = + Const_base of constant + | Const_pointer of int + | Const_block of int * structured_constant list + | Const_float_array of string list + | Const_immstring of string + +type inline_attribute = + | Always_inline (* [@inline] or [@inline always] *) + | Never_inline (* [@inline never] *) + | Unroll of int (* [@unroll x] *) + | Default_inline (* no [@inline] attribute *) + +type specialise_attribute = + | Always_specialise (* [@specialise] or [@specialise always] *) + | Never_specialise (* [@specialise never] *) + | Default_specialise (* no [@specialise] attribute *) + +type function_kind = Curried | Tupled + +type let_kind = Strict | Alias | StrictOpt | Variable + +type meth_kind = Self | Public | Cached + +type shared_code = (int * int) list + +type function_attribute = { + inline : inline_attribute; + specialise : specialise_attribute; + is_a_functor: bool; + stub: bool; +} + +type lambda = + Lvar of Ident.t + | Lconst of structured_constant + | Lapply of lambda_apply + | Lfunction of lfunction + | Llet of let_kind * value_kind * Ident.t * lambda * lambda + | Lletrec of (Ident.t * lambda) list * lambda + | Lprim of primitive * lambda list * Location.t + | Lswitch of lambda * lambda_switch * 
Location.t + | Lstringswitch of + lambda * (string * lambda) list * lambda option * Location.t + | Lstaticraise of int * lambda list + | Lstaticcatch of lambda * (int * Ident.t list) * lambda + | Ltrywith of lambda * Ident.t * lambda + | Lifthenelse of lambda * lambda * lambda + | Lsequence of lambda * lambda + | Lwhile of lambda * lambda + | Lfor of Ident.t * lambda * lambda * direction_flag * lambda + | Lassign of Ident.t * lambda + | Lsend of meth_kind * lambda * lambda * lambda list * Location.t + | Levent of lambda * lambda_event + | Lifused of Ident.t * lambda + +and lfunction = + { kind: function_kind; + params: Ident.t list; + body: lambda; + attr: function_attribute; (* specified with [@inline] attribute *) + loc: Location.t; } + +and lambda_apply = + { ap_func : lambda; + ap_args : lambda list; + ap_loc : Location.t; + ap_should_be_tailcall : bool; + ap_inlined : inline_attribute; + ap_specialised : specialise_attribute; } + +and lambda_switch = + { sw_numconsts: int; + sw_consts: (int * lambda) list; + sw_numblocks: int; + sw_blocks: (int * lambda) list; + sw_failaction : lambda option} + +and lambda_event = + { lev_loc: Location.t; + lev_kind: lambda_event_kind; + lev_repr: int ref option; + lev_env: Env.summary } + +and lambda_event_kind = + Lev_before + | Lev_after of Types.type_expr + | Lev_function + | Lev_pseudo + | Lev_module_definition of Ident.t + +type program = + { module_ident : Ident.t; + main_module_block_size : int; + required_globals : Ident.Set.t; + code : lambda } + +let const_unit = Const_pointer 0 + +let lambda_unit = Lconst const_unit + +let default_function_attribute = { + inline = Default_inline; + specialise = Default_specialise; + is_a_functor = false; + stub = false; +} + +let default_stub_attribute = + { default_function_attribute with stub = true } + +(* Build sharing keys *) +(* + Those keys are later compared with Stdlib.compare. + For that reason, they should not include cycles. +*) + +exception Not_simple + +let max_raw = 32 + +let make_key e = + let count = ref 0 (* Used for controling size *) + and make_key = Ident.make_key_generator () in + (* make_key is used for normalizing let-bound variables *) + let rec tr_rec env e = + incr count ; + if !count > max_raw then raise Not_simple ; (* Too big ! 
*) + match e with + | Lvar id -> + begin + try Ident.find_same id env + with Not_found -> e + end + | Lconst (Const_base (Const_string _)) -> + (* Mutable constants are not shared *) + raise Not_simple + | Lconst _ -> e + | Lapply ap -> + Lapply {ap with ap_func = tr_rec env ap.ap_func; + ap_args = tr_recs env ap.ap_args; + ap_loc = Location.none} + | Llet (Alias,_k,x,ex,e) -> (* Ignore aliases -> substitute *) + let ex = tr_rec env ex in + tr_rec (Ident.add x ex env) e + | Llet ((Strict | StrictOpt),_k,x,ex,Lvar v) when Ident.same v x -> + tr_rec env ex + | Llet (str,k,x,ex,e) -> + (* Because of side effects, keep other lets with normalized names *) + let ex = tr_rec env ex in + let y = make_key x in + Llet (str,k,y,ex,tr_rec (Ident.add x (Lvar y) env) e) + | Lprim (p,es,_) -> + Lprim (p,tr_recs env es, Location.none) + | Lswitch (e,sw,loc) -> + Lswitch (tr_rec env e,tr_sw env sw,loc) + | Lstringswitch (e,sw,d,_) -> + Lstringswitch + (tr_rec env e, + List.map (fun (s,e) -> s,tr_rec env e) sw, + tr_opt env d, + Location.none) + | Lstaticraise (i,es) -> + Lstaticraise (i,tr_recs env es) + | Lstaticcatch (e1,xs,e2) -> + Lstaticcatch (tr_rec env e1,xs,tr_rec env e2) + | Ltrywith (e1,x,e2) -> + Ltrywith (tr_rec env e1,x,tr_rec env e2) + | Lifthenelse (cond,ifso,ifnot) -> + Lifthenelse (tr_rec env cond,tr_rec env ifso,tr_rec env ifnot) + | Lsequence (e1,e2) -> + Lsequence (tr_rec env e1,tr_rec env e2) + | Lassign (x,e) -> + Lassign (x,tr_rec env e) + | Lsend (m,e1,e2,es,_loc) -> + Lsend (m,tr_rec env e1,tr_rec env e2,tr_recs env es,Location.none) + | Lifused (id,e) -> Lifused (id,tr_rec env e) + | Lletrec _|Lfunction _ + | Lfor _ | Lwhile _ +(* Beware: (PR#6412) the event argument to Levent + may include cyclic structure of type Type.typexpr *) + | Levent _ -> + raise Not_simple + + and tr_recs env es = List.map (tr_rec env) es + + and tr_sw env sw = + { sw with + sw_consts = List.map (fun (i,e) -> i,tr_rec env e) sw.sw_consts ; + sw_blocks = List.map (fun (i,e) -> i,tr_rec env e) sw.sw_blocks ; + sw_failaction = tr_opt env sw.sw_failaction ; } + + and tr_opt env = function + | None -> None + | Some e -> Some (tr_rec env e) in + + try + Some (tr_rec Ident.empty e) + with Not_simple -> None + +(***************) + +let name_lambda strict arg fn = + match arg with + Lvar id -> fn id + | _ -> let id = Ident.create "let" in Llet(strict, Pgenval, id, arg, fn id) + +let name_lambda_list args fn = + let rec name_list names = function + [] -> fn (List.rev names) + | (Lvar _ as arg) :: rem -> + name_list (arg :: names) rem + | arg :: rem -> + let id = Ident.create "let" in + Llet(Strict, Pgenval, id, arg, name_list (Lvar id :: names) rem) in + name_list [] args + + +let iter_opt f = function + | None -> () + | Some e -> f e + +let iter f = function + Lvar _ + | Lconst _ -> () + | Lapply{ap_func = fn; ap_args = args} -> + f fn; List.iter f args + | Lfunction{body} -> + f body + | Llet(_str, _k, _id, arg, body) -> + f arg; f body + | Lletrec(decl, body) -> + f body; + List.iter (fun (_id, exp) -> f exp) decl + | Lprim(_p, args, _loc) -> + List.iter f args + | Lswitch(arg, sw,_) -> + f arg; + List.iter (fun (_key, case) -> f case) sw.sw_consts; + List.iter (fun (_key, case) -> f case) sw.sw_blocks; + iter_opt f sw.sw_failaction + | Lstringswitch (arg,cases,default,_) -> + f arg ; + List.iter (fun (_,act) -> f act) cases ; + iter_opt f default + | Lstaticraise (_,args) -> + List.iter f args + | Lstaticcatch(e1, _, e2) -> + f e1; f e2 + | Ltrywith(e1, _, e2) -> + f e1; f e2 + | Lifthenelse(e1, e2, e3) -> + f 
e1; f e2; f e3 + | Lsequence(e1, e2) -> + f e1; f e2 + | Lwhile(e1, e2) -> + f e1; f e2 + | Lfor(_v, e1, e2, _dir, e3) -> + f e1; f e2; f e3 + | Lassign(_, e) -> + f e + | Lsend (_k, met, obj, args, _) -> + List.iter f (met::obj::args) + | Levent (lam, _evt) -> + f lam + | Lifused (_v, e) -> + f e + + +module IdentSet = Set.Make(Ident) + +let free_ids get l = + let fv = ref IdentSet.empty in + let rec free l = + iter free l; + fv := List.fold_right IdentSet.add (get l) !fv; + match l with + Lfunction{params} -> + List.iter (fun param -> fv := IdentSet.remove param !fv) params + | Llet(_str, _k, id, _arg, _body) -> + fv := IdentSet.remove id !fv + | Lletrec(decl, _body) -> + List.iter (fun (id, _exp) -> fv := IdentSet.remove id !fv) decl + | Lstaticcatch(_e1, (_,vars), _e2) -> + List.iter (fun id -> fv := IdentSet.remove id !fv) vars + | Ltrywith(_e1, exn, _e2) -> + fv := IdentSet.remove exn !fv + | Lfor(v, _e1, _e2, _dir, _e3) -> + fv := IdentSet.remove v !fv + | Lassign(id, _e) -> + fv := IdentSet.add id !fv + | Lvar _ | Lconst _ | Lapply _ + | Lprim _ | Lswitch _ | Lstringswitch _ | Lstaticraise _ + | Lifthenelse _ | Lsequence _ | Lwhile _ + | Lsend _ | Levent _ | Lifused _ -> () + in free l; !fv + +let free_variables l = + free_ids (function Lvar id -> [id] | _ -> []) l + +let free_methods l = + free_ids (function Lsend(Self, Lvar meth, _, _, _) -> [meth] | _ -> []) l + +(* Check if an action has a "when" guard *) +let raise_count = ref 0 + +let next_raise_count () = + incr raise_count ; + !raise_count + +let negative_raise_count = ref 0 + +let next_negative_raise_count () = + decr negative_raise_count ; + !negative_raise_count + +(* Anticipated staticraise, for guards *) +let staticfail = Lstaticraise (0,[]) + +let rec is_guarded = function + | Lifthenelse(_cond, _body, Lstaticraise (0,[])) -> true + | Llet(_str, _k, _id, _lam, body) -> is_guarded body + | Levent(lam, _ev) -> is_guarded lam + | _ -> false + +let rec patch_guarded patch = function + | Lifthenelse (cond, body, Lstaticraise (0,[])) -> + Lifthenelse (cond, body, patch) + | Llet(str, k, id, lam, body) -> + Llet (str, k, id, lam, patch_guarded patch body) + | Levent(lam, ev) -> + Levent (patch_guarded patch lam, ev) + | _ -> fatal_error "Lambda.patch_guarded" + +(* Translate an access path *) + +let rec transl_normal_path = function + Pident id -> + if Ident.global id + then Lprim(Pgetglobal id, [], Location.none) + else Lvar id + | Pdot(p, _s, pos) -> + Lprim(Pfield pos, [transl_normal_path p], Location.none) + | Papply _ -> + fatal_error "Lambda.transl_path" + +(* Translation of identifiers *) + +let transl_module_path ?(loc=Location.none) env path = + transl_normal_path (Env.normalize_path (Some loc) env path) + +let transl_value_path ?(loc=Location.none) env path = + transl_normal_path (Env.normalize_path_prefix (Some loc) env path) + +let transl_class_path = transl_value_path +let transl_extension_path = transl_value_path + +(* compatibility alias, deprecated in the .mli *) +let transl_path = transl_value_path + +(* Compile a sequence of expressions *) + +let rec make_sequence fn = function + [] -> lambda_unit + | [x] -> fn x + | x::rem -> + let lam = fn x in Lsequence(lam, make_sequence fn rem) + +(* Apply a substitution to a lambda-term. + Assumes that the bound variables of the lambda-term do not + belong to the domain of the substitution. + Assumes that the image of the substitution is out of reach + of the bound variables of the lambda-term (no capture). 
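   An illustrative sketch (not part of the upstream file) of how the
   subst_lambda defined just below is meant to be used: the substitution is an
   Ident.tbl mapping identifiers to terms, so rewriting x + x under x -> 1
   gives 1 + 1.

     let _subst_example =
       let x = Ident.create "x" in
       let one = Lconst (Const_base (Const_int 1)) in
       subst_lambda
         (Ident.add x one Ident.empty)
         (Lprim (Paddint, [Lvar x; Lvar x], Location.none))
       (* = Lprim (Paddint, [one; one], Location.none) *)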
*) + +let subst_lambda s lam = + let rec subst = function + Lvar id as l -> + begin try Ident.find_same id s with Not_found -> l end + | Lconst _ as l -> l + | Lapply ap -> + Lapply{ap with ap_func = subst ap.ap_func; + ap_args = List.map subst ap.ap_args} + | Lfunction{kind; params; body; attr; loc} -> + Lfunction{kind; params; body = subst body; attr; loc} + | Llet(str, k, id, arg, body) -> Llet(str, k, id, subst arg, subst body) + | Lletrec(decl, body) -> Lletrec(List.map subst_decl decl, subst body) + | Lprim(p, args, loc) -> Lprim(p, List.map subst args, loc) + | Lswitch(arg, sw, loc) -> + Lswitch(subst arg, + {sw with sw_consts = List.map subst_case sw.sw_consts; + sw_blocks = List.map subst_case sw.sw_blocks; + sw_failaction = subst_opt sw.sw_failaction; }, + loc) + | Lstringswitch (arg,cases,default,loc) -> + Lstringswitch + (subst arg,List.map subst_strcase cases,subst_opt default,loc) + | Lstaticraise (i,args) -> Lstaticraise (i, List.map subst args) + | Lstaticcatch(e1, io, e2) -> Lstaticcatch(subst e1, io, subst e2) + | Ltrywith(e1, exn, e2) -> Ltrywith(subst e1, exn, subst e2) + | Lifthenelse(e1, e2, e3) -> Lifthenelse(subst e1, subst e2, subst e3) + | Lsequence(e1, e2) -> Lsequence(subst e1, subst e2) + | Lwhile(e1, e2) -> Lwhile(subst e1, subst e2) + | Lfor(v, e1, e2, dir, e3) -> Lfor(v, subst e1, subst e2, dir, subst e3) + | Lassign(id, e) -> Lassign(id, subst e) + | Lsend (k, met, obj, args, loc) -> + Lsend (k, subst met, subst obj, List.map subst args, loc) + | Levent (lam, evt) -> Levent (subst lam, evt) + | Lifused (v, e) -> Lifused (v, subst e) + and subst_decl (id, exp) = (id, subst exp) + and subst_case (key, case) = (key, subst case) + and subst_strcase (key, case) = (key, subst case) + and subst_opt = function + | None -> None + | Some e -> Some (subst e) + in subst lam + +let rec map f lam = + let lam = + match lam with + | Lvar _ -> lam + | Lconst _ -> lam + | Lapply { ap_func; ap_args; ap_loc; ap_should_be_tailcall; + ap_inlined; ap_specialised } -> + Lapply { + ap_func = map f ap_func; + ap_args = List.map (map f) ap_args; + ap_loc; + ap_should_be_tailcall; + ap_inlined; + ap_specialised; + } + | Lfunction { kind; params; body; attr; loc; } -> + Lfunction { kind; params; body = map f body; attr; loc; } + | Llet (str, k, v, e1, e2) -> + Llet (str, k, v, map f e1, map f e2) + | Lletrec (idel, e2) -> + Lletrec (List.map (fun (v, e) -> (v, map f e)) idel, map f e2) + | Lprim (p, el, loc) -> + Lprim (p, List.map (map f) el, loc) + | Lswitch (e, sw, loc) -> + Lswitch (map f e, + { sw_numconsts = sw.sw_numconsts; + sw_consts = List.map (fun (n, e) -> (n, map f e)) sw.sw_consts; + sw_numblocks = sw.sw_numblocks; + sw_blocks = List.map (fun (n, e) -> (n, map f e)) sw.sw_blocks; + sw_failaction = Misc.may_map (map f) sw.sw_failaction; + }, + loc) + | Lstringswitch (e, sw, default, loc) -> + Lstringswitch ( + map f e, + List.map (fun (s, e) -> (s, map f e)) sw, + Misc.may_map (map f) default, + loc) + | Lstaticraise (i, args) -> + Lstaticraise (i, List.map (map f) args) + | Lstaticcatch (body, id, handler) -> + Lstaticcatch (map f body, id, map f handler) + | Ltrywith (e1, v, e2) -> + Ltrywith (map f e1, v, map f e2) + | Lifthenelse (e1, e2, e3) -> + Lifthenelse (map f e1, map f e2, map f e3) + | Lsequence (e1, e2) -> + Lsequence (map f e1, map f e2) + | Lwhile (e1, e2) -> + Lwhile (map f e1, map f e2) + | Lfor (v, e1, e2, dir, e3) -> + Lfor (v, map f e1, map f e2, dir, map f e3) + | Lassign (v, e) -> + Lassign (v, map f e) + | Lsend (k, m, o, el, loc) -> + Lsend (k, map 
f m, map f o, List.map (map f) el, loc) + | Levent (l, ev) -> + Levent (map f l, ev) + | Lifused (v, e) -> + Lifused (v, map f e) + in + f lam + +(* To let-bind expressions to variables *) + +let bind str var exp body = + match exp with + Lvar var' when Ident.same var var' -> body + | _ -> Llet(str, Pgenval, var, exp, body) + +and commute_comparison = function +| Ceq -> Ceq| Cneq -> Cneq +| Clt -> Cgt | Cle -> Cge +| Cgt -> Clt | Cge -> Cle + +and negate_comparison = function +| Ceq -> Cneq| Cneq -> Ceq +| Clt -> Cge | Cle -> Cgt +| Cgt -> Cle | Cge -> Clt + +let raise_kind = function + | Raise_regular -> "raise" + | Raise_reraise -> "reraise" + | Raise_notrace -> "raise_notrace" + +let lam_of_loc kind loc = + let loc_start = loc.Location.loc_start in + let (file, lnum, cnum) = Location.get_pos_info loc_start in + let enum = loc.Location.loc_end.Lexing.pos_cnum - + loc_start.Lexing.pos_cnum + cnum in + match kind with + | Loc_POS -> + Lconst (Const_block (0, [ + Const_immstring file; + Const_base (Const_int lnum); + Const_base (Const_int cnum); + Const_base (Const_int enum); + ])) + | Loc_FILE -> Lconst (Const_immstring file) + | Loc_MODULE -> + let filename = Filename.basename file in + let name = Env.get_unit_name () in + let module_name = if name = "" then "//"^filename^"//" else name in + Lconst (Const_immstring module_name) + | Loc_LOC -> + let loc = Printf.sprintf "File %S, line %d, characters %d-%d" + file lnum cnum enum in + Lconst (Const_immstring loc) + | Loc_LINE -> Lconst (Const_base (Const_int lnum)) + +let merge_inline_attributes attr1 attr2 = + match attr1, attr2 with + | Default_inline, _ -> Some attr2 + | _, Default_inline -> Some attr1 + | _, _ -> + if attr1 = attr2 then Some attr1 + else None + +let reset () = + raise_count := 0 diff --git a/res_syntax/compiler-libs-406/lambda.mli b/res_syntax/compiler-libs-406/lambda.mli new file mode 100644 index 0000000000..fef608d4a7 --- /dev/null +++ b/res_syntax/compiler-libs-406/lambda.mli @@ -0,0 +1,377 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* The "lambda" intermediate code *) + +open Asttypes + +type compile_time_constant = + | Big_endian + | Word_size + | Int_size + | Max_wosize + | Ostype_unix + | Ostype_win32 + | Ostype_cygwin + | Backend_type + +type loc_kind = + | Loc_FILE + | Loc_LINE + | Loc_MODULE + | Loc_LOC + | Loc_POS + +type immediate_or_pointer = + | Immediate + | Pointer + +type initialization_or_assignment = + | Assignment + (* Initialization of in heap values, like [caml_initialize] C primitive. The + field should not have been read before and initialization should happen + only once. *) + | Heap_initialization + (* Initialization of roots only. Compiles to a simple store. + No checks are done to preserve GC invariants. 
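   As a point of orientation (illustrative, not from the upstream sources): an
   ordinary mutable update such as r.f <- v on a pointer-carrying first field
   corresponds to a term of the shape

     Lprim (Psetfield (0, Pointer, Assignment), [r; v], loc)

   whereas Heap_initialization marks the one-time first store into a freshly
   allocated block, and Root_initialization, described here, compiles to a
   plain store with no GC bookkeeping.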
*) + | Root_initialization + +type is_safe = + | Safe + | Unsafe + +type primitive = + | Pidentity + | Pbytes_to_string + | Pbytes_of_string + | Pignore + | Prevapply + | Pdirapply + | Ploc of loc_kind + (* Globals *) + | Pgetglobal of Ident.t + | Psetglobal of Ident.t + (* Operations on heap blocks *) + | Pmakeblock of int * mutable_flag * block_shape + | Pfield of int + | Pfield_computed + | Psetfield of int * immediate_or_pointer * initialization_or_assignment + | Psetfield_computed of immediate_or_pointer * initialization_or_assignment + | Pfloatfield of int + | Psetfloatfield of int * initialization_or_assignment + | Pduprecord of Types.record_representation * int + (* Force lazy values *) + | Plazyforce + (* External call *) + | Pccall of Primitive.description + (* Exceptions *) + | Praise of raise_kind + (* Boolean operations *) + | Psequand | Psequor | Pnot + (* Integer operations *) + | Pnegint | Paddint | Psubint | Pmulint + | Pdivint of is_safe | Pmodint of is_safe + | Pandint | Porint | Pxorint + | Plslint | Plsrint | Pasrint + | Pintcomp of comparison + | Poffsetint of int + | Poffsetref of int + (* Float operations *) + | Pintoffloat | Pfloatofint + | Pnegfloat | Pabsfloat + | Paddfloat | Psubfloat | Pmulfloat | Pdivfloat + | Pfloatcomp of comparison + (* String operations *) + | Pstringlength | Pstringrefu | Pstringrefs + | Pbyteslength | Pbytesrefu | Pbytessetu | Pbytesrefs | Pbytessets + (* Array operations *) + | Pmakearray of array_kind * mutable_flag + | Pduparray of array_kind * mutable_flag + (** For [Pduparray], the argument must be an immutable array. + The arguments of [Pduparray] give the kind and mutability of the + array being *produced* by the duplication. *) + | Parraylength of array_kind + | Parrayrefu of array_kind + | Parraysetu of array_kind + | Parrayrefs of array_kind + | Parraysets of array_kind + (* Test if the argument is a block or an immediate integer *) + | Pisint + (* Test if the (integer) argument is outside an interval *) + | Pisout + (* Bitvect operations *) + | Pbittest + (* Operations on boxed integers (Nativeint.t, Int32.t, Int64.t) *) + | Pbintofint of boxed_integer + | Pintofbint of boxed_integer + | Pcvtbint of boxed_integer (*source*) * boxed_integer (*destination*) + | Pnegbint of boxed_integer + | Paddbint of boxed_integer + | Psubbint of boxed_integer + | Pmulbint of boxed_integer + | Pdivbint of { size : boxed_integer; is_safe : is_safe } + | Pmodbint of { size : boxed_integer; is_safe : is_safe } + | Pandbint of boxed_integer + | Porbint of boxed_integer + | Pxorbint of boxed_integer + | Plslbint of boxed_integer + | Plsrbint of boxed_integer + | Pasrbint of boxed_integer + | Pbintcomp of boxed_integer * comparison + (* Operations on big arrays: (unsafe, #dimensions, kind, layout) *) + | Pbigarrayref of bool * int * bigarray_kind * bigarray_layout + | Pbigarrayset of bool * int * bigarray_kind * bigarray_layout + (* size of the nth dimension of a big array *) + | Pbigarraydim of int + (* load/set 16,32,64 bits from a string: (unsafe)*) + | Pstring_load_16 of bool + | Pstring_load_32 of bool + | Pstring_load_64 of bool + | Pstring_set_16 of bool + | Pstring_set_32 of bool + | Pstring_set_64 of bool + (* load/set 16,32,64 bits from a + (char, int8_unsigned_elt, c_layout) Bigarray.Array1.t : (unsafe) *) + | Pbigstring_load_16 of bool + | Pbigstring_load_32 of bool + | Pbigstring_load_64 of bool + | Pbigstring_set_16 of bool + | Pbigstring_set_32 of bool + | Pbigstring_set_64 of bool + (* Compile time constants *) + | Pctconst of 
compile_time_constant + (* byte swap *) + | Pbswap16 + | Pbbswap of boxed_integer + (* Integer to external pointer *) + | Pint_as_pointer + (* Inhibition of optimisation *) + | Popaque + +and comparison = + Ceq | Cneq | Clt | Cgt | Cle | Cge + +and array_kind = + Pgenarray | Paddrarray | Pintarray | Pfloatarray + +and value_kind = + Pgenval | Pfloatval | Pboxedintval of boxed_integer | Pintval + +and block_shape = + value_kind list option + +and boxed_integer = Primitive.boxed_integer = + Pnativeint | Pint32 | Pint64 + +and bigarray_kind = + Pbigarray_unknown + | Pbigarray_float32 | Pbigarray_float64 + | Pbigarray_sint8 | Pbigarray_uint8 + | Pbigarray_sint16 | Pbigarray_uint16 + | Pbigarray_int32 | Pbigarray_int64 + | Pbigarray_caml_int | Pbigarray_native_int + | Pbigarray_complex32 | Pbigarray_complex64 + +and bigarray_layout = + Pbigarray_unknown_layout + | Pbigarray_c_layout + | Pbigarray_fortran_layout + +and raise_kind = + | Raise_regular + | Raise_reraise + | Raise_notrace + +type structured_constant = + Const_base of constant + | Const_pointer of int + | Const_block of int * structured_constant list + | Const_float_array of string list + | Const_immstring of string + +type inline_attribute = + | Always_inline (* [@inline] or [@inline always] *) + | Never_inline (* [@inline never] *) + | Unroll of int (* [@unroll x] *) + | Default_inline (* no [@inline] attribute *) + +type specialise_attribute = + | Always_specialise (* [@specialise] or [@specialise always] *) + | Never_specialise (* [@specialise never] *) + | Default_specialise (* no [@specialise] attribute *) + +type function_kind = Curried | Tupled + +type let_kind = Strict | Alias | StrictOpt | Variable +(* Meaning of kinds for let x = e in e': + Strict: e may have side-effects; always evaluate e first + (If e is a simple expression, e.g. a variable or constant, + we may still substitute e'[x/e].) 
+ Alias: e is pure, we can substitute e'[x/e] if x has 0 or 1 occurrences + in e' + StrictOpt: e does not have side-effects, but depend on the store; + we can discard e if x does not appear in e' + Variable: the variable x is assigned later in e' + *) + +type meth_kind = Self | Public | Cached + +type shared_code = (int * int) list (* stack size -> code label *) + +type function_attribute = { + inline : inline_attribute; + specialise : specialise_attribute; + is_a_functor: bool; + stub: bool; +} + +type lambda = + Lvar of Ident.t + | Lconst of structured_constant + | Lapply of lambda_apply + | Lfunction of lfunction + | Llet of let_kind * value_kind * Ident.t * lambda * lambda + | Lletrec of (Ident.t * lambda) list * lambda + | Lprim of primitive * lambda list * Location.t + | Lswitch of lambda * lambda_switch * Location.t +(* switch on strings, clauses are sorted by string order, + strings are pairwise distinct *) + | Lstringswitch of + lambda * (string * lambda) list * lambda option * Location.t + | Lstaticraise of int * lambda list + | Lstaticcatch of lambda * (int * Ident.t list) * lambda + | Ltrywith of lambda * Ident.t * lambda + | Lifthenelse of lambda * lambda * lambda + | Lsequence of lambda * lambda + | Lwhile of lambda * lambda + | Lfor of Ident.t * lambda * lambda * direction_flag * lambda + | Lassign of Ident.t * lambda + | Lsend of meth_kind * lambda * lambda * lambda list * Location.t + | Levent of lambda * lambda_event + | Lifused of Ident.t * lambda + +and lfunction = + { kind: function_kind; + params: Ident.t list; + body: lambda; + attr: function_attribute; (* specified with [@inline] attribute *) + loc : Location.t; } + +and lambda_apply = + { ap_func : lambda; + ap_args : lambda list; + ap_loc : Location.t; + ap_should_be_tailcall : bool; (* true if [@tailcall] was specified *) + ap_inlined : inline_attribute; (* specified with the [@inlined] attribute *) + ap_specialised : specialise_attribute; } + +and lambda_switch = + { sw_numconsts: int; (* Number of integer cases *) + sw_consts: (int * lambda) list; (* Integer cases *) + sw_numblocks: int; (* Number of tag block cases *) + sw_blocks: (int * lambda) list; (* Tag block cases *) + sw_failaction : lambda option} (* Action to take if failure *) +and lambda_event = + { lev_loc: Location.t; + lev_kind: lambda_event_kind; + lev_repr: int ref option; + lev_env: Env.summary } + +and lambda_event_kind = + Lev_before + | Lev_after of Types.type_expr + | Lev_function + | Lev_pseudo + | Lev_module_definition of Ident.t + +type program = + { module_ident : Ident.t; + main_module_block_size : int; + required_globals : Ident.Set.t; (* Modules whose initializer side effects + must occur before [code]. *) + code : lambda } +(* Lambda code for the middle-end. + * In the closure case the code is a sequence of assignments to a + preallocated block of size [main_module_block_size] using + (Setfield(Getglobal(module_ident))). The size is used to preallocate + the block. + * In the flambda case the code is an expression returning a block + value of size [main_module_block_size]. 
The size is used to build + the module root as an initialize_symbol + Initialize_symbol(module_name, 0, + [getfield 0; ...; getfield (main_module_block_size - 1)]) +*) + +(* Sharing key *) +val make_key: lambda -> lambda option + +val const_unit: structured_constant +val lambda_unit: lambda +val name_lambda: let_kind -> lambda -> (Ident.t -> lambda) -> lambda +val name_lambda_list: lambda list -> (lambda list -> lambda) -> lambda + +val iter: (lambda -> unit) -> lambda -> unit +module IdentSet: Set.S with type elt = Ident.t +val free_variables: lambda -> IdentSet.t +val free_methods: lambda -> IdentSet.t + +val transl_normal_path: Path.t -> lambda (* Path.t is already normal *) +val transl_path: ?loc:Location.t -> Env.t -> Path.t -> lambda +[@@ocaml.deprecated "use transl_{module,value,extension,class}_path instead"] + +val transl_module_path: ?loc:Location.t -> Env.t -> Path.t -> lambda +val transl_value_path: ?loc:Location.t -> Env.t -> Path.t -> lambda +val transl_extension_path: ?loc:Location.t -> Env.t -> Path.t -> lambda +val transl_class_path: ?loc:Location.t -> Env.t -> Path.t -> lambda + +val make_sequence: ('a -> lambda) -> 'a list -> lambda + +val subst_lambda: lambda Ident.tbl -> lambda -> lambda +val map : (lambda -> lambda) -> lambda -> lambda +val bind : let_kind -> Ident.t -> lambda -> lambda -> lambda + +val commute_comparison : comparison -> comparison +val negate_comparison : comparison -> comparison + +val default_function_attribute : function_attribute +val default_stub_attribute : function_attribute + +(***********************) +(* For static failures *) +(***********************) + +(* Get a new static failure ident *) +val next_raise_count : unit -> int +val next_negative_raise_count : unit -> int + (* Negative raise counts are used to compile 'match ... with + exception x -> ...'. This disabled some simplifications + performed by the Simplif module that assume that static raises + are in tail position in their handler. *) + +val staticfail : lambda (* Anticipated static failure *) + +(* Check anticipated failure, substitute its final value *) +val is_guarded: lambda -> bool +val patch_guarded : lambda -> lambda -> lambda + +val raise_kind: raise_kind -> string +val lam_of_loc : loc_kind -> Location.t -> lambda + +val merge_inline_attributes + : inline_attribute + -> inline_attribute + -> inline_attribute option + +val reset: unit -> unit diff --git a/res_syntax/compiler-libs-406/lazy.ml b/res_syntax/compiler-libs-406/lazy.ml new file mode 100644 index 0000000000..7dc1e9ddfe --- /dev/null +++ b/res_syntax/compiler-libs-406/lazy.ml @@ -0,0 +1,83 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Damien Doligez, projet Para, INRIA Rocquencourt *) +(* *) +(* Copyright 1997 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Module [Lazy]: deferred computations *) + + +(* + WARNING: some purple magic is going on here. Do not take this file + as an example of how to program in OCaml. +*) + + +(* We make use of two special tags provided by the runtime: + [lazy_tag] and [forward_tag]. + + A value of type ['a Lazy.t] can be one of three things: + 1. A block of size 1 with tag [lazy_tag]. 
Its field is a closure of + type [unit -> 'a] that computes the value. + 2. A block of size 1 with tag [forward_tag]. Its field is the value + of type ['a] that was computed. + 3. Anything else except a float. This has type ['a] and is the value + that was computed. + Exceptions are stored in format (1). + The GC will magically change things from (2) to (3) according to its + fancy. + + If OCaml was configured with the -flat-float-array option (which is + currently the default), the following is also true: + We cannot use representation (3) for a [float Lazy.t] because + [caml_make_array] assumes that only a [float] value can have tag + [Double_tag]. + + We have to use the built-in type constructor [lazy_t] to + let the compiler implement the special typing and compilation + rules for the [lazy] keyword. +*) + +type 'a t = 'a lazy_t + +exception Undefined = CamlinternalLazy.Undefined + +external make_forward : 'a -> 'a lazy_t = "caml_lazy_make_forward" + +external force : 'a t -> 'a = "%lazy_force" + +(* let force = force *) + +let force_val = CamlinternalLazy.force_val + +let from_fun (f : unit -> 'arg) = + let x = Obj.new_block Obj.lazy_tag 1 in + Obj.set_field x 0 (Obj.repr f); + (Obj.obj x : 'arg t) + + +let from_val (v : 'arg) = + let t = Obj.tag (Obj.repr v) in + if t = Obj.forward_tag || t = Obj.lazy_tag || t = Obj.double_tag then begin + make_forward v + end else begin + (Obj.magic v : 'arg t) + end + + +let is_val (l : 'arg t) = Obj.tag (Obj.repr l) <> Obj.lazy_tag + +let lazy_from_fun = from_fun + +let lazy_from_val = from_val + +let lazy_is_val = is_val diff --git a/res_syntax/compiler-libs-406/lazy.mli b/res_syntax/compiler-libs-406/lazy.mli new file mode 100644 index 0000000000..ee10366ee8 --- /dev/null +++ b/res_syntax/compiler-libs-406/lazy.mli @@ -0,0 +1,95 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Damien Doligez, projet Para, INRIA Rocquencourt *) +(* *) +(* Copyright 1997 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Deferred computations. *) + +type 'a t = 'a lazy_t +(** A value of type ['a Lazy.t] is a deferred computation, called + a suspension, that has a result of type ['a]. The special + expression syntax [lazy (expr)] makes a suspension of the + computation of [expr], without computing [expr] itself yet. + "Forcing" the suspension will then compute [expr] and return its + result. + + Note: [lazy_t] is the built-in type constructor used by the compiler + for the [lazy] keyword. You should not use it directly. Always use + [Lazy.t] instead. + + Note: [Lazy.force] is not thread-safe. If you use this module in + a multi-threaded program, you will need to add some locks. + + Note: if the program is compiled with the [-rectypes] option, + ill-founded recursive definitions of the form [let rec x = lazy x] + or [let rec x = lazy(lazy(...(lazy x)))] are accepted by the type-checker + and lead, when forced, to ill-formed values that trigger infinite + loops in the garbage collector and other parts of the run-time system. + Without the [-rectypes] option, such ill-founded recursive definitions + are rejected by the type-checker. 
+*) + + +exception Undefined + +(* val force : 'a t -> 'a *) +external force : 'a t -> 'a = "%lazy_force" +(** [force x] forces the suspension [x] and returns its result. + If [x] has already been forced, [Lazy.force x] returns the + same value again without recomputing it. If it raised an exception, + the same exception is raised again. + Raise {!Undefined} if the forcing of [x] tries to force [x] itself + recursively. +*) + +val force_val : 'a t -> 'a +(** [force_val x] forces the suspension [x] and returns its + result. If [x] has already been forced, [force_val x] + returns the same value again without recomputing it. + Raise {!Undefined} if the forcing of [x] tries to force [x] itself + recursively. + If the computation of [x] raises an exception, it is unspecified + whether [force_val x] raises the same exception or {!Undefined}. +*) + +val from_fun : (unit -> 'a) -> 'a t +(** [from_fun f] is the same as [lazy (f ())] but slightly more efficient. + + [from_fun] should only be used if the function [f] is already defined. + In particular it is always less efficient to write + [from_fun (fun () -> expr)] than [lazy expr]. + + @since 4.00.0 *) + +val from_val : 'a -> 'a t +(** [from_val v] returns an already-forced suspension of [v]. + This is for special purposes only and should not be confused with + [lazy (v)]. + @since 4.00.0 *) + +val is_val : 'a t -> bool +(** [is_val x] returns [true] if [x] has already been forced and + did not raise an exception. + @since 4.00.0 *) + +val lazy_from_fun : (unit -> 'a) -> 'a t + [@@ocaml.deprecated "Use Lazy.from_fun instead."] +(** @deprecated synonym for [from_fun]. *) + +val lazy_from_val : 'a -> 'a t + [@@ocaml.deprecated "Use Lazy.from_val instead."] +(** @deprecated synonym for [from_val]. *) + +val lazy_is_val : 'a t -> bool + [@@ocaml.deprecated "Use Lazy.is_val instead."] +(** @deprecated synonym for [is_val]. 
*) diff --git a/res_syntax/compiler-libs-406/lexer.ml b/res_syntax/compiler-libs-406/lexer.ml new file mode 100644 index 0000000000..299bda8484 --- /dev/null +++ b/res_syntax/compiler-libs-406/lexer.ml @@ -0,0 +1,2887 @@ +# 18 "parsing/lexer.mll" + +open Lexing +open Misc +open Parser + +type error = + | Illegal_character of char + | Illegal_escape of string + | Unterminated_comment of Location.t + | Unterminated_string + | Unterminated_string_in_comment of Location.t * Location.t + | Keyword_as_label of string + | Invalid_literal of string + | Invalid_directive of string * string option +;; + +exception Error of error * Location.t;; + +(* The table of keywords *) + +let keyword_table = + create_hashtable 149 [ + "and", AND; + "as", AS; + "assert", ASSERT; + "begin", BEGIN; + "class", CLASS; + "constraint", CONSTRAINT; + "do", DO; + "done", DONE; + "downto", DOWNTO; + "else", ELSE; + "end", END; + "exception", EXCEPTION; + "external", EXTERNAL; + "false", FALSE; + "for", FOR; + "fun", FUN; + "function", FUNCTION; + "functor", FUNCTOR; + "if", IF; + "in", IN; + "include", INCLUDE; + "inherit", INHERIT; + "initializer", INITIALIZER; + "lazy", LAZY; + "let", LET; + "match", MATCH; + "method", METHOD; + "module", MODULE; + "mutable", MUTABLE; + "new", NEW; + "nonrec", NONREC; + "object", OBJECT; + "of", OF; + "open", OPEN; + "or", OR; +(* "parser", PARSER; *) + "private", PRIVATE; + "rec", REC; + "sig", SIG; + "struct", STRUCT; + "then", THEN; + "to", TO; + "true", TRUE; + "try", TRY; + "type", TYPE; + "val", VAL; + "virtual", VIRTUAL; + "when", WHEN; + "while", WHILE; + "with", WITH; + + "lor", INFIXOP3("lor"); (* Should be INFIXOP2 *) + "lxor", INFIXOP3("lxor"); (* Should be INFIXOP2 *) + "mod", INFIXOP3("mod"); + "land", INFIXOP3("land"); + "lsl", INFIXOP4("lsl"); + "lsr", INFIXOP4("lsr"); + "asr", INFIXOP4("asr") +] + +(* To buffer string literals *) + +let string_buffer = Buffer.create 256 +let reset_string_buffer () = Buffer.reset string_buffer +let get_stored_string () = Buffer.contents string_buffer + +let store_string_char c = Buffer.add_char string_buffer c +let store_string_utf_8_uchar u = Buffer.add_utf_8_uchar string_buffer u +let store_string s = Buffer.add_string string_buffer s +let store_lexeme lexbuf = store_string (Lexing.lexeme lexbuf) + +(* To store the position of the beginning of a string and comment *) +let string_start_loc = ref Location.none;; +let comment_start_loc = ref [];; +let in_comment () = !comment_start_loc <> [];; +let is_in_string = ref false +let in_string () = !is_in_string +let print_warnings = ref true + +(* Escaped chars are interpreted in strings unless they are in comments. 
*) +let store_escaped_char lexbuf c = + if in_comment () then store_lexeme lexbuf else store_string_char c + +let store_escaped_uchar lexbuf u = + if in_comment () then store_lexeme lexbuf else store_string_utf_8_uchar u + +let with_comment_buffer comment lexbuf = + let start_loc = Location.curr lexbuf in + comment_start_loc := [start_loc]; + reset_string_buffer (); + let end_loc = comment lexbuf in + let s = get_stored_string () in + reset_string_buffer (); + let loc = { start_loc with Location.loc_end = end_loc.Location.loc_end } in + s, loc + +(* To translate escape sequences *) + +let hex_digit_value d = (* assert (d in '0'..'9' 'a'..'f' 'A'..'F') *) + let d = Char.code d in + if d >= 97 then d - 87 else + if d >= 65 then d - 55 else + d - 48 + +let hex_num_value lexbuf ~first ~last = + let rec loop acc i = match i > last with + | true -> acc + | false -> + let value = hex_digit_value (Lexing.lexeme_char lexbuf i) in + loop (16 * acc + value) (i + 1) + in + loop 0 first + +let char_for_backslash = function + | 'n' -> '\010' + | 'r' -> '\013' + | 'b' -> '\008' + | 't' -> '\009' + | c -> c + +let char_for_decimal_code lexbuf i = + let c = 100 * (Char.code(Lexing.lexeme_char lexbuf i) - 48) + + 10 * (Char.code(Lexing.lexeme_char lexbuf (i+1)) - 48) + + (Char.code(Lexing.lexeme_char lexbuf (i+2)) - 48) in + if (c < 0 || c > 255) then + if in_comment () + then 'x' + else raise (Error(Illegal_escape (Lexing.lexeme lexbuf), + Location.curr lexbuf)) + else Char.chr c + +let char_for_octal_code lexbuf i = + let c = 64 * (Char.code(Lexing.lexeme_char lexbuf i) - 48) + + 8 * (Char.code(Lexing.lexeme_char lexbuf (i+1)) - 48) + + (Char.code(Lexing.lexeme_char lexbuf (i+2)) - 48) in + Char.chr c + +let char_for_hexadecimal_code lexbuf i = + let byte = hex_num_value lexbuf ~first:i ~last:(i+1) in + Char.chr byte + +let uchar_for_uchar_escape lexbuf = + let err e = + raise + (Error (Illegal_escape (Lexing.lexeme lexbuf ^ e), Location.curr lexbuf)) + in + let len = Lexing.lexeme_end lexbuf - Lexing.lexeme_start lexbuf in + let first = 3 (* skip opening \u{ *) in + let last = len - 2 (* skip closing } *) in + let digit_count = last - first + 1 in + match digit_count > 6 with + | true -> err ", too many digits, expected 1 to 6 hexadecimal digits" + | false -> + let cp = hex_num_value lexbuf ~first ~last in + if Uchar.is_valid cp then Uchar.unsafe_of_int cp else + err (", " ^ Printf.sprintf "%X" cp ^ " is not a Unicode scalar value") + +(* recover the name from a LABEL or OPTLABEL token *) + +let get_label_name lexbuf = + let s = Lexing.lexeme lexbuf in + let name = String.sub s 1 (String.length s - 2) in + if Hashtbl.mem keyword_table name then + raise (Error(Keyword_as_label name, Location.curr lexbuf)); + name +;; + +(* Update the current location with file name and line number. 
*) + +let update_loc lexbuf file line absolute chars = + let pos = lexbuf.lex_curr_p in + let new_file = match file with + | None -> pos.pos_fname + | Some s -> s + in + lexbuf.lex_curr_p <- { pos with + pos_fname = new_file; + pos_lnum = if absolute then line else pos.pos_lnum + line; + pos_bol = pos.pos_cnum - chars; + } +;; + +let preprocessor = ref None + +let escaped_newlines = ref false + +(* Warn about Latin-1 characters used in idents *) + +let warn_latin1 lexbuf = + Location.deprecated (Location.curr lexbuf)"ISO-Latin1 characters in identifiers" + +let handle_docstrings = ref true +let comment_list = ref [] + +let add_comment com = + comment_list := com :: !comment_list + +let add_docstring_comment ds = + let com = + ("*" ^ Docstrings.docstring_body ds, Docstrings.docstring_loc ds) + in + add_comment com + +let comments () = List.rev !comment_list + +(* Error report *) + +open Format + +let report_error ppf = function + | Illegal_character c -> + fprintf ppf "Illegal character (%s)" (Char.escaped c) + | Illegal_escape s -> + fprintf ppf "Illegal backslash escape in string or character (%s)" s + | Unterminated_comment _ -> + fprintf ppf "Comment not terminated" + | Unterminated_string -> + fprintf ppf "String literal not terminated" + | Unterminated_string_in_comment (_, loc) -> + fprintf ppf "This comment contains an unterminated string literal@.\ + %aString literal begins here" + Location.print_error loc + | Keyword_as_label kwd -> + fprintf ppf "`%s' is a keyword, it cannot be used as label name" kwd + | Invalid_literal s -> + fprintf ppf "Invalid literal %s" s + | Invalid_directive (dir, explanation) -> + fprintf ppf "Invalid lexer directive %S" dir; + begin match explanation with + | None -> () + | Some expl -> fprintf ppf ": %s" expl + end + +let () = + Location.register_error_of_exn + (function + | Error (err, loc) -> + Some (Location.error_of_printer loc report_error err) + | _ -> + None + ) + + +# 268 "parsing/lexer.ml" +let __ocaml_lex_tables = { + Lexing.lex_base = + "\000\000\162\255\163\255\224\000\003\001\038\001\073\001\108\001\ + \143\001\186\255\178\001\215\001\194\255\091\000\252\001\031\002\ + \068\000\071\000\065\002\100\002\212\255\214\255\217\255\135\002\ + \230\002\009\003\088\000\255\000\039\003\236\255\123\003\207\003\ + \035\004\243\004\195\005\147\006\114\007\206\007\158\008\122\000\ + \254\255\001\000\005\000\255\255\006\000\007\000\125\009\155\009\ + \107\010\250\255\249\255\059\011\011\012\247\255\246\255\219\012\ + \047\013\131\013\215\013\043\014\127\014\211\014\039\015\123\015\ + \207\015\035\016\087\000\119\016\203\016\031\017\115\017\199\017\ + \108\000\192\255\235\255\007\003\034\018\106\000\107\000\011\000\ + \234\255\233\255\228\255\152\002\099\000\118\000\113\000\232\255\ + \128\000\147\000\231\255\224\000\003\001\148\000\230\255\110\004\ + \149\000\229\255\148\000\224\255\217\000\223\255\222\000\034\018\ + \222\255\073\018\101\005\009\003\221\255\012\000\014\001\080\001\ + \115\001\024\001\221\255\013\000\119\018\158\018\193\018\231\018\ + \010\019\209\255\204\255\205\255\206\255\202\255\045\019\154\000\ + \183\000\195\255\196\255\197\255\217\000\182\255\180\255\189\255\ + \080\019\185\255\187\255\115\019\150\019\185\019\220\019\130\005\ + \243\255\244\255\017\000\245\255\174\001\223\005\253\255\248\000\ + \249\000\255\255\254\255\252\255\005\006\238\019\003\001\004\001\ + \018\000\251\255\250\255\249\255\222\006\026\003\005\001\248\255\ + \036\003\008\001\247\255\066\008\020\001\246\255\059\001\234\001\ + 
\245\255\246\255\247\255\060\001\055\020\255\255\248\255\193\000\ + \233\008\038\001\133\004\253\255\073\001\094\001\113\001\143\004\ + \252\255\192\002\027\004\251\255\230\009\250\255\182\010\089\020\ + \249\255\129\001\130\001\252\255\085\007\254\255\255\255\146\001\ + \147\001\253\255\177\007\033\001\044\001\148\001\151\001\045\001\ + \153\001\044\001\019\000\255\255"; + Lexing.lex_backtrk = + "\255\255\255\255\255\255\090\000\089\000\086\000\085\000\078\000\ + \076\000\255\255\067\000\064\000\255\255\057\000\056\000\054\000\ + \052\000\048\000\045\000\081\000\255\255\255\255\255\255\036\000\ + \035\000\042\000\040\000\039\000\062\000\255\255\014\000\014\000\ + \013\000\012\000\011\000\010\000\007\000\004\000\003\000\002\000\ + \255\255\093\000\093\000\255\255\255\255\255\255\084\000\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\018\000\ + \018\000\016\000\015\000\018\000\015\000\015\000\014\000\016\000\ + \015\000\016\000\255\255\017\000\017\000\014\000\014\000\016\000\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\027\000\027\000\027\000\027\000\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\028\000\255\255\029\000\255\255\030\000\088\000\ + \255\255\091\000\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\037\000\087\000\082\000\044\000\ + \047\000\255\255\255\255\255\255\255\255\255\255\055\000\074\000\ + \071\000\255\255\255\255\255\255\072\000\255\255\255\255\255\255\ + \065\000\255\255\255\255\083\000\077\000\080\000\079\000\255\255\ + \255\255\255\255\012\000\255\255\012\000\012\000\255\255\012\000\ + \012\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\010\000\010\000\255\255\255\255\007\000\ + \007\000\007\000\007\000\255\255\001\000\007\000\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\003\000\255\255\255\255\003\000\ + \255\255\255\255\255\255\002\000\255\255\255\255\001\000\255\255\ + \255\255\255\255\255\255\255\255"; + Lexing.lex_default = + "\001\000\000\000\000\000\255\255\255\255\255\255\255\255\255\255\ + \255\255\000\000\255\255\255\255\000\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\000\000\000\000\000\000\255\255\ + \255\255\255\255\255\255\077\000\255\255\000\000\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \000\000\255\255\255\255\000\000\255\255\255\255\255\255\255\255\ + \255\255\000\000\000\000\255\255\255\255\000\000\000\000\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\000\000\000\000\255\255\082\000\255\255\255\255\255\255\ + \000\000\000\000\000\000\255\255\255\255\255\255\255\255\000\000\ + \255\255\255\255\000\000\255\255\255\255\255\255\000\000\255\255\ + \255\255\000\000\255\255\000\000\255\255\000\000\255\255\255\255\ + \000\000\255\255\110\000\255\255\000\000\255\255\110\000\111\000\ + \110\000\113\000\000\000\255\255\255\255\255\255\255\255\255\255\ + \255\255\000\000\000\000\000\000\000\000\000\000\255\255\255\255\ + \255\255\000\000\000\000\000\000\255\255\000\000\000\000\000\000\ + \255\255\000\000\000\000\255\255\255\255\255\255\255\255\144\000\ + 
\000\000\000\000\255\255\000\000\158\000\255\255\000\000\255\255\ + \255\255\000\000\000\000\000\000\255\255\255\255\255\255\255\255\ + \255\255\000\000\000\000\000\000\255\255\255\255\255\255\000\000\ + \255\255\255\255\000\000\255\255\255\255\000\000\255\255\176\000\ + \000\000\000\000\000\000\255\255\182\000\000\000\000\000\255\255\ + \255\255\255\255\255\255\000\000\255\255\255\255\255\255\255\255\ + \000\000\255\255\255\255\000\000\255\255\000\000\255\255\255\255\ + \000\000\255\255\203\000\000\000\255\255\000\000\000\000\255\255\ + \255\255\000\000\255\255\255\255\255\255\213\000\216\000\255\255\ + \216\000\255\255\255\255\000\000"; + Lexing.lex_trans = + "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\039\000\040\000\040\000\039\000\041\000\045\000\043\000\ + \043\000\040\000\044\000\044\000\045\000\078\000\108\000\114\000\ + \079\000\109\000\115\000\145\000\159\000\219\000\174\000\160\000\ + \039\000\008\000\029\000\024\000\006\000\004\000\023\000\027\000\ + \026\000\021\000\025\000\007\000\020\000\019\000\018\000\003\000\ + \031\000\030\000\030\000\030\000\030\000\030\000\030\000\030\000\ + \030\000\030\000\017\000\016\000\015\000\014\000\010\000\036\000\ + \005\000\033\000\033\000\033\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\033\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\033\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\013\000\042\000\012\000\005\000\038\000\ + \022\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\028\000\011\000\009\000\037\000\125\000\ + \127\000\124\000\098\000\039\000\123\000\122\000\039\000\065\000\ + \065\000\065\000\065\000\065\000\065\000\065\000\065\000\065\000\ + \065\000\081\000\080\000\091\000\091\000\091\000\091\000\130\000\ + \087\000\129\000\039\000\128\000\071\000\071\000\071\000\071\000\ + \071\000\071\000\071\000\071\000\071\000\071\000\088\000\088\000\ + \088\000\088\000\088\000\088\000\088\000\088\000\088\000\088\000\ + \089\000\089\000\089\000\089\000\089\000\089\000\089\000\089\000\ + \089\000\089\000\090\000\094\000\097\000\099\000\100\000\134\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\131\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\132\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \002\000\003\000\101\000\102\000\003\000\003\000\003\000\101\000\ + \102\000\078\000\003\000\003\000\079\000\003\000\003\000\003\000\ + \092\000\092\000\092\000\092\000\092\000\092\000\092\000\092\000\ + \108\000\133\000\003\000\109\000\003\000\003\000\003\000\003\000\ + \003\000\154\000\114\000\153\000\003\000\115\000\255\255\003\000\ + \003\000\003\000\163\000\162\000\167\000\003\000\003\000\170\000\ + \003\000\003\000\003\000\093\000\093\000\093\000\093\000\093\000\ + \093\000\093\000\093\000\173\000\198\000\003\000\003\000\003\000\ + \003\000\003\000\003\000\003\000\212\000\145\000\178\000\005\000\ + \174\000\201\000\005\000\005\000\005\000\213\000\217\000\218\000\ + 
\005\000\005\000\188\000\005\000\005\000\005\000\193\000\193\000\ + \193\000\193\000\108\000\076\000\003\000\109\000\003\000\000\000\ + \005\000\003\000\005\000\005\000\005\000\005\000\005\000\000\000\ + \188\000\188\000\006\000\190\000\000\000\006\000\006\000\006\000\ + \000\000\000\000\113\000\006\000\006\000\000\000\006\000\006\000\ + \006\000\000\000\000\000\188\000\112\000\108\000\190\000\003\000\ + \109\000\003\000\000\000\006\000\005\000\006\000\006\000\006\000\ + \006\000\006\000\000\000\178\000\206\000\117\000\201\000\207\000\ + \117\000\117\000\117\000\112\000\000\000\111\000\117\000\117\000\ + \000\000\117\000\142\000\117\000\206\000\206\000\214\000\208\000\ + \208\000\215\000\005\000\215\000\005\000\000\000\117\000\006\000\ + \117\000\141\000\117\000\117\000\117\000\000\000\000\000\000\000\ + \139\000\000\000\000\000\139\000\139\000\139\000\000\000\000\000\ + \159\000\139\000\139\000\160\000\139\000\139\000\139\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\006\000\000\000\006\000\ + \000\000\139\000\117\000\139\000\140\000\139\000\139\000\139\000\ + \000\000\000\000\000\000\006\000\000\000\161\000\006\000\006\000\ + \006\000\000\000\000\000\000\000\006\000\006\000\000\000\006\000\ + \006\000\006\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \117\000\000\000\117\000\000\000\006\000\139\000\006\000\006\000\ + \006\000\006\000\006\000\000\000\178\000\000\000\000\000\179\000\ + \006\000\000\000\000\000\006\000\006\000\006\000\204\000\255\255\ + \000\000\006\000\006\000\000\000\006\000\006\000\006\000\000\000\ + \000\000\000\000\157\000\139\000\181\000\139\000\255\255\138\000\ + \006\000\006\000\000\000\006\000\006\000\006\000\006\000\006\000\ + \255\255\000\000\000\000\000\000\000\000\006\000\000\000\000\000\ + \006\000\006\000\006\000\000\000\000\000\000\000\006\000\006\000\ + \000\000\006\000\006\000\006\000\000\000\000\000\006\000\137\000\ + \006\000\000\000\000\000\000\000\135\000\006\000\006\000\000\000\ + \006\000\006\000\006\000\006\000\006\000\000\000\000\000\000\000\ + \006\000\000\000\000\000\006\000\006\000\006\000\180\000\000\000\ + \000\000\006\000\006\000\000\000\126\000\006\000\006\000\000\000\ + \255\255\000\000\000\000\136\000\000\000\006\000\000\000\000\000\ + \000\000\006\000\006\000\006\000\006\000\006\000\006\000\006\000\ + \000\000\000\000\120\000\000\000\000\000\120\000\120\000\120\000\ + \000\000\000\000\000\000\120\000\120\000\000\000\120\000\121\000\ + \120\000\000\000\000\000\255\255\000\000\000\000\000\000\000\000\ + \006\000\000\000\006\000\120\000\000\000\006\000\120\000\120\000\ + \120\000\120\000\205\000\000\000\000\000\117\000\000\000\000\000\ + \117\000\117\000\117\000\000\000\000\000\000\000\117\000\117\000\ + \000\000\117\000\118\000\117\000\255\255\000\000\000\000\255\255\ + \000\000\255\255\000\000\006\000\000\000\006\000\117\000\120\000\ + \117\000\117\000\119\000\117\000\117\000\000\000\000\000\000\000\ + \006\000\000\000\000\000\006\000\006\000\116\000\255\255\000\000\ + \000\000\006\000\006\000\000\000\006\000\006\000\006\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\120\000\000\000\120\000\ + \000\000\006\000\117\000\006\000\006\000\006\000\006\000\006\000\ + \095\000\095\000\095\000\095\000\095\000\095\000\095\000\095\000\ + \095\000\095\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\095\000\095\000\095\000\095\000\095\000\095\000\000\000\ + \117\000\000\000\117\000\000\000\000\000\006\000\000\000\000\000\ + \000\000\000\000\177\000\000\000\000\000\000\000\000\000\107\000\ + 
\194\000\194\000\194\000\194\000\194\000\194\000\194\000\194\000\ + \000\000\095\000\095\000\095\000\095\000\095\000\095\000\000\000\ + \000\000\000\000\000\000\006\000\000\000\006\000\107\000\105\000\ + \000\000\105\000\105\000\105\000\105\000\000\000\000\000\000\000\ + \105\000\105\000\107\000\105\000\105\000\105\000\106\000\106\000\ + \106\000\106\000\106\000\106\000\106\000\106\000\106\000\106\000\ + \105\000\000\000\105\000\105\000\105\000\105\000\105\000\000\000\ + \000\000\107\000\003\000\000\000\000\000\003\000\003\000\003\000\ + \000\000\000\000\104\000\103\000\003\000\000\000\003\000\003\000\ + \003\000\106\000\106\000\106\000\106\000\106\000\106\000\106\000\ + \106\000\106\000\106\000\003\000\105\000\003\000\003\000\003\000\ + \003\000\003\000\168\000\168\000\168\000\168\000\168\000\168\000\ + \168\000\168\000\168\000\168\000\169\000\169\000\169\000\169\000\ + \169\000\169\000\169\000\169\000\169\000\169\000\000\000\000\000\ + \000\000\000\000\105\000\073\000\105\000\000\000\075\000\003\000\ + \075\000\075\000\075\000\075\000\075\000\075\000\075\000\075\000\ + \075\000\075\000\075\000\075\000\075\000\075\000\075\000\075\000\ + \075\000\075\000\075\000\075\000\075\000\075\000\075\000\075\000\ + \075\000\075\000\000\000\074\000\000\000\003\000\075\000\003\000\ + \075\000\075\000\075\000\075\000\075\000\075\000\075\000\075\000\ + \075\000\075\000\075\000\075\000\075\000\075\000\075\000\075\000\ + \075\000\075\000\075\000\075\000\075\000\075\000\075\000\075\000\ + \075\000\075\000\055\000\074\000\000\000\000\000\000\000\000\000\ + \000\000\057\000\000\000\030\000\030\000\030\000\030\000\030\000\ + \030\000\030\000\030\000\030\000\030\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\055\000\055\000\055\000\055\000\ + \056\000\055\000\058\000\058\000\058\000\058\000\058\000\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\058\000\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\000\000\000\000\ + \000\000\000\000\030\000\000\000\055\000\055\000\055\000\055\000\ + \056\000\055\000\058\000\058\000\058\000\058\000\058\000\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\058\000\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\055\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\057\000\000\000\030\000\ + \030\000\030\000\030\000\030\000\030\000\030\000\030\000\030\000\ + \030\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \055\000\059\000\055\000\055\000\056\000\055\000\058\000\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\060\000\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\058\000\061\000\ + \058\000\058\000\000\000\000\000\000\000\000\000\030\000\000\000\ + \055\000\059\000\055\000\055\000\056\000\055\000\058\000\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\060\000\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\058\000\061\000\ + \058\000\058\000\032\000\195\000\195\000\195\000\195\000\195\000\ + \195\000\195\000\195\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\000\000\000\000\ + \000\000\000\000\032\000\000\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + 
\032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\096\000\096\000\ + \096\000\096\000\096\000\096\000\096\000\096\000\096\000\096\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\096\000\ + \096\000\096\000\096\000\096\000\096\000\191\000\191\000\191\000\ + \191\000\191\000\191\000\191\000\191\000\191\000\191\000\192\000\ + \192\000\192\000\192\000\192\000\192\000\192\000\192\000\192\000\ + \192\000\000\000\000\000\000\000\000\000\000\000\000\000\096\000\ + \096\000\096\000\096\000\096\000\096\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\000\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\033\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\033\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\033\000\033\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\033\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\033\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\033\000\033\000\033\000\000\000\000\000\ + \000\000\000\000\033\000\000\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\033\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\033\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\033\000\033\000\033\000\112\000\108\000\ + \000\000\000\000\109\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\112\000\000\000\111\000\ + \000\000\000\000\000\000\000\000\145\000\000\000\000\000\146\000\ + \000\000\000\000\000\000\000\000\000\000\106\000\106\000\106\000\ + \106\000\106\000\106\000\106\000\106\000\106\000\106\000\000\000\ + \000\000\000\000\000\000\000\000\150\000\000\000\000\000\000\000\ + \000\000\148\000\152\000\000\000\151\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\000\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\034\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\149\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\000\000\000\000\ + \000\000\000\000\034\000\000\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + 
\034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\156\000\000\000\ + \156\000\156\000\156\000\156\000\156\000\156\000\156\000\156\000\ + \156\000\156\000\156\000\156\000\156\000\156\000\156\000\156\000\ + \156\000\156\000\156\000\156\000\156\000\156\000\156\000\156\000\ + \156\000\156\000\000\000\155\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\156\000\255\255\156\000\156\000\ + \156\000\156\000\156\000\156\000\156\000\156\000\156\000\156\000\ + \156\000\156\000\156\000\156\000\156\000\156\000\156\000\156\000\ + \156\000\156\000\156\000\156\000\156\000\156\000\156\000\156\000\ + \000\000\155\000\147\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\000\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\035\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\000\000\000\000\ + \000\000\000\000\035\000\000\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\171\000\171\000\ + \171\000\171\000\171\000\171\000\171\000\171\000\171\000\171\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\171\000\ + \171\000\171\000\171\000\171\000\171\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\171\000\ + \171\000\171\000\171\000\171\000\171\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\000\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\000\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\046\000\000\000\000\000\046\000\046\000\ + \046\000\000\000\000\000\000\000\046\000\046\000\000\000\046\000\ + \046\000\046\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\046\000\000\000\046\000\046\000\ + \046\000\046\000\046\000\000\000\210\000\000\000\210\000\210\000\ + \210\000\210\000\210\000\210\000\210\000\210\000\210\000\210\000\ + \210\000\210\000\210\000\210\000\210\000\210\000\210\000\210\000\ + \210\000\210\000\210\000\210\000\210\000\210\000\210\000\210\000\ + 
\046\000\052\000\209\000\052\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\052\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\052\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\052\000\052\000\000\000\046\000\046\000\ + \046\000\000\000\046\000\046\000\046\000\000\000\000\000\000\000\ + \046\000\046\000\000\000\046\000\046\000\046\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \046\000\000\000\046\000\046\000\046\000\046\000\046\000\000\000\ + \210\000\000\000\210\000\210\000\210\000\210\000\210\000\210\000\ + \210\000\210\000\210\000\210\000\210\000\210\000\210\000\210\000\ + \210\000\210\000\210\000\210\000\210\000\210\000\210\000\210\000\ + \210\000\210\000\210\000\210\000\046\000\048\000\209\000\048\000\ + \048\000\048\000\048\000\048\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\048\000\048\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\048\000\048\000\048\000\048\000\048\000\048\000\ + \048\000\000\000\046\000\000\000\046\000\000\000\000\000\000\000\ + \000\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\000\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\172\000\172\000\172\000\172\000\172\000\172\000\ + \172\000\172\000\172\000\172\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\172\000\172\000\172\000\172\000\172\000\ + \172\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\172\000\172\000\172\000\172\000\172\000\ + \172\000\000\000\000\000\000\000\000\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\035\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\000\000\000\000\000\000\000\000\035\000\000\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\196\000\196\000\196\000\196\000\196\000\196\000\196\000\ + \196\000\196\000\196\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\196\000\196\000\196\000\196\000\196\000\196\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\196\000\196\000\196\000\196\000\196\000\196\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + 
\034\000\034\000\034\000\034\000\034\000\000\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\000\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\046\000\000\000\ + \000\000\046\000\046\000\046\000\000\000\000\000\000\000\046\000\ + \046\000\000\000\046\000\046\000\046\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\046\000\ + \000\000\046\000\046\000\046\000\046\000\046\000\000\000\000\000\ + \000\000\000\000\047\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\050\000\000\000\000\000\ + \000\000\000\000\000\000\046\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\000\000\000\000\ + \000\000\046\000\047\000\046\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\197\000\197\000\ + \197\000\197\000\197\000\197\000\197\000\197\000\197\000\197\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\197\000\ + \197\000\197\000\197\000\197\000\197\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\197\000\ + \197\000\197\000\197\000\197\000\197\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\000\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\048\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\048\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\048\000\048\000\048\000\049\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\048\000\048\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\048\000\048\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\048\000\048\000\048\000\048\000\000\000\000\000\ + \000\000\000\000\048\000\000\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\048\000\048\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\048\000\048\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\048\000\048\000\048\000\048\000\199\000\199\000\ + \199\000\199\000\199\000\199\000\199\000\199\000\199\000\199\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\199\000\ + \199\000\199\000\199\000\199\000\199\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + 
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\199\000\ + \199\000\199\000\199\000\199\000\199\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\000\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\051\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\054\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\000\000\000\000\ + \000\000\000\000\051\000\000\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\000\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\052\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\052\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\052\000\052\000\053\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\052\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\052\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\052\000\052\000\052\000\000\000\000\000\ + \000\000\000\000\052\000\000\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\052\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\052\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\052\000\052\000\052\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + 
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\000\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\055\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\000\000\000\000\ + \000\000\000\000\055\000\000\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\000\000\ + \000\000\000\000\072\000\000\000\072\000\000\000\000\000\071\000\ + \071\000\071\000\071\000\071\000\071\000\071\000\071\000\071\000\ + \071\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\000\000\000\000\000\000\000\000\055\000\000\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\057\000\057\000\057\000\057\000\057\000\ + \057\000\057\000\057\000\057\000\057\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\055\000\055\000\055\000\055\000\ + \056\000\055\000\067\000\067\000\067\000\067\000\067\000\067\000\ + \067\000\067\000\067\000\067\000\067\000\067\000\067\000\067\000\ + \067\000\067\000\067\000\067\000\067\000\067\000\000\000\000\000\ + \000\000\000\000\057\000\000\000\055\000\055\000\055\000\055\000\ + \056\000\055\000\067\000\067\000\067\000\067\000\067\000\067\000\ + \067\000\067\000\067\000\067\000\067\000\067\000\067\000\067\000\ + \067\000\067\000\067\000\067\000\067\000\067\000\055\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\000\000\000\000\000\000\000\000\055\000\000\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + 
\055\000\055\000\055\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\070\000\070\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\000\000\000\000\ + \000\000\000\000\055\000\000\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\069\000\ + \069\000\069\000\069\000\069\000\069\000\069\000\069\000\055\000\ + \055\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\000\000\000\000\000\000\000\000\055\000\000\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\062\000\062\000\062\000\062\000\062\000\ + \062\000\062\000\062\000\062\000\062\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\062\000\062\000\062\000\062\000\ + \062\000\062\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\000\000\000\000\ + \000\000\000\000\055\000\000\000\062\000\062\000\062\000\062\000\ + \062\000\062\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\063\000\000\000\062\000\ + \062\000\062\000\062\000\062\000\062\000\062\000\062\000\062\000\ + \062\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \062\000\062\000\062\000\062\000\062\000\062\000\058\000\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\058\000\064\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\058\000\058\000\ + \058\000\058\000\000\000\000\000\000\000\000\000\062\000\000\000\ + \062\000\062\000\062\000\062\000\062\000\062\000\058\000\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\058\000\064\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\058\000\058\000\ + \058\000\058\000\055\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\063\000\063\000\063\000\063\000\063\000\ + \063\000\063\000\063\000\063\000\063\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\063\000\063\000\063\000\063\000\ + \063\000\063\000\067\000\067\000\067\000\067\000\067\000\067\000\ + \067\000\067\000\067\000\068\000\067\000\067\000\067\000\067\000\ + \067\000\067\000\067\000\067\000\067\000\067\000\000\000\000\000\ + \000\000\000\000\063\000\000\000\063\000\063\000\063\000\063\000\ + \063\000\063\000\067\000\067\000\067\000\067\000\067\000\067\000\ + \067\000\067\000\067\000\068\000\067\000\067\000\067\000\067\000\ + 
\067\000\067\000\067\000\067\000\067\000\067\000\055\000\000\000\ + \000\000\000\000\066\000\000\000\066\000\000\000\000\000\065\000\ + \065\000\065\000\065\000\065\000\065\000\065\000\065\000\065\000\ + \065\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\000\000\000\000\000\000\000\000\055\000\000\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\065\000\065\000\065\000\065\000\065\000\ + \065\000\065\000\065\000\065\000\065\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\067\000\067\000\067\000\067\000\067\000\067\000\ + \067\000\067\000\067\000\067\000\067\000\067\000\067\000\067\000\ + \067\000\067\000\067\000\067\000\067\000\067\000\000\000\000\000\ + \000\000\000\000\065\000\000\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\067\000\067\000\067\000\067\000\067\000\067\000\ + \067\000\067\000\067\000\067\000\067\000\067\000\067\000\067\000\ + \067\000\067\000\067\000\067\000\067\000\067\000\055\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\000\000\000\000\000\000\000\000\055\000\000\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\000\000\000\000\000\000\066\000\000\000\ + \066\000\000\000\000\000\065\000\065\000\065\000\065\000\065\000\ + \065\000\065\000\065\000\065\000\065\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\000\000\000\000\ + \000\000\000\000\055\000\000\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\069\000\ + \069\000\069\000\069\000\069\000\069\000\069\000\069\000\055\000\ + \055\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\058\000\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\058\000\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\058\000\058\000\ + \058\000\058\000\000\000\000\000\000\000\000\000\069\000\000\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\058\000\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\058\000\058\000\ + 
\058\000\058\000\058\000\058\000\058\000\058\000\058\000\058\000\ + \058\000\058\000\055\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\070\000\070\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\058\000\058\000\058\000\058\000\058\000\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\058\000\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\000\000\000\000\ + \000\000\000\000\070\000\000\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\058\000\058\000\058\000\058\000\058\000\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\058\000\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\055\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\071\000\ + \071\000\071\000\071\000\071\000\071\000\071\000\071\000\071\000\ + \071\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\067\000\067\000\ + \067\000\067\000\067\000\067\000\067\000\067\000\067\000\067\000\ + \067\000\067\000\067\000\067\000\067\000\067\000\067\000\067\000\ + \067\000\067\000\000\000\000\000\000\000\000\000\071\000\000\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\067\000\067\000\ + \067\000\067\000\067\000\067\000\067\000\067\000\067\000\067\000\ + \067\000\067\000\067\000\067\000\067\000\067\000\067\000\067\000\ + \067\000\067\000\086\000\103\000\086\000\000\000\103\000\103\000\ + \103\000\086\000\000\000\000\000\103\000\103\000\000\000\103\000\ + \103\000\103\000\085\000\085\000\085\000\085\000\085\000\085\000\ + \085\000\085\000\085\000\085\000\103\000\000\000\103\000\103\000\ + \103\000\103\000\103\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\105\000\000\000\105\000\105\000\105\000\105\000\ + \000\000\000\000\000\000\105\000\105\000\000\000\105\000\105\000\ + \105\000\000\000\000\000\000\000\000\000\000\000\086\000\000\000\ + \103\000\000\000\000\000\105\000\086\000\105\000\105\000\105\000\ + \105\000\105\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \086\000\084\000\000\000\000\000\086\000\000\000\086\000\000\000\ + \006\000\000\000\083\000\006\000\006\000\006\000\103\000\000\000\ + \103\000\006\000\006\000\000\000\006\000\006\000\006\000\105\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\006\000\000\000\006\000\006\000\006\000\006\000\006\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\117\000\ + \000\000\000\000\117\000\117\000\117\000\105\000\000\000\105\000\ + \117\000\117\000\000\000\117\000\117\000\117\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\006\000\000\000\000\000\ + \117\000\000\000\117\000\117\000\117\000\117\000\117\000\000\000\ + \000\000\000\000\117\000\000\000\000\000\117\000\117\000\117\000\ + \000\000\000\000\000\000\117\000\117\000\000\000\117\000\117\000\ + \117\000\000\000\000\000\006\000\000\000\006\000\000\000\000\000\ + \000\000\000\000\000\000\117\000\117\000\117\000\117\000\117\000\ + \117\000\117\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \117\000\000\000\000\000\117\000\117\000\117\000\000\000\000\000\ + \000\000\117\000\117\000\000\000\117\000\117\000\117\000\000\000\ + \000\000\000\000\117\000\000\000\117\000\000\000\000\000\117\000\ + \000\000\117\000\255\255\117\000\117\000\117\000\117\000\117\000\ + \000\000\000\000\000\000\120\000\000\000\000\000\120\000\120\000\ + 
\120\000\000\000\000\000\000\000\120\000\120\000\000\000\120\000\ + \120\000\120\000\000\000\000\000\000\000\117\000\000\000\117\000\ + \000\000\000\000\000\000\000\000\120\000\117\000\120\000\120\000\ + \120\000\120\000\120\000\000\000\000\000\000\000\006\000\000\000\ + \000\000\006\000\006\000\006\000\000\000\000\000\000\000\006\000\ + \006\000\000\000\006\000\006\000\006\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\117\000\000\000\117\000\000\000\006\000\ + \120\000\006\000\006\000\006\000\006\000\006\000\000\000\000\000\ + \000\000\006\000\000\000\000\000\006\000\006\000\006\000\000\000\ + \000\000\000\000\006\000\006\000\000\000\006\000\006\000\006\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\120\000\000\000\ + \120\000\000\000\006\000\006\000\006\000\006\000\006\000\006\000\ + \006\000\000\000\000\000\000\000\139\000\000\000\000\000\139\000\ + \139\000\139\000\000\000\000\000\000\000\139\000\139\000\000\000\ + \139\000\139\000\139\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\006\000\000\000\006\000\000\000\139\000\006\000\139\000\ + \139\000\139\000\139\000\139\000\000\000\000\000\000\000\139\000\ + \000\000\000\000\139\000\139\000\139\000\000\000\000\000\000\000\ + \139\000\139\000\000\000\139\000\139\000\139\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\006\000\000\000\006\000\000\000\ + \139\000\139\000\139\000\139\000\139\000\139\000\139\000\000\000\ + \000\000\000\000\117\000\000\000\000\000\117\000\117\000\117\000\ + \000\000\000\000\000\000\117\000\117\000\000\000\117\000\117\000\ + \117\000\000\000\000\000\000\000\000\000\000\000\000\000\139\000\ + \000\000\139\000\000\000\117\000\139\000\117\000\117\000\117\000\ + \117\000\117\000\000\000\000\000\000\000\117\000\000\000\000\000\ + \117\000\117\000\117\000\000\000\000\000\000\000\117\000\117\000\ + \000\000\117\000\117\000\117\000\000\000\000\000\166\000\000\000\ + \166\000\000\000\139\000\000\000\139\000\166\000\117\000\117\000\ + \117\000\117\000\117\000\117\000\117\000\000\000\165\000\165\000\ + \165\000\165\000\165\000\165\000\165\000\165\000\165\000\165\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\117\000\000\000\117\000\ + \000\000\000\000\117\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\188\000\000\000\000\000\189\000\000\000\000\000\000\000\ + \000\000\000\000\166\000\000\000\000\000\000\000\000\000\000\000\ + \166\000\000\000\000\000\000\000\000\000\000\000\000\000\187\000\ + \117\000\187\000\117\000\000\000\166\000\000\000\187\000\000\000\ + \166\000\000\000\166\000\000\000\000\000\000\000\164\000\186\000\ + \186\000\186\000\186\000\186\000\186\000\186\000\186\000\186\000\ + \186\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\199\000\199\000\199\000\199\000\199\000\199\000\199\000\ + \199\000\199\000\199\000\187\000\000\000\000\000\000\000\000\000\ + \000\000\187\000\199\000\199\000\199\000\199\000\199\000\199\000\ + \000\000\000\000\000\000\000\000\000\000\187\000\185\000\000\000\ + \000\000\187\000\000\000\187\000\183\000\000\000\000\000\184\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\199\000\199\000\199\000\199\000\199\000\199\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + 
\000\000\000\000\000\000\000\000\000\000\000\000\200\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\255\255\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000"; + Lexing.lex_check = + "\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\000\000\000\000\041\000\000\000\000\000\041\000\042\000\ + \044\000\045\000\042\000\044\000\045\000\079\000\109\000\115\000\ + \079\000\109\000\115\000\146\000\160\000\218\000\146\000\160\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\016\000\ + \013\000\017\000\026\000\039\000\017\000\017\000\039\000\066\000\ + \066\000\066\000\066\000\066\000\066\000\066\000\066\000\066\000\ + \066\000\077\000\078\000\084\000\084\000\084\000\084\000\013\000\ + \086\000\013\000\039\000\013\000\072\000\072\000\072\000\072\000\ + \072\000\072\000\072\000\072\000\072\000\072\000\085\000\085\000\ + \085\000\085\000\085\000\085\000\085\000\085\000\085\000\085\000\ + \088\000\088\000\088\000\088\000\088\000\088\000\088\000\088\000\ + \088\000\088\000\089\000\093\000\096\000\098\000\098\000\127\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\013\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\128\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\003\000\100\000\100\000\003\000\003\000\003\000\102\000\ + \102\000\027\000\003\000\003\000\027\000\003\000\003\000\003\000\ + 
\091\000\091\000\091\000\091\000\091\000\091\000\091\000\091\000\ + \110\000\132\000\003\000\110\000\003\000\003\000\003\000\003\000\ + \003\000\151\000\113\000\152\000\004\000\113\000\027\000\004\000\ + \004\000\004\000\158\000\159\000\166\000\004\000\004\000\169\000\ + \004\000\004\000\004\000\092\000\092\000\092\000\092\000\092\000\ + \092\000\092\000\092\000\172\000\183\000\004\000\003\000\004\000\ + \004\000\004\000\004\000\004\000\211\000\174\000\179\000\005\000\ + \174\000\179\000\005\000\005\000\005\000\212\000\215\000\217\000\ + \005\000\005\000\188\000\005\000\005\000\005\000\185\000\185\000\ + \185\000\185\000\111\000\027\000\003\000\111\000\003\000\255\255\ + \005\000\004\000\005\000\005\000\005\000\005\000\005\000\255\255\ + \189\000\188\000\006\000\189\000\255\255\006\000\006\000\006\000\ + \255\255\255\255\111\000\006\000\006\000\255\255\006\000\006\000\ + \006\000\255\255\255\255\190\000\112\000\112\000\190\000\004\000\ + \112\000\004\000\255\255\006\000\005\000\006\000\006\000\006\000\ + \006\000\006\000\255\255\201\000\202\000\007\000\201\000\202\000\ + \007\000\007\000\007\000\112\000\255\255\112\000\007\000\007\000\ + \255\255\007\000\007\000\007\000\207\000\208\000\213\000\207\000\ + \208\000\214\000\005\000\216\000\005\000\255\255\007\000\006\000\ + \007\000\007\000\007\000\007\000\007\000\255\255\255\255\255\255\ + \008\000\255\255\255\255\008\000\008\000\008\000\255\255\255\255\ + \148\000\008\000\008\000\148\000\008\000\008\000\008\000\255\255\ + \255\255\255\255\255\255\255\255\255\255\006\000\255\255\006\000\ + \255\255\008\000\007\000\008\000\008\000\008\000\008\000\008\000\ + \255\255\255\255\255\255\010\000\255\255\148\000\010\000\010\000\ + \010\000\255\255\255\255\255\255\010\000\010\000\255\255\010\000\ + \010\000\010\000\255\255\255\255\255\255\255\255\255\255\255\255\ + \007\000\255\255\007\000\255\255\010\000\008\000\010\000\010\000\ + \010\000\010\000\010\000\255\255\175\000\255\255\255\255\175\000\ + \011\000\255\255\255\255\011\000\011\000\011\000\202\000\027\000\ + \255\255\011\000\011\000\255\255\011\000\011\000\011\000\255\255\ + \255\255\255\255\148\000\008\000\175\000\008\000\110\000\010\000\ + \010\000\011\000\255\255\011\000\011\000\011\000\011\000\011\000\ + \113\000\255\255\255\255\255\255\255\255\014\000\255\255\255\255\ + \014\000\014\000\014\000\255\255\255\255\255\255\014\000\014\000\ + \255\255\014\000\014\000\014\000\255\255\255\255\010\000\010\000\ + \010\000\255\255\255\255\255\255\011\000\011\000\014\000\255\255\ + \014\000\014\000\014\000\014\000\014\000\255\255\255\255\255\255\ + \015\000\255\255\255\255\015\000\015\000\015\000\175\000\255\255\ + \255\255\015\000\015\000\255\255\015\000\015\000\015\000\255\255\ + \111\000\255\255\255\255\011\000\255\255\011\000\255\255\255\255\ + \255\255\015\000\014\000\015\000\015\000\015\000\015\000\015\000\ + \255\255\255\255\018\000\255\255\255\255\018\000\018\000\018\000\ + \255\255\255\255\255\255\018\000\018\000\255\255\018\000\018\000\ + \018\000\255\255\255\255\112\000\255\255\255\255\255\255\255\255\ + \014\000\255\255\014\000\018\000\255\255\015\000\018\000\018\000\ + \018\000\018\000\202\000\255\255\255\255\019\000\255\255\255\255\ + \019\000\019\000\019\000\255\255\255\255\255\255\019\000\019\000\ + \255\255\019\000\019\000\019\000\213\000\255\255\255\255\214\000\ + \255\255\216\000\255\255\015\000\255\255\015\000\019\000\018\000\ + \019\000\019\000\019\000\019\000\019\000\255\255\255\255\255\255\ + \023\000\255\255\255\255\023\000\023\000\023\000\148\000\255\255\ + 
\255\255\023\000\023\000\255\255\023\000\023\000\023\000\255\255\ + \255\255\255\255\255\255\255\255\255\255\018\000\255\255\018\000\ + \255\255\023\000\019\000\023\000\023\000\023\000\023\000\023\000\ + \083\000\083\000\083\000\083\000\083\000\083\000\083\000\083\000\ + \083\000\083\000\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\083\000\083\000\083\000\083\000\083\000\083\000\255\255\ + \019\000\255\255\019\000\255\255\255\255\023\000\255\255\255\255\ + \255\255\255\255\175\000\255\255\255\255\255\255\255\255\024\000\ + \193\000\193\000\193\000\193\000\193\000\193\000\193\000\193\000\ + \255\255\083\000\083\000\083\000\083\000\083\000\083\000\255\255\ + \255\255\255\255\255\255\023\000\255\255\023\000\024\000\024\000\ + \255\255\024\000\024\000\024\000\024\000\255\255\255\255\255\255\ + \024\000\024\000\107\000\024\000\024\000\024\000\024\000\024\000\ + \024\000\024\000\024\000\024\000\024\000\024\000\024\000\024\000\ + \024\000\255\255\024\000\024\000\024\000\024\000\024\000\255\255\ + \255\255\107\000\025\000\255\255\255\255\025\000\025\000\025\000\ + \255\255\255\255\025\000\025\000\025\000\255\255\025\000\025\000\ + \025\000\107\000\107\000\107\000\107\000\107\000\107\000\107\000\ + \107\000\107\000\107\000\025\000\024\000\025\000\025\000\025\000\ + \025\000\025\000\165\000\165\000\165\000\165\000\165\000\165\000\ + \165\000\165\000\165\000\165\000\168\000\168\000\168\000\168\000\ + \168\000\168\000\168\000\168\000\168\000\168\000\255\255\255\255\ + \255\255\255\255\024\000\028\000\024\000\255\255\075\000\025\000\ + \075\000\075\000\075\000\075\000\075\000\075\000\075\000\075\000\ + \075\000\075\000\075\000\075\000\075\000\075\000\075\000\075\000\ + \075\000\075\000\075\000\075\000\075\000\075\000\075\000\075\000\ + \075\000\075\000\255\255\075\000\255\255\025\000\028\000\025\000\ + \028\000\028\000\028\000\028\000\028\000\028\000\028\000\028\000\ + \028\000\028\000\028\000\028\000\028\000\028\000\028\000\028\000\ + \028\000\028\000\028\000\028\000\028\000\028\000\028\000\028\000\ + \028\000\028\000\030\000\028\000\255\255\255\255\255\255\255\255\ + \255\255\030\000\255\255\030\000\030\000\030\000\030\000\030\000\ + \030\000\030\000\030\000\030\000\030\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\030\000\030\000\030\000\030\000\ + \030\000\030\000\030\000\030\000\030\000\030\000\030\000\030\000\ + \030\000\030\000\030\000\030\000\030\000\030\000\030\000\030\000\ + \030\000\030\000\030\000\030\000\030\000\030\000\255\255\255\255\ + \255\255\255\255\030\000\255\255\030\000\030\000\030\000\030\000\ + \030\000\030\000\030\000\030\000\030\000\030\000\030\000\030\000\ + \030\000\030\000\030\000\030\000\030\000\030\000\030\000\030\000\ + \030\000\030\000\030\000\030\000\030\000\030\000\031\000\255\255\ + \255\255\255\255\255\255\255\255\255\255\031\000\255\255\031\000\ + \031\000\031\000\031\000\031\000\031\000\031\000\031\000\031\000\ + \031\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \031\000\031\000\031\000\031\000\031\000\031\000\031\000\031\000\ + \031\000\031\000\031\000\031\000\031\000\031\000\031\000\031\000\ + \031\000\031\000\031\000\031\000\031\000\031\000\031\000\031\000\ + \031\000\031\000\255\255\255\255\255\255\255\255\031\000\255\255\ + \031\000\031\000\031\000\031\000\031\000\031\000\031\000\031\000\ + \031\000\031\000\031\000\031\000\031\000\031\000\031\000\031\000\ + \031\000\031\000\031\000\031\000\031\000\031\000\031\000\031\000\ + \031\000\031\000\032\000\194\000\194\000\194\000\194\000\194\000\ + 
\194\000\194\000\194\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\255\255\255\255\ + \255\255\255\255\032\000\255\255\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\095\000\095\000\ + \095\000\095\000\095\000\095\000\095\000\095\000\095\000\095\000\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\095\000\ + \095\000\095\000\095\000\095\000\095\000\186\000\186\000\186\000\ + \186\000\186\000\186\000\186\000\186\000\186\000\186\000\191\000\ + \191\000\191\000\191\000\191\000\191\000\191\000\191\000\191\000\ + \191\000\255\255\255\255\255\255\255\255\255\255\255\255\095\000\ + \095\000\095\000\095\000\095\000\095\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\255\255\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\033\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\033\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\033\000\033\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\033\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\033\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\033\000\033\000\033\000\255\255\255\255\ + \255\255\255\255\033\000\255\255\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\033\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\033\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\033\000\033\000\033\000\106\000\106\000\ + \255\255\255\255\106\000\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\106\000\255\255\106\000\ + \255\255\255\255\255\255\255\255\143\000\255\255\255\255\143\000\ + \255\255\255\255\255\255\255\255\255\255\106\000\106\000\106\000\ + \106\000\106\000\106\000\106\000\106\000\106\000\106\000\255\255\ + \255\255\255\255\255\255\255\255\143\000\255\255\255\255\255\255\ + \255\255\143\000\143\000\255\255\143\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\033\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\033\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\033\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\255\255\033\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\033\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\033\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\033\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\034\000\033\000\033\000\033\000\033\000\033\000\ + 
\033\000\033\000\033\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\143\000\255\255\255\255\ + \255\255\255\255\255\255\255\255\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\255\255\255\255\ + \255\255\255\255\034\000\255\255\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\149\000\255\255\ + \149\000\149\000\149\000\149\000\149\000\149\000\149\000\149\000\ + \149\000\149\000\149\000\149\000\149\000\149\000\149\000\149\000\ + \149\000\149\000\149\000\149\000\149\000\149\000\149\000\149\000\ + \149\000\149\000\255\255\149\000\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\156\000\106\000\156\000\156\000\ + \156\000\156\000\156\000\156\000\156\000\156\000\156\000\156\000\ + \156\000\156\000\156\000\156\000\156\000\156\000\156\000\156\000\ + \156\000\156\000\156\000\156\000\156\000\156\000\156\000\156\000\ + \255\255\156\000\143\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\255\255\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\035\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\255\255\255\255\ + \255\255\255\255\035\000\255\255\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\164\000\164\000\ + \164\000\164\000\164\000\164\000\164\000\164\000\164\000\164\000\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\164\000\ + \164\000\164\000\164\000\164\000\164\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\164\000\ + \164\000\164\000\164\000\164\000\164\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\255\255\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\255\255\035\000\035\000\035\000\035\000\035\000\ + 
\035\000\035\000\035\000\036\000\255\255\255\255\036\000\036\000\ + \036\000\255\255\255\255\255\255\036\000\036\000\255\255\036\000\ + \036\000\036\000\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\036\000\255\255\036\000\036\000\ + \036\000\036\000\036\000\255\255\204\000\255\255\204\000\204\000\ + \204\000\204\000\204\000\204\000\204\000\204\000\204\000\204\000\ + \204\000\204\000\204\000\204\000\204\000\204\000\204\000\204\000\ + \204\000\204\000\204\000\204\000\204\000\204\000\204\000\204\000\ + \036\000\036\000\204\000\036\000\036\000\036\000\036\000\036\000\ + \036\000\036\000\036\000\036\000\036\000\036\000\036\000\036\000\ + \036\000\036\000\036\000\036\000\036\000\036\000\036\000\036\000\ + \036\000\036\000\036\000\036\000\036\000\255\255\036\000\037\000\ + \036\000\255\255\037\000\037\000\037\000\255\255\255\255\255\255\ + \037\000\037\000\255\255\037\000\037\000\037\000\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \037\000\255\255\037\000\037\000\037\000\037\000\037\000\255\255\ + \210\000\255\255\210\000\210\000\210\000\210\000\210\000\210\000\ + \210\000\210\000\210\000\210\000\210\000\210\000\210\000\210\000\ + \210\000\210\000\210\000\210\000\210\000\210\000\210\000\210\000\ + \210\000\210\000\210\000\210\000\037\000\037\000\210\000\037\000\ + \037\000\037\000\037\000\037\000\037\000\037\000\037\000\037\000\ + \037\000\037\000\037\000\037\000\037\000\037\000\037\000\037\000\ + \037\000\037\000\037\000\037\000\037\000\037\000\037\000\037\000\ + \037\000\255\255\037\000\255\255\037\000\255\255\255\255\255\255\ + \255\255\036\000\036\000\036\000\036\000\036\000\036\000\036\000\ + \036\000\036\000\036\000\036\000\036\000\036\000\036\000\036\000\ + \036\000\036\000\036\000\036\000\036\000\036\000\036\000\036\000\ + \036\000\255\255\036\000\036\000\036\000\036\000\036\000\036\000\ + \036\000\036\000\171\000\171\000\171\000\171\000\171\000\171\000\ + \171\000\171\000\171\000\171\000\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\171\000\171\000\171\000\171\000\171\000\ + \171\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\171\000\171\000\171\000\171\000\171\000\ + \171\000\255\255\255\255\255\255\255\255\037\000\037\000\037\000\ + \037\000\037\000\037\000\037\000\037\000\037\000\037\000\037\000\ + \037\000\037\000\037\000\037\000\037\000\037\000\037\000\037\000\ + \037\000\037\000\037\000\037\000\037\000\038\000\037\000\037\000\ + \037\000\037\000\037\000\037\000\037\000\037\000\038\000\038\000\ + \038\000\038\000\038\000\038\000\038\000\038\000\038\000\038\000\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\038\000\ + \038\000\038\000\038\000\038\000\038\000\038\000\038\000\038\000\ + \038\000\038\000\038\000\038\000\038\000\038\000\038\000\038\000\ + \038\000\038\000\038\000\038\000\038\000\038\000\038\000\038\000\ + \038\000\255\255\255\255\255\255\255\255\038\000\255\255\038\000\ + \038\000\038\000\038\000\038\000\038\000\038\000\038\000\038\000\ + \038\000\038\000\038\000\038\000\038\000\038\000\038\000\038\000\ + \038\000\038\000\038\000\038\000\038\000\038\000\038\000\038\000\ + \038\000\184\000\184\000\184\000\184\000\184\000\184\000\184\000\ + \184\000\184\000\184\000\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\184\000\184\000\184\000\184\000\184\000\184\000\ + 
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\184\000\184\000\184\000\184\000\184\000\184\000\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\038\000\038\000\ + \038\000\038\000\038\000\038\000\038\000\038\000\038\000\038\000\ + \038\000\038\000\038\000\038\000\038\000\038\000\038\000\038\000\ + \038\000\038\000\038\000\038\000\038\000\255\255\038\000\038\000\ + \038\000\038\000\038\000\038\000\038\000\038\000\038\000\038\000\ + \038\000\038\000\038\000\038\000\038\000\038\000\038\000\038\000\ + \038\000\038\000\038\000\038\000\038\000\038\000\038\000\038\000\ + \038\000\038\000\038\000\038\000\038\000\255\255\038\000\038\000\ + \038\000\038\000\038\000\038\000\038\000\038\000\046\000\255\255\ + \255\255\046\000\046\000\046\000\255\255\255\255\255\255\046\000\ + \046\000\255\255\046\000\046\000\046\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\046\000\ + \255\255\046\000\046\000\046\000\046\000\046\000\255\255\255\255\ + \255\255\255\255\047\000\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\255\255\255\255\ + \255\255\255\255\255\255\046\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\255\255\255\255\ + \255\255\046\000\047\000\046\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\196\000\196\000\ + \196\000\196\000\196\000\196\000\196\000\196\000\196\000\196\000\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\196\000\ + \196\000\196\000\196\000\196\000\196\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\196\000\ + \196\000\196\000\196\000\196\000\196\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\255\255\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\048\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\048\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\048\000\048\000\048\000\048\000\255\255\255\255\ + \255\255\255\255\255\255\255\255\048\000\048\000\048\000\048\000\ + \048\000\048\000\048\000\048\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\048\000\048\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\048\000\048\000\048\000\048\000\255\255\255\255\ + \255\255\255\255\048\000\255\255\048\000\048\000\048\000\048\000\ + 
\048\000\048\000\048\000\048\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\048\000\048\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\048\000\048\000\048\000\048\000\198\000\198\000\ + \198\000\198\000\198\000\198\000\198\000\198\000\198\000\198\000\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\198\000\ + \198\000\198\000\198\000\198\000\198\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\198\000\ + \198\000\198\000\198\000\198\000\198\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\048\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\048\000\048\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\048\000\048\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\255\255\048\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\048\000\048\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\048\000\048\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\048\000\048\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\051\000\048\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\048\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\255\255\255\255\ + \255\255\255\255\255\255\255\255\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\255\255\255\255\ + \255\255\255\255\051\000\255\255\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\255\255\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\052\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\052\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\052\000\052\000\052\000\255\255\255\255\ + \255\255\255\255\255\255\255\255\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\052\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\052\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\052\000\052\000\052\000\255\255\255\255\ + \255\255\255\255\052\000\255\255\052\000\052\000\052\000\052\000\ + 
\052\000\052\000\052\000\052\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\052\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\052\000\052\000\052\000\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\052\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\052\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\052\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\255\255\052\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\052\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\052\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\052\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\055\000\052\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\255\255\255\255\ + \255\255\255\255\055\000\255\255\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\056\000\255\255\ + \255\255\255\255\056\000\255\255\056\000\255\255\255\255\056\000\ + \056\000\056\000\056\000\056\000\056\000\056\000\056\000\056\000\ + \056\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \056\000\056\000\056\000\056\000\056\000\056\000\056\000\056\000\ + \056\000\056\000\056\000\056\000\056\000\056\000\056\000\056\000\ + \056\000\056\000\056\000\056\000\056\000\056\000\056\000\056\000\ + \056\000\056\000\255\255\255\255\255\255\255\255\056\000\255\255\ + \056\000\056\000\056\000\056\000\056\000\056\000\056\000\056\000\ + \056\000\056\000\056\000\056\000\056\000\056\000\056\000\056\000\ + \056\000\056\000\056\000\056\000\056\000\056\000\056\000\056\000\ + \056\000\056\000\057\000\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\057\000\057\000\057\000\057\000\057\000\ + \057\000\057\000\057\000\057\000\057\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\057\000\057\000\057\000\057\000\ + \057\000\057\000\057\000\057\000\057\000\057\000\057\000\057\000\ + \057\000\057\000\057\000\057\000\057\000\057\000\057\000\057\000\ + \057\000\057\000\057\000\057\000\057\000\057\000\255\255\255\255\ + \255\255\255\255\057\000\255\255\057\000\057\000\057\000\057\000\ + \057\000\057\000\057\000\057\000\057\000\057\000\057\000\057\000\ + \057\000\057\000\057\000\057\000\057\000\057\000\057\000\057\000\ + \057\000\057\000\057\000\057\000\057\000\057\000\058\000\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\058\000\058\000\ + 
\058\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \058\000\058\000\058\000\058\000\058\000\058\000\058\000\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\058\000\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\058\000\058\000\ + \058\000\058\000\255\255\255\255\255\255\255\255\058\000\255\255\ + \058\000\058\000\058\000\058\000\058\000\058\000\058\000\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\058\000\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\058\000\058\000\ + \058\000\058\000\059\000\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\059\000\059\000\059\000\059\000\059\000\ + \059\000\059\000\059\000\059\000\059\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\059\000\059\000\059\000\059\000\ + \059\000\059\000\059\000\059\000\059\000\059\000\059\000\059\000\ + \059\000\059\000\059\000\059\000\059\000\059\000\059\000\059\000\ + \059\000\059\000\059\000\059\000\059\000\059\000\255\255\255\255\ + \255\255\255\255\059\000\255\255\059\000\059\000\059\000\059\000\ + \059\000\059\000\059\000\059\000\059\000\059\000\059\000\059\000\ + \059\000\059\000\059\000\059\000\059\000\059\000\059\000\059\000\ + \059\000\059\000\059\000\059\000\059\000\059\000\060\000\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\060\000\ + \060\000\060\000\060\000\060\000\060\000\060\000\060\000\060\000\ + \060\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \060\000\060\000\060\000\060\000\060\000\060\000\060\000\060\000\ + \060\000\060\000\060\000\060\000\060\000\060\000\060\000\060\000\ + \060\000\060\000\060\000\060\000\060\000\060\000\060\000\060\000\ + \060\000\060\000\255\255\255\255\255\255\255\255\060\000\255\255\ + \060\000\060\000\060\000\060\000\060\000\060\000\060\000\060\000\ + \060\000\060\000\060\000\060\000\060\000\060\000\060\000\060\000\ + \060\000\060\000\060\000\060\000\060\000\060\000\060\000\060\000\ + \060\000\060\000\061\000\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\061\000\061\000\061\000\061\000\061\000\ + \061\000\061\000\061\000\061\000\061\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\061\000\061\000\061\000\061\000\ + \061\000\061\000\061\000\061\000\061\000\061\000\061\000\061\000\ + \061\000\061\000\061\000\061\000\061\000\061\000\061\000\061\000\ + \061\000\061\000\061\000\061\000\061\000\061\000\255\255\255\255\ + \255\255\255\255\061\000\255\255\061\000\061\000\061\000\061\000\ + \061\000\061\000\061\000\061\000\061\000\061\000\061\000\061\000\ + \061\000\061\000\061\000\061\000\061\000\061\000\061\000\061\000\ + \061\000\061\000\061\000\061\000\061\000\061\000\062\000\255\255\ + \255\255\255\255\255\255\255\255\255\255\062\000\255\255\062\000\ + \062\000\062\000\062\000\062\000\062\000\062\000\062\000\062\000\ + \062\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \062\000\062\000\062\000\062\000\062\000\062\000\062\000\062\000\ + \062\000\062\000\062\000\062\000\062\000\062\000\062\000\062\000\ + \062\000\062\000\062\000\062\000\062\000\062\000\062\000\062\000\ + \062\000\062\000\255\255\255\255\255\255\255\255\062\000\255\255\ + \062\000\062\000\062\000\062\000\062\000\062\000\062\000\062\000\ + \062\000\062\000\062\000\062\000\062\000\062\000\062\000\062\000\ + \062\000\062\000\062\000\062\000\062\000\062\000\062\000\062\000\ + \062\000\062\000\063\000\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\063\000\063\000\063\000\063\000\063\000\ + 
\063\000\063\000\063\000\063\000\063\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\063\000\063\000\063\000\063\000\ + \063\000\063\000\063\000\063\000\063\000\063\000\063\000\063\000\ + \063\000\063\000\063\000\063\000\063\000\063\000\063\000\063\000\ + \063\000\063\000\063\000\063\000\063\000\063\000\255\255\255\255\ + \255\255\255\255\063\000\255\255\063\000\063\000\063\000\063\000\ + \063\000\063\000\063\000\063\000\063\000\063\000\063\000\063\000\ + \063\000\063\000\063\000\063\000\063\000\063\000\063\000\063\000\ + \063\000\063\000\063\000\063\000\063\000\063\000\064\000\255\255\ + \255\255\255\255\064\000\255\255\064\000\255\255\255\255\064\000\ + \064\000\064\000\064\000\064\000\064\000\064\000\064\000\064\000\ + \064\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \064\000\064\000\064\000\064\000\064\000\064\000\064\000\064\000\ + \064\000\064\000\064\000\064\000\064\000\064\000\064\000\064\000\ + \064\000\064\000\064\000\064\000\064\000\064\000\064\000\064\000\ + \064\000\064\000\255\255\255\255\255\255\255\255\064\000\255\255\ + \064\000\064\000\064\000\064\000\064\000\064\000\064\000\064\000\ + \064\000\064\000\064\000\064\000\064\000\064\000\064\000\064\000\ + \064\000\064\000\064\000\064\000\064\000\064\000\064\000\064\000\ + \064\000\064\000\065\000\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\065\000\065\000\065\000\065\000\065\000\ + \065\000\065\000\065\000\065\000\065\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\065\000\065\000\065\000\065\000\ + \065\000\065\000\065\000\065\000\065\000\065\000\065\000\065\000\ + \065\000\065\000\065\000\065\000\065\000\065\000\065\000\065\000\ + \065\000\065\000\065\000\065\000\065\000\065\000\255\255\255\255\ + \255\255\255\255\065\000\255\255\065\000\065\000\065\000\065\000\ + \065\000\065\000\065\000\065\000\065\000\065\000\065\000\065\000\ + \065\000\065\000\065\000\065\000\065\000\065\000\065\000\065\000\ + \065\000\065\000\065\000\065\000\065\000\065\000\067\000\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\067\000\ + \067\000\067\000\067\000\067\000\067\000\067\000\067\000\067\000\ + \067\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \067\000\067\000\067\000\067\000\067\000\067\000\067\000\067\000\ + \067\000\067\000\067\000\067\000\067\000\067\000\067\000\067\000\ + \067\000\067\000\067\000\067\000\067\000\067\000\067\000\067\000\ + \067\000\067\000\255\255\255\255\255\255\255\255\067\000\255\255\ + \067\000\067\000\067\000\067\000\067\000\067\000\067\000\067\000\ + \067\000\067\000\067\000\067\000\067\000\067\000\067\000\067\000\ + \067\000\067\000\067\000\067\000\067\000\067\000\067\000\067\000\ + \067\000\067\000\068\000\255\255\255\255\255\255\068\000\255\255\ + \068\000\255\255\255\255\068\000\068\000\068\000\068\000\068\000\ + \068\000\068\000\068\000\068\000\068\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\068\000\068\000\068\000\068\000\ + \068\000\068\000\068\000\068\000\068\000\068\000\068\000\068\000\ + \068\000\068\000\068\000\068\000\068\000\068\000\068\000\068\000\ + \068\000\068\000\068\000\068\000\068\000\068\000\255\255\255\255\ + \255\255\255\255\068\000\255\255\068\000\068\000\068\000\068\000\ + \068\000\068\000\068\000\068\000\068\000\068\000\068\000\068\000\ + \068\000\068\000\068\000\068\000\068\000\068\000\068\000\068\000\ + \068\000\068\000\068\000\068\000\068\000\068\000\069\000\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\069\000\ + 
\069\000\069\000\069\000\069\000\069\000\069\000\069\000\069\000\ + \069\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \069\000\069\000\069\000\069\000\069\000\069\000\069\000\069\000\ + \069\000\069\000\069\000\069\000\069\000\069\000\069\000\069\000\ + \069\000\069\000\069\000\069\000\069\000\069\000\069\000\069\000\ + \069\000\069\000\255\255\255\255\255\255\255\255\069\000\255\255\ + \069\000\069\000\069\000\069\000\069\000\069\000\069\000\069\000\ + \069\000\069\000\069\000\069\000\069\000\069\000\069\000\069\000\ + \069\000\069\000\069\000\069\000\069\000\069\000\069\000\069\000\ + \069\000\069\000\070\000\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\070\000\070\000\070\000\070\000\070\000\ + \070\000\070\000\070\000\070\000\070\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\070\000\070\000\070\000\070\000\ + \070\000\070\000\070\000\070\000\070\000\070\000\070\000\070\000\ + \070\000\070\000\070\000\070\000\070\000\070\000\070\000\070\000\ + \070\000\070\000\070\000\070\000\070\000\070\000\255\255\255\255\ + \255\255\255\255\070\000\255\255\070\000\070\000\070\000\070\000\ + \070\000\070\000\070\000\070\000\070\000\070\000\070\000\070\000\ + \070\000\070\000\070\000\070\000\070\000\070\000\070\000\070\000\ + \070\000\070\000\070\000\070\000\070\000\070\000\071\000\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\071\000\ + \071\000\071\000\071\000\071\000\071\000\071\000\071\000\071\000\ + \071\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \071\000\071\000\071\000\071\000\071\000\071\000\071\000\071\000\ + \071\000\071\000\071\000\071\000\071\000\071\000\071\000\071\000\ + \071\000\071\000\071\000\071\000\071\000\071\000\071\000\071\000\ + \071\000\071\000\255\255\255\255\255\255\255\255\071\000\255\255\ + \071\000\071\000\071\000\071\000\071\000\071\000\071\000\071\000\ + \071\000\071\000\071\000\071\000\071\000\071\000\071\000\071\000\ + \071\000\071\000\071\000\071\000\071\000\071\000\071\000\071\000\ + \071\000\071\000\076\000\103\000\076\000\255\255\103\000\103\000\ + \103\000\076\000\255\255\255\255\103\000\103\000\255\255\103\000\ + \103\000\103\000\076\000\076\000\076\000\076\000\076\000\076\000\ + \076\000\076\000\076\000\076\000\103\000\255\255\103\000\103\000\ + \103\000\103\000\103\000\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\105\000\255\255\105\000\105\000\105\000\105\000\ + \255\255\255\255\255\255\105\000\105\000\255\255\105\000\105\000\ + \105\000\255\255\255\255\255\255\255\255\255\255\076\000\255\255\ + \103\000\255\255\255\255\105\000\076\000\105\000\105\000\105\000\ + \105\000\105\000\255\255\255\255\255\255\255\255\255\255\255\255\ + \076\000\076\000\255\255\255\255\076\000\255\255\076\000\255\255\ + \116\000\255\255\076\000\116\000\116\000\116\000\103\000\255\255\ + \103\000\116\000\116\000\255\255\116\000\116\000\116\000\105\000\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\116\000\255\255\116\000\116\000\116\000\116\000\116\000\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\117\000\ + \255\255\255\255\117\000\117\000\117\000\105\000\255\255\105\000\ + \117\000\117\000\255\255\117\000\117\000\117\000\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\116\000\255\255\255\255\ + \117\000\255\255\117\000\117\000\117\000\117\000\117\000\255\255\ + \255\255\255\255\118\000\255\255\255\255\118\000\118\000\118\000\ + \255\255\255\255\255\255\118\000\118\000\255\255\118\000\118\000\ + 
\118\000\255\255\255\255\116\000\255\255\116\000\255\255\255\255\ + \255\255\255\255\255\255\118\000\117\000\118\000\118\000\118\000\ + \118\000\118\000\255\255\255\255\255\255\255\255\255\255\255\255\ + \119\000\255\255\255\255\119\000\119\000\119\000\255\255\255\255\ + \255\255\119\000\119\000\255\255\119\000\119\000\119\000\255\255\ + \255\255\255\255\117\000\255\255\117\000\255\255\255\255\118\000\ + \255\255\119\000\076\000\119\000\119\000\119\000\119\000\119\000\ + \255\255\255\255\255\255\120\000\255\255\255\255\120\000\120\000\ + \120\000\255\255\255\255\255\255\120\000\120\000\255\255\120\000\ + \120\000\120\000\255\255\255\255\255\255\118\000\255\255\118\000\ + \255\255\255\255\255\255\255\255\120\000\119\000\120\000\120\000\ + \120\000\120\000\120\000\255\255\255\255\255\255\126\000\255\255\ + \255\255\126\000\126\000\126\000\255\255\255\255\255\255\126\000\ + \126\000\255\255\126\000\126\000\126\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\119\000\255\255\119\000\255\255\126\000\ + \120\000\126\000\126\000\126\000\126\000\126\000\255\255\255\255\ + \255\255\136\000\255\255\255\255\136\000\136\000\136\000\255\255\ + \255\255\255\255\136\000\136\000\255\255\136\000\136\000\136\000\ + \255\255\255\255\255\255\255\255\255\255\255\255\120\000\255\255\ + \120\000\255\255\136\000\126\000\136\000\136\000\136\000\136\000\ + \136\000\255\255\255\255\255\255\139\000\255\255\255\255\139\000\ + \139\000\139\000\255\255\255\255\255\255\139\000\139\000\255\255\ + \139\000\139\000\139\000\255\255\255\255\255\255\255\255\255\255\ + \255\255\126\000\255\255\126\000\255\255\139\000\136\000\139\000\ + \139\000\139\000\139\000\139\000\255\255\255\255\255\255\140\000\ + \255\255\255\255\140\000\140\000\140\000\255\255\255\255\255\255\ + \140\000\140\000\255\255\140\000\140\000\140\000\255\255\255\255\ + \255\255\255\255\255\255\255\255\136\000\255\255\136\000\255\255\ + \140\000\139\000\140\000\140\000\140\000\140\000\140\000\255\255\ + \255\255\255\255\141\000\255\255\255\255\141\000\141\000\141\000\ + \255\255\255\255\255\255\141\000\141\000\255\255\141\000\141\000\ + \141\000\255\255\255\255\255\255\255\255\255\255\255\255\139\000\ + \255\255\139\000\255\255\141\000\140\000\141\000\141\000\141\000\ + \141\000\141\000\255\255\255\255\255\255\142\000\255\255\255\255\ + \142\000\142\000\142\000\255\255\255\255\255\255\142\000\142\000\ + \255\255\142\000\142\000\142\000\255\255\255\255\157\000\255\255\ + \157\000\255\255\140\000\255\255\140\000\157\000\142\000\141\000\ + \142\000\142\000\142\000\142\000\142\000\255\255\157\000\157\000\ + \157\000\157\000\157\000\157\000\157\000\157\000\157\000\157\000\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\141\000\255\255\141\000\ + \255\255\255\255\142\000\255\255\255\255\255\255\255\255\255\255\ + \255\255\180\000\255\255\255\255\180\000\255\255\255\255\255\255\ + \255\255\255\255\157\000\255\255\255\255\255\255\255\255\255\255\ + \157\000\255\255\255\255\255\255\255\255\255\255\255\255\180\000\ + \142\000\180\000\142\000\255\255\157\000\255\255\180\000\255\255\ + \157\000\255\255\157\000\255\255\255\255\255\255\157\000\180\000\ + \180\000\180\000\180\000\180\000\180\000\180\000\180\000\180\000\ + \180\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\199\000\199\000\199\000\199\000\199\000\199\000\199\000\ + 
\199\000\199\000\199\000\180\000\255\255\255\255\255\255\255\255\ + \255\255\180\000\199\000\199\000\199\000\199\000\199\000\199\000\ + \255\255\255\255\255\255\255\255\255\255\180\000\180\000\255\255\ + \255\255\180\000\255\255\180\000\180\000\255\255\255\255\180\000\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\199\000\199\000\199\000\199\000\199\000\199\000\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\199\000\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\180\000\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255"; + Lexing.lex_base_code = + "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\010\000\036\000\022\000\000\000\000\000\000\000\ + \005\000\000\000\039\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\001\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\002\000\005\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + 
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000"; + Lexing.lex_backtrk_code = + "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\053\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000"; + Lexing.lex_default_code = + "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\031\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + 
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000"; + Lexing.lex_trans_code = + "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\001\000\000\000\050\000\050\000\000\000\009\000\050\000\ + \000\000\000\000\000\000\009\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \001\000\000\000\009\000\001\000\000\000\009\000\000\000\034\000\ + \000\000\000\000\009\000\000\000\012\000\001\000\000\000\000\000\ + \004\000\004\000\004\000\004\000\004\000\004\000\004\000\004\000\ + \004\000\004\000\017\000\017\000\017\000\017\000\017\000\017\000\ + \017\000\017\000\017\000\017\000\001\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\017\000\017\000\017\000\017\000\ + \017\000\017\000\017\000\017\000\017\000\017\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000"; + Lexing.lex_check_code = + "\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\024\000\111\000\180\000\189\000\111\000\112\000\190\000\ + \255\255\255\255\255\255\106\000\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \024\000\255\255\111\000\000\000\255\255\112\000\255\255\112\000\ + \255\255\255\255\106\000\255\255\106\000\107\000\255\255\255\255\ + \024\000\024\000\024\000\024\000\024\000\024\000\024\000\024\000\ + \024\000\024\000\106\000\106\000\106\000\106\000\106\000\106\000\ + \106\000\106\000\106\000\106\000\107\000\255\255\255\255\255\255\ + 
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\107\000\107\000\107\000\107\000\ + \107\000\107\000\107\000\107\000\107\000\107\000\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \111\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255"; + Lexing.lex_code = + "\255\005\255\255\007\255\006\255\255\007\255\255\009\255\008\255\ + \255\006\255\007\255\255\004\255\000\005\001\006\002\007\255\009\ + \255\255\008\255\009\255\255\000\005\001\006\004\008\003\009\002\ + \007\255\001\255\255\000\001\255"; +} + +let rec token lexbuf = + lexbuf.Lexing.lex_mem <- Array.make 10 (-1); __ocaml_lex_token_rec lexbuf 0 +and __ocaml_lex_token_rec lexbuf __ocaml_lex_state = + match Lexing.new_engine __ocaml_lex_tables __ocaml_lex_state lexbuf with + | 0 -> +# 321 "parsing/lexer.mll" + ( + if not !escaped_newlines then + raise (Error(Illegal_character (Lexing.lexeme_char lexbuf 0), + Location.curr lexbuf)); + update_loc lexbuf None 1 false 0; + token lexbuf ) +# 1909 "parsing/lexer.ml" + + | 1 -> +# 328 "parsing/lexer.mll" + ( update_loc lexbuf None 1 false 0; + EOL ) +# 1915 "parsing/lexer.ml" + + | 2 -> +# 331 "parsing/lexer.mll" + ( token lexbuf ) +# 1920 "parsing/lexer.ml" + + | 3 -> +# 333 "parsing/lexer.mll" + ( UNDERSCORE ) +# 1925 "parsing/lexer.ml" + + | 4 -> +# 335 "parsing/lexer.mll" + ( TILDE ) +# 1930 "parsing/lexer.ml" + + | 5 -> +# 337 "parsing/lexer.mll" + ( LABEL (get_label_name lexbuf) ) +# 1935 "parsing/lexer.ml" + + | 6 -> +# 339 "parsing/lexer.mll" + ( warn_latin1 lexbuf; LABEL (get_label_name lexbuf) ) +# 1940 "parsing/lexer.ml" + + | 7 -> +# 341 "parsing/lexer.mll" + ( QUESTION ) +# 1945 "parsing/lexer.ml" + + | 8 -> +# 343 "parsing/lexer.mll" + ( OPTLABEL (get_label_name lexbuf) ) +# 1950 "parsing/lexer.ml" + + | 9 -> +# 345 "parsing/lexer.mll" + ( warn_latin1 lexbuf; OPTLABEL (get_label_name lexbuf) ) +# 1955 "parsing/lexer.ml" + + | 10 -> +# 347 "parsing/lexer.mll" + ( let s = 
Lexing.lexeme lexbuf in + try Hashtbl.find keyword_table s + with Not_found -> LIDENT s ) +# 1962 "parsing/lexer.ml" + + | 11 -> +# 351 "parsing/lexer.mll" + ( warn_latin1 lexbuf; LIDENT (Lexing.lexeme lexbuf) ) +# 1967 "parsing/lexer.ml" + + | 12 -> +# 353 "parsing/lexer.mll" + ( UIDENT(Lexing.lexeme lexbuf) ) +# 1972 "parsing/lexer.ml" + + | 13 -> +# 355 "parsing/lexer.mll" + ( warn_latin1 lexbuf; UIDENT(Lexing.lexeme lexbuf) ) +# 1977 "parsing/lexer.ml" + + | 14 -> +# 356 "parsing/lexer.mll" + ( INT (Lexing.lexeme lexbuf, None) ) +# 1982 "parsing/lexer.ml" + + | 15 -> +let +# 357 "parsing/lexer.mll" + lit +# 1988 "parsing/lexer.ml" += Lexing.sub_lexeme lexbuf lexbuf.Lexing.lex_start_pos (lexbuf.Lexing.lex_curr_pos + -1) +and +# 357 "parsing/lexer.mll" + modif +# 1993 "parsing/lexer.ml" += Lexing.sub_lexeme_char lexbuf (lexbuf.Lexing.lex_curr_pos + -1) in +# 358 "parsing/lexer.mll" + ( INT (lit, Some modif) ) +# 1997 "parsing/lexer.ml" + + | 16 -> +# 360 "parsing/lexer.mll" + ( FLOAT (Lexing.lexeme lexbuf, None) ) +# 2002 "parsing/lexer.ml" + + | 17 -> +let +# 361 "parsing/lexer.mll" + lit +# 2008 "parsing/lexer.ml" += Lexing.sub_lexeme lexbuf lexbuf.Lexing.lex_start_pos (lexbuf.Lexing.lex_curr_pos + -1) +and +# 361 "parsing/lexer.mll" + modif +# 2013 "parsing/lexer.ml" += Lexing.sub_lexeme_char lexbuf (lexbuf.Lexing.lex_curr_pos + -1) in +# 362 "parsing/lexer.mll" + ( FLOAT (lit, Some modif) ) +# 2017 "parsing/lexer.ml" + + | 18 -> +# 364 "parsing/lexer.mll" + ( raise (Error(Invalid_literal (Lexing.lexeme lexbuf), + Location.curr lexbuf)) ) +# 2023 "parsing/lexer.ml" + + | 19 -> +# 367 "parsing/lexer.mll" + ( reset_string_buffer(); + is_in_string := true; + let string_start = lexbuf.lex_start_p in + string_start_loc := Location.curr lexbuf; + string lexbuf; + is_in_string := false; + lexbuf.lex_start_p <- string_start; + STRING (get_stored_string(), None) ) +# 2035 "parsing/lexer.ml" + + | 20 -> +# 376 "parsing/lexer.mll" + ( reset_string_buffer(); + let delim = Lexing.lexeme lexbuf in + let delim = String.sub delim 1 (String.length delim - 2) in + is_in_string := true; + let string_start = lexbuf.lex_start_p in + string_start_loc := Location.curr lexbuf; + quoted_string delim lexbuf; + is_in_string := false; + lexbuf.lex_start_p <- string_start; + STRING (get_stored_string(), Some delim) ) +# 2049 "parsing/lexer.ml" + + | 21 -> +# 387 "parsing/lexer.mll" + ( update_loc lexbuf None 1 false 1; + CHAR (Lexing.lexeme_char lexbuf 1) ) +# 2055 "parsing/lexer.ml" + + | 22 -> +# 390 "parsing/lexer.mll" + ( CHAR(Lexing.lexeme_char lexbuf 1) ) +# 2060 "parsing/lexer.ml" + + | 23 -> +# 392 "parsing/lexer.mll" + ( CHAR(char_for_backslash (Lexing.lexeme_char lexbuf 2)) ) +# 2065 "parsing/lexer.ml" + + | 24 -> +# 394 "parsing/lexer.mll" + ( CHAR(char_for_decimal_code lexbuf 2) ) +# 2070 "parsing/lexer.ml" + + | 25 -> +# 396 "parsing/lexer.mll" + ( CHAR(char_for_octal_code lexbuf 3) ) +# 2075 "parsing/lexer.ml" + + | 26 -> +# 398 "parsing/lexer.mll" + ( CHAR(char_for_hexadecimal_code lexbuf 3) ) +# 2080 "parsing/lexer.ml" + + | 27 -> +# 400 "parsing/lexer.mll" + ( let l = Lexing.lexeme lexbuf in + let esc = String.sub l 1 (String.length l - 1) in + raise (Error(Illegal_escape esc, Location.curr lexbuf)) + ) +# 2088 "parsing/lexer.ml" + + | 28 -> +# 405 "parsing/lexer.mll" + ( let s, loc = with_comment_buffer comment lexbuf in + COMMENT (s, loc) ) +# 2094 "parsing/lexer.ml" + + | 29 -> +# 408 "parsing/lexer.mll" + ( let s, loc = with_comment_buffer comment lexbuf in + if !handle_docstrings then + 
DOCSTRING (Docstrings.docstring s loc) + else + COMMENT ("*" ^ s, loc) + ) +# 2104 "parsing/lexer.ml" + + | 30 -> +let +# 414 "parsing/lexer.mll" + stars +# 2110 "parsing/lexer.ml" += Lexing.sub_lexeme lexbuf (lexbuf.Lexing.lex_start_pos + 3) lexbuf.Lexing.lex_curr_pos in +# 415 "parsing/lexer.mll" + ( let s, loc = + with_comment_buffer + (fun lexbuf -> + store_string ("*" ^ stars); + comment lexbuf) + lexbuf + in + COMMENT (s, loc) ) +# 2121 "parsing/lexer.ml" + + | 31 -> +# 424 "parsing/lexer.mll" + ( if !print_warnings then + Location.prerr_warning (Location.curr lexbuf) Warnings.Comment_start; + let s, loc = with_comment_buffer comment lexbuf in + COMMENT (s, loc) ) +# 2129 "parsing/lexer.ml" + + | 32 -> +let +# 428 "parsing/lexer.mll" + stars +# 2135 "parsing/lexer.ml" += Lexing.sub_lexeme lexbuf (lexbuf.Lexing.lex_start_pos + 2) (lexbuf.Lexing.lex_curr_pos + -2) in +# 429 "parsing/lexer.mll" + ( if !handle_docstrings && stars="" then + (* (**) is an empty docstring *) + DOCSTRING(Docstrings.docstring "" (Location.curr lexbuf)) + else + COMMENT (stars, Location.curr lexbuf) ) +# 2143 "parsing/lexer.ml" + + | 33 -> +# 435 "parsing/lexer.mll" + ( let loc = Location.curr lexbuf in + Location.prerr_warning loc Warnings.Comment_not_end; + lexbuf.Lexing.lex_curr_pos <- lexbuf.Lexing.lex_curr_pos - 1; + let curpos = lexbuf.lex_curr_p in + lexbuf.lex_curr_p <- { curpos with pos_cnum = curpos.pos_cnum - 1 }; + STAR + ) +# 2154 "parsing/lexer.ml" + + | 34 -> +let +# 442 "parsing/lexer.mll" + num +# 2160 "parsing/lexer.ml" += Lexing.sub_lexeme lexbuf lexbuf.Lexing.lex_mem.(0) lexbuf.Lexing.lex_mem.(1) +and +# 443 "parsing/lexer.mll" + name +# 2165 "parsing/lexer.ml" += Lexing.sub_lexeme_opt lexbuf lexbuf.Lexing.lex_mem.(4) lexbuf.Lexing.lex_mem.(3) +and +# 443 "parsing/lexer.mll" + directive +# 2170 "parsing/lexer.ml" += Lexing.sub_lexeme lexbuf lexbuf.Lexing.lex_start_pos lexbuf.Lexing.lex_mem.(2) in +# 445 "parsing/lexer.mll" + ( + match int_of_string num with + | exception _ -> + (* PR#7165 *) + let loc = Location.curr lexbuf in + let explanation = "line number out of range" in + let error = Invalid_directive (directive, Some explanation) in + raise (Error (error, loc)) + | line_num -> + (* Documentation says that the line number should be + positive, but we have never guarded against this and it + might have useful hackish uses. 
*) + update_loc lexbuf name line_num true 0; + token lexbuf + ) +# 2188 "parsing/lexer.ml" + + | 35 -> +# 460 "parsing/lexer.mll" + ( HASH ) +# 2193 "parsing/lexer.ml" + + | 36 -> +# 461 "parsing/lexer.mll" + ( AMPERSAND ) +# 2198 "parsing/lexer.ml" + + | 37 -> +# 462 "parsing/lexer.mll" + ( AMPERAMPER ) +# 2203 "parsing/lexer.ml" + + | 38 -> +# 463 "parsing/lexer.mll" + ( BACKQUOTE ) +# 2208 "parsing/lexer.ml" + + | 39 -> +# 464 "parsing/lexer.mll" + ( QUOTE ) +# 2213 "parsing/lexer.ml" + + | 40 -> +# 465 "parsing/lexer.mll" + ( LPAREN ) +# 2218 "parsing/lexer.ml" + + | 41 -> +# 466 "parsing/lexer.mll" + ( RPAREN ) +# 2223 "parsing/lexer.ml" + + | 42 -> +# 467 "parsing/lexer.mll" + ( STAR ) +# 2228 "parsing/lexer.ml" + + | 43 -> +# 468 "parsing/lexer.mll" + ( COMMA ) +# 2233 "parsing/lexer.ml" + + | 44 -> +# 469 "parsing/lexer.mll" + ( MINUSGREATER ) +# 2238 "parsing/lexer.ml" + + | 45 -> +# 470 "parsing/lexer.mll" + ( DOT ) +# 2243 "parsing/lexer.ml" + + | 46 -> +# 471 "parsing/lexer.mll" + ( DOTDOT ) +# 2248 "parsing/lexer.ml" + + | 47 -> +let +# 472 "parsing/lexer.mll" + s +# 2254 "parsing/lexer.ml" += Lexing.sub_lexeme lexbuf (lexbuf.Lexing.lex_start_pos + 1) lexbuf.Lexing.lex_curr_pos in +# 472 "parsing/lexer.mll" + ( DOTOP s ) +# 2258 "parsing/lexer.ml" + + | 48 -> +# 473 "parsing/lexer.mll" + ( COLON ) +# 2263 "parsing/lexer.ml" + + | 49 -> +# 474 "parsing/lexer.mll" + ( COLONCOLON ) +# 2268 "parsing/lexer.ml" + + | 50 -> +# 475 "parsing/lexer.mll" + ( COLONEQUAL ) +# 2273 "parsing/lexer.ml" + + | 51 -> +# 476 "parsing/lexer.mll" + ( COLONGREATER ) +# 2278 "parsing/lexer.ml" + + | 52 -> +# 477 "parsing/lexer.mll" + ( SEMI ) +# 2283 "parsing/lexer.ml" + + | 53 -> +# 478 "parsing/lexer.mll" + ( SEMISEMI ) +# 2288 "parsing/lexer.ml" + + | 54 -> +# 479 "parsing/lexer.mll" + ( LESS ) +# 2293 "parsing/lexer.ml" + + | 55 -> +# 480 "parsing/lexer.mll" + ( LESSMINUS ) +# 2298 "parsing/lexer.ml" + + | 56 -> +# 481 "parsing/lexer.mll" + ( EQUAL ) +# 2303 "parsing/lexer.ml" + + | 57 -> +# 482 "parsing/lexer.mll" + ( LBRACKET ) +# 2308 "parsing/lexer.ml" + + | 58 -> +# 483 "parsing/lexer.mll" + ( LBRACKETBAR ) +# 2313 "parsing/lexer.ml" + + | 59 -> +# 484 "parsing/lexer.mll" + ( LBRACKETLESS ) +# 2318 "parsing/lexer.ml" + + | 60 -> +# 485 "parsing/lexer.mll" + ( LBRACKETGREATER ) +# 2323 "parsing/lexer.ml" + + | 61 -> +# 486 "parsing/lexer.mll" + ( RBRACKET ) +# 2328 "parsing/lexer.ml" + + | 62 -> +# 487 "parsing/lexer.mll" + ( LBRACE ) +# 2333 "parsing/lexer.ml" + + | 63 -> +# 488 "parsing/lexer.mll" + ( LBRACELESS ) +# 2338 "parsing/lexer.ml" + + | 64 -> +# 489 "parsing/lexer.mll" + ( BAR ) +# 2343 "parsing/lexer.ml" + + | 65 -> +# 490 "parsing/lexer.mll" + ( BARBAR ) +# 2348 "parsing/lexer.ml" + + | 66 -> +# 491 "parsing/lexer.mll" + ( BARRBRACKET ) +# 2353 "parsing/lexer.ml" + + | 67 -> +# 492 "parsing/lexer.mll" + ( GREATER ) +# 2358 "parsing/lexer.ml" + + | 68 -> +# 493 "parsing/lexer.mll" + ( GREATERRBRACKET ) +# 2363 "parsing/lexer.ml" + + | 69 -> +# 494 "parsing/lexer.mll" + ( RBRACE ) +# 2368 "parsing/lexer.ml" + + | 70 -> +# 495 "parsing/lexer.mll" + ( GREATERRBRACE ) +# 2373 "parsing/lexer.ml" + + | 71 -> +# 496 "parsing/lexer.mll" + ( LBRACKETAT ) +# 2378 "parsing/lexer.ml" + + | 72 -> +# 497 "parsing/lexer.mll" + ( LBRACKETATAT ) +# 2383 "parsing/lexer.ml" + + | 73 -> +# 498 "parsing/lexer.mll" + ( LBRACKETATATAT ) +# 2388 "parsing/lexer.ml" + + | 74 -> +# 499 "parsing/lexer.mll" + ( LBRACKETPERCENT ) +# 2393 "parsing/lexer.ml" + + | 75 -> +# 500 "parsing/lexer.mll" + ( 
LBRACKETPERCENTPERCENT ) +# 2398 "parsing/lexer.ml" + + | 76 -> +# 501 "parsing/lexer.mll" + ( BANG ) +# 2403 "parsing/lexer.ml" + + | 77 -> +# 502 "parsing/lexer.mll" + ( INFIXOP0 "!=" ) +# 2408 "parsing/lexer.ml" + + | 78 -> +# 503 "parsing/lexer.mll" + ( PLUS ) +# 2413 "parsing/lexer.ml" + + | 79 -> +# 504 "parsing/lexer.mll" + ( PLUSDOT ) +# 2418 "parsing/lexer.ml" + + | 80 -> +# 505 "parsing/lexer.mll" + ( PLUSEQ ) +# 2423 "parsing/lexer.ml" + + | 81 -> +# 506 "parsing/lexer.mll" + ( MINUS ) +# 2428 "parsing/lexer.ml" + + | 82 -> +# 507 "parsing/lexer.mll" + ( MINUSDOT ) +# 2433 "parsing/lexer.ml" + + | 83 -> +# 510 "parsing/lexer.mll" + ( PREFIXOP(Lexing.lexeme lexbuf) ) +# 2438 "parsing/lexer.ml" + + | 84 -> +# 512 "parsing/lexer.mll" + ( PREFIXOP(Lexing.lexeme lexbuf) ) +# 2443 "parsing/lexer.ml" + + | 85 -> +# 514 "parsing/lexer.mll" + ( INFIXOP0(Lexing.lexeme lexbuf) ) +# 2448 "parsing/lexer.ml" + + | 86 -> +# 516 "parsing/lexer.mll" + ( INFIXOP1(Lexing.lexeme lexbuf) ) +# 2453 "parsing/lexer.ml" + + | 87 -> +# 518 "parsing/lexer.mll" + ( INFIXOP2(Lexing.lexeme lexbuf) ) +# 2458 "parsing/lexer.ml" + + | 88 -> +# 520 "parsing/lexer.mll" + ( INFIXOP4(Lexing.lexeme lexbuf) ) +# 2463 "parsing/lexer.ml" + + | 89 -> +# 521 "parsing/lexer.mll" + ( PERCENT ) +# 2468 "parsing/lexer.ml" + + | 90 -> +# 523 "parsing/lexer.mll" + ( INFIXOP3(Lexing.lexeme lexbuf) ) +# 2473 "parsing/lexer.ml" + + | 91 -> +# 525 "parsing/lexer.mll" + ( HASHOP(Lexing.lexeme lexbuf) ) +# 2478 "parsing/lexer.ml" + + | 92 -> +# 526 "parsing/lexer.mll" + ( EOF ) +# 2483 "parsing/lexer.ml" + + | 93 -> +# 528 "parsing/lexer.mll" + ( raise (Error(Illegal_character (Lexing.lexeme_char lexbuf 0), + Location.curr lexbuf)) + ) +# 2490 "parsing/lexer.ml" + + | __ocaml_lex_state -> lexbuf.Lexing.refill_buff lexbuf; + __ocaml_lex_token_rec lexbuf __ocaml_lex_state + +and comment lexbuf = + __ocaml_lex_comment_rec lexbuf 143 +and __ocaml_lex_comment_rec lexbuf __ocaml_lex_state = + match Lexing.engine __ocaml_lex_tables __ocaml_lex_state lexbuf with + | 0 -> +# 534 "parsing/lexer.mll" + ( comment_start_loc := (Location.curr lexbuf) :: !comment_start_loc; + store_lexeme lexbuf; + comment lexbuf + ) +# 2505 "parsing/lexer.ml" + + | 1 -> +# 539 "parsing/lexer.mll" + ( match !comment_start_loc with + | [] -> assert false + | [_] -> comment_start_loc := []; Location.curr lexbuf + | _ :: l -> comment_start_loc := l; + store_lexeme lexbuf; + comment lexbuf + ) +# 2516 "parsing/lexer.ml" + + | 2 -> +# 547 "parsing/lexer.mll" + ( + string_start_loc := Location.curr lexbuf; + store_string_char '\"'; + is_in_string := true; + begin try string lexbuf + with Error (Unterminated_string, str_start) -> + match !comment_start_loc with + | [] -> assert false + | loc :: _ -> + let start = List.hd (List.rev !comment_start_loc) in + comment_start_loc := []; + raise (Error (Unterminated_string_in_comment (start, str_start), + loc)) + end; + is_in_string := false; + store_string_char '\"'; + comment lexbuf ) +# 2537 "parsing/lexer.ml" + + | 3 -> +# 565 "parsing/lexer.mll" + ( + let delim = Lexing.lexeme lexbuf in + let delim = String.sub delim 1 (String.length delim - 2) in + string_start_loc := Location.curr lexbuf; + store_lexeme lexbuf; + is_in_string := true; + begin try quoted_string delim lexbuf + with Error (Unterminated_string, str_start) -> + match !comment_start_loc with + | [] -> assert false + | loc :: _ -> + let start = List.hd (List.rev !comment_start_loc) in + comment_start_loc := []; + raise (Error (Unterminated_string_in_comment 
(start, str_start), + loc)) + end; + is_in_string := false; + store_string_char '|'; + store_string delim; + store_string_char '}'; + comment lexbuf ) +# 2562 "parsing/lexer.ml" + + | 4 -> +# 588 "parsing/lexer.mll" + ( store_lexeme lexbuf; comment lexbuf ) +# 2567 "parsing/lexer.ml" + + | 5 -> +# 590 "parsing/lexer.mll" + ( update_loc lexbuf None 1 false 1; + store_lexeme lexbuf; + comment lexbuf + ) +# 2575 "parsing/lexer.ml" + + | 6 -> +# 595 "parsing/lexer.mll" + ( store_lexeme lexbuf; comment lexbuf ) +# 2580 "parsing/lexer.ml" + + | 7 -> +# 597 "parsing/lexer.mll" + ( store_lexeme lexbuf; comment lexbuf ) +# 2585 "parsing/lexer.ml" + + | 8 -> +# 599 "parsing/lexer.mll" + ( store_lexeme lexbuf; comment lexbuf ) +# 2590 "parsing/lexer.ml" + + | 9 -> +# 601 "parsing/lexer.mll" + ( store_lexeme lexbuf; comment lexbuf ) +# 2595 "parsing/lexer.ml" + + | 10 -> +# 603 "parsing/lexer.mll" + ( match !comment_start_loc with + | [] -> assert false + | loc :: _ -> + let start = List.hd (List.rev !comment_start_loc) in + comment_start_loc := []; + raise (Error (Unterminated_comment start, loc)) + ) +# 2606 "parsing/lexer.ml" + + | 11 -> +# 611 "parsing/lexer.mll" + ( update_loc lexbuf None 1 false 0; + store_lexeme lexbuf; + comment lexbuf + ) +# 2614 "parsing/lexer.ml" + + | 12 -> +# 616 "parsing/lexer.mll" + ( store_lexeme lexbuf; comment lexbuf ) +# 2619 "parsing/lexer.ml" + + | __ocaml_lex_state -> lexbuf.Lexing.refill_buff lexbuf; + __ocaml_lex_comment_rec lexbuf __ocaml_lex_state + +and string lexbuf = + lexbuf.Lexing.lex_mem <- Array.make 2 (-1); __ocaml_lex_string_rec lexbuf 175 +and __ocaml_lex_string_rec lexbuf __ocaml_lex_state = + match Lexing.new_engine __ocaml_lex_tables __ocaml_lex_state lexbuf with + | 0 -> +# 620 "parsing/lexer.mll" + ( () ) +# 2631 "parsing/lexer.ml" + + | 1 -> +let +# 621 "parsing/lexer.mll" + space +# 2637 "parsing/lexer.ml" += Lexing.sub_lexeme lexbuf lexbuf.Lexing.lex_mem.(0) lexbuf.Lexing.lex_curr_pos in +# 622 "parsing/lexer.mll" + ( update_loc lexbuf None 1 false (String.length space); + if in_comment () then store_lexeme lexbuf; + string lexbuf + ) +# 2644 "parsing/lexer.ml" + + | 2 -> +# 627 "parsing/lexer.mll" + ( store_escaped_char lexbuf + (char_for_backslash(Lexing.lexeme_char lexbuf 1)); + string lexbuf ) +# 2651 "parsing/lexer.ml" + + | 3 -> +# 631 "parsing/lexer.mll" + ( store_escaped_char lexbuf (char_for_decimal_code lexbuf 1); + string lexbuf ) +# 2657 "parsing/lexer.ml" + + | 4 -> +# 634 "parsing/lexer.mll" + ( store_escaped_char lexbuf (char_for_octal_code lexbuf 2); + string lexbuf ) +# 2663 "parsing/lexer.ml" + + | 5 -> +# 637 "parsing/lexer.mll" + ( store_escaped_char lexbuf (char_for_hexadecimal_code lexbuf 2); + string lexbuf ) +# 2669 "parsing/lexer.ml" + + | 6 -> +# 640 "parsing/lexer.mll" + ( store_escaped_uchar lexbuf (uchar_for_uchar_escape lexbuf); + string lexbuf ) +# 2675 "parsing/lexer.ml" + + | 7 -> +# 643 "parsing/lexer.mll" + ( if not (in_comment ()) then begin +(* Should be an error, but we are very lax. 
+ raise (Error (Illegal_escape (Lexing.lexeme lexbuf), + Location.curr lexbuf)) +*) + let loc = Location.curr lexbuf in + Location.prerr_warning loc Warnings.Illegal_backslash; + end; + store_lexeme lexbuf; + string lexbuf + ) +# 2690 "parsing/lexer.ml" + + | 8 -> +# 655 "parsing/lexer.mll" + ( if not (in_comment ()) then + Location.prerr_warning (Location.curr lexbuf) Warnings.Eol_in_string; + update_loc lexbuf None 1 false 0; + store_lexeme lexbuf; + string lexbuf + ) +# 2700 "parsing/lexer.ml" + + | 9 -> +# 662 "parsing/lexer.mll" + ( is_in_string := false; + raise (Error (Unterminated_string, !string_start_loc)) ) +# 2706 "parsing/lexer.ml" + + | 10 -> +# 665 "parsing/lexer.mll" + ( store_string_char(Lexing.lexeme_char lexbuf 0); + string lexbuf ) +# 2712 "parsing/lexer.ml" + + | __ocaml_lex_state -> lexbuf.Lexing.refill_buff lexbuf; + __ocaml_lex_string_rec lexbuf __ocaml_lex_state + +and quoted_string delim lexbuf = + __ocaml_lex_quoted_string_rec delim lexbuf 202 +and __ocaml_lex_quoted_string_rec delim lexbuf __ocaml_lex_state = + match Lexing.engine __ocaml_lex_tables __ocaml_lex_state lexbuf with + | 0 -> +# 670 "parsing/lexer.mll" + ( update_loc lexbuf None 1 false 0; + store_lexeme lexbuf; + quoted_string delim lexbuf + ) +# 2727 "parsing/lexer.ml" + + | 1 -> +# 675 "parsing/lexer.mll" + ( is_in_string := false; + raise (Error (Unterminated_string, !string_start_loc)) ) +# 2733 "parsing/lexer.ml" + + | 2 -> +# 678 "parsing/lexer.mll" + ( + let edelim = Lexing.lexeme lexbuf in + let edelim = String.sub edelim 1 (String.length edelim - 2) in + if delim = edelim then () + else (store_lexeme lexbuf; quoted_string delim lexbuf) + ) +# 2743 "parsing/lexer.ml" + + | 3 -> +# 685 "parsing/lexer.mll" + ( store_string_char(Lexing.lexeme_char lexbuf 0); + quoted_string delim lexbuf ) +# 2749 "parsing/lexer.ml" + + | __ocaml_lex_state -> lexbuf.Lexing.refill_buff lexbuf; + __ocaml_lex_quoted_string_rec delim lexbuf __ocaml_lex_state + +and skip_hash_bang lexbuf = + __ocaml_lex_skip_hash_bang_rec lexbuf 211 +and __ocaml_lex_skip_hash_bang_rec lexbuf __ocaml_lex_state = + match Lexing.engine __ocaml_lex_tables __ocaml_lex_state lexbuf with + | 0 -> +# 690 "parsing/lexer.mll" + ( update_loc lexbuf None 3 false 0 ) +# 2761 "parsing/lexer.ml" + + | 1 -> +# 692 "parsing/lexer.mll" + ( update_loc lexbuf None 1 false 0 ) +# 2766 "parsing/lexer.ml" + + | 2 -> +# 693 "parsing/lexer.mll" + ( () ) +# 2771 "parsing/lexer.ml" + + | __ocaml_lex_state -> lexbuf.Lexing.refill_buff lexbuf; + __ocaml_lex_skip_hash_bang_rec lexbuf __ocaml_lex_state + +;; + +# 695 "parsing/lexer.mll" + + + let token_with_comments lexbuf = + match !preprocessor with + | None -> token lexbuf + | Some (_init, preprocess) -> preprocess token lexbuf + + type newline_state = + | NoLine (* There have been no blank lines yet. *) + | NewLine + (* There have been no blank lines, and the previous + token was a newline. *) + | BlankLine (* There have been blank lines. 
*) + + type doc_state = + | Initial (* There have been no docstrings yet *) + | After of docstring list + (* There have been docstrings, none of which were + preceded by a blank line *) + | Before of docstring list * docstring list * docstring list + (* There have been docstrings, some of which were + preceded by a blank line *) + + and docstring = Docstrings.docstring + + let token lexbuf = + let post_pos = lexeme_end_p lexbuf in + let attach lines docs pre_pos = + let open Docstrings in + match docs, lines with + | Initial, _ -> () + | After a, (NoLine | NewLine) -> + set_post_docstrings post_pos (List.rev a); + set_pre_docstrings pre_pos a; + | After a, BlankLine -> + set_post_docstrings post_pos (List.rev a); + set_pre_extra_docstrings pre_pos (List.rev a) + | Before(a, f, b), (NoLine | NewLine) -> + set_post_docstrings post_pos (List.rev a); + set_post_extra_docstrings post_pos + (List.rev_append f (List.rev b)); + set_floating_docstrings pre_pos (List.rev f); + set_pre_extra_docstrings pre_pos (List.rev a); + set_pre_docstrings pre_pos b + | Before(a, f, b), BlankLine -> + set_post_docstrings post_pos (List.rev a); + set_post_extra_docstrings post_pos + (List.rev_append f (List.rev b)); + set_floating_docstrings pre_pos + (List.rev_append f (List.rev b)); + set_pre_extra_docstrings pre_pos (List.rev a) + in + let rec loop lines docs lexbuf = + match token_with_comments lexbuf with + | COMMENT (s, loc) -> + add_comment (s, loc); + let lines' = + match lines with + | NoLine -> NoLine + | NewLine -> NoLine + | BlankLine -> BlankLine + in + loop lines' docs lexbuf + | EOL -> + let lines' = + match lines with + | NoLine -> NewLine + | NewLine -> BlankLine + | BlankLine -> BlankLine + in + loop lines' docs lexbuf + | DOCSTRING doc -> + Docstrings.register doc; + add_docstring_comment doc; + let docs' = + if Docstrings.docstring_body doc = "/*" then + match docs with + | Initial -> Before([], [doc], []) + | After a -> Before (a, [doc], []) + | Before(a, f, b) -> Before(a, doc :: b @ f, []) + else + match docs, lines with + | Initial, (NoLine | NewLine) -> After [doc] + | Initial, BlankLine -> Before([], [], [doc]) + | After a, (NoLine | NewLine) -> After (doc :: a) + | After a, BlankLine -> Before (a, [], [doc]) + | Before(a, f, b), (NoLine | NewLine) -> Before(a, f, doc :: b) + | Before(a, f, b), BlankLine -> Before(a, b @ f, [doc]) + in + loop NoLine docs' lexbuf + | tok -> + attach lines docs (lexeme_start_p lexbuf); + tok + in + loop NoLine Initial lexbuf + + let init () = + is_in_string := false; + comment_start_loc := []; + comment_list := []; + match !preprocessor with + | None -> () + | Some (init, _preprocess) -> init () + + let set_preprocessor init preprocess = + escaped_newlines := true; + preprocessor := Some (init, preprocess) + + +# 2888 "parsing/lexer.ml" diff --git a/res_syntax/compiler-libs-406/lexer.mli b/res_syntax/compiler-libs-406/lexer.mli new file mode 100644 index 0000000000..63617b48d4 --- /dev/null +++ b/res_syntax/compiler-libs-406/lexer.mli @@ -0,0 +1,63 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +(* The lexical analyzer *) + +val init : unit -> unit +val token: Lexing.lexbuf -> Parser.token +val skip_hash_bang: Lexing.lexbuf -> unit + +type error = + | Illegal_character of char + | Illegal_escape of string + | Unterminated_comment of Location.t + | Unterminated_string + | Unterminated_string_in_comment of Location.t * Location.t + | Keyword_as_label of string + | Invalid_literal of string + | Invalid_directive of string * string option +;; + +exception Error of error * Location.t + +open Format + +val report_error: formatter -> error -> unit + (* Deprecated. Use Location.{error_of_exn, report_error}. *) + +val in_comment : unit -> bool;; +val in_string : unit -> bool;; + + +val print_warnings : bool ref +val handle_docstrings: bool ref +val comments : unit -> (string * Location.t) list +val token_with_comments : Lexing.lexbuf -> Parser.token + +(* + [set_preprocessor init preprocessor] registers [init] as the function +to call to initialize the preprocessor when the lexer is initialized, +and [preprocessor] a function that is called when a new token is needed +by the parser, as [preprocessor lexer lexbuf] where [lexer] is the +lexing function. + +When a preprocessor is configured by calling [set_preprocessor], the lexer +changes its behavior to accept backslash-newline as a token-separating blank. +*) + +val set_preprocessor : + (unit -> unit) -> + ((Lexing.lexbuf -> Parser.token) -> Lexing.lexbuf -> Parser.token) -> + unit diff --git a/res_syntax/compiler-libs-406/lexing.ml b/res_syntax/compiler-libs-406/lexing.ml new file mode 100644 index 0000000000..2c2633d43e --- /dev/null +++ b/res_syntax/compiler-libs-406/lexing.ml @@ -0,0 +1,231 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +(* The run-time library for lexers generated by camllex *) + +type position = { + pos_fname : string; + pos_lnum : int; + pos_bol : int; + pos_cnum : int; +} + +let dummy_pos = { + pos_fname = ""; + pos_lnum = 0; + pos_bol = 0; + pos_cnum = -1; +} + +type lexbuf = + { refill_buff : lexbuf -> unit; + mutable lex_buffer : bytes; + mutable lex_buffer_len : int; + mutable lex_abs_pos : int; + mutable lex_start_pos : int; + mutable lex_curr_pos : int; + mutable lex_last_pos : int; + mutable lex_last_action : int; + mutable lex_eof_reached : bool; + mutable lex_mem : int array; + mutable lex_start_p : position; + mutable lex_curr_p : position; + } + +type lex_tables = + { lex_base: string; + lex_backtrk: string; + lex_default: string; + lex_trans: string; + lex_check: string; + lex_base_code : string; + lex_backtrk_code : string; + lex_default_code : string; + lex_trans_code : string; + lex_check_code : string; + lex_code: string;} + +external c_engine : lex_tables -> int -> lexbuf -> int = "caml_lex_engine" +external c_new_engine : lex_tables -> int -> lexbuf -> int + = "caml_new_lex_engine" + +let engine tbl state buf = + let result = c_engine tbl state buf in + if result >= 0 then begin + buf.lex_start_p <- buf.lex_curr_p; + buf.lex_curr_p <- {buf.lex_curr_p + with pos_cnum = buf.lex_abs_pos + buf.lex_curr_pos}; + end; + result + + +let new_engine tbl state buf = + let result = c_new_engine tbl state buf in + if result >= 0 then begin + buf.lex_start_p <- buf.lex_curr_p; + buf.lex_curr_p <- {buf.lex_curr_p + with pos_cnum = buf.lex_abs_pos + buf.lex_curr_pos}; + end; + result + + +let lex_refill read_fun aux_buffer lexbuf = + let read = + read_fun aux_buffer (Bytes.length aux_buffer) in + let n = + if read > 0 + then read + else (lexbuf.lex_eof_reached <- true; 0) in + (* Current state of the buffer: + <-------|---------------------|-----------> + | junk | valid data | junk | + ^ ^ ^ ^ + 0 start_pos buffer_end Bytes.length buffer + *) + if lexbuf.lex_buffer_len + n > Bytes.length lexbuf.lex_buffer then begin + (* There is not enough space at the end of the buffer *) + if lexbuf.lex_buffer_len - lexbuf.lex_start_pos + n + <= Bytes.length lexbuf.lex_buffer + then begin + (* But there is enough space if we reclaim the junk at the beginning + of the buffer *) + Bytes.blit lexbuf.lex_buffer lexbuf.lex_start_pos + lexbuf.lex_buffer 0 + (lexbuf.lex_buffer_len - lexbuf.lex_start_pos) + end else begin + (* We must grow the buffer. Doubling its size will provide enough + space since n <= String.length aux_buffer <= String.length buffer. + Watch out for string length overflow, though. 
*) + let newlen = + min (2 * Bytes.length lexbuf.lex_buffer) Sys.max_string_length in + if lexbuf.lex_buffer_len - lexbuf.lex_start_pos + n > newlen + then failwith "Lexing.lex_refill: cannot grow buffer"; + let newbuf = Bytes.create newlen in + (* Copy the valid data to the beginning of the new buffer *) + Bytes.blit lexbuf.lex_buffer lexbuf.lex_start_pos + newbuf 0 + (lexbuf.lex_buffer_len - lexbuf.lex_start_pos); + lexbuf.lex_buffer <- newbuf + end; + (* Reallocation or not, we have shifted the data left by + start_pos characters; update the positions *) + let s = lexbuf.lex_start_pos in + lexbuf.lex_abs_pos <- lexbuf.lex_abs_pos + s; + lexbuf.lex_curr_pos <- lexbuf.lex_curr_pos - s; + lexbuf.lex_start_pos <- 0; + lexbuf.lex_last_pos <- lexbuf.lex_last_pos - s; + lexbuf.lex_buffer_len <- lexbuf.lex_buffer_len - s ; + let t = lexbuf.lex_mem in + for i = 0 to Array.length t-1 do + let v = t.(i) in + if v >= 0 then + t.(i) <- v-s + done + end; + (* There is now enough space at the end of the buffer *) + Bytes.blit aux_buffer 0 lexbuf.lex_buffer lexbuf.lex_buffer_len n; + lexbuf.lex_buffer_len <- lexbuf.lex_buffer_len + n + +let zero_pos = { + pos_fname = ""; + pos_lnum = 1; + pos_bol = 0; + pos_cnum = 0; +} + +let from_function f = + { refill_buff = lex_refill f (Bytes.create 512); + lex_buffer = Bytes.create 1024; + lex_buffer_len = 0; + lex_abs_pos = 0; + lex_start_pos = 0; + lex_curr_pos = 0; + lex_last_pos = 0; + lex_last_action = 0; + lex_mem = [||]; + lex_eof_reached = false; + lex_start_p = zero_pos; + lex_curr_p = zero_pos; + } + +let from_channel ic = + from_function (fun buf n -> input ic buf 0 n) + +let from_string s = + { refill_buff = (fun lexbuf -> lexbuf.lex_eof_reached <- true); + lex_buffer = Bytes.of_string s; (* have to make a copy for compatibility + with unsafe-string mode *) + lex_buffer_len = String.length s; + lex_abs_pos = 0; + lex_start_pos = 0; + lex_curr_pos = 0; + lex_last_pos = 0; + lex_last_action = 0; + lex_mem = [||]; + lex_eof_reached = true; + lex_start_p = zero_pos; + lex_curr_p = zero_pos; + } + +let lexeme lexbuf = + let len = lexbuf.lex_curr_pos - lexbuf.lex_start_pos in + Bytes.sub_string lexbuf.lex_buffer lexbuf.lex_start_pos len + +let sub_lexeme lexbuf i1 i2 = + let len = i2-i1 in + Bytes.sub_string lexbuf.lex_buffer i1 len + +let sub_lexeme_opt lexbuf i1 i2 = + if i1 >= 0 then begin + let len = i2-i1 in + Some (Bytes.sub_string lexbuf.lex_buffer i1 len) + end else begin + None + end + +let sub_lexeme_char lexbuf i = Bytes.get lexbuf.lex_buffer i + +let sub_lexeme_char_opt lexbuf i = + if i >= 0 then + Some (Bytes.get lexbuf.lex_buffer i) + else + None + + +let lexeme_char lexbuf i = + Bytes.get lexbuf.lex_buffer (lexbuf.lex_start_pos + i) + +let lexeme_start lexbuf = lexbuf.lex_start_p.pos_cnum +let lexeme_end lexbuf = lexbuf.lex_curr_p.pos_cnum + +let lexeme_start_p lexbuf = lexbuf.lex_start_p +let lexeme_end_p lexbuf = lexbuf.lex_curr_p + +let new_line lexbuf = + let lcp = lexbuf.lex_curr_p in + lexbuf.lex_curr_p <- { lcp with + pos_lnum = lcp.pos_lnum + 1; + pos_bol = lcp.pos_cnum; + } + + + +(* Discard data left in lexer buffer. 
*) + +let flush_input lb = + lb.lex_curr_pos <- 0; + lb.lex_abs_pos <- 0; + lb.lex_curr_p <- {lb.lex_curr_p with pos_cnum = 0}; + lb.lex_buffer_len <- 0; diff --git a/res_syntax/compiler-libs-406/lexing.mli b/res_syntax/compiler-libs-406/lexing.mli new file mode 100644 index 0000000000..397d20cbb1 --- /dev/null +++ b/res_syntax/compiler-libs-406/lexing.mli @@ -0,0 +1,176 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** The run-time library for lexers generated by [ocamllex]. *) + +(** {1 Positions} *) + +type position = { + pos_fname : string; + pos_lnum : int; + pos_bol : int; + pos_cnum : int; +} +(** A value of type [position] describes a point in a source file. + [pos_fname] is the file name; [pos_lnum] is the line number; + [pos_bol] is the offset of the beginning of the line (number + of characters between the beginning of the lexbuf and the beginning + of the line); [pos_cnum] is the offset of the position (number of + characters between the beginning of the lexbuf and the position). + The difference between [pos_cnum] and [pos_bol] is the character + offset within the line (i.e. the column number, assuming each + character is one column wide). + + See the documentation of type [lexbuf] for information about + how the lexing engine will manage positions. + *) + +val dummy_pos : position +(** A value of type [position], guaranteed to be different from any + valid position. + *) + + +(** {1 Lexer buffers} *) + + +type lexbuf = + { refill_buff : lexbuf -> unit; + mutable lex_buffer : bytes; + mutable lex_buffer_len : int; + mutable lex_abs_pos : int; + mutable lex_start_pos : int; + mutable lex_curr_pos : int; + mutable lex_last_pos : int; + mutable lex_last_action : int; + mutable lex_eof_reached : bool; + mutable lex_mem : int array; + mutable lex_start_p : position; + mutable lex_curr_p : position; + } +(** The type of lexer buffers. A lexer buffer is the argument passed + to the scanning functions defined by the generated scanners. + The lexer buffer holds the current state of the scanner, plus + a function to refill the buffer from the input. + + At each token, the lexing engine will copy [lex_curr_p] to + [lex_start_p], then change the [pos_cnum] field + of [lex_curr_p] by updating it with the number of characters read + since the start of the [lexbuf]. The other fields are left + unchanged by the lexing engine. In order to keep them + accurate, they must be initialised before the first use of the + lexbuf, and updated by the relevant lexer actions (i.e. at each + end of line -- see also [new_line]). + *) + +val from_channel : in_channel -> lexbuf +(** Create a lexer buffer on the given input channel. + [Lexing.from_channel inchan] returns a lexer buffer which reads + from the input channel [inchan], at the current reading position. *) + +val from_string : string -> lexbuf +(** Create a lexer buffer which reads from + the given string. Reading starts from the first character in + the string. 
An end-of-input condition is generated when the + end of the string is reached. *) + +val from_function : (bytes -> int -> int) -> lexbuf +(** Create a lexer buffer with the given function as its reading method. + When the scanner needs more characters, it will call the given + function, giving it a byte sequence [s] and a byte + count [n]. The function should put [n] bytes or fewer in [s], + starting at index 0, and return the number of bytes + provided. A return value of 0 means end of input. *) + + +(** {1 Functions for lexer semantic actions} *) + + +(** The following functions can be called from the semantic actions + of lexer definitions (the ML code enclosed in braces that + computes the value returned by lexing functions). They give + access to the character string matched by the regular expression + associated with the semantic action. These functions must be + applied to the argument [lexbuf], which, in the code generated by + [ocamllex], is bound to the lexer buffer passed to the parsing + function. *) + +val lexeme : lexbuf -> string +(** [Lexing.lexeme lexbuf] returns the string matched by + the regular expression. *) + +val lexeme_char : lexbuf -> int -> char +(** [Lexing.lexeme_char lexbuf i] returns character number [i] in + the matched string. *) + +val lexeme_start : lexbuf -> int +(** [Lexing.lexeme_start lexbuf] returns the offset in the + input stream of the first character of the matched string. + The first character of the stream has offset 0. *) + +val lexeme_end : lexbuf -> int +(** [Lexing.lexeme_end lexbuf] returns the offset in the input stream + of the character following the last character of the matched + string. The first character of the stream has offset 0. *) + +val lexeme_start_p : lexbuf -> position +(** Like [lexeme_start], but return a complete [position] instead + of an offset. *) + +val lexeme_end_p : lexbuf -> position +(** Like [lexeme_end], but return a complete [position] instead + of an offset. *) + +val new_line : lexbuf -> unit +(** Update the [lex_curr_p] field of the lexbuf to reflect the start + of a new line. You can call this function in the semantic action + of the rule that matches the end-of-line character. + @since 3.11.0 +*) + +(** {1 Miscellaneous functions} *) + +val flush_input : lexbuf -> unit +(** Discard the contents of the buffer and reset the current + position to 0. The next use of the lexbuf will trigger a + refill. *) + +(**/**) + +(** {1 } *) + +(** The following definitions are used by the generated scanners only. + They are not intended to be used directly by user programs. 
*) + +val sub_lexeme : lexbuf -> int -> int -> string +val sub_lexeme_opt : lexbuf -> int -> int -> string option +val sub_lexeme_char : lexbuf -> int -> char +val sub_lexeme_char_opt : lexbuf -> int -> char option + +type lex_tables = + { lex_base : string; + lex_backtrk : string; + lex_default : string; + lex_trans : string; + lex_check : string; + lex_base_code : string; + lex_backtrk_code : string; + lex_default_code : string; + lex_trans_code : string; + lex_check_code : string; + lex_code: string;} + +val engine : lex_tables -> int -> lexbuf -> int +val new_engine : lex_tables -> int -> lexbuf -> int diff --git a/res_syntax/compiler-libs-406/location.ml b/res_syntax/compiler-libs-406/location.ml new file mode 100644 index 0000000000..9578b071a0 --- /dev/null +++ b/res_syntax/compiler-libs-406/location.ml @@ -0,0 +1,330 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +open Lexing + +let absname = ref false + (* This reference should be in Clflags, but it would create an additional + dependency and make bootstrapping Camlp4 more difficult. *) + +type t = Warnings.loc = { loc_start: position; loc_end: position; loc_ghost: bool };; + +let in_file name = + let loc = { + pos_fname = name; + pos_lnum = 1; + pos_bol = 0; + pos_cnum = -1; + } in + { loc_start = loc; loc_end = loc; loc_ghost = true } +;; + +let none = in_file "_none_";; + +let curr lexbuf = { + loc_start = lexbuf.lex_start_p; + loc_end = lexbuf.lex_curr_p; + loc_ghost = false +};; + +let init lexbuf fname = + lexbuf.lex_curr_p <- { + pos_fname = fname; + pos_lnum = 1; + pos_bol = 0; + pos_cnum = 0; + } +;; + +let symbol_rloc () = { + loc_start = Parsing.symbol_start_pos (); + loc_end = Parsing.symbol_end_pos (); + loc_ghost = false; +};; + +let symbol_gloc () = { + loc_start = Parsing.symbol_start_pos (); + loc_end = Parsing.symbol_end_pos (); + loc_ghost = true; +};; + +let rhs_loc n = { + loc_start = Parsing.rhs_start_pos n; + loc_end = Parsing.rhs_end_pos n; + loc_ghost = false; +};; + +let input_name = ref "_none_" +let input_lexbuf = ref (None : lexbuf option) +let set_input_name name = + if name <> "" then input_name := name +(* Terminal info *) + + + +let num_loc_lines = ref 0 (* number of lines already printed after input *) + +(* Print the location in some way or another *) + +open Format + +let absolute_path s = (* This function could go into Filename *) + let open Filename in + let s = if is_relative s then concat (Sys.getcwd ()) s else s in + (* Now simplify . and .. 
components *) + let rec aux s = + let base = basename s in + let dir = dirname s in + if dir = s then dir + else if base = current_dir_name then aux dir + else if base = parent_dir_name then dirname (aux dir) + else concat (aux dir) base + in + aux s + +let show_filename file = + let file = if file = "_none_" then !input_name else file in + if !absname then absolute_path file else file + +let print_filename ppf file = + Format.fprintf ppf "%s" (show_filename file) + +let reset () = + num_loc_lines := 0 + +let (msg_file, msg_line, msg_chars, msg_to, msg_colon) = + ("File \"", "\", line ", ", characters ", "-", ":") + +(* return file, line, char from the given position *) +let get_pos_info pos = + (pos.pos_fname, pos.pos_lnum, pos.pos_cnum - pos.pos_bol) +;; + +let setup_colors () = + Misc.Color.setup !Clflags.color + +let print_loc ppf loc = + setup_colors (); + let (file, line, startchar) = get_pos_info loc.loc_start in + let startchar = startchar + 1 in + let endchar = loc.loc_end.pos_cnum - loc.loc_start.pos_cnum + startchar in + if file = "//toplevel//" then begin + fprintf ppf "Characters %i-%i" + loc.loc_start.pos_cnum loc.loc_end.pos_cnum + end else begin + fprintf ppf "%s@{%a%s%i" msg_file print_filename file msg_line line; + if startchar >= 0 then + fprintf ppf "%s%i%s%i" msg_chars startchar msg_to endchar; + fprintf ppf "@}" + end +;; + +let default_printer ppf loc = + setup_colors (); + fprintf ppf "@{%a@}%s@," print_loc loc msg_colon +;; + +let printer = ref default_printer +let print ppf loc = !printer ppf loc + +let error_prefix = "Error" +let warning_prefix = "Warning" + +let print_error_prefix ppf = + setup_colors (); + fprintf ppf "@{%s@}" error_prefix; +;; + +let print_compact ppf loc = + begin + let (file, line, startchar) = get_pos_info loc.loc_start in + let endchar = loc.loc_end.pos_cnum - loc.loc_start.pos_cnum + startchar in + fprintf ppf "%a:%i" print_filename file line; + if startchar >= 0 then fprintf ppf ",%i--%i" startchar endchar + end +;; + +let print_error ppf loc = + fprintf ppf "%a%t:" print loc print_error_prefix; +;; + +let print_error_cur_file ppf () = print_error ppf (in_file !input_name);; + +let default_warning_printer loc ppf w = + match Warnings.report w with + | `Inactive -> () + | `Active { Warnings. 
number; message; is_error; sub_locs } -> + setup_colors (); + fprintf ppf "@["; + print ppf loc; + if is_error + then + fprintf ppf "%t (%s %d): %s@," print_error_prefix + (String.uncapitalize_ascii warning_prefix) number message + else fprintf ppf "@{%s@} %d: %s@," warning_prefix number message; + List.iter + (fun (loc, msg) -> + if loc <> none then fprintf ppf " %a %s@," print loc msg + ) + sub_locs; + fprintf ppf "@]" +;; + +let warning_printer = ref default_warning_printer ;; + +let print_warning loc ppf w = + !warning_printer loc ppf w +;; + +let formatter_for_warnings = ref err_formatter;; +let prerr_warning loc w = + print_warning loc !formatter_for_warnings w;; + +let echo_eof () = + print_newline (); + incr num_loc_lines + +type 'a loc = { + txt : 'a; + loc : t; +} + +let mkloc txt loc = { txt ; loc } +let mknoloc txt = mkloc txt none + + +type error = + { + loc: t; + msg: string; + sub: error list; + if_highlight: string; (* alternative message if locations are highlighted *) + } + +let pp_ksprintf ?before k fmt = + let buf = Buffer.create 64 in + let ppf = Format.formatter_of_buffer buf in + Misc.Color.set_color_tag_handling ppf; + begin match before with + | None -> () + | Some f -> f ppf + end; + kfprintf + (fun _ -> + pp_print_flush ppf (); + let msg = Buffer.contents buf in + k msg) + ppf fmt + +(* Shift the formatter's offset by the length of the error prefix, which + is always added by the compiler after the message has been formatted *) +let print_phanton_error_prefix ppf = + Format.pp_print_as ppf (String.length error_prefix + 2 (* ": " *)) "" + +let errorf ?(loc = none) ?(sub = []) ?(if_highlight = "") fmt = + pp_ksprintf + ~before:print_phanton_error_prefix + (fun msg -> {loc; msg; sub; if_highlight}) + fmt + +let error ?(loc = none) ?(sub = []) ?(if_highlight = "") msg = + {loc; msg; sub; if_highlight} + +let error_of_exn : (exn -> error option) list ref = ref [] + +let register_error_of_exn f = error_of_exn := f :: !error_of_exn + +exception Already_displayed_error = Warnings.Errors + +let error_of_exn exn = + match exn with + | Already_displayed_error -> Some `Already_displayed + | _ -> + let rec loop = function + | [] -> None + | f :: rest -> + match f exn with + | Some error -> Some (`Ok error) + | None -> loop rest + in + loop !error_of_exn + + +let rec default_error_reporter ppf ({loc; msg; sub}) = + fprintf ppf "@[%a %s" print_error loc msg; + List.iter (Format.fprintf ppf "@,@[<2>%a@]" default_error_reporter) sub; + fprintf ppf "@]" + +let error_reporter = ref default_error_reporter + +let report_error ppf err = + !error_reporter ppf err +;; + +let error_of_printer loc print x = + errorf ~loc "%a@?" print x + +let error_of_printer_file print x = + error_of_printer (in_file !input_name) print x + +let () = + register_error_of_exn + (function + | Sys_error msg -> + Some (errorf ~loc:(in_file !input_name) + "I/O error: %s" msg) + + | Misc.HookExnWrapper {error = e; hook_name; + hook_info={Misc.sourcefile}} -> + let sub = match error_of_exn e with + | None | Some `Already_displayed -> error (Printexc.to_string e) + | Some (`Ok err) -> err + in + Some + (errorf ~loc:(in_file sourcefile) + "In hook %S:" hook_name + ~sub:[sub]) + | _ -> None + ) + +external reraise : exn -> 'a = "%reraise" + +let rec report_exception_rec n ppf exn = + try + match error_of_exn exn with + | None -> reraise exn + | Some `Already_displayed -> () + | Some (`Ok err) -> fprintf ppf "@[%a@]@." 
report_error err + with exn when n > 0 -> report_exception_rec (n-1) ppf exn + +let report_exception ppf exn = report_exception_rec 5 ppf exn + + +exception Error of error + +let () = + register_error_of_exn + (function + | Error e -> Some e + | _ -> None + ) + +let raise_errorf ?(loc = none) ?(sub = []) ?(if_highlight = "") = + pp_ksprintf + ~before:print_phanton_error_prefix + (fun msg -> raise (Error ({loc; msg; sub; if_highlight}))) + +let deprecated ?(def = none) ?(use = none) loc msg = + prerr_warning loc (Warnings.Deprecated (msg, def, use)) \ No newline at end of file diff --git a/res_syntax/compiler-libs-406/longident.ml b/res_syntax/compiler-libs-406/longident.ml new file mode 100644 index 0000000000..6f5d539836 --- /dev/null +++ b/res_syntax/compiler-libs-406/longident.ml @@ -0,0 +1,49 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +type t = + Lident of string + | Ldot of t * string + | Lapply of t * t + +let rec flat accu = function + Lident s -> s :: accu + | Ldot(lid, s) -> flat (s :: accu) lid + | Lapply(_, _) -> Misc.fatal_error "Longident.flat" + +let flatten lid = flat [] lid + +let last = function + Lident s -> s + | Ldot(_, s) -> s + | Lapply(_, _) -> Misc.fatal_error "Longident.last" + +let rec split_at_dots s pos = + try + let dot = String.index_from s pos '.' in + String.sub s pos (dot - pos) :: split_at_dots s (dot + 1) + with Not_found -> + [String.sub s pos (String.length s - pos)] + +let unflatten l = + match l with + | [] -> None + | hd :: tl -> Some (List.fold_left (fun p s -> Ldot(p, s)) (Lident hd) tl) + +let parse s = + match unflatten (split_at_dots s 0) with + | None -> Lident "" (* should not happen, but don't put assert false + so as not to crash the toplevel (see Genprintval) *) + | Some v -> v diff --git a/res_syntax/compiler-libs-406/longident.mli b/res_syntax/compiler-libs-406/longident.mli new file mode 100644 index 0000000000..5ffb16a812 --- /dev/null +++ b/res_syntax/compiler-libs-406/longident.mli @@ -0,0 +1,26 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Long identifiers, used in parsetree. 
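For instance (an illustrative example, not from the upstream comment), the dotted path [Foo.Bar.baz] is represented as [Ldot (Ldot (Lident "Foo", "Bar"), "baz")]; [parse "Foo.Bar.baz"] builds exactly that value, and [flatten] turns it back into [["Foo"; "Bar"; "baz"]].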
*) + +type t = + Lident of string + | Ldot of t * string + | Lapply of t * t + +val flatten: t -> string list +val unflatten: string list -> t option +val last: t -> string +val parse: string -> t diff --git a/res_syntax/compiler-libs-406/map.ml b/res_syntax/compiler-libs-406/map.ml new file mode 100644 index 0000000000..7d096dfb3c --- /dev/null +++ b/res_syntax/compiler-libs-406/map.ml @@ -0,0 +1,480 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +module type OrderedType = + sig + type t + val compare: t -> t -> int + end + +module type S = + sig + type key + type +'a t + val empty: 'a t + val is_empty: 'a t -> bool + val mem: key -> 'a t -> bool + val add: key -> 'a -> 'a t -> 'a t + val update: key -> ('a option -> 'a option) -> 'a t -> 'a t + val singleton: key -> 'a -> 'a t + val remove: key -> 'a t -> 'a t + val merge: + (key -> 'a option -> 'b option -> 'c option) -> 'a t -> 'b t -> 'c t + val union: (key -> 'a -> 'a -> 'a option) -> 'a t -> 'a t -> 'a t + val compare: ('a -> 'a -> int) -> 'a t -> 'a t -> int + val equal: ('a -> 'a -> bool) -> 'a t -> 'a t -> bool + val iter: (key -> 'a -> unit) -> 'a t -> unit + val fold: (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b + val for_all: (key -> 'a -> bool) -> 'a t -> bool + val exists: (key -> 'a -> bool) -> 'a t -> bool + val filter: (key -> 'a -> bool) -> 'a t -> 'a t + val partition: (key -> 'a -> bool) -> 'a t -> 'a t * 'a t + val cardinal: 'a t -> int + val bindings: 'a t -> (key * 'a) list + val min_binding: 'a t -> (key * 'a) + val min_binding_opt: 'a t -> (key * 'a) option + val max_binding: 'a t -> (key * 'a) + val max_binding_opt: 'a t -> (key * 'a) option + val choose: 'a t -> (key * 'a) + val choose_opt: 'a t -> (key * 'a) option + val split: key -> 'a t -> 'a t * 'a option * 'a t + val find: key -> 'a t -> 'a + val find_opt: key -> 'a t -> 'a option + val find_first: (key -> bool) -> 'a t -> key * 'a + val find_first_opt: (key -> bool) -> 'a t -> (key * 'a) option + val find_last: (key -> bool) -> 'a t -> key * 'a + val find_last_opt: (key -> bool) -> 'a t -> (key * 'a) option + val map: ('a -> 'b) -> 'a t -> 'b t + val mapi: (key -> 'a -> 'b) -> 'a t -> 'b t + end + +module Make(Ord: OrderedType) = struct + + type key = Ord.t + + type 'a t = + Empty + | Node of {l:'a t; v:key; d:'a; r:'a t; h:int} + + let height = function + Empty -> 0 + | Node {h} -> h + + let create l x d r = + let hl = height l and hr = height r in + Node{l; v=x; d; r; h=(if hl >= hr then hl + 1 else hr + 1)} + + let singleton x d = Node{l=Empty; v=x; d; r=Empty; h=1} + + let bal l x d r = + let hl = match l with Empty -> 0 | Node {h} -> h in + let hr = match r with Empty -> 0 | Node {h} -> h in + if hl > hr + 2 then begin + match l with + Empty -> invalid_arg "Map.bal" + | Node{l=ll; v=lv; d=ld; r=lr} -> + if height ll >= height lr then + create ll lv ld (create lr x d r) + else begin + match lr with + Empty -> invalid_arg "Map.bal" + | Node{l=lrl; v=lrv; d=lrd; r=lrr}-> + create (create ll lv ld lrl) lrv lrd (create lrr x d r) + end + end else 
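(* symmetric case: the right subtree is more than two levels taller, so rebalance with a left rotation (or a right-left double rotation when [rl] is the taller child); descriptive note, not in the original source *)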
if hr > hl + 2 then begin + match r with + Empty -> invalid_arg "Map.bal" + | Node{l=rl; v=rv; d=rd; r=rr} -> + if height rr >= height rl then + create (create l x d rl) rv rd rr + else begin + match rl with + Empty -> invalid_arg "Map.bal" + | Node{l=rll; v=rlv; d=rld; r=rlr} -> + create (create l x d rll) rlv rld (create rlr rv rd rr) + end + end else + Node{l; v=x; d; r; h=(if hl >= hr then hl + 1 else hr + 1)} + + let empty = Empty + + let is_empty = function Empty -> true | _ -> false + + let rec add x data = function + Empty -> + Node{l=Empty; v=x; d=data; r=Empty; h=1} + | Node {l; v; d; r; h} as m -> + let c = Ord.compare x v in + if c = 0 then + if d == data then m else Node{l; v=x; d=data; r; h} + else if c < 0 then + let ll = add x data l in + if l == ll then m else bal ll v d r + else + let rr = add x data r in + if r == rr then m else bal l v d rr + + let rec find x = function + Empty -> + raise Not_found + | Node {l; v; d; r} -> + let c = Ord.compare x v in + if c = 0 then d + else find x (if c < 0 then l else r) + + let rec find_first_aux v0 d0 f = function + Empty -> + (v0, d0) + | Node {l; v; d; r} -> + if f v then + find_first_aux v d f l + else + find_first_aux v0 d0 f r + + let rec find_first f = function + Empty -> + raise Not_found + | Node {l; v; d; r} -> + if f v then + find_first_aux v d f l + else + find_first f r + + let rec find_first_opt_aux v0 d0 f = function + Empty -> + Some (v0, d0) + | Node {l; v; d; r} -> + if f v then + find_first_opt_aux v d f l + else + find_first_opt_aux v0 d0 f r + + let rec find_first_opt f = function + Empty -> + None + | Node {l; v; d; r} -> + if f v then + find_first_opt_aux v d f l + else + find_first_opt f r + + let rec find_last_aux v0 d0 f = function + Empty -> + (v0, d0) + | Node {l; v; d; r} -> + if f v then + find_last_aux v d f r + else + find_last_aux v0 d0 f l + + let rec find_last f = function + Empty -> + raise Not_found + | Node {l; v; d; r} -> + if f v then + find_last_aux v d f r + else + find_last f l + + let rec find_last_opt_aux v0 d0 f = function + Empty -> + Some (v0, d0) + | Node {l; v; d; r} -> + if f v then + find_last_opt_aux v d f r + else + find_last_opt_aux v0 d0 f l + + let rec find_last_opt f = function + Empty -> + None + | Node {l; v; d; r} -> + if f v then + find_last_opt_aux v d f r + else + find_last_opt f l + + let rec find_opt x = function + Empty -> + None + | Node {l; v; d; r} -> + let c = Ord.compare x v in + if c = 0 then Some d + else find_opt x (if c < 0 then l else r) + + let rec mem x = function + Empty -> + false + | Node {l; v; r} -> + let c = Ord.compare x v in + c = 0 || mem x (if c < 0 then l else r) + + let rec min_binding = function + Empty -> raise Not_found + | Node {l=Empty; v; d} -> (v, d) + | Node {l} -> min_binding l + + let rec min_binding_opt = function + Empty -> None + | Node {l=Empty; v; d} -> Some (v, d) + | Node {l}-> min_binding_opt l + + let rec max_binding = function + Empty -> raise Not_found + | Node {v; d; r=Empty} -> (v, d) + | Node {r} -> max_binding r + + let rec max_binding_opt = function + Empty -> None + | Node {v; d; r=Empty} -> Some (v, d) + | Node {r} -> max_binding_opt r + + let rec remove_min_binding = function + Empty -> invalid_arg "Map.remove_min_elt" + | Node {l=Empty; r} -> r + | Node {l; v; d; r} -> bal (remove_min_binding l) v d r + + let merge t1 t2 = + match (t1, t2) with + (Empty, t) -> t + | (t, Empty) -> t + | (_, _) -> + let (x, d) = min_binding t2 in + bal t1 x d (remove_min_binding t2) + + let rec remove x = function + Empty -> + Empty 
+ | (Node {l; v; d; r} as m) -> + let c = Ord.compare x v in + if c = 0 then merge l r + else if c < 0 then + let ll = remove x l in if l == ll then m else bal ll v d r + else + let rr = remove x r in if r == rr then m else bal l v d rr + + let rec update x f = function + Empty -> + begin match f None with + | None -> Empty + | Some data -> Node{l=Empty; v=x; d=data; r=Empty; h=1} + end + | Node {l; v; d; r; h} as m -> + let c = Ord.compare x v in + if c = 0 then begin + match f (Some d) with + | None -> merge l r + | Some data -> + if d == data then m else Node{l; v=x; d=data; r; h} + end else if c < 0 then + let ll = update x f l in + if l == ll then m else bal ll v d r + else + let rr = update x f r in + if r == rr then m else bal l v d rr + + let rec iter f = function + Empty -> () + | Node {l; v; d; r} -> + iter f l; f v d; iter f r + + let rec map f = function + Empty -> + Empty + | Node {l; v; d; r; h} -> + let l' = map f l in + let d' = f d in + let r' = map f r in + Node{l=l'; v; d=d'; r=r'; h} + + let rec mapi f = function + Empty -> + Empty + | Node {l; v; d; r; h} -> + let l' = mapi f l in + let d' = f v d in + let r' = mapi f r in + Node{l=l'; v; d=d'; r=r'; h} + + let rec fold f m accu = + match m with + Empty -> accu + | Node {l; v; d; r} -> + fold f r (f v d (fold f l accu)) + + let rec for_all p = function + Empty -> true + | Node {l; v; d; r} -> p v d && for_all p l && for_all p r + + let rec exists p = function + Empty -> false + | Node {l; v; d; r} -> p v d || exists p l || exists p r + + (* Beware: those two functions assume that the added k is *strictly* + smaller (or bigger) than all the present keys in the tree; it + does not test for equality with the current min (or max) key. + + Indeed, they are only used during the "join" operation which + respects this precondition. + *) + + let rec add_min_binding k x = function + | Empty -> singleton k x + | Node {l; v; d; r} -> + bal (add_min_binding k x l) v d r + + let rec add_max_binding k x = function + | Empty -> singleton k x + | Node {l; v; d; r} -> + bal l v d (add_max_binding k x r) + + (* Same as create and bal, but no assumptions are made on the + relative heights of l and r. *) + + let rec join l v d r = + match (l, r) with + (Empty, _) -> add_min_binding v d r + | (_, Empty) -> add_max_binding v d l + | (Node{l=ll; v=lv; d=ld; r=lr; h=lh}, Node{l=rl; v=rv; d=rd; r=rr; h=rh}) -> + if lh > rh + 2 then bal ll lv ld (join lr v d r) else + if rh > lh + 2 then bal (join l v d rl) rv rd rr else + create l v d r + + (* Merge two trees l and r into one. + All elements of l must precede the elements of r. + No assumption on the heights of l and r. 
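(Contrast this with the internal [merge] above, which goes through [bal] and therefore assumes its two inputs are already nearly balanced; [concat] relies on [join], which rebalances for arbitrary height differences. Note added for readability, not part of the upstream comment.)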
*) + + let concat t1 t2 = + match (t1, t2) with + (Empty, t) -> t + | (t, Empty) -> t + | (_, _) -> + let (x, d) = min_binding t2 in + join t1 x d (remove_min_binding t2) + + let concat_or_join t1 v d t2 = + match d with + | Some d -> join t1 v d t2 + | None -> concat t1 t2 + + let rec split x = function + Empty -> + (Empty, None, Empty) + | Node {l; v; d; r} -> + let c = Ord.compare x v in + if c = 0 then (l, Some d, r) + else if c < 0 then + let (ll, pres, rl) = split x l in (ll, pres, join rl v d r) + else + let (lr, pres, rr) = split x r in (join l v d lr, pres, rr) + + let rec merge f s1 s2 = + match (s1, s2) with + (Empty, Empty) -> Empty + | (Node {l=l1; v=v1; d=d1; r=r1; h=h1}, _) when h1 >= height s2 -> + let (l2, d2, r2) = split v1 s2 in + concat_or_join (merge f l1 l2) v1 (f v1 (Some d1) d2) (merge f r1 r2) + | (_, Node {l=l2; v=v2; d=d2; r=r2}) -> + let (l1, d1, r1) = split v2 s1 in + concat_or_join (merge f l1 l2) v2 (f v2 d1 (Some d2)) (merge f r1 r2) + | _ -> + assert false + + let rec union f s1 s2 = + match (s1, s2) with + | (Empty, s) | (s, Empty) -> s + | (Node {l=l1; v=v1; d=d1; r=r1; h=h1}, Node {l=l2; v=v2; d=d2; r=r2; h=h2}) -> + if h1 >= h2 then + let (l2, d2, r2) = split v1 s2 in + let l = union f l1 l2 and r = union f r1 r2 in + match d2 with + | None -> join l v1 d1 r + | Some d2 -> concat_or_join l v1 (f v1 d1 d2) r + else + let (l1, d1, r1) = split v2 s1 in + let l = union f l1 l2 and r = union f r1 r2 in + match d1 with + | None -> join l v2 d2 r + | Some d1 -> concat_or_join l v2 (f v2 d1 d2) r + + let rec filter p = function + Empty -> Empty + | Node {l; v; d; r} as m -> + (* call [p] in the expected left-to-right order *) + let l' = filter p l in + let pvd = p v d in + let r' = filter p r in + if pvd then if l==l' && r==r' then m else join l' v d r' + else concat l' r' + + let rec partition p = function + Empty -> (Empty, Empty) + | Node {l; v; d; r} -> + (* call [p] in the expected left-to-right order *) + let (lt, lf) = partition p l in + let pvd = p v d in + let (rt, rf) = partition p r in + if pvd + then (join lt v d rt, concat lf rf) + else (concat lt rt, join lf v d rf) + + type 'a enumeration = End | More of key * 'a * 'a t * 'a enumeration + + let rec cons_enum m e = + match m with + Empty -> e + | Node {l; v; d; r} -> cons_enum l (More(v, d, r, e)) + + let compare cmp m1 m2 = + let rec compare_aux e1 e2 = + match (e1, e2) with + (End, End) -> 0 + | (End, _) -> -1 + | (_, End) -> 1 + | (More(v1, d1, r1, e1), More(v2, d2, r2, e2)) -> + let c = Ord.compare v1 v2 in + if c <> 0 then c else + let c = cmp d1 d2 in + if c <> 0 then c else + compare_aux (cons_enum r1 e1) (cons_enum r2 e2) + in compare_aux (cons_enum m1 End) (cons_enum m2 End) + + let equal cmp m1 m2 = + let rec equal_aux e1 e2 = + match (e1, e2) with + (End, End) -> true + | (End, _) -> false + | (_, End) -> false + | (More(v1, d1, r1, e1), More(v2, d2, r2, e2)) -> + Ord.compare v1 v2 = 0 && cmp d1 d2 && + equal_aux (cons_enum r1 e1) (cons_enum r2 e2) + in equal_aux (cons_enum m1 End) (cons_enum m2 End) + + let rec cardinal = function + Empty -> 0 + | Node {l; r} -> cardinal l + 1 + cardinal r + + let rec bindings_aux accu = function + Empty -> accu + | Node {l; v; d; r} -> bindings_aux ((v, d) :: bindings_aux accu r) l + + let bindings s = + bindings_aux [] s + + let choose = min_binding + + let choose_opt = min_binding_opt + +end diff --git a/res_syntax/compiler-libs-406/map.mli b/res_syntax/compiler-libs-406/map.mli new file mode 100644 index 0000000000..57432210b2 --- /dev/null +++ 
b/res_syntax/compiler-libs-406/map.mli @@ -0,0 +1,315 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Association tables over ordered types. + + This module implements applicative association tables, also known as + finite maps or dictionaries, given a total ordering function + over the keys. + All operations over maps are purely applicative (no side-effects). + The implementation uses balanced binary trees, and therefore searching + and insertion take time logarithmic in the size of the map. + + For instance: + {[ + module IntPairs = + struct + type t = int * int + let compare (x0,y0) (x1,y1) = + match Stdlib.compare x0 x1 with + 0 -> Stdlib.compare y0 y1 + | c -> c + end + + module PairsMap = Map.Make(IntPairs) + + let m = PairsMap.(empty |> add (0,1) "hello" |> add (1,0) "world") + ]} + + This creates a new module [PairsMap], with a new type ['a PairsMap.t] + of maps from [int * int] to ['a]. In this example, [m] contains [string] + values so its type is [string PairsMap.t]. +*) + +module type OrderedType = + sig + type t + (** The type of the map keys. *) + + val compare : t -> t -> int + (** A total ordering function over the keys. + This is a two-argument function [f] such that + [f e1 e2] is zero if the keys [e1] and [e2] are equal, + [f e1 e2] is strictly negative if [e1] is smaller than [e2], + and [f e1 e2] is strictly positive if [e1] is greater than [e2]. + Example: a suitable ordering function is the generic structural + comparison function {!Stdlib.compare}. *) + end +(** Input signature of the functor {!Map.Make}. *) + +module type S = + sig + type key + (** The type of the map keys. *) + + type (+'a) t + (** The type of maps from type [key] to type ['a]. *) + + val empty: 'a t + (** The empty map. *) + + val is_empty: 'a t -> bool + (** Test whether a map is empty or not. *) + + val mem: key -> 'a t -> bool + (** [mem x m] returns [true] if [m] contains a binding for [x], + and [false] otherwise. *) + + val add: key -> 'a -> 'a t -> 'a t + (** [add x y m] returns a map containing the same bindings as + [m], plus a binding of [x] to [y]. If [x] was already bound + in [m] to a value that is physically equal to [y], + [m] is returned unchanged (the result of the function is + then physically equal to [m]). Otherwise, the previous binding + of [x] in [m] disappears. + @before 4.03 Physical equality was not ensured. *) + + val update: key -> ('a option -> 'a option) -> 'a t -> 'a t + (** [update x f m] returns a map containing the same bindings as + [m], except for the binding of [x]. Depending on the value of + [y] where [y] is [f (find_opt x m)], the binding of [x] is + added, removed or updated. If [y] is [None], the binding is + removed if it exists; otherwise, if [y] is [Some z] then [x] + is associated to [z] in the resulting map. If [x] was already + bound in [m] to a value that is physically equal to [z], [m] + is returned unchanged (the result of the function is then + physically equal to [m]). 
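For example (an illustrative sketch, not part of the original documentation; [IntMap] stands for any module obtained from [Map.Make]), [update] gives a one-pass insert-or-modify:
{[
  (* count occurrences: missing keys start at 1, existing ones are incremented *)
  let bump k counts =
    IntMap.update k (function None -> Some 1 | Some n -> Some (n + 1)) counts
]}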
+ @since 4.06.0 + *) + + val singleton: key -> 'a -> 'a t + (** [singleton x y] returns the one-element map that contains a binding [y] + for [x]. + @since 3.12.0 + *) + + val remove: key -> 'a t -> 'a t + (** [remove x m] returns a map containing the same bindings as + [m], except for [x] which is unbound in the returned map. + If [x] was not in [m], [m] is returned unchanged + (the result of the function is then physically equal to [m]). + @before 4.03 Physical equality was not ensured. *) + + val merge: + (key -> 'a option -> 'b option -> 'c option) -> 'a t -> 'b t -> 'c t + (** [merge f m1 m2] computes a map whose keys is a subset of keys of [m1] + and of [m2]. The presence of each such binding, and the corresponding + value, is determined with the function [f]. + In terms of the [find_opt] operation, we have + [find_opt x (merge f m1 m2) = f (find_opt x m1) (find_opt x m2)] + for any key [x], provided that [f None None = None]. + @since 3.12.0 + *) + + val union: (key -> 'a -> 'a -> 'a option) -> 'a t -> 'a t -> 'a t + (** [union f m1 m2] computes a map whose keys is the union of keys + of [m1] and of [m2]. When the same binding is defined in both + arguments, the function [f] is used to combine them. + This is a special case of [merge]: [union f m1 m2] is equivalent + to [merge f' m1 m2], where + - [f' None None = None] + - [f' (Some v) None = Some v] + - [f' None (Some v) = Some v] + - [f' (Some v1) (Some v2) = f v1 v2] + + @since 4.03.0 + *) + + val compare: ('a -> 'a -> int) -> 'a t -> 'a t -> int + (** Total ordering between maps. The first argument is a total ordering + used to compare data associated with equal keys in the two maps. *) + + val equal: ('a -> 'a -> bool) -> 'a t -> 'a t -> bool + (** [equal cmp m1 m2] tests whether the maps [m1] and [m2] are + equal, that is, contain equal keys and associate them with + equal data. [cmp] is the equality predicate used to compare + the data associated with the keys. *) + + val iter: (key -> 'a -> unit) -> 'a t -> unit + (** [iter f m] applies [f] to all bindings in map [m]. + [f] receives the key as first argument, and the associated value + as second argument. The bindings are passed to [f] in increasing + order with respect to the ordering over the type of the keys. *) + + val fold: (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b + (** [fold f m a] computes [(f kN dN ... (f k1 d1 a)...)], + where [k1 ... kN] are the keys of all bindings in [m] + (in increasing order), and [d1 ... dN] are the associated data. *) + + val for_all: (key -> 'a -> bool) -> 'a t -> bool + (** [for_all p m] checks if all the bindings of the map + satisfy the predicate [p]. + @since 3.12.0 + *) + + val exists: (key -> 'a -> bool) -> 'a t -> bool + (** [exists p m] checks if at least one binding of the map + satisfies the predicate [p]. + @since 3.12.0 + *) + + val filter: (key -> 'a -> bool) -> 'a t -> 'a t + (** [filter p m] returns the map with all the bindings in [m] + that satisfy predicate [p]. If [p] satisfies every binding in [m], + [m] is returned unchanged (the result of the function is then + physically equal to [m]) + @since 3.12.0 + @before 4.03 Physical equality was not ensured. + *) + + val partition: (key -> 'a -> bool) -> 'a t -> 'a t * 'a t + (** [partition p m] returns a pair of maps [(m1, m2)], where + [m1] contains all the bindings of [s] that satisfy the + predicate [p], and [m2] is the map with all the bindings of + [s] that do not satisfy [p]. 
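For instance (an illustrative addition, not from the upstream docs), [let small, large = partition (fun _key v -> v < 10) m] splits [m] according to the stored values while leaving the keys untouched.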
+ @since 3.12.0 + *) + + val cardinal: 'a t -> int + (** Return the number of bindings of a map. + @since 3.12.0 + *) + + val bindings: 'a t -> (key * 'a) list + (** Return the list of all bindings of the given map. + The returned list is sorted in increasing order with respect + to the ordering [Ord.compare], where [Ord] is the argument + given to {!Map.Make}. + @since 3.12.0 + *) + + val min_binding: 'a t -> (key * 'a) + (** Return the smallest binding of the given map + (with respect to the [Ord.compare] ordering), or raise + [Not_found] if the map is empty. + @since 3.12.0 + *) + + val min_binding_opt: 'a t -> (key * 'a) option + (** Return the smallest binding of the given map + (with respect to the [Ord.compare] ordering), or [None] + if the map is empty. + @since 4.05 + *) + + val max_binding: 'a t -> (key * 'a) + (** Same as {!Map.S.min_binding}, but returns the largest binding + of the given map. + @since 3.12.0 + *) + + val max_binding_opt: 'a t -> (key * 'a) option + (** Same as {!Map.S.min_binding_opt}, but returns the largest binding + of the given map. + @since 4.05 + *) + + val choose: 'a t -> (key * 'a) + (** Return one binding of the given map, or raise [Not_found] if + the map is empty. Which binding is chosen is unspecified, + but equal bindings will be chosen for equal maps. + @since 3.12.0 + *) + + val choose_opt: 'a t -> (key * 'a) option + (** Return one binding of the given map, or [None] if + the map is empty. Which binding is chosen is unspecified, + but equal bindings will be chosen for equal maps. + @since 4.05 + *) + + val split: key -> 'a t -> 'a t * 'a option * 'a t + (** [split x m] returns a triple [(l, data, r)], where + [l] is the map with all the bindings of [m] whose key + is strictly less than [x]; + [r] is the map with all the bindings of [m] whose key + is strictly greater than [x]; + [data] is [None] if [m] contains no binding for [x], + or [Some v] if [m] binds [v] to [x]. + @since 3.12.0 + *) + + val find: key -> 'a t -> 'a + (** [find x m] returns the current binding of [x] in [m], + or raises [Not_found] if no such binding exists. *) + + val find_opt: key -> 'a t -> 'a option + (** [find_opt x m] returns [Some v] if the current binding of [x] + in [m] is [v], or [None] if no such binding exists. + @since 4.05 + *) + + val find_first: (key -> bool) -> 'a t -> key * 'a + (** [find_first f m], where [f] is a monotonically increasing function, + returns the binding of [m] with the lowest key [k] such that [f k], + or raises [Not_found] if no such key exists. + + For example, [find_first (fun k -> Ord.compare k x >= 0) m] will return + the first binding [k, v] of [m] where [Ord.compare k x >= 0] + (intuitively: [k >= x]), or raise [Not_found] if [x] is greater than any + element of [m]. + + @since 4.05 + *) + + val find_first_opt: (key -> bool) -> 'a t -> (key * 'a) option + (** [find_first_opt f m], where [f] is a monotonically increasing function, + returns an option containing the binding of [m] with the lowest key [k] + such that [f k], or [None] if no such key exists. + @since 4.05 + *) + + val find_last: (key -> bool) -> 'a t -> key * 'a + (** [find_last f m], where [f] is a monotonically decreasing function, + returns the binding of [m] with the highest key [k] such that [f k], + or raises [Not_found] if no such key exists. 
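For example (an illustrative example in the spirit of the [find_first] one above), [find_last (fun k -> Ord.compare k x <= 0) m] returns the last binding [k, v] of [m] with [k <= x], or raises [Not_found] if every key of [m] is greater than [x].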
+ @since 4.05 + *) + + val find_last_opt: (key -> bool) -> 'a t -> (key * 'a) option + (** [find_last_opt f m], where [f] is a monotonically decreasing function, + returns an option containing the binding of [m] with the highest key [k] + such that [f k], or [None] if no such key exists. + @since 4.05 + *) + + val map: ('a -> 'b) -> 'a t -> 'b t + (** [map f m] returns a map with same domain as [m], where the + associated value [a] of all bindings of [m] has been + replaced by the result of the application of [f] to [a]. + The bindings are passed to [f] in increasing order + with respect to the ordering over the type of the keys. *) + + val mapi: (key -> 'a -> 'b) -> 'a t -> 'b t + (** Same as {!Map.S.map}, but the function receives as arguments both the + key and the associated value for each binding of the map. *) + + + end +(** Output signature of the functor {!Map.Make}. *) + +module Make (Ord : OrderedType) : S with type key = Ord.t +(** Functor building an implementation of the map structure + given a totally ordered type. *) diff --git a/res_syntax/compiler-libs-406/map_gen.ml b/res_syntax/compiler-libs-406/map_gen.ml new file mode 100644 index 0000000000..bdc15ca7eb --- /dev/null +++ b/res_syntax/compiler-libs-406/map_gen.ml @@ -0,0 +1,380 @@ +(***********************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. All rights reserved. This file is distributed *) +(* under the terms of the GNU Library General Public License, with *) +(* the special exception on linking described in file ../LICENSE. *) +(* *) +(***********************************************************************) +(** adapted from stdlib *) + +type ('key,'a) t = + | Empty + | Node of ('key,'a) t * 'key * 'a * ('key,'a) t * int + +type ('key,'a) enumeration = + | End + | More of 'key * 'a * ('key,'a) t * ('key, 'a) enumeration + +let rec cardinal_aux acc = function + | Empty -> acc + | Node (l,_,_,r, _) -> + cardinal_aux (cardinal_aux (acc + 1) r ) l + +let cardinal s = cardinal_aux 0 s + +let rec bindings_aux accu = function + | Empty -> accu + | Node(l, v, d, r, _) -> bindings_aux ((v, d) :: bindings_aux accu r) l + +let bindings s = + bindings_aux [] s + +let rec keys_aux accu = function + Empty -> accu + | Node(l, v, _, r, _) -> keys_aux (v :: keys_aux accu r) l + +let keys s = keys_aux [] s + + + +let rec cons_enum m e = + match m with + Empty -> e + | Node(l, v, d, r, _) -> cons_enum l (More(v, d, r, e)) + + +let height = function + | Empty -> 0 + | Node(_,_,_,_,h) -> h + +let create l x d r = + let hl = height l and hr = height r in + Node(l, x, d, r, (if hl >= hr then hl + 1 else hr + 1)) + +let singleton x d = Node(Empty, x, d, Empty, 1) + +let bal l x d r = + let hl = match l with Empty -> 0 | Node(_,_,_,_,h) -> h in + let hr = match r with Empty -> 0 | Node(_,_,_,_,h) -> h in + if hl > hr + 2 then begin + match l with + Empty -> invalid_arg "Map.bal" + | Node(ll, lv, ld, lr, _) -> + if height ll >= height lr then + create ll lv ld (create lr x d r) + else begin + match lr with + Empty -> invalid_arg "Map.bal" + | Node(lrl, lrv, lrd, lrr, _)-> + create (create ll lv ld lrl) lrv lrd (create lrr x d r) + end + end else if hr > hl + 2 then begin + match r with + Empty -> invalid_arg "Map.bal" + | Node(rl, rv, rd, rr, _) -> + if height rr >= height rl then + create (create l x d rl) rv rd rr + else begin + match rl with + Empty 
-> invalid_arg "Map.bal" + | Node(rll, rlv, rld, rlr, _) -> + create (create l x d rll) rlv rld (create rlr rv rd rr) + end + end else + Node(l, x, d, r, (if hl >= hr then hl + 1 else hr + 1)) + +let empty = Empty + +let is_empty = function Empty -> true | _ -> false + +let rec min_binding_exn = function + Empty -> raise Not_found + | Node(Empty, x, d, _r, _) -> (x, d) + | Node(l, _x, _d, _r, _) -> min_binding_exn l + +let choose = min_binding_exn + +let rec max_binding_exn = function + Empty -> raise Not_found + | Node(_l, x, d, Empty, _) -> (x, d) + | Node(_l, _x, _d, r, _) -> max_binding_exn r + +let rec remove_min_binding = function + Empty -> invalid_arg "Map.remove_min_elt" + | Node(Empty, _x, _d, r, _) -> r + | Node(l, x, d, r, _) -> bal (remove_min_binding l) x d r + +let merge t1 t2 = + match (t1, t2) with + (Empty, t) -> t + | (t, Empty) -> t + | (_, _) -> + let (x, d) = min_binding_exn t2 in + bal t1 x d (remove_min_binding t2) + + +let rec iter f = function + Empty -> () + | Node(l, v, d, r, _) -> + iter f l; f v d; iter f r + +let rec map f = function + Empty -> + Empty + | Node(l, v, d, r, h) -> + let l' = map f l in + let d' = f d in + let r' = map f r in + Node(l', v, d', r', h) + +let rec mapi f = function + Empty -> + Empty + | Node(l, v, d, r, h) -> + let l' = mapi f l in + let d' = f v d in + let r' = mapi f r in + Node(l', v, d', r', h) + +let rec fold f m accu = + match m with + Empty -> accu + | Node(l, v, d, r, _) -> + fold f r (f v d (fold f l accu)) + +let rec for_all p = function + Empty -> true + | Node(l, v, d, r, _) -> p v d && for_all p l && for_all p r + +let rec exists p = function + Empty -> false + | Node(l, v, d, r, _) -> p v d || exists p l || exists p r + +(* Beware: those two functions assume that the added k is *strictly* + smaller (or bigger) than all the present keys in the tree; it + does not test for equality with the current min (or max) key. + + Indeed, they are only used during the "join" operation which + respects this precondition. +*) + +let rec add_min_binding k v = function + | Empty -> singleton k v + | Node (l, x, d, r, _h) -> + bal (add_min_binding k v l) x d r + +let rec add_max_binding k v = function + | Empty -> singleton k v + | Node (l, x, d, r, _h) -> + bal l x d (add_max_binding k v r) + +(* Same as create and bal, but no assumptions are made on the + relative heights of l and r. *) + +let rec join l v d r = + match (l, r) with + (Empty, _) -> add_min_binding v d r + | (_, Empty) -> add_max_binding v d l + | (Node(ll, lv, ld, lr, lh), Node(rl, rv, rd, rr, rh)) -> + if lh > rh + 2 then bal ll lv ld (join lr v d r) else + if rh > lh + 2 then bal (join l v d rl) rv rd rr else + create l v d r + +(* Merge two trees l and r into one. + All elements of l must precede the elements of r. + No assumption on the heights of l and r. 
*) + +let concat t1 t2 = + match (t1, t2) with + (Empty, t) -> t + | (t, Empty) -> t + | (_, _) -> + let (x, d) = min_binding_exn t2 in + join t1 x d (remove_min_binding t2) + +let concat_or_join t1 v d t2 = + match d with + | Some d -> join t1 v d t2 + | None -> concat t1 t2 + +let rec filter p = function + Empty -> Empty + | Node(l, v, d, r, _) -> + (* call [p] in the expected left-to-right order *) + let l' = filter p l in + let pvd = p v d in + let r' = filter p r in + if pvd then join l' v d r' else concat l' r' + +let rec partition p = function + Empty -> (Empty, Empty) + | Node(l, v, d, r, _) -> + (* call [p] in the expected left-to-right order *) + let (lt, lf) = partition p l in + let pvd = p v d in + let (rt, rf) = partition p r in + if pvd + then (join lt v d rt, concat lf rf) + else (concat lt rt, join lf v d rf) + +let compare compare_key cmp_val m1 m2 = + let rec compare_aux e1 e2 = + match (e1, e2) with + (End, End) -> 0 + | (End, _) -> -1 + | (_, End) -> 1 + | (More(v1, d1, r1, e1), More(v2, d2, r2, e2)) -> + let c = compare_key v1 v2 in + if c <> 0 then c else + let c = cmp_val d1 d2 in + if c <> 0 then c else + compare_aux (cons_enum r1 e1) (cons_enum r2 e2) + in compare_aux (cons_enum m1 End) (cons_enum m2 End) + +let equal compare_key cmp m1 m2 = + let rec equal_aux e1 e2 = + match (e1, e2) with + (End, End) -> true + | (End, _) -> false + | (_, End) -> false + | (More(v1, d1, r1, e1), More(v2, d2, r2, e2)) -> + compare_key v1 v2 = 0 && cmp d1 d2 && + equal_aux (cons_enum r1 e1) (cons_enum r2 e2) + in equal_aux (cons_enum m1 End) (cons_enum m2 End) + + + + +module type S = + sig + type key + type +'a t + val empty: 'a t + val is_empty: 'a t -> bool + val mem: key -> 'a t -> bool + + val add: key -> 'a -> 'a t -> 'a t + (** [add x y m] + If [x] was already bound in [m], its previous binding disappears. *) + + val adjust: key -> (unit -> 'a) -> ('a -> 'a) -> 'a t -> 'a t + (** [adjust k v f map] if not exist [add k v], otherwise + [add k v (f old)] + *) + + val singleton: key -> 'a -> 'a t + + val remove: key -> 'a t -> 'a t + (** [remove x m] returns a map containing the same bindings as + [m], except for [x] which is unbound in the returned map. *) + + val merge: + (key -> 'a option -> 'b option -> 'c option) -> 'a t -> 'b t -> 'c t + (** [merge f m1 m2] computes a map whose keys is a subset of keys of [m1] + and of [m2]. The presence of each such binding, and the corresponding + value, is determined with the function [f]. + @since 3.12.0 + *) + + val disjoint_merge : 'a t -> 'a t -> 'a t + (* merge two maps, will raise if they have the same key *) + val compare: ('a -> 'a -> int) -> 'a t -> 'a t -> int + (** Total ordering between maps. The first argument is a total ordering + used to compare data associated with equal keys in the two maps. *) + + val equal: ('a -> 'a -> bool) -> 'a t -> 'a t -> bool + + val iter: (key -> 'a -> unit) -> 'a t -> unit + (** [iter f m] applies [f] to all bindings in map [m]. + The bindings are passed to [f] in increasing order. *) + + val fold: (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b + (** [fold f m a] computes [(f kN dN ... (f k1 d1 a)...)], + where [k1 ... kN] are the keys of all bindings in [m] + (in increasing order) *) + + val for_all: (key -> 'a -> bool) -> 'a t -> bool + (** [for_all p m] checks if all the bindings of the map. + order unspecified + *) + + val exists: (key -> 'a -> bool) -> 'a t -> bool + (** [exists p m] checks if at least one binding of the map + satisfy the predicate [p]. 
+ order unspecified + *) + + val filter: (key -> 'a -> bool) -> 'a t -> 'a t + (** [filter p m] returns the map with all the bindings in [m] + that satisfy predicate [p]. + order unspecified + *) + + val partition: (key -> 'a -> bool) -> 'a t -> 'a t * 'a t + (** [partition p m] returns a pair of maps [(m1, m2)], where + [m1] contains all the bindings of [s] that satisfy the + predicate [p], and [m2] is the map with all the bindings of + [s] that do not satisfy [p]. + *) + + val cardinal: 'a t -> int + (** Return the number of bindings of a map. *) + + val bindings: 'a t -> (key * 'a) list + (** Return the list of all bindings of the given map. + The returned list is sorted in increasing order with respect + to the ordering *) + + val keys : 'a t -> key list + (* Increasing order *) + + val min_binding_exn: 'a t -> (key * 'a) + (** raise [Not_found] if the map is empty. *) + + val max_binding_exn: 'a t -> (key * 'a) + (** Same as {!Map.S.min_binding} *) + + val choose: 'a t -> (key * 'a) + (** Return one binding of the given map, or raise [Not_found] if + the map is empty. Which binding is chosen is unspecified, + but equal bindings will be chosen for equal maps. + *) + + val split: key -> 'a t -> 'a t * 'a option * 'a t + (** [split x m] returns a triple [(l, data, r)], where + [l] is the map with all the bindings of [m] whose key + is strictly less than [x]; + [r] is the map with all the bindings of [m] whose key + is strictly greater than [x]; + [data] is [None] if [m] contains no binding for [x], + or [Some v] if [m] binds [v] to [x]. + @since 3.12.0 + *) + + val find_exn: key -> 'a t -> 'a + (** [find x m] returns the current binding of [x] in [m], + or raises [Not_found] if no such binding exists. *) + + val find_opt: key -> 'a t -> 'a option + val find_default: key -> 'a t -> 'a -> 'a + val map: ('a -> 'b) -> 'a t -> 'b t + (** [map f m] returns a map with same domain as [m], where the + associated value [a] of all bindings of [m] has been + replaced by the result of the application of [f] to [a]. + The bindings are passed to [f] in increasing order + with respect to the ordering over the type of the keys. *) + + val mapi: (key -> 'a -> 'b) -> 'a t -> 'b t + (** Same as {!Map.S.map}, but the function receives as arguments both the + key and the associated value for each binding of the map. *) + + val of_list : (key * 'a) list -> 'a t + val of_array : (key * 'a ) array -> 'a t + val add_list : (key * 'b) list -> 'b t -> 'b t + + end diff --git a/res_syntax/compiler-libs-406/misc.ml b/res_syntax/compiler-libs-406/misc.ml new file mode 100644 index 0000000000..76aff7686c --- /dev/null +++ b/res_syntax/compiler-libs-406/misc.ml @@ -0,0 +1,732 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +(* Errors *) + +exception Fatal_error + +let fatal_error msg = + prerr_string ">> Fatal error: "; prerr_endline msg; raise Fatal_error + +let fatal_errorf fmt = Format.kasprintf fatal_error fmt + +(* Exceptions *) + +let try_finally work cleanup = + let result = (try work () with e -> cleanup (); raise e) in + cleanup (); + result +;; + +type ref_and_value = R : 'a ref * 'a -> ref_and_value + +let protect_refs = + let set_refs l = List.iter (fun (R (r, v)) -> r := v) l in + fun refs f -> + let backup = List.map (fun (R (r, _)) -> R (r, !r)) refs in + set_refs refs; + match f () with + | x -> set_refs backup; x + | exception e -> set_refs backup; raise e + +(* List functions *) + +let rec map_end f l1 l2 = + match l1 with + [] -> l2 + | hd::tl -> f hd :: map_end f tl l2 + +let rec map_left_right f = function + [] -> [] + | hd::tl -> let res = f hd in res :: map_left_right f tl + +let rec for_all2 pred l1 l2 = + match (l1, l2) with + ([], []) -> true + | (hd1::tl1, hd2::tl2) -> pred hd1 hd2 && for_all2 pred tl1 tl2 + | (_, _) -> false + +let rec replicate_list elem n = + if n <= 0 then [] else elem :: replicate_list elem (n-1) + +let rec list_remove x = function + [] -> [] + | hd :: tl -> + if hd = x then tl else hd :: list_remove x tl + +let rec split_last = function + [] -> assert false + | [x] -> ([], x) + | hd :: tl -> + let (lst, last) = split_last tl in + (hd :: lst, last) + +module Stdlib = struct + module List = struct + type 'a t = 'a list + + let rec compare cmp l1 l2 = + match l1, l2 with + | [], [] -> 0 + | [], _::_ -> -1 + | _::_, [] -> 1 + | h1::t1, h2::t2 -> + let c = cmp h1 h2 in + if c <> 0 then c + else compare cmp t1 t2 + + let rec equal eq l1 l2 = + match l1, l2 with + | ([], []) -> true + | (hd1 :: tl1, hd2 :: tl2) -> eq hd1 hd2 && equal eq tl1 tl2 + | (_, _) -> false + + let filter_map f l = + let rec aux acc l = + match l with + | [] -> List.rev acc + | h :: t -> + match f h with + | None -> aux acc t + | Some v -> aux (v :: acc) t + in + aux [] l + + let map2_prefix f l1 l2 = + let rec aux acc l1 l2 = + match l1, l2 with + | [], _ -> (List.rev acc, l2) + | _ :: _, [] -> raise (Invalid_argument "map2_prefix") + | h1::t1, h2::t2 -> + let h = f h1 h2 in + aux (h :: acc) t1 t2 + in + aux [] l1 l2 + + let some_if_all_elements_are_some l = + let rec aux acc l = + match l with + | [] -> Some (List.rev acc) + | None :: _ -> None + | Some h :: t -> aux (h :: acc) t + in + aux [] l + + let split_at n l = + let rec aux n acc l = + if n = 0 + then List.rev acc, l + else + match l with + | [] -> raise (Invalid_argument "split_at") + | t::q -> aux (n-1) (t::acc) q + in + aux n [] l + end + + module Option = struct + type 'a t = 'a option + + let equal eq o1 o2 = + match o1, o2 with + | None, None -> true + | Some e1, Some e2 -> eq e1 e2 + | _, _ -> false + + let iter f = function + | Some x -> f x + | None -> () + + let map f = function + | Some x -> Some (f x) + | None -> None + + let fold f a b = + match a with + | None -> b + | Some a -> f a b + + let value_default f ~default a = + match a with + | None -> default + | Some a -> f a + end + + module Array = struct + let exists2 p a1 a2 = + let n = Array.length a1 in + if Array.length a2 <> n then invalid_arg "Misc.Stdlib.Array.exists2"; + let rec loop i = + if i = n then false + else if p (Array.unsafe_get a1 i) (Array.unsafe_get a2 i) then true + else loop (succ i) in + loop 0 + end +end + +let may = Stdlib.Option.iter +let may_map = 
Stdlib.Option.map + +(* File functions *) + +let find_in_path path name = + if not (Filename.is_implicit name) then + if Sys.file_exists name then name else raise Not_found + else begin + let rec try_dir = function + [] -> raise Not_found + | dir::rem -> + let fullname = Filename.concat dir name in + if Sys.file_exists fullname then fullname else try_dir rem + in try_dir path + end + +let find_in_path_rel path name = + let rec simplify s = + let open Filename in + let base = basename s in + let dir = dirname s in + if dir = s then dir + else if base = current_dir_name then simplify dir + else concat (simplify dir) base + in + let rec try_dir = function + [] -> raise Not_found + | dir::rem -> + let fullname = simplify (Filename.concat dir name) in + if Sys.file_exists fullname then fullname else try_dir rem + in try_dir path + +let find_in_path_uncap path name = + let uname = String.uncapitalize_ascii name in + let rec try_dir = function + [] -> raise Not_found + | dir::rem -> + let fullname = Filename.concat dir name + and ufullname = Filename.concat dir uname in + if Sys.file_exists ufullname then ufullname + else if Sys.file_exists fullname then fullname + else try_dir rem + in try_dir path + +let remove_file filename = + try + if Sys.file_exists filename + then Sys.remove filename + with Sys_error _msg -> + () + +(* Expand a -I option: if it starts with +, make it relative to the standard + library directory *) + +let expand_directory alt s = + if String.length s > 0 && s.[0] = '+' + then Filename.concat alt + (String.sub s 1 (String.length s - 1)) + else s + +(* Hashtable functions *) + +let create_hashtable size init = + let tbl = Hashtbl.create size in + List.iter (fun (key, data) -> Hashtbl.add tbl key data) init; + tbl + +(* File copy *) + +let copy_file ic oc = + let buff = Bytes.create 0x1000 in + let rec copy () = + let n = input ic buff 0 0x1000 in + if n = 0 then () else (output oc buff 0 n; copy()) + in copy() + +let copy_file_chunk ic oc len = + let buff = Bytes.create 0x1000 in + let rec copy n = + if n <= 0 then () else begin + let r = input ic buff 0 (min n 0x1000) in + if r = 0 then raise End_of_file else (output oc buff 0 r; copy(n-r)) + end + in copy len + +let string_of_file ic = + let b = Buffer.create 0x10000 in + let buff = Bytes.create 0x1000 in + let rec copy () = + let n = input ic buff 0 0x1000 in + if n = 0 then Buffer.contents b else + (Buffer.add_subbytes b buff 0 n; copy()) + in copy() + +let output_to_file_via_temporary ?(mode = [Open_text]) filename fn = + let (temp_filename, oc) = + Filename.open_temp_file + ~mode ~perms:0o666 ~temp_dir:(Filename.dirname filename) + (Filename.basename filename) ".tmp" in + (* The 0o666 permissions will be modified by the umask. It's just + like what [open_out] and [open_out_bin] do. + With temp_dir = dirname filename, we ensure that the returned + temp file is in the same directory as filename itself, making + it safe to rename temp_filename to filename later. + With prefix = basename filename, we are almost certain that + the first generated name will be unique. A fixed prefix + would work too but might generate more collisions if many + files are being produced simultaneously in the same directory. 
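A typical call therefore looks like [output_to_file_via_temporary filename (fun _tmp oc -> output_string oc contents)]; the final [Sys.rename] then publishes the file in one step on platforms where rename is atomic. (Illustrative note, not part of the original comment.)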
*) + match fn temp_filename oc with + | res -> + close_out oc; + begin try + Sys.rename temp_filename filename; res + with exn -> + remove_file temp_filename; raise exn + end + | exception exn -> + close_out oc; remove_file temp_filename; raise exn + +(* Integer operations *) + +let rec log2 n = + if n <= 1 then 0 else 1 + log2(n asr 1) + +let align n a = + if n >= 0 then (n + a - 1) land (-a) else n land (-a) + +let no_overflow_add a b = (a lxor b) lor (a lxor (lnot (a+b))) < 0 + +let no_overflow_sub a b = (a lxor (lnot b)) lor (b lxor (a-b)) < 0 + +let no_overflow_mul a b = b <> 0 && (a * b) / b = a + +let no_overflow_lsl a k = + 0 <= k && k < Sys.word_size && min_int asr k <= a && a <= max_int asr k + +module Int_literal_converter = struct + (* To convert integer literals, allowing max_int + 1 (PR#4210) *) + let cvt_int_aux str neg of_string = + if String.length str = 0 || str.[0]= '-' + then of_string str + else neg (of_string ("-" ^ str)) + let int s = cvt_int_aux s (~-) int_of_string + let int32 s = cvt_int_aux s Int32.neg Int32.of_string + let int64 s = cvt_int_aux s Int64.neg Int64.of_string + let nativeint s = cvt_int_aux s Nativeint.neg Nativeint.of_string +end + +(* String operations *) + +let chop_extensions file = + let dirname = Filename.dirname file and basename = Filename.basename file in + try + let pos = String.index basename '.' in + let basename = String.sub basename 0 pos in + if Filename.is_implicit file && dirname = Filename.current_dir_name then + basename + else + Filename.concat dirname basename + with Not_found -> file + +let search_substring pat str start = + let rec search i j = + if j >= String.length pat then i + else if i + j >= String.length str then raise Not_found + else if str.[i + j] = pat.[j] then search i (j+1) + else search (i+1) 0 + in search start 0 + +let replace_substring ~before ~after str = + let rec search acc curr = + match search_substring before str curr with + | next -> + let prefix = String.sub str curr (next - curr) in + search (prefix :: acc) (next + String.length before) + | exception Not_found -> + let suffix = String.sub str curr (String.length str - curr) in + List.rev (suffix :: acc) + in String.concat after (search [] 0) + +let rev_split_words s = + let rec split1 res i = + if i >= String.length s then res else begin + match s.[i] with + ' ' | '\t' | '\r' | '\n' -> split1 res (i+1) + | _ -> split2 res i (i+1) + end + and split2 res i j = + if j >= String.length s then String.sub s i (j-i) :: res else begin + match s.[j] with + ' ' | '\t' | '\r' | '\n' -> split1 (String.sub s i (j-i) :: res) (j+1) + | _ -> split2 res i (j+1) + end + in split1 [] 0 + +let get_ref r = + let v = !r in + r := []; v + +let fst3 (x, _, _) = x +let snd3 (_,x,_) = x +let thd3 (_,_,x) = x + +let fst4 (x, _, _, _) = x +let snd4 (_,x,_, _) = x +let thd4 (_,_,x,_) = x +let for4 (_,_,_,x) = x + + +module LongString = struct + type t = bytes array + + let create str_size = + let tbl_size = str_size / Sys.max_string_length + 1 in + let tbl = Array.make tbl_size Bytes.empty in + for i = 0 to tbl_size - 2 do + tbl.(i) <- Bytes.create Sys.max_string_length; + done; + tbl.(tbl_size - 1) <- Bytes.create (str_size mod Sys.max_string_length); + tbl + + let length tbl = + let tbl_size = Array.length tbl in + Sys.max_string_length * (tbl_size - 1) + Bytes.length tbl.(tbl_size - 1) + + let get tbl ind = + Bytes.get tbl.(ind / Sys.max_string_length) (ind mod Sys.max_string_length) + + let set tbl ind c = + Bytes.set tbl.(ind / Sys.max_string_length) (ind mod 
Sys.max_string_length) + c + + let blit src srcoff dst dstoff len = + for i = 0 to len - 1 do + set dst (dstoff + i) (get src (srcoff + i)) + done + + let output oc tbl pos len = + for i = pos to pos + len - 1 do + output_char oc (get tbl i) + done + + let unsafe_blit_to_bytes src srcoff dst dstoff len = + for i = 0 to len - 1 do + Bytes.unsafe_set dst (dstoff + i) (get src (srcoff + i)) + done + + let input_bytes ic len = + let tbl = create len in + Array.iter (fun str -> really_input ic str 0 (Bytes.length str)) tbl; + tbl +end + + +let edit_distance a b cutoff = + let la, lb = String.length a, String.length b in + let cutoff = + (* using max_int for cutoff would cause overflows in (i + cutoff + 1); + we bring it back to the (max la lb) worstcase *) + min (max la lb) cutoff in + if abs (la - lb) > cutoff then None + else begin + (* initialize with 'cutoff + 1' so that not-yet-written-to cases have + the worst possible cost; this is useful when computing the cost of + a case just at the boundary of the cutoff diagonal. *) + let m = Array.make_matrix (la + 1) (lb + 1) (cutoff + 1) in + m.(0).(0) <- 0; + for i = 1 to la do + m.(i).(0) <- i; + done; + for j = 1 to lb do + m.(0).(j) <- j; + done; + for i = 1 to la do + for j = max 1 (i - cutoff - 1) to min lb (i + cutoff + 1) do + let cost = if a.[i-1] = b.[j-1] then 0 else 1 in + let best = + (* insert, delete or substitute *) + min (1 + min m.(i-1).(j) m.(i).(j-1)) (m.(i-1).(j-1) + cost) + in + let best = + (* swap two adjacent letters; we use "cost" again in case of + a swap between two identical letters; this is slightly + redundant as this is a double-substitution case, but it + was done this way in most online implementations and + imitation has its virtues *) + if not (i > 1 && j > 1 && a.[i-1] = b.[j-2] && a.[i-2] = b.[j-1]) + then best + else min best (m.(i-2).(j-2) + cost) + in + m.(i).(j) <- best + done; + done; + let result = m.(la).(lb) in + if result > cutoff + then None + else Some result + end + +let spellcheck env name = + let cutoff = + match String.length name with + | 1 | 2 -> 0 + | 3 | 4 -> 1 + | 5 | 6 -> 2 + | _ -> 3 + in + let compare target acc head = + match edit_distance target head cutoff with + | None -> acc + | Some dist -> + let (best_choice, best_dist) = acc in + if dist < best_dist then ([head], dist) + else if dist = best_dist then (head :: best_choice, dist) + else acc + in + fst (List.fold_left (compare name) ([], max_int) env) + +let did_you_mean ppf get_choices = + (* flush now to get the error report early, in the (unheard of) case + where the search in the get_choices function would take a bit of + time; in the worst case, the user has seen the error, she can + interrupt the process before the spell-checking terminates. *) + Format.fprintf ppf "@?"; + match get_choices () with + | [] -> () + | choices -> + let rest, last = split_last choices in + Format.fprintf ppf "@\nHint: Did you mean %s%s%s?@?" 
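(* e.g. with choices ["x"; "y"; "z"] this prints: Hint: Did you mean x, y or z? ; note added for readability *)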
+ (String.concat ", " rest) + (if rest = [] then "" else " or ") + last + +let cut_at s c = + let pos = String.index s c in + String.sub s 0 pos, String.sub s (pos+1) (String.length s - pos - 1) + + +module StringSet = Set.Make(struct type t = string let compare = compare end) +module StringMap = Map.Make(struct type t = string let compare = compare end) + +(* Color handling *) +module Color = struct + (* use ANSI color codes, see https://en.wikipedia.org/wiki/ANSI_escape_code *) + type color = + | Black + | Red + | Green + | Yellow + | Blue + | Magenta + | Cyan + | White + ;; + + type style = + | FG of color (* foreground *) + | BG of color (* background *) + | Bold + | Reset + + let ansi_of_color = function + | Black -> "0" + | Red -> "1" + | Green -> "2" + | Yellow -> "3" + | Blue -> "4" + | Magenta -> "5" + | Cyan -> "6" + | White -> "7" + + let code_of_style = function + | FG c -> "3" ^ ansi_of_color c + | BG c -> "4" ^ ansi_of_color c + | Bold -> "1" + | Reset -> "0" + + let ansi_of_style_l l = + let s = match l with + | [] -> code_of_style Reset + | [s] -> code_of_style s + | _ -> String.concat ";" (List.map code_of_style l) + in + "\x1b[" ^ s ^ "m" + + type styles = { + error: style list; + warning: style list; + loc: style list; + } + + let default_styles = { + warning = [Bold; FG Magenta]; + error = [Bold; FG Red]; + loc = [Bold]; + } + + let cur_styles = ref default_styles + let get_styles () = !cur_styles + let set_styles s = cur_styles := s + + (* map a tag to a style, if the tag is known. + @raise Not_found otherwise *) + let style_of_tag s = match s with + | Format.String_tag "error" -> (!cur_styles).error + | Format.String_tag "warning" -> (!cur_styles).warning + | Format.String_tag "loc" -> (!cur_styles).loc + | _ -> raise Not_found + + let color_enabled = ref true + + (* either prints the tag of [s] or delegates to [or_else] *) + let mark_open_tag ~or_else s = + try + let style = style_of_tag s in + if !color_enabled then ansi_of_style_l style else "" + with Not_found -> or_else s + + let mark_close_tag ~or_else s = + try + let _ = style_of_tag s in + if !color_enabled then ansi_of_style_l [Reset] else "" + with Not_found -> or_else s + + (* add color handling to formatter [ppf] *) + let set_color_tag_handling ppf = + let open Format in + let functions = pp_get_formatter_stag_functions ppf () in + let functions' = {functions with + mark_open_stag=(mark_open_tag ~or_else:functions.mark_open_stag); + mark_close_stag=(mark_close_tag ~or_else:functions.mark_close_stag); + } in + pp_set_mark_tags ppf true; (* enable tags *) + pp_set_formatter_stag_functions ppf functions'; + (* also setup margins *) + pp_set_margin ppf (pp_get_margin std_formatter()); + () + + external isatty : out_channel -> bool = "caml_sys_isatty" + + (* reasonable heuristic on whether colors should be enabled *) + let should_enable_color () = + let term = try Sys.getenv "TERM" with Not_found -> "" in + term <> "dumb" + && term <> "" + && isatty stderr + + type setting = Auto | Always | Never + + let setup = + let first = ref true in (* initialize only once *) + let formatter_l = + [Format.std_formatter; Format.err_formatter; Format.str_formatter] + in + fun o -> + if !first then ( + first := false; + Format.set_mark_tags true; + List.iter set_color_tag_handling formatter_l; + color_enabled := (match o with + | Some Always -> true + | Some Auto -> should_enable_color () + | Some Never -> false + | None -> should_enable_color ()) + ); + () +end + +let normalise_eol s = + let b = Buffer.create 80 in + for i = 
0 to String.length s - 1 do + if s.[i] <> '\r' then Buffer.add_char b s.[i] + done; + Buffer.contents b + +let delete_eol_spaces src = + let len_src = String.length src in + let dst = Bytes.create len_src in + let rec loop i_src i_dst = + if i_src = len_src then + i_dst + else + match src.[i_src] with + | ' ' | '\t' -> + loop_spaces 1 (i_src + 1) i_dst + | c -> + Bytes.set dst i_dst c; + loop (i_src + 1) (i_dst + 1) + and loop_spaces spaces i_src i_dst = + if i_src = len_src then + i_dst + else + match src.[i_src] with + | ' ' | '\t' -> + loop_spaces (spaces + 1) (i_src + 1) i_dst + | '\n' -> + Bytes.set dst i_dst '\n'; + loop (i_src + 1) (i_dst + 1) + | _ -> + for n = 0 to spaces do + Bytes.set dst (i_dst + n) src.[i_src - spaces + n] + done; + loop (i_src + 1) (i_dst + spaces + 1) + in + let stop = loop 0 0 in + Bytes.sub_string dst 0 stop + +type hook_info = { + sourcefile : string; +} + +exception HookExnWrapper of + { + error: exn; + hook_name: string; + hook_info: hook_info; + } + +exception HookExn of exn + +let raise_direct_hook_exn e = raise (HookExn e) + +let fold_hooks list hook_info ast = + List.fold_left (fun ast (hook_name,f) -> + try + f hook_info ast + with + | HookExn e -> raise e + | error -> raise (HookExnWrapper {error; hook_name; hook_info}) + (* when explicit reraise with backtrace will be available, + it should be used here *) + + ) ast (List.sort compare list) + +module type HookSig = sig + type t + + val add_hook : string -> (hook_info -> t -> t) -> unit + val apply_hooks : hook_info -> t -> t +end + +module MakeHooks(M: sig + type t + end) : HookSig with type t = M.t += struct + + type t = M.t + + let hooks = ref [] + let add_hook name f = hooks := (name, f) :: !hooks + let apply_hooks sourcefile intf = + fold_hooks !hooks sourcefile intf +end diff --git a/res_syntax/compiler-libs-406/misc.mli b/res_syntax/compiler-libs-406/misc.mli new file mode 100644 index 0000000000..7b91844081 --- /dev/null +++ b/res_syntax/compiler-libs-406/misc.mli @@ -0,0 +1,349 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Miscellaneous useful types and functions *) + +val fatal_error: string -> 'a +val fatal_errorf: ('a, Format.formatter, unit, 'b) format4 -> 'a +exception Fatal_error + +val try_finally : (unit -> 'a) -> (unit -> unit) -> 'a;; + +val map_end: ('a -> 'b) -> 'a list -> 'b list -> 'b list + (* [map_end f l t] is [map f l @ t], just more efficient. *) +val map_left_right: ('a -> 'b) -> 'a list -> 'b list + (* Like [List.map], with guaranteed left-to-right evaluation order *) +val for_all2: ('a -> 'b -> bool) -> 'a list -> 'b list -> bool + (* Same as [List.for_all] but for a binary predicate. + In addition, this [for_all2] never fails: given two lists + with different lengths, it returns false. *) +val replicate_list: 'a -> int -> 'a list + (* [replicate_list elem n] is the list with [n] elements + all identical to [elem]. *) +val list_remove: 'a -> 'a list -> 'a list + (* [list_remove x l] returns a copy of [l] with the first + element equal to [x] removed. 
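+     For example (illustration):
+     {[ list_remove 2 [1; 2; 3; 2] = [1; 3; 2] ]}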
*) +val split_last: 'a list -> 'a list * 'a + (* Return the last element and the other elements of the given list. *) +val may: ('a -> unit) -> 'a option -> unit +val may_map: ('a -> 'b) -> 'a option -> 'b option + +type ref_and_value = R : 'a ref * 'a -> ref_and_value + +val protect_refs : ref_and_value list -> (unit -> 'a) -> 'a +(** [protect_refs l f] temporarily sets [r] to [v] for each [R (r, v)] in [l] + while executing [f]. The previous contents of the references is restored + even if [f] raises an exception. *) + +module Stdlib : sig + module List : sig + type 'a t = 'a list + + val compare : ('a -> 'a -> int) -> 'a t -> 'a t -> int + (** The lexicographic order supported by the provided order. + There is no constraint on the relative lengths of the lists. *) + + val equal : ('a -> 'a -> bool) -> 'a t -> 'a t -> bool + (** Returns [true] iff the given lists have the same length and content + with respect to the given equality function. *) + + val filter_map : ('a -> 'b option) -> 'a t -> 'b t + (** [filter_map f l] applies [f] to every element of [l], filters + out the [None] elements and returns the list of the arguments of + the [Some] elements. *) + + val some_if_all_elements_are_some : 'a option t -> 'a t option + (** If all elements of the given list are [Some _] then [Some xs] + is returned with the [xs] being the contents of those [Some]s, with + order preserved. Otherwise return [None]. *) + + val map2_prefix : ('a -> 'b -> 'c) -> 'a t -> 'b t -> ('c t * 'b t) + (** [let r1, r2 = map2_prefix f l1 l2] + If [l1] is of length n and [l2 = h2 @ t2] with h2 of length n, + r1 is [List.map2 f l1 h1] and r2 is t2. *) + + val split_at : int -> 'a t -> 'a t * 'a t + (** [split_at n l] returns the pair [before, after] where [before] is + the [n] first elements of [l] and [after] the remaining ones. + If [l] has less than [n] elements, raises Invalid_argument. *) + end + + module Option : sig + type 'a t = 'a option + + val equal : ('a -> 'a -> bool) -> 'a t -> 'a t -> bool + + val iter : ('a -> unit) -> 'a t -> unit + val map : ('a -> 'b) -> 'a t -> 'b t + val fold : ('a -> 'b -> 'b) -> 'a t -> 'b -> 'b + val value_default : ('a -> 'b) -> default:'b -> 'a t -> 'b + end + + module Array : sig + val exists2 : ('a -> 'b -> bool) -> 'a array -> 'b array -> bool + (* Same as [Array.exists], but for a two-argument predicate. Raise + Invalid_argument if the two arrays are determined to have + different lengths. *) + end +end + +val find_in_path: string list -> string -> string + (* Search a file in a list of directories. *) +val find_in_path_rel: string list -> string -> string + (* Search a relative file in a list of directories. *) +val find_in_path_uncap: string list -> string -> string + (* Same, but search also for uncapitalized name, i.e. + if name is Foo.ml, allow /path/Foo.ml and /path/foo.ml + to match. *) +val remove_file: string -> unit + (* Delete the given file if it exists. Never raise an error. *) +val expand_directory: string -> string -> string + (* [expand_directory alt file] eventually expands a [+] at the + beginning of file into [alt] (an alternate root directory) *) + +val create_hashtable: int -> ('a * 'b) list -> ('a, 'b) Hashtbl.t + (* Create a hashtable of the given size and fills it with the + given bindings. *) + +val copy_file: in_channel -> out_channel -> unit + (* [copy_file ic oc] reads the contents of file [ic] and copies + them to [oc]. It stops when encountering EOF on [ic]. 
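+     A typical use, as a sketch (the file names are placeholders):
+     {[
+       let ic = open_in_bin "src.bin" and oc = open_out_bin "dst.bin" in
+       copy_file ic oc;
+       close_in ic;
+       close_out oc
+     ]}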
*) +val copy_file_chunk: in_channel -> out_channel -> int -> unit + (* [copy_file_chunk ic oc n] reads [n] bytes from [ic] and copies + them to [oc]. It raises [End_of_file] when encountering + EOF on [ic]. *) +val string_of_file: in_channel -> string + (* [string_of_file ic] reads the contents of file [ic] and copies + them to a string. It stops when encountering EOF on [ic]. *) +val output_to_file_via_temporary: + ?mode:open_flag list -> string -> (string -> out_channel -> 'a) -> 'a + (* Produce output in temporary file, then rename it + (as atomically as possible) to the desired output file name. + [output_to_file_via_temporary filename fn] opens a temporary file + which is passed to [fn] (name + output channel). When [fn] returns, + the channel is closed and the temporary file is renamed to + [filename]. *) + +val log2: int -> int + (* [log2 n] returns [s] such that [n = 1 lsl s] + if [n] is a power of 2*) +val align: int -> int -> int + (* [align n a] rounds [n] upwards to a multiple of [a] + (a power of 2). *) +val no_overflow_add: int -> int -> bool + (* [no_overflow_add n1 n2] returns [true] if the computation of + [n1 + n2] does not overflow. *) +val no_overflow_sub: int -> int -> bool + (* [no_overflow_sub n1 n2] returns [true] if the computation of + [n1 - n2] does not overflow. *) +val no_overflow_mul: int -> int -> bool + (* [no_overflow_mul n1 n2] returns [true] if the computation of + [n1 * n2] does not overflow. *) +val no_overflow_lsl: int -> int -> bool + (* [no_overflow_lsl n k] returns [true] if the computation of + [n lsl k] does not overflow. *) + +module Int_literal_converter : sig + val int : string -> int + val int32 : string -> int32 + val int64 : string -> int64 + val nativeint : string -> nativeint +end + +val chop_extensions: string -> string + (* Return the given file name without its extensions. The extensions + is the longest suffix starting with a period and not including + a directory separator, [.xyz.uvw] for instance. + + Return the given name if it does not contain an extension. *) + +val search_substring: string -> string -> int -> int + (* [search_substring pat str start] returns the position of the first + occurrence of string [pat] in string [str]. Search starts + at offset [start] in [str]. Raise [Not_found] if [pat] + does not occur. *) + +val replace_substring: before:string -> after:string -> string -> string + (* [replace_substring ~before ~after str] replaces all + occurrences of [before] with [after] in [str] and returns + the resulting string. *) + +val rev_split_words: string -> string list + (* [rev_split_words s] splits [s] in blank-separated words, and returns + the list of words in reverse order. *) + +val get_ref: 'a list ref -> 'a list + (* [get_ref lr] returns the content of the list reference [lr] and reset + its content to the empty list. 
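+     For example (illustration):
+     {[
+       let acc = ref [] in
+       acc := 1 :: !acc;
+       acc := 2 :: !acc;
+       get_ref acc    (* evaluates to [2; 1] and leaves [acc] set to [] *)
+     ]}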
*) + + +val fst3: 'a * 'b * 'c -> 'a +val snd3: 'a * 'b * 'c -> 'b +val thd3: 'a * 'b * 'c -> 'c + +val fst4: 'a * 'b * 'c * 'd -> 'a +val snd4: 'a * 'b * 'c * 'd -> 'b +val thd4: 'a * 'b * 'c * 'd -> 'c +val for4: 'a * 'b * 'c * 'd -> 'd + +module LongString : + sig + type t = bytes array + val create : int -> t + val length : t -> int + val get : t -> int -> char + val set : t -> int -> char -> unit + val blit : t -> int -> t -> int -> int -> unit + val output : out_channel -> t -> int -> int -> unit + val unsafe_blit_to_bytes : t -> int -> bytes -> int -> int -> unit + val input_bytes : in_channel -> int -> t + end + +val edit_distance : string -> string -> int -> int option +(** [edit_distance a b cutoff] computes the edit distance between + strings [a] and [b]. To help efficiency, it uses a cutoff: if the + distance [d] is smaller than [cutoff], it returns [Some d], else + [None]. + + The distance algorithm currently used is Damerau-Levenshtein: it + computes the number of insertion, deletion, substitution of + letters, or swapping of adjacent letters to go from one word to the + other. The particular algorithm may change in the future. +*) + +val spellcheck : string list -> string -> string list +(** [spellcheck env name] takes a list of names [env] that exist in + the current environment and an erroneous [name], and returns a + list of suggestions taken from [env], that are close enough to + [name] that it may be a typo for one of them. *) + +val did_you_mean : Format.formatter -> (unit -> string list) -> unit +(** [did_you_mean ppf get_choices] hints that the user may have meant + one of the option returned by calling [get_choices]. It does nothing + if the returned list is empty. + + The [unit -> ...] thunking is meant to delay any potentially-slow + computation (typically computing edit-distance with many things + from the current environment) to when the hint message is to be + printed. You should print an understandable error message before + calling [did_you_mean], so that users get a clear notification of + the failure even if producing the hint is slow. +*) + +val cut_at : string -> char -> string * string +(** [String.cut_at s c] returns a pair containing the sub-string before + the first occurrence of [c] in [s], and the sub-string after the + first occurrence of [c] in [s]. + [let (before, after) = String.cut_at s c in + before ^ String.make 1 c ^ after] is the identity if [s] contains [c]. + + Raise [Not_found] if the character does not appear in the string + @since 4.01 +*) + + +module StringSet: Set.S with type elt = string +module StringMap: Map.S with type key = string +(* TODO: replace all custom instantiations of StringSet/StringMap in various + compiler modules with this one. *) + +(* Color handling *) +module Color : sig + type color = + | Black + | Red + | Green + | Yellow + | Blue + | Magenta + | Cyan + | White + ;; + + type style = + | FG of color (* foreground *) + | BG of color (* background *) + | Bold + | Reset + + val ansi_of_style_l : style list -> string + (* ANSI escape sequence for the given style *) + + type styles = { + error: style list; + warning: style list; + loc: style list; + } + + val default_styles: styles + val get_styles: unit -> styles + val set_styles: styles -> unit + + type setting = Auto | Always | Never + + val setup : setting option -> unit + (* [setup opt] will enable or disable color handling on standard formatters + according to the value of color setting [opt]. + Only the first call to this function has an effect. 
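+     From client code this is typically a single call at startup,
+     e.g. (sketch): {[ Misc.Color.setup (Some Misc.Color.Auto) ]}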
*) + + val set_color_tag_handling : Format.formatter -> unit + (* adds functions to support color tags to the given formatter. *) +end + +val normalise_eol : string -> string +(** [normalise_eol s] returns a fresh copy of [s] with any '\r' characters + removed. Intended for pre-processing text which will subsequently be printed + on a channel which performs EOL transformations (i.e. Windows) *) + +val delete_eol_spaces : string -> string +(** [delete_eol_spaces s] returns a fresh copy of [s] with any end of + line spaces removed. Intended to normalize the output of the + toplevel for tests. *) + + + +(** {1 Hook machinery} + + Hooks machinery: + [add_hook name f] will register a function that will be called on the + argument of a later call to [apply_hooks]. Hooks are applied in the + lexicographical order of their names. +*) + +type hook_info = { + sourcefile : string; +} + +exception HookExnWrapper of + { + error: exn; + hook_name: string; + hook_info: hook_info; + } + (** An exception raised by a hook will be wrapped into a + [HookExnWrapper] constructor by the hook machinery. *) + + +val raise_direct_hook_exn: exn -> 'a + (** A hook can use [raise_unwrapped_hook_exn] to raise an exception that will + not be wrapped into a {!HookExnWrapper}. *) + +module type HookSig = sig + type t + val add_hook : string -> (hook_info -> t -> t) -> unit + val apply_hooks : hook_info -> t -> t +end + +module MakeHooks : functor (M : sig type t end) -> HookSig with type t = M.t diff --git a/res_syntax/compiler-libs-406/mtype.ml b/res_syntax/compiler-libs-406/mtype.ml new file mode 100644 index 0000000000..479f12e336 --- /dev/null +++ b/res_syntax/compiler-libs-406/mtype.ml @@ -0,0 +1,422 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +(* Operations on module types *) + +open Asttypes +open Path +open Types + + +let rec scrape env mty = + match mty with + Mty_ident p -> + begin try + scrape env (Env.find_modtype_expansion p env) + with Not_found -> + mty + end + | _ -> mty + +let freshen mty = + Subst.modtype Subst.identity mty + +let rec strengthen ~aliasable env mty p = + match scrape env mty with + Mty_signature sg -> + Mty_signature(strengthen_sig ~aliasable env sg p 0) + | Mty_functor(param, arg, res) + when !Clflags.applicative_functors && Ident.name param <> "*" -> + Mty_functor(param, arg, + strengthen ~aliasable:false env res (Papply(p, Pident param))) + | mty -> + mty + +and strengthen_sig ~aliasable env sg p pos = + match sg with + [] -> [] + | (Sig_value(_, desc) as sigelt) :: rem -> + let nextpos = + match desc.val_kind with + | Val_prim _ -> pos + | _ -> pos + 1 + in + sigelt :: strengthen_sig ~aliasable env rem p nextpos + | Sig_type(id, {type_kind=Type_abstract}, _) :: + (Sig_type(id', {type_private=Private}, _) :: _ as rem) + when Ident.name id = Ident.name id' ^ "#row" -> + strengthen_sig ~aliasable env rem p pos + | Sig_type(id, decl, rs) :: rem -> + let newdecl = + match decl.type_manifest, decl.type_private, decl.type_kind with + Some _, Public, _ -> decl + | Some _, Private, (Type_record _ | Type_variant _) -> decl + | _ -> + let manif = + Some(Btype.newgenty(Tconstr(Pdot(p, Ident.name id, nopos), + decl.type_params, ref Mnil))) in + if decl.type_kind = Type_abstract then + { decl with type_private = Public; type_manifest = manif } + else + { decl with type_manifest = manif } + in + Sig_type(id, newdecl, rs) :: strengthen_sig ~aliasable env rem p pos + | (Sig_typext _ as sigelt) :: rem -> + sigelt :: strengthen_sig ~aliasable env rem p (pos+1) + | Sig_module(id, md, rs) :: rem -> + let str = + strengthen_decl ~aliasable env md (Pdot(p, Ident.name id, pos)) + in + Sig_module(id, str, rs) + :: strengthen_sig ~aliasable + (Env.add_module_declaration ~check:false id md env) rem p (pos+1) + (* Need to add the module in case it defines manifest module types *) + | Sig_modtype(id, decl) :: rem -> + let newdecl = + match decl.mtd_type with + None -> + {decl with mtd_type = Some(Mty_ident(Pdot(p,Ident.name id,nopos)))} + | Some _ -> + decl + in + Sig_modtype(id, newdecl) :: + strengthen_sig ~aliasable (Env.add_modtype id decl env) rem p pos + (* Need to add the module type in case it is manifest *) + | (Sig_class _ as sigelt) :: rem -> + sigelt :: strengthen_sig ~aliasable env rem p (pos+1) + | (Sig_class_type _ as sigelt) :: rem -> + sigelt :: strengthen_sig ~aliasable env rem p pos + +and strengthen_decl ~aliasable env md p = + match md.md_type with + | Mty_alias _ -> md + | _ when aliasable -> {md with md_type = Mty_alias(Mta_present, p)} + | mty -> {md with md_type = strengthen ~aliasable env mty p} + +let () = Env.strengthen := strengthen + +(* In nondep_supertype, env is only used for the type it assigns to id. + Hence there is no need to keep env up-to-date by adding the bindings + traversed. 
*) + +type variance = Co | Contra | Strict + +let nondep_supertype env mid mty = + + let rec nondep_mty env va mty = + match mty with + Mty_ident p -> + if Path.isfree mid p then + nondep_mty env va (Env.find_modtype_expansion p env) + else mty + | Mty_alias(_, p) -> + if Path.isfree mid p then + nondep_mty env va (Env.find_module p env).md_type + else mty + | Mty_signature sg -> + Mty_signature(nondep_sig env va sg) + | Mty_functor(param, arg, res) -> + let var_inv = + match va with Co -> Contra | Contra -> Co | Strict -> Strict in + Mty_functor(param, Misc.may_map (nondep_mty env var_inv) arg, + nondep_mty + (Env.add_module ~arg:true param + (Btype.default_mty arg) env) va res) + + and nondep_sig env va = function + [] -> [] + | item :: rem -> + let rem' = nondep_sig env va rem in + match item with + Sig_value(id, d) -> + Sig_value(id, + {d with val_type = Ctype.nondep_type env mid d.val_type}) + :: rem' + | Sig_type(id, d, rs) -> + Sig_type(id, Ctype.nondep_type_decl env mid id (va = Co) d, rs) + :: rem' + | Sig_typext(id, ext, es) -> + Sig_typext(id, Ctype.nondep_extension_constructor env mid ext, es) + :: rem' + | Sig_module(id, md, rs) -> + Sig_module(id, {md with md_type=nondep_mty env va md.md_type}, rs) + :: rem' + | Sig_modtype(id, d) -> + begin try + Sig_modtype(id, nondep_modtype_decl env d) :: rem' + with Not_found -> + match va with + Co -> Sig_modtype(id, {mtd_type=None; mtd_loc=Location.none; + mtd_attributes=[]}) :: rem' + | _ -> raise Not_found + end + | Sig_class(id, d, rs) -> + Sig_class(id, Ctype.nondep_class_declaration env mid d, rs) + :: rem' + | Sig_class_type(id, d, rs) -> + Sig_class_type(id, Ctype.nondep_cltype_declaration env mid d, rs) + :: rem' + + and nondep_modtype_decl env mtd = + {mtd with mtd_type = Misc.may_map (nondep_mty env Strict) mtd.mtd_type} + + in + nondep_mty env Co mty + +let enrich_typedecl env p decl = + match decl.type_manifest with + Some _ -> decl + | None -> + try + let orig_decl = Env.find_type p env in + if orig_decl.type_arity <> decl.type_arity + then decl + else {decl with type_manifest = + Some(Btype.newgenty(Tconstr(p, decl.type_params, ref Mnil)))} + with Not_found -> + decl + +let rec enrich_modtype env p mty = + match mty with + Mty_signature sg -> + Mty_signature(List.map (enrich_item env p) sg) + | _ -> + mty + +and enrich_item env p = function + Sig_type(id, decl, rs) -> + Sig_type(id, + enrich_typedecl env (Pdot(p, Ident.name id, nopos)) decl, rs) + | Sig_module(id, md, rs) -> + Sig_module(id, + {md with + md_type = enrich_modtype env + (Pdot(p, Ident.name id, nopos)) md.md_type}, + rs) + | item -> item + +let rec type_paths env p mty = + match scrape env mty with + Mty_ident _ -> [] + | Mty_alias _ -> [] + | Mty_signature sg -> type_paths_sig env p 0 sg + | Mty_functor _ -> [] + +and type_paths_sig env p pos sg = + match sg with + [] -> [] + | Sig_value(_id, decl) :: rem -> + let pos' = match decl.val_kind with Val_prim _ -> pos | _ -> pos + 1 in + type_paths_sig env p pos' rem + | Sig_type(id, _decl, _) :: rem -> + Pdot(p, Ident.name id, nopos) :: type_paths_sig env p pos rem + | Sig_module(id, md, _) :: rem -> + type_paths env (Pdot(p, Ident.name id, pos)) md.md_type @ + type_paths_sig (Env.add_module_declaration ~check:false id md env) + p (pos+1) rem + | Sig_modtype(id, decl) :: rem -> + type_paths_sig (Env.add_modtype id decl env) p pos rem + | (Sig_typext _ | Sig_class _) :: rem -> + type_paths_sig env p (pos+1) rem + | (Sig_class_type _) :: rem -> + type_paths_sig env p pos rem + +let rec no_code_needed env mty = + 
match scrape env mty with + Mty_ident _ -> false + | Mty_signature sg -> no_code_needed_sig env sg + | Mty_functor(_, _, _) -> false + | Mty_alias(Mta_absent, _) -> true + | Mty_alias(Mta_present, _) -> false + +and no_code_needed_sig env sg = + match sg with + [] -> true + | Sig_value(_id, decl) :: rem -> + begin match decl.val_kind with + | Val_prim _ -> no_code_needed_sig env rem + | _ -> false + end + | Sig_module(id, md, _) :: rem -> + no_code_needed env md.md_type && + no_code_needed_sig + (Env.add_module_declaration ~check:false id md env) rem + | (Sig_type _ | Sig_modtype _ | Sig_class_type _) :: rem -> + no_code_needed_sig env rem + | (Sig_typext _ | Sig_class _) :: _ -> + false + + +(* Check whether a module type may return types *) + +let rec contains_type env = function + Mty_ident path -> + begin try match (Env.find_modtype path env).mtd_type with + | None -> raise Exit (* PR#6427 *) + | Some mty -> contains_type env mty + with Not_found -> raise Exit + end + | Mty_signature sg -> + contains_type_sig env sg + | Mty_functor (_, _, body) -> + contains_type env body + | Mty_alias _ -> + () + +and contains_type_sig env = List.iter (contains_type_item env) + +and contains_type_item env = function + Sig_type (_,({type_manifest = None} | + {type_kind = Type_abstract; type_private = Private}),_) + | Sig_modtype _ + | Sig_typext (_, {ext_args = Cstr_record _}, _) -> + (* We consider that extension constructors with an inlined + record create a type (the inlined record), even though + it would be technically safe to ignore that considering + the current constraints which guarantee that this type + is kept local to expressions. *) + raise Exit + | Sig_module (_, {md_type = mty}, _) -> + contains_type env mty + | Sig_value _ + | Sig_type _ + | Sig_typext _ + | Sig_class _ + | Sig_class_type _ -> + () + +let contains_type env mty = + try contains_type env mty; false with Exit -> true + + +(* Remove module aliases from a signature *) + +module PathSet = Set.Make (Path) +module PathMap = Map.Make (Path) +module IdentSet = Set.Make (Ident) + +let rec get_prefixes = function + Pident _ -> PathSet.empty + | Pdot (p, _, _) + | Papply (p, _) -> PathSet.add p (get_prefixes p) + +let rec get_arg_paths = function + Pident _ -> PathSet.empty + | Pdot (p, _, _) -> get_arg_paths p + | Papply (p1, p2) -> + PathSet.add p2 + (PathSet.union (get_prefixes p2) + (PathSet.union (get_arg_paths p1) (get_arg_paths p2))) + +let rec rollback_path subst p = + try Pident (PathMap.find p subst) + with Not_found -> + match p with + Pident _ | Papply _ -> p + | Pdot (p1, s, n) -> + let p1' = rollback_path subst p1 in + if Path.same p1 p1' then p else rollback_path subst (Pdot (p1', s, n)) + +let rec collect_ids subst bindings p = + begin match rollback_path subst p with + Pident id -> + let ids = + try collect_ids subst bindings (Ident.find_same id bindings) + with Not_found -> IdentSet.empty + in + IdentSet.add id ids + | _ -> IdentSet.empty + end + +let collect_arg_paths mty = + let open Btype in + let paths = ref PathSet.empty + and subst = ref PathMap.empty + and bindings = ref Ident.empty in + (* let rt = Ident.create "Root" in + and prefix = ref (Path.Pident rt) in *) + let it_path p = paths := PathSet.union (get_arg_paths p) !paths + and it_signature_item it si = + type_iterators.it_signature_item it si; + match si with + Sig_module (id, {md_type=Mty_alias(_, p)}, _) -> + bindings := Ident.add id p !bindings + | Sig_module (id, {md_type=Mty_signature sg}, _) -> + List.iter + (function Sig_module (id', _, _) -> + 
subst := + PathMap.add (Pdot (Pident id, Ident.name id', -1)) id' !subst + | _ -> ()) + sg + | _ -> () + in + let it = {type_iterators with it_path; it_signature_item} in + it.it_module_type it mty; + it.it_module_type unmark_iterators mty; + PathSet.fold (fun p -> IdentSet.union (collect_ids !subst !bindings p)) + !paths IdentSet.empty + +let rec remove_aliases env excl mty = + match mty with + Mty_signature sg -> + Mty_signature (remove_aliases_sig env excl sg) + | Mty_alias _ -> + let mty' = Env.scrape_alias env mty in + if mty' = mty then mty else + remove_aliases env excl mty' + | mty -> + mty + +and remove_aliases_sig env excl sg = + match sg with + [] -> [] + | Sig_module(id, md, rs) :: rem -> + let mty = + match md.md_type with + Mty_alias _ when IdentSet.mem id excl -> + md.md_type + | mty -> + remove_aliases env excl mty + in + Sig_module(id, {md with md_type = mty} , rs) :: + remove_aliases_sig (Env.add_module id mty env) excl rem + | Sig_modtype(id, mtd) :: rem -> + Sig_modtype(id, mtd) :: + remove_aliases_sig (Env.add_modtype id mtd env) excl rem + | it :: rem -> + it :: remove_aliases_sig env excl rem + +let remove_aliases env sg = + let excl = collect_arg_paths sg in + (* PathSet.iter (fun p -> Format.eprintf "%a@ " Printtyp.path p) excl; + Format.eprintf "@."; *) + remove_aliases env excl sg + + +(* Lower non-generalizable type variables *) + +let lower_nongen nglev mty = + let open Btype in + let it_type_expr it ty = + let ty = repr ty in + match ty with + {desc=Tvar _; level} -> + if level < generic_level && level > nglev then set_level ty nglev + | _ -> + type_iterators.it_type_expr it ty + in + let it = {type_iterators with it_type_expr} in + it.it_module_type it mty; + it.it_module_type unmark_iterators mty diff --git a/res_syntax/compiler-libs-406/mtype.mli b/res_syntax/compiler-libs-406/mtype.mli new file mode 100644 index 0000000000..84e870ac64 --- /dev/null +++ b/res_syntax/compiler-libs-406/mtype.mli @@ -0,0 +1,45 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Operations on module types *) + +open Types + +val scrape: Env.t -> module_type -> module_type + (* Expand toplevel module type abbreviations + till hitting a "hard" module type (signature, functor, + or abstract module type ident. *) +val freshen: module_type -> module_type + (* Return an alpha-equivalent copy of the given module type + where bound identifiers are fresh. *) +val strengthen: aliasable:bool -> Env.t -> module_type -> Path.t -> module_type + (* Strengthen abstract type components relative to the + given path. *) +val strengthen_decl: + aliasable:bool -> Env.t -> module_declaration -> Path.t -> module_declaration +val nondep_supertype: Env.t -> Ident.t -> module_type -> module_type + (* Return the smallest supertype of the given type + in which the given ident does not appear. + Raise [Not_found] if no such type exists. 
*) +val no_code_needed: Env.t -> module_type -> bool +val no_code_needed_sig: Env.t -> signature -> bool + (* Determine whether a module needs no implementation code, + i.e. consists only of type definitions. *) +val enrich_modtype: Env.t -> Path.t -> module_type -> module_type +val enrich_typedecl: Env.t -> Path.t -> type_declaration -> type_declaration +val type_paths: Env.t -> Path.t -> module_type -> Path.t list +val contains_type: Env.t -> module_type -> bool +val remove_aliases: Env.t -> module_type -> module_type +val lower_nongen: int -> module_type -> unit diff --git a/res_syntax/compiler-libs-406/nativeint.ml b/res_syntax/compiler-libs-406/nativeint.ml new file mode 100644 index 0000000000..2a7bf43662 --- /dev/null +++ b/res_syntax/compiler-libs-406/nativeint.ml @@ -0,0 +1,65 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Module [Nativeint]: processor-native integers *) + +external neg: nativeint -> nativeint = "%nativeint_neg" +external add: nativeint -> nativeint -> nativeint = "%nativeint_add" +external sub: nativeint -> nativeint -> nativeint = "%nativeint_sub" +external mul: nativeint -> nativeint -> nativeint = "%nativeint_mul" +external div: nativeint -> nativeint -> nativeint = "%nativeint_div" +external rem: nativeint -> nativeint -> nativeint = "%nativeint_mod" +external logand: nativeint -> nativeint -> nativeint = "%nativeint_and" +external logor: nativeint -> nativeint -> nativeint = "%nativeint_or" +external logxor: nativeint -> nativeint -> nativeint = "%nativeint_xor" +external shift_left: nativeint -> int -> nativeint = "%nativeint_lsl" +external shift_right: nativeint -> int -> nativeint = "%nativeint_asr" +external shift_right_logical: nativeint -> int -> nativeint = "%nativeint_lsr" +external of_int: int -> nativeint = "%nativeint_of_int" +external to_int: nativeint -> int = "%nativeint_to_int" +external of_float : float -> nativeint + = "caml_nativeint_of_float" "caml_nativeint_of_float_unboxed" + [@@unboxed] [@@noalloc] +external to_float : nativeint -> float + = "caml_nativeint_to_float" "caml_nativeint_to_float_unboxed" + [@@unboxed] [@@noalloc] +external of_int32: int32 -> nativeint = "%nativeint_of_int32" +external to_int32: nativeint -> int32 = "%nativeint_to_int32" + +let zero = 0n +let one = 1n +let minus_one = -1n +let succ n = add n 1n +let pred n = sub n 1n +let abs n = if n >= 0n then n else neg n +let size = Sys.word_size +let min_int = shift_left 1n (size - 1) +let max_int = sub min_int 1n +let lognot n = logxor n (-1n) + +external format : string -> nativeint -> string = "caml_nativeint_format" +let to_string n = format "%d" n + +external of_string: string -> nativeint = "caml_nativeint_of_string" + +let of_string_opt s = + (* TODO: expose a non-raising primitive directly. 
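+     For now the wrapper below simply catches [Failure]; e.g. (illustration)
+     {[
+       of_string_opt "123" = Some 123n
+       && of_string_opt "0xff" = Some 255n
+       && of_string_opt "abc" = None
+     ]}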
*) + try Some (of_string s) + with Failure _ -> None + +type t = nativeint + +let compare (x: t) (y: t) = Stdlib.compare x y +let equal (x: t) (y: t) = compare x y = 0 diff --git a/res_syntax/compiler-libs-406/nativeint.mli b/res_syntax/compiler-libs-406/nativeint.mli new file mode 100644 index 0000000000..0f1eee285d --- /dev/null +++ b/res_syntax/compiler-libs-406/nativeint.mli @@ -0,0 +1,204 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Processor-native integers. + + This module provides operations on the type [nativeint] of + signed 32-bit integers (on 32-bit platforms) or + signed 64-bit integers (on 64-bit platforms). + This integer type has exactly the same width as that of a + pointer type in the C compiler. All arithmetic operations over + [nativeint] are taken modulo 2{^32} or 2{^64} depending + on the word size of the architecture. + + Performance notice: values of type [nativeint] occupy more memory + space than values of type [int], and arithmetic operations on + [nativeint] are generally slower than those on [int]. Use [nativeint] + only when the application requires the extra bit of precision + over the [int] type. +*) + +val zero : nativeint +(** The native integer 0.*) + +val one : nativeint +(** The native integer 1.*) + +val minus_one : nativeint +(** The native integer -1.*) + +external neg : nativeint -> nativeint = "%nativeint_neg" +(** Unary negation. *) + +external add : nativeint -> nativeint -> nativeint = "%nativeint_add" +(** Addition. *) + +external sub : nativeint -> nativeint -> nativeint = "%nativeint_sub" +(** Subtraction. *) + +external mul : nativeint -> nativeint -> nativeint = "%nativeint_mul" +(** Multiplication. *) + +external div : nativeint -> nativeint -> nativeint = "%nativeint_div" +(** Integer division. Raise [Division_by_zero] if the second + argument is zero. This division rounds the real quotient of + its arguments towards zero, as specified for {!Pervasives.(/)}. *) + +external rem : nativeint -> nativeint -> nativeint = "%nativeint_mod" +(** Integer remainder. If [y] is not zero, the result + of [Nativeint.rem x y] satisfies the following properties: + [Nativeint.zero <= Nativeint.rem x y < Nativeint.abs y] and + [x = Nativeint.add (Nativeint.mul (Nativeint.div x y) y) + (Nativeint.rem x y)]. + If [y = 0], [Nativeint.rem x y] raises [Division_by_zero]. *) + +val succ : nativeint -> nativeint +(** Successor. + [Nativeint.succ x] is [Nativeint.add x Nativeint.one]. *) + +val pred : nativeint -> nativeint +(** Predecessor. + [Nativeint.pred x] is [Nativeint.sub x Nativeint.one]. *) + +val abs : nativeint -> nativeint +(** Return the absolute value of its argument. *) + +val size : int +(** The size in bits of a native integer. This is equal to [32] + on a 32-bit platform and to [64] on a 64-bit platform. *) + +val max_int : nativeint +(** The greatest representable native integer, + either 2{^31} - 1 on a 32-bit platform, + or 2{^63} - 1 on a 64-bit platform. 
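+    For instance, on a 64-bit platform (illustration):
+    {[ Nativeint.to_string Nativeint.max_int = "9223372036854775807" ]}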
*) + +val min_int : nativeint +(** The smallest representable native integer, + either -2{^31} on a 32-bit platform, + or -2{^63} on a 64-bit platform. *) + +external logand : nativeint -> nativeint -> nativeint = "%nativeint_and" +(** Bitwise logical and. *) + +external logor : nativeint -> nativeint -> nativeint = "%nativeint_or" +(** Bitwise logical or. *) + +external logxor : nativeint -> nativeint -> nativeint = "%nativeint_xor" +(** Bitwise logical exclusive or. *) + +val lognot : nativeint -> nativeint +(** Bitwise logical negation. *) + +external shift_left : nativeint -> int -> nativeint = "%nativeint_lsl" +(** [Nativeint.shift_left x y] shifts [x] to the left by [y] bits. + The result is unspecified if [y < 0] or [y >= bitsize], + where [bitsize] is [32] on a 32-bit platform and + [64] on a 64-bit platform. *) + +external shift_right : nativeint -> int -> nativeint = "%nativeint_asr" +(** [Nativeint.shift_right x y] shifts [x] to the right by [y] bits. + This is an arithmetic shift: the sign bit of [x] is replicated + and inserted in the vacated bits. + The result is unspecified if [y < 0] or [y >= bitsize]. *) + +external shift_right_logical : + nativeint -> int -> nativeint = "%nativeint_lsr" +(** [Nativeint.shift_right_logical x y] shifts [x] to the right + by [y] bits. + This is a logical shift: zeroes are inserted in the vacated bits + regardless of the sign of [x]. + The result is unspecified if [y < 0] or [y >= bitsize]. *) + + +external of_int : int -> nativeint = "%nativeint_of_int" +(** Convert the given integer (type [int]) to a native integer + (type [nativeint]). *) + +external to_int : nativeint -> int = "%nativeint_to_int" +(** Convert the given native integer (type [nativeint]) to an + integer (type [int]). The high-order bit is lost during + the conversion. *) + +external of_float : float -> nativeint + = "caml_nativeint_of_float" "caml_nativeint_of_float_unboxed" + [@@unboxed] [@@noalloc] +(** Convert the given floating-point number to a native integer, + discarding the fractional part (truncate towards 0). + The result of the conversion is undefined if, after truncation, + the number is outside the range + \[{!Nativeint.min_int}, {!Nativeint.max_int}\]. *) + +external to_float : nativeint -> float + = "caml_nativeint_to_float" "caml_nativeint_to_float_unboxed" + [@@unboxed] [@@noalloc] +(** Convert the given native integer to a floating-point number. *) + +external of_int32 : int32 -> nativeint = "%nativeint_of_int32" +(** Convert the given 32-bit integer (type [int32]) + to a native integer. *) + +external to_int32 : nativeint -> int32 = "%nativeint_to_int32" +(** Convert the given native integer to a + 32-bit integer (type [int32]). On 64-bit platforms, + the 64-bit native integer is taken modulo 2{^32}, + i.e. the top 32 bits are lost. On 32-bit platforms, + the conversion is exact. *) + +external of_string : string -> nativeint = "caml_nativeint_of_string" +(** Convert the given string to a native integer. + The string is read in decimal (by default, or if the string + begins with [0u]) or in hexadecimal, octal or binary if the + string begins with [0x], [0o] or [0b] respectively. + + The [0u] prefix reads the input as an unsigned integer in the range + [[0, 2*Nativeint.max_int+1]]. If the input exceeds {!Nativeint.max_int} + it is converted to the signed integer + [Int64.min_int + input - Nativeint.max_int - 1]. 
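+    For example (illustration):
+    {[
+      Nativeint.of_string "123" = 123n
+      && Nativeint.of_string "0x1f" = 31n
+      && Nativeint.of_string "0o17" = 15n
+      && Nativeint.of_string "0b101" = 5n
+    ]}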
+ + Raise [Failure "Nativeint.of_string"] if the given string is not + a valid representation of an integer, or if the integer represented + exceeds the range of integers representable in type [nativeint]. *) + +val of_string_opt: string -> nativeint option +(** Same as [of_string], but return [None] instead of raising. + @since 4.05 *) + +val to_string : nativeint -> string +(** Return the string representation of its argument, in decimal. *) + +type t = nativeint +(** An alias for the type of native integers. *) + +val compare: t -> t -> int +(** The comparison function for native integers, with the same specification as + {!Stdlib.compare}. Along with the type [t], this function [compare] + allows the module [Nativeint] to be passed as argument to the functors + {!Set.Make} and {!Map.Make}. *) + +val equal: t -> t -> bool +(** The equal function for native ints. + @since 4.03.0 *) + +(**/**) + +(** {1 Deprecated functions} *) + +external format : string -> nativeint -> string = "caml_nativeint_format" +(** [Nativeint.format fmt n] return the string representation of the + native integer [n] in the format specified by [fmt]. + [fmt] is a [Printf]-style format consisting of exactly + one [%d], [%i], [%u], [%x], [%X] or [%o] conversion specification. + This function is deprecated; use {!Printf.sprintf} with a [%nx] format + instead. *) diff --git a/res_syntax/compiler-libs-406/numbers.ml b/res_syntax/compiler-libs-406/numbers.ml new file mode 100644 index 0000000000..18006a5b80 --- /dev/null +++ b/res_syntax/compiler-libs-406/numbers.ml @@ -0,0 +1,86 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Pierre Chambart, OCamlPro *) +(* Mark Shinwell and Leo White, Jane Street Europe *) +(* *) +(* Copyright 2013--2016 OCamlPro SAS *) +(* Copyright 2014--2016 Jane Street Group LLC *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +module Int_base = Identifiable.Make (struct + type t = int + + let compare x y = x - y + let output oc x = Printf.fprintf oc "%i" x + let hash i = i + let equal (i : int) j = i = j + let print = Format.pp_print_int +end) + +module Int = struct + type t = int + + include Int_base + + let rec zero_to_n n = + if n < 0 then Set.empty else Set.add n (zero_to_n (n-1)) +end + +module Int8 = struct + type t = int + + let zero = 0 + let one = 1 + + let of_int_exn i = + if i < -(1 lsl 7) || i > ((1 lsl 7) - 1) then + Misc.fatal_errorf "Int8.of_int_exn: %d is out of range" i + else + i + + let to_int i = i +end + +module Int16 = struct + type t = int + + let of_int_exn i = + if i < -(1 lsl 15) || i > ((1 lsl 15) - 1) then + Misc.fatal_errorf "Int16.of_int_exn: %d is out of range" i + else + i + + let lower_int64 = Int64.neg (Int64.shift_left Int64.one 15) + let upper_int64 = Int64.sub (Int64.shift_left Int64.one 15) Int64.one + + let of_int64_exn i = + if Int64.compare i lower_int64 < 0 + || Int64.compare i upper_int64 > 0 + then + Misc.fatal_errorf "Int16.of_int64_exn: %Ld is out of range" i + else + Int64.to_int i + + let to_int t = t +end + +module Float = struct + type t = float + + include Identifiable.Make (struct + type t = float + + let compare x y = Stdlib.compare x y + let output oc x = Printf.fprintf oc "%f" x + let hash f = Hashtbl.hash f + let equal (i : float) j = i = j + let print = Format.pp_print_float + end) +end diff --git a/res_syntax/compiler-libs-406/numbers.mli b/res_syntax/compiler-libs-406/numbers.mli new file mode 100644 index 0000000000..4d5e285fa6 --- /dev/null +++ b/res_syntax/compiler-libs-406/numbers.mli @@ -0,0 +1,45 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Pierre Chambart, OCamlPro *) +(* Mark Shinwell and Leo White, Jane Street Europe *) +(* *) +(* Copyright 2013--2016 OCamlPro SAS *) +(* Copyright 2014--2016 Jane Street Group LLC *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Modules about numbers, some of which satisfy {!Identifiable.S}. *) + +module Int : sig + include Identifiable.S with type t = int + + (** [zero_to_n n] is the set of numbers \{0, ..., n\} (inclusive). *) + val zero_to_n : int -> Set.t +end + +module Int8 : sig + type t + + val zero : t + val one : t + + val of_int_exn : int -> t + val to_int : t -> int +end + +module Int16 : sig + type t + + val of_int_exn : int -> t + val of_int64_exn : Int64.t -> t + + val to_int : t -> int +end + +module Float : Identifiable.S with type t = float diff --git a/res_syntax/compiler-libs-406/obj.ml b/res_syntax/compiler-libs-406/obj.ml new file mode 100644 index 0000000000..6c5f4f9e01 --- /dev/null +++ b/res_syntax/compiler-libs-406/obj.ml @@ -0,0 +1,114 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. 
This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Operations on internal representations of values *) + +type t + +external repr : 'a -> t = "%identity" +external obj : t -> 'a = "%identity" +external magic : 'a -> 'b = "%identity" +external is_int : t -> bool = "%obj_is_int" +let [@inline always] is_block a = not (is_int a) +external tag : t -> int = "caml_obj_tag" +external set_tag : t -> int -> unit = "caml_obj_set_tag" +external size : t -> int = "%obj_size" +external reachable_words : t -> int = "caml_obj_reachable_words" +external field : t -> int -> t = "%obj_field" +external set_field : t -> int -> t -> unit = "%obj_set_field" +external floatarray_get : floatarray -> int -> float = "caml_floatarray_get" +external floatarray_set : + floatarray -> int -> float -> unit = "caml_floatarray_set" +let [@inline always] double_field x i = floatarray_get (obj x : floatarray) i +let [@inline always] set_double_field x i v = + floatarray_set (obj x : floatarray) i v +external new_block : int -> int -> t = "caml_obj_block" +external dup : t -> t = "caml_obj_dup" +external truncate : t -> int -> unit = "caml_obj_truncate" +external add_offset : t -> Int32.t -> t = "caml_obj_add_offset" + +let marshal (obj : t) = + Marshal.to_bytes obj [] +let unmarshal str pos = + (Marshal.from_bytes str pos, pos + Marshal.total_size str pos) + +let first_non_constant_constructor_tag = 0 +let last_non_constant_constructor_tag = 245 + +let lazy_tag = 246 +let closure_tag = 247 +let object_tag = 248 +let infix_tag = 249 +let forward_tag = 250 + +let no_scan_tag = 251 + +let abstract_tag = 251 +let string_tag = 252 +let double_tag = 253 +let double_array_tag = 254 +let custom_tag = 255 +let final_tag = custom_tag + + +let int_tag = 1000 +let out_of_heap_tag = 1001 +let unaligned_tag = 1002 + +let extension_constructor x = + let x = repr x in + let slot = + if (is_block x) && (tag x) <> object_tag && (size x) >= 1 then field x 0 + else x + in + let name = + if (is_block slot) && (tag slot) = object_tag then field slot 0 + else invalid_arg "Obj.extension_constructor" + in + if (tag name) = string_tag then (obj slot : extension_constructor) + else invalid_arg "Obj.extension_constructor" + +let [@inline always] extension_name (slot : extension_constructor) = + (obj (field (repr slot) 0) : string) + +let [@inline always] extension_id (slot : extension_constructor) = + (obj (field (repr slot) 1) : int) + +module Ephemeron = struct + type obj_t = t + + type t (** ephemeron *) + + external create: int -> t = "caml_ephe_create" + + let length x = size(repr x) - 2 + + external get_key: t -> int -> obj_t option = "caml_ephe_get_key" + external get_key_copy: t -> int -> obj_t option = "caml_ephe_get_key_copy" + external set_key: t -> int -> obj_t -> unit = "caml_ephe_set_key" + external unset_key: t -> int -> unit = "caml_ephe_unset_key" + external check_key: t -> int -> bool = "caml_ephe_check_key" + external blit_key : t -> int -> t -> int -> int -> unit + = "caml_ephe_blit_key" + + external get_data: t -> obj_t option = "caml_ephe_get_data" + external get_data_copy: t -> obj_t option = "caml_ephe_get_data_copy" + external set_data: t -> obj_t -> unit = "caml_ephe_set_data" + external unset_data: t -> unit = "caml_ephe_unset_data" + external check_data: t -> bool = "caml_ephe_check_data" + external 
blit_data : t -> t -> unit = "caml_ephe_blit_data" + + +end diff --git a/res_syntax/compiler-libs-406/obj.mli b/res_syntax/compiler-libs-406/obj.mli new file mode 100644 index 0000000000..e76c7df95f --- /dev/null +++ b/res_syntax/compiler-libs-406/obj.mli @@ -0,0 +1,152 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Operations on internal representations of values. + + Not for the casual user. +*) + +type t + +external repr : 'a -> t = "%identity" +external obj : t -> 'a = "%identity" +external magic : 'a -> 'b = "%identity" +val [@inline always] is_block : t -> bool +external is_int : t -> bool = "%obj_is_int" +external tag : t -> int = "caml_obj_tag" +external size : t -> int = "%obj_size" +external reachable_words : t -> int = "caml_obj_reachable_words" + (** + Computes the total size (in words, including the headers) of all + heap blocks accessible from the argument. Statically + allocated blocks are excluded. + + @Since 4.04 + *) + +external field : t -> int -> t = "%obj_field" + +(** When using flambda: + + [set_field] MUST NOT be called on immutable blocks. (Blocks allocated + in C stubs, or with [new_block] below, are always considered mutable.) + + The same goes for [set_double_field] and [set_tag]. However, for + [set_tag], in the case of immutable blocks where the middle-end optimizers + never see code that discriminates on their tag (for example records), the + operation should be safe. Such uses are nonetheless discouraged. + + For experts only: + [set_field] et al can be made safe by first wrapping the block in + {!Sys.opaque_identity}, so any information about its contents will not + be propagated. +*) +external set_field : t -> int -> t -> unit = "%obj_set_field" +external set_tag : t -> int -> unit = "caml_obj_set_tag" + +val [@inline always] double_field : t -> int -> float (* @since 3.11.2 *) +val [@inline always] set_double_field : t -> int -> float -> unit + (* @since 3.11.2 *) +external new_block : int -> int -> t = "caml_obj_block" +external dup : t -> t = "caml_obj_dup" +external truncate : t -> int -> unit = "caml_obj_truncate" +external add_offset : t -> Int32.t -> t = "caml_obj_add_offset" + (* @since 3.12.0 *) + +val first_non_constant_constructor_tag : int +val last_non_constant_constructor_tag : int + +val lazy_tag : int +val closure_tag : int +val object_tag : int +val infix_tag : int +val forward_tag : int +val no_scan_tag : int +val abstract_tag : int +val string_tag : int (* both [string] and [bytes] *) +val double_tag : int +val double_array_tag : int +val custom_tag : int +val final_tag : int + [@@ocaml.deprecated "Replaced by custom_tag."] + +val int_tag : int +val out_of_heap_tag : int +val unaligned_tag : int (* should never happen @since 3.11.0 *) + +val extension_constructor : 'a -> extension_constructor +val [@inline always] extension_name : extension_constructor -> string +val [@inline always] extension_id : extension_constructor -> int + +(** The following two functions are deprecated. Use module {!Marshal} + instead. 
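+    For instance, where old code wrote [marshal (repr v)], the
+    replacement is along the lines of (sketch): {[ Marshal.to_bytes v [] ]}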
*) + +val marshal : t -> bytes + [@@ocaml.deprecated "Use Marshal.to_bytes instead."] +val unmarshal : bytes -> int -> t * int + [@@ocaml.deprecated "Use Marshal.from_bytes and Marshal.total_size instead."] + +module Ephemeron: sig + (** Ephemeron with arbitrary arity and untyped *) + + type obj_t = t + (** alias for {!Obj.t} *) + + type t + (** an ephemeron cf {!Ephemeron} *) + + val create: int -> t + (** [create n] returns an ephemeron with [n] keys. + All the keys and the data are initially empty *) + + val length: t -> int + (** return the number of keys *) + + val get_key: t -> int -> obj_t option + (** Same as {!Ephemeron.K1.get_key} *) + + val get_key_copy: t -> int -> obj_t option + (** Same as {!Ephemeron.K1.get_key_copy} *) + + val set_key: t -> int -> obj_t -> unit + (** Same as {!Ephemeron.K1.set_key} *) + + val unset_key: t -> int -> unit + (** Same as {!Ephemeron.K1.unset_key} *) + + val check_key: t -> int -> bool + (** Same as {!Ephemeron.K1.check_key} *) + + val blit_key : t -> int -> t -> int -> int -> unit + (** Same as {!Ephemeron.K1.blit_key} *) + + val get_data: t -> obj_t option + (** Same as {!Ephemeron.K1.get_data} *) + + val get_data_copy: t -> obj_t option + (** Same as {!Ephemeron.K1.get_data_copy} *) + + val set_data: t -> obj_t -> unit + (** Same as {!Ephemeron.K1.set_data} *) + + val unset_data: t -> unit + (** Same as {!Ephemeron.K1.unset_data} *) + + val check_data: t -> bool + (** Same as {!Ephemeron.K1.check_data} *) + + val blit_data : t -> t -> unit + (** Same as {!Ephemeron.K1.blit_data} *) +end diff --git a/res_syntax/compiler-libs-406/oo.ml b/res_syntax/compiler-libs-406/oo.ml new file mode 100644 index 0000000000..3833b9721e --- /dev/null +++ b/res_syntax/compiler-libs-406/oo.ml @@ -0,0 +1,19 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Jerome Vouillon, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +let copy = CamlinternalOO.copy +external id : < .. > -> int = "%field1" +let new_method = CamlinternalOO.public_method_label +let public_method_label = CamlinternalOO.public_method_label diff --git a/res_syntax/compiler-libs-406/oo.mli b/res_syntax/compiler-libs-406/oo.mli new file mode 100644 index 0000000000..9be20c14e8 --- /dev/null +++ b/res_syntax/compiler-libs-406/oo.mli @@ -0,0 +1,38 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Jerome Vouillon, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Operations on objects *) + +val copy : (< .. > as 'a) -> 'a +(** [Oo.copy o] returns a copy of object [o], that is a fresh + object with the same methods and instance variables as [o]. *) + +external id : < .. 
> -> int = "%field1" +(** Return an integer identifying this object, unique for + the current execution of the program. The generic comparison + and hashing functions are based on this integer. When an object + is obtained by unmarshaling, the id is refreshed, and thus + different from the original object. As a consequence, the internal + invariants of data structures such as hash table or sets containing + objects are broken after unmarshaling the data structures. + *) + +(**/**) + +(* The following is for system use only. Do not call directly. *) + +(** For internal use (CamlIDL) *) +val new_method : string -> CamlinternalOO.tag +val public_method_label : string -> CamlinternalOO.tag diff --git a/res_syntax/compiler-libs-406/oprint.ml b/res_syntax/compiler-libs-406/oprint.ml new file mode 100644 index 0000000000..1aa71f6645 --- /dev/null +++ b/res_syntax/compiler-libs-406/oprint.ml @@ -0,0 +1,773 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 2002 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +open Format +open Outcometree + +exception Ellipsis + +let cautious f ppf arg = + try f ppf arg with + Ellipsis -> fprintf ppf "..." + +let out_ident = ref pp_print_string +let map_primitive_name = ref (fun x -> x) + +let print_lident ppf = function + | "::" -> !out_ident ppf "(::)" + | s -> !out_ident ppf s + +let rec print_ident ppf = + function + Oide_ident s -> print_lident ppf s + | Oide_dot (id, s) -> + print_ident ppf id; pp_print_char ppf '.'; print_lident ppf s + | Oide_apply (id1, id2) -> + fprintf ppf "%a(%a)" print_ident id1 print_ident id2 + +let parenthesized_ident name = + (List.mem name ["or"; "mod"; "land"; "lor"; "lxor"; "lsl"; "lsr"; "asr"]) + || + (match name.[0] with + 'a'..'z' | 'A'..'Z' | '\223'..'\246' | '\248'..'\255' | '_' -> + false + | _ -> true) + +let value_ident ppf name = + if parenthesized_ident name then + fprintf ppf "( %s )" name + else + pp_print_string ppf name + +(* Values *) + +let valid_float_lexeme s = + let l = String.length s in + let rec loop i = + if i >= l then s ^ "." else + match s.[i] with + | '0' .. '9' | '-' -> loop (i+1) + | _ -> s + in loop 0 + +let float_repres f = + match classify_float f with + FP_nan -> "nan" + | FP_infinite -> + if f < 0.0 then "neg_infinity" else "infinity" + | _ -> + let float_val = + let s1 = Printf.sprintf "%.12g" f in + if f = float_of_string s1 then s1 else + let s2 = Printf.sprintf "%.15g" f in + if f = float_of_string s2 then s2 else + Printf.sprintf "%.18g" f + in valid_float_lexeme float_val + +let parenthesize_if_neg ppf fmt v isneg = + if isneg then pp_print_char ppf '('; + fprintf ppf fmt v; + if isneg then pp_print_char ppf ')' + +let escape_string s = + (* Escape only C0 control characters (bytes <= 0x1F), DEL(0x7F), '\\' and '"' *) + let n = ref 0 in + for i = 0 to String.length s - 1 do + n := !n + + (match String.unsafe_get s i with + | '\"' | '\\' | '\n' | '\t' | '\r' | '\b' -> 2 + | '\x00' .. 
'\x1F' + | '\x7F' -> 4 + | _ -> 1) + done; + if !n = String.length s then s else begin + let s' = Bytes.create !n in + n := 0; + for i = 0 to String.length s - 1 do + begin match String.unsafe_get s i with + | ('\"' | '\\') as c -> + Bytes.unsafe_set s' !n '\\'; incr n; Bytes.unsafe_set s' !n c + | '\n' -> + Bytes.unsafe_set s' !n '\\'; incr n; Bytes.unsafe_set s' !n 'n' + | '\t' -> + Bytes.unsafe_set s' !n '\\'; incr n; Bytes.unsafe_set s' !n 't' + | '\r' -> + Bytes.unsafe_set s' !n '\\'; incr n; Bytes.unsafe_set s' !n 'r' + | '\b' -> + Bytes.unsafe_set s' !n '\\'; incr n; Bytes.unsafe_set s' !n 'b' + | '\x00' .. '\x1F' | '\x7F' as c -> + let a = Char.code c in + Bytes.unsafe_set s' !n '\\'; + incr n; + Bytes.unsafe_set s' !n (Char.chr (48 + a / 100)); + incr n; + Bytes.unsafe_set s' !n (Char.chr (48 + (a / 10) mod 10)); + incr n; + Bytes.unsafe_set s' !n (Char.chr (48 + a mod 10)); + | c -> Bytes.unsafe_set s' !n c + end; + incr n + done; + Bytes.to_string s' + end + + +let print_out_string ppf s = + let not_escaped = + (* let the user dynamically choose if strings should be escaped: *) + match Sys.getenv_opt "OCAMLTOP_UTF_8" with + | None -> true + | Some x -> + match bool_of_string_opt x with + | None -> true + | Some f -> f in + if not_escaped then + fprintf ppf "\"%s\"" (escape_string s) + else + fprintf ppf "%S" s + +let print_out_value ppf tree = + let rec print_tree_1 ppf = + function + | Oval_constr (name, [param]) -> + fprintf ppf "@[<1>%a@ %a@]" print_ident name print_constr_param param + | Oval_constr (name, (_ :: _ as params)) -> + fprintf ppf "@[<1>%a@ (%a)@]" print_ident name + (print_tree_list print_tree_1 ",") params + | Oval_variant (name, Some param) -> + fprintf ppf "@[<2>`%s@ %a@]" name print_constr_param param + | tree -> print_simple_tree ppf tree + and print_constr_param ppf = function + | Oval_int i -> parenthesize_if_neg ppf "%i" i (i < 0) + | Oval_int32 i -> parenthesize_if_neg ppf "%lil" i (i < 0l) + | Oval_int64 i -> parenthesize_if_neg ppf "%LiL" i (i < 0L) + | Oval_nativeint i -> parenthesize_if_neg ppf "%nin" i (i < 0n) + | Oval_float f -> parenthesize_if_neg ppf "%s" (float_repres f) (f < 0.0) + | Oval_string (_,_, Ostr_bytes) as tree -> + pp_print_char ppf '('; + print_simple_tree ppf tree; + pp_print_char ppf ')'; + | tree -> print_simple_tree ppf tree + and print_simple_tree ppf = + function + Oval_int i -> fprintf ppf "%i" i + | Oval_int32 i -> fprintf ppf "%lil" i + | Oval_int64 i -> fprintf ppf "%LiL" i + | Oval_nativeint i -> fprintf ppf "%nin" i + | Oval_float f -> pp_print_string ppf (float_repres f) + | Oval_char c -> fprintf ppf "%C" c + | Oval_string (s, maxlen, kind) -> + begin try + let len = String.length s in + let s = if len > maxlen then String.sub s 0 maxlen else s in + begin match kind with + | Ostr_bytes -> fprintf ppf "Bytes.of_string %S" s + | Ostr_string -> print_out_string ppf s + end; + (if len > maxlen then + fprintf ppf + "... 
(* string length %d; truncated *)" len + ) + with + Invalid_argument _ (* "String.create" *)-> fprintf ppf "" + end + | Oval_list tl -> + fprintf ppf "@[<1>[%a]@]" (print_tree_list print_tree_1 ";") tl + | Oval_array tl -> + fprintf ppf "@[<2>[|%a|]@]" (print_tree_list print_tree_1 ";") tl + | Oval_constr (name, []) -> print_ident ppf name + | Oval_variant (name, None) -> fprintf ppf "`%s" name + | Oval_stuff s -> pp_print_string ppf s + | Oval_record fel -> + fprintf ppf "@[<1>{%a}@]" (cautious (print_fields true)) fel + | Oval_ellipsis -> raise Ellipsis + | Oval_printer f -> f ppf + | Oval_tuple tree_list -> + fprintf ppf "@[<1>(%a)@]" (print_tree_list print_tree_1 ",") tree_list + | tree -> fprintf ppf "@[<1>(%a)@]" (cautious print_tree_1) tree + and print_fields first ppf = + function + [] -> () + | (name, tree) :: fields -> + if not first then fprintf ppf ";@ "; + fprintf ppf "@[<1>%a@ =@ %a@]" print_ident name (cautious print_tree_1) + tree; + print_fields false ppf fields + and print_tree_list print_item sep ppf tree_list = + let rec print_list first ppf = + function + [] -> () + | tree :: tree_list -> + if not first then fprintf ppf "%s@ " sep; + print_item ppf tree; + print_list false ppf tree_list + in + cautious (print_list true) ppf tree_list + in + cautious print_tree_1 ppf tree + +let out_value = ref print_out_value + +(* Types *) + +let rec print_list_init pr sep ppf = + function + [] -> () + | a :: l -> sep ppf; pr ppf a; print_list_init pr sep ppf l + +let rec print_list pr sep ppf = + function + [] -> () + | [a] -> pr ppf a + | a :: l -> pr ppf a; sep ppf; print_list pr sep ppf l + +let pr_present = + print_list (fun ppf s -> fprintf ppf "`%s" s) (fun ppf -> fprintf ppf "@ ") + +let pr_vars = + print_list (fun ppf s -> fprintf ppf "'%s" s) (fun ppf -> fprintf ppf "@ ") + +let rec print_out_type ppf = + function + | Otyp_alias (ty, s) -> + fprintf ppf "@[%a@ as '%s@]" print_out_type ty s + | Otyp_poly (sl, ty) -> + fprintf ppf "@[%a.@ %a@]" + pr_vars sl + print_out_type ty + | ty -> + print_out_type_1 ppf ty + +and print_out_type_1 ppf = + function + Otyp_arrow (lab, ty1, ty2) -> + pp_open_box ppf 0; + if lab <> "" then (pp_print_string ppf lab; pp_print_char ppf ':'); + print_out_type_2 ppf ty1; + pp_print_string ppf " ->"; + pp_print_space ppf (); + print_out_type_1 ppf ty2; + pp_close_box ppf () + | ty -> print_out_type_2 ppf ty +and print_out_type_2 ppf = + function + Otyp_tuple tyl -> + fprintf ppf "@[<0>%a@]" (print_typlist print_simple_out_type " *") tyl + | ty -> print_simple_out_type ppf ty +and print_simple_out_type ppf = + function + Otyp_class (ng, id, tyl) -> + fprintf ppf "@[%a%s#%a@]" print_typargs tyl (if ng then "_" else "") + print_ident id + | Otyp_constr (Oide_dot (Oide_dot (Oide_ident "Js", "Fn"), name ), + [tyl]) + -> + let res = + if name = "arity0" then + Otyp_arrow ("", Otyp_constr (Oide_ident "unit", []),tyl) + else tyl + in + fprintf ppf "@[<0>(%a@ [@bs])@]" print_out_type_1 res + | Otyp_constr (Oide_dot (Oide_dot (Oide_ident "Js_OO", "Meth" ),name), + [tyl]) + -> + let res = + if name = "arity0" then + Otyp_arrow ("", Otyp_constr (Oide_ident "unit", []),tyl) + else tyl + in + fprintf ppf "@[<0>(%a@ [@bs.meth])@]" print_out_type_1 res + | Otyp_constr (Oide_dot (Oide_dot (Oide_ident "Js_OO", "Callback" ), _), + [tyl]) + -> + fprintf ppf "@[<0>(%a@ [@bs.this])@]" print_out_type_1 tyl + | Otyp_constr (id, tyl) -> + pp_open_box ppf 0; + print_typargs ppf tyl; + print_ident ppf id; + pp_close_box ppf () + | Otyp_object (fields, rest) -> + fprintf ppf 
"@[<2>< %a >@]" (print_fields rest) fields + | Otyp_stuff s -> pp_print_string ppf s + | Otyp_var (ng, s) -> fprintf ppf "'%s%s" (if ng then "_" else "") s + | Otyp_variant (non_gen, row_fields, closed, tags) -> + let print_present ppf = + function + None | Some [] -> () + | Some l -> fprintf ppf "@;<1 -2>> @[%a@]" pr_present l + in + let print_fields ppf = + function + Ovar_fields fields -> + print_list print_row_field (fun ppf -> fprintf ppf "@;<1 -2>| ") + ppf fields + | Ovar_typ typ -> + print_simple_out_type ppf typ + in + fprintf ppf "%s[%s@[@[%a@]%a ]@]" (if non_gen then "_" else "") + (if closed then if tags = None then " " else "< " + else if tags = None then "> " else "? ") + print_fields row_fields + print_present tags + | Otyp_alias _ | Otyp_poly _ | Otyp_arrow _ | Otyp_tuple _ as ty -> + pp_open_box ppf 1; + pp_print_char ppf '('; + print_out_type ppf ty; + pp_print_char ppf ')'; + pp_close_box ppf () + | Otyp_abstract | Otyp_open + | Otyp_sum _ | Otyp_manifest (_, _) -> () + | Otyp_record lbls -> print_record_decl ppf lbls + | Otyp_module (p, n, tyl) -> + fprintf ppf "@[<1>(module %s" p; + let first = ref true in + List.iter2 + (fun s t -> + let sep = if !first then (first := false; "with") else "and" in + fprintf ppf " %s type %s = %a" sep s print_out_type t + ) + n tyl; + fprintf ppf ")@]" + | Otyp_attribute (t, attr) -> + fprintf ppf "@[<1>(%a [@@%s])@]" print_out_type t attr.oattr_name +and print_record_decl ppf lbls = + fprintf ppf "{%a@;<1 -2>}" + (print_list_init print_out_label (fun ppf -> fprintf ppf "@ ")) lbls +and print_fields rest ppf = + function + [] -> + begin match rest with + Some non_gen -> fprintf ppf "%s.." (if non_gen then "_" else "") + | None -> () + end + | [s, t] -> + fprintf ppf "%s : %a" s print_out_type t; + begin match rest with + Some _ -> fprintf ppf ";@ " + | None -> () + end; + print_fields rest ppf [] + | (s, t) :: l -> + fprintf ppf "%s : %a;@ %a" s print_out_type t (print_fields rest) l +and print_row_field ppf (l, opt_amp, tyl) = + let pr_of ppf = + if opt_amp then fprintf ppf " of@ &@ " + else if tyl <> [] then fprintf ppf " of@ " + else fprintf ppf "" + in + fprintf ppf "@[`%s%t%a@]" l pr_of (print_typlist print_out_type " &") + tyl +and print_typlist print_elem sep ppf = + function + [] -> () + | [ty] -> print_elem ppf ty + | ty :: tyl -> + print_elem ppf ty; + pp_print_string ppf sep; + pp_print_space ppf (); + print_typlist print_elem sep ppf tyl +and print_typargs ppf = + function + [] -> () + | [ty1] -> print_simple_out_type ppf ty1; pp_print_space ppf () + | tyl -> + pp_open_box ppf 1; + pp_print_char ppf '('; + print_typlist print_out_type "," ppf tyl; + pp_print_char ppf ')'; + pp_close_box ppf (); + pp_print_space ppf () +and print_out_label ppf (name, mut, opt, arg) = + fprintf ppf "@[<2>%s%s%s :@ %a@];" (if mut then "mutable " else "") name (if opt then "?" 
else "") + print_out_type arg + +let out_type = ref print_out_type + +(* Class types *) + +let type_parameter ppf (ty, (co, cn)) = + fprintf ppf "%s%s" + (if not cn then "+" else if not co then "-" else "") + (if ty = "_" then ty else "'"^ty) + +let print_out_class_params ppf = + function + [] -> () + | tyl -> + fprintf ppf "@[<1>[%a]@]@ " + (print_list type_parameter (fun ppf -> fprintf ppf ", ")) + tyl + +let rec print_out_class_type ppf = + function + Octy_constr (id, tyl) -> + let pr_tyl ppf = + function + [] -> () + | tyl -> + fprintf ppf "@[<1>[%a]@]@ " (print_typlist !out_type ",") tyl + in + fprintf ppf "@[%a%a@]" pr_tyl tyl print_ident id + | Octy_arrow (lab, ty, cty) -> + fprintf ppf "@[%s%a ->@ %a@]" (if lab <> "" then lab ^ ":" else "") + print_out_type_2 ty print_out_class_type cty + | Octy_signature (self_ty, csil) -> + let pr_param ppf = + function + Some ty -> fprintf ppf "@ @[(%a)@]" !out_type ty + | None -> () + in + fprintf ppf "@[@[<2>object%a@]@ %a@;<1 -2>end@]" pr_param self_ty + (print_list print_out_class_sig_item (fun ppf -> fprintf ppf "@ ")) + csil +and print_out_class_sig_item ppf = + function + Ocsg_constraint (ty1, ty2) -> + fprintf ppf "@[<2>constraint %a =@ %a@]" !out_type ty1 + !out_type ty2 + | Ocsg_method (name, priv, virt, ty) -> + fprintf ppf "@[<2>method %s%s%s :@ %a@]" + (if priv then "private " else "") (if virt then "virtual " else "") + name !out_type ty + | Ocsg_value (name, mut, vr, ty) -> + fprintf ppf "@[<2>val %s%s%s :@ %a@]" + (if mut then "mutable " else "") + (if vr then "virtual " else "") + name !out_type ty + +let out_class_type = ref print_out_class_type + +(* Signature *) + +let out_module_type = ref (fun _ -> failwith "Oprint.out_module_type") +let out_sig_item = ref (fun _ -> failwith "Oprint.out_sig_item") +let out_signature = ref (fun _ -> failwith "Oprint.out_signature") +let out_type_extension = ref (fun _ -> failwith "Oprint.out_type_extension") + +let rec print_out_functor funct ppf = + function + Omty_functor (_, None, mty_res) -> + if funct then fprintf ppf "() %a" (print_out_functor true) mty_res + else fprintf ppf "functor@ () %a" (print_out_functor true) mty_res + | Omty_functor (name, Some mty_arg, mty_res) -> begin + match name, funct with + | "_", true -> + fprintf ppf "->@ %a ->@ %a" + print_out_module_type mty_arg (print_out_functor false) mty_res + | "_", false -> + fprintf ppf "%a ->@ %a" + print_out_module_type mty_arg (print_out_functor false) mty_res + | name, true -> + fprintf ppf "(%s : %a) %a" name + print_out_module_type mty_arg (print_out_functor true) mty_res + | name, false -> + fprintf ppf "functor@ (%s : %a) %a" name + print_out_module_type mty_arg (print_out_functor true) mty_res + end + | m -> + if funct then fprintf ppf "->@ %a" print_out_module_type m + else print_out_module_type ppf m + +and print_out_module_type ppf = + function + Omty_abstract -> () + | Omty_functor _ as t -> + fprintf ppf "@[<2>%a@]" (print_out_functor false) t + | Omty_ident id -> fprintf ppf "%a" print_ident id + | Omty_signature sg -> + fprintf ppf "@[sig@ %a@;<1 -2>end@]" !out_signature sg + | Omty_alias id -> fprintf ppf "(module %a)" print_ident id +and print_out_signature ppf = + function + [] -> () + | [item] -> !out_sig_item ppf item + | Osig_typext(ext, Oext_first) :: items -> + (* Gather together the extension constructors *) + let rec gather_extensions acc items = + match items with + Osig_typext(ext, Oext_next) :: items -> + gather_extensions + ((ext.oext_name, ext.oext_args, ext.oext_ret_type) :: acc) + items + | _ 
-> (List.rev acc, items) + in + let exts, items = + gather_extensions + [(ext.oext_name, ext.oext_args, ext.oext_ret_type)] + items + in + let te = + { otyext_name = ext.oext_type_name; + otyext_params = ext.oext_type_params; + otyext_constructors = exts; + otyext_private = ext.oext_private } + in + fprintf ppf "%a@ %a" !out_type_extension te print_out_signature items + | item :: items -> + fprintf ppf "%a@ %a" !out_sig_item item print_out_signature items +and print_out_sig_item ppf = + function + Osig_class (vir_flag, name, params, clt, rs) -> + fprintf ppf "@[<2>%s%s@ %a%s@ :@ %a@]" + (if rs = Orec_next then "and" else "class") + (if vir_flag then " virtual" else "") print_out_class_params params + name !out_class_type clt + | Osig_class_type (vir_flag, name, params, clt, rs) -> + fprintf ppf "@[<2>%s%s@ %a%s@ =@ %a@]" + (if rs = Orec_next then "and" else "class type") + (if vir_flag then " virtual" else "") print_out_class_params params + name !out_class_type clt + | Osig_typext (ext, Oext_exception) -> + fprintf ppf "@[<2>exception %a@]" + print_out_constr (ext.oext_name, ext.oext_args, ext.oext_ret_type) + | Osig_typext (ext, _es) -> + print_out_extension_constructor ppf ext + | Osig_modtype (name, Omty_abstract) -> + fprintf ppf "@[<2>module type %s@]" name + | Osig_modtype (name, mty) -> + fprintf ppf "@[<2>module type %s =@ %a@]" name !out_module_type mty + | Osig_module (name, Omty_alias id, _) -> + fprintf ppf "@[<2>module %s =@ %a@]" name print_ident id + | Osig_module (name, mty, rs) -> + fprintf ppf "@[<2>%s %s :@ %a@]" + (match rs with Orec_not -> "module" + | Orec_first -> "module rec" + | Orec_next -> "and") + name !out_module_type mty + | Osig_type(td, rs) -> + print_out_type_decl + (match rs with + | Orec_not -> "type nonrec" + | Orec_first -> "type" + | Orec_next -> "and") + ppf td + | Osig_value vd -> + let kwd = if vd.oval_prims = [] then "val" else "external" in + let pr_prims ppf = + function + [] -> () + | s :: sl -> + fprintf ppf "@ = \"%s\"" s; + List.iter (fun s -> +(* TODO: in general, we should print bs attributes, some attributes like + bs.splice does need it *) + fprintf ppf "@ \"%s\"" (!map_primitive_name s) + ) sl + in + fprintf ppf "@[<2>%s %a :@ %a%a%a@]" kwd value_ident vd.oval_name + !out_type vd.oval_type pr_prims vd.oval_prims + (fun ppf -> List.iter (fun a -> fprintf ppf "@ [@@@@%s]" a.oattr_name)) + vd.oval_attributes + | Osig_ellipsis -> + fprintf ppf "..." 
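(* Editor's note, not part of the vendored oprint.ml: a minimal sketch of how the
   Outcometree printers defined above are typically driven from client code. It
   assumes only the vendored Outcometree constructors and the [out_type] formatter
   reference exposed in oprint.mli further below; the value name [example_ty] is
   purely illustrative. The sketch is kept inside this comment so it does not
   alter the vendored module itself.

   let () =
     let open Outcometree in
     (* build the outcome-tree form of the type [int -> string] *)
     let example_ty =
       Otyp_arrow ("", Otyp_constr (Oide_ident "int", []),
                       Otyp_constr (Oide_ident "string", []))
     in
     (* [Oprint.out_type] is a ref so front ends can swap in their own printer;
        dereferencing it yields a [Format.formatter -> out_type -> unit] *)
     Format.printf "%a@." !Oprint.out_type example_ty
     (* prints: int -> string *)
*)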
+ +and print_out_type_decl kwd ppf td = + let print_constraints ppf = + List.iter + (fun (ty1, ty2) -> + fprintf ppf "@ @[<2>constraint %a =@ %a@]" !out_type ty1 + !out_type ty2) + td.otype_cstrs + in + let type_defined ppf = + match td.otype_params with + [] -> pp_print_string ppf td.otype_name + | [param] -> fprintf ppf "@[%a@ %s@]" type_parameter param td.otype_name + | _ -> + fprintf ppf "@[(@[%a)@]@ %s@]" + (print_list type_parameter (fun ppf -> fprintf ppf ",@ ")) + td.otype_params + td.otype_name + in + let print_manifest ppf = + function + Otyp_manifest (ty, _) -> fprintf ppf " =@ %a" !out_type ty + | _ -> () + in + let print_name_params ppf = + fprintf ppf "%s %t%a" kwd type_defined print_manifest td.otype_type + in + let ty = + match td.otype_type with + Otyp_manifest (_, ty) -> ty + | _ -> td.otype_type + in + let print_private ppf = function + Asttypes.Private -> fprintf ppf " private" + | Asttypes.Public -> () + in + let print_immediate ppf = + if td.otype_immediate then fprintf ppf " [%@%@immediate]" else () + in + let print_unboxed ppf = + if td.otype_unboxed then fprintf ppf " [%@%@unboxed]" else () + in + let print_out_tkind ppf = function + | Otyp_abstract -> () + | Otyp_record lbls -> + fprintf ppf " =%a %a" + print_private td.otype_private + print_record_decl lbls + | Otyp_sum constrs -> + fprintf ppf " =%a@;<1 2>%a" + print_private td.otype_private + (print_list print_out_constr (fun ppf -> fprintf ppf "@ | ")) constrs + | Otyp_open -> + fprintf ppf " =%a .." + print_private td.otype_private + | ty -> + fprintf ppf " =%a@;<1 2>%a" + print_private td.otype_private + !out_type ty + in + fprintf ppf "@[<2>@[%t%a@]%t%t%t@]" + print_name_params + print_out_tkind ty + print_constraints + print_immediate + print_unboxed + +and print_out_constr ppf (name, tyl,ret_type_opt) = + let name = + match name with + | "::" -> "(::)" (* #7200 *) + | s -> s + in + match ret_type_opt with + | None -> + begin match tyl with + | [] -> + pp_print_string ppf name + | _ -> + fprintf ppf "@[<2>%s of@ %a@]" name + (print_typlist print_simple_out_type " *") tyl + end + | Some ret_type -> + begin match tyl with + | [] -> + fprintf ppf "@[<2>%s :@ %a@]" name print_simple_out_type ret_type + | _ -> + fprintf ppf "@[<2>%s :@ %a -> %a@]" name + (print_typlist print_simple_out_type " *") + tyl print_simple_out_type ret_type + end + +and print_out_extension_constructor ppf ext = + let print_extended_type ppf = + let print_type_parameter ppf ty = + fprintf ppf "%s" + (if ty = "_" then ty else "'"^ty) + in + match ext.oext_type_params with + [] -> fprintf ppf "%s" ext.oext_type_name + | [ty_param] -> + fprintf ppf "@[%a@ %s@]" + print_type_parameter + ty_param + ext.oext_type_name + | _ -> + fprintf ppf "@[(@[%a)@]@ %s@]" + (print_list print_type_parameter (fun ppf -> fprintf ppf ",@ ")) + ext.oext_type_params + ext.oext_type_name + in + fprintf ppf "@[type %t +=%s@;<1 2>%a@]" + print_extended_type + (if ext.oext_private = Asttypes.Private then " private" else "") + print_out_constr (ext.oext_name, ext.oext_args, ext.oext_ret_type) + +and print_out_type_extension ppf te = + let print_extended_type ppf = + let print_type_parameter ppf ty = + fprintf ppf "%s" + (if ty = "_" then ty else "'"^ty) + in + match te.otyext_params with + [] -> fprintf ppf "%s" te.otyext_name + | [param] -> + fprintf ppf "@[%a@ %s@]" + print_type_parameter param + te.otyext_name + | _ -> + fprintf ppf "@[(@[%a)@]@ %s@]" + (print_list print_type_parameter (fun ppf -> fprintf ppf ",@ ")) + te.otyext_params + te.otyext_name + in + 
fprintf ppf "@[type %t +=%s@;<1 2>%a@]" + print_extended_type + (if te.otyext_private = Asttypes.Private then " private" else "") + (print_list print_out_constr (fun ppf -> fprintf ppf "@ | ")) + te.otyext_constructors + +let _ = out_module_type := print_out_module_type +let _ = out_signature := print_out_signature +let _ = out_sig_item := print_out_sig_item +let _ = out_type_extension := print_out_type_extension + +(* Phrases *) + +let print_out_exception ppf exn outv = + match exn with + Sys.Break -> fprintf ppf "Interrupted.@." + | Out_of_memory -> fprintf ppf "Out of memory during evaluation.@." + | Stack_overflow -> + fprintf ppf "Stack overflow during evaluation (looping recursion?).@." + | _ -> fprintf ppf "@[Exception:@ %a.@]@." !out_value outv + +let rec print_items ppf = + function + [] -> () + | (Osig_typext(ext, Oext_first), None) :: items -> + (* Gather together extension constructors *) + let rec gather_extensions acc items = + match items with + (Osig_typext(ext, Oext_next), None) :: items -> + gather_extensions + ((ext.oext_name, ext.oext_args, ext.oext_ret_type) :: acc) + items + | _ -> (List.rev acc, items) + in + let exts, items = + gather_extensions + [(ext.oext_name, ext.oext_args, ext.oext_ret_type)] + items + in + let te = + { otyext_name = ext.oext_type_name; + otyext_params = ext.oext_type_params; + otyext_constructors = exts; + otyext_private = ext.oext_private } + in + fprintf ppf "@[%a@]" !out_type_extension te; + if items <> [] then fprintf ppf "@ %a" print_items items + | (tree, valopt) :: items -> + begin match valopt with + Some v -> + fprintf ppf "@[<2>%a =@ %a@]" !out_sig_item tree + !out_value v + | None -> fprintf ppf "@[%a@]" !out_sig_item tree + end; + if items <> [] then fprintf ppf "@ %a" print_items items + +let print_out_phrase ppf = + function + Ophr_eval (outv, ty) -> + fprintf ppf "@[- : %a@ =@ %a@]@." !out_type ty !out_value outv + | Ophr_signature [] -> () + | Ophr_signature items -> fprintf ppf "@[%a@]@." print_items items + | Ophr_exception (exn, outv) -> print_out_exception ppf exn outv + +let out_phrase = ref print_out_phrase diff --git a/res_syntax/compiler-libs-406/oprint.mli b/res_syntax/compiler-libs-406/oprint.mli new file mode 100644 index 0000000000..cf594d164d --- /dev/null +++ b/res_syntax/compiler-libs-406/oprint.mli @@ -0,0 +1,30 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 2002 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +open Format +open Outcometree + +val out_ident : (formatter -> string -> unit) ref +val map_primitive_name : (string -> string) ref +val out_value : (formatter -> out_value -> unit) ref +val out_type : (formatter -> out_type -> unit) ref +val out_class_type : (formatter -> out_class_type -> unit) ref +val out_module_type : (formatter -> out_module_type -> unit) ref +val out_sig_item : (formatter -> out_sig_item -> unit) ref +val out_signature : (formatter -> out_sig_item list -> unit) ref +val out_type_extension : (formatter -> out_type_extension -> unit) ref +val out_phrase : (formatter -> out_phrase -> unit) ref + +val parenthesized_ident : string -> bool diff --git a/res_syntax/compiler-libs-406/outcometree.mli b/res_syntax/compiler-libs-406/outcometree.mli new file mode 100644 index 0000000000..10bd6535d4 --- /dev/null +++ b/res_syntax/compiler-libs-406/outcometree.mli @@ -0,0 +1,144 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Daniel de Rauglaudre, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 2001 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Module [Outcometree]: results displayed by the toplevel *) + +(* These types represent messages that the toplevel displays as normal + results or errors. The real displaying is customisable using the hooks: + [Toploop.print_out_value] + [Toploop.print_out_type] + [Toploop.print_out_sig_item] + [Toploop.print_out_phrase] *) + +type out_ident = + | Oide_apply of out_ident * out_ident + | Oide_dot of out_ident * string + | Oide_ident of string + +type out_string = + | Ostr_string + | Ostr_bytes + +type out_attribute = + { oattr_name: string } + +type out_value = + | Oval_array of out_value list + | Oval_char of char + | Oval_constr of out_ident * out_value list + | Oval_ellipsis + | Oval_float of float + | Oval_int of int + | Oval_int32 of int32 + | Oval_int64 of int64 + | Oval_nativeint of nativeint + | Oval_list of out_value list + | Oval_printer of (Format.formatter -> unit) + | Oval_record of (out_ident * out_value) list + | Oval_string of string * int * out_string (* string, size-to-print, kind *) + | Oval_stuff of string + | Oval_tuple of out_value list + | Oval_variant of string * out_value option + +type out_type = + | Otyp_abstract + | Otyp_open + | Otyp_alias of out_type * string + | Otyp_arrow of string * out_type * out_type + | Otyp_class of bool * out_ident * out_type list + | Otyp_constr of out_ident * out_type list + | Otyp_manifest of out_type * out_type + | Otyp_object of (string * out_type) list * bool option + | Otyp_record of (string * bool * bool * out_type) list + | Otyp_stuff of string + | Otyp_sum of (string * out_type list * out_type option) list + | Otyp_tuple of out_type list + | Otyp_var of bool * string + | Otyp_variant of + bool * out_variant * bool * (string list) option + | Otyp_poly of string list * out_type + | Otyp_module of string * string list * out_type list + | Otyp_attribute of out_type * out_attribute + +and out_variant = + | Ovar_fields of (string * bool * out_type list) list + | Ovar_typ of out_type + +type 
out_class_type = + | Octy_constr of out_ident * out_type list + | Octy_arrow of string * out_type * out_class_type + | Octy_signature of out_type option * out_class_sig_item list +and out_class_sig_item = + | Ocsg_constraint of out_type * out_type + | Ocsg_method of string * bool * bool * out_type + | Ocsg_value of string * bool * bool * out_type + +type out_module_type = + | Omty_abstract + | Omty_functor of string * out_module_type option * out_module_type + | Omty_ident of out_ident + | Omty_signature of out_sig_item list + | Omty_alias of out_ident +and out_sig_item = + | Osig_class of + bool * string * (string * (bool * bool)) list * out_class_type * + out_rec_status + | Osig_class_type of + bool * string * (string * (bool * bool)) list * out_class_type * + out_rec_status + | Osig_typext of out_extension_constructor * out_ext_status + | Osig_modtype of string * out_module_type + | Osig_module of string * out_module_type * out_rec_status + | Osig_type of out_type_decl * out_rec_status + | Osig_value of out_val_decl + | Osig_ellipsis +and out_type_decl = + { otype_name: string; + otype_params: (string * (bool * bool)) list; + otype_type: out_type; + otype_private: Asttypes.private_flag; + otype_immediate: bool; + otype_unboxed: bool; + otype_cstrs: (out_type * out_type) list } +and out_extension_constructor = + { oext_name: string; + oext_type_name: string; + oext_type_params: string list; + oext_args: out_type list; + oext_ret_type: out_type option; + oext_private: Asttypes.private_flag } +and out_type_extension = + { otyext_name: string; + otyext_params: string list; + otyext_constructors: (string * out_type list * out_type option) list; + otyext_private: Asttypes.private_flag } +and out_val_decl = + { oval_name: string; + oval_type: out_type; + oval_prims: string list; + oval_attributes: out_attribute list } +and out_rec_status = + | Orec_not + | Orec_first + | Orec_next +and out_ext_status = + | Oext_first + | Oext_next + | Oext_exception + +type out_phrase = + | Ophr_eval of out_value * out_type + | Ophr_signature of (out_sig_item * out_value option) list + | Ophr_exception of (exn * out_value) diff --git a/res_syntax/compiler-libs-406/parmatch.ml b/res_syntax/compiler-libs-406/parmatch.ml new file mode 100644 index 0000000000..a5147cedaa --- /dev/null +++ b/res_syntax/compiler-libs-406/parmatch.ml @@ -0,0 +1,2641 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Detection of partial matches and unused match cases. 
*) + +open Misc +open Asttypes +open Types +open Typedtree + +(*************************************) +(* Utilities for building patterns *) +(*************************************) + +let make_pat desc ty tenv = + {pat_desc = desc; pat_loc = Location.none; pat_extra = []; + pat_type = ty ; pat_env = tenv; + pat_attributes = []; + } + +let omega = make_pat Tpat_any Ctype.none Env.empty + +let extra_pat = + make_pat + (Tpat_var (Ident.create "+", mknoloc "+")) + Ctype.none Env.empty + +let rec omegas i = + if i <= 0 then [] else omega :: omegas (i-1) + +let omega_list l = List.map (fun _ -> omega) l + +let zero = make_pat (Tpat_constant (Const_int 0)) Ctype.none Env.empty + +(*******************) +(* Coherence check *) +(*******************) + +(* For some of the operations we do in this module, we would like (because it + simplifies matters) to assume that patterns appearing on a given column in a + pattern matrix are /coherent/ (think "of the same type"). + Unfortunately that is not always true. + + Consider the following (well-typed) example: + {[ + type _ t = S : string t | U : unit t + + let f (type a) (t1 : a t) (t2 : a t) (a : a) = + match t1, t2, a with + | U, _, () -> () + | _, S, "" -> () + ]} + + Clearly the 3rd column contains incoherent patterns. + + On the example above, most of the algorithms will explore the pattern matrix + as illustrated by the following tree: + + {v + S + -------> | "" | + U | S, "" | __/ | () | + --------> | _, () | \ ¬ S + | U, _, () | __/ -------> | () | + | _, S, "" | \ + ---------> | S, "" | ----------> | "" | + ¬ U S + v} + + where following an edge labelled by a pattern P means "assuming the value I + am matching on is filtered by [P] on the column I am currently looking at, + then the following submatrix is still reachable". + + Notice that at any point of that tree, if the first column of a matrix is + incoherent, then the branch leading to it can only be taken if the scrutinee + is ill-typed. + In the example above the only case where we have a matrix with an incoherent + first column is when we consider [t1, t2, a] to be [U, S, ...]. However such + a value would be ill-typed, so we can never actually get there. + + Checking the first column at each step of the recursion and making the + concious decision of "aborting" the algorithm whenever the first column + becomes incoherent, allows us to retain the initial assumption in later + stages of the algorithms. + + --- + + N.B. two patterns can be considered coherent even though they might not be of + the same type. + + That's in part because we only care about the "head" of patterns and leave + checking coherence of subpatterns for the next steps of the algorithm: + ('a', 'b') and (1, ()) will be deemed coherent because they are both a tuples + of arity 2 (we'll notice at a later stage the incoherence of 'a' and 1). + + But also because it can be hard/costly to determine exactly whether two + patterns are of the same type or not (eg. in the example above with _ and S, + but see also the module [Coherence_illustration] in + testsuite/tests/basic-more/robustmatch.ml). + + For the moment our weak, loosely-syntactic, coherence check seems to be + enough and we leave it to each user to consider (and document!) what happens + when an "incoherence" is not detected by this check. 
+*) + + +let simplify_head_pat p k = + let rec simplify_head_pat p k = + match p.pat_desc with + | Tpat_alias (p,_,_) -> simplify_head_pat p k + | Tpat_var (_,_) -> omega :: k + | Tpat_or (p1,p2,_) -> simplify_head_pat p1 (simplify_head_pat p2 k) + | _ -> p :: k + in simplify_head_pat p k + +let rec simplified_first_col = function + | [] -> [] + | [] :: _ -> assert false (* the rows are non-empty! *) + | (p::_) :: rows -> + simplify_head_pat p (simplified_first_col rows) + +(* Given the simplified first column of a matrix, this function first looks for + a "discriminating" pattern on that column (i.e. a non-omega one) and then + check that every other head pattern in the column is coherent with that one. +*) +let all_coherent column = + let coherent_heads hp1 hp2 = + match hp1.pat_desc, hp2.pat_desc with + | (Tpat_var _ | Tpat_alias _ | Tpat_or _), _ + | _, (Tpat_var _ | Tpat_alias _ | Tpat_or _) -> + assert false + | Tpat_construct (_, c, _), Tpat_construct (_, c', _) -> + c.cstr_consts = c'.cstr_consts + && c.cstr_nonconsts = c'.cstr_nonconsts + | Tpat_constant c1, Tpat_constant c2 -> begin + match c1, c2 with + | Const_char _, Const_char _ + | Const_int _, Const_int _ + | Const_int32 _, Const_int32 _ + | Const_int64 _, Const_int64 _ + | Const_nativeint _, Const_nativeint _ + | Const_float _, Const_float _ + | Const_string _, Const_string _ -> true + | ( Const_char _ + | Const_int _ + | Const_int32 _ + | Const_int64 _ + | Const_nativeint _ + | Const_float _ + | Const_string _), _ -> false + end + | Tpat_tuple l1, Tpat_tuple l2 -> List.length l1 = List.length l2 + | Tpat_record ((_, lbl1, _) :: _, _), Tpat_record ((_, lbl2, _) :: _, _) -> + Array.length lbl1.lbl_all = Array.length lbl2.lbl_all + | Tpat_any, _ + | _, Tpat_any + | Tpat_record ([], _), Tpat_record (_, _) + | Tpat_record (_, _), Tpat_record ([], _) + | Tpat_variant _, Tpat_variant _ + | Tpat_array _, Tpat_array _ + | Tpat_lazy _, Tpat_lazy _ -> true + | _, _ -> false + in + match + List.find (fun head_pat -> + match head_pat.pat_desc with + | Tpat_var _ | Tpat_alias _ | Tpat_or _ -> assert false + | Tpat_any -> false + | _ -> true + ) column + with + | exception Not_found -> + (* only omegas on the column: the column is coherent. *) + true + | discr_pat -> + List.for_all (coherent_heads discr_pat) column + +let first_column simplified_matrix = + List.map fst simplified_matrix + +(***********************) +(* Compatibility check *) +(***********************) + +(* Patterns p and q compatible means: + there exists value V that matches both, However.... + + The case of extension types is dubious, as constructor rebind permits + that different constructors are the same (and are thus compatible). + + Compilation must take this into account, consider: + + type t = .. + type t += A|B + type t += C=A + + let f x y = match x,y with + | true,A -> '1' + | _,C -> '2' + | false,A -> '3' + | _,_ -> '_' + + As C is bound to A the value of f false A is '2' (and not '3' as it would + be in the absence of rebinding). + + Not considering rebinding, patterns "false,A" and "_,C" are incompatible + and the compiler can swap the second and third clause, resulting in the + (more efficiently compiled) matching + + match x,y with + | true,A -> '1' + | false,A -> '3' + | _,C -> '2' + | _,_ -> '_' + + This is not correct: when C is bound to A, "f false A" returns '2' (not '3') + + + However, diagnostics do not take constructor rebinding into account. + Notice, that due to module abstraction constructor rebinding is hidden. + + module X : sig type t = .. 
type t += A|B end = struct + type t = .. + type t += A + type t += B=A + end + + open X + + let f x = match x with + | A -> '1' + | B -> '2' + | _ -> '_' + + The second clause above will NOT (and cannot) be flagged as useless. + + Finally, there are two compatibility fonction + compat p q ---> 'syntactic compatibility, used for diagnostics. + may_compat p q ---> a safe approximation of possible compat, + for compilation + +*) + + +let is_absent tag row = Btype.row_field tag !row = Rabsent + +let is_absent_pat p = match p.pat_desc with +| Tpat_variant (tag, _, row) -> is_absent tag row +| _ -> false + +let const_compare x y = + match x,y with + | Const_float f1, Const_float f2 -> + compare (float_of_string f1) (float_of_string f2) + | Const_string (s1, _), Const_string (s2, _) -> + String.compare s1 s2 + | _, _ -> compare x y + +let records_args l1 l2 = + (* Invariant: fields are already sorted by Typecore.type_label_a_list *) + let rec combine r1 r2 l1 l2 = match l1,l2 with + | [],[] -> List.rev r1, List.rev r2 + | [],(_,_,p2)::rem2 -> combine (omega::r1) (p2::r2) [] rem2 + | (_,_,p1)::rem1,[] -> combine (p1::r1) (omega::r2) rem1 [] + | (_,lbl1,p1)::rem1, ( _,lbl2,p2)::rem2 -> + if lbl1.lbl_pos < lbl2.lbl_pos then + combine (p1::r1) (omega::r2) rem1 l2 + else if lbl1.lbl_pos > lbl2.lbl_pos then + combine (omega::r1) (p2::r2) l1 rem2 + else (* same label on both sides *) + combine (p1::r1) (p2::r2) rem1 rem2 in + combine [] [] l1 l2 + + + +module Compat + (Constr:sig + val equal : + Types.constructor_description -> + Types.constructor_description -> + bool + end) = struct + + let rec compat p q = match p.pat_desc,q.pat_desc with +(* Variables match any value *) + | ((Tpat_any|Tpat_var _),_) + | (_,(Tpat_any|Tpat_var _)) -> true +(* Structural induction *) + | Tpat_alias (p,_,_),_ -> compat p q + | _,Tpat_alias (q,_,_) -> compat p q + | Tpat_or (p1,p2,_),_ -> + (compat p1 q || compat p2 q) + | _,Tpat_or (q1,q2,_) -> + (compat p q1 || compat p q2) +(* Constructors, with special case for extension *) + | Tpat_construct (_, c1,ps1), Tpat_construct (_, c2,ps2) -> + Constr.equal c1 c2 && compats ps1 ps2 +(* More standard stuff *) + | Tpat_variant(l1,op1, _), Tpat_variant(l2,op2,_) -> + l1=l2 && ocompat op1 op2 + | Tpat_constant c1, Tpat_constant c2 -> + const_compare c1 c2 = 0 + | Tpat_tuple ps, Tpat_tuple qs -> compats ps qs + | Tpat_lazy p, Tpat_lazy q -> compat p q + | Tpat_record (l1,_),Tpat_record (l2,_) -> + let ps,qs = records_args l1 l2 in + compats ps qs + | Tpat_array ps, Tpat_array qs -> + List.length ps = List.length qs && + compats ps qs + | _,_ -> false + + and ocompat op oq = match op,oq with + | None,None -> true + | Some p,Some q -> compat p q + | (None,Some _)|(Some _,None) -> false + + and compats ps qs = match ps,qs with + | [], [] -> true + | p::ps, q::qs -> compat p q && compats ps qs + | _,_ -> false + +end + +module SyntacticCompat = + Compat + (struct + let equal c1 c2 = Types.equal_tag c1.cstr_tag c2.cstr_tag + end) + +let compat = SyntacticCompat.compat +and compats = SyntacticCompat.compats + +(* Due to (potential) rebinding, two extension constructors + of the same arity type may equal *) + +exception Empty (* Empty pattern *) + +(****************************************) +(* Utilities for retrieving type paths *) +(****************************************) + +(* May need a clean copy, cf. 
PR#4745 *) +let clean_copy ty = + if ty.level = Btype.generic_level then ty + else Subst.type_expr Subst.identity ty + +let get_type_path ty tenv = + let ty = Ctype.repr (Ctype.expand_head tenv (clean_copy ty)) in + match ty.desc with + | Tconstr (path,_,_) -> path + | _ -> fatal_error "Parmatch.get_type_path" + +(*************************************) +(* Values as patterns pretty printer *) +(*************************************) + +open Format +;; + +let is_cons = function +| {cstr_name = "::"} -> true +| _ -> false + +let pretty_const c = match c with +| Const_int i -> Printf.sprintf "%d" i +| Const_char i -> Printf.sprintf "%s" (Pprintast.string_of_int_as_char i) +| Const_string (s, _) -> Printf.sprintf "%S" s +| Const_float f -> Printf.sprintf "%s" f +| Const_int32 i -> Printf.sprintf "%ldl" i +| Const_int64 i -> Printf.sprintf "%LdL" i +| Const_nativeint i -> Printf.sprintf "%ndn" i + +let rec pretty_val ppf v = + match v.pat_extra with + (cstr, _loc, _attrs) :: rem -> + begin match cstr with + | Tpat_unpack -> + fprintf ppf "@[(module %a)@]" pretty_val { v with pat_extra = rem } + | Tpat_constraint _ -> + fprintf ppf "@[(%a : _)@]" pretty_val { v with pat_extra = rem } + | Tpat_type _ -> + fprintf ppf "@[(# %a)@]" pretty_val { v with pat_extra = rem } + | Tpat_open _ -> + fprintf ppf "@[(# %a)@]" pretty_val { v with pat_extra = rem } + end + | [] -> + match v.pat_desc with + | Tpat_any -> fprintf ppf "_" + | Tpat_var (x,_) -> fprintf ppf "%s" (Ident.name x) + | Tpat_constant c -> fprintf ppf "%s" (pretty_const c) + | Tpat_tuple vs -> + fprintf ppf "@[(%a)@]" (pretty_vals ",") vs + | Tpat_construct (_, cstr, []) -> + fprintf ppf "%s" cstr.cstr_name + | Tpat_construct (_, cstr, [w]) -> + fprintf ppf "@[<2>%s@ %a@]" cstr.cstr_name pretty_arg w + | Tpat_construct (_, cstr, vs) -> + let name = cstr.cstr_name in + begin match (name, vs) with + ("::", [v1;v2]) -> + fprintf ppf "@[%a::@,%a@]" pretty_car v1 pretty_cdr v2 + | _ -> + fprintf ppf "@[<2>%s@ @[(%a)@]@]" name (pretty_vals ",") vs + end + | Tpat_variant (l, None, _) -> + fprintf ppf "`%s" l + | Tpat_variant (l, Some w, _) -> + fprintf ppf "@[<2>`%s@ %a@]" l pretty_arg w + | Tpat_record (lvs,_) -> + let filtered_lvs = List.filter + (function + | (_,_,{pat_desc=Tpat_any}) -> false (* do not show lbl=_ *) + | _ -> true) lvs in + begin match filtered_lvs with + | [] -> fprintf ppf "_" + | (_, lbl, _) :: q -> + let elision_mark ppf = + (* we assume that there is no label repetitions here *) + if Array.length lbl.lbl_all > 1 + List.length q then + fprintf ppf ";@ _@ " + else () in + fprintf ppf "@[{%a%t}@]" + pretty_lvals filtered_lvs elision_mark + end + | Tpat_array vs -> + fprintf ppf "@[[| %a |]@]" (pretty_vals " ;") vs + | Tpat_lazy v -> + fprintf ppf "@[<2>lazy@ %a@]" pretty_arg v + | Tpat_alias (v, x,_) -> + fprintf ppf "@[(%a@ as %a)@]" pretty_val v Ident.print x + | Tpat_or (v,w,_) -> + fprintf ppf "@[(%a|@,%a)@]" pretty_or v pretty_or w + +and pretty_car ppf v = match v.pat_desc with +| Tpat_construct (_,cstr, [_ ; _]) + when is_cons cstr -> + fprintf ppf "(%a)" pretty_val v +| _ -> pretty_val ppf v + +and pretty_cdr ppf v = match v.pat_desc with +| Tpat_construct (_,cstr, [v1 ; v2]) + when is_cons cstr -> + fprintf ppf "%a::@,%a" pretty_car v1 pretty_cdr v2 +| _ -> pretty_val ppf v + +and pretty_arg ppf v = match v.pat_desc with +| Tpat_construct (_,_,_::_) +| Tpat_variant (_, Some _, _) -> fprintf ppf "(%a)" pretty_val v +| _ -> pretty_val ppf v + +and pretty_or ppf v = match v.pat_desc with +| Tpat_or (v,w,_) -> + fprintf 
ppf "%a|@,%a" pretty_or v pretty_or w +| _ -> pretty_val ppf v + +and pretty_vals sep ppf = function + | [] -> () + | [v] -> pretty_val ppf v + | v::vs -> + fprintf ppf "%a%s@ %a" pretty_val v sep (pretty_vals sep) vs + +and pretty_lvals ppf = function + | [] -> () + | [_,lbl,v] -> + fprintf ppf "%s=%a" lbl.lbl_name pretty_val v + | (_, lbl,v)::rest -> + fprintf ppf "%s=%a;@ %a" + lbl.lbl_name pretty_val v pretty_lvals rest + +let top_pretty ppf v = + fprintf ppf "@[%a@]@?" pretty_val v + + +let pretty_pat p = + top_pretty Format.str_formatter p ; + prerr_string (Format.flush_str_formatter ()) + +type matrix = pattern list list + +let pretty_line ps = + List.iter + (fun p -> + top_pretty Format.str_formatter p ; + prerr_string " <" ; + prerr_string (Format.flush_str_formatter ()) ; + prerr_string ">") + ps + +let pretty_matrix (pss : matrix) = + prerr_endline "begin matrix" ; + List.iter + (fun ps -> + pretty_line ps ; + prerr_endline "") + pss ; + prerr_endline "end matrix" + + +(****************************) +(* Utilities for matching *) +(****************************) + +(* Check top matching *) +let simple_match p1 p2 = + match p1.pat_desc, p2.pat_desc with + | Tpat_construct(_, c1, _), Tpat_construct(_, c2, _) -> + Types.equal_tag c1.cstr_tag c2.cstr_tag + | Tpat_variant(l1, _, _), Tpat_variant(l2, _, _) -> + l1 = l2 + | Tpat_constant(c1), Tpat_constant(c2) -> const_compare c1 c2 = 0 + | Tpat_lazy _, Tpat_lazy _ -> true + | Tpat_record _ , Tpat_record _ -> true + | Tpat_tuple p1s, Tpat_tuple p2s + | Tpat_array p1s, Tpat_array p2s -> List.length p1s = List.length p2s + | _, (Tpat_any | Tpat_var(_)) -> true + | _, _ -> false + + + + +(* extract record fields as a whole *) +let record_arg p = match p.pat_desc with +| Tpat_any -> [] +| Tpat_record (args,_) -> args +| _ -> fatal_error "Parmatch.as_record" + + +(* Raise Not_found when pos is not present in arg *) +let get_field pos arg = + let _,_, p = List.find (fun (_,lbl,_) -> pos = lbl.lbl_pos) arg in + p + +let extract_fields omegas arg = + List.map + (fun (_,lbl,_) -> + try + get_field lbl.lbl_pos arg + with Not_found -> omega) + omegas + +let all_record_args lbls = match lbls with +| (_,{lbl_all=lbl_all},_)::_ -> + let t = + Array.map + (fun lbl -> mknoloc (Longident.Lident "?temp?"), lbl,omega) + lbl_all in + List.iter + (fun ((_, lbl,_) as x) -> t.(lbl.lbl_pos) <- x) + lbls ; + Array.to_list t +| _ -> fatal_error "Parmatch.all_record_args" + + +(* Build argument list when p2 >= p1, where p1 is a simple pattern *) +let rec simple_match_args p1 p2 = match p2.pat_desc with +| Tpat_alias (p2,_,_) -> simple_match_args p1 p2 +| Tpat_construct(_, _, args) -> args +| Tpat_variant(_, Some arg, _) -> [arg] +| Tpat_tuple(args) -> args +| Tpat_record(args,_) -> extract_fields (record_arg p1) args +| Tpat_array(args) -> args +| Tpat_lazy arg -> [arg] +| (Tpat_any | Tpat_var(_)) -> + begin match p1.pat_desc with + Tpat_construct(_, _,args) -> omega_list args + | Tpat_variant(_, Some _, _) -> [omega] + | Tpat_tuple(args) -> omega_list args + | Tpat_record(args,_) -> omega_list args + | Tpat_array(args) -> omega_list args + | Tpat_lazy _ -> [omega] + | _ -> [] + end +| _ -> [] + +(* + Normalize a pattern -> + all arguments are omega (simple pattern) and no more variables +*) + +let rec normalize_pat q = match q.pat_desc with + | Tpat_any | Tpat_constant _ -> q + | Tpat_var _ -> make_pat Tpat_any q.pat_type q.pat_env + | Tpat_alias (p,_,_) -> normalize_pat p + | Tpat_tuple (args) -> + make_pat (Tpat_tuple (omega_list args)) q.pat_type q.pat_env + | 
Tpat_construct (lid, c,args) -> + make_pat + (Tpat_construct (lid, c,omega_list args)) + q.pat_type q.pat_env + | Tpat_variant (l, arg, row) -> + make_pat (Tpat_variant (l, may_map (fun _ -> omega) arg, row)) + q.pat_type q.pat_env + | Tpat_array (args) -> + make_pat (Tpat_array (omega_list args)) q.pat_type q.pat_env + | Tpat_record (largs, closed) -> + make_pat + (Tpat_record (List.map (fun (lid,lbl,_) -> + lid, lbl,omega) largs, closed)) + q.pat_type q.pat_env + | Tpat_lazy _ -> + make_pat (Tpat_lazy omega) q.pat_type q.pat_env + | Tpat_or _ -> fatal_error "Parmatch.normalize_pat" + +(* + Build normalized (cf. supra) discriminating pattern, + in the non-data type case +*) + +let discr_pat q pss = + + let rec acc_pat acc pss = match pss with + ({pat_desc = Tpat_alias (p,_,_)}::ps)::pss -> + acc_pat acc ((p::ps)::pss) + | ({pat_desc = Tpat_or (p1,p2,_)}::ps)::pss -> + acc_pat acc ((p1::ps)::(p2::ps)::pss) + | ({pat_desc = (Tpat_any | Tpat_var _)}::_)::pss -> + acc_pat acc pss + | (({pat_desc = Tpat_tuple _} as p)::_)::_ -> normalize_pat p + | (({pat_desc = Tpat_lazy _} as p)::_)::_ -> normalize_pat p + | (({pat_desc = Tpat_record (largs,closed)} as p)::_)::pss -> + let new_omegas = + List.fold_right + (fun (lid, lbl,_) r -> + try + let _ = get_field lbl.lbl_pos r in + r + with Not_found -> + (lid, lbl,omega)::r) + largs (record_arg acc) + in + acc_pat + (make_pat (Tpat_record (new_omegas, closed)) p.pat_type p.pat_env) + pss + | _ -> acc in + + match normalize_pat q with + | {pat_desc= (Tpat_any | Tpat_record _)} as q -> acc_pat q pss + | q -> q + +(* + In case a matching value is found, set actual arguments + of the matching pattern. +*) + +let rec read_args xs r = match xs,r with +| [],_ -> [],r +| _::xs, arg::rest -> + let args,rest = read_args xs rest in + arg::args,rest +| _,_ -> + fatal_error "Parmatch.read_args" + +let do_set_args erase_mutable q r = match q with +| {pat_desc = Tpat_tuple omegas} -> + let args,rest = read_args omegas r in + make_pat (Tpat_tuple args) q.pat_type q.pat_env::rest +| {pat_desc = Tpat_record (omegas,closed)} -> + let args,rest = read_args omegas r in + make_pat + (Tpat_record + (List.map2 (fun (lid, lbl,_) arg -> + if + erase_mutable && + (match lbl.lbl_mut with + | Mutable -> true | Immutable -> false) + then + lid, lbl, omega + else + lid, lbl, arg) + omegas args, closed)) + q.pat_type q.pat_env:: + rest +| {pat_desc = Tpat_construct (lid, c,omegas)} -> + let args,rest = read_args omegas r in + make_pat + (Tpat_construct (lid, c,args)) + q.pat_type q.pat_env:: + rest +| {pat_desc = Tpat_variant (l, omega, row)} -> + let arg, rest = + match omega, r with + Some _, a::r -> Some a, r + | None, r -> None, r + | _ -> assert false + in + make_pat + (Tpat_variant (l, arg, row)) q.pat_type q.pat_env:: + rest +| {pat_desc = Tpat_lazy _omega} -> + begin match r with + arg::rest -> + make_pat (Tpat_lazy arg) q.pat_type q.pat_env::rest + | _ -> fatal_error "Parmatch.do_set_args (lazy)" + end +| {pat_desc = Tpat_array omegas} -> + let args,rest = read_args omegas r in + make_pat + (Tpat_array args) q.pat_type q.pat_env:: + rest +| {pat_desc=Tpat_constant _|Tpat_any} -> + q::r (* case any is used in matching.ml *) +| _ -> fatal_error "Parmatch.set_args" + +let set_args q r = do_set_args false q r +and set_args_erase_mutable q r = do_set_args true q r + +(* filter pss according to pattern q *) +let filter_one q pss = + let rec filter_rec = function + ({pat_desc = Tpat_alias(p,_,_)}::ps)::pss -> + filter_rec ((p::ps)::pss) + | ({pat_desc = Tpat_or(p1,p2,_)}::ps)::pss 
-> + filter_rec ((p1::ps)::(p2::ps)::pss) + | (p::ps)::pss -> + if simple_match q p + then (simple_match_args q p @ ps) :: filter_rec pss + else filter_rec pss + | _ -> [] in + filter_rec pss + +(* + Filter pss in the ``extra case''. This applies : + - According to an extra constructor (datatype case, non-complete signature). + - According to anything (all-variables case). +*) +let filter_extra pss = + let rec filter_rec = function + ({pat_desc = Tpat_alias(p,_,_)}::ps)::pss -> + filter_rec ((p::ps)::pss) + | ({pat_desc = Tpat_or(p1,p2,_)}::ps)::pss -> + filter_rec ((p1::ps)::(p2::ps)::pss) + | ({pat_desc = (Tpat_any | Tpat_var(_))} :: qs) :: pss -> + qs :: filter_rec pss + | _::pss -> filter_rec pss + | [] -> [] in + filter_rec pss + +(* + Pattern p0 is the discriminating pattern, + returns [(q0,pss0) ; ... ; (qn,pssn)] + where the qi's are simple patterns and the pssi's are + matched matrices. + + NOTES + * (qi,[]) is impossible. + * In the case when matching is useless (all-variable case), + returns [] +*) + +let filter_all pat0 pss = + + let rec insert q qs env = + match env with + [] -> + let q0 = normalize_pat q in + [q0, [simple_match_args q0 q @ qs]] + | ((q0,pss) as c)::env -> + if simple_match q0 q + then (q0, ((simple_match_args q0 q @ qs) :: pss)) :: env + else c :: insert q qs env in + + let rec filter_rec env = function + ({pat_desc = Tpat_alias(p,_,_)}::ps)::pss -> + filter_rec env ((p::ps)::pss) + | ({pat_desc = Tpat_or(p1,p2,_)}::ps)::pss -> + filter_rec env ((p1::ps)::(p2::ps)::pss) + | ({pat_desc = (Tpat_any | Tpat_var(_))}::_)::pss -> + filter_rec env pss + | (p::ps)::pss -> + filter_rec (insert p ps env) pss + | _ -> env + + and filter_omega env = function + ({pat_desc = Tpat_alias(p,_,_)}::ps)::pss -> + filter_omega env ((p::ps)::pss) + | ({pat_desc = Tpat_or(p1,p2,_)}::ps)::pss -> + filter_omega env ((p1::ps)::(p2::ps)::pss) + | ({pat_desc = (Tpat_any | Tpat_var(_))}::ps)::pss -> + filter_omega + (List.map (fun (q,qss) -> (q,(simple_match_args q omega @ ps) :: qss)) + env) + pss + | _::pss -> filter_omega env pss + | [] -> env in + + filter_omega + (filter_rec + (match pat0.pat_desc with + (Tpat_record(_) | Tpat_tuple(_) | Tpat_lazy(_)) -> [pat0,[]] + | _ -> []) + pss) + pss + +(* Variant related functions *) + +let rec set_last a = function + [] -> [] + | [_] -> [a] + | x::l -> x :: set_last a l + +(* mark constructor lines for failure when they are incomplete *) +let rec mark_partial = function + ({pat_desc = Tpat_alias(p,_,_)}::ps)::pss -> + mark_partial ((p::ps)::pss) + | ({pat_desc = Tpat_or(p1,p2,_)}::ps)::pss -> + mark_partial ((p1::ps)::(p2::ps)::pss) + | ({pat_desc = (Tpat_any | Tpat_var(_))} :: _ as ps) :: pss -> + ps :: mark_partial pss + | ps::pss -> + (set_last zero ps) :: mark_partial pss + | [] -> [] + +let close_variant env row = + let row = Btype.row_repr row in + let nm = + List.fold_left + (fun nm (_tag,f) -> + match Btype.row_field_repr f with + | Reither(_, _, false, e) -> + (* m=false means that this tag is not explicitly matched *) + Btype.set_row_field e Rabsent; + None + | Rabsent | Reither (_, _, true, _) | Rpresent _ -> nm) + row.row_name row.row_fields in + if not row.row_closed || nm != row.row_name then begin + (* this unification cannot fail *) + Ctype.unify env row.row_more + (Btype.newgenty + (Tvariant {row with row_fields = []; row_more = Btype.newgenvar(); + row_closed = true; row_name = nm})) + end + +let row_of_pat pat = + match Ctype.expand_head pat.pat_env pat.pat_type with + {desc = Tvariant row} -> Btype.row_repr row + | _ -> 
assert false + +(* + Check whether the first column of env makes up a complete signature or + not. +*) + +let full_match closing env = match env with +| ({pat_desc = Tpat_construct(_,c,_)},_) :: _ -> + if c.cstr_consts < 0 then false (* extensions *) + else List.length env = c.cstr_consts + c.cstr_nonconsts +| ({pat_desc = Tpat_variant _} as p,_) :: _ -> + let fields = + List.map + (function ({pat_desc = Tpat_variant (tag, _, _)}, _) -> tag + | _ -> assert false) + env + in + let row = row_of_pat p in + if closing && not (Btype.row_fixed row) then + (* closing=true, we are considering the variant as closed *) + List.for_all + (fun (tag,f) -> + match Btype.row_field_repr f with + Rabsent | Reither(_, _, false, _) -> true + | Reither (_, _, true, _) + (* m=true, do not discard matched tags, rather warn *) + | Rpresent _ -> List.mem tag fields) + row.row_fields + else + row.row_closed && + List.for_all + (fun (tag,f) -> + Btype.row_field_repr f = Rabsent || List.mem tag fields) + row.row_fields +| ({pat_desc = Tpat_constant(Const_char _)},_) :: _ -> + List.length env = 256 +| ({pat_desc = Tpat_constant(_)},_) :: _ -> false +| ({pat_desc = Tpat_tuple(_)},_) :: _ -> true +| ({pat_desc = Tpat_record(_)},_) :: _ -> true +| ({pat_desc = Tpat_array(_)},_) :: _ -> false +| ({pat_desc = Tpat_lazy(_)},_) :: _ -> true +| ({pat_desc = (Tpat_any|Tpat_var _|Tpat_alias _|Tpat_or _)},_) :: _ +| [] + -> + assert false + +(* Written as a non-fragile matching, PR#7451 originated from a fragile matching below. *) +let should_extend ext env = match ext with +| None -> false +| Some ext -> begin match env with + | [] -> assert false + | (p,_)::_ -> + begin match p.pat_desc with + | Tpat_construct + (_, {cstr_tag=(Cstr_constant _|Cstr_block _|Cstr_unboxed)},_) -> + let path = get_type_path p.pat_type p.pat_env in + Path.same path ext + | Tpat_construct + (_, {cstr_tag=(Cstr_extension _)},_) -> false + | Tpat_constant _|Tpat_tuple _|Tpat_variant _ + | Tpat_record _|Tpat_array _ | Tpat_lazy _ + -> false + | Tpat_any|Tpat_var _|Tpat_alias _|Tpat_or _ + -> assert false + end +end + +module ConstructorTagHashtbl = Hashtbl.Make( + struct + type t = Types.constructor_tag + let hash = Hashtbl.hash + let equal = Types.equal_tag + end +) + +(* complement constructor tags *) +let complete_tags nconsts nconstrs tags = + let seen_const = Array.make nconsts false + and seen_constr = Array.make nconstrs false in + List.iter + (function + | Cstr_constant i -> seen_const.(i) <- true + | Cstr_block i -> seen_constr.(i) <- true + | _ -> assert false) + tags ; + let r = ConstructorTagHashtbl.create (nconsts+nconstrs) in + for i = 0 to nconsts-1 do + if not seen_const.(i) then + ConstructorTagHashtbl.add r (Cstr_constant i) () + done ; + for i = 0 to nconstrs-1 do + if not seen_constr.(i) then + ConstructorTagHashtbl.add r (Cstr_block i) () + done ; + r + +(* build a pattern from a constructor list *) +let pat_of_constr ex_pat cstr = + {ex_pat with pat_desc = + Tpat_construct (mknoloc (Longident.Lident "?pat_of_constr?"), + cstr, omegas cstr.cstr_arity)} + +let orify x y = make_pat (Tpat_or (x, y, None)) x.pat_type x.pat_env + +let rec orify_many = function +| [] -> assert false +| [x] -> x +| x :: xs -> orify x (orify_many xs) + +let pat_of_constrs ex_pat cstrs = + if cstrs = [] then raise Empty else + orify_many (List.map (pat_of_constr ex_pat) cstrs) + +let pats_of_type ?(always=false) env ty = + let ty' = Ctype.expand_head env ty in + match ty'.desc with + | Tconstr (path, _, _) -> + begin try match (Env.find_type path 
env).type_kind with + | Type_variant cl when always || List.length cl = 1 || + List.for_all (fun cd -> cd.Types.cd_res <> None) cl -> + let cstrs = fst (Env.find_type_descrs path env) in + List.map (pat_of_constr (make_pat Tpat_any ty env)) cstrs + | Type_record _ -> + let labels = snd (Env.find_type_descrs path env) in + let fields = + List.map (fun ld -> + mknoloc (Longident.Lident "?pat_of_label?"), ld, omega) + labels + in + [make_pat (Tpat_record (fields, Closed)) ty env] + | _ -> [omega] + with Not_found -> [omega] + end + | Ttuple tl -> + [make_pat (Tpat_tuple (omegas (List.length tl))) ty env] + | _ -> [omega] + +let rec get_variant_constructors env ty = + match (Ctype.repr ty).desc with + | Tconstr (path,_,_) -> begin + try match Env.find_type path env with + | {type_kind=Type_variant _} -> + fst (Env.find_type_descrs path env) + | {type_manifest = Some _} -> + get_variant_constructors env + (Ctype.expand_head_once env (clean_copy ty)) + | _ -> fatal_error "Parmatch.get_variant_constructors" + with Not_found -> + fatal_error "Parmatch.get_variant_constructors" + end + | _ -> fatal_error "Parmatch.get_variant_constructors" + +(* Sends back a pattern that complements constructor tags all_tag *) +let complete_constrs p all_tags = + let c = + match p.pat_desc with Tpat_construct (_, c, _) -> c | _ -> assert false in + let not_tags = complete_tags c.cstr_consts c.cstr_nonconsts all_tags in + let constrs = get_variant_constructors p.pat_env c.cstr_res in + let others = + List.filter + (fun cnstr -> ConstructorTagHashtbl.mem not_tags cnstr.cstr_tag) + constrs in + let const, nonconst = + List.partition (fun cnstr -> cnstr.cstr_arity = 0) others in + const @ nonconst + +let build_other_constrs env p = + match p.pat_desc with + Tpat_construct (_, {cstr_tag=Cstr_constant _|Cstr_block _}, _) -> + let get_tag = function + | {pat_desc = Tpat_construct (_,c,_)} -> c.cstr_tag + | _ -> fatal_error "Parmatch.get_tag" in + let all_tags = List.map (fun (p,_) -> get_tag p) env in + pat_of_constrs p (complete_constrs p all_tags) + | _ -> extra_pat + +(* Auxiliary for build_other *) + +let build_other_constant proj make first next p env = + let all = List.map (fun (p, _) -> proj p.pat_desc) env in + let rec try_const i = + if List.mem i all + then try_const (next i) + else make_pat (make i) p.pat_type p.pat_env + in try_const first + +(* + Builds a pattern that is incompatible with all patterns in + in the first column of env +*) + +let some_other_tag = "" + +let build_other ext env = match env with +| ({pat_desc = Tpat_construct (lid, {cstr_tag=Cstr_extension _},_)},_) :: _ -> + (* let c = {c with cstr_name = "*extension*"} in *) (* PR#7330 *) + make_pat (Tpat_var (Ident.create "*extension*", + {lid with txt="*extension*"})) Ctype.none Env.empty +| ({pat_desc = Tpat_construct _} as p,_) :: _ -> + begin match ext with + | Some ext when Path.same ext (get_type_path p.pat_type p.pat_env) -> + extra_pat + | _ -> + build_other_constrs env p + end +| ({pat_desc = Tpat_variant (_,_,r)} as p,_) :: _ -> + let tags = + List.map + (function ({pat_desc = Tpat_variant (tag, _, _)}, _) -> tag + | _ -> assert false) + env + in + let row = row_of_pat p in + let make_other_pat tag const = + let arg = if const then None else Some omega in + make_pat (Tpat_variant(tag, arg, r)) p.pat_type p.pat_env in + begin match + List.fold_left + (fun others (tag,f) -> + if List.mem tag tags then others else + match Btype.row_field_repr f with + Rabsent (* | Reither _ *) -> others + (* This one is called after erasing pattern info 
*) + | Reither (c, _, _, _) -> make_other_pat tag c :: others + | Rpresent arg -> make_other_pat tag (arg = None) :: others) + [] row.row_fields + with + [] -> + make_other_pat some_other_tag true + | pat::other_pats -> + List.fold_left + (fun p_res pat -> + make_pat (Tpat_or (pat, p_res, None)) p.pat_type p.pat_env) + pat other_pats + end +| ({pat_desc = Tpat_constant(Const_char _)} as p,_) :: _ -> + let all_chars = + List.map + (fun (p,_) -> match p.pat_desc with + | Tpat_constant (Const_char c) -> c + | _ -> assert false) + env in + + let rec find_other i imax = + if i > imax then raise Not_found + else + let ci = i in + if List.mem ci all_chars then + find_other (i+1) imax + else + make_pat (Tpat_constant (Const_char ci)) p.pat_type p.pat_env in + let rec try_chars = function + | [] -> omega + | (c1,c2) :: rest -> + try + find_other (Char.code c1) (Char.code c2) + with + | Not_found -> try_chars rest in + + try_chars + [ 'a', 'z' ; 'A', 'Z' ; '0', '9' ; + ' ', '~' ; Char.chr 0 , Char.chr 255] + +| ({pat_desc=(Tpat_constant (Const_int _))} as p,_) :: _ -> + build_other_constant + (function Tpat_constant(Const_int i) -> i | _ -> assert false) + (function i -> Tpat_constant(Const_int i)) + 0 succ p env +| ({pat_desc=(Tpat_constant (Const_int32 _))} as p,_) :: _ -> + build_other_constant + (function Tpat_constant(Const_int32 i) -> i | _ -> assert false) + (function i -> Tpat_constant(Const_int32 i)) + 0l Int32.succ p env +| ({pat_desc=(Tpat_constant (Const_int64 _))} as p,_) :: _ -> + build_other_constant + (function Tpat_constant(Const_int64 i) -> i | _ -> assert false) + (function i -> Tpat_constant(Const_int64 i)) + 0L Int64.succ p env +| ({pat_desc=(Tpat_constant (Const_nativeint _))} as p,_) :: _ -> + build_other_constant + (function Tpat_constant(Const_nativeint i) -> i | _ -> assert false) + (function i -> Tpat_constant(Const_nativeint i)) + 0n Nativeint.succ p env +| ({pat_desc=(Tpat_constant (Const_string _))} as p,_) :: _ -> + build_other_constant + (function Tpat_constant(Const_string (s, _)) -> String.length s + | _ -> assert false) + (function i -> Tpat_constant(Const_string(String.make i '*', None))) + 0 succ p env +| ({pat_desc=(Tpat_constant (Const_float _))} as p,_) :: _ -> + build_other_constant + (function Tpat_constant(Const_float f) -> float_of_string f + | _ -> assert false) + (function f -> Tpat_constant(Const_float (string_of_float f))) + 0.0 (fun f -> f +. 1.0) p env + +| ({pat_desc = Tpat_array _} as p,_)::_ -> + let all_lengths = + List.map + (fun (p,_) -> match p.pat_desc with + | Tpat_array args -> List.length args + | _ -> assert false) + env in + let rec try_arrays l = + if List.mem l all_lengths then try_arrays (l+1) + else + make_pat + (Tpat_array (omegas l)) + p.pat_type p.pat_env in + try_arrays 0 +| [] -> omega +| _ -> omega + +(* + Core function : + Is the last row of pattern matrix pss + qs satisfiable ? 
+ That is : + Does there exists at least one value vector, es such that : + 1- for all ps in pss ps # es (ps and es are not compatible) + 2- qs <= es (es matches qs) +*) + +let rec has_instance p = match p.pat_desc with + | Tpat_variant (l,_,r) when is_absent l r -> false + | Tpat_any | Tpat_var _ | Tpat_constant _ | Tpat_variant (_,None,_) -> true + | Tpat_alias (p,_,_) | Tpat_variant (_,Some p,_) -> has_instance p + | Tpat_or (p1,p2,_) -> has_instance p1 || has_instance p2 + | Tpat_construct (_,_,ps) | Tpat_tuple ps | Tpat_array ps -> + has_instances ps + | Tpat_record (lps,_) -> has_instances (List.map (fun (_,_,x) -> x) lps) + | Tpat_lazy p + -> has_instance p + + +and has_instances = function + | [] -> true + | q::rem -> has_instance q && has_instances rem + +(* + In two places in the following function, we check the coherence of the first + column of (pss + qs). + If it is incoherent, then we exit early saying that (pss + qs) is not + satisfiable (which is equivalent to saying "oh, we shouldn't have considered + that branch, no good result came come from here"). + + But what happens if we have a coherent but ill-typed column? + - we might end up returning [false], which is equivalent to noticing the + incompatibility: clearly this is fine. + - if we end up returning [true] then we're saying that [qs] is useful while + it is not. This is sad but not the end of the world, we're just allowing dead + code to survive. +*) +let rec satisfiable pss qs = match pss with +| [] -> has_instances qs +| _ -> + match qs with + | [] -> false + | {pat_desc = Tpat_or(q1,q2,_)}::qs -> + satisfiable pss (q1::qs) || satisfiable pss (q2::qs) + | {pat_desc = Tpat_alias(q,_,_)}::qs -> + satisfiable pss (q::qs) + | {pat_desc = (Tpat_any | Tpat_var(_))}::qs -> + if not (all_coherent (simplified_first_col pss)) then + false + else begin + let q0 = discr_pat omega pss in + match filter_all q0 pss with + (* first column of pss is made of variables only *) + | [] -> satisfiable (filter_extra pss) qs + | constrs -> + if full_match false constrs then + List.exists + (fun (p,pss) -> + not (is_absent_pat p) && + satisfiable pss (simple_match_args p omega @ qs)) + constrs + else + satisfiable (filter_extra pss) qs + end + | {pat_desc=Tpat_variant (l,_,r)}::_ when is_absent l r -> false + | q::qs -> + if not (all_coherent (q :: simplified_first_col pss)) then + false + else begin + let q0 = discr_pat q pss in + satisfiable (filter_one q0 pss) (simple_match_args q0 q @ qs) + end + +(* Also return the remaining cases, to enable GADT handling + + For considerations regarding the coherence check, see the comment on + [satisfiable] above. 
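+
+   A minimal illustration of the satisfiability question (the concrete
+   patterns are only an example, over the option type):
+
+     pss = [ [Some _] ]           qs = [ _ ]
+
+   is satisfiable -- the value None matches qs but no row of pss -- and
+   the function below returns the single witness row [None]; with
+   pss = [ [Some _]; [None] ] every value is already covered by some
+   row, so the result is [].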
*) +let rec satisfiables pss qs = match pss with +| [] -> if has_instances qs then [qs] else [] +| _ -> + match qs with + | [] -> [] + | {pat_desc = Tpat_or(q1,q2,_)}::qs -> + satisfiables pss (q1::qs) @ satisfiables pss (q2::qs) + | {pat_desc = Tpat_alias(q,_,_)}::qs -> + satisfiables pss (q::qs) + | {pat_desc = (Tpat_any | Tpat_var(_))}::qs -> + if not (all_coherent (simplified_first_col pss)) then + [] + else begin + let q0 = discr_pat omega pss in + let wild p = + List.map (fun qs -> p::qs) (satisfiables (filter_extra pss) qs) in + match filter_all q0 pss with + (* first column of pss is made of variables only *) + | [] -> + wild omega + | (p,_)::_ as constrs -> + let for_constrs () = + List.flatten ( + List.map + (fun (p,pss) -> + if is_absent_pat p then [] else + List.map (set_args p) + (satisfiables pss (simple_match_args p omega @ qs))) + constrs ) + in + if full_match false constrs then for_constrs () else + match p.pat_desc with + Tpat_construct _ -> + (* activate this code for checking non-gadt constructors *) + wild (build_other_constrs constrs p) @ for_constrs () + | _ -> + wild omega + end + | {pat_desc=Tpat_variant (l,_,r)}::_ when is_absent l r -> [] + | q::qs -> + if not (all_coherent (q :: simplified_first_col pss)) then + [] + else begin + let q0 = discr_pat q pss in + List.map (set_args q0) + (satisfiables (filter_one q0 pss) (simple_match_args q0 q @ qs)) + end + +(* + Now another satisfiable function that additionally + supplies an example of a matching value. + + This function should be called for exhaustiveness check only. +*) + +type 'a result = + | Rnone (* No matching value *) + | Rsome of 'a (* This matching value *) + +(* +let rec try_many f = function + | [] -> Rnone + | (p,pss)::rest -> + match f (p,pss) with + | Rnone -> try_many f rest + | r -> r +*) + +let rappend r1 r2 = + match r1, r2 with + | Rnone, _ -> r2 + | _, Rnone -> r1 + | Rsome l1, Rsome l2 -> Rsome (l1 @ l2) + +let rec try_many_gadt f = function + | [] -> Rnone + | (p,pss)::rest -> + rappend (f (p, pss)) (try_many_gadt f rest) + +(* +let rec exhaust ext pss n = match pss with +| [] -> Rsome (omegas n) +| []::_ -> Rnone +| pss -> + let q0 = discr_pat omega pss in + begin match filter_all q0 pss with + (* first column of pss is made of variables only *) + | [] -> + begin match exhaust ext (filter_extra pss) (n-1) with + | Rsome r -> Rsome (q0::r) + | r -> r + end + | constrs -> + let try_non_omega (p,pss) = + if is_absent_pat p then + Rnone + else + match + exhaust + ext pss (List.length (simple_match_args p omega) + n - 1) + with + | Rsome r -> Rsome (set_args p r) + | r -> r in + if + full_match true false constrs && not (should_extend ext constrs) + then + try_many try_non_omega constrs + else + (* + D = filter_extra pss is the default matrix + as it is included in pss, one can avoid + recursive calls on specialized matrices, + Essentially : + * D exhaustive => pss exhaustive + * D non-exhaustive => we have a non-filtered value + *) + let r = exhaust ext (filter_extra pss) (n-1) in + match r with + | Rnone -> Rnone + | Rsome r -> + try + Rsome (build_other ext constrs::r) + with + (* cannot occur, since constructors don't make a full signature *) + | Empty -> fatal_error "Parmatch.exhaust" + end + +let combinations f lst lst' = + let rec iter2 x = + function + [] -> [] + | y :: ys -> + f x y :: iter2 x ys + in + let rec iter = + function + [] -> [] + | x :: xs -> iter2 x lst' @ iter xs + in + iter lst +*) +(* +let print_pat pat = + let rec string_of_pat pat = + match pat.pat_desc with + 
Tpat_var _ -> "v" + | Tpat_any -> "_" + | Tpat_alias (p, x) -> Printf.sprintf "(%s) as ?" (string_of_pat p) + | Tpat_constant n -> "0" + | Tpat_construct (_, lid, _) -> + Printf.sprintf "%s" (String.concat "." (Longident.flatten lid.txt)) + | Tpat_lazy p -> + Printf.sprintf "(lazy %s)" (string_of_pat p) + | Tpat_or (p1,p2,_) -> + Printf.sprintf "(%s | %s)" (string_of_pat p1) (string_of_pat p2) + | Tpat_tuple list -> + Printf.sprintf "(%s)" (String.concat "," (List.map string_of_pat list)) + | Tpat_variant (_, _, _) -> "variant" + | Tpat_record (_, _) -> "record" + | Tpat_array _ -> "array" + in + Printf.fprintf stderr "PAT[%s]\n%!" (string_of_pat pat) +*) + +(* strictly more powerful than exhaust; however, exhaust + was kept for backwards compatibility *) +let rec exhaust_gadt (ext:Path.t option) pss n = match pss with +| [] -> Rsome [omegas n] +| []::_ -> Rnone +| pss -> + if not (all_coherent (simplified_first_col pss)) then + (* We're considering an ill-typed branch, we won't actually be able to + produce a well typed value taking that branch. *) + Rnone + else begin + (* Assuming the first column is ill-typed but considered coherent, we + might end up producing an ill-typed witness of non-exhaustivity + corresponding to the current branch. + + If [exhaust] has been called by [do_check_partial], then the witnesses + produced get typechecked and the ill-typed ones are discarded. + + If [exhaust] has been called by [do_check_fragile], then it is possible + we might fail to warn the user that the matching is fragile. See for + example testsuite/tests/warnings/w04_failure.ml. *) + let q0 = discr_pat omega pss in + match filter_all q0 pss with + (* first column of pss is made of variables only *) + | [] -> + begin match exhaust_gadt ext (filter_extra pss) (n-1) with + | Rsome r -> Rsome (List.map (fun row -> q0::row) r) + | r -> r + end + | constrs -> + let try_non_omega (p,pss) = + if is_absent_pat p then + Rnone + else + match + exhaust_gadt + ext pss (List.length (simple_match_args p omega) + n - 1) + with + | Rsome r -> Rsome (List.map (fun row -> (set_args p row)) r) + | r -> r in + let before = try_many_gadt try_non_omega constrs in + if + full_match false constrs && not (should_extend ext constrs) + then + before + else + (* + D = filter_extra pss is the default matrix + as it is included in pss, one can avoid + recursive calls on specialized matrices, + Essentially : + * D exhaustive => pss exhaustive + * D non-exhaustive => we have a non-filtered value + *) + let r = exhaust_gadt ext (filter_extra pss) (n-1) in + match r with + | Rnone -> before + | Rsome r -> + try + let p = build_other ext constrs in + let dug = List.map (fun tail -> p :: tail) r in + match before with + | Rnone -> Rsome dug + | Rsome x -> Rsome (x @ dug) + with + (* cannot occur, since constructors don't make a full signature *) + | Empty -> fatal_error "Parmatch.exhaust" + end + +let exhaust_gadt ext pss n = + let ret = exhaust_gadt ext pss n in + match ret with + Rnone -> Rnone + | Rsome lst -> + (* The following line is needed to compile stdlib/printf.ml *) + if lst = [] then Rsome (omegas n) else + let singletons = + List.map + (function + [x] -> x + | _ -> assert false) + lst + in + Rsome [orify_many singletons] + +(* + Another exhaustiveness check, enforcing variant typing. + Note that it does not check exact exhaustiveness, but whether a + matching could be made exhaustive by closing all variant types. 
+ When this is true of all other columns, the current column is left + open (even if it means that the whole matching is not exhaustive as + a result). + When this is false for the matrix minus the current column, and the + current column is composed of variant tags, we close the variant + (even if it doesn't help in making the matching exhaustive). +*) + +let rec pressure_variants tdefs = function + | [] -> false + | []::_ -> true + | pss -> + if not (all_coherent (simplified_first_col pss)) then + true + else begin + let q0 = discr_pat omega pss in + match filter_all q0 pss with + [] -> pressure_variants tdefs (filter_extra pss) + | constrs -> + let rec try_non_omega = function + (_p,pss) :: rem -> + let ok = pressure_variants tdefs pss in + try_non_omega rem && ok + | [] -> true + in + if full_match (tdefs=None) constrs then + try_non_omega constrs + else if tdefs = None then + pressure_variants None (filter_extra pss) + else + let full = full_match true constrs in + let ok = + if full then try_non_omega constrs + else try_non_omega (filter_all q0 (mark_partial pss)) + in + begin match constrs, tdefs with + ({pat_desc=Tpat_variant _} as p,_):: _, Some env -> + let row = row_of_pat p in + if Btype.row_fixed row + || pressure_variants None (filter_extra pss) then () + else close_variant env row + | _ -> () + end; + ok + end + + +(* Yet another satisfiable function *) + +(* + This time every_satisfiable pss qs checks the + utility of every expansion of qs. + Expansion means expansion of or-patterns inside qs +*) + +type answer = + | Used (* Useful pattern *) + | Unused (* Useless pattern *) + | Upartial of Typedtree.pattern list (* Mixed, with list of useless ones *) + + + +(* this row type enable column processing inside the matrix + - left -> elements not to be processed, + - right -> elements to be processed +*) +type 'a row = {no_ors : 'a list ; ors : 'a list ; active : 'a list} + + +(* +let pretty_row {ors=ors ; no_ors=no_ors; active=active} = + pretty_line ors ; prerr_string " *" ; + pretty_line no_ors ; prerr_string " *" ; + pretty_line active + +let pretty_rows rs = + prerr_endline "begin matrix" ; + List.iter + (fun r -> + pretty_row r ; + prerr_endline "") + rs ; + prerr_endline "end matrix" +*) + +(* Initial build *) +let make_row ps = {ors=[] ; no_ors=[]; active=ps} + +let make_rows pss = List.map make_row pss + + +(* Useful to detect and expand or pats inside as pats *) +let rec unalias p = match p.pat_desc with +| Tpat_alias (p,_,_) -> unalias p +| _ -> p + + +let is_var p = match (unalias p).pat_desc with +| Tpat_any|Tpat_var _ -> true +| _ -> false + +let is_var_column rs = + List.for_all + (fun r -> match r.active with + | p::_ -> is_var p + | [] -> assert false) + rs + +(* Standard or-args for left-to-right matching *) +let rec or_args p = match p.pat_desc with +| Tpat_or (p1,p2,_) -> p1,p2 +| Tpat_alias (p,_,_) -> or_args p +| _ -> assert false + +(* Just remove current column *) +let remove r = match r.active with +| _::rem -> {r with active=rem} +| [] -> assert false + +let remove_column rs = List.map remove rs + +(* Current column has been processed *) +let push_no_or r = match r.active with +| p::rem -> { r with no_ors = p::r.no_ors ; active=rem} +| [] -> assert false + +let push_or r = match r.active with +| p::rem -> { r with ors = p::r.ors ; active=rem} +| [] -> assert false + +let push_or_column rs = List.map push_or rs +and push_no_or_column rs = List.map push_no_or rs + +(* Those are adaptations of the previous homonymous functions that + work on the current 
column, instead of the first column +*) + +let discr_pat q rs = + discr_pat q (List.map (fun r -> r.active) rs) + +let filter_one q rs = + let rec filter_rec rs = match rs with + | [] -> [] + | r::rem -> + match r.active with + | [] -> assert false + | {pat_desc = Tpat_alias(p,_,_)}::ps -> + filter_rec ({r with active = p::ps}::rem) + | {pat_desc = Tpat_or(p1,p2,_)}::ps -> + filter_rec + ({r with active = p1::ps}:: + {r with active = p2::ps}:: + rem) + | p::ps -> + if simple_match q p then + {r with active=simple_match_args q p @ ps} :: filter_rec rem + else + filter_rec rem in + filter_rec rs + + +(* Back to normal matrices *) +let make_vector r = List.rev r.no_ors + +let make_matrix rs = List.map make_vector rs + + +(* Standard union on answers *) +let union_res r1 r2 = match r1, r2 with +| (Unused,_) +| (_, Unused) -> Unused +| Used,_ -> r2 +| _, Used -> r1 +| Upartial u1, Upartial u2 -> Upartial (u1@u2) + +(* propose or pats for expansion *) +let extract_elements qs = + let rec do_rec seen = function + | [] -> [] + | q::rem -> + {no_ors= List.rev_append seen rem @ qs.no_ors ; + ors=[] ; + active = [q]}:: + do_rec (q::seen) rem in + do_rec [] qs.ors + +(* idem for matrices *) +let transpose rs = match rs with +| [] -> assert false +| r::rem -> + let i = List.map (fun x -> [x]) r in + List.fold_left + (List.map2 (fun r x -> x::r)) + i rem + +let extract_columns pss qs = match pss with +| [] -> List.map (fun _ -> []) qs.ors +| _ -> + let rows = List.map extract_elements pss in + transpose rows + +(* Core function + The idea is to first look for or patterns (recursive case), then + check or-patterns argument usefulness (terminal case) +*) +let rec simplified_first_usefulness_col = function + | [] -> [] + | row :: rows -> + match row.active with + | [] -> assert false (* the rows are non-empty! *) + | p :: _ -> simplify_head_pat p (simplified_first_usefulness_col rows) + +let rec every_satisfiables pss qs = match qs.active with +| [] -> + (* qs is now partitionned, check usefulness *) + begin match qs.ors with + | [] -> (* no or-patterns *) + if satisfiable (make_matrix pss) (make_vector qs) then + Used + else + Unused + | _ -> (* n or-patterns -> 2n expansions *) + List.fold_right2 + (fun pss qs r -> match r with + | Unused -> Unused + | _ -> + match qs.active with + | [q] -> + let q1,q2 = or_args q in + let r_loc = every_both pss qs q1 q2 in + union_res r r_loc + | _ -> assert false) + (extract_columns pss qs) (extract_elements qs) + Used + end +| q::rem -> + let uq = unalias q in + begin match uq.pat_desc with + | Tpat_any | Tpat_var _ -> + if is_var_column pss then +(* forget about ``all-variable'' columns now *) + every_satisfiables (remove_column pss) (remove qs) + else +(* otherwise this is direct food for satisfiable *) + every_satisfiables (push_no_or_column pss) (push_no_or qs) + | Tpat_or (q1,q2,_) -> + if + q1.pat_loc.Location.loc_ghost && + q2.pat_loc.Location.loc_ghost + then +(* syntactically generated or-pats should not be expanded *) + every_satisfiables (push_no_or_column pss) (push_no_or qs) + else +(* this is a real or-pattern *) + every_satisfiables (push_or_column pss) (push_or qs) + | Tpat_variant (l,_,r) when is_absent l r -> (* Ah Jacques... 
*) + Unused + | _ -> +(* standard case, filter matrix *) + (* The handling of incoherent matrices is kept in line with + [satisfiable] *) + if not (all_coherent (uq :: simplified_first_usefulness_col pss)) then + Unused + else begin + let q0 = discr_pat q pss in + every_satisfiables + (filter_one q0 pss) + {qs with active=simple_match_args q0 q @ rem} + end + end + +(* + This function ``every_both'' performs the usefulness check + of or-pat q1|q2. + The trick is to call every_satisfied twice with + current active columns restricted to q1 and q2, + That way, + - others orpats in qs.ors will not get expanded. + - all matching work performed on qs.no_ors is not performed again. + *) +and every_both pss qs q1 q2 = + let qs1 = {qs with active=[q1]} + and qs2 = {qs with active=[q2]} in + let r1 = every_satisfiables pss qs1 + and r2 = every_satisfiables (if compat q1 q2 then qs1::pss else pss) qs2 in + match r1 with + | Unused -> + begin match r2 with + | Unused -> Unused + | Used -> Upartial [q1] + | Upartial u2 -> Upartial (q1::u2) + end + | Used -> + begin match r2 with + | Unused -> Upartial [q2] + | _ -> r2 + end + | Upartial u1 -> + begin match r2 with + | Unused -> Upartial (u1@[q2]) + | Used -> r1 + | Upartial u2 -> Upartial (u1 @ u2) + end + + + + +(* le_pat p q means, forall V, V matches q implies V matches p *) +let rec le_pat p q = + match (p.pat_desc, q.pat_desc) with + | (Tpat_var _|Tpat_any),_ -> true + | Tpat_alias(p,_,_), _ -> le_pat p q + | _, Tpat_alias(q,_,_) -> le_pat p q + | Tpat_constant(c1), Tpat_constant(c2) -> const_compare c1 c2 = 0 + | Tpat_construct(_,c1,ps), Tpat_construct(_,c2,qs) -> + Types.equal_tag c1.cstr_tag c2.cstr_tag && le_pats ps qs + | Tpat_variant(l1,Some p1,_), Tpat_variant(l2,Some p2,_) -> + (l1 = l2 && le_pat p1 p2) + | Tpat_variant(l1,None,_r1), Tpat_variant(l2,None,_) -> + l1 = l2 + | Tpat_variant(_,_,_), Tpat_variant(_,_,_) -> false + | Tpat_tuple(ps), Tpat_tuple(qs) -> le_pats ps qs + | Tpat_lazy p, Tpat_lazy q -> le_pat p q + | Tpat_record (l1,_), Tpat_record (l2,_) -> + let ps,qs = records_args l1 l2 in + le_pats ps qs + | Tpat_array(ps), Tpat_array(qs) -> + List.length ps = List.length qs && le_pats ps qs +(* In all other cases, enumeration is performed *) + | _,_ -> not (satisfiable [[p]] [q]) + +and le_pats ps qs = + match ps,qs with + p::ps, q::qs -> le_pat p q && le_pats ps qs + | _, _ -> true + +let get_mins le ps = + let rec select_rec r = function + [] -> r + | p::ps -> + if List.exists (fun p0 -> le p0 p) ps + then select_rec r ps + else select_rec (p::r) ps in + select_rec [] (select_rec [] ps) + +(* + lub p q is a pattern that matches all values matched by p and q + may raise Empty, when p and q are not compatible +*) + +let rec lub p q = match p.pat_desc,q.pat_desc with +| Tpat_alias (p,_,_),_ -> lub p q +| _,Tpat_alias (q,_,_) -> lub p q +| (Tpat_any|Tpat_var _),_ -> q +| _,(Tpat_any|Tpat_var _) -> p +| Tpat_or (p1,p2,_),_ -> orlub p1 p2 q +| _,Tpat_or (q1,q2,_) -> orlub q1 q2 p (* Thanks god, lub is commutative *) +| Tpat_constant c1, Tpat_constant c2 when const_compare c1 c2 = 0 -> p +| Tpat_tuple ps, Tpat_tuple qs -> + let rs = lubs ps qs in + make_pat (Tpat_tuple rs) p.pat_type p.pat_env +| Tpat_lazy p, Tpat_lazy q -> + let r = lub p q in + make_pat (Tpat_lazy r) p.pat_type p.pat_env +| Tpat_construct (lid, c1,ps1), Tpat_construct (_,c2,ps2) + when Types.equal_tag c1.cstr_tag c2.cstr_tag -> + let rs = lubs ps1 ps2 in + make_pat (Tpat_construct (lid, c1,rs)) + p.pat_type p.pat_env +| Tpat_variant(l1,Some p1,row), 
Tpat_variant(l2,Some p2,_) + when l1=l2 -> + let r=lub p1 p2 in + make_pat (Tpat_variant (l1,Some r,row)) p.pat_type p.pat_env +| Tpat_variant (l1,None,_row), Tpat_variant(l2,None,_) + when l1 = l2 -> p +| Tpat_record (l1,closed),Tpat_record (l2,_) -> + let rs = record_lubs l1 l2 in + make_pat (Tpat_record (rs, closed)) p.pat_type p.pat_env +| Tpat_array ps, Tpat_array qs + when List.length ps = List.length qs -> + let rs = lubs ps qs in + make_pat (Tpat_array rs) p.pat_type p.pat_env +| _,_ -> + raise Empty + +and orlub p1 p2 q = + try + let r1 = lub p1 q in + try + {q with pat_desc=(Tpat_or (r1,lub p2 q,None))} + with + | Empty -> r1 +with +| Empty -> lub p2 q + +and record_lubs l1 l2 = + let rec lub_rec l1 l2 = match l1,l2 with + | [],_ -> l2 + | _,[] -> l1 + | (lid1, lbl1,p1)::rem1, (lid2, lbl2,p2)::rem2 -> + if lbl1.lbl_pos < lbl2.lbl_pos then + (lid1, lbl1,p1)::lub_rec rem1 l2 + else if lbl2.lbl_pos < lbl1.lbl_pos then + (lid2, lbl2,p2)::lub_rec l1 rem2 + else + (lid1, lbl1,lub p1 p2)::lub_rec rem1 rem2 in + lub_rec l1 l2 + +and lubs ps qs = match ps,qs with +| p::ps, q::qs -> lub p q :: lubs ps qs +| _,_ -> [] + + +(******************************) +(* Exported variant closing *) +(******************************) + +(* Apply pressure to variants *) + +let pressure_variants tdefs patl = + let pss = List.map (fun p -> [p;omega]) patl in + ignore (pressure_variants (Some tdefs) pss) + +(*****************************) +(* Utilities for diagnostics *) +(*****************************) + +(* + Build up a working pattern matrix by forgetting + about guarded patterns +*) + +let rec initial_matrix = function + [] -> [] + | {c_guard=Some _} :: rem -> initial_matrix rem + | {c_guard=None; c_lhs=p} :: rem -> [p] :: initial_matrix rem + +(******************************************) +(* Look for a row that matches some value *) +(******************************************) + +(* + Useful for seeing if the example of + non-matched value can indeed be matched + (by a guarded clause) +*) + + + +exception NoGuard + +let rec initial_all no_guard = function + | [] -> + if no_guard then + raise NoGuard + else + [] + | {c_lhs=pat; c_guard; _} :: rem -> + ([pat], pat.pat_loc) :: initial_all (no_guard && c_guard = None) rem + + +let rec do_filter_var = function + | (_::ps,loc)::rem -> (ps,loc)::do_filter_var rem + | _ -> [] + +let do_filter_one q pss = + let rec filter_rec = function + | ({pat_desc = Tpat_alias(p,_,_)}::ps,loc)::pss -> + filter_rec ((p::ps,loc)::pss) + | ({pat_desc = Tpat_or(p1,p2,_)}::ps,loc)::pss -> + filter_rec ((p1::ps,loc)::(p2::ps,loc)::pss) + | (p::ps,loc)::pss -> + if simple_match q p + then (simple_match_args q p @ ps, loc) :: filter_rec pss + else filter_rec pss + | _ -> [] in + filter_rec pss + +let rec do_match pss qs = match qs with +| [] -> + begin match pss with + | ([],loc)::_ -> Some loc + | _ -> None + end +| q::qs -> match q with + | {pat_desc = Tpat_or (q1,q2,_)} -> + begin match do_match pss (q1::qs) with + | None -> do_match pss (q2::qs) + | r -> r + end + | {pat_desc = Tpat_any} -> + do_match (do_filter_var pss) qs + | _ -> + let q0 = normalize_pat q in + (* [pss] will (or won't) match [q0 :: qs] regardless of the coherence of + its first column. 
*) + do_match (do_filter_one q0 pss) (simple_match_args q0 q @ qs) + + +let check_partial_all v casel = + try + let pss = initial_all true casel in + do_match pss [v] + with + | NoGuard -> None + +(************************) +(* Exhaustiveness check *) +(************************) + +(* conversion from Typedtree.pattern to Parsetree.pattern list *) +module Conv = struct + open Parsetree + let mkpat desc = Ast_helper.Pat.mk desc + + let name_counter = ref 0 + let fresh name = + let current = !name_counter in + name_counter := !name_counter + 1; + "#$" ^ name ^ string_of_int current + + let conv typed = + let constrs = Hashtbl.create 7 in + let labels = Hashtbl.create 7 in + let rec loop pat = + match pat.pat_desc with + Tpat_or (pa,pb,_) -> + mkpat (Ppat_or (loop pa, loop pb)) + | Tpat_var (_, ({txt="*extension*"} as nm)) -> (* PR#7330 *) + mkpat (Ppat_var nm) + | Tpat_any + | Tpat_var _ -> + mkpat Ppat_any + | Tpat_constant c -> + mkpat (Ppat_constant (Untypeast.constant c)) + | Tpat_alias (p,_,_) -> loop p + | Tpat_tuple lst -> + mkpat (Ppat_tuple (List.map loop lst)) + | Tpat_construct (cstr_lid, cstr, lst) -> + let id = fresh cstr.cstr_name in + let lid = { cstr_lid with txt = Longident.Lident id } in + Hashtbl.add constrs id cstr; + let arg = + match List.map loop lst with + | [] -> None + | [p] -> Some p + | lst -> Some (mkpat (Ppat_tuple lst)) + in + mkpat (Ppat_construct(lid, arg)) + | Tpat_variant(label,p_opt,_row_desc) -> + let arg = Misc.may_map loop p_opt in + mkpat (Ppat_variant(label, arg)) + | Tpat_record (subpatterns, _closed_flag) -> + let fields = + List.map + (fun (_, lbl, p) -> + let id = fresh lbl.lbl_name in + Hashtbl.add labels id lbl; + (mknoloc (Longident.Lident id), loop p)) + subpatterns + in + mkpat (Ppat_record (fields, Open)) + | Tpat_array lst -> + mkpat (Ppat_array (List.map loop lst)) + | Tpat_lazy p -> + mkpat (Ppat_lazy (loop p)) + in + let ps = loop typed in + (ps, constrs, labels) +end + + +(* Whether the counter-example contains an extension pattern *) +let contains_extension pat = + let r = ref false in + let rec loop = function + {pat_desc=Tpat_var (_, {txt="*extension*"})} -> + r := true + | p -> Typedtree.iter_pattern_desc loop p.pat_desc + in loop pat; !r + +(* Build an untyped or-pattern from its expected type *) +let ppat_of_type env ty = + match pats_of_type env ty with + [{pat_desc = Tpat_any}] -> + (Conv.mkpat Parsetree.Ppat_any, Hashtbl.create 0, Hashtbl.create 0) + | pats -> + Conv.conv (orify_many pats) + +let do_check_partial ?pred exhaust loc casel pss = match pss with +| [] -> + (* + This can occur + - For empty matches generated by ocamlp4 (no warning) + - when all patterns have guards (then, casel <> []) + (specific warning) + Then match MUST be considered non-exhaustive, + otherwise compilation of PM is broken. 
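+
+     For instance (an illustrative sketch, not an exhaustive list of
+     cases), in
+
+       match n with
+       | x when x >= 0 -> 1
+       | x when x < 0 -> 2
+
+     every clause is guarded, so [initial_matrix] drops them all and
+     [pss] is empty here; even though the guards happen to cover all
+     integers, the match is compiled and reported as Partial.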
+ *) + begin match casel with + | [] -> () + | _ -> + if Warnings.is_active Warnings.All_clauses_guarded then + Location.prerr_warning loc Warnings.All_clauses_guarded + end ; + Partial +| ps::_ -> + begin match exhaust None pss (List.length ps) with + | Rnone -> Total + | Rsome [u] -> + let v = + match pred with + | Some pred -> + let (pattern,constrs,labels) = Conv.conv u in + let u' = pred constrs labels pattern in + (* pretty_pat u; + begin match u' with + None -> prerr_endline ": impossible" + | Some _ -> prerr_endline ": possible" + end; *) + u' + | None -> Some u + in + begin match v with + None -> Total + | Some v -> + if Warnings.is_active (Warnings.Partial_match "") then begin + let errmsg = + try + let buf = Buffer.create 16 in + let fmt = formatter_of_buffer buf in + top_pretty fmt v; + begin match check_partial_all v casel with + | None -> () + | Some _ -> + (* This is 'Some loc', where loc is the location of + a possibly matching clause. + Forget about loc, because printing two locations + is a pain in the top-level *) + Buffer.add_string buf + "\n(However, some guarded clause may match this value.)" + end; + if contains_extension v then + Buffer.add_string buf + "\nMatching over values of extensible variant types \ + (the *extension* above)\n\ + must include a wild card pattern in order to be exhaustive." + ; + Buffer.contents buf + with _ -> + "" + in + Location.prerr_warning loc (Warnings.Partial_match errmsg) + end; + Partial + end + | _ -> + fatal_error "Parmatch.check_partial" + end + +(* +let do_check_partial_normal loc casel pss = + do_check_partial exhaust loc casel pss + *) + +let do_check_partial_gadt pred loc casel pss = + do_check_partial ~pred exhaust_gadt loc casel pss + + + +(*****************) +(* Fragile check *) +(*****************) + +(* Collect all data types in a pattern *) + +let rec add_path path = function + | [] -> [path] + | x::rem as paths -> + if Path.same path x then paths + else x::add_path path rem + +let extendable_path path = + not + (Path.same path Predef.path_bool || + Path.same path Predef.path_list || + Path.same path Predef.path_unit || + Path.same path Predef.path_option) + +let rec collect_paths_from_pat r p = match p.pat_desc with +| Tpat_construct(_, {cstr_tag=(Cstr_constant _|Cstr_block _|Cstr_unboxed)},ps) + -> + let path = get_type_path p.pat_type p.pat_env in + List.fold_left + collect_paths_from_pat + (if extendable_path path then add_path path r else r) + ps +| Tpat_any|Tpat_var _|Tpat_constant _| Tpat_variant (_,None,_) -> r +| Tpat_tuple ps | Tpat_array ps +| Tpat_construct (_, {cstr_tag=Cstr_extension _}, ps)-> + List.fold_left collect_paths_from_pat r ps +| Tpat_record (lps,_) -> + List.fold_left + (fun r (_, _, p) -> collect_paths_from_pat r p) + r lps +| Tpat_variant (_, Some p, _) | Tpat_alias (p,_,_) -> collect_paths_from_pat r p +| Tpat_or (p1,p2,_) -> + collect_paths_from_pat (collect_paths_from_pat r p1) p2 +| Tpat_lazy p + -> + collect_paths_from_pat r p + + +(* + Actual fragile check + 1. Collect data types in the patterns of the match. + 2. One exhaustivity check per datatype, considering that + the type is extended. 
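+
+   An illustrative case (warning 4 must be enabled for it to be
+   reported):
+
+     type t = A | B
+     let f = function A -> 1 | _ -> 2
+
+   The wildcard keeps the match exhaustive even if a constructor is
+   later added to t, silently absorbing the new case, so the match is
+   flagged as fragile with respect to t.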
+*) + +let do_check_fragile_param exhaust loc casel pss = + let exts = + List.fold_left + (fun r c -> collect_paths_from_pat r c.c_lhs) + [] casel in + match exts with + | [] -> () + | _ -> match pss with + | [] -> () + | ps::_ -> + List.iter + (fun ext -> + match exhaust (Some ext) pss (List.length ps) with + | Rnone -> + Location.prerr_warning + loc + (Warnings.Fragile_match (Path.name ext)) + | Rsome _ -> ()) + exts + +(*let do_check_fragile_normal = do_check_fragile_param exhaust*) +let do_check_fragile_gadt = do_check_fragile_param exhaust_gadt + +(********************************) +(* Exported unused clause check *) +(********************************) + +let check_unused pred casel = + if Warnings.is_active Warnings.Unused_match + || List.exists (fun c -> c.c_rhs.exp_desc = Texp_unreachable) casel then + let rec do_rec pref = function + | [] -> () + | {c_lhs=q; c_guard; c_rhs} :: rem -> + let qs = [q] in + begin try + let pss = + get_mins le_pats (List.filter (compats qs) pref) in + (* First look for redundant or partially redundant patterns *) + let r = every_satisfiables (make_rows pss) (make_row qs) in + let refute = (c_rhs.exp_desc = Texp_unreachable) in + (* Do not warn for unused [pat -> .] *) + if r = Unused && refute then () else + let r = + (* Do not refine if there are no other lines *) + let skip = + r = Unused || (not refute && pref = []) || + not(refute || Warnings.is_active Warnings.Unreachable_case) in + if skip then r else + (* Then look for empty patterns *) + let sfs = satisfiables pss qs in + if sfs = [] then Unused else + let sfs = + List.map (function [u] -> u | _ -> assert false) sfs in + let u = orify_many sfs in + (*Format.eprintf "%a@." pretty_val u;*) + let (pattern,constrs,labels) = Conv.conv u in + let pattern = {pattern with Parsetree.ppat_loc = q.pat_loc} in + match pred refute constrs labels pattern with + None when not refute -> + Location.prerr_warning q.pat_loc Warnings.Unreachable_case; + Used + | _ -> r + in + match r with + | Unused -> + Location.prerr_warning + q.pat_loc Warnings.Unused_match + | Upartial ps -> + List.iter + (fun p -> + Location.prerr_warning + p.pat_loc Warnings.Unused_pat) + ps + | Used -> () + with Empty | Not_found | NoGuard -> assert false + end ; + + if c_guard <> None then + do_rec pref rem + else + do_rec ([q]::pref) rem in + + do_rec [] casel + +(*********************************) +(* Exported irrefutability tests *) +(*********************************) + +let irrefutable pat = le_pat pat omega + +let inactive ~partial pat = + match partial with + | Partial -> false + | Total -> begin + let rec loop pat = + match pat.pat_desc with + | Tpat_lazy _ | Tpat_array _ -> + false + | Tpat_any | Tpat_var _ | Tpat_variant (_, None, _) -> + true + | Tpat_constant c -> begin + match c with + | Const_string _ -> Config.safe_string + | Const_int _ | Const_char _ | Const_float _ + | Const_int32 _ | Const_int64 _ | Const_nativeint _ -> true + end + | Tpat_tuple ps | Tpat_construct (_, _, ps) -> + List.for_all (fun p -> loop p) ps + | Tpat_alias (p,_,_) | Tpat_variant (_, Some p, _) -> + loop p + | Tpat_record (ldps,_) -> + List.for_all + (fun (_, lbl, p) -> lbl.lbl_mut = Immutable && loop p) + ldps + | Tpat_or (p,q,_) -> + loop p && loop q + in + loop pat + end + + + + + + + +(*********************************) +(* Exported exhaustiveness check *) +(*********************************) + +(* + Fragile check is performed when required and + on exhaustive matches only. 
+*) + +let check_partial_param do_check_partial do_check_fragile loc casel = + let pss = initial_matrix casel in + let pss = get_mins le_pats pss in + let total = do_check_partial loc casel pss in + if + total = Total && Warnings.is_active (Warnings.Fragile_match "") + then begin + do_check_fragile loc casel pss + end ; + total + +(*let check_partial = + check_partial_param + do_check_partial_normal + do_check_fragile_normal*) + +let check_partial_gadt pred loc casel = + check_partial_param (do_check_partial_gadt pred) + do_check_fragile_gadt loc casel + + +(*************************************) +(* Ambiguous variable in or-patterns *) +(*************************************) + +(* Specification: ambiguous variables in or-patterns. + + The semantics of or-patterns in OCaml is specified with + a left-to-right bias: a value [v] matches the pattern [p | q] if it + matches [p] or [q], but if it matches both, the environment + captured by the match is the environment captured by [p], never the + one captured by [q]. + + While this property is generally well-understood, one specific case + where users expect a different semantics is when a pattern is + followed by a when-guard: [| p when g -> e]. Consider for example: + + | ((Const x, _) | (_, Const x)) when is_neutral x -> branch + + The semantics is clear: match the scrutinee against the pattern, if + it matches, test the guard, and if the guard passes, take the + branch. + + However, consider the input [(Const a, Const b)], where [a] fails + the test [is_neutral f], while [b] passes the test [is_neutral + b]. With the left-to-right semantics, the clause above is *not* + taken by its input: matching [(Const a, Const b)] against the + or-pattern succeeds in the left branch, it returns the environment + [x -> a], and then the guard [is_neutral a] is tested and fails, + the branch is not taken. Most users, however, intuitively expect + that any pair that has one side passing the test will take the + branch. They assume it is equivalent to the following: + + | (Const x, _) when is_neutral x -> branch + | (_, Const x) when is_neutral x -> branch + + while it is not. + + The code below is dedicated to finding these confusing cases: the + cases where a guard uses "ambiguous" variables, that are bound to + different parts of the scrutinees by different sides of + a or-pattern. In other words, it finds the cases where the + specified left-to-right semantics is not equivalent to + a non-deterministic semantics (any branch can be taken) relatively + to a specific guard. 
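+
+   As a further illustration (the patterns below are only an example):
+   in
+
+     | (x, Some _) | (x, None) when p x -> branch
+
+   both sides of the or-pattern bind [x] to the same sub-value, so [x]
+   is stable and no warning is emitted; in the [Const x] clause above,
+   [x] is not stable, which is what triggers the Ambiguous_pattern
+   warning when the guard mentions it.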
+*) + +module IdSet = Set.Make(Ident) + +let pattern_vars p = IdSet.of_list (Typedtree.pat_bound_idents p) + +(* Row for ambiguous variable search, + unseen is the traditional pattern row, + seen is a list of position bindings *) + +type amb_row = { unseen : pattern list ; seen : IdSet.t list; } + + +(* Push binding variables now *) + +let rec do_push r p ps seen k = match p.pat_desc with +| Tpat_alias (p,x,_) -> do_push (IdSet.add x r) p ps seen k +| Tpat_var (x,_) -> + (omega,{ unseen = ps; seen=IdSet.add x r::seen; })::k +| Tpat_or (p1,p2,_) -> + do_push r p1 ps seen (do_push r p2 ps seen k) +| _ -> + (p,{ unseen = ps; seen = r::seen; })::k + +let rec push_vars = function + | [] -> [] + | { unseen = [] }::_ -> assert false + | { unseen = p::ps; seen; }::rem -> + do_push IdSet.empty p ps seen (push_vars rem) + +let collect_stable = function + | [] -> assert false + | { seen=xss; _}::rem -> + let rec c_rec xss = function + | [] -> xss + | {seen=yss; _}::rem -> + let xss = List.map2 IdSet.inter xss yss in + c_rec xss rem in + let inters = c_rec xss rem in + List.fold_left IdSet.union IdSet.empty inters + + +(*********************************************) +(* Filtering utilities for our specific rows *) +(*********************************************) + +(* Take a pattern matrix as a list (rows) of lists (columns) of patterns + | p1, p2, .., pn + | q1, q2, .., qn + | r1, r2, .., rn + | ... + + We split this matrix into a list of sub-matrices, one for each head + constructor appearing in the leftmost column. For each row whose + left column starts with a head constructor, remove this head + column, prepend one column for each argument of the constructor, + and add the resulting row in the sub-matrix corresponding to this + head constructor. + + Rows whose left column is omega (the Any pattern _) may match any + head constructor, so they are added to all groups. 
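+
+   A small worked example (the patterns are illustrative only): from
+   the left column of
+
+     | Some p1, q1
+     | None,    q2
+     | _,       q3
+
+   we obtain a sub-matrix for Some containing the rows [p1; q1] and
+   [_; q3], and a sub-matrix for None containing the rows [q2] and
+   [q3]; the wildcard row contributes to both groups, with a fresh
+   wildcard standing in for the argument of Some.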
+ + The list of sub-matrices is represented as a list of pair + (head constructor, submatrix) +*) + +let filter_all = + (* the head constructor (as a pattern with omega arguments) of + a pattern *) + let discr_head pat = + match pat.pat_desc with + | Tpat_record (lbls, closed) -> + (* a partial record pattern { f1 = p1; f2 = p2; _ } + needs to be expanded, otherwise matching against this head + would drop the pattern arguments for non-mentioned fields *) + let lbls = all_record_args lbls in + normalize_pat { pat with pat_desc = Tpat_record (lbls, closed) } + | _ -> normalize_pat pat + in + + (* insert a row of head [p] and rest [r] into the right group *) + let rec insert p r env = match env with + | [] -> + (* if no group matched this row, it has a head constructor that + was never seen before; add a new sub-matrix for this head *) + let p0 = discr_head p in + [p0,[{ r with unseen = simple_match_args p0 p @ r.unseen }]] + | (q0,rs) as bd::env -> + if simple_match q0 p then begin + let r = { r with unseen = simple_match_args q0 p@r.unseen; } in + (q0,r::rs)::env + end + else bd::insert p r env in + + (* insert a row of head omega into all groups *) + let insert_omega r env = + List.map + (fun (q0,rs) -> + let r = + { r with unseen = simple_match_args q0 omega @ r.unseen; } in + (q0,r::rs)) + env + in + + let rec filter_rec env = function + | [] -> env + | ({pat_desc=(Tpat_var _|Tpat_alias _|Tpat_or _)},_)::_ -> assert false + | ({pat_desc=Tpat_any}, _)::rs -> filter_rec env rs + | (p,r)::rs -> filter_rec (insert p r env) rs in + + let rec filter_omega env = function + | [] -> env + | ({pat_desc=(Tpat_var _|Tpat_alias _|Tpat_or _)},_)::_ -> assert false + | ({pat_desc=Tpat_any},r)::rs -> filter_omega (insert_omega r env) rs + | _::rs -> filter_omega env rs in + + fun rs -> + (* first insert the rows with head constructors, + to get the definitive list of groups *) + let env = filter_rec [] rs in + (* then add the omega rows to all groups *) + filter_omega env rs + +(* Compute stable bindings *) + +let rec do_stable rs = match rs with +| [] -> assert false (* No empty matrix *) +| { unseen=[]; _ }::_ -> + collect_stable rs +| _ -> + let rs = push_vars rs in + if not (all_coherent (first_column rs)) then begin + (* If the first column is incoherent, then all the variables of this + matrix are stable. *) + List.fold_left (fun acc (_, { seen; _ }) -> + List.fold_left IdSet.union acc seen + ) IdSet.empty rs + end else begin + (* If the column is ill-typed but deemed coherent, we might spuriously + warn about some variables being unstable. + As sad as that might be, the warning can be silenced by splitting the + or-pattern... *) + match filter_all rs with + | [] -> + do_stable (List.map snd rs) + | (_,rs)::env -> + List.fold_left + (fun xs (_,rs) -> IdSet.inter xs (do_stable rs)) + (do_stable rs) env + end + +let stable p = do_stable [{unseen=[p]; seen=[];}] + + +(* All identifier paths that appear in an expression that occurs + as a clause right hand side or guard. + + The function is rather complex due to the compilation of + unpack patterns by introducing code in rhs expressions + and **guards**. + + For pattern (module M:S) -> e the code is + let module M_mod = unpack M .. in e + + Hence M is "free" in e iff M_mod is free in e. + + Not doing so will yield excessive warning in + (module (M:S) } ...) when true -> .... + as M is always present in + let module M_mod = unpack M .. 
in true +*) + +let all_rhs_idents exp = + let ids = ref IdSet.empty in + let module Iterator = TypedtreeIter.MakeIterator(struct + include TypedtreeIter.DefaultIteratorArgument + let enter_expression exp = match exp.exp_desc with + | Texp_ident (path, _lid, _descr) -> + List.iter + (fun id -> ids := IdSet.add id !ids) + (Path.heads path) + | _ -> () + +(* Very hackish, detect unpack pattern compilation + and perform "indirect check for them" *) + let is_unpack exp = + List.exists + (fun (attr, _) -> attr.txt = "#modulepat") exp.exp_attributes + + let leave_expression exp = + if is_unpack exp then begin match exp.exp_desc with + | Texp_letmodule + (id_mod,_, + {mod_desc= + Tmod_unpack ({exp_desc=Texp_ident (Path.Pident id_exp,_,_)},_)}, + _) -> + assert (IdSet.mem id_exp !ids) ; + if not (IdSet.mem id_mod !ids) then begin + ids := IdSet.remove id_exp !ids + end + | _ -> assert false + end + end) in + Iterator.iter_expression exp; + !ids + +let check_ambiguous_bindings = + let open Warnings in + let warn0 = Ambiguous_pattern [] in + fun cases -> + if is_active warn0 then + List.iter + (fun case -> match case with + | { c_guard=None ; _} -> () + | { c_lhs=p; c_guard=Some g; _} -> + let all = + IdSet.inter (pattern_vars p) (all_rhs_idents g) in + if not (IdSet.is_empty all) then begin + let st = stable p in + let ambiguous = IdSet.diff all st in + if not (IdSet.is_empty ambiguous) then begin + let pps = IdSet.elements ambiguous |> List.map Ident.name in + let warn = Ambiguous_pattern pps in + Location.prerr_warning p.pat_loc warn + end + end) + cases diff --git a/res_syntax/compiler-libs-406/parmatch.mli b/res_syntax/compiler-libs-406/parmatch.mli new file mode 100644 index 0000000000..feecb0c5b0 --- /dev/null +++ b/res_syntax/compiler-libs-406/parmatch.mli @@ -0,0 +1,100 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Detection of partial matches and unused match cases. 
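+
+   The main entry points below are [check_partial_gadt]
+   (exhaustiveness), [check_unused] (redundant clauses),
+   [check_ambiguous_bindings] (or-pattern variables used under a
+   guard), and the irrefutability tests [irrefutable] and [inactive];
+   the other exports are lower-level pattern and pattern-matrix
+   utilities.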
*) +open Asttypes +open Typedtree +open Types + +val pretty_const : constant -> string +val top_pretty : Format.formatter -> pattern -> unit +val pretty_pat : pattern -> unit +val pretty_line : pattern list -> unit +val pretty_matrix : pattern list list -> unit + +val omega : pattern +val omegas : int -> pattern list +val omega_list : 'a list -> pattern list +val normalize_pat : pattern -> pattern +val all_record_args : + (Longident.t loc * label_description * pattern) list -> + (Longident.t loc * label_description * pattern) list +val const_compare : constant -> constant -> int + +val le_pat : pattern -> pattern -> bool +val le_pats : pattern list -> pattern list -> bool + +(* Exported compatibility functor, abstracted over constructor equality *) +module Compat : + functor + (Constr: sig + val equal : + Types.constructor_description -> + Types.constructor_description -> + bool + end) -> sig + val compat : pattern -> pattern -> bool + val compats : pattern list -> pattern list -> bool + end + +exception Empty +val lub : pattern -> pattern -> pattern +val lubs : pattern list -> pattern list -> pattern list + +val get_mins : ('a -> 'a -> bool) -> 'a list -> 'a list + +(* Those two functions recombine one pattern and its arguments: + For instance: + (_,_)::p1::p2::rem -> (p1, p2)::rem + The second one will replace mutable arguments by '_' +*) +val set_args : pattern -> pattern list -> pattern list +val set_args_erase_mutable : pattern -> pattern list -> pattern list + +val pat_of_constr : pattern -> constructor_description -> pattern +val complete_constrs : + pattern -> constructor_tag list -> constructor_description list +val ppat_of_type : + Env.t -> type_expr -> + Parsetree.pattern * + (string, constructor_description) Hashtbl.t * + (string, label_description) Hashtbl.t + +val pressure_variants: Env.t -> pattern list -> unit +val check_partial_gadt: + ((string, constructor_description) Hashtbl.t -> + (string, label_description) Hashtbl.t -> + Parsetree.pattern -> pattern option) -> + Location.t -> case list -> partial +val check_unused: + (bool -> + (string, constructor_description) Hashtbl.t -> + (string, label_description) Hashtbl.t -> + Parsetree.pattern -> pattern option) -> + case list -> unit + +(* Irrefutability tests *) +val irrefutable : pattern -> bool + +(** An inactive pattern is a pattern, matching against which can be duplicated, erased or + delayed without change in observable behavior of the program. Patterns containing + (lazy _) subpatterns or reads of mutable fields are active. *) +val inactive : partial:partial -> pattern -> bool + +(* Ambiguous bindings *) +val check_ambiguous_bindings : case list -> unit + +(* The tag used for open polymorphic variant types *) +val some_other_tag : label diff --git a/res_syntax/compiler-libs-406/parse.ml b/res_syntax/compiler-libs-406/parse.ml new file mode 100644 index 0000000000..ba89f0e2e2 --- /dev/null +++ b/res_syntax/compiler-libs-406/parse.ml @@ -0,0 +1,67 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +(* Entry points in the parser *) + +(* Skip tokens to the end of the phrase *) + +let rec skip_phrase lexbuf = + try + match Lexer.token lexbuf with + Parser.SEMISEMI | Parser.EOF -> () + | _ -> skip_phrase lexbuf + with + | Lexer.Error (Lexer.Unterminated_comment _, _) + | Lexer.Error (Lexer.Unterminated_string, _) + | Lexer.Error (Lexer.Unterminated_string_in_comment _, _) + | Lexer.Error (Lexer.Illegal_character _, _) -> skip_phrase lexbuf +;; + +let maybe_skip_phrase lexbuf = + if Parsing.is_current_lookahead Parser.SEMISEMI + || Parsing.is_current_lookahead Parser.EOF + then () + else skip_phrase lexbuf + +let wrap parsing_fun lexbuf = + try + Docstrings.init (); + Lexer.init (); + let ast = parsing_fun Lexer.token lexbuf in + Parsing.clear_parser(); + Docstrings.warn_bad_docstrings (); + ast + with + | Lexer.Error(Lexer.Illegal_character _, _) as err + when !Location.input_name = "//toplevel//"-> + skip_phrase lexbuf; + raise err + | Syntaxerr.Error _ as err + when !Location.input_name = "//toplevel//" -> + maybe_skip_phrase lexbuf; + raise err + | Parsing.Parse_error | Syntaxerr.Escape_error -> + let loc = Location.curr lexbuf in + if !Location.input_name = "//toplevel//" + then maybe_skip_phrase lexbuf; + raise(Syntaxerr.Error(Syntaxerr.Other loc)) + +let implementation = wrap Parser.implementation +and interface = wrap Parser.interface +and toplevel_phrase = wrap Parser.toplevel_phrase +and use_file = wrap Parser.use_file +and core_type = wrap Parser.parse_core_type +and expression = wrap Parser.parse_expression +and pattern = wrap Parser.parse_pattern diff --git a/res_syntax/compiler-libs-406/parse.mli b/res_syntax/compiler-libs-406/parse.mli new file mode 100644 index 0000000000..8e6eb4544e --- /dev/null +++ b/res_syntax/compiler-libs-406/parse.mli @@ -0,0 +1,24 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +(** Entry points in the parser *) + +val implementation : Lexing.lexbuf -> Parsetree.structure +val interface : Lexing.lexbuf -> Parsetree.signature +val toplevel_phrase : Lexing.lexbuf -> Parsetree.toplevel_phrase +val use_file : Lexing.lexbuf -> Parsetree.toplevel_phrase list +val core_type : Lexing.lexbuf -> Parsetree.core_type +val expression : Lexing.lexbuf -> Parsetree.expression +val pattern : Lexing.lexbuf -> Parsetree.pattern diff --git a/res_syntax/compiler-libs-406/parser.ml b/res_syntax/compiler-libs-406/parser.ml new file mode 100644 index 0000000000..5146b6523f --- /dev/null +++ b/res_syntax/compiler-libs-406/parser.ml @@ -0,0 +1,12174 @@ +type token = + | AMPERAMPER + | AMPERSAND + | AND + | AS + | ASSERT + | BACKQUOTE + | BANG + | BAR + | BARBAR + | BARRBRACKET + | BEGIN + | CHAR of (char) + | CLASS + | COLON + | COLONCOLON + | COLONEQUAL + | COLONGREATER + | COMMA + | CONSTRAINT + | DO + | DONE + | DOT + | DOTDOT + | DOWNTO + | ELSE + | END + | EOF + | EQUAL + | EXCEPTION + | EXTERNAL + | FALSE + | FLOAT of (string * char option) + | FOR + | FUN + | FUNCTION + | FUNCTOR + | GREATER + | GREATERRBRACE + | GREATERRBRACKET + | IF + | IN + | INCLUDE + | INFIXOP0 of (string) + | INFIXOP1 of (string) + | INFIXOP2 of (string) + | INFIXOP3 of (string) + | INFIXOP4 of (string) + | DOTOP of (string) + | INHERIT + | INITIALIZER + | INT of (string * char option) + | LABEL of (string) + | LAZY + | LBRACE + | LBRACELESS + | LBRACKET + | LBRACKETBAR + | LBRACKETLESS + | LBRACKETGREATER + | LBRACKETPERCENT + | LBRACKETPERCENTPERCENT + | LESS + | LESSMINUS + | LET + | LIDENT of (string) + | LPAREN + | LBRACKETAT + | LBRACKETATAT + | LBRACKETATATAT + | MATCH + | METHOD + | MINUS + | MINUSDOT + | MINUSGREATER + | MODULE + | MUTABLE + | NEW + | NONREC + | OBJECT + | OF + | OPEN + | OPTLABEL of (string) + | OR + | PERCENT + | PLUS + | PLUSDOT + | PLUSEQ + | PREFIXOP of (string) + | PRIVATE + | QUESTION + | QUOTE + | RBRACE + | RBRACKET + | REC + | RPAREN + | SEMI + | SEMISEMI + | HASH + | HASHOP of (string) + | SIG + | STAR + | STRING of (string * string option) + | STRUCT + | THEN + | TILDE + | TO + | TRUE + | TRY + | TYPE + | UIDENT of (string) + | UNDERSCORE + | VAL + | VIRTUAL + | WHEN + | WHILE + | WITH + | COMMENT of (string * Location.t) + | DOCSTRING of (Docstrings.docstring) + | EOL + +open Parsing;; +let _ = parse_error;; +# 19 "ml/parser.mly" +open Location +open Asttypes +open Longident +open Parsetree +open Ast_helper +open Docstrings + +let mktyp d = Typ.mk ~loc:(symbol_rloc()) d +let mkpat d = Pat.mk ~loc:(symbol_rloc()) d +let mkexp d = Exp.mk ~loc:(symbol_rloc()) d +let mkmty ?attrs d = Mty.mk ~loc:(symbol_rloc()) ?attrs d +let mksig d = Sig.mk ~loc:(symbol_rloc()) d +let mkmod ?attrs d = Mod.mk ~loc:(symbol_rloc()) ?attrs d +let mkstr d = Str.mk ~loc:(symbol_rloc()) d +let mkcty ?attrs d = Cty.mk ~loc:(symbol_rloc()) ?attrs d +let mkctf ?attrs ?docs d = + Ctf.mk ~loc:(symbol_rloc()) ?attrs ?docs d +let mkcf ?attrs ?docs d = + Cf.mk ~loc:(symbol_rloc()) ?attrs ?docs d + +let mkrhs rhs pos = mkloc rhs (rhs_loc pos) + +let reloc_pat x = { x with ppat_loc = symbol_rloc () };; +let reloc_exp x = { x with pexp_loc = symbol_rloc () };; + +let mkoperator name pos = + let loc = rhs_loc pos in + Exp.mk ~loc (Pexp_ident(mkloc (Lident name) loc)) + +let mkpatvar name pos = + Pat.mk ~loc:(rhs_loc pos) (Ppat_var (mkrhs name pos)) + +(* + Ghost expressions and patterns: + expressions and patterns that 
do not appear explicitly in the + source file they have the loc_ghost flag set to true. + Then the profiler will not try to instrument them and the + -annot option will not try to display their type. + + Every grammar rule that generates an element with a location must + make at most one non-ghost element, the topmost one. + + How to tell whether your location must be ghost: + A location corresponds to a range of characters in the source file. + If the location contains a piece of code that is syntactically + valid (according to the documentation), and corresponds to the + AST node, then the location must be real; in all other cases, + it must be ghost. +*) +let ghexp d = Exp.mk ~loc:(symbol_gloc ()) d +let ghpat d = Pat.mk ~loc:(symbol_gloc ()) d +let ghtyp d = Typ.mk ~loc:(symbol_gloc ()) d +let ghloc d = { txt = d; loc = symbol_gloc () } +let ghstr d = Str.mk ~loc:(symbol_gloc()) d +let ghsig d = Sig.mk ~loc:(symbol_gloc()) d + +let mkinfix arg1 name arg2 = + mkexp(Pexp_apply(mkoperator name 2, [Nolabel, arg1; Nolabel, arg2])) + +let neg_string f = + if String.length f > 0 && f.[0] = '-' + then String.sub f 1 (String.length f - 1) + else "-" ^ f + +let mkuminus name arg = + match name, arg.pexp_desc with + | "-", Pexp_constant(Pconst_integer (n,m)) -> + mkexp(Pexp_constant(Pconst_integer(neg_string n,m))) + | ("-" | "-."), Pexp_constant(Pconst_float (f, m)) -> + mkexp(Pexp_constant(Pconst_float(neg_string f, m))) + | _ -> + mkexp(Pexp_apply(mkoperator ("~" ^ name) 1, [Nolabel, arg])) + +let mkuplus name arg = + let desc = arg.pexp_desc in + match name, desc with + | "+", Pexp_constant(Pconst_integer _) + | ("+" | "+."), Pexp_constant(Pconst_float _) -> mkexp desc + | _ -> + mkexp(Pexp_apply(mkoperator ("~" ^ name) 1, [Nolabel, arg])) + +let mkexp_cons consloc args loc = + Exp.mk ~loc (Pexp_construct(mkloc (Lident "::") consloc, Some args)) + +let mkpat_cons consloc args loc = + Pat.mk ~loc (Ppat_construct(mkloc (Lident "::") consloc, Some args)) + +let rec mktailexp nilloc = function + [] -> + let loc = { nilloc with loc_ghost = true } in + let nil = { txt = Lident "[]"; loc = loc } in + Exp.mk ~loc (Pexp_construct (nil, None)) + | e1 :: el -> + let exp_el = mktailexp nilloc el in + let loc = {loc_start = e1.pexp_loc.loc_start; + loc_end = exp_el.pexp_loc.loc_end; + loc_ghost = true} + in + let arg = Exp.mk ~loc (Pexp_tuple [e1; exp_el]) in + mkexp_cons {loc with loc_ghost = true} arg loc + +let rec mktailpat nilloc = function + [] -> + let loc = { nilloc with loc_ghost = true } in + let nil = { txt = Lident "[]"; loc = loc } in + Pat.mk ~loc (Ppat_construct (nil, None)) + | p1 :: pl -> + let pat_pl = mktailpat nilloc pl in + let loc = {loc_start = p1.ppat_loc.loc_start; + loc_end = pat_pl.ppat_loc.loc_end; + loc_ghost = true} + in + let arg = Pat.mk ~loc (Ppat_tuple [p1; pat_pl]) in + mkpat_cons {loc with loc_ghost = true} arg loc + +let mkstrexp e attrs = + { pstr_desc = Pstr_eval (e, attrs); pstr_loc = e.pexp_loc } + +let mkexp_constraint e (t1, t2) = + match t1, t2 with + | Some t, None -> ghexp(Pexp_constraint(e, t)) + | _, Some t -> ghexp(Pexp_coerce(e, t1, t)) + | None, None -> assert false + +let mkexp_opt_constraint e = function + | None -> e + | Some constraint_ -> mkexp_constraint e constraint_ + +let mkpat_opt_constraint p = function + | None -> p + | Some typ -> mkpat (Ppat_constraint(p, typ)) + +let array_function str name = + ghloc (Ldot(Lident str, (if !Clflags.fast then "unsafe_" ^ name else name))) + +let syntax_error () = + raise Syntaxerr.Escape_error + +let unclosed 
opening_name opening_num closing_name closing_num = + raise(Syntaxerr.Error(Syntaxerr.Unclosed(rhs_loc opening_num, opening_name, + rhs_loc closing_num, closing_name))) + +let expecting pos nonterm = + raise Syntaxerr.(Error(Expecting(rhs_loc pos, nonterm))) + +let not_expecting pos nonterm = + raise Syntaxerr.(Error(Not_expecting(rhs_loc pos, nonterm))) + + +let lapply p1 p2 = + if !Clflags.applicative_functors + then Lapply(p1, p2) + else raise (Syntaxerr.Error(Syntaxerr.Applicative_path (symbol_rloc()))) + +let exp_of_label lbl pos = + mkexp (Pexp_ident(mkrhs (Lident(Longident.last lbl)) pos)) + +let pat_of_label lbl pos = + mkpat (Ppat_var (mkrhs (Longident.last lbl) pos)) + +let mk_newtypes newtypes exp = + List.fold_right (fun newtype exp -> mkexp (Pexp_newtype (newtype, exp))) + newtypes exp + +let wrap_type_annotation newtypes core_type body = + let exp = mkexp(Pexp_constraint(body,core_type)) in + let exp = mk_newtypes newtypes exp in + (exp, ghtyp(Ptyp_poly(newtypes, Typ.varify_constructors newtypes core_type))) + +let wrap_exp_attrs body (ext, attrs) = + (* todo: keep exact location for the entire attribute *) + let body = {body with pexp_attributes = attrs @ body.pexp_attributes} in + match ext with + | None -> body + | Some id -> ghexp(Pexp_extension (id, PStr [mkstrexp body []])) + +let mkexp_attrs d attrs = + wrap_exp_attrs (mkexp d) attrs + +let wrap_typ_attrs typ (ext, attrs) = + (* todo: keep exact location for the entire attribute *) + let typ = {typ with ptyp_attributes = attrs @ typ.ptyp_attributes} in + match ext with + | None -> typ + | Some id -> ghtyp(Ptyp_extension (id, PTyp typ)) + +let mktyp_attrs d attrs = + wrap_typ_attrs (mktyp d) attrs + +let wrap_pat_attrs pat (ext, attrs) = + (* todo: keep exact location for the entire attribute *) + let pat = {pat with ppat_attributes = attrs @ pat.ppat_attributes} in + match ext with + | None -> pat + | Some id -> ghpat(Ppat_extension (id, PPat (pat, None))) + +let mkpat_attrs d attrs = + wrap_pat_attrs (mkpat d) attrs + +let wrap_class_type_attrs body attrs = + {body with pcty_attributes = attrs @ body.pcty_attributes} +let wrap_mod_attrs body attrs = + {body with pmod_attributes = attrs @ body.pmod_attributes} +let wrap_mty_attrs body attrs = + {body with pmty_attributes = attrs @ body.pmty_attributes} + +let wrap_str_ext body ext = + match ext with + | None -> body + | Some id -> ghstr(Pstr_extension ((id, PStr [body]), [])) + +let mkstr_ext d ext = + wrap_str_ext (mkstr d) ext + +let wrap_sig_ext body ext = + match ext with + | None -> body + | Some id -> ghsig(Psig_extension ((id, PSig [body]), [])) + +let mksig_ext d ext = + wrap_sig_ext (mksig d) ext + +let text_str pos = Str.text (rhs_text pos) +let text_sig pos = Sig.text (rhs_text pos) +let text_cstr pos = Cf.text (rhs_text pos) +let text_csig pos = Ctf.text (rhs_text pos) + + +let extra_text text pos items = + let pre_extras = rhs_pre_extra_text pos in + let post_extras = rhs_post_extra_text pos in + text pre_extras @ items @ text post_extras + +let extra_str pos items = extra_text Str.text pos items +let extra_sig pos items = extra_text Sig.text pos items +let extra_cstr pos items = extra_text Cf.text pos items +let extra_csig pos items = extra_text Ctf.text pos items + +let extra_rhs_core_type ct ~pos = + let docs = rhs_info pos in + { ct with ptyp_attributes = add_info_attrs docs ct.ptyp_attributes } + +type let_binding = + { lb_pattern: pattern; + lb_expression: expression; + lb_attributes: attributes; + lb_docs: docs Lazy.t; + lb_text: text Lazy.t; + 
lb_loc: Location.t; } + +type [@warning "-69"] let_bindings = + { lbs_bindings: let_binding list; + lbs_rec: rec_flag; + lbs_extension: string Asttypes.loc option; + lbs_loc: Location.t } + +let mklb first (p, e) attrs = + { lb_pattern = p; + lb_expression = e; + lb_attributes = attrs; + lb_docs = symbol_docs_lazy (); + lb_text = if first then empty_text_lazy + else symbol_text_lazy (); + lb_loc = symbol_rloc (); } + +let mklbs ext rf lb = + { lbs_bindings = [lb]; + lbs_rec = rf; + lbs_extension = ext ; + lbs_loc = symbol_rloc (); } + +let addlb lbs lb = + { lbs with lbs_bindings = lb :: lbs.lbs_bindings } + +let val_of_let_bindings lbs = + let bindings = + List.map + (fun lb -> + Vb.mk ~loc:lb.lb_loc ~attrs:lb.lb_attributes + ~docs:(Lazy.force lb.lb_docs) + ~text:(Lazy.force lb.lb_text) + lb.lb_pattern lb.lb_expression) + lbs.lbs_bindings + in + let str = mkstr(Pstr_value(lbs.lbs_rec, List.rev bindings)) in + match lbs.lbs_extension with + | None -> str + | Some id -> ghstr (Pstr_extension((id, PStr [str]), [])) + +let expr_of_let_bindings lbs body = + let bindings = + List.map + (fun lb -> + Vb.mk ~loc:lb.lb_loc ~attrs:lb.lb_attributes + lb.lb_pattern lb.lb_expression) + lbs.lbs_bindings + in + mkexp_attrs (Pexp_let(lbs.lbs_rec, List.rev bindings, body)) + (lbs.lbs_extension, []) + + + +(* Alternatively, we could keep the generic module type in the Parsetree + and extract the package type during type-checking. In that case, + the assertions below should be turned into explicit checks. *) +let package_type_of_module_type pmty = + let err loc s = + raise (Syntaxerr.Error (Syntaxerr.Invalid_package_type (loc, s))) + in + let map_cstr = function + | Pwith_type (lid, ptyp) -> + let loc = ptyp.ptype_loc in + if ptyp.ptype_params <> [] then + err loc "parametrized types are not supported"; + if ptyp.ptype_cstrs <> [] then + err loc "constrained types are not supported"; + if ptyp.ptype_private <> Public then + err loc "private types are not supported"; + + (* restrictions below are checked by the 'with_constraint' rule *) + assert (ptyp.ptype_kind = Ptype_abstract); + assert (ptyp.ptype_attributes = []); + let ty = + match ptyp.ptype_manifest with + | Some ty -> ty + | None -> assert false + in + (lid, ty) + | _ -> + err pmty.pmty_loc "only 'with type t =' constraints are supported" + in + match pmty with + | {pmty_desc = Pmty_ident lid} -> (lid, []) + | {pmty_desc = Pmty_with({pmty_desc = Pmty_ident lid}, cstrs)} -> + (lid, List.map map_cstr cstrs) + | _ -> + err pmty.pmty_loc + "only module type identifier and 'with type' constraints are supported" + + +# 466 "ml/parser.ml" +let yytransl_const = [| + 257 (* AMPERAMPER *); + 258 (* AMPERSAND *); + 259 (* AND *); + 260 (* AS *); + 261 (* ASSERT *); + 262 (* BACKQUOTE *); + 263 (* BANG *); + 264 (* BAR *); + 265 (* BARBAR *); + 266 (* BARRBRACKET *); + 267 (* BEGIN *); + 269 (* CLASS *); + 270 (* COLON *); + 271 (* COLONCOLON *); + 272 (* COLONEQUAL *); + 273 (* COLONGREATER *); + 274 (* COMMA *); + 275 (* CONSTRAINT *); + 276 (* DO *); + 277 (* DONE *); + 278 (* DOT *); + 279 (* DOTDOT *); + 280 (* DOWNTO *); + 281 (* ELSE *); + 282 (* END *); + 0 (* EOF *); + 283 (* EQUAL *); + 284 (* EXCEPTION *); + 285 (* EXTERNAL *); + 286 (* FALSE *); + 288 (* FOR *); + 289 (* FUN *); + 290 (* FUNCTION *); + 291 (* FUNCTOR *); + 292 (* GREATER *); + 293 (* GREATERRBRACE *); + 294 (* GREATERRBRACKET *); + 295 (* IF *); + 296 (* IN *); + 297 (* INCLUDE *); + 304 (* INHERIT *); + 305 (* INITIALIZER *); + 308 (* LAZY *); + 309 (* LBRACE *); + 310 (* LBRACELESS 
*); + 311 (* LBRACKET *); + 312 (* LBRACKETBAR *); + 313 (* LBRACKETLESS *); + 314 (* LBRACKETGREATER *); + 315 (* LBRACKETPERCENT *); + 316 (* LBRACKETPERCENTPERCENT *); + 317 (* LESS *); + 318 (* LESSMINUS *); + 319 (* LET *); + 321 (* LPAREN *); + 322 (* LBRACKETAT *); + 323 (* LBRACKETATAT *); + 324 (* LBRACKETATATAT *); + 325 (* MATCH *); + 326 (* METHOD *); + 327 (* MINUS *); + 328 (* MINUSDOT *); + 329 (* MINUSGREATER *); + 330 (* MODULE *); + 331 (* MUTABLE *); + 332 (* NEW *); + 333 (* NONREC *); + 334 (* OBJECT *); + 335 (* OF *); + 336 (* OPEN *); + 338 (* OR *); + 339 (* PERCENT *); + 340 (* PLUS *); + 341 (* PLUSDOT *); + 342 (* PLUSEQ *); + 344 (* PRIVATE *); + 345 (* QUESTION *); + 346 (* QUOTE *); + 347 (* RBRACE *); + 348 (* RBRACKET *); + 349 (* REC *); + 350 (* RPAREN *); + 351 (* SEMI *); + 352 (* SEMISEMI *); + 353 (* HASH *); + 355 (* SIG *); + 356 (* STAR *); + 358 (* STRUCT *); + 359 (* THEN *); + 360 (* TILDE *); + 361 (* TO *); + 362 (* TRUE *); + 363 (* TRY *); + 364 (* TYPE *); + 366 (* UNDERSCORE *); + 367 (* VAL *); + 368 (* VIRTUAL *); + 369 (* WHEN *); + 370 (* WHILE *); + 371 (* WITH *); + 374 (* EOL *); + 0|] + +let yytransl_block = [| + 268 (* CHAR *); + 287 (* FLOAT *); + 298 (* INFIXOP0 *); + 299 (* INFIXOP1 *); + 300 (* INFIXOP2 *); + 301 (* INFIXOP3 *); + 302 (* INFIXOP4 *); + 303 (* DOTOP *); + 306 (* INT *); + 307 (* LABEL *); + 320 (* LIDENT *); + 337 (* OPTLABEL *); + 343 (* PREFIXOP *); + 354 (* HASHOP *); + 357 (* STRING *); + 365 (* UIDENT *); + 372 (* COMMENT *); + 373 (* DOCSTRING *); + 0|] + +let yylhs = "\255\255\ +\001\000\002\000\003\000\004\000\005\000\011\000\011\000\012\000\ +\012\000\014\000\014\000\015\000\015\000\015\000\015\000\015\000\ +\015\000\015\000\015\000\015\000\018\000\018\000\018\000\018\000\ +\018\000\018\000\018\000\018\000\018\000\018\000\018\000\006\000\ +\006\000\024\000\024\000\024\000\025\000\025\000\025\000\025\000\ +\025\000\025\000\025\000\025\000\025\000\025\000\025\000\025\000\ +\025\000\025\000\037\000\041\000\041\000\041\000\032\000\033\000\ +\033\000\042\000\043\000\013\000\013\000\013\000\013\000\013\000\ +\013\000\013\000\013\000\013\000\013\000\013\000\007\000\007\000\ +\007\000\046\000\046\000\046\000\046\000\046\000\046\000\046\000\ +\046\000\046\000\046\000\046\000\046\000\046\000\046\000\035\000\ +\052\000\054\000\054\000\054\000\049\000\050\000\051\000\051\000\ +\055\000\056\000\057\000\057\000\034\000\059\000\059\000\061\000\ +\062\000\062\000\062\000\063\000\063\000\064\000\064\000\064\000\ +\064\000\064\000\064\000\065\000\065\000\065\000\065\000\066\000\ +\066\000\066\000\066\000\066\000\075\000\075\000\075\000\075\000\ +\075\000\075\000\075\000\078\000\079\000\079\000\080\000\080\000\ +\081\000\081\000\081\000\081\000\081\000\081\000\082\000\082\000\ +\082\000\085\000\067\000\036\000\036\000\086\000\087\000\009\000\ +\009\000\009\000\009\000\089\000\089\000\089\000\089\000\089\000\ +\089\000\089\000\089\000\094\000\094\000\091\000\091\000\090\000\ +\090\000\092\000\093\000\093\000\021\000\021\000\021\000\021\000\ +\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\ +\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\ +\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\ +\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\ +\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\ +\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\ +\021\000\021\000\021\000\021\000\021\000\021\000\096\000\096\000\ 
+\096\000\096\000\096\000\096\000\096\000\096\000\096\000\096\000\ +\096\000\096\000\096\000\096\000\096\000\096\000\096\000\096\000\ +\096\000\096\000\096\000\096\000\096\000\096\000\096\000\096\000\ +\096\000\096\000\096\000\096\000\096\000\096\000\096\000\096\000\ +\096\000\096\000\096\000\096\000\096\000\096\000\096\000\096\000\ +\096\000\096\000\096\000\096\000\096\000\096\000\096\000\096\000\ +\096\000\096\000\096\000\096\000\096\000\096\000\096\000\096\000\ +\096\000\096\000\096\000\096\000\097\000\097\000\115\000\115\000\ +\116\000\116\000\116\000\116\000\117\000\074\000\074\000\118\000\ +\118\000\118\000\118\000\118\000\118\000\026\000\026\000\123\000\ +\124\000\126\000\126\000\073\000\073\000\073\000\100\000\100\000\ +\127\000\127\000\127\000\101\000\101\000\101\000\101\000\102\000\ +\102\000\111\000\111\000\129\000\129\000\129\000\130\000\130\000\ +\114\000\114\000\132\000\132\000\112\000\112\000\070\000\070\000\ +\070\000\070\000\070\000\131\000\131\000\010\000\010\000\010\000\ +\010\000\010\000\010\000\010\000\010\000\010\000\010\000\121\000\ +\121\000\121\000\121\000\121\000\121\000\121\000\121\000\121\000\ +\134\000\134\000\134\000\134\000\095\000\095\000\122\000\122\000\ +\122\000\122\000\122\000\122\000\122\000\122\000\122\000\122\000\ +\122\000\122\000\122\000\122\000\122\000\122\000\122\000\122\000\ +\122\000\122\000\122\000\122\000\138\000\138\000\138\000\138\000\ +\138\000\138\000\138\000\133\000\133\000\133\000\135\000\135\000\ +\135\000\140\000\140\000\139\000\139\000\139\000\139\000\141\000\ +\141\000\142\000\142\000\028\000\143\000\143\000\027\000\029\000\ +\029\000\144\000\145\000\149\000\149\000\148\000\148\000\148\000\ +\148\000\148\000\148\000\148\000\148\000\148\000\148\000\148\000\ +\147\000\147\000\147\000\152\000\153\000\153\000\155\000\155\000\ +\156\000\154\000\154\000\154\000\157\000\060\000\060\000\150\000\ +\150\000\150\000\158\000\159\000\031\000\031\000\048\000\098\000\ +\161\000\161\000\161\000\161\000\162\000\162\000\151\000\151\000\ +\151\000\164\000\165\000\030\000\047\000\167\000\167\000\167\000\ +\167\000\167\000\167\000\168\000\168\000\168\000\169\000\170\000\ +\171\000\172\000\045\000\045\000\173\000\173\000\173\000\173\000\ +\174\000\174\000\120\000\120\000\071\000\071\000\166\000\166\000\ +\008\000\008\000\175\000\175\000\177\000\177\000\177\000\177\000\ +\177\000\128\000\128\000\179\000\179\000\179\000\179\000\179\000\ +\179\000\179\000\179\000\179\000\179\000\179\000\179\000\179\000\ +\179\000\179\000\179\000\179\000\179\000\179\000\022\000\183\000\ +\183\000\184\000\184\000\182\000\182\000\186\000\186\000\187\000\ +\187\000\185\000\185\000\178\000\178\000\076\000\076\000\163\000\ +\163\000\180\000\180\000\180\000\180\000\180\000\180\000\180\000\ +\190\000\188\000\189\000\068\000\110\000\110\000\110\000\110\000\ +\136\000\136\000\136\000\136\000\136\000\058\000\058\000\119\000\ +\119\000\119\000\119\000\119\000\191\000\191\000\191\000\191\000\ +\191\000\191\000\191\000\191\000\191\000\191\000\191\000\191\000\ +\191\000\191\000\191\000\191\000\191\000\191\000\191\000\191\000\ +\191\000\191\000\191\000\191\000\191\000\191\000\191\000\191\000\ +\191\000\160\000\160\000\160\000\160\000\160\000\160\000\109\000\ +\109\000\103\000\103\000\103\000\103\000\103\000\103\000\103\000\ +\108\000\108\000\137\000\137\000\016\000\016\000\176\000\176\000\ +\176\000\044\000\044\000\077\000\077\000\181\000\181\000\104\000\ +\125\000\125\000\146\000\146\000\105\000\105\000\072\000\072\000\ +\069\000\069\000\084\000\084\000\083\000\083\000\083\000\083\000\ 
+\083\000\053\000\053\000\099\000\099\000\113\000\113\000\106\000\ +\106\000\107\000\107\000\192\000\192\000\192\000\192\000\192\000\ +\192\000\192\000\192\000\192\000\192\000\192\000\192\000\192\000\ +\192\000\192\000\192\000\192\000\192\000\192\000\192\000\192\000\ +\192\000\192\000\192\000\192\000\192\000\192\000\192\000\192\000\ +\192\000\192\000\192\000\192\000\192\000\192\000\192\000\192\000\ +\192\000\192\000\192\000\192\000\192\000\192\000\192\000\192\000\ +\192\000\192\000\192\000\192\000\192\000\192\000\088\000\088\000\ +\019\000\194\000\039\000\023\000\023\000\017\000\017\000\040\000\ +\040\000\040\000\020\000\038\000\193\000\193\000\193\000\193\000\ +\193\000\000\000\000\000\000\000\000\000\000\000" + +let yylen = "\002\000\ +\002\000\002\000\002\000\002\000\002\000\002\000\005\000\001\000\ +\001\000\002\000\001\000\001\000\004\000\004\000\005\000\002\000\ +\003\000\001\000\002\000\001\000\005\000\005\000\003\000\003\000\ +\005\000\007\000\009\000\007\000\006\000\006\000\005\000\003\000\ +\001\000\000\000\002\000\002\000\001\000\001\000\001\000\001\000\ +\001\000\001\000\001\000\001\000\001\000\001\000\001\000\001\000\ +\002\000\001\000\004\000\002\000\004\000\002\000\005\000\001\000\ +\002\000\006\000\005\000\001\000\004\000\004\000\005\000\003\000\ +\003\000\005\000\003\000\003\000\001\000\002\000\000\000\002\000\ +\002\000\001\000\001\000\001\000\001\000\001\000\001\000\001\000\ +\001\000\001\000\001\000\001\000\001\000\002\000\001\000\005\000\ +\004\000\002\000\006\000\003\000\005\000\006\000\001\000\002\000\ +\007\000\006\000\000\000\002\000\006\000\000\000\003\000\002\000\ +\003\000\005\000\000\000\000\000\002\000\003\000\003\000\004\000\ +\004\000\002\000\001\000\007\000\007\000\006\000\007\000\007\000\ +\007\000\005\000\008\000\011\000\004\000\001\000\004\000\004\000\ +\002\000\001\000\007\000\002\000\003\000\000\000\000\000\002\000\ +\004\000\004\000\007\000\004\000\002\000\001\000\005\000\005\000\ +\003\000\003\000\003\000\001\000\002\000\009\000\008\000\001\000\ +\002\000\003\000\005\000\005\000\002\000\005\000\002\000\004\000\ +\002\000\002\000\001\000\001\000\001\000\000\000\002\000\001\000\ +\003\000\001\000\001\000\003\000\001\000\002\000\003\000\007\000\ +\006\000\007\000\004\000\004\000\007\000\006\000\006\000\005\000\ +\001\000\002\000\002\000\007\000\005\000\006\000\010\000\003\000\ +\003\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\ +\003\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\ +\003\000\003\000\003\000\003\000\002\000\002\000\005\000\007\000\ +\007\000\007\000\007\000\007\000\009\000\009\000\009\000\003\000\ +\003\000\003\000\004\000\004\000\002\000\001\000\001\000\001\000\ +\001\000\001\000\003\000\003\000\004\000\003\000\004\000\004\000\ +\003\000\005\000\004\000\005\000\005\000\005\000\005\000\005\000\ +\005\000\005\000\005\000\005\000\005\000\005\000\007\000\007\000\ +\007\000\007\000\007\000\007\000\005\000\003\000\003\000\005\000\ +\005\000\004\000\004\000\002\000\006\000\004\000\006\000\004\000\ +\004\000\006\000\004\000\006\000\002\000\002\000\003\000\003\000\ +\002\000\005\000\004\000\005\000\003\000\003\000\005\000\007\000\ +\006\000\009\000\008\000\001\000\001\000\002\000\001\000\001\000\ +\002\000\002\000\002\000\002\000\001\000\001\000\002\000\002\000\ +\004\000\007\000\008\000\003\000\005\000\001\000\002\000\005\000\ +\004\000\001\000\003\000\002\000\002\000\005\000\001\000\003\000\ +\003\000\005\000\003\000\002\000\004\000\002\000\005\000\003\000\ 
+\003\000\003\000\001\000\001\000\003\000\002\000\004\000\002\000\ +\002\000\003\000\003\000\001\000\001\000\003\000\002\000\004\000\ +\002\000\002\000\002\000\001\000\000\000\003\000\003\000\001\000\ +\003\000\003\000\003\000\003\000\003\000\002\000\001\000\003\000\ +\003\000\001\000\003\000\003\000\003\000\003\000\002\000\001\000\ +\001\000\002\000\002\000\003\000\001\000\001\000\001\000\001\000\ +\003\000\001\000\001\000\002\000\001\000\003\000\004\000\004\000\ +\005\000\005\000\004\000\003\000\003\000\005\000\005\000\004\000\ +\005\000\007\000\007\000\001\000\003\000\003\000\004\000\004\000\ +\004\000\002\000\004\000\003\000\003\000\003\000\003\000\003\000\ +\003\000\001\000\003\000\001\000\002\000\004\000\003\000\004\000\ +\002\000\002\000\000\000\006\000\001\000\002\000\008\000\001\000\ +\002\000\008\000\007\000\003\000\000\000\000\000\002\000\003\000\ +\002\000\003\000\002\000\003\000\005\000\005\000\005\000\007\000\ +\000\000\001\000\003\000\002\000\001\000\003\000\002\000\001\000\ +\002\000\000\000\001\000\001\000\002\000\001\000\003\000\001\000\ +\001\000\002\000\003\000\004\000\001\000\007\000\006\000\003\000\ +\000\000\002\000\004\000\002\000\001\000\003\000\001\000\001\000\ +\002\000\005\000\007\000\009\000\009\000\001\000\001\000\001\000\ +\001\000\002\000\002\000\001\000\001\000\002\000\003\000\004\000\ +\004\000\005\000\001\000\003\000\006\000\005\000\004\000\004\000\ +\001\000\002\000\002\000\003\000\001\000\003\000\001\000\003\000\ +\001\000\002\000\001\000\004\000\001\000\006\000\004\000\005\000\ +\003\000\001\000\003\000\002\000\001\000\001\000\002\000\004\000\ +\003\000\002\000\002\000\003\000\005\000\003\000\004\000\005\000\ +\004\000\002\000\004\000\006\000\005\000\001\000\001\000\001\000\ +\003\000\001\000\001\000\005\000\002\000\001\000\000\000\001\000\ +\003\000\001\000\002\000\001\000\003\000\001\000\003\000\001\000\ +\003\000\002\000\002\000\001\000\001\000\001\000\001\000\001\000\ +\004\000\006\000\002\000\001\000\001\000\001\000\001\000\001\000\ +\001\000\002\000\002\000\002\000\002\000\001\000\001\000\001\000\ +\003\000\003\000\002\000\003\000\001\000\001\000\001\000\001\000\ +\001\000\001\000\003\000\004\000\003\000\004\000\003\000\004\000\ +\001\000\001\000\001\000\001\000\001\000\001\000\001\000\001\000\ +\001\000\001\000\001\000\001\000\001\000\001\000\001\000\001\000\ +\001\000\001\000\002\000\002\000\003\000\001\000\001\000\001\000\ +\003\000\001\000\005\000\002\000\002\000\003\000\001\000\001\000\ +\001\000\003\000\001\000\003\000\001\000\003\000\001\000\003\000\ +\004\000\001\000\003\000\001\000\003\000\001\000\003\000\002\000\ +\000\000\001\000\000\000\001\000\001\000\001\000\000\000\001\000\ +\000\000\001\000\000\000\001\000\000\000\001\000\001\000\002\000\ +\002\000\000\000\001\000\000\000\001\000\000\000\001\000\001\000\ +\001\000\001\000\001\000\001\000\001\000\001\000\001\000\001\000\ +\001\000\001\000\001\000\001\000\001\000\001\000\001\000\001\000\ +\001\000\001\000\001\000\001\000\001\000\001\000\001\000\001\000\ +\001\000\001\000\001\000\001\000\001\000\001\000\001\000\001\000\ +\001\000\001\000\001\000\001\000\001\000\001\000\001\000\001\000\ +\001\000\001\000\001\000\001\000\001\000\001\000\001\000\001\000\ +\001\000\001\000\001\000\001\000\001\000\001\000\001\000\003\000\ +\004\000\004\000\004\000\000\000\002\000\000\000\002\000\000\000\ +\002\000\003\000\004\000\004\000\001\000\002\000\002\000\002\000\ +\004\000\002\000\002\000\002\000\002\000\002\000" + +let yydefred = "\000\000\ 
+\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\054\002\000\000\000\000\000\000\111\002\056\002\ +\000\000\000\000\000\000\000\000\000\000\053\002\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\152\002\153\002\000\000\000\000\000\000\154\002\ +\155\002\000\000\000\000\055\002\112\002\000\000\000\000\117\002\ +\230\000\000\000\000\000\226\002\000\000\000\000\000\000\036\001\ +\000\000\033\000\000\000\000\000\038\000\039\000\000\000\041\000\ +\042\000\043\000\000\000\045\000\046\000\000\000\048\000\000\000\ +\050\000\056\000\205\001\000\000\148\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\231\000\232\000\104\002\054\001\168\001\ +\000\000\000\000\000\000\000\000\000\000\227\002\000\000\075\000\ +\074\000\000\000\082\000\083\000\000\000\000\000\087\000\000\000\ +\077\000\078\000\079\000\080\000\000\000\084\000\095\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\119\002\005\002\228\002\000\000\022\002\000\000\006\002\ +\249\001\000\000\000\000\253\001\000\000\229\002\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\064\002\000\000\000\000\ +\000\000\000\000\119\001\230\002\000\000\000\000\140\001\113\001\ +\000\000\000\000\057\002\117\001\118\001\000\000\103\001\000\000\ +\125\001\000\000\000\000\000\000\000\000\063\002\062\002\128\002\ +\022\001\233\000\234\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\075\001\000\000\025\001\052\002\000\000\000\000\ +\000\000\108\002\000\000\000\000\012\001\000\000\158\002\159\002\ +\160\002\161\002\162\002\163\002\164\002\165\002\166\002\167\002\ +\168\002\169\002\170\002\171\002\172\002\173\002\174\002\175\002\ +\176\002\177\002\178\002\179\002\180\002\181\002\182\002\156\002\ +\183\002\184\002\185\002\186\002\187\002\188\002\189\002\190\002\ +\191\002\192\002\193\002\194\002\195\002\196\002\197\002\198\002\ +\199\002\200\002\201\002\157\002\202\002\203\002\204\002\205\002\ +\206\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\067\002\094\002\093\002\000\000\092\002\000\000\095\002\088\002\ +\090\002\070\002\071\002\072\002\073\002\074\002\000\000\089\002\ +\000\000\000\000\000\000\091\002\097\002\000\000\000\000\096\002\ +\000\000\109\002\081\002\087\002\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\147\002\000\000\021\001\035\000\000\000\ +\000\000\000\000\000\000\001\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\229\000\ +\000\000\036\000\000\000\000\000\000\000\055\001\000\000\169\001\ +\000\000\057\000\000\000\149\000\049\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\037\001\040\001\000\000\000\000\000\000\213\000\214\000\000\000\ +\000\000\000\000\072\000\000\000\002\000\086\000\073\000\000\000\ +\096\000\000\000\115\002\000\000\027\002\000\000\000\000\149\002\ +\000\000\018\002\000\000\048\002\010\002\000\000\000\000\000\000\ +\000\000\000\000\000\000\045\002\000\000\000\000\000\000\000\000\ +\000\000\000\000\004\002\126\002\000\000\011\002\003\000\250\001\ +\000\000\000\000\000\000\000\000\000\000\000\000\007\002\004\000\ +\000\000\000\000\113\002\000\000\000\000\000\000\000\000\000\000\ +\000\000\146\001\000\000\082\002\000\000\086\002\000\000\000\000\ 
+\084\002\069\002\000\000\059\002\058\002\061\002\060\002\124\001\ +\000\000\000\000\000\000\000\000\005\000\102\001\000\000\114\001\ +\115\001\000\000\000\000\000\000\000\000\217\002\000\000\000\000\ +\000\000\000\000\238\000\000\000\000\000\102\002\000\000\000\000\ +\103\002\098\002\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\163\000\122\001\123\001\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\018\000\020\000\ +\000\000\000\000\000\000\000\000\000\000\092\001\000\000\007\001\ +\006\001\000\000\000\000\024\001\023\001\000\000\081\001\000\000\ +\000\000\000\000\000\000\000\000\221\002\000\000\000\000\000\000\ +\000\000\000\000\000\000\130\002\000\000\110\002\000\000\000\000\ +\000\000\068\002\000\000\236\000\235\000\000\000\066\002\065\002\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\108\000\ +\000\000\000\000\132\002\000\000\000\000\000\000\000\000\032\000\ +\213\002\000\000\000\000\000\000\000\000\000\000\118\002\105\002\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\154\000\000\000\ +\000\000\175\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\045\001\043\001\029\001\000\000\042\001\038\001\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\069\000\060\000\ +\122\002\000\000\000\000\000\000\000\000\000\000\026\002\000\000\ +\024\002\000\000\029\002\014\002\000\000\000\000\000\000\000\000\ +\051\002\009\002\042\002\043\002\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\040\002\000\000\116\002\120\002\000\000\ +\000\000\000\000\012\002\101\001\116\001\000\000\000\000\000\000\ +\142\001\141\001\000\000\000\000\000\000\000\000\000\000\133\001\ +\000\000\132\001\095\001\094\001\100\001\000\000\098\001\000\000\ +\150\001\000\000\000\000\000\000\126\001\000\000\121\001\000\000\ +\218\002\215\002\000\000\000\000\000\000\241\000\000\000\000\000\ +\000\000\239\000\237\000\140\002\000\000\099\002\000\000\100\002\ +\000\000\000\000\000\000\000\000\085\002\000\000\083\002\000\000\ +\000\000\162\000\000\000\164\000\000\000\165\000\159\000\170\000\ +\000\000\157\000\000\000\161\000\000\000\000\000\000\000\000\000\ +\180\000\000\000\000\000\063\001\000\000\000\000\000\000\000\000\ +\000\000\000\000\016\000\019\000\051\000\000\000\000\000\074\001\ +\090\001\000\000\091\001\000\000\000\000\077\001\000\000\082\001\ +\000\000\017\001\016\001\011\001\010\001\222\002\000\000\000\000\ +\219\002\208\002\220\002\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\112\001\000\000\000\000\000\000\000\000\ +\000\000\240\000\211\002\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\228\000\227\000\000\000\000\000\ +\000\000\000\000\196\001\195\001\000\000\186\001\000\000\000\000\ +\000\000\000\000\000\000\027\001\000\000\019\001\000\000\014\001\ +\000\000\000\000\000\000\243\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\070\000\089\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\015\002\030\002\000\000\000\000\ +\000\000\019\002\017\002\000\000\000\000\000\000\247\001\000\000\ +\000\000\000\000\000\000\000\000\008\002\000\000\000\000\127\002\ +\000\000\000\000\121\002\252\001\114\002\000\000\000\000\000\000\ 
+\159\001\000\000\144\001\143\001\147\001\145\001\000\000\136\001\ +\000\000\127\001\131\001\128\001\000\000\209\002\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\101\002\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\210\001\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\068\001\070\001\000\000\000\000\ +\000\000\000\000\011\000\000\000\000\000\024\000\000\000\023\000\ +\000\000\017\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\056\001\000\000\000\000\000\000\000\000\048\001\000\000\ +\000\000\000\000\000\000\000\000\000\000\111\001\000\000\000\000\ +\080\002\078\002\076\002\000\000\031\001\000\000\000\000\000\000\ +\000\000\000\000\000\000\006\000\008\000\009\000\000\000\054\000\ +\055\000\000\000\105\000\000\000\000\000\000\000\000\000\000\000\ +\115\000\109\000\088\000\184\000\000\000\189\001\000\000\000\000\ +\000\000\000\000\192\001\188\001\000\000\000\000\210\002\009\001\ +\008\001\028\001\026\001\000\000\000\000\107\002\000\000\244\000\ +\242\000\155\000\057\001\000\000\000\000\000\000\005\001\248\000\ +\000\000\246\000\000\000\000\000\000\000\000\000\000\000\254\000\ +\000\000\250\000\000\000\252\000\000\000\000\000\068\000\067\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\235\001\000\000\ +\123\002\000\000\000\000\000\000\000\000\000\000\093\000\000\000\ +\000\000\025\002\032\002\000\000\016\002\034\002\000\000\000\000\ +\000\000\000\000\000\000\000\000\021\002\013\002\000\000\041\002\ +\000\000\151\002\158\001\000\000\137\001\135\001\134\001\130\001\ +\129\001\247\000\245\000\000\000\000\000\000\000\253\000\249\000\ +\251\000\000\000\000\000\198\001\000\000\138\002\000\000\000\000\ +\215\001\000\000\000\000\000\000\000\000\207\001\000\000\134\002\ +\133\002\000\000\047\001\000\000\000\000\000\000\000\000\000\000\ +\000\000\160\000\000\000\000\000\067\001\065\001\000\000\064\001\ +\000\000\000\000\010\000\000\000\000\000\014\000\013\000\000\000\ +\225\002\177\000\208\001\000\000\000\000\000\000\000\000\060\001\ +\000\000\000\000\000\000\058\001\061\001\105\001\104\001\110\001\ +\000\000\108\001\000\000\153\001\000\000\052\001\000\000\000\000\ +\033\001\000\000\000\000\000\000\101\000\058\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\114\000\ +\000\000\000\000\187\001\000\000\173\001\000\000\191\001\164\001\ +\190\000\020\001\018\001\015\001\013\001\000\000\173\001\059\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\062\000\061\000\000\000\000\000\000\000\ +\000\000\094\000\092\000\000\000\000\000\000\000\000\000\028\002\ +\020\002\035\002\248\001\244\001\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\103\000\000\000\193\001\000\000\000\000\ +\214\001\217\001\211\001\000\000\206\001\000\000\000\000\000\000\ +\181\000\000\000\167\000\158\000\156\000\000\000\069\001\000\000\ +\000\000\000\000\000\000\031\000\000\000\000\000\025\000\022\000\ +\021\000\176\000\178\000\000\000\000\000\000\000\049\001\000\000\ +\000\000\032\001\000\000\000\000\106\000\000\000\000\000\000\000\ +\000\000\111\000\000\000\110\000\190\001\000\000\179\001\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\200\001\201\001\ +\000\000\000\000\136\002\000\000\000\000\000\000\000\000\000\000\ +\000\000\004\001\000\000\000\001\000\000\002\001\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\236\001\ +\097\000\000\000\000\000\098\000\033\002\050\002\139\001\138\001\ 
+\003\001\255\000\001\001\199\001\197\001\000\000\000\000\124\002\ +\000\000\130\000\000\000\126\000\000\000\000\000\166\001\167\001\ +\000\000\071\001\066\001\029\000\000\000\030\000\000\000\000\000\ +\000\000\000\000\059\001\053\001\007\000\000\000\112\000\113\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\180\001\ +\000\000\000\000\000\000\000\000\202\001\000\000\000\000\170\001\ +\000\000\000\000\000\000\222\001\223\001\224\001\225\001\035\001\ +\000\000\171\001\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\228\001\ +\229\001\000\000\000\000\000\000\129\000\150\000\000\000\000\000\ +\000\000\000\000\026\000\028\000\000\000\000\000\062\001\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\203\001\000\000\172\001\000\000\000\000\000\000\220\001\ +\226\001\227\001\034\001\151\000\000\000\000\000\000\000\238\001\ +\242\001\173\001\091\000\000\000\221\001\230\001\000\000\000\000\ +\000\000\000\000\135\000\125\002\000\000\191\000\000\000\000\000\ +\050\001\000\000\000\000\000\000\122\000\000\000\000\000\000\000\ +\000\000\204\001\183\001\000\000\000\000\181\001\000\000\000\000\ +\000\000\000\000\231\001\000\000\125\000\000\000\000\000\128\000\ +\127\000\000\000\000\000\027\000\051\001\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\118\000\000\000\000\000\ +\000\000\000\000\232\001\233\001\000\000\133\000\000\000\000\000\ +\000\000\000\000\000\000\142\000\136\000\219\001\120\000\121\000\ +\000\000\000\000\000\000\000\000\000\000\119\000\184\001\234\001\ +\000\000\000\000\000\000\000\000\000\000\141\000\000\000\123\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\140\000\137\000\144\002\145\002\ +\000\000\000\000\000\000\000\000\138\000\000\000\000\000\000\000\ +\000\000\000\000\124\000\000\000\000\000\000\000\139\000\000\000\ +\000\000" + +let yydgoto = "\006\000\ +\052\000\094\000\124\000\134\000\148\000\245\001\095\000\153\005\ +\054\000\171\001\250\002\175\003\065\003\132\003\200\002\055\000\ +\190\001\223\001\072\001\056\000\057\000\066\003\046\001\058\000\ +\059\000\136\000\061\000\062\000\063\000\064\000\065\000\066\000\ +\067\000\068\000\069\000\070\000\071\000\072\000\073\000\000\001\ +\251\002\074\000\082\001\088\002\238\003\104\000\105\000\075\000\ +\107\000\108\000\109\000\110\000\037\001\049\003\111\000\113\001\ +\168\003\089\002\102\003\026\004\015\002\016\002\255\002\186\003\ +\103\004\101\004\199\004\076\000\031\004\075\004\154\005\213\004\ +\076\004\117\003\003\005\136\001\004\005\114\005\115\005\146\005\ +\173\005\203\005\199\005\165\002\092\005\077\000\084\001\250\000\ +\192\002\120\003\047\004\121\003\119\003\183\002\152\000\078\000\ +\096\001\228\002\121\001\195\002\193\002\079\000\080\000\081\000\ +\042\004\082\000\083\000\185\000\084\000\085\000\186\000\196\000\ +\239\001\192\000\097\001\098\001\074\002\232\002\086\000\155\005\ +\234\002\157\000\087\000\078\001\253\001\077\004\196\002\127\000\ +\187\000\188\000\231\001\193\000\158\000\159\000\237\002\160\000\ +\128\000\161\000\158\001\161\001\159\001\128\002\167\004\088\000\ +\080\001\020\002\005\003\109\004\218\004\214\004\032\004\006\003\ +\191\003\007\003\196\003\028\004\158\004\215\004\216\004\217\004\ +\172\002\106\003\107\003\033\004\034\004\062\003\043\005\063\005\ +\044\005\045\005\046\005\047\005\239\003\059\005\129\000\130\000\ +\131\000\132\000\133\000\129\001\142\001\095\002\096\002\097\002\ 
+\255\003\055\003\252\003\130\001\131\001\132\001\030\001\251\000\ +\246\001\047\001" + +let yysindex = "\180\007\ +\119\061\200\008\016\047\124\064\160\067\000\000\076\004\241\002\ +\244\009\076\004\000\000\236\254\076\004\076\004\000\000\000\000\ +\076\004\076\004\076\004\076\004\076\004\000\000\076\004\044\067\ +\174\002\205\061\037\062\122\057\122\057\068\003\000\000\232\054\ +\122\057\076\004\000\000\000\000\036\004\076\004\106\000\000\000\ +\000\000\244\009\119\061\000\000\000\000\076\004\076\004\000\000\ +\000\000\076\004\076\004\000\000\254\000\102\000\157\000\000\000\ +\225\072\000\000\222\005\236\255\000\000\000\000\241\000\000\000\ +\000\000\000\000\017\001\000\000\000\000\024\002\000\000\102\000\ +\000\000\000\000\000\000\089\001\000\000\034\069\015\002\244\009\ +\244\009\124\064\124\064\000\000\000\000\000\000\000\000\000\000\ +\076\004\076\004\036\004\200\008\076\004\000\000\140\003\000\000\ +\000\000\241\000\000\000\000\000\024\002\102\000\000\000\200\008\ +\000\000\000\000\000\000\000\000\113\002\000\000\000\000\145\007\ +\220\002\050\255\122\009\044\003\165\016\016\047\054\003\241\002\ +\021\003\000\000\000\000\000\000\038\000\000\000\023\003\000\000\ +\000\000\115\001\232\000\000\000\061\002\000\000\216\004\236\255\ +\076\004\076\004\035\003\163\066\226\066\000\000\088\059\018\004\ +\129\004\086\003\000\000\000\000\067\000\251\003\000\000\000\000\ +\160\067\160\067\000\000\000\000\000\000\039\004\000\000\107\004\ +\000\000\122\057\122\057\033\004\244\009\000\000\000\000\000\000\ +\000\000\000\000\000\000\122\062\076\004\041\002\097\005\160\067\ +\040\066\220\002\124\064\019\002\244\009\000\000\188\004\113\001\ +\212\002\117\255\000\000\127\004\000\000\000\000\246\004\165\002\ +\224\004\000\000\073\073\248\004\000\000\248\004\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\032\061\080\005\032\061\076\004\076\004\106\000\022\005\ +\000\000\000\000\000\000\244\009\000\000\036\005\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\083\005\000\000\ +\000\000\000\000\165\000\000\000\000\000\000\000\000\000\000\000\ +\244\009\000\000\000\000\000\000\184\255\130\255\032\061\124\064\ +\076\004\160\255\070\005\000\000\076\004\000\000\000\000\124\064\ +\069\005\097\005\124\064\000\000\122\057\222\005\102\000\225\004\ +\124\064\124\064\124\064\124\064\124\064\124\064\124\064\124\064\ +\124\064\124\064\124\064\124\064\124\064\124\064\124\064\124\064\ +\124\064\124\064\124\064\124\064\124\064\207\062\124\064\000\000\ +\076\004\000\000\173\005\033\004\124\064\000\000\033\004\000\000\ +\033\004\000\000\033\004\000\000\000\000\124\064\104\003\099\005\ +\244\009\244\009\150\005\157\005\244\009\150\005\119\002\041\069\ +\000\000\000\000\124\064\119\002\119\002\000\000\000\000\041\002\ +\219\003\168\004\000\000\069\005\000\000\000\000\000\000\033\004\ +\000\000\174\004\000\000\017\255\000\000\138\005\235\005\000\000\ +\174\004\000\000\174\004\000\000\000\000\000\000\233\005\163\005\ +\231\005\043\017\043\017\000\000\016\047\076\004\033\004\183\000\ +\198\005\004\006\000\000\000\000\255\005\000\000\000\000\000\000\ +\090\008\094\003\170\005\194\005\016\047\021\003\000\000\000\000\ 
+\160\067\168\068\000\000\010\006\034\006\203\255\224\005\037\004\ +\236\005\000\000\236\005\000\000\018\004\000\000\165\000\129\004\ +\000\000\000\000\076\001\000\000\000\000\000\000\000\000\000\000\ +\053\002\152\013\239\059\044\060\000\000\000\000\144\003\000\000\ +\000\000\160\067\076\003\032\061\033\004\000\000\033\004\119\002\ +\189\004\102\005\000\000\205\001\227\005\000\000\251\005\158\000\ +\000\000\000\000\009\002\006\070\077\006\128\003\168\068\011\058\ +\104\002\136\005\205\005\188\065\000\000\000\000\000\000\160\067\ +\241\005\033\004\141\001\033\004\115\005\072\006\000\000\000\000\ +\119\002\143\005\035\003\070\006\214\007\000\000\078\006\000\000\ +\000\000\035\003\124\064\000\000\000\000\157\005\000\000\124\064\ +\118\255\051\003\200\073\160\067\000\000\020\006\122\057\023\006\ +\041\002\009\006\076\004\000\000\229\050\000\000\022\006\028\006\ +\029\006\000\000\019\002\000\000\000\000\038\006\000\000\000\000\ +\041\006\027\006\241\002\037\006\178\002\160\067\232\002\000\000\ +\043\006\032\006\000\000\029\005\122\006\123\006\032\061\000\000\ +\000\000\044\067\116\003\036\063\124\063\087\055\000\000\000\000\ +\166\073\166\073\134\073\247\007\073\073\134\073\239\009\239\009\ +\239\009\239\009\089\002\104\006\104\006\239\009\089\002\089\002\ +\134\073\104\006\089\002\089\002\089\002\122\057\000\000\104\006\ +\229\050\000\000\029\005\044\006\227\005\073\073\124\064\124\064\ +\124\064\170\004\092\006\124\064\124\064\124\064\119\002\119\002\ +\000\000\000\000\000\000\218\004\000\000\000\000\134\073\027\001\ +\033\004\219\003\048\006\033\004\000\000\211\002\000\000\000\000\ +\000\000\123\002\055\006\186\002\029\005\057\006\000\000\199\255\ +\000\000\155\006\000\000\000\000\174\004\091\001\211\255\062\048\ +\000\000\000\000\000\000\000\000\096\006\219\003\016\047\159\002\ +\016\047\016\047\119\003\000\000\071\006\000\000\000\000\021\001\ +\241\002\097\006\000\000\000\000\000\000\121\003\016\047\148\006\ +\000\000\000\000\053\003\160\067\029\000\108\005\067\006\000\000\ +\097\011\000\000\000\000\000\000\000\000\179\002\000\000\162\006\ +\000\000\173\000\031\067\178\059\000\000\173\000\000\000\094\006\ +\000\000\000\000\124\064\124\064\235\004\000\000\124\064\124\064\ +\124\064\000\000\000\000\000\000\132\006\000\000\095\006\000\000\ +\019\015\074\002\019\015\033\004\000\000\188\006\000\000\016\047\ +\124\064\000\000\126\006\000\000\160\067\000\000\000\000\000\000\ +\127\006\000\000\127\006\000\000\090\008\122\058\124\064\188\065\ +\000\000\108\000\184\006\000\000\124\064\130\006\033\004\073\001\ +\119\061\155\001\000\000\000\000\000\000\087\006\000\000\000\000\ +\000\000\161\000\000\000\033\004\124\064\000\000\073\073\000\000\ +\073\073\000\000\000\000\000\000\000\000\000\000\033\004\243\000\ +\000\000\000\000\000\000\157\006\027\001\178\002\043\006\102\000\ +\100\065\068\005\190\006\000\000\187\006\146\006\149\006\153\006\ +\021\002\000\000\000\000\220\002\191\006\178\002\219\003\019\002\ +\078\003\178\002\102\000\007\002\000\000\000\000\169\001\201\003\ +\091\005\103\004\000\000\000\000\176\003\000\000\244\254\016\047\ +\124\064\125\006\221\255\000\000\255\002\000\000\248\004\000\000\ +\248\004\128\006\165\000\000\000\165\255\124\064\102\000\156\006\ +\178\002\132\006\073\073\038\005\063\000\190\255\162\005\124\064\ +\085\070\117\070\195\070\130\006\094\255\145\006\200\008\219\003\ +\129\002\000\000\000\000\186\003\212\006\219\003\043\006\214\004\ +\102\000\176\003\214\006\174\004\000\000\000\000\016\047\057\000\ +\224\006\000\000\000\000\241\002\057\255\033\004\000\000\016\047\ 
+\180\001\137\006\033\004\021\003\000\000\097\006\159\006\000\000\ +\090\008\124\006\000\000\000\000\000\000\033\004\160\067\142\006\ +\000\000\037\004\000\000\000\000\000\000\000\000\149\000\000\000\ +\223\255\000\000\000\000\000\000\208\001\000\000\098\000\245\255\ +\181\005\227\070\049\071\081\071\103\004\174\006\000\000\164\006\ +\000\000\172\006\071\006\158\006\169\000\225\006\033\004\000\000\ +\102\000\144\000\182\255\126\006\154\006\125\005\226\006\226\006\ +\237\006\166\006\179\006\126\006\000\000\000\000\210\063\124\064\ +\160\067\041\073\000\000\044\005\124\064\000\000\219\003\000\000\ +\030\003\000\000\016\047\073\073\124\064\124\064\033\004\216\006\ +\060\255\000\000\028\009\124\064\233\058\234\006\000\000\161\065\ +\059\002\105\060\166\060\227\060\124\064\000\000\016\047\160\067\ +\000\000\000\000\000\000\015\000\000\000\160\067\219\003\102\000\ +\102\000\175\001\226\005\000\000\000\000\000\000\248\006\000\000\ +\000\000\016\047\000\000\033\004\033\004\106\000\106\000\102\000\ +\000\000\000\000\000\000\000\000\160\067\000\000\217\000\236\006\ +\180\006\241\002\000\000\000\000\234\005\244\006\000\000\000\000\ +\000\000\000\000\000\000\110\000\122\005\000\000\019\002\000\000\ +\000\000\000\000\000\000\236\006\102\000\203\006\000\000\000\000\ +\206\006\000\000\210\006\124\064\124\064\124\064\073\073\000\000\ +\211\006\000\000\217\006\000\000\223\006\199\005\000\000\000\000\ +\033\004\120\004\180\001\043\006\029\005\015\007\000\000\000\000\ +\000\000\219\003\180\001\201\003\061\001\008\007\000\000\200\006\ +\219\003\000\000\000\000\087\001\000\000\000\000\074\255\000\000\ +\016\047\241\002\193\006\097\006\000\000\000\000\016\047\000\000\ +\037\004\000\000\000\000\219\003\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\124\064\124\064\124\064\000\000\000\000\ +\000\000\228\255\201\006\000\000\007\007\000\000\157\005\208\006\ +\000\000\164\006\090\008\184\000\102\000\000\000\204\006\000\000\ +\000\000\124\064\000\000\188\065\016\047\124\064\209\006\215\006\ +\016\047\000\000\124\064\218\006\000\000\000\000\219\006\000\000\ +\124\064\019\002\000\000\174\069\097\255\000\000\000\000\033\004\ +\000\000\000\000\000\000\124\064\124\064\126\006\046\001\000\000\ +\126\006\124\064\019\007\000\000\000\000\000\000\000\000\000\000\ +\179\002\000\000\162\006\000\000\173\000\000\000\088\003\173\000\ +\000\000\227\006\184\006\180\001\000\000\000\000\019\002\219\003\ +\255\254\016\047\124\064\033\004\102\000\033\004\102\000\000\000\ +\184\006\103\004\000\000\162\011\000\000\229\006\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\108\002\000\000\000\000\ +\036\007\124\064\124\064\168\071\200\071\022\072\124\064\124\064\ +\124\064\219\003\019\002\000\000\000\000\202\005\035\003\129\002\ +\211\002\000\000\000\000\219\003\229\006\211\002\016\047\000\000\ +\000\000\000\000\000\000\000\000\033\004\097\006\001\000\054\072\ +\132\072\164\072\103\004\000\000\241\002\000\000\131\005\053\007\ +\000\000\000\000\000\000\055\007\000\000\204\006\102\000\048\007\ +\000\000\033\004\000\000\000\000\000\000\033\004\000\000\188\065\ +\124\064\073\073\226\005\000\000\094\000\082\001\000\000\000\000\ +\000\000\000\000\000\000\049\007\016\047\239\006\000\000\124\064\ +\124\064\000\000\226\005\161\003\000\000\125\003\102\000\102\000\ +\174\255\000\000\187\003\000\000\000\000\041\002\000\000\249\006\ +\222\069\229\045\000\000\222\003\021\007\070\007\000\000\000\000\ +\027\001\054\255\000\000\252\000\015\003\054\255\131\005\073\073\ +\073\073\000\000\018\007\000\000\022\007\000\000\024\007\073\073\ 
+\073\073\073\073\180\001\226\005\170\005\170\005\046\005\000\000\ +\000\000\105\004\048\001\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\016\047\001\007\000\000\ +\033\004\000\000\234\005\000\000\254\002\062\048\000\000\000\000\ +\124\064\000\000\000\000\000\000\202\000\000\000\250\006\016\047\ +\238\003\161\065\000\000\000\000\000\000\016\047\000\000\000\000\ +\233\006\229\006\157\005\235\006\164\006\157\005\027\001\000\000\ +\033\004\070\007\229\006\164\006\000\000\033\004\016\047\000\000\ +\041\002\030\002\193\001\000\000\000\000\000\000\000\000\000\000\ +\254\006\000\000\234\005\124\064\124\064\124\064\007\003\007\003\ +\016\047\005\007\016\047\061\001\041\002\027\001\008\002\000\000\ +\000\000\099\000\106\000\029\007\000\000\000\000\194\003\033\004\ +\079\007\219\003\000\000\000\000\065\004\124\064\000\000\033\004\ +\157\005\157\005\013\066\157\005\157\005\110\005\033\004\093\255\ +\010\007\000\000\090\004\000\000\106\002\074\002\033\004\000\000\ +\000\000\000\000\000\000\000\000\073\073\073\073\073\073\000\000\ +\000\000\000\000\000\000\027\001\000\000\000\000\213\003\033\004\ +\016\047\135\004\000\000\000\000\009\007\000\000\011\007\124\064\ +\000\000\092\007\093\007\060\017\000\000\094\007\097\007\124\064\ +\085\007\000\000\000\000\164\006\070\007\000\000\016\047\074\002\ +\033\004\033\004\000\000\096\007\000\000\043\006\083\001\000\000\ +\000\000\037\002\033\004\000\000\000\000\062\048\062\048\126\006\ +\033\004\086\007\075\001\016\047\016\047\000\000\124\064\025\007\ +\033\004\033\004\000\000\000\000\039\005\000\000\033\004\033\004\ +\033\004\033\004\102\000\000\000\000\000\000\000\000\000\000\000\ +\095\007\124\064\016\047\033\004\033\004\000\000\000\000\000\000\ +\131\005\016\047\131\005\139\001\009\003\000\000\016\047\000\000\ +\033\004\033\004\102\000\234\005\006\007\032\007\157\005\227\005\ +\164\006\107\007\102\000\091\004\000\000\000\000\000\000\000\000\ +\109\007\157\005\157\005\016\047\000\000\124\064\062\048\110\007\ +\111\007\033\004\000\000\102\000\016\047\016\047\000\000\033\004\ +\033\004" + +let yyrindex = "\000\000\ +\126\008\127\008\000\000\000\000\000\000\000\000\106\069\000\000\ +\000\000\039\064\000\000\000\000\214\002\242\005\000\000\000\000\ +\221\067\101\066\099\067\209\064\139\003\000\000\106\069\000\000\ +\000\000\000\000\000\000\000\000\000\000\248\067\193\017\000\000\ +\000\000\209\064\000\000\000\000\200\004\096\000\042\004\000\000\ +\000\000\000\000\060\000\000\000\000\000\209\064\225\007\000\000\ +\000\000\242\005\209\064\000\000\000\000\021\043\103\016\000\000\ +\136\044\000\000\060\000\120\043\000\000\000\000\067\044\000\000\ +\000\000\000\000\081\053\000\000\000\000\102\053\000\000\021\043\ +\000\000\000\000\000\000\000\000\000\000\035\025\221\027\058\024\ +\174\024\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\214\002\099\004\200\004\062\000\225\007\000\000\000\000\000\000\ +\000\000\218\012\000\000\000\000\111\053\146\053\000\000\062\000\ +\000\000\000\000\000\000\000\000\167\053\000\000\000\000\000\000\ +\113\005\113\005\000\000\188\012\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\240\016\000\000\ +\000\000\000\000\151\015\000\000\163\014\000\000\000\000\000\000\ +\221\067\229\068\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\014\049\000\000\000\000\ +\255\001\098\003\000\000\000\000\000\000\050\005\000\000\125\049\ +\000\000\000\000\000\000\117\054\000\000\000\000\000\000\000\000\ 
+\000\000\000\000\000\000\000\000\221\001\000\000\000\000\000\000\ +\000\000\053\068\000\000\000\000\000\000\135\255\016\002\000\000\ +\214\255\000\000\000\000\076\000\000\000\000\000\069\255\000\000\ +\040\004\000\000\215\255\131\000\000\000\245\005\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\038\007\004\054\038\007\214\002\026\007\042\004\080\068\ +\000\000\000\000\000\000\012\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\028\056\114\056\139\003\000\000\000\000\200\056\030\057\000\000\ +\014\000\000\000\000\000\000\000\000\000\000\000\038\007\000\000\ +\217\003\000\000\008\003\000\000\026\007\000\000\000\000\000\000\ +\084\006\000\000\000\000\000\000\000\000\060\000\132\050\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\057\034\000\000\000\000\ +\248\067\000\000\120\043\141\068\000\000\000\000\041\005\000\000\ +\031\007\000\000\055\002\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\221\022\151\025\ +\000\000\000\000\000\000\011\026\128\026\000\000\000\000\000\000\ +\000\000\000\000\000\000\084\006\000\000\000\000\000\000\031\007\ +\000\000\000\000\000\000\125\001\000\000\120\007\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\147\255\000\000\098\007\ +\000\000\102\007\116\007\000\000\000\000\099\004\180\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\045\000\000\000\194\000\090\000\ +\131\000\000\000\245\005\000\000\066\000\000\000\026\007\101\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\038\007\117\054\000\000\155\048\244\026\ +\000\000\000\000\000\000\000\000\164\005\000\000\000\000\000\000\ +\000\000\000\000\068\017\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\066\007\000\000\198\055\021\043\207\002\000\000\000\000\ +\104\027\000\000\000\000\000\000\000\000\000\000\085\255\000\000\ +\000\000\196\000\000\000\000\000\000\000\069\004\000\000\162\000\ +\000\000\000\000\041\007\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\026\007\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\001\003\000\000\000\000\038\007\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\096\037\205\037\053\038\174\034\208\039\157\038\034\035\150\035\ +\011\036\127\036\127\031\081\028\197\028\243\036\244\031\104\032\ +\005\039\058\029\220\032\081\033\197\033\000\000\000\000\174\029\ +\000\000\000\000\111\003\000\000\164\005\051\040\000\000\000\000\ +\000\000\000\000\082\018\000\000\000\000\000\000\081\023\198\023\ +\000\000\000\000\000\000\105\022\000\000\000\000\109\039\025\053\ 
+\066\007\000\000\000\000\005\004\030\006\146\053\000\000\000\000\ +\000\000\000\000\000\000\000\000\001\003\000\000\000\000\000\000\ +\000\000\193\049\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\246\044\ +\000\000\000\000\000\000\000\000\205\045\000\000\000\000\000\000\ +\000\000\048\046\000\000\000\000\000\000\000\000\000\000\102\255\ +\000\000\000\000\222\000\162\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\202\006\000\000\093\005\ +\000\000\225\003\000\000\000\000\000\000\165\005\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\090\007\000\000\000\000\000\000\ +\000\000\000\000\000\000\238\039\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\034\030\000\000\000\000\000\000\038\065\000\000\ +\169\004\000\000\000\000\000\000\000\000\000\000\025\001\000\000\ +\000\000\084\255\000\000\169\255\000\000\000\000\185\255\000\000\ +\097\000\000\000\000\000\000\000\000\000\000\000\064\007\065\007\ +\000\000\000\000\000\000\000\000\136\003\000\000\000\000\213\005\ +\182\004\000\000\074\006\000\000\191\002\105\000\139\000\143\000\ +\000\000\000\000\000\000\053\068\185\040\000\000\000\000\000\000\ +\000\000\000\000\021\043\000\000\000\000\000\000\234\004\021\043\ +\053\068\228\001\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\131\000\000\000\ +\245\005\000\000\139\003\000\000\000\000\000\000\213\005\000\000\ +\000\000\090\007\000\000\198\006\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\018\005\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\146\053\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\047\002\000\000\000\000\ +\087\255\000\000\210\000\000\000\000\000\147\046\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\197\000\000\000\229\000\ +\000\000\125\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\068\007\000\000\000\000\099\007\ +\012\050\000\000\074\050\000\000\000\000\040\011\238\039\000\000\ +\021\043\000\000\000\000\174\001\000\000\049\255\073\007\073\007\ +\068\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\089\045\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\136\255\000\000\000\000\122\007\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\021\043\ +\028\041\000\000\127\010\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\050\038\065\139\004\225\002\136\004\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\163\051\ +\000\000\000\000\000\000\000\000\021\043\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\016\052\028\041\000\000\000\000\000\000\ +\198\018\000\000\058\019\000\000\000\000\000\000\155\040\000\000\ +\175\019\000\000\035\020\000\000\151\020\000\000\000\000\000\000\ +\235\003\000\000\162\050\000\000\001\003\240\047\000\000\119\007\ +\000\000\000\000\191\047\146\053\000\000\000\000\000\000\000\000\ 
+\000\000\000\000\000\000\125\001\000\000\000\000\000\000\190\057\ +\000\000\000\000\128\007\248\046\000\000\000\000\000\000\000\000\ +\186\255\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\048\004\000\000\000\000\021\043\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\220\255\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\012\005\000\000\125\004\000\000\203\004\000\000\000\000\062\005\ +\000\000\000\000\151\030\231\041\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\120\003\136\004\058\003\136\004\000\000\ +\011\031\228\001\000\000\115\007\000\000\063\001\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\069\007\000\000\000\000\000\000\063\001\069\007\000\000\000\000\ +\000\000\000\000\000\000\000\000\229\015\091\047\000\000\000\000\ +\000\000\000\000\068\007\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\074\042\021\043\000\000\ +\000\000\103\001\000\000\000\000\000\000\148\001\000\000\000\000\ +\000\000\254\040\175\008\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\026\012\000\000\000\000\000\000\136\004\136\004\ +\108\007\000\000\099\007\000\000\000\000\000\000\000\000\000\000\ +\000\000\118\007\199\049\069\052\000\000\122\052\000\000\000\000\ +\249\050\028\041\000\000\000\000\000\000\028\041\000\000\097\041\ +\201\041\000\000\012\021\000\000\128\021\000\000\244\021\044\042\ +\143\042\247\042\049\051\042\048\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\044\001\000\000\028\041\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\112\007\108\007\000\000\114\007\099\007\000\000\249\050\000\000\ +\178\052\208\052\056\006\099\007\000\000\219\051\000\000\000\000\ +\000\000\092\052\021\043\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\028\041\000\000\000\000\000\000\106\010\238\013\ +\000\000\156\050\000\000\000\000\000\000\028\015\146\053\000\000\ +\000\000\000\000\115\003\193\002\000\000\000\000\000\000\249\004\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\111\002\ +\000\000\000\000\000\000\000\000\000\000\000\000\219\051\000\000\ +\000\000\000\000\000\000\000\000\092\052\000\000\139\039\000\000\ +\000\000\000\000\000\000\000\000\090\043\189\043\037\044\000\000\ +\000\000\000\000\000\000\028\015\000\000\000\000\000\000\031\007\ +\000\000\000\000\000\000\000\000\082\007\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\099\007\008\053\000\000\000\000\000\000\ +\139\039\139\039\000\000\241\015\000\000\000\000\000\000\000\000\ +\000\000\019\005\161\004\000\000\000\000\000\000\000\000\000\000\ +\168\255\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ 
+\181\047\139\039\000\000\000\000\000\000\000\000\000\050\021\006\ +\120\003\058\003\243\004\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\032\002\071\003\000\000\000\000\000\000\ +\000\000\000\000\000\000\117\007\000\000\000\000\000\000\000\000\ +\182\002\105\051\243\004\243\004\125\007\126\007\000\000\130\007\ +\099\007\000\000\243\004\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\196\003\000\000\243\004\000\000\000\000\000\000\212\003\ +\237\004" + +let yygindex = "\000\000\ +\000\000\000\000\000\000\000\000\000\000\020\000\183\255\037\000\ +\168\000\184\005\119\253\000\000\166\254\147\005\096\255\145\008\ +\232\012\061\254\077\005\253\255\063\014\144\252\036\003\247\255\ +\000\000\046\000\016\000\021\000\027\000\000\000\000\000\000\000\ +\000\000\030\000\035\000\040\000\000\000\255\255\003\000\093\009\ +\084\002\000\000\000\000\000\000\000\000\000\000\000\000\041\000\ +\000\000\000\000\000\000\000\000\010\255\059\252\000\000\000\000\ +\000\000\004\000\148\005\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\010\003\056\000\112\251\081\255\136\253\214\251\ +\048\253\185\252\087\251\199\003\087\003\000\000\000\000\000\000\ +\000\000\000\000\000\000\211\253\000\000\000\000\000\000\042\000\ +\082\255\014\006\085\005\100\005\000\000\000\000\083\255\048\000\ +\000\000\000\000\170\255\035\002\103\253\160\006\187\010\173\011\ +\000\000\000\000\000\000\131\255\000\000\006\013\182\006\006\000\ +\104\255\048\003\121\007\000\000\124\007\165\006\244\010\176\253\ +\000\000\218\000\000\000\000\000\000\000\198\003\090\005\152\255\ +\254\004\000\000\000\000\000\000\000\000\227\000\000\000\034\007\ +\145\255\042\007\081\006\083\008\000\000\000\000\060\004\000\000\ +\000\000\129\007\233\253\016\005\193\251\101\251\000\252\028\253\ +\000\000\204\252\000\000\074\004\000\000\000\000\119\251\088\255\ +\101\253\062\006\091\007\000\000\000\000\232\003\000\000\000\000\ +\253\003\243\252\000\000\200\003\108\004\000\000\179\253\135\002\ +\155\255\000\000\000\000\192\005\147\254\157\255\199\254\151\255\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\055\255\000\000" + +let yytablesize = 19255 +let yytable = "\126\000\ +\102\000\151\000\212\001\213\001\103\000\203\001\119\001\117\001\ +\251\001\230\001\128\001\168\000\118\001\157\001\086\002\026\003\ +\137\001\096\000\107\001\221\001\053\000\151\001\097\000\061\003\ +\151\003\203\002\063\003\123\001\098\000\190\003\111\001\099\000\ +\198\000\162\004\176\001\024\003\100\000\143\001\126\003\125\000\ +\123\002\101\000\106\000\241\001\043\004\242\001\060\000\139\004\ +\027\004\074\001\248\001\090\004\052\004\051\005\034\005\222\004\ +\169\000\120\001\030\005\034\000\131\003\071\000\039\001\102\002\ +\162\000\103\002\181\001\220\004\084\001\050\003\252\000\184\000\ +\039\005\143\003\031\001\171\000\037\005\194\003\001\004\008\000\ +\191\000\206\002\162\000\087\001\080\001\009\002\023\002\173\000\ +\060\000\038\001\102\000\216\001\197\004\231\003\103\000\098\002\ +\184\004\195\003\243\004\069\004\206\004\161\001\102\000\023\002\ +\075\001\084\001\103\000\096\000\126\000\006\002\087\001\126\000\ +\097\000\126\000\126\000\131\005\232\001\218\002\098\000\096\000\ +\045\001\099\000\198\001\139\001\097\000\095\001\100\000\100\001\ +\101\001\007\002\098\000\101\000\106\000\099\000\113\002\079\001\ +\151\000\151\000\100\000\151\000\085\005\122\001\171\000\101\000\ +\106\000\132\005\002\004\199\001\113\002\151\000\151\000\113\002\ 
+\037\005\135\001\131\003\151\004\064\002\200\001\027\005\162\000\ +\052\002\113\002\162\000\084\001\208\003\145\004\040\003\245\001\ +\089\001\040\003\127\001\135\000\151\000\151\000\087\001\080\001\ +\224\001\087\001\087\001\080\001\023\002\064\002\115\002\004\002\ +\083\001\160\001\245\001\232\003\133\005\218\003\185\004\080\002\ +\161\001\245\001\245\001\089\001\161\001\228\001\201\001\029\001\ +\229\001\202\001\129\002\188\001\189\001\040\004\052\003\233\001\ +\041\003\219\002\115\002\041\003\192\001\093\001\085\001\245\001\ +\245\001\067\002\052\003\088\001\200\003\083\001\014\004\008\002\ +\085\001\113\002\079\001\245\001\225\001\113\002\079\001\064\002\ +\064\002\251\003\245\001\245\001\125\002\245\001\076\001\082\005\ +\093\001\115\002\152\002\115\002\218\003\155\004\088\001\037\003\ +\088\005\064\002\059\004\037\005\012\002\044\001\188\004\115\002\ +\247\004\190\004\209\003\089\001\203\002\025\005\089\001\089\001\ +\133\002\117\001\134\002\082\002\013\002\069\002\089\004\194\005\ +\117\001\196\005\117\001\077\001\160\001\005\002\245\001\083\001\ +\160\001\128\001\128\001\219\003\083\003\026\005\041\004\109\002\ +\162\000\178\002\053\003\089\005\024\002\130\002\213\001\060\000\ +\116\002\060\000\140\005\056\003\163\001\203\002\059\003\122\002\ +\093\001\081\002\085\001\169\000\093\001\085\001\088\001\201\003\ +\211\005\088\001\088\001\034\000\015\004\071\000\216\003\156\004\ +\052\003\085\002\071\004\158\002\013\005\015\005\177\001\163\001\ +\038\001\028\000\178\001\076\001\060\000\010\003\166\000\082\002\ +\229\002\179\001\019\004\196\001\180\001\034\000\023\002\071\000\ +\083\002\154\001\217\001\075\001\059\004\012\005\248\004\216\002\ +\086\001\216\003\241\002\154\001\083\002\087\002\027\004\162\000\ +\079\002\082\002\086\001\069\002\049\005\114\004\126\000\177\001\ +\036\001\084\002\216\002\178\001\111\002\126\000\107\005\126\000\ +\084\003\216\002\179\001\085\002\155\001\180\001\126\000\126\000\ +\081\002\126\000\150\002\160\005\162\000\205\004\155\001\163\001\ +\071\002\072\002\077\002\163\001\076\002\126\000\075\002\095\001\ +\216\002\126\000\169\004\075\002\253\003\151\000\151\000\034\000\ +\028\000\071\000\217\003\216\002\170\003\166\000\082\002\085\002\ +\216\002\151\002\012\004\216\002\002\002\216\002\076\001\083\002\ +\045\001\222\002\039\004\151\002\167\002\162\000\151\000\151\000\ +\151\000\139\003\048\001\147\004\127\003\154\001\151\000\006\001\ +\154\001\127\001\127\001\179\001\086\001\018\004\111\005\086\001\ +\084\002\156\001\083\002\078\001\162\001\038\002\079\002\010\002\ +\111\002\115\004\085\002\151\000\151\000\235\003\216\002\018\002\ +\151\000\162\000\022\002\243\003\151\000\135\004\006\004\224\001\ +\155\001\119\005\074\005\155\001\128\003\157\001\150\002\162\001\ +\126\000\126\000\162\000\039\002\150\002\065\005\162\000\203\002\ +\077\002\060\000\106\004\148\001\075\002\055\002\162\000\126\000\ +\151\000\102\000\013\004\079\001\058\002\103\000\177\001\163\000\ +\164\004\151\000\178\001\168\002\117\001\151\002\122\001\224\001\ +\069\003\179\001\096\000\041\005\180\001\157\001\026\001\097\000\ +\210\002\212\002\151\000\070\003\071\003\098\000\245\002\038\002\ +\099\000\038\002\213\001\081\001\112\002\100\000\010\005\223\002\ +\114\002\198\001\101\000\106\000\156\001\097\005\078\001\162\001\ +\226\002\015\003\017\003\162\001\031\002\191\000\114\002\075\005\ +\169\002\114\002\074\003\214\002\061\004\039\002\064\002\039\002\ +\149\001\110\005\199\001\114\002\162\000\151\000\107\004\061\005\ +\157\001\150\003\152\003\213\001\200\001\038\005\214\002\150\002\ 
+\105\003\245\004\116\002\189\004\060\000\214\002\135\002\064\002\ +\134\003\184\000\046\003\136\002\092\004\198\001\087\002\177\001\ +\177\005\014\005\191\000\178\001\123\003\147\001\135\003\171\003\ +\143\004\137\002\179\001\214\002\135\002\180\001\168\000\022\003\ +\179\005\126\000\052\003\141\003\126\000\201\001\199\001\214\002\ +\202\001\171\002\087\002\126\000\214\002\126\000\126\000\214\002\ +\200\001\214\002\075\003\114\002\081\002\135\002\204\003\114\002\ +\205\003\064\002\064\002\126\000\076\003\048\003\057\003\135\002\ +\151\000\172\000\125\005\087\005\214\002\126\000\193\001\002\004\ +\146\001\202\002\162\000\064\002\028\000\162\000\095\005\151\000\ +\151\000\166\000\082\002\067\003\162\000\193\003\086\001\137\004\ +\162\000\201\001\214\002\083\002\202\001\166\003\142\004\194\001\ +\214\002\168\000\095\005\078\003\002\004\126\000\136\003\126\000\ +\135\002\138\002\189\003\135\002\126\000\089\003\169\000\218\001\ +\166\005\151\000\203\002\147\001\084\002\061\003\058\003\011\004\ +\063\003\126\000\151\000\180\003\151\000\218\001\085\002\100\004\ +\102\004\137\005\248\003\046\001\172\000\021\003\224\001\028\000\ +\041\005\095\004\250\003\117\001\162\002\219\001\012\000\016\004\ +\137\005\092\001\093\001\177\001\114\003\028\000\235\002\178\001\ +\214\002\181\003\004\004\219\001\137\003\234\003\179\001\236\002\ +\008\004\180\001\197\005\227\001\029\000\151\000\163\002\029\003\ +\030\003\152\003\213\001\104\005\033\000\106\005\182\003\203\002\ +\162\000\169\000\220\001\087\002\224\001\162\000\060\000\040\003\ +\138\003\048\000\198\005\199\002\040\003\184\003\122\001\203\002\ +\220\001\185\003\122\001\045\001\126\000\196\004\122\001\048\000\ +\122\001\199\002\177\001\046\001\122\001\122\001\178\001\061\005\ +\122\001\162\000\235\002\216\002\178\003\179\001\169\002\183\003\ +\180\001\122\001\083\001\236\002\216\002\175\005\176\005\116\000\ +\099\001\041\003\164\003\170\002\087\002\102\000\041\003\235\004\ +\203\002\103\000\087\002\169\002\197\003\017\004\118\004\241\003\ +\126\000\242\004\116\000\126\000\139\002\218\001\096\000\167\005\ +\094\005\116\000\078\004\097\000\126\000\194\001\106\002\000\004\ +\122\001\098\000\095\003\096\003\099\000\126\000\198\001\122\001\ +\162\000\100\000\045\001\151\000\216\002\028\000\101\000\106\000\ +\116\000\194\001\214\002\219\001\168\005\202\002\162\000\171\002\ +\115\003\122\001\122\001\116\000\122\001\122\001\220\005\199\001\ +\029\000\123\001\116\000\116\000\179\003\116\000\125\003\015\000\ +\033\000\200\001\169\005\085\001\171\002\214\002\147\000\122\001\ +\106\002\106\002\165\003\112\001\142\000\204\001\214\002\169\002\ +\220\001\221\004\142\000\204\001\115\001\151\000\213\001\048\000\ +\108\003\147\000\106\002\087\002\136\005\060\001\061\001\126\000\ +\147\000\110\001\109\003\109\001\193\001\214\002\116\000\126\000\ +\044\003\151\000\201\001\170\005\151\000\202\001\151\000\151\000\ +\151\000\179\004\210\005\126\000\151\000\150\001\147\000\147\000\ +\087\002\150\004\151\000\087\002\236\001\194\001\214\002\180\002\ +\181\002\122\000\147\000\066\001\202\002\162\000\126\000\064\004\ +\198\003\147\000\147\000\045\000\147\000\246\001\048\000\210\002\ +\171\002\151\000\163\004\147\001\071\001\210\003\195\004\247\002\ +\134\000\179\001\106\001\087\004\180\001\111\004\106\001\046\003\ +\246\001\237\001\236\003\224\001\248\002\106\001\012\000\246\001\ +\246\001\012\000\189\000\134\000\047\003\182\002\097\004\092\001\ +\093\001\106\001\134\000\012\000\012\000\147\000\115\001\012\000\ +\149\001\228\001\236\004\120\001\229\001\246\001\246\001\253\002\ 
+\012\000\012\000\012\000\012\000\237\003\190\000\087\002\090\002\ +\134\000\246\001\249\002\216\002\162\000\087\002\012\000\012\000\ +\246\001\246\001\048\003\246\001\134\000\126\000\202\003\068\003\ +\106\001\254\002\213\001\126\000\134\000\148\004\134\000\107\000\ +\087\002\239\004\012\000\122\000\216\002\012\000\048\005\012\000\ +\012\000\012\000\012\000\071\005\162\000\045\001\216\002\012\000\ +\012\000\120\002\107\000\040\003\074\003\062\004\012\000\126\000\ +\146\002\107\000\146\002\203\003\246\001\031\005\054\004\055\004\ +\151\000\126\000\012\000\146\002\012\000\126\000\012\000\134\000\ +\166\000\081\002\220\002\042\005\065\004\066\004\224\001\063\004\ +\107\000\133\001\012\000\072\004\221\002\012\000\147\001\216\002\ +\185\001\012\000\216\002\107\000\086\004\041\003\117\000\147\001\ +\190\000\028\000\062\005\107\000\112\005\107\000\166\000\082\002\ +\146\002\170\004\025\002\200\005\140\001\174\004\160\004\011\000\ +\083\002\117\000\194\001\224\001\087\002\167\000\126\000\253\000\ +\117\000\123\001\155\001\152\003\213\001\123\001\119\001\117\001\ +\126\000\123\001\016\000\123\001\118\001\185\001\194\001\123\001\ +\123\001\084\002\193\004\123\001\155\001\138\001\107\000\117\000\ +\201\005\214\002\145\001\085\002\123\001\022\000\087\002\224\001\ +\093\005\048\000\117\000\177\001\214\002\162\000\198\004\178\001\ +\087\002\117\000\117\000\126\000\117\000\254\000\179\001\048\000\ +\212\004\180\001\144\000\255\000\108\005\115\001\163\000\022\005\ +\012\003\162\000\177\002\002\005\063\002\118\002\064\002\145\000\ +\253\004\048\000\080\003\123\001\152\003\213\001\129\005\155\001\ +\065\002\214\002\123\001\172\003\151\000\216\002\185\001\209\001\ +\044\000\087\002\087\002\190\000\146\002\117\000\072\003\214\002\ +\077\003\126\000\173\003\174\003\123\001\123\001\162\000\123\001\ +\123\001\162\000\122\000\145\000\139\000\216\002\147\002\141\000\ +\194\001\209\001\119\002\216\002\126\000\126\000\126\000\214\002\ +\148\002\168\004\123\001\144\000\048\000\171\004\145\000\152\001\ +\090\002\087\002\175\004\002\005\194\001\145\000\206\002\146\002\ +\149\001\017\005\162\000\031\002\149\001\031\002\144\000\214\002\ +\149\001\040\003\149\001\186\004\187\004\144\000\149\001\192\003\ +\216\002\191\004\149\001\145\000\090\002\135\001\033\005\216\002\ +\035\005\166\000\126\000\149\001\031\002\081\002\021\005\145\000\ +\205\002\116\005\126\000\144\000\214\002\028\005\145\000\145\000\ +\078\005\145\000\200\004\045\001\126\000\214\002\151\000\144\000\ +\183\001\214\002\126\000\041\003\000\005\028\000\144\000\144\000\ +\216\002\144\000\166\000\082\002\122\000\216\002\214\002\162\000\ +\214\002\214\002\135\001\126\000\083\002\214\002\240\003\150\002\ +\177\001\149\001\029\005\214\002\178\001\214\002\119\002\162\000\ +\172\001\096\001\145\000\179\001\077\005\126\000\180\001\126\000\ +\186\001\144\003\080\005\149\001\149\001\084\002\149\001\149\001\ +\214\002\122\000\144\000\173\001\151\002\216\002\087\002\085\002\ +\214\002\169\003\013\003\091\005\150\002\176\003\214\002\151\000\ +\214\002\149\001\083\005\164\000\214\002\086\005\164\000\214\002\ +\011\005\164\000\164\000\120\005\097\001\164\000\164\000\164\000\ +\164\000\164\000\162\000\164\000\214\002\162\000\162\000\019\005\ +\020\005\151\002\164\000\146\002\213\003\126\000\164\000\137\002\ +\214\002\164\000\164\000\214\002\135\005\214\005\163\000\132\004\ +\126\000\043\003\164\000\164\000\146\002\090\002\164\000\164\000\ +\107\001\187\001\162\000\126\000\107\001\216\002\144\005\212\002\ +\122\005\123\005\216\001\126\005\127\005\162\000\107\001\033\001\ 
+\171\005\133\004\126\000\126\000\172\005\143\005\146\002\107\001\ +\126\000\126\000\212\002\162\000\162\000\216\002\163\000\174\001\ +\145\005\212\002\216\002\216\002\148\001\164\000\164\000\164\000\ +\034\000\164\000\162\000\161\005\216\002\003\003\090\002\126\000\ +\073\005\040\003\175\001\008\000\090\002\002\005\126\000\002\005\ +\212\002\117\001\004\003\126\000\149\000\117\001\107\001\126\002\ +\180\005\181\005\034\000\212\002\117\001\216\002\060\005\117\001\ +\091\004\144\001\146\002\212\002\146\002\212\002\152\001\216\002\ +\126\000\226\001\152\001\126\000\212\002\164\000\164\000\193\005\ +\031\003\126\000\126\000\041\003\152\001\234\001\198\004\105\004\ +\214\002\182\001\146\002\204\005\112\000\152\001\113\000\114\000\ +\028\000\104\000\115\000\214\002\143\000\115\001\117\000\193\001\ +\191\001\063\002\212\002\155\002\202\005\121\005\212\002\117\001\ +\218\005\164\000\146\002\214\002\155\001\156\002\209\005\143\000\ +\126\002\224\005\225\005\104\000\091\002\212\002\143\000\120\000\ +\194\001\216\005\217\005\146\003\212\002\090\002\121\000\109\001\ +\235\001\071\000\132\000\109\001\092\002\026\002\027\002\028\002\ +\029\002\097\003\122\000\123\000\143\000\062\002\177\003\149\005\ +\142\000\030\002\212\002\187\003\216\002\215\003\109\001\158\005\ +\143\000\048\000\090\002\071\000\132\000\090\002\212\002\143\000\ +\143\000\096\001\143\000\245\003\216\002\096\001\212\002\099\001\ +\212\002\096\001\211\003\096\001\206\002\057\005\238\001\096\001\ +\096\001\151\001\246\003\160\001\160\001\151\001\182\005\153\003\ +\058\005\164\000\164\000\154\003\096\001\031\002\185\005\151\001\ +\184\001\185\001\155\003\214\002\247\003\156\003\240\001\214\002\ +\151\001\192\005\188\003\143\000\097\001\002\003\157\003\164\000\ +\097\001\212\002\120\001\003\003\097\001\247\001\097\001\206\001\ +\214\002\214\002\097\001\085\003\249\002\164\000\097\001\214\002\ +\004\003\164\000\252\001\096\001\058\004\086\003\148\002\097\001\ +\090\002\116\004\096\001\228\001\214\002\219\005\229\001\090\002\ +\177\001\254\001\214\002\117\004\178\001\162\000\014\002\255\001\ +\128\005\000\002\045\004\179\001\096\001\096\001\180\001\096\001\ +\096\001\019\002\090\002\001\002\038\004\164\000\214\002\068\002\ +\191\001\069\002\159\002\191\001\160\002\191\001\097\001\191\001\ +\142\000\204\001\096\001\070\002\148\001\097\001\161\002\148\002\ +\148\001\148\002\148\002\148\002\148\001\148\002\148\001\076\001\ +\148\002\148\002\148\001\202\002\162\000\045\001\148\001\097\001\ +\097\001\254\004\097\001\097\001\191\001\028\000\162\000\148\001\ +\191\001\255\004\000\005\026\002\027\002\028\002\029\002\184\002\ +\185\002\099\001\148\002\093\004\094\004\097\001\207\002\030\002\ +\001\005\148\002\164\000\144\001\212\002\073\002\220\003\212\002\ +\221\003\237\004\139\002\104\004\190\000\148\002\148\002\206\002\ +\208\002\212\002\222\003\139\002\238\004\100\002\090\002\214\002\ +\112\004\020\004\012\000\021\004\182\001\148\001\212\002\122\000\ +\212\002\212\002\101\002\164\000\150\002\022\004\104\002\182\001\ +\120\004\013\000\014\000\031\002\212\002\212\002\150\002\148\001\ +\148\001\105\002\148\001\148\001\182\001\182\001\021\000\249\002\ +\090\002\191\001\106\002\191\001\184\002\187\002\113\002\130\004\ +\212\002\114\002\090\002\212\002\115\002\148\001\122\000\138\004\ +\212\002\029\000\182\001\121\002\073\001\062\002\212\002\126\002\ +\062\002\033\000\202\002\162\000\212\002\005\005\191\001\037\000\ +\191\001\204\002\062\002\162\000\045\001\039\000\062\002\127\002\ +\212\002\216\002\216\002\119\002\212\002\186\002\188\002\062\002\ 
+\062\002\062\002\062\002\090\002\090\002\043\000\131\002\135\002\ +\212\002\107\002\108\002\212\002\212\002\209\002\062\002\164\000\ +\165\004\047\000\132\002\214\002\050\000\118\001\135\002\214\002\ +\124\002\118\001\164\002\214\002\214\002\135\002\166\002\197\002\ +\118\001\062\002\176\002\118\001\062\002\206\002\119\002\062\002\ +\062\002\062\002\214\002\090\002\118\001\005\005\062\002\062\002\ +\213\002\142\002\144\002\146\002\135\002\062\002\135\002\225\002\ +\238\002\150\002\227\002\055\005\056\005\230\002\062\002\239\002\ +\135\002\062\002\240\002\062\002\112\000\062\002\113\000\114\000\ +\028\000\214\002\115\000\242\002\243\002\116\000\117\000\008\003\ +\202\004\062\002\204\004\118\001\062\002\244\002\009\003\194\002\ +\062\002\246\002\001\003\131\002\131\002\061\001\118\000\048\000\ +\025\003\032\003\131\002\038\003\054\003\191\001\119\000\120\000\ +\191\001\135\002\042\003\045\003\135\002\051\003\121\000\131\002\ +\064\003\149\001\073\003\224\002\241\004\131\002\079\003\087\003\ +\179\001\244\004\122\000\123\000\001\000\002\000\003\000\004\000\ +\005\000\094\003\101\003\002\002\103\003\116\003\184\002\129\003\ +\131\002\131\002\249\002\031\002\142\003\252\002\185\000\185\000\ +\182\001\099\001\008\005\159\003\160\003\099\001\185\000\161\003\ +\090\002\099\001\162\003\099\001\185\000\185\000\163\003\099\001\ +\199\003\167\003\182\001\212\003\182\001\206\003\182\001\233\003\ +\185\000\242\003\182\001\249\003\099\001\008\000\005\004\007\004\ +\119\002\185\000\023\005\024\005\010\004\029\004\030\004\185\000\ +\185\000\185\000\185\000\185\000\035\004\005\005\036\004\044\004\ +\191\001\194\000\049\004\051\004\046\004\040\005\008\000\068\004\ +\114\001\050\005\185\000\050\004\074\004\096\004\108\004\185\000\ +\113\004\110\004\121\004\122\004\185\000\185\000\182\001\123\004\ +\127\004\136\004\099\001\191\001\204\002\140\004\128\004\185\000\ +\185\000\185\000\185\000\185\000\129\004\141\004\144\001\149\004\ +\144\001\159\004\157\004\177\004\099\001\099\001\070\005\099\001\ +\099\001\185\000\161\004\144\001\182\001\192\004\172\004\112\000\ +\166\004\113\000\114\000\028\000\173\004\115\000\158\003\176\004\ +\115\001\117\000\099\001\082\003\219\004\204\002\223\004\005\005\ +\194\004\005\005\006\005\009\005\212\002\018\003\016\005\212\002\ +\182\001\036\005\160\001\093\003\018\005\206\004\096\005\052\005\ +\067\005\212\002\120\000\053\005\166\002\054\005\100\005\076\005\ +\081\005\121\000\084\005\099\005\105\005\113\005\212\002\164\000\ +\212\002\212\002\109\005\118\005\134\005\122\000\123\000\147\005\ +\148\005\150\005\151\005\156\005\118\003\212\002\157\005\159\005\ +\178\005\042\003\039\005\183\005\191\005\207\005\062\002\208\005\ +\212\005\062\002\215\005\221\005\222\005\034\000\071\000\026\002\ +\212\002\034\000\214\002\062\002\071\000\047\002\216\002\062\002\ +\212\002\044\002\191\001\214\002\120\002\042\003\212\002\144\001\ +\062\002\062\002\062\002\062\002\212\002\150\000\008\000\046\002\ +\114\001\102\000\144\001\223\002\224\002\194\001\182\001\062\002\ +\212\002\214\002\137\002\049\002\212\002\144\001\166\000\135\002\ +\183\000\182\001\136\002\135\002\218\001\214\003\015\000\136\002\ +\212\002\138\002\062\002\212\002\141\002\062\002\230\003\120\002\ +\062\002\062\002\062\002\191\001\142\002\143\002\144\001\062\002\ +\062\002\139\002\182\001\195\005\066\005\141\005\062\002\112\000\ +\122\003\113\000\114\000\028\000\048\004\115\000\190\005\011\003\ +\115\001\117\000\062\002\081\003\062\002\211\002\062\002\079\005\ +\078\002\077\002\056\004\191\001\151\002\023\003\028\003\163\001\ 
+\149\002\007\005\062\002\119\004\252\004\062\002\205\005\206\005\ +\112\003\062\002\120\000\117\002\093\002\072\005\213\005\064\005\ +\000\000\121\000\098\005\240\004\000\000\000\000\042\003\204\002\ +\000\000\000\000\000\000\000\000\000\000\122\000\123\000\223\005\ +\191\001\191\001\000\000\000\000\000\000\052\001\009\004\000\000\ +\000\000\141\001\000\000\000\000\112\000\000\000\113\000\114\000\ +\028\000\144\001\115\000\000\000\000\000\116\000\117\000\000\000\ +\000\000\000\000\000\000\156\001\150\000\150\000\000\000\150\000\ +\216\002\216\002\059\001\060\001\061\001\000\000\118\000\216\002\ +\000\000\150\000\150\000\000\000\000\000\216\002\119\000\120\000\ +\000\000\000\000\000\000\000\000\216\002\191\001\121\000\042\003\ +\194\002\000\000\216\002\000\000\000\000\063\001\064\001\042\003\ +\150\000\150\000\122\000\123\000\222\001\000\000\000\000\000\000\ +\191\001\066\001\067\001\068\001\069\001\216\002\216\002\000\000\ +\000\000\081\004\083\004\085\004\000\000\182\001\000\000\088\004\ +\000\000\000\000\071\001\000\000\000\000\194\002\000\000\000\000\ +\000\000\000\000\000\000\165\000\000\000\000\000\172\000\000\000\ +\000\000\174\000\175\000\000\000\000\000\176\000\177\000\178\000\ +\179\000\180\000\000\000\181\000\194\002\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\032\001\000\000\ +\000\000\034\001\035\001\000\000\000\000\000\000\000\000\000\000\ +\000\000\042\003\040\001\041\001\144\001\000\000\042\001\043\001\ +\112\000\000\000\113\000\114\000\028\000\000\000\115\000\000\000\ +\000\000\115\001\117\000\000\000\000\000\182\001\000\000\182\001\ +\000\000\182\001\000\000\144\001\182\001\000\000\000\000\000\000\ +\042\003\000\000\000\000\000\000\000\000\144\001\015\000\000\000\ +\191\001\015\000\191\001\120\000\000\000\104\001\105\001\106\001\ +\000\000\108\001\121\000\015\000\015\000\000\000\000\000\015\000\ +\000\000\000\000\204\002\000\000\000\000\000\000\122\000\123\000\ +\015\000\015\000\015\000\015\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\012\000\042\003\015\000\015\000\ +\000\000\000\000\042\003\000\000\000\000\000\000\000\000\000\000\ +\000\000\191\001\000\000\089\000\014\000\153\001\154\001\066\002\ +\000\000\000\000\015\000\000\000\000\000\015\000\000\000\000\000\ +\090\000\015\000\015\000\000\000\000\000\000\000\144\001\015\000\ +\015\000\000\000\144\001\000\000\000\000\000\000\015\000\204\002\ +\000\000\000\000\000\000\029\000\000\000\000\000\000\000\000\000\ +\000\000\197\001\015\000\033\000\015\000\000\000\015\000\204\002\ +\042\003\091\000\144\001\000\000\000\000\000\000\000\000\039\000\ +\000\000\000\000\015\000\209\002\000\000\015\000\000\000\000\000\ +\144\001\015\000\000\000\000\000\000\000\000\000\141\001\092\000\ +\000\000\150\000\150\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\093\000\000\000\000\000\050\000\042\003\ +\204\002\000\000\000\000\000\000\000\000\000\000\042\003\000\000\ +\000\000\000\000\150\000\150\000\150\000\000\000\000\000\000\000\ +\000\000\000\000\150\000\000\000\000\000\191\001\000\000\069\005\ +\000\000\157\002\112\000\000\000\113\000\114\000\028\000\000\000\ +\115\000\249\001\250\001\116\000\117\000\144\001\000\000\150\000\ +\150\000\000\000\000\000\000\000\150\000\000\000\000\000\000\000\ +\150\000\240\001\000\000\222\001\118\000\144\001\000\000\003\002\ +\000\000\000\000\191\001\156\001\119\000\060\003\000\000\000\000\ +\000\000\000\000\156\001\000\000\121\000\011\002\052\000\069\005\ +\000\000\017\002\000\000\000\000\150\000\000\000\000\000\070\004\ 
+\122\000\123\000\000\000\000\000\000\000\150\000\000\000\000\000\ +\124\001\000\000\000\000\222\001\191\001\000\000\000\000\000\000\ +\000\000\144\001\000\000\000\000\144\001\125\001\150\000\000\000\ +\000\000\000\003\000\000\191\001\000\000\000\000\000\000\144\001\ +\000\000\000\000\183\000\191\001\000\000\000\000\000\000\000\000\ +\112\000\000\000\113\000\114\000\028\000\000\000\115\000\000\000\ +\000\000\126\001\117\000\000\000\191\001\000\000\000\000\153\000\ +\000\000\000\000\000\000\170\000\000\000\000\000\000\000\000\000\ +\000\000\150\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\170\000\120\000\000\000\191\001\191\001\000\000\ +\000\000\000\000\121\000\144\001\000\000\000\000\000\000\191\001\ +\000\000\000\000\110\002\000\000\170\000\144\001\122\000\123\000\ +\000\000\000\000\000\000\000\000\000\000\144\001\191\001\000\000\ +\000\000\000\000\000\000\191\001\191\001\191\001\191\001\000\000\ +\156\000\008\000\009\000\000\000\000\000\052\001\010\000\011\000\ +\144\001\144\001\000\000\135\002\000\000\000\000\000\000\000\000\ +\170\000\000\000\170\000\170\000\000\000\144\001\069\005\000\000\ +\069\005\015\000\016\000\156\001\150\000\000\000\000\000\000\000\ +\144\001\058\001\059\001\060\001\061\001\000\000\000\000\000\000\ +\000\000\000\000\000\000\150\000\150\000\022\000\144\001\106\002\ +\024\000\025\000\026\000\027\000\144\001\144\001\028\000\000\000\ +\162\000\000\000\000\000\142\000\032\000\063\001\064\001\000\000\ +\000\000\000\000\110\003\000\000\000\000\000\000\000\000\000\000\ +\000\000\066\001\067\001\068\001\069\001\150\000\153\000\153\000\ +\000\000\153\000\042\000\000\000\000\000\000\000\150\000\000\000\ +\150\000\000\000\071\001\153\000\153\000\000\000\000\000\231\002\ +\044\000\000\000\222\001\000\000\000\000\045\000\000\000\170\000\ +\048\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\240\001\153\000\214\001\240\001\000\000\000\000\170\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\240\001\145\003\ +\000\000\150\000\240\001\000\000\000\000\000\000\052\000\156\000\ +\156\000\052\000\156\000\240\001\240\001\240\001\240\001\000\000\ +\222\001\000\000\000\000\052\000\156\000\156\000\000\000\000\000\ +\000\000\000\000\240\001\000\000\000\000\000\000\000\000\000\000\ +\052\000\000\000\052\000\052\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\205\001\156\000\156\000\240\001\052\000\052\000\ +\240\001\000\000\000\000\240\001\240\001\240\001\000\000\000\000\ +\000\000\154\000\240\001\240\001\000\000\171\000\000\000\000\000\ +\000\000\240\001\052\000\000\000\000\000\052\000\170\000\244\003\ +\000\000\052\000\052\000\000\000\171\000\240\001\000\000\240\001\ +\052\000\240\001\000\000\000\000\000\000\000\000\052\000\000\000\ +\000\000\000\000\000\000\170\000\141\001\240\001\171\000\000\000\ +\240\001\000\000\052\000\000\000\240\001\000\000\052\000\150\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\052\000\000\000\000\000\052\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\171\000\000\000\171\000\171\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\150\000\000\000\170\000\170\000\000\000\000\000\170\000\ +\000\000\053\000\170\000\000\000\116\001\021\002\000\000\000\000\ +\000\000\000\000\000\000\032\002\000\000\150\000\000\000\106\002\ +\150\000\000\000\150\000\150\000\150\000\000\000\000\000\106\002\ 
+\150\000\000\000\000\000\000\000\106\002\000\000\150\000\000\000\ +\154\000\154\000\000\000\154\000\000\000\000\000\000\000\000\000\ +\000\000\106\002\000\000\106\002\106\002\154\000\154\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\150\000\000\000\000\000\ +\106\002\171\000\000\000\153\000\214\001\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\154\000\215\001\000\000\222\001\ +\088\003\171\000\000\000\106\002\000\000\000\000\106\002\000\000\ +\000\000\106\002\106\002\106\002\153\000\153\000\153\000\207\003\ +\000\000\106\002\000\000\000\000\153\000\000\000\000\000\106\002\ +\000\000\000\000\000\000\000\000\134\004\000\000\000\000\000\000\ +\000\000\000\000\000\000\106\002\000\000\000\000\000\000\106\002\ +\000\000\214\001\153\000\000\000\156\000\156\000\214\001\000\000\ +\000\000\000\000\153\000\106\002\000\000\000\000\106\002\112\000\ +\000\000\113\000\114\000\028\000\000\000\115\000\000\000\000\000\ +\116\000\117\000\000\000\000\000\140\002\156\000\156\000\156\000\ +\000\000\206\004\000\000\000\000\000\000\156\000\153\000\000\000\ +\171\000\118\000\000\000\000\000\000\000\000\000\000\000\153\000\ +\207\004\119\000\120\000\115\002\150\000\000\000\000\000\198\001\ +\000\000\121\000\156\000\156\000\000\000\171\000\000\000\156\000\ +\153\000\000\000\222\001\156\000\000\000\122\000\123\000\000\000\ +\000\000\000\000\000\000\000\000\170\000\032\002\000\000\000\000\ +\208\004\076\000\113\000\114\000\028\000\000\000\115\000\000\000\ +\000\000\116\000\209\004\000\000\000\000\000\000\000\000\156\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\222\001\ +\233\002\000\000\118\000\153\000\000\000\000\000\000\000\000\000\ +\000\000\210\004\119\000\120\000\000\000\000\000\000\000\000\000\ +\000\000\156\000\121\000\000\000\000\000\171\000\171\000\000\000\ +\000\000\171\000\155\000\201\001\171\000\000\000\211\004\123\000\ +\000\000\000\000\000\000\222\001\000\000\000\000\000\000\156\001\ +\000\000\053\000\000\000\000\000\053\000\000\000\116\001\000\000\ +\000\000\000\000\000\000\000\000\000\000\116\001\053\000\116\001\ +\000\000\000\000\000\000\000\000\233\002\000\000\000\000\000\000\ +\000\000\000\000\000\000\053\000\000\000\053\000\053\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\153\000\000\000\ +\150\000\053\000\053\000\000\000\000\000\154\000\215\001\000\000\ +\000\000\000\000\000\000\000\000\000\000\153\000\153\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\053\000\000\000\000\000\ +\053\000\000\000\000\000\000\000\053\000\053\000\154\000\154\000\ +\154\000\000\000\000\000\053\000\111\003\000\000\154\000\000\000\ +\000\000\053\000\000\000\000\000\000\000\000\000\000\000\153\000\ +\000\000\000\000\000\000\000\000\000\000\053\000\000\000\156\000\ +\153\000\053\000\214\001\215\001\154\000\000\000\000\000\000\000\ +\215\001\000\000\000\000\000\000\154\000\053\000\156\000\156\000\ +\053\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\155\000\155\000\000\000\155\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\155\000\155\000\ +\154\000\000\000\150\000\214\001\000\000\000\000\000\000\000\000\ +\156\000\154\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\156\000\000\000\156\000\000\000\155\000\155\000\000\000\ +\000\000\000\000\154\000\115\002\000\000\115\002\115\002\115\002\ +\000\000\000\000\000\000\115\002\000\000\000\000\171\000\000\000\ +\115\002\000\000\000\000\000\000\115\002\115\002\115\002\000\000\ +\000\000\000\000\000\000\000\000\000\000\115\002\115\002\115\002\ 
+\115\002\076\000\000\000\000\000\156\000\000\000\000\000\115\002\ +\000\000\000\000\000\000\150\000\115\002\154\000\076\000\000\000\ +\000\000\000\000\000\000\115\002\115\002\239\001\110\003\000\000\ +\000\000\000\000\000\000\076\000\000\000\076\000\076\000\115\002\ +\000\000\000\000\115\002\115\002\000\000\115\002\115\002\115\002\ +\000\000\115\002\076\000\000\000\115\002\115\002\000\000\000\000\ +\000\000\153\000\000\000\115\002\000\000\000\000\000\000\000\000\ +\000\000\116\001\000\000\000\000\000\000\076\000\115\002\115\002\ +\110\003\115\002\115\002\115\002\115\002\076\000\165\005\115\002\ +\000\000\000\000\000\000\076\000\000\000\000\000\000\000\115\002\ +\115\002\076\000\115\002\000\000\000\000\000\000\115\002\000\000\ +\154\000\000\000\000\000\057\002\000\000\076\000\059\002\000\000\ +\060\002\076\000\061\002\153\000\000\000\000\000\000\000\154\000\ +\154\000\000\000\156\000\000\000\000\000\076\000\000\000\000\000\ +\076\000\000\000\000\000\000\000\000\000\000\000\000\000\153\000\ +\000\000\000\000\214\001\000\000\153\000\153\000\153\000\094\002\ +\195\000\195\000\153\000\099\002\000\000\000\000\000\000\000\000\ +\153\000\154\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\154\000\000\000\215\001\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\156\000\000\000\000\000\153\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\156\000\000\000\000\000\156\000\079\004\156\000\156\000\156\000\ +\102\001\103\001\000\000\156\000\000\000\215\001\000\000\141\002\ +\000\000\156\000\000\000\000\000\000\000\008\000\155\000\155\000\ +\000\000\000\000\002\002\011\000\153\002\000\000\154\002\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\156\000\000\000\000\000\137\000\000\000\015\000\016\000\155\000\ +\155\000\155\000\000\000\000\000\000\000\000\000\000\000\155\000\ +\155\000\198\002\000\000\201\002\000\000\000\000\000\000\000\000\ +\000\000\022\000\000\000\138\000\139\000\000\000\140\000\141\000\ +\000\000\000\000\028\000\000\000\155\000\155\000\000\000\142\000\ +\143\000\155\000\000\000\000\000\000\000\155\000\144\000\000\000\ +\116\001\000\000\000\000\000\000\000\000\254\003\214\001\000\000\ +\000\000\000\000\000\000\145\000\000\000\239\001\000\000\000\000\ +\239\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\146\000\155\000\239\001\154\000\044\000\000\000\239\001\000\000\ +\000\000\045\000\155\000\000\000\048\000\147\000\000\000\239\001\ +\239\001\239\001\239\001\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\155\000\000\000\000\000\239\001\000\000\ +\000\000\000\000\000\000\209\001\000\000\000\000\000\000\156\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\239\001\000\000\000\000\239\001\154\000\000\000\239\001\ +\239\001\239\001\000\000\000\000\000\000\000\000\239\001\239\001\ +\036\003\000\000\000\000\039\003\000\000\239\001\155\000\000\000\ +\000\000\154\000\000\000\000\000\215\001\000\000\154\000\154\000\ +\154\000\239\001\000\000\239\001\154\000\239\001\000\000\000\000\ +\000\000\000\000\154\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\239\001\000\000\000\000\239\001\000\000\000\000\000\000\ +\239\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\154\000\214\001\000\000\000\000\000\000\000\000\033\002\ +\034\002\035\002\036\002\037\002\038\002\039\002\040\002\041\002\ +\042\002\043\002\044\002\045\002\046\002\047\002\048\002\049\002\ 
+\050\002\051\002\052\002\053\002\000\000\056\002\000\000\000\000\ +\000\000\155\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\113\003\062\002\000\000\251\001\000\000\ +\155\000\155\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\079\002\002\002\156\000\002\002\002\002\002\002\000\000\ +\000\000\000\000\002\002\146\004\000\000\000\000\133\003\002\002\ +\000\000\000\000\000\000\002\002\002\002\002\002\000\000\000\000\ +\000\000\000\000\155\000\000\000\002\002\002\002\002\002\002\002\ +\000\000\000\000\000\000\155\000\000\000\155\000\002\002\000\000\ +\000\000\000\000\002\002\002\002\214\001\000\000\000\000\000\000\ +\000\000\000\000\002\002\002\002\000\000\000\000\000\000\000\000\ +\215\001\000\000\000\000\000\000\000\000\000\000\002\002\000\000\ +\000\000\002\002\000\000\000\000\002\002\002\002\002\002\000\000\ +\002\002\000\000\000\000\002\002\002\002\000\000\155\000\000\000\ +\237\001\000\000\002\002\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\002\002\002\002\000\000\ +\002\002\002\002\002\002\000\000\000\000\156\000\002\002\000\000\ +\000\000\000\000\000\000\000\000\000\000\214\001\002\002\000\000\ +\000\000\002\002\000\000\000\000\000\000\002\002\000\000\000\000\ +\138\005\000\000\000\000\209\001\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\209\001\000\000\003\004\000\000\000\000\ +\209\001\215\002\000\000\000\000\000\000\000\000\217\002\000\000\ +\000\000\000\000\000\000\000\000\000\000\209\001\000\000\209\001\ +\209\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\162\005\000\000\209\001\000\000\156\000\104\003\ +\000\000\112\000\000\000\113\000\114\000\028\000\000\000\115\000\ +\000\000\000\000\115\001\117\000\155\000\000\000\037\004\209\001\ +\000\000\000\000\195\000\195\000\215\001\209\001\209\001\209\001\ +\000\000\000\000\000\000\000\000\000\000\209\001\106\002\000\000\ +\000\000\000\000\000\000\209\001\120\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\121\000\000\000\000\000\067\004\209\001\ +\000\000\000\000\000\000\209\001\116\001\027\003\000\000\122\000\ +\123\000\000\000\033\003\034\003\035\003\000\000\155\000\209\001\ +\000\000\000\000\209\001\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\251\001\000\000\ +\251\001\251\001\155\000\098\004\099\004\155\000\251\001\155\000\ +\155\000\155\000\000\000\251\001\000\000\155\000\000\000\251\001\ +\251\001\251\001\000\000\155\000\000\000\000\000\000\000\000\000\ +\251\001\251\001\251\001\251\001\000\000\000\000\000\000\000\000\ +\000\000\000\000\251\001\000\000\000\000\000\000\215\001\251\001\ +\000\000\000\000\155\000\000\000\000\000\000\000\251\001\251\001\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\131\004\000\000\251\001\000\000\000\000\251\001\000\000\000\000\ +\251\001\251\001\251\001\000\000\251\001\098\003\099\003\100\003\ +\251\001\000\000\000\000\144\004\000\000\000\000\251\001\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\036\002\ +\237\001\251\001\251\001\237\001\251\001\251\001\251\001\000\000\ +\000\000\000\000\000\000\214\002\000\000\237\001\000\000\215\001\ +\000\000\237\001\251\001\130\003\000\000\251\001\000\000\000\000\ +\214\002\251\001\237\001\237\001\237\001\237\001\000\000\000\000\ +\000\000\000\000\000\000\140\003\000\000\000\000\000\000\000\000\ +\000\000\237\001\000\000\214\002\000\000\214\002\214\002\214\002\ +\000\000\214\002\000\000\000\000\214\002\214\002\000\000\000\000\ 
+\000\000\000\000\000\000\000\000\237\001\000\000\000\000\237\001\ +\000\000\155\000\237\001\237\001\237\001\000\000\000\000\000\000\ +\000\000\237\001\237\001\000\000\000\000\000\000\214\002\000\000\ +\237\001\000\000\000\000\209\001\000\000\214\002\000\000\000\000\ +\000\000\000\000\000\000\201\004\237\001\203\004\237\001\000\000\ +\237\001\214\002\214\002\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\237\001\000\000\223\003\237\001\ +\000\000\000\000\000\000\237\001\000\000\000\000\106\002\106\002\ +\106\002\106\002\000\000\000\000\106\002\106\002\106\002\106\002\ +\106\002\106\002\106\002\106\002\106\002\106\002\106\002\106\002\ +\106\002\106\002\106\002\106\002\246\004\000\000\106\002\106\002\ +\106\002\106\002\106\002\106\002\106\002\106\002\000\000\000\000\ +\000\000\000\000\106\002\106\002\000\000\000\000\106\002\106\002\ +\106\002\106\002\106\002\106\002\106\002\106\002\000\000\106\002\ +\106\002\106\002\000\000\106\002\106\002\106\002\106\002\000\000\ +\000\000\106\002\106\002\106\002\000\000\106\002\106\002\106\002\ +\106\002\106\002\106\002\000\000\106\002\106\002\106\002\106\002\ +\106\002\000\000\000\000\000\000\000\000\155\000\106\002\106\002\ +\106\002\106\002\106\002\106\002\106\002\106\002\000\000\106\002\ +\064\002\106\002\106\002\060\004\106\002\106\002\106\002\106\002\ +\106\002\000\000\106\002\106\002\000\000\106\002\106\002\106\002\ +\106\002\000\000\106\002\106\002\000\000\106\002\000\000\000\000\ +\000\000\106\002\000\000\112\000\000\000\113\000\114\000\028\000\ +\000\000\115\000\000\000\000\000\116\000\117\000\000\000\000\000\ +\068\005\000\000\000\000\000\000\000\000\000\000\134\001\036\002\ +\000\000\036\002\036\002\036\002\000\000\118\000\000\000\036\002\ +\000\000\000\000\000\000\000\000\036\002\119\000\120\000\000\000\ +\036\002\036\002\036\002\000\000\000\000\121\000\000\000\000\000\ +\000\000\036\002\036\002\036\002\036\002\090\005\000\000\000\000\ +\000\000\122\000\123\000\036\002\000\000\000\000\000\000\155\000\ +\036\002\000\000\124\004\125\004\126\004\000\000\000\000\036\002\ +\036\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\036\002\000\000\000\000\036\002\117\005\ +\000\000\036\002\036\002\036\002\000\000\036\002\000\000\000\000\ +\036\002\036\002\000\000\000\000\000\000\000\000\130\005\036\002\ +\000\000\124\001\000\000\209\001\000\000\000\000\139\005\000\000\ +\000\000\000\000\036\002\036\002\000\000\036\002\036\002\036\002\ +\209\001\241\000\152\004\153\004\154\004\000\000\000\000\142\005\ +\155\000\000\000\000\000\036\002\000\000\209\001\036\002\209\001\ +\209\001\112\000\036\002\113\000\114\000\028\000\000\000\115\000\ +\000\000\000\000\126\001\117\000\209\001\000\000\000\000\000\000\ +\163\005\164\005\112\000\000\000\113\000\114\000\028\000\178\004\ +\115\000\000\000\174\005\116\000\117\000\000\000\000\000\209\001\ +\000\000\000\000\209\001\000\000\120\000\209\001\209\001\209\001\ +\000\000\184\005\000\000\121\000\118\000\209\001\186\005\187\005\ +\188\005\189\005\000\000\209\001\119\000\060\003\000\000\122\000\ +\123\000\000\000\000\000\000\000\121\000\000\000\000\000\209\001\ +\000\000\000\000\000\000\209\001\000\000\000\000\000\000\152\005\ +\122\000\123\000\000\000\000\000\000\000\000\000\000\000\209\001\ +\000\000\000\000\209\001\000\000\000\000\000\000\000\000\000\000\ +\224\004\225\004\000\000\000\000\000\000\232\004\233\004\234\004\ +\064\002\064\002\064\002\064\002\000\000\247\000\064\002\064\002\ +\064\002\064\002\064\002\064\002\064\002\064\002\064\002\064\002\ 
+\064\002\064\002\064\002\064\002\064\002\064\002\064\002\000\000\ +\064\002\064\002\064\002\064\002\064\002\064\002\064\002\064\002\ +\000\000\000\000\000\000\000\000\064\002\064\002\000\000\000\000\ +\064\002\064\002\064\002\064\002\064\002\064\002\064\002\064\002\ +\000\000\064\002\064\002\064\002\000\000\064\002\064\002\064\002\ +\064\002\000\000\000\000\064\002\064\002\064\002\052\002\064\002\ +\064\002\064\002\064\002\064\002\064\002\000\000\064\002\064\002\ +\064\002\064\002\064\002\000\000\000\000\000\000\000\000\000\000\ +\064\002\064\002\064\002\064\002\064\002\064\002\064\002\064\002\ +\000\000\064\002\000\000\064\002\064\002\000\000\064\002\064\002\ +\064\002\064\002\064\002\000\000\064\002\064\002\000\000\064\002\ +\064\002\064\002\064\002\000\000\064\002\064\002\000\000\064\002\ +\000\000\000\000\000\000\064\002\000\000\000\000\000\000\000\000\ +\000\000\245\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\241\000\241\000\241\000\241\000\000\000\000\000\241\000\ +\241\000\241\000\241\000\241\000\241\000\241\000\241\000\241\000\ +\241\000\241\000\241\000\241\000\241\000\241\000\241\000\241\000\ +\000\000\241\000\241\000\241\000\241\000\241\000\241\000\241\000\ +\241\000\000\000\101\005\102\005\103\005\241\000\241\000\000\000\ +\000\000\241\000\241\000\241\000\241\000\241\000\241\000\241\000\ +\241\000\000\000\241\000\241\000\241\000\000\000\241\000\241\000\ +\241\000\241\000\000\000\000\000\241\000\241\000\241\000\000\000\ +\241\000\241\000\241\000\241\000\241\000\241\000\000\000\241\000\ +\241\000\241\000\241\000\241\000\000\000\000\000\000\000\000\000\ +\000\000\241\000\241\000\241\000\241\000\241\000\241\000\241\000\ +\241\000\000\000\241\000\000\000\241\000\241\000\253\000\241\000\ +\241\000\241\000\241\000\241\000\000\000\241\000\241\000\000\000\ +\241\000\241\000\241\000\241\000\000\000\241\000\241\000\000\000\ +\241\000\000\000\000\000\000\000\241\000\247\000\247\000\247\000\ +\247\000\000\000\000\000\247\000\247\000\247\000\247\000\247\000\ +\247\000\247\000\247\000\247\000\247\000\247\000\247\000\247\000\ +\247\000\247\000\247\000\247\000\000\000\247\000\247\000\247\000\ +\247\000\247\000\247\000\247\000\247\000\000\000\000\000\000\000\ +\000\000\247\000\247\000\000\000\000\000\247\000\247\000\247\000\ +\247\000\247\000\247\000\247\000\247\000\000\000\247\000\247\000\ +\247\000\000\000\247\000\247\000\247\000\247\000\000\000\000\000\ +\247\000\247\000\247\000\000\000\247\000\247\000\247\000\247\000\ +\247\000\247\000\000\000\247\000\247\000\247\000\247\000\247\000\ +\000\000\000\000\000\000\000\000\000\000\247\000\247\000\247\000\ +\247\000\247\000\247\000\247\000\247\000\000\000\247\000\000\000\ +\247\000\247\000\249\000\247\000\247\000\247\000\247\000\247\000\ +\000\000\247\000\247\000\000\000\247\000\247\000\247\000\247\000\ +\000\000\247\000\247\000\000\000\247\000\000\000\000\000\000\000\ +\247\000\245\000\245\000\245\000\245\000\000\000\000\000\245\000\ +\245\000\245\000\245\000\245\000\245\000\245\000\245\000\245\000\ +\245\000\245\000\245\000\245\000\245\000\245\000\245\000\245\000\ +\000\000\245\000\245\000\245\000\245\000\245\000\245\000\245\000\ +\245\000\000\000\000\000\000\000\000\000\245\000\245\000\000\000\ +\000\000\245\000\245\000\245\000\245\000\245\000\245\000\245\000\ +\245\000\000\000\245\000\245\000\245\000\000\000\245\000\245\000\ +\245\000\245\000\000\000\000\000\245\000\245\000\245\000\000\000\ 
+\245\000\245\000\245\000\245\000\245\000\245\000\000\000\245\000\ +\245\000\245\000\245\000\245\000\000\000\000\000\000\000\000\000\ +\000\000\245\000\245\000\245\000\245\000\245\000\245\000\245\000\ +\245\000\000\000\245\000\000\000\245\000\245\000\251\000\245\000\ +\245\000\245\000\245\000\245\000\000\000\245\000\245\000\000\000\ +\245\000\245\000\245\000\245\000\000\000\245\000\245\000\000\000\ +\245\000\000\000\000\000\000\000\245\000\000\000\253\000\253\000\ +\253\000\253\000\000\000\000\000\253\000\253\000\253\000\253\000\ +\253\000\253\000\253\000\253\000\253\000\253\000\253\000\253\000\ +\253\000\253\000\253\000\253\000\253\000\000\000\253\000\253\000\ +\253\000\253\000\253\000\253\000\253\000\253\000\000\000\000\000\ +\000\000\000\000\253\000\253\000\000\000\000\000\253\000\253\000\ +\253\000\253\000\253\000\253\000\253\000\253\000\000\000\253\000\ +\253\000\253\000\000\000\253\000\253\000\253\000\253\000\000\000\ +\000\000\253\000\253\000\253\000\000\000\253\000\253\000\253\000\ +\253\000\253\000\253\000\000\000\253\000\253\000\253\000\253\000\ +\253\000\000\000\000\000\000\000\000\000\000\000\253\000\253\000\ +\253\000\253\000\253\000\253\000\253\000\253\000\000\000\253\000\ +\000\000\253\000\253\000\003\001\253\000\253\000\253\000\253\000\ +\253\000\000\000\253\000\253\000\000\000\253\000\253\000\253\000\ +\253\000\000\000\253\000\253\000\000\000\253\000\000\000\000\000\ +\000\000\253\000\249\000\249\000\249\000\249\000\000\000\000\000\ +\249\000\249\000\249\000\249\000\249\000\249\000\249\000\249\000\ +\249\000\249\000\249\000\249\000\249\000\249\000\249\000\249\000\ +\249\000\000\000\249\000\249\000\249\000\249\000\249\000\249\000\ +\249\000\249\000\000\000\000\000\000\000\000\000\249\000\249\000\ +\000\000\000\000\249\000\249\000\249\000\249\000\249\000\249\000\ +\249\000\249\000\000\000\249\000\249\000\249\000\000\000\249\000\ +\249\000\249\000\249\000\000\000\000\000\249\000\249\000\249\000\ +\000\000\249\000\249\000\249\000\249\000\249\000\249\000\000\000\ +\249\000\249\000\249\000\249\000\249\000\000\000\000\000\000\000\ +\000\000\000\000\249\000\249\000\249\000\249\000\249\000\249\000\ +\249\000\249\000\000\000\249\000\000\000\249\000\249\000\255\000\ +\249\000\249\000\249\000\249\000\249\000\000\000\249\000\249\000\ +\000\000\249\000\249\000\249\000\249\000\000\000\249\000\249\000\ +\000\000\249\000\000\000\000\000\000\000\249\000\251\000\251\000\ +\251\000\251\000\000\000\000\000\251\000\251\000\251\000\251\000\ +\251\000\251\000\251\000\251\000\251\000\251\000\251\000\251\000\ +\251\000\251\000\251\000\251\000\251\000\000\000\251\000\251\000\ +\251\000\251\000\251\000\251\000\251\000\251\000\000\000\000\000\ +\000\000\000\000\251\000\251\000\000\000\000\000\251\000\251\000\ +\251\000\251\000\251\000\251\000\251\000\251\000\000\000\251\000\ +\251\000\251\000\000\000\251\000\251\000\251\000\251\000\000\000\ +\000\000\251\000\251\000\251\000\000\000\251\000\251\000\251\000\ +\251\000\251\000\251\000\000\000\251\000\251\000\251\000\251\000\ +\251\000\000\000\000\000\000\000\000\000\000\000\251\000\251\000\ +\251\000\251\000\251\000\251\000\251\000\251\000\000\000\251\000\ +\000\000\251\000\251\000\001\001\251\000\251\000\251\000\251\000\ +\251\000\000\000\251\000\251\000\000\000\251\000\251\000\251\000\ +\251\000\000\000\251\000\251\000\000\000\251\000\000\000\000\000\ +\000\000\251\000\000\000\003\001\003\001\003\001\003\001\000\000\ +\000\000\003\001\003\001\003\001\003\001\003\001\003\001\003\001\ +\003\001\003\001\003\001\003\001\003\001\003\001\003\001\003\001\ 
+\003\001\003\001\000\000\003\001\003\001\003\001\003\001\003\001\ +\003\001\003\001\003\001\000\000\000\000\000\000\000\000\003\001\ +\003\001\000\000\000\000\003\001\003\001\003\001\003\001\003\001\ +\003\001\003\001\003\001\000\000\003\001\003\001\003\001\000\000\ +\003\001\003\001\003\001\003\001\000\000\000\000\003\001\003\001\ +\003\001\000\000\003\001\003\001\003\001\003\001\003\001\003\001\ +\000\000\003\001\003\001\003\001\003\001\003\001\000\000\000\000\ +\000\000\000\000\000\000\003\001\003\001\003\001\003\001\003\001\ +\003\001\003\001\003\001\000\000\003\001\000\000\003\001\003\001\ +\030\001\003\001\003\001\003\001\003\001\003\001\000\000\003\001\ +\003\001\000\000\003\001\003\001\003\001\003\001\000\000\003\001\ +\003\001\000\000\003\001\000\000\000\000\000\000\003\001\255\000\ +\255\000\255\000\255\000\000\000\000\000\255\000\255\000\255\000\ +\255\000\255\000\255\000\255\000\255\000\255\000\255\000\255\000\ +\255\000\255\000\255\000\255\000\255\000\255\000\000\000\255\000\ +\255\000\255\000\255\000\255\000\255\000\255\000\255\000\000\000\ +\000\000\000\000\000\000\255\000\255\000\000\000\000\000\255\000\ +\255\000\255\000\255\000\255\000\255\000\255\000\255\000\000\000\ +\255\000\255\000\255\000\000\000\255\000\255\000\255\000\255\000\ +\000\000\000\000\255\000\255\000\255\000\000\000\255\000\255\000\ +\255\000\255\000\255\000\255\000\000\000\255\000\255\000\255\000\ +\255\000\255\000\000\000\000\000\000\000\000\000\000\000\255\000\ +\255\000\255\000\255\000\255\000\255\000\255\000\255\000\000\000\ +\255\000\000\000\255\000\255\000\039\001\255\000\255\000\255\000\ +\255\000\255\000\000\000\255\000\255\000\000\000\255\000\255\000\ +\255\000\255\000\000\000\255\000\255\000\000\000\255\000\000\000\ +\000\000\000\000\255\000\001\001\001\001\001\001\001\001\000\000\ +\000\000\001\001\001\001\001\001\001\001\001\001\001\001\001\001\ +\001\001\001\001\001\001\001\001\001\001\001\001\001\001\001\001\ +\001\001\001\001\000\000\001\001\001\001\001\001\001\001\001\001\ +\001\001\001\001\001\001\000\000\000\000\000\000\000\000\001\001\ +\001\001\000\000\000\000\001\001\001\001\001\001\001\001\001\001\ +\001\001\001\001\001\001\000\000\001\001\001\001\001\001\000\000\ +\001\001\001\001\001\001\001\001\000\000\000\000\001\001\001\001\ +\001\001\000\000\001\001\001\001\001\001\001\001\001\001\001\001\ +\000\000\001\001\001\001\001\001\001\001\001\001\000\000\000\000\ +\000\000\000\000\000\000\001\001\001\001\001\001\001\001\001\001\ +\001\001\001\001\001\001\000\000\001\001\000\000\001\001\001\001\ +\041\001\001\001\001\001\001\001\001\001\001\001\000\000\001\001\ +\001\001\000\000\001\001\001\001\001\001\001\001\000\000\001\001\ +\001\001\000\000\001\001\000\000\000\000\000\000\001\001\000\000\ +\030\001\030\001\030\001\030\001\000\000\000\000\030\001\030\001\ +\030\001\030\001\030\001\030\001\030\001\030\001\030\001\030\001\ +\030\001\030\001\030\001\030\001\030\001\030\001\000\000\000\000\ +\030\001\030\001\030\001\030\001\030\001\030\001\030\001\030\001\ +\000\000\000\000\000\000\000\000\030\001\030\001\000\000\000\000\ +\030\001\030\001\030\001\030\001\030\001\030\001\030\001\000\000\ +\000\000\030\001\030\001\030\001\000\000\030\001\030\001\030\001\ +\030\001\000\000\000\000\030\001\030\001\030\001\000\000\030\001\ +\030\001\030\001\030\001\030\001\030\001\000\000\030\001\030\001\ +\030\001\030\001\030\001\000\000\000\000\000\000\000\000\000\000\ +\030\001\030\001\030\001\030\001\030\001\030\001\030\001\030\001\ +\000\000\030\001\000\000\030\001\030\001\044\001\030\001\030\001\ 
+\030\001\030\001\030\001\000\000\030\001\030\001\000\000\030\001\ +\030\001\030\001\030\001\000\000\030\001\030\001\000\000\030\001\ +\000\000\000\000\000\000\030\001\039\001\039\001\039\001\039\001\ +\000\000\000\000\039\001\039\001\039\001\039\001\039\001\039\001\ +\039\001\039\001\039\001\039\001\039\001\039\001\039\001\039\001\ +\039\001\039\001\000\000\000\000\039\001\039\001\039\001\039\001\ +\039\001\039\001\039\001\039\001\000\000\000\000\000\000\000\000\ +\039\001\039\001\000\000\000\000\039\001\039\001\039\001\039\001\ +\039\001\039\001\039\001\000\000\000\000\039\001\039\001\039\001\ +\000\000\039\001\039\001\039\001\039\001\000\000\000\000\039\001\ +\039\001\039\001\000\000\039\001\039\001\039\001\039\001\039\001\ +\039\001\000\000\039\001\039\001\039\001\039\001\039\001\000\000\ +\000\000\000\000\000\000\000\000\039\001\039\001\039\001\039\001\ +\039\001\039\001\039\001\039\001\000\000\039\001\000\000\039\001\ +\039\001\233\000\039\001\039\001\039\001\000\000\000\000\000\000\ +\039\001\039\001\000\000\039\001\039\001\039\001\039\001\000\000\ +\039\001\039\001\000\000\039\001\000\000\000\000\000\000\039\001\ +\041\001\041\001\041\001\041\001\000\000\000\000\041\001\041\001\ +\041\001\041\001\041\001\041\001\041\001\041\001\041\001\041\001\ +\041\001\041\001\041\001\041\001\041\001\041\001\000\000\000\000\ +\041\001\041\001\041\001\041\001\041\001\041\001\041\001\041\001\ +\000\000\000\000\000\000\000\000\041\001\041\001\000\000\000\000\ +\041\001\041\001\041\001\041\001\041\001\041\001\041\001\000\000\ +\000\000\041\001\041\001\041\001\000\000\041\001\041\001\041\001\ +\041\001\000\000\000\000\041\001\041\001\041\001\000\000\041\001\ +\041\001\041\001\041\001\041\001\041\001\000\000\041\001\041\001\ +\041\001\041\001\041\001\000\000\000\000\000\000\000\000\000\000\ +\041\001\041\001\041\001\041\001\041\001\041\001\041\001\041\001\ +\000\000\041\001\000\000\041\001\041\001\234\000\041\001\041\001\ +\041\001\000\000\000\000\000\000\041\001\041\001\000\000\041\001\ +\041\001\041\001\041\001\000\000\041\001\041\001\000\000\041\001\ +\000\000\000\000\000\000\041\001\000\000\044\001\044\001\044\001\ +\044\001\000\000\000\000\044\001\044\001\044\001\044\001\044\001\ +\044\001\044\001\044\001\044\001\044\001\044\001\044\001\044\001\ +\044\001\044\001\044\001\000\000\000\000\044\001\044\001\044\001\ +\044\001\044\001\044\001\044\001\044\001\000\000\000\000\000\000\ +\000\000\044\001\044\001\000\000\000\000\044\001\044\001\044\001\ +\044\001\044\001\044\001\044\001\000\000\000\000\044\001\044\001\ +\044\001\000\000\044\001\044\001\044\001\044\001\000\000\000\000\ +\044\001\044\001\044\001\000\000\044\001\044\001\044\001\044\001\ +\044\001\044\001\000\000\044\001\044\001\044\001\044\001\044\001\ +\000\000\000\000\000\000\000\000\000\000\044\001\044\001\044\001\ +\044\001\044\001\044\001\044\001\044\001\000\000\044\001\000\000\ +\044\001\044\001\173\000\044\001\044\001\044\001\000\000\000\000\ +\000\000\044\001\044\001\000\000\044\001\044\001\044\001\044\001\ +\000\000\044\001\044\001\000\000\044\001\000\000\000\000\000\000\ +\044\001\233\000\233\000\233\000\233\000\000\000\000\000\000\000\ +\000\000\233\000\233\000\233\000\000\000\000\000\233\000\233\000\ +\233\000\233\000\233\000\233\000\233\000\233\000\233\000\233\000\ +\000\000\233\000\233\000\233\000\233\000\233\000\233\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\233\000\233\000\000\000\ +\000\000\233\000\233\000\233\000\233\000\233\000\233\000\233\000\ +\233\000\000\000\233\000\000\000\233\000\000\000\000\000\000\000\ 
+\000\000\000\000\000\000\000\000\000\000\233\000\233\000\000\000\ +\233\000\000\000\000\000\233\000\233\000\233\000\000\000\233\000\ +\233\000\233\000\233\000\233\000\000\000\000\000\000\000\000\000\ +\000\000\233\000\233\000\233\000\233\000\233\000\233\000\233\000\ +\000\000\000\000\233\000\000\000\233\000\233\000\174\000\233\000\ +\233\000\233\000\233\000\233\000\000\000\233\000\000\000\000\000\ +\233\000\233\000\233\000\000\000\000\000\233\000\000\000\000\000\ +\233\000\000\000\000\000\000\000\233\000\234\000\234\000\234\000\ +\234\000\000\000\000\000\000\000\000\000\234\000\234\000\234\000\ +\000\000\000\000\234\000\234\000\234\000\234\000\234\000\234\000\ +\234\000\234\000\234\000\234\000\000\000\234\000\234\000\234\000\ +\234\000\234\000\234\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\234\000\234\000\000\000\000\000\234\000\234\000\234\000\ +\234\000\234\000\234\000\234\000\234\000\000\000\234\000\000\000\ +\234\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\234\000\234\000\000\000\234\000\000\000\000\000\234\000\ +\234\000\234\000\000\000\234\000\234\000\234\000\234\000\234\000\ +\000\000\000\000\000\000\000\000\000\000\234\000\234\000\234\000\ +\234\000\234\000\234\000\234\000\000\000\000\000\234\000\000\000\ +\234\000\234\000\186\000\234\000\234\000\234\000\234\000\234\000\ +\000\000\234\000\000\000\000\000\234\000\234\000\234\000\000\000\ +\000\000\234\000\000\000\000\000\234\000\000\000\000\000\000\000\ +\234\000\000\000\173\000\173\000\173\000\173\000\000\000\000\000\ +\000\000\000\000\173\000\173\000\173\000\000\000\000\000\173\000\ +\173\000\173\000\173\000\173\000\173\000\173\000\173\000\173\000\ +\000\000\000\000\173\000\173\000\173\000\173\000\173\000\173\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\173\000\173\000\ +\000\000\000\000\173\000\173\000\173\000\173\000\173\000\173\000\ +\173\000\000\000\000\000\173\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\173\000\173\000\ +\000\000\173\000\000\000\000\000\173\000\173\000\173\000\000\000\ +\173\000\173\000\173\000\173\000\173\000\000\000\000\000\000\000\ +\000\000\000\000\173\000\000\000\173\000\173\000\173\000\173\000\ +\173\000\000\000\000\000\000\000\000\000\173\000\173\000\187\000\ +\173\000\173\000\173\000\000\000\000\000\000\000\173\000\000\000\ +\000\000\173\000\000\000\173\000\000\000\000\000\173\000\000\000\ +\000\000\173\000\000\000\000\000\000\000\173\000\174\000\174\000\ +\174\000\174\000\000\000\000\000\000\000\000\000\174\000\174\000\ +\174\000\000\000\000\000\174\000\174\000\174\000\174\000\174\000\ +\174\000\174\000\174\000\174\000\000\000\000\000\174\000\174\000\ +\174\000\174\000\174\000\174\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\174\000\174\000\000\000\000\000\174\000\174\000\ +\174\000\174\000\174\000\174\000\174\000\000\000\000\000\174\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\174\000\174\000\000\000\174\000\000\000\000\000\ +\174\000\174\000\174\000\000\000\174\000\174\000\174\000\174\000\ +\174\000\000\000\000\000\000\000\000\000\000\000\174\000\000\000\ +\174\000\174\000\174\000\174\000\174\000\000\000\000\000\000\000\ +\000\000\174\000\174\000\225\000\174\000\174\000\174\000\000\000\ +\000\000\000\000\174\000\000\000\000\000\174\000\000\000\174\000\ +\000\000\000\000\174\000\000\000\000\000\174\000\000\000\000\000\ +\000\000\174\000\186\000\186\000\186\000\186\000\000\000\000\000\ +\000\000\000\000\186\000\186\000\186\000\000\000\000\000\186\000\ 
+\186\000\186\000\186\000\186\000\186\000\186\000\186\000\186\000\ +\000\000\000\000\186\000\186\000\186\000\186\000\186\000\186\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\186\000\186\000\ +\000\000\000\000\186\000\186\000\186\000\186\000\186\000\186\000\ +\186\000\000\000\000\000\186\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\186\000\186\000\ +\000\000\186\000\000\000\000\000\186\000\186\000\186\000\000\000\ +\186\000\186\000\186\000\186\000\186\000\000\000\000\000\000\000\ +\000\000\000\000\186\000\000\000\186\000\186\000\186\000\186\000\ +\186\000\000\000\000\000\000\000\000\000\186\000\186\000\226\000\ +\186\000\186\000\186\000\000\000\000\000\000\000\186\000\000\000\ +\000\000\186\000\000\000\186\000\000\000\000\000\186\000\000\000\ +\000\000\186\000\000\000\000\000\000\000\186\000\000\000\187\000\ +\187\000\187\000\187\000\000\000\000\000\000\000\000\000\187\000\ +\187\000\187\000\000\000\000\000\187\000\187\000\187\000\187\000\ +\187\000\187\000\187\000\187\000\187\000\000\000\000\000\187\000\ +\187\000\187\000\187\000\187\000\187\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\187\000\187\000\000\000\000\000\187\000\ +\187\000\187\000\187\000\187\000\187\000\187\000\000\000\000\000\ +\187\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\187\000\187\000\000\000\187\000\000\000\ +\000\000\187\000\187\000\187\000\000\000\187\000\187\000\187\000\ +\187\000\187\000\000\000\000\000\000\000\000\000\000\000\187\000\ +\000\000\187\000\187\000\187\000\187\000\187\000\000\000\000\000\ +\000\000\000\000\187\000\187\000\185\000\187\000\187\000\187\000\ +\000\000\000\000\000\000\187\000\000\000\000\000\187\000\000\000\ +\187\000\000\000\000\000\187\000\000\000\000\000\187\000\000\000\ +\000\000\000\000\187\000\225\000\225\000\225\000\225\000\000\000\ +\000\000\000\000\000\000\225\000\225\000\225\000\000\000\000\000\ +\225\000\225\000\225\000\225\000\225\000\225\000\225\000\225\000\ +\225\000\000\000\000\000\225\000\225\000\225\000\225\000\225\000\ +\225\000\000\000\000\000\000\000\000\000\000\000\000\000\225\000\ +\225\000\000\000\000\000\225\000\225\000\225\000\225\000\225\000\ +\225\000\225\000\000\000\000\000\225\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\225\000\ +\225\000\000\000\225\000\000\000\000\000\225\000\225\000\225\000\ +\000\000\225\000\225\000\225\000\225\000\225\000\000\000\000\000\ +\000\000\000\000\000\000\225\000\000\000\225\000\225\000\225\000\ +\225\000\225\000\000\000\000\000\000\000\000\000\225\000\225\000\ +\196\000\225\000\225\000\225\000\000\000\000\000\000\000\225\000\ +\000\000\000\000\225\000\000\000\225\000\000\000\000\000\225\000\ +\000\000\000\000\225\000\000\000\000\000\000\000\225\000\226\000\ +\226\000\226\000\226\000\000\000\000\000\000\000\000\000\226\000\ +\226\000\226\000\000\000\000\000\226\000\226\000\226\000\226\000\ +\226\000\226\000\226\000\226\000\226\000\000\000\000\000\226\000\ +\226\000\226\000\226\000\226\000\226\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\226\000\226\000\000\000\000\000\226\000\ +\226\000\226\000\226\000\226\000\226\000\226\000\000\000\000\000\ +\226\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\226\000\226\000\000\000\226\000\000\000\ +\000\000\226\000\226\000\226\000\000\000\226\000\226\000\226\000\ +\226\000\226\000\000\000\000\000\000\000\000\000\000\000\226\000\ +\000\000\226\000\226\000\226\000\226\000\226\000\000\000\000\000\ 
+\000\000\000\000\226\000\226\000\197\000\226\000\226\000\226\000\ +\000\000\000\000\000\000\226\000\000\000\000\000\226\000\000\000\ +\226\000\000\000\000\000\226\000\000\000\000\000\226\000\000\000\ +\000\000\000\000\226\000\000\000\185\000\185\000\185\000\185\000\ +\000\000\000\000\000\000\000\000\185\000\185\000\185\000\000\000\ +\000\000\185\000\185\000\185\000\185\000\185\000\000\000\185\000\ +\185\000\185\000\000\000\000\000\185\000\185\000\185\000\185\000\ +\185\000\185\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\185\000\185\000\000\000\000\000\185\000\185\000\185\000\185\000\ +\185\000\185\000\185\000\000\000\000\000\185\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\185\000\185\000\000\000\185\000\000\000\000\000\185\000\185\000\ +\185\000\000\000\185\000\185\000\185\000\185\000\185\000\000\000\ +\000\000\000\000\000\000\000\000\185\000\000\000\185\000\185\000\ +\185\000\185\000\185\000\000\000\000\000\000\000\000\000\185\000\ +\185\000\204\000\185\000\185\000\185\000\000\000\000\000\000\000\ +\185\000\000\000\000\000\185\000\000\000\185\000\000\000\000\000\ +\185\000\000\000\000\000\185\000\000\000\000\000\000\000\185\000\ +\196\000\196\000\196\000\196\000\000\000\000\000\000\000\000\000\ +\196\000\196\000\196\000\000\000\000\000\196\000\196\000\196\000\ +\196\000\196\000\196\000\196\000\196\000\196\000\000\000\000\000\ +\196\000\196\000\196\000\196\000\196\000\196\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\196\000\196\000\000\000\000\000\ +\196\000\196\000\196\000\196\000\196\000\196\000\000\000\000\000\ +\000\000\196\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\196\000\196\000\000\000\196\000\ +\000\000\000\000\196\000\196\000\196\000\000\000\196\000\196\000\ +\196\000\196\000\196\000\000\000\000\000\000\000\000\000\000\000\ +\196\000\000\000\196\000\196\000\196\000\196\000\196\000\000\000\ +\000\000\000\000\000\000\196\000\196\000\203\000\196\000\196\000\ +\196\000\000\000\000\000\000\000\196\000\000\000\000\000\196\000\ +\000\000\196\000\000\000\000\000\196\000\000\000\000\000\196\000\ +\000\000\000\000\000\000\196\000\197\000\197\000\197\000\197\000\ +\000\000\000\000\000\000\000\000\197\000\197\000\197\000\000\000\ +\000\000\197\000\197\000\197\000\197\000\197\000\197\000\197\000\ +\197\000\197\000\000\000\000\000\197\000\197\000\197\000\197\000\ +\197\000\197\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\197\000\197\000\000\000\000\000\197\000\197\000\197\000\197\000\ +\197\000\197\000\000\000\000\000\000\000\197\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\197\000\197\000\000\000\197\000\000\000\000\000\197\000\197\000\ +\197\000\000\000\197\000\197\000\197\000\197\000\197\000\000\000\ +\000\000\000\000\000\000\000\000\197\000\000\000\197\000\197\000\ +\197\000\197\000\197\000\000\000\000\000\000\000\000\000\197\000\ +\197\000\179\000\197\000\197\000\197\000\000\000\000\000\000\000\ +\197\000\000\000\000\000\197\000\000\000\197\000\000\000\000\000\ +\197\000\000\000\000\000\197\000\000\000\000\000\000\000\197\000\ +\000\000\204\000\204\000\204\000\204\000\000\000\000\000\000\000\ +\000\000\204\000\204\000\204\000\000\000\000\000\204\000\204\000\ +\204\000\204\000\204\000\204\000\204\000\204\000\204\000\000\000\ +\000\000\204\000\204\000\204\000\204\000\204\000\204\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\204\000\204\000\000\000\ +\000\000\204\000\204\000\204\000\204\000\204\000\204\000\000\000\ 
+\000\000\000\000\204\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\204\000\204\000\000\000\ +\204\000\000\000\000\000\204\000\204\000\204\000\000\000\204\000\ +\204\000\204\000\204\000\204\000\000\000\000\000\000\000\000\000\ +\000\000\204\000\000\000\204\000\204\000\204\000\204\000\204\000\ +\000\000\000\000\000\000\000\000\204\000\204\000\182\000\204\000\ +\204\000\204\000\000\000\000\000\000\000\204\000\000\000\000\000\ +\204\000\000\000\204\000\000\000\000\000\204\000\000\000\000\000\ +\204\000\000\000\000\000\000\000\204\000\203\000\203\000\203\000\ +\203\000\000\000\000\000\000\000\000\000\203\000\203\000\203\000\ +\000\000\000\000\203\000\203\000\203\000\203\000\203\000\203\000\ +\203\000\203\000\203\000\000\000\000\000\203\000\203\000\203\000\ +\203\000\203\000\203\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\203\000\203\000\000\000\000\000\203\000\203\000\203\000\ +\203\000\203\000\203\000\000\000\000\000\000\000\203\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\203\000\203\000\000\000\203\000\000\000\000\000\203\000\ +\203\000\203\000\000\000\203\000\203\000\203\000\203\000\203\000\ +\000\000\000\000\000\000\000\000\000\000\203\000\000\000\203\000\ +\203\000\203\000\203\000\203\000\000\000\000\000\000\000\000\000\ +\203\000\203\000\183\000\203\000\203\000\203\000\000\000\000\000\ +\000\000\203\000\000\000\000\000\203\000\000\000\203\000\000\000\ +\000\000\203\000\000\000\000\000\203\000\000\000\000\000\000\000\ +\203\000\179\000\179\000\179\000\179\000\000\000\000\000\000\000\ +\000\000\000\000\179\000\179\000\000\000\000\000\179\000\179\000\ +\179\000\179\000\179\000\179\000\179\000\179\000\179\000\000\000\ +\000\000\179\000\179\000\179\000\179\000\179\000\179\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\179\000\179\000\000\000\ +\000\000\179\000\179\000\179\000\179\000\179\000\179\000\179\000\ +\000\000\000\000\179\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\179\000\179\000\000\000\ +\179\000\000\000\000\000\179\000\179\000\179\000\000\000\179\000\ +\179\000\179\000\179\000\179\000\000\000\000\000\000\000\000\000\ +\000\000\179\000\000\000\179\000\179\000\179\000\179\000\179\000\ +\000\000\000\000\000\000\000\000\179\000\179\000\195\000\179\000\ +\179\000\179\000\000\000\000\000\000\000\179\000\000\000\000\000\ +\179\000\000\000\179\000\000\000\000\000\179\000\000\000\000\000\ +\179\000\000\000\000\000\000\000\179\000\000\000\182\000\182\000\ +\182\000\182\000\000\000\000\000\000\000\000\000\000\000\182\000\ +\182\000\000\000\000\000\182\000\182\000\182\000\182\000\182\000\ +\182\000\182\000\182\000\182\000\000\000\000\000\182\000\182\000\ +\182\000\182\000\182\000\182\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\182\000\182\000\000\000\000\000\182\000\182\000\ +\182\000\182\000\182\000\182\000\182\000\000\000\000\000\182\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\182\000\182\000\000\000\182\000\000\000\000\000\ +\182\000\182\000\182\000\000\000\182\000\182\000\182\000\182\000\ +\182\000\000\000\000\000\000\000\000\000\000\000\182\000\000\000\ +\182\000\182\000\182\000\182\000\182\000\000\000\000\000\000\000\ +\000\000\182\000\182\000\201\000\182\000\182\000\182\000\000\000\ +\000\000\000\000\182\000\000\000\000\000\182\000\000\000\182\000\ +\000\000\000\000\182\000\000\000\000\000\182\000\000\000\000\000\ +\000\000\182\000\183\000\183\000\183\000\183\000\000\000\000\000\ 
+\000\000\000\000\000\000\183\000\183\000\000\000\000\000\183\000\ +\183\000\183\000\183\000\183\000\183\000\183\000\183\000\183\000\ +\000\000\000\000\183\000\183\000\183\000\183\000\183\000\183\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\183\000\183\000\ +\000\000\000\000\183\000\183\000\183\000\183\000\183\000\183\000\ +\183\000\000\000\000\000\183\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\183\000\183\000\ +\000\000\183\000\000\000\000\000\183\000\183\000\183\000\000\000\ +\183\000\183\000\183\000\183\000\183\000\000\000\000\000\000\000\ +\000\000\000\000\183\000\000\000\183\000\183\000\183\000\183\000\ +\183\000\000\000\000\000\000\000\000\000\183\000\183\000\202\000\ +\183\000\183\000\183\000\000\000\000\000\000\000\183\000\000\000\ +\000\000\183\000\000\000\183\000\000\000\000\000\183\000\000\000\ +\000\000\183\000\000\000\000\000\000\000\183\000\195\000\195\000\ +\195\000\195\000\000\000\000\000\000\000\000\000\195\000\195\000\ +\195\000\000\000\000\000\195\000\195\000\195\000\195\000\195\000\ +\195\000\195\000\195\000\195\000\000\000\000\000\195\000\195\000\ +\195\000\195\000\195\000\195\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\195\000\195\000\000\000\000\000\195\000\195\000\ +\195\000\195\000\195\000\000\000\000\000\000\000\000\000\195\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\195\000\195\000\000\000\195\000\000\000\000\000\ +\195\000\195\000\195\000\000\000\195\000\195\000\195\000\195\000\ +\195\000\000\000\000\000\000\000\000\000\000\000\195\000\000\000\ +\195\000\000\000\195\000\195\000\195\000\000\000\000\000\000\000\ +\000\000\195\000\195\000\198\000\195\000\195\000\195\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\195\000\000\000\195\000\ +\000\000\000\000\195\000\000\000\000\000\195\000\000\000\000\000\ +\000\000\195\000\000\000\201\000\201\000\201\000\201\000\000\000\ +\000\000\000\000\000\000\201\000\201\000\201\000\000\000\000\000\ +\201\000\201\000\201\000\201\000\201\000\201\000\201\000\201\000\ +\201\000\000\000\000\000\201\000\201\000\201\000\201\000\201\000\ +\201\000\000\000\000\000\000\000\000\000\000\000\000\000\201\000\ +\201\000\000\000\000\000\201\000\201\000\201\000\201\000\201\000\ +\000\000\000\000\000\000\000\000\201\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\201\000\ +\201\000\000\000\201\000\000\000\000\000\201\000\201\000\201\000\ +\000\000\201\000\201\000\201\000\201\000\201\000\000\000\000\000\ +\000\000\000\000\000\000\201\000\000\000\201\000\000\000\201\000\ +\201\000\201\000\000\000\000\000\000\000\000\000\201\000\201\000\ +\199\000\201\000\201\000\201\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\201\000\000\000\201\000\000\000\000\000\201\000\ +\000\000\000\000\201\000\000\000\000\000\000\000\201\000\202\000\ +\202\000\202\000\202\000\000\000\000\000\000\000\000\000\202\000\ +\202\000\202\000\000\000\000\000\202\000\202\000\202\000\202\000\ +\202\000\202\000\202\000\202\000\202\000\000\000\000\000\202\000\ +\202\000\202\000\202\000\202\000\202\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\202\000\202\000\000\000\000\000\202\000\ +\202\000\202\000\202\000\202\000\000\000\000\000\000\000\000\000\ +\202\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\202\000\202\000\000\000\202\000\000\000\ +\000\000\202\000\202\000\202\000\000\000\202\000\202\000\202\000\ +\202\000\202\000\000\000\000\000\000\000\000\000\000\000\202\000\ 
+\000\000\202\000\000\000\202\000\202\000\202\000\000\000\000\000\ +\000\000\000\000\202\000\202\000\200\000\202\000\202\000\202\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\202\000\000\000\ +\202\000\000\000\000\000\202\000\000\000\000\000\202\000\000\000\ +\000\000\000\000\202\000\198\000\198\000\198\000\198\000\000\000\ +\000\000\000\000\000\000\198\000\198\000\198\000\000\000\000\000\ +\198\000\198\000\198\000\198\000\198\000\198\000\198\000\198\000\ +\198\000\000\000\000\000\198\000\198\000\198\000\198\000\198\000\ +\198\000\000\000\000\000\000\000\000\000\000\000\000\000\198\000\ +\198\000\000\000\000\000\198\000\198\000\198\000\198\000\198\000\ +\000\000\000\000\000\000\000\000\198\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\198\000\ +\198\000\000\000\198\000\000\000\000\000\198\000\198\000\198\000\ +\000\000\198\000\198\000\198\000\198\000\198\000\000\000\000\000\ +\000\000\000\000\000\000\198\000\000\000\198\000\000\000\198\000\ +\198\000\198\000\000\000\000\000\000\000\000\000\198\000\198\000\ +\153\000\198\000\198\000\198\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\198\000\000\000\198\000\000\000\000\000\198\000\ +\000\000\000\000\198\000\000\000\000\000\000\000\198\000\000\000\ +\199\000\199\000\199\000\199\000\000\000\000\000\000\000\000\000\ +\199\000\199\000\199\000\000\000\000\000\199\000\199\000\199\000\ +\199\000\199\000\199\000\199\000\199\000\199\000\000\000\000\000\ +\199\000\199\000\199\000\199\000\199\000\199\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\199\000\199\000\000\000\000\000\ +\199\000\199\000\199\000\199\000\199\000\000\000\000\000\000\000\ +\000\000\199\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\199\000\199\000\000\000\199\000\ +\000\000\000\000\199\000\199\000\199\000\000\000\199\000\199\000\ +\199\000\199\000\199\000\000\000\000\000\000\000\000\000\000\000\ +\199\000\000\000\199\000\000\000\199\000\199\000\199\000\000\000\ +\000\000\000\000\000\000\199\000\199\000\192\000\199\000\199\000\ +\199\000\000\000\000\000\000\000\000\000\000\000\000\000\199\000\ +\000\000\199\000\000\000\000\000\199\000\000\000\000\000\199\000\ +\000\000\000\000\000\000\199\000\200\000\200\000\200\000\200\000\ +\000\000\000\000\000\000\000\000\200\000\200\000\200\000\000\000\ +\000\000\200\000\200\000\200\000\200\000\200\000\200\000\200\000\ +\200\000\200\000\000\000\000\000\200\000\200\000\200\000\200\000\ +\200\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\200\000\200\000\000\000\000\000\200\000\200\000\200\000\200\000\ +\200\000\000\000\000\000\000\000\000\000\200\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\200\000\200\000\000\000\200\000\000\000\000\000\200\000\200\000\ +\200\000\000\000\200\000\200\000\200\000\200\000\200\000\000\000\ +\000\000\000\000\000\000\000\000\200\000\000\000\200\000\000\000\ +\200\000\200\000\200\000\000\000\000\000\000\000\000\000\200\000\ +\200\000\205\000\200\000\200\000\200\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\200\000\000\000\200\000\000\000\000\000\ +\200\000\000\000\000\000\200\000\000\000\000\000\000\000\200\000\ +\153\000\153\000\153\000\153\000\000\000\000\000\000\000\000\000\ +\153\000\153\000\153\000\000\000\000\000\153\000\153\000\153\000\ +\153\000\153\000\153\000\153\000\153\000\153\000\000\000\000\000\ +\153\000\153\000\153\000\153\000\153\000\153\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\153\000\153\000\000\000\000\000\ 
+\153\000\153\000\153\000\153\000\153\000\153\000\153\000\000\000\ +\000\000\153\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\153\000\153\000\000\000\000\000\ +\000\000\000\000\153\000\153\000\153\000\000\000\153\000\000\000\ +\000\000\153\000\153\000\000\000\000\000\000\000\000\000\000\000\ +\153\000\000\000\153\000\000\000\000\000\000\000\153\000\000\000\ +\000\000\000\000\000\000\153\000\153\000\207\000\153\000\153\000\ +\153\000\000\000\000\000\000\000\153\000\000\000\000\000\153\000\ +\000\000\153\000\000\000\000\000\153\000\000\000\000\000\153\000\ +\000\000\000\000\000\000\153\000\000\000\192\000\192\000\192\000\ +\192\000\000\000\000\000\000\000\000\000\192\000\192\000\192\000\ +\000\000\000\000\192\000\192\000\000\000\192\000\192\000\192\000\ +\192\000\192\000\192\000\000\000\000\000\192\000\192\000\192\000\ +\192\000\192\000\192\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\192\000\192\000\000\000\000\000\192\000\192\000\192\000\ +\192\000\000\000\000\000\000\000\000\000\000\000\192\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\192\000\192\000\000\000\192\000\000\000\000\000\192\000\ +\192\000\192\000\000\000\192\000\000\000\000\000\192\000\192\000\ +\000\000\000\000\000\000\000\000\000\000\192\000\000\000\192\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\192\000\192\000\193\000\192\000\192\000\192\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\192\000\000\000\192\000\000\000\ +\000\000\192\000\000\000\000\000\192\000\000\000\000\000\000\000\ +\192\000\205\000\205\000\205\000\205\000\000\000\000\000\000\000\ +\000\000\205\000\205\000\205\000\000\000\000\000\205\000\205\000\ +\000\000\205\000\205\000\205\000\205\000\205\000\205\000\000\000\ +\000\000\205\000\205\000\205\000\205\000\205\000\205\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\205\000\205\000\000\000\ +\000\000\205\000\205\000\205\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\205\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\205\000\205\000\000\000\ +\205\000\000\000\000\000\000\000\205\000\205\000\000\000\205\000\ +\000\000\000\000\205\000\205\000\000\000\000\000\000\000\000\000\ +\000\000\205\000\000\000\205\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\205\000\205\000\194\000\205\000\ +\205\000\205\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\205\000\000\000\205\000\000\000\000\000\205\000\000\000\000\000\ +\205\000\000\000\000\000\000\000\205\000\207\000\207\000\207\000\ +\207\000\000\000\000\000\000\000\000\000\207\000\207\000\207\000\ +\000\000\000\000\207\000\207\000\000\000\207\000\207\000\207\000\ +\207\000\207\000\207\000\000\000\000\000\207\000\207\000\207\000\ +\207\000\207\000\207\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\207\000\207\000\000\000\000\000\207\000\207\000\207\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\207\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\207\000\207\000\000\000\207\000\000\000\000\000\000\000\ +\207\000\207\000\000\000\207\000\000\000\000\000\207\000\207\000\ +\000\000\000\000\000\000\000\000\000\000\207\000\000\000\207\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\207\000\207\000\206\000\207\000\207\000\207\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\207\000\000\000\207\000\000\000\ +\000\000\207\000\000\000\000\000\207\000\000\000\000\000\000\000\ 
+\207\000\000\000\193\000\193\000\193\000\193\000\000\000\000\000\ +\000\000\000\000\193\000\193\000\193\000\000\000\000\000\193\000\ +\193\000\000\000\193\000\193\000\193\000\193\000\193\000\193\000\ +\000\000\000\000\193\000\193\000\193\000\193\000\193\000\193\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\193\000\193\000\ +\000\000\000\000\193\000\193\000\193\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\193\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\193\000\193\000\ +\000\000\193\000\000\000\000\000\000\000\193\000\193\000\000\000\ +\193\000\000\000\000\000\193\000\193\000\000\000\000\000\000\000\ +\000\000\000\000\193\000\000\000\193\000\000\000\000\000\211\000\ +\000\000\000\000\000\000\000\000\000\000\193\000\193\000\000\000\ +\193\000\193\000\193\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\193\000\000\000\193\000\000\000\000\000\193\000\000\000\ +\000\000\193\000\000\000\000\000\000\000\193\000\194\000\194\000\ +\194\000\194\000\000\000\000\000\000\000\000\000\194\000\194\000\ +\194\000\000\000\000\000\194\000\194\000\000\000\194\000\194\000\ +\194\000\194\000\194\000\194\000\000\000\000\000\194\000\194\000\ +\194\000\194\000\194\000\194\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\194\000\194\000\000\000\000\000\194\000\194\000\ +\194\000\000\000\000\000\000\000\000\000\000\000\000\000\194\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\194\000\194\000\000\000\194\000\000\000\000\000\ +\000\000\194\000\194\000\000\000\194\000\000\000\000\000\194\000\ +\194\000\000\000\000\000\000\000\210\000\000\000\194\000\000\000\ +\194\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\194\000\194\000\000\000\194\000\194\000\194\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\194\000\000\000\194\000\ +\000\000\000\000\194\000\000\000\000\000\194\000\000\000\000\000\ +\000\000\194\000\206\000\206\000\206\000\206\000\000\000\000\000\ +\000\000\000\000\206\000\206\000\206\000\000\000\000\000\206\000\ +\206\000\000\000\206\000\206\000\206\000\206\000\206\000\206\000\ +\000\000\000\000\206\000\206\000\206\000\206\000\206\000\206\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\206\000\206\000\ +\000\000\000\000\206\000\206\000\206\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\206\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\206\000\206\000\ +\000\000\206\000\000\000\000\000\209\000\206\000\206\000\000\000\ +\206\000\000\000\000\000\206\000\206\000\000\000\000\000\000\000\ +\000\000\000\000\206\000\000\000\206\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\206\000\206\000\000\000\ +\206\000\206\000\206\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\206\000\000\000\206\000\000\000\000\000\206\000\211\000\ +\000\000\206\000\211\000\000\000\000\000\206\000\000\000\211\000\ +\211\000\211\000\000\000\000\000\211\000\211\000\000\000\211\000\ +\211\000\211\000\211\000\211\000\211\000\000\000\000\000\211\000\ +\211\000\211\000\000\000\211\000\211\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\211\000\000\000\000\000\211\000\ +\211\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\211\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\211\000\073\001\000\000\211\000\000\000\ +\000\000\000\000\211\000\211\000\000\000\211\000\000\000\000\000\ +\211\000\211\000\000\000\000\000\000\000\000\000\000\000\211\000\ 
+\000\000\211\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\211\000\211\000\000\000\211\000\211\000\211\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\211\000\000\000\ +\211\000\000\000\000\000\211\000\210\000\000\000\211\000\210\000\ +\000\000\000\000\211\000\000\000\210\000\210\000\210\000\000\000\ +\000\000\210\000\210\000\000\000\210\000\210\000\210\000\210\000\ +\210\000\210\000\000\000\000\000\210\000\210\000\210\000\000\000\ +\210\000\210\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\210\000\000\000\000\000\210\000\210\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\210\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\208\000\000\000\000\000\000\000\ +\210\000\000\000\000\000\210\000\000\000\000\000\000\000\210\000\ +\210\000\000\000\210\000\000\000\000\000\210\000\210\000\000\000\ +\000\000\000\000\000\000\000\000\210\000\000\000\210\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\210\000\ +\210\000\000\000\210\000\210\000\210\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\210\000\209\000\210\000\000\000\209\000\ +\210\000\000\000\000\000\210\000\209\000\000\000\209\000\210\000\ +\000\000\209\000\209\000\000\000\209\000\209\000\209\000\209\000\ +\209\000\209\000\000\000\000\000\209\000\209\000\209\000\000\000\ +\209\000\209\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\209\000\000\000\000\000\209\000\209\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\209\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\072\001\000\000\000\000\000\000\ +\209\000\000\000\000\000\209\000\000\000\000\000\000\000\209\000\ +\209\000\000\000\209\000\000\000\000\000\209\000\209\000\000\000\ +\000\000\000\000\000\000\000\000\209\000\000\000\000\000\000\000\ +\000\000\000\000\214\002\000\000\000\000\000\000\000\000\209\000\ +\209\000\000\000\209\000\209\000\209\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\209\000\073\001\209\000\000\000\073\001\ +\209\000\000\000\000\000\209\000\073\001\000\000\073\001\209\000\ +\000\000\073\001\073\001\000\000\073\001\073\001\073\001\073\001\ +\073\001\073\001\000\000\000\000\073\001\073\001\073\001\000\000\ +\073\001\073\001\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\073\001\000\000\000\000\073\001\073\001\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\073\001\000\000\212\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\073\001\000\000\000\000\073\001\000\000\000\000\000\000\073\001\ +\073\001\000\000\073\001\000\000\000\000\073\001\073\001\000\000\ +\000\000\000\000\000\000\000\000\073\001\214\002\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\073\001\ +\073\001\000\000\073\001\073\001\073\001\000\000\000\000\000\000\ +\000\000\000\000\000\000\073\001\208\000\073\001\000\000\208\000\ +\073\001\000\000\000\000\073\001\208\000\000\000\208\000\073\001\ +\000\000\208\000\208\000\000\000\208\000\208\000\208\000\208\000\ +\208\000\208\000\000\000\000\000\208\000\208\000\208\000\000\000\ +\208\000\208\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\208\000\000\000\000\000\208\000\208\000\000\000\000\000\ +\000\000\000\000\224\000\000\000\000\000\208\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\208\000\000\000\000\000\208\000\000\000\000\000\000\000\208\000\ +\208\000\000\000\208\000\000\000\000\000\208\000\208\000\000\000\ +\000\000\000\000\000\000\000\000\208\000\000\000\000\000\000\000\ 
+\000\000\000\000\000\000\000\000\000\000\000\000\000\000\208\000\ +\208\000\000\000\208\000\208\000\208\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\208\000\072\001\208\000\000\000\072\001\ +\208\000\000\000\000\000\208\000\072\001\000\000\072\001\208\000\ +\000\000\072\001\072\001\000\000\072\001\072\001\072\001\072\001\ +\072\001\072\001\000\000\000\000\072\001\072\001\072\001\000\000\ +\072\001\072\001\214\002\000\000\000\000\000\000\000\000\000\000\ +\000\000\072\001\214\002\000\000\072\001\072\001\000\000\214\002\ +\000\000\000\000\215\000\000\000\000\000\072\001\000\000\000\000\ +\000\000\000\000\000\000\000\000\214\002\000\000\214\002\214\002\ +\072\001\000\000\000\000\072\001\000\000\000\000\000\000\072\001\ +\072\001\000\000\072\001\214\002\000\000\072\001\072\001\000\000\ +\099\000\000\000\000\000\000\000\072\001\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\214\002\072\001\ +\072\001\214\002\072\001\072\001\072\001\214\002\214\002\212\000\ +\000\000\000\000\212\000\072\001\214\002\072\001\000\000\212\000\ +\072\001\212\000\214\002\072\001\212\000\212\000\000\000\072\001\ +\212\000\000\000\212\000\212\000\212\000\000\000\214\002\212\000\ +\212\000\212\000\214\002\212\000\212\000\214\002\000\000\000\000\ +\000\000\000\000\000\000\000\000\212\000\000\000\214\002\212\000\ +\212\000\214\002\214\002\000\000\000\000\188\000\000\000\000\000\ +\212\000\000\000\000\000\000\000\000\000\000\000\000\000\214\002\ +\000\000\214\002\214\002\212\000\000\000\000\000\212\000\000\000\ +\000\000\000\000\212\000\212\000\000\000\212\000\214\002\000\000\ +\212\000\212\000\000\000\212\002\000\000\000\000\000\000\212\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\214\002\212\000\212\000\214\002\212\000\212\000\212\000\ +\214\002\214\002\224\000\000\000\000\000\224\000\212\000\214\002\ +\212\000\000\000\224\000\212\000\224\000\214\002\212\000\224\000\ +\224\000\000\000\212\000\224\000\000\000\224\000\224\000\224\000\ +\000\000\214\002\224\000\224\000\224\000\214\002\224\000\224\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\224\000\ +\000\000\214\002\224\000\224\000\214\002\000\000\000\000\000\000\ +\217\000\000\000\000\000\224\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\224\000\000\000\ +\000\000\224\000\000\000\000\000\000\000\224\000\224\000\000\000\ +\224\000\000\000\000\000\224\000\224\000\000\000\000\000\000\000\ +\000\000\000\000\224\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\224\000\224\000\000\000\ +\224\000\224\000\224\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\224\000\215\000\224\000\000\000\215\000\224\000\000\000\ +\000\000\224\000\215\000\000\000\215\000\224\000\000\000\215\000\ +\215\000\000\000\000\000\215\000\000\000\215\000\215\000\215\000\ +\000\000\000\000\215\000\215\000\215\000\000\000\215\000\215\000\ +\099\000\000\000\000\000\000\000\000\000\000\000\000\000\215\000\ +\000\000\000\000\215\000\215\000\000\000\099\000\000\000\000\000\ +\216\000\000\000\000\000\215\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\099\000\000\000\099\000\099\000\215\000\000\000\ +\000\000\215\000\000\000\000\000\000\000\215\000\215\000\000\000\ +\215\000\099\000\000\000\215\000\215\000\000\000\100\000\000\000\ +\000\000\000\000\215\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\099\000\215\000\215\000\099\000\ +\215\000\215\000\215\000\099\000\099\000\188\000\000\000\000\000\ 
+\188\000\215\000\099\000\215\000\000\000\188\000\215\000\188\000\ +\099\000\215\000\188\000\188\000\000\000\215\000\188\000\000\000\ +\188\000\188\000\188\000\000\000\099\000\188\000\188\000\188\000\ +\099\000\188\000\188\000\212\002\000\000\000\000\212\002\000\000\ +\000\000\000\000\188\000\000\000\099\000\188\000\188\000\099\000\ +\212\002\000\000\000\000\220\000\000\000\000\000\188\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\212\002\000\000\212\002\ +\212\002\188\000\000\000\000\000\188\000\000\000\000\000\000\000\ +\188\000\188\000\000\000\188\000\212\002\000\000\188\000\188\000\ +\000\000\165\001\000\000\000\000\000\000\188\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\212\002\ +\188\000\188\000\212\002\188\000\188\000\188\000\000\000\212\002\ +\217\000\000\000\000\000\217\000\188\000\212\002\188\000\000\000\ +\217\000\188\000\217\000\212\002\188\000\217\000\217\000\000\000\ +\188\000\217\000\000\000\217\000\217\000\217\000\000\000\212\002\ +\217\000\217\000\217\000\212\002\217\000\217\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\217\000\000\000\212\002\ +\217\000\217\000\212\002\000\000\000\000\000\000\218\000\000\000\ +\000\000\217\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\217\000\000\000\000\000\217\000\ +\000\000\000\000\000\000\217\000\217\000\000\000\217\000\000\000\ +\000\000\217\000\217\000\000\000\000\000\000\000\000\000\000\000\ +\217\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\217\000\217\000\000\000\217\000\217\000\ +\217\000\000\000\000\000\000\000\000\000\000\000\000\000\217\000\ +\216\000\217\000\000\000\216\000\217\000\000\000\000\000\217\000\ +\216\000\000\000\216\000\217\000\000\000\216\000\216\000\000\000\ +\000\000\216\000\000\000\216\000\216\000\216\000\000\000\000\000\ +\216\000\216\000\216\000\000\000\216\000\216\000\100\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\216\000\000\000\000\000\ +\216\000\216\000\000\000\100\000\000\000\000\000\219\000\000\000\ +\000\000\216\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\100\000\000\000\100\000\100\000\216\000\000\000\000\000\216\000\ +\000\000\000\000\000\000\216\000\216\000\000\000\216\000\100\000\ +\000\000\216\000\216\000\000\000\212\002\000\000\000\000\000\000\ +\216\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\100\000\216\000\216\000\100\000\216\000\216\000\ +\216\000\100\000\100\000\220\000\000\000\000\000\220\000\216\000\ +\100\000\216\000\000\000\220\000\216\000\220\000\100\000\216\000\ +\220\000\220\000\000\000\216\000\220\000\000\000\220\000\220\000\ +\220\000\000\000\100\000\220\000\220\000\220\000\100\000\220\000\ +\220\000\165\001\000\000\000\000\000\000\000\000\000\000\000\000\ +\220\000\000\000\100\000\220\000\220\000\100\000\165\001\000\000\ +\000\000\223\000\000\000\000\000\220\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\165\001\000\000\165\001\165\001\220\000\ +\000\000\000\000\220\000\000\000\000\000\000\000\220\000\220\000\ +\000\000\220\000\165\001\000\000\220\000\220\000\000\000\037\000\ +\000\000\000\000\000\000\220\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\165\001\220\000\220\000\ +\165\001\220\000\220\000\220\000\165\001\165\001\218\000\000\000\ +\000\000\218\000\220\000\165\001\220\000\000\000\218\000\220\000\ +\218\000\165\001\220\000\218\000\218\000\000\000\220\000\218\000\ +\000\000\218\000\218\000\218\000\000\000\165\001\218\000\218\000\ 
+\218\000\165\001\218\000\218\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\218\000\000\000\165\001\218\000\218\000\ +\165\001\000\000\000\000\000\000\221\000\000\000\000\000\218\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\218\000\000\000\000\000\218\000\000\000\000\000\ +\000\000\218\000\218\000\000\000\218\000\000\000\000\000\218\000\ +\218\000\000\000\000\000\000\000\000\000\000\000\218\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\218\000\218\000\000\000\218\000\218\000\218\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\218\000\219\000\218\000\ +\000\000\219\000\218\000\000\000\000\000\218\000\219\000\000\000\ +\219\000\218\000\000\000\219\000\219\000\000\000\000\000\219\000\ +\000\000\219\000\219\000\219\000\000\000\000\000\219\000\219\000\ +\219\000\000\000\219\000\219\000\212\002\000\000\000\000\000\000\ +\000\000\000\000\000\000\219\000\000\000\000\000\219\000\219\000\ +\000\000\212\002\000\000\000\000\222\000\000\000\000\000\219\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\212\002\000\000\ +\212\002\212\002\219\000\000\000\000\000\219\000\000\000\000\000\ +\000\000\219\000\219\000\000\000\219\000\212\002\000\000\219\000\ +\219\000\000\000\040\000\000\000\000\000\000\000\219\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\212\002\219\000\219\000\212\002\219\000\219\000\219\000\000\000\ +\212\002\223\000\000\000\000\000\223\000\219\000\212\002\219\000\ +\000\000\223\000\219\000\223\000\212\002\219\000\223\000\223\000\ +\000\000\219\000\223\000\000\000\223\000\223\000\223\000\000\000\ +\212\002\223\000\223\000\223\000\212\002\223\000\223\000\037\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\223\000\000\000\ +\212\002\223\000\223\000\212\002\037\000\000\000\000\000\152\000\ +\000\000\000\000\223\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\037\000\000\000\037\000\037\000\223\000\000\000\000\000\ +\223\000\000\000\000\000\000\000\223\000\223\000\000\000\223\000\ +\037\000\000\000\223\000\223\000\000\000\000\000\000\000\000\000\ +\000\000\223\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\037\000\223\000\223\000\037\000\223\000\ +\223\000\223\000\000\000\037\000\221\000\000\000\000\000\221\000\ +\223\000\037\000\223\000\000\000\221\000\223\000\221\000\037\000\ +\223\000\221\000\221\000\000\000\223\000\221\000\000\000\221\000\ +\221\000\221\000\000\000\037\000\221\000\221\000\221\000\037\000\ +\221\000\221\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\221\000\000\000\037\000\221\000\221\000\037\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\221\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\003\002\000\000\000\000\ +\221\000\000\000\000\000\221\000\000\000\000\000\000\000\221\000\ +\221\000\000\000\221\000\000\000\000\000\221\000\221\000\000\000\ +\000\000\000\000\000\000\000\000\221\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\221\000\ +\221\000\000\000\221\000\221\000\221\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\221\000\222\000\221\000\000\000\222\000\ +\221\000\000\000\000\000\221\000\222\000\000\000\222\000\221\000\ +\000\000\222\000\222\000\000\000\000\000\222\000\000\000\222\000\ +\222\000\222\000\000\000\000\000\222\000\222\000\222\000\000\000\ +\222\000\222\000\040\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\222\000\000\000\000\000\222\000\222\000\000\000\040\000\ 
+\000\000\000\000\000\000\000\000\000\000\222\000\000\000\000\000\ +\189\000\000\000\000\000\000\000\040\000\000\000\040\000\040\000\ +\222\000\000\000\000\000\222\000\000\000\000\000\000\000\222\000\ +\222\000\000\000\222\000\040\000\000\000\222\000\222\000\000\000\ +\000\000\000\000\000\000\000\000\222\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\040\000\222\000\ +\222\000\040\000\222\000\222\000\222\000\000\000\040\000\152\000\ +\000\000\000\000\152\000\222\000\040\000\222\000\000\000\152\000\ +\222\000\152\000\040\000\222\000\152\000\152\000\000\000\222\000\ +\152\000\000\000\152\000\152\000\152\000\000\000\040\000\152\000\ +\152\000\152\000\040\000\152\000\152\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\152\000\000\000\040\000\152\000\ +\152\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\152\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\152\000\000\000\000\000\152\000\000\000\ +\000\000\000\000\152\000\152\000\037\002\152\000\000\000\000\000\ +\152\000\152\000\000\000\000\000\000\000\000\000\000\000\152\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\152\000\152\000\000\000\152\000\000\000\152\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\152\000\000\000\ +\152\000\000\000\000\000\152\000\000\000\003\002\152\000\003\002\ +\003\002\003\002\152\000\000\000\000\000\003\002\000\000\000\000\ +\000\000\000\000\003\002\000\000\000\000\000\000\003\002\003\002\ +\003\002\000\000\000\000\000\000\000\000\000\000\000\000\003\002\ +\003\002\003\002\003\002\000\000\000\000\000\000\000\000\000\000\ +\000\000\003\002\000\000\000\000\000\000\003\002\003\002\000\000\ +\000\000\000\000\000\000\000\000\000\000\003\002\003\002\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\001\002\ +\000\000\003\002\000\000\000\000\003\002\000\000\000\000\003\002\ +\003\002\003\002\000\000\003\002\000\000\000\000\003\002\003\002\ +\000\000\000\000\000\000\000\000\000\000\003\002\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\003\002\003\002\000\000\003\002\003\002\003\002\000\000\000\000\ +\189\000\003\002\000\000\189\000\000\000\000\000\000\000\000\000\ +\189\000\003\002\189\000\000\000\003\002\189\000\189\000\000\000\ +\003\002\189\000\000\000\189\000\189\000\189\000\000\000\000\000\ +\189\000\000\000\189\000\000\000\189\000\189\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\189\000\000\000\000\000\ +\189\000\189\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\189\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\255\001\000\000\189\000\000\000\000\000\189\000\ +\000\000\000\000\000\000\189\000\189\000\000\000\189\000\000\000\ +\000\000\189\000\189\000\000\000\000\000\000\000\000\000\000\000\ +\189\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\189\000\189\000\000\000\189\000\189\000\ +\189\000\000\000\000\000\000\000\000\000\000\000\000\000\189\000\ +\000\000\189\000\000\000\000\000\189\000\000\000\000\000\189\000\ +\000\000\000\000\000\000\189\000\037\002\000\000\037\002\037\002\ +\037\002\000\000\000\000\000\000\037\002\000\000\000\000\000\000\ +\000\000\037\002\000\000\000\000\000\000\037\002\037\002\037\002\ +\000\000\000\000\000\000\000\000\000\000\000\000\037\002\037\002\ +\037\002\037\002\000\000\000\000\206\004\000\000\000\000\000\000\ +\037\002\000\000\000\000\000\000\000\000\037\002\000\000\000\002\ 
+\000\000\000\000\000\000\032\005\037\002\037\002\000\000\000\000\ +\000\000\000\000\198\001\000\000\000\000\000\000\000\000\000\000\ +\037\002\000\000\000\000\037\002\000\000\000\000\037\002\037\002\ +\037\002\000\000\037\002\000\000\000\000\037\002\037\002\000\000\ +\000\000\000\000\000\000\208\004\037\002\113\000\114\000\028\000\ +\000\000\115\000\000\000\000\000\116\000\209\004\000\000\037\002\ +\037\002\000\000\037\002\037\002\037\002\000\000\000\000\001\002\ +\000\000\001\002\001\002\001\002\000\000\118\000\000\000\001\002\ +\037\002\000\000\000\000\037\002\001\002\119\000\120\000\037\002\ +\001\002\001\002\001\002\000\000\000\000\121\000\000\000\000\000\ +\000\000\001\002\001\002\001\002\001\002\000\000\201\001\000\000\ +\000\000\211\004\123\000\001\002\000\000\000\000\000\000\000\000\ +\001\002\000\000\254\001\000\000\000\000\000\000\000\000\001\002\ +\001\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\001\002\000\000\000\000\001\002\000\000\ +\000\000\001\002\001\002\001\002\000\000\001\002\000\000\000\000\ +\000\000\001\002\000\000\000\000\000\000\000\000\000\000\001\002\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\001\002\001\002\000\000\001\002\001\002\001\002\ +\000\000\000\000\255\001\000\000\255\001\255\001\255\001\000\000\ +\000\000\000\000\255\001\001\002\000\000\000\000\001\002\255\001\ +\000\000\000\000\001\002\255\001\255\001\255\001\000\000\000\000\ +\000\000\000\000\000\000\000\000\255\001\255\001\255\001\255\001\ +\000\000\000\000\000\000\000\000\146\000\000\000\255\001\000\000\ +\000\000\000\000\000\000\255\001\000\000\000\000\090\000\000\000\ +\000\000\000\000\255\001\255\001\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\255\001\000\000\ +\000\000\255\001\000\000\000\000\255\001\255\001\255\001\000\000\ +\255\001\000\000\000\000\000\000\255\001\000\000\000\000\000\000\ +\000\000\000\000\255\001\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\255\001\255\001\065\000\ +\255\001\255\001\255\001\000\000\000\000\000\000\000\000\000\002\ +\000\000\000\002\000\002\000\002\000\000\000\000\255\001\000\002\ +\000\000\255\001\000\000\000\000\000\002\255\001\000\000\000\000\ +\000\002\000\002\000\002\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\002\000\002\000\002\000\002\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\000\ +\000\002\000\000\000\000\000\000\000\000\000\000\000\000\000\002\ +\000\002\066\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\002\000\000\000\000\000\002\000\000\ +\000\000\000\002\000\002\000\002\000\000\000\002\000\000\000\000\ +\000\000\000\002\000\000\000\000\000\000\000\000\112\000\000\002\ +\113\000\114\000\028\000\000\000\115\000\000\000\000\000\116\000\ +\117\000\000\000\000\002\000\002\000\000\000\002\000\002\000\002\ +\000\000\000\000\254\001\000\000\254\001\254\001\254\001\000\000\ +\118\000\000\000\254\001\000\002\000\000\000\000\000\002\254\001\ +\119\000\120\000\000\002\254\001\254\001\254\001\000\000\000\000\ +\121\000\000\000\000\000\000\000\254\001\254\001\254\001\254\001\ +\000\000\000\000\000\000\000\000\122\000\123\000\254\001\000\000\ +\000\000\000\000\000\000\254\001\000\000\000\000\000\000\000\000\ +\000\000\000\000\254\001\254\001\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\254\001\000\000\ +\000\000\254\001\214\002\000\000\254\001\254\001\254\001\000\000\ 
+\254\001\000\000\000\000\000\000\254\001\000\000\000\000\000\000\ +\000\000\000\000\254\001\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\146\000\254\001\254\001\146\000\ +\254\001\254\001\254\001\000\000\000\000\000\000\090\000\000\000\ +\000\000\146\000\000\000\000\000\000\000\146\000\254\001\146\000\ +\000\000\254\001\000\000\090\000\000\000\254\001\146\000\146\000\ +\146\000\146\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\090\000\000\000\090\000\090\000\000\000\146\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\090\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\065\000\ +\146\000\000\000\000\000\146\000\000\000\000\000\000\000\146\000\ +\146\000\000\000\090\000\000\000\065\000\146\000\146\000\000\000\ +\065\000\090\000\090\000\000\000\146\000\000\000\000\000\000\000\ +\090\000\065\000\065\000\065\000\065\000\106\002\090\000\000\000\ +\146\000\000\000\146\000\000\000\146\000\000\000\000\000\000\000\ +\065\000\000\000\090\000\000\000\000\000\000\000\090\000\000\000\ +\146\000\000\000\000\000\146\000\000\000\000\000\000\000\146\000\ +\000\000\066\000\090\000\065\000\066\000\090\000\065\000\000\000\ +\000\000\065\000\065\000\065\000\000\000\000\000\066\000\000\000\ +\065\000\065\000\066\000\000\000\000\000\000\000\000\000\065\000\ +\000\000\000\000\000\000\066\000\066\000\066\000\066\000\000\000\ +\000\000\000\000\000\000\065\000\000\000\065\000\000\000\065\000\ +\000\000\000\000\066\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\065\000\000\000\000\000\065\000\000\000\ +\000\000\000\000\065\000\000\000\000\000\066\000\000\000\000\000\ +\066\000\000\000\000\000\000\000\066\000\066\000\000\000\000\000\ +\000\000\000\000\066\000\066\000\112\000\000\000\113\000\114\000\ +\028\000\066\000\115\000\000\000\120\001\116\000\117\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\066\000\000\000\066\000\ +\000\000\066\000\000\000\000\000\000\000\000\000\118\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\066\000\119\000\060\003\ +\066\000\000\000\214\002\000\000\066\000\214\002\121\000\214\002\ +\214\002\214\002\214\002\000\000\000\000\214\002\214\002\214\002\ +\000\000\000\000\122\000\123\000\000\000\214\002\000\000\000\000\ +\000\000\214\002\000\000\000\000\214\002\000\000\214\002\214\002\ +\214\002\214\002\214\002\214\002\214\002\214\002\214\002\000\000\ +\000\000\214\002\214\002\214\002\000\000\000\000\098\002\000\000\ +\000\000\000\000\214\002\214\002\214\002\214\002\214\002\214\002\ +\214\002\214\002\214\002\214\002\214\002\214\002\214\002\214\002\ +\000\000\214\002\214\002\214\002\000\000\214\002\214\002\214\002\ +\214\002\214\002\214\002\000\000\214\002\214\002\000\000\214\002\ +\214\002\000\000\214\002\214\002\000\000\000\000\214\002\214\002\ +\000\000\214\002\214\002\214\002\214\002\214\002\214\002\214\002\ +\000\000\214\002\214\002\214\002\000\000\214\002\000\000\214\002\ +\214\002\000\000\214\002\000\000\214\002\214\002\214\002\214\002\ +\214\002\214\002\214\002\212\001\214\002\106\002\000\000\000\000\ +\000\000\106\002\000\000\106\002\000\000\106\002\000\000\106\002\ +\000\000\106\002\000\000\106\002\106\002\000\000\106\002\106\002\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\106\002\106\002\000\000\106\002\106\002\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\106\002\ +\106\002\106\002\106\002\000\000\106\002\106\002\000\000\000\000\ 
+\106\002\213\001\000\000\000\000\000\000\106\002\106\002\106\002\ +\000\000\000\000\000\000\000\000\106\002\000\000\106\002\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\106\002\000\000\ +\000\000\106\002\000\000\000\000\000\000\000\000\106\002\000\000\ +\106\002\106\002\000\000\106\002\106\002\000\000\106\002\000\000\ +\000\000\000\000\106\002\000\000\000\000\106\002\000\000\106\002\ +\000\000\000\000\106\002\106\002\120\001\000\000\106\002\000\000\ +\120\001\000\000\120\001\212\002\120\001\000\000\120\001\000\000\ +\120\001\000\000\120\001\120\001\000\000\120\001\120\001\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\120\001\ +\000\000\000\000\120\001\120\001\000\000\000\000\000\000\000\000\ +\000\000\064\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\120\001\120\001\ +\000\000\120\001\000\000\120\001\120\001\000\000\000\000\120\001\ +\000\000\000\000\000\000\000\000\120\001\120\001\120\001\000\000\ +\000\000\000\000\000\000\120\001\000\000\120\001\098\002\000\000\ +\000\000\098\002\000\000\000\000\000\000\120\001\098\002\000\000\ +\120\001\000\000\000\000\098\002\098\002\120\001\000\000\120\001\ +\120\001\098\002\120\001\120\001\119\002\120\001\000\000\000\000\ +\098\002\120\001\098\002\098\002\120\001\000\000\120\001\000\000\ +\000\000\120\001\120\001\000\000\000\000\120\001\000\000\098\002\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\031\002\ +\209\001\031\002\031\002\031\002\000\000\031\002\000\000\000\000\ +\031\002\031\002\098\002\000\000\000\000\098\002\000\000\119\002\ +\098\002\098\002\098\002\212\001\000\000\000\000\212\001\000\000\ +\098\002\031\002\000\000\212\001\000\000\098\002\098\002\000\000\ +\212\001\031\002\031\002\000\000\000\000\000\000\212\001\000\000\ +\000\000\031\002\098\002\000\000\000\000\212\001\098\002\212\001\ +\212\001\000\000\000\000\000\000\000\000\031\002\031\002\000\000\ +\063\000\000\000\098\002\212\001\212\001\098\002\214\002\000\000\ +\214\002\214\002\214\002\000\000\214\002\000\000\000\000\214\002\ +\214\002\000\000\000\000\000\000\000\000\000\000\000\000\212\001\ +\000\000\213\001\212\001\000\000\213\001\212\001\212\001\212\001\ +\214\002\213\001\000\000\000\000\040\002\212\001\213\001\000\000\ +\214\002\214\002\000\000\212\001\213\001\000\000\000\000\000\000\ +\214\002\000\000\000\000\213\001\000\000\213\001\213\001\212\001\ +\131\000\000\000\000\000\212\001\214\002\214\002\000\000\040\002\ +\000\000\213\001\213\001\000\000\000\000\000\000\000\000\212\001\ +\000\000\000\000\212\001\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\212\002\000\000\213\001\212\002\000\000\ +\213\001\000\000\000\000\213\001\213\001\213\001\000\000\000\000\ +\212\002\000\000\213\001\213\001\000\000\000\000\212\002\000\000\ +\000\000\213\001\000\000\000\000\000\000\212\002\000\000\212\002\ +\212\002\064\000\174\001\000\000\064\000\213\001\000\000\000\000\ +\000\000\213\001\000\000\212\002\212\002\000\000\064\000\000\000\ +\000\000\000\000\064\000\212\002\212\002\213\001\000\000\000\000\ +\213\001\000\000\000\000\064\000\064\000\064\000\064\000\212\002\ +\000\000\000\000\212\002\000\000\000\000\000\000\000\000\212\002\ +\000\000\212\002\064\000\000\000\000\000\212\002\000\000\000\000\ +\000\000\000\000\241\001\212\002\241\001\241\001\241\001\000\000\ +\241\001\000\000\214\002\241\001\241\001\064\000\000\000\212\002\ +\064\000\000\000\000\000\212\002\064\000\064\000\000\000\000\000\ +\000\000\000\000\008\000\064\000\241\001\000\000\000\000\212\002\ 
+\011\000\064\000\212\002\000\000\241\001\241\001\000\000\000\000\ +\209\001\000\000\000\000\209\001\241\001\064\000\000\000\064\000\ +\209\001\064\000\015\000\016\000\000\000\209\001\000\000\000\000\ +\241\001\241\001\000\000\209\001\000\000\064\000\000\000\174\001\ +\064\000\000\000\209\001\000\000\209\001\209\001\022\000\000\000\ +\138\000\139\000\000\000\140\000\141\000\000\000\000\000\028\000\ +\000\000\209\001\000\000\000\000\142\000\143\000\000\000\000\000\ +\000\000\000\000\000\000\144\000\000\000\000\000\000\000\000\000\ +\063\000\000\000\000\000\063\000\209\001\000\000\000\000\209\001\ +\145\000\000\000\209\001\209\001\209\001\063\000\000\000\000\000\ +\000\000\063\000\209\001\000\000\175\001\146\000\000\000\000\000\ +\209\001\044\000\063\000\063\000\063\000\063\000\045\000\000\000\ +\000\000\048\000\147\000\000\000\209\001\000\000\000\000\000\000\ +\209\001\063\000\000\000\209\001\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\209\001\000\000\000\000\209\001\ +\131\000\000\000\000\000\131\000\063\000\000\000\000\000\063\000\ +\000\000\000\000\000\000\063\000\063\000\131\000\000\000\000\000\ +\000\000\177\001\063\000\131\000\000\000\000\000\000\000\000\000\ +\063\000\000\000\131\000\000\000\131\000\131\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\063\000\000\000\063\000\000\000\ +\063\000\131\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\131\000\000\000\000\000\000\000\063\000\000\000\000\000\063\000\ +\000\000\000\000\174\001\000\000\131\000\174\001\000\000\131\000\ +\000\000\000\000\000\000\131\000\131\000\000\000\131\000\174\001\ +\000\000\176\001\131\000\000\000\000\000\174\001\000\000\000\000\ +\131\000\000\000\000\000\000\000\174\001\000\000\174\001\174\001\ +\000\000\000\000\000\000\000\000\131\000\000\000\000\000\000\000\ +\131\000\000\000\000\000\174\001\000\000\000\000\000\000\178\001\ +\000\000\000\000\000\000\000\000\131\000\000\000\000\000\131\000\ +\000\000\000\000\214\002\000\000\000\000\214\002\174\001\000\000\ +\000\000\174\001\214\002\000\000\000\000\174\001\174\001\214\002\ +\000\000\000\000\000\000\000\000\174\001\214\002\000\000\000\000\ +\000\000\000\000\174\001\000\000\214\002\000\000\214\002\214\002\ +\115\002\000\000\000\000\000\000\000\000\000\000\174\001\000\000\ +\000\000\000\000\174\001\214\002\000\000\000\000\000\000\182\001\ +\000\000\000\000\000\000\000\000\000\000\000\000\174\001\174\001\ +\000\000\174\001\174\001\000\000\000\000\000\000\214\002\000\000\ +\209\001\214\002\000\000\000\000\174\001\214\002\214\002\000\000\ +\000\000\000\000\174\001\000\000\214\002\000\000\000\000\000\000\ +\000\000\174\001\214\002\174\001\174\001\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\214\002\000\000\ +\174\001\000\000\214\002\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\175\001\000\000\214\002\175\001\ +\000\000\214\002\000\000\174\001\000\000\000\000\174\001\000\000\ +\044\000\175\001\174\001\174\001\000\000\000\000\000\000\175\001\ +\000\000\174\001\000\000\209\001\000\000\000\000\175\001\174\001\ +\175\001\175\001\000\000\209\001\000\000\047\000\000\000\000\000\ +\209\001\000\000\000\000\174\001\000\000\175\001\085\000\174\001\ +\000\000\000\000\000\000\000\000\000\000\209\001\000\000\209\001\ +\209\001\177\001\000\000\174\001\177\001\000\000\174\001\000\000\ +\175\001\000\000\000\000\175\001\209\001\000\000\177\001\175\001\ +\175\001\000\000\000\000\000\000\177\001\000\000\175\001\000\000\ +\000\000\212\002\000\000\177\001\175\001\177\001\177\001\209\001\ 
+\000\000\000\000\209\001\000\000\000\000\209\001\209\001\209\001\ +\175\001\000\000\177\001\000\000\175\001\209\001\081\000\000\000\ +\000\000\000\000\000\000\209\001\000\000\000\000\000\000\000\000\ +\175\001\176\001\000\000\175\001\176\001\177\001\000\000\209\001\ +\177\001\000\000\000\000\209\001\177\001\177\001\176\001\000\000\ +\000\000\000\000\000\000\177\001\176\001\000\000\000\000\209\001\ +\000\000\177\001\209\001\176\001\000\000\176\001\176\001\178\001\ +\000\000\000\000\178\001\000\000\000\000\177\001\000\000\000\000\ +\000\000\177\001\176\001\000\000\178\001\000\000\000\000\000\000\ +\000\000\000\000\178\001\000\000\000\000\177\001\000\000\000\000\ +\177\001\178\001\000\000\178\001\178\001\176\001\000\000\000\000\ +\176\001\000\000\000\000\000\000\176\001\176\001\000\000\000\000\ +\178\001\000\000\000\000\176\001\000\000\000\000\000\000\000\000\ +\000\000\176\001\000\000\000\000\000\000\000\000\000\000\182\001\ +\000\000\000\000\182\001\178\001\000\000\176\001\178\001\000\000\ +\000\000\176\001\178\001\178\001\182\001\000\000\000\000\000\000\ +\209\001\178\001\182\001\000\000\000\000\176\001\000\000\178\001\ +\176\001\182\001\000\000\182\001\182\001\209\001\000\000\000\000\ +\000\000\000\000\000\000\178\001\000\000\000\000\000\000\178\001\ +\182\001\000\000\209\001\000\000\209\001\209\001\000\000\000\000\ +\000\000\000\000\000\000\178\001\000\000\000\000\178\001\000\000\ +\000\000\209\001\000\000\182\001\000\000\000\000\182\001\000\000\ +\000\000\000\000\182\001\182\001\000\000\000\000\000\000\000\000\ +\044\000\182\001\000\000\000\000\209\001\000\000\000\000\182\001\ +\000\000\000\000\209\001\209\001\209\001\044\000\000\000\000\000\ +\000\000\000\000\209\001\182\001\000\000\047\000\000\000\182\001\ +\209\001\000\000\044\000\000\000\044\000\044\000\085\000\000\000\ +\000\000\000\000\047\000\182\001\209\001\000\000\182\001\000\000\ +\209\001\044\000\000\000\085\000\000\000\000\000\000\000\047\000\ +\000\000\047\000\047\000\000\000\209\001\000\000\000\000\209\001\ +\085\000\000\000\085\000\085\000\044\000\000\000\047\000\044\000\ +\000\000\212\002\000\000\000\000\044\000\000\000\000\000\085\000\ +\000\000\000\000\044\000\000\000\000\000\000\000\212\002\000\000\ +\044\000\047\000\000\000\000\000\047\000\000\000\081\000\000\000\ +\000\000\047\000\085\000\212\002\044\000\212\002\212\002\047\000\ +\044\000\000\000\085\000\081\000\000\000\047\000\000\000\000\000\ +\085\000\000\000\212\002\000\000\044\000\000\000\085\000\044\000\ +\081\000\047\000\081\000\081\000\000\000\047\000\000\000\000\000\ +\000\000\000\000\085\000\000\000\000\000\212\002\085\000\081\000\ +\000\000\047\000\000\000\000\000\047\000\212\002\000\000\000\000\ +\000\000\000\000\085\000\212\002\000\000\085\000\000\000\000\000\ +\000\000\212\002\081\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\081\000\000\000\000\000\212\002\000\000\000\000\ +\081\000\212\002\000\000\000\000\000\000\000\000\081\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\212\002\000\000\000\000\ +\212\002\000\000\081\000\207\002\000\000\000\000\081\000\000\000\ +\207\002\207\002\207\002\207\002\000\000\000\000\207\002\207\002\ +\207\002\207\002\081\000\000\000\000\000\081\000\207\002\000\000\ +\000\000\000\000\000\000\000\000\000\000\207\002\000\000\207\002\ +\207\002\207\002\207\002\207\002\207\002\207\002\207\002\000\000\ +\000\000\000\000\207\002\000\000\207\002\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\207\002\207\002\207\002\207\002\ +\207\002\207\002\207\002\207\002\000\000\000\000\207\002\207\002\ 
+\000\000\000\000\207\002\207\002\207\002\207\002\000\000\207\002\ +\207\002\207\002\207\002\207\002\000\000\207\002\000\000\000\000\ +\207\002\207\002\000\000\207\002\207\002\000\000\000\000\207\002\ +\207\002\000\000\207\002\000\000\207\002\207\002\000\000\207\002\ +\207\002\000\000\000\000\207\002\207\002\000\000\207\002\000\000\ +\207\002\207\002\000\000\207\002\000\000\207\002\207\002\207\002\ +\207\002\207\002\207\002\207\002\214\002\207\002\000\000\000\000\ +\000\000\214\002\214\002\214\002\214\002\000\000\000\000\214\002\ +\214\002\000\000\000\000\000\000\000\000\000\000\000\000\214\002\ +\000\000\000\000\000\000\000\000\000\000\000\000\214\002\000\000\ +\214\002\000\000\214\002\214\002\214\002\214\002\214\002\214\002\ +\000\000\000\000\000\000\214\002\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\214\002\214\002\214\002\ +\214\002\214\002\214\002\214\002\214\002\000\000\000\000\214\002\ +\214\002\000\000\000\000\214\002\214\002\214\002\000\000\000\000\ +\214\002\214\002\214\002\214\002\214\002\000\000\214\002\000\000\ +\000\000\214\002\214\002\000\000\000\000\214\002\000\000\000\000\ +\214\002\214\002\000\000\214\002\000\000\214\002\214\002\000\000\ +\000\000\214\002\000\000\000\000\000\000\214\002\000\000\214\002\ +\000\000\214\002\214\002\000\000\214\002\000\000\214\002\214\002\ +\000\000\214\002\214\002\214\002\214\002\000\000\214\002\001\001\ +\002\001\003\001\000\000\000\000\007\000\008\000\004\001\000\000\ +\005\001\000\000\010\000\011\000\000\000\000\000\006\001\007\001\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\008\001\000\000\000\000\015\000\016\000\017\000\ +\018\000\019\000\000\000\009\001\000\000\000\000\020\000\000\000\ +\000\000\010\001\011\001\012\001\013\001\014\001\015\001\000\000\ +\000\000\022\000\000\000\023\000\024\000\025\000\026\000\027\000\ +\000\000\000\000\028\000\000\000\016\001\000\000\030\000\031\000\ +\032\000\000\000\000\000\000\000\034\000\000\000\017\001\018\001\ +\000\000\019\001\000\000\000\000\000\000\038\000\000\000\000\000\ +\000\000\020\001\021\001\022\001\023\001\024\001\025\001\000\000\ +\000\000\000\000\000\000\000\000\000\000\026\001\000\000\000\000\ +\000\000\027\001\000\000\028\001\044\000\000\000\000\000\000\000\ +\000\000\045\000\046\000\000\000\048\000\049\000\001\001\002\001\ +\003\001\051\000\000\000\007\000\008\000\004\001\000\000\005\001\ +\000\000\010\000\011\000\000\000\000\000\018\003\007\001\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\008\001\000\000\000\000\015\000\016\000\017\000\018\000\ +\019\000\000\000\009\001\000\000\000\000\020\000\000\000\000\000\ +\010\001\011\001\012\001\013\001\014\001\015\001\000\000\000\000\ +\022\000\000\000\023\000\024\000\025\000\026\000\027\000\000\000\ +\000\000\028\000\000\000\016\001\000\000\030\000\031\000\032\000\ +\000\000\000\000\000\000\034\000\000\000\017\001\018\001\000\000\ +\019\003\000\000\000\000\000\000\038\000\000\000\000\000\000\000\ +\020\001\021\001\022\001\023\001\024\001\025\001\000\000\000\000\ +\000\000\000\000\000\000\000\000\020\003\000\000\000\000\000\000\ +\027\001\000\000\028\001\044\000\000\000\000\000\000\000\000\000\ +\045\000\046\000\000\000\048\000\049\000\214\002\000\000\000\000\ +\051\000\000\000\214\002\214\002\214\002\000\000\000\000\000\000\ +\214\002\214\002\214\002\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\214\002\ +\000\000\214\002\214\002\214\002\214\002\214\002\214\002\214\002\ 
+\000\000\000\000\000\000\000\000\214\002\000\000\214\002\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\214\002\ +\000\000\214\002\214\002\214\002\214\002\214\002\000\000\000\000\ +\214\002\214\002\000\000\000\000\214\002\214\002\214\002\000\000\ +\000\000\214\002\214\002\000\000\214\002\214\002\000\000\214\002\ +\000\000\000\000\000\000\214\002\000\000\214\002\000\000\000\000\ +\000\000\214\002\214\002\085\002\214\002\000\000\000\000\000\000\ +\152\002\152\002\152\002\000\000\000\000\214\002\152\002\152\002\ +\000\000\000\000\214\002\000\000\000\000\000\000\000\000\214\002\ +\214\002\214\002\214\002\214\002\214\002\000\000\000\000\214\002\ +\000\000\152\002\152\002\152\002\152\002\152\002\000\000\000\000\ +\000\000\000\000\152\002\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\152\002\000\000\152\002\ +\152\002\152\002\152\002\152\002\000\000\000\000\152\002\000\000\ +\000\000\000\000\152\002\152\002\152\002\000\000\000\000\000\000\ +\152\002\000\000\152\002\152\002\000\000\000\000\000\000\000\000\ +\000\000\152\002\000\000\000\000\000\000\000\000\000\000\152\002\ +\152\002\086\002\152\002\000\000\000\000\000\000\153\002\153\002\ +\153\002\085\002\000\000\000\000\153\002\153\002\000\000\000\000\ +\152\002\000\000\000\000\000\000\000\000\152\002\152\002\000\000\ +\152\002\152\002\000\000\000\000\000\000\152\002\000\000\153\002\ +\153\002\153\002\153\002\153\002\000\000\000\000\000\000\000\000\ +\153\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\153\002\000\000\153\002\153\002\153\002\ +\153\002\153\002\000\000\000\000\153\002\000\000\000\000\000\000\ +\153\002\153\002\153\002\000\000\000\000\000\000\153\002\000\000\ +\153\002\153\002\000\000\000\000\000\000\000\000\000\000\153\002\ +\000\000\000\000\000\000\000\000\000\000\153\002\153\002\083\002\ +\153\002\000\000\000\000\000\000\154\002\154\002\154\002\086\002\ +\000\000\000\000\154\002\154\002\000\000\000\000\153\002\000\000\ +\000\000\000\000\000\000\153\002\153\002\000\000\153\002\153\002\ +\000\000\000\000\000\000\153\002\000\000\154\002\154\002\154\002\ +\154\002\154\002\000\000\000\000\000\000\000\000\154\002\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\154\002\000\000\154\002\154\002\154\002\154\002\154\002\ +\000\000\000\000\154\002\000\000\000\000\000\000\154\002\154\002\ +\154\002\000\000\000\000\000\000\154\002\000\000\154\002\154\002\ +\000\000\000\000\000\000\000\000\000\000\154\002\000\000\000\000\ +\000\000\000\000\000\000\154\002\154\002\084\002\154\002\000\000\ +\000\000\000\000\155\002\155\002\155\002\083\002\000\000\000\000\ +\155\002\155\002\000\000\000\000\154\002\000\000\000\000\000\000\ +\000\000\154\002\154\002\000\000\154\002\154\002\000\000\000\000\ +\000\000\154\002\000\000\155\002\155\002\155\002\155\002\155\002\ +\000\000\000\000\000\000\000\000\155\002\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\155\002\ +\000\000\155\002\155\002\155\002\155\002\155\002\000\000\000\000\ +\155\002\000\000\000\000\000\000\155\002\155\002\155\002\000\000\ +\000\000\000\000\155\002\000\000\155\002\155\002\000\000\000\000\ +\000\000\000\000\000\000\155\002\000\000\000\000\000\000\000\000\ +\000\000\155\002\155\002\000\000\155\002\000\000\000\000\000\000\ +\000\000\000\000\000\000\084\002\199\000\200\000\201\000\000\000\ +\000\000\000\000\155\002\000\000\202\000\000\000\203\000\155\002\ +\155\002\000\000\155\002\155\002\204\000\205\000\206\000\155\002\ 
+\000\000\207\000\208\000\209\000\000\000\210\000\211\000\212\000\ +\000\000\213\000\214\000\215\000\216\000\000\000\000\000\000\000\ +\217\000\218\000\219\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\220\000\221\000\000\000\000\000\222\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\223\000\224\000\000\000\000\000\000\000\004\002\225\000\226\000\ +\000\000\004\002\000\000\227\000\228\000\229\000\230\000\231\000\ +\232\000\233\000\000\000\234\000\000\000\000\000\004\002\000\000\ +\004\002\235\000\000\000\243\001\000\000\000\000\236\000\004\002\ +\004\002\000\000\000\000\000\000\237\000\000\000\000\000\238\000\ +\239\000\004\002\240\000\241\000\242\000\243\000\244\000\000\000\ +\245\000\246\000\247\000\248\000\249\000\004\002\004\002\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\004\002\000\000\000\000\000\000\004\002\000\000\004\002\ +\004\002\004\002\000\000\004\002\000\000\000\000\004\002\000\000\ +\000\000\000\000\001\001\002\001\003\001\000\000\000\000\000\000\ +\008\000\164\001\000\000\005\001\000\000\000\000\011\000\243\001\ +\004\002\006\001\007\001\000\000\004\002\000\000\004\002\000\000\ +\000\000\004\002\000\000\000\000\000\000\008\001\137\000\000\000\ +\015\000\016\000\004\002\000\000\004\002\000\000\009\001\000\000\ +\000\000\000\000\000\000\000\000\010\001\011\001\012\001\013\001\ +\014\001\015\001\000\000\000\000\022\000\000\000\138\000\139\000\ +\000\000\140\000\141\000\000\000\000\000\028\000\000\000\016\001\ +\000\000\000\000\142\000\143\000\000\000\000\000\000\000\000\000\ +\000\000\165\001\166\001\000\000\167\001\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\020\001\021\001\168\001\169\001\ +\024\001\170\001\000\000\000\000\000\000\000\000\000\000\000\000\ +\026\001\000\000\000\000\146\000\027\001\000\000\028\001\044\000\ +\000\000\000\000\000\000\000\000\045\000\000\000\179\002\048\000\ +\147\000\001\001\002\001\003\001\000\000\000\000\000\000\008\000\ +\164\001\000\000\005\001\000\000\000\000\011\000\000\000\000\000\ +\006\001\007\001\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\008\001\137\000\000\000\015\000\ +\016\000\000\000\000\000\000\000\000\000\009\001\000\000\000\000\ +\000\000\000\000\000\000\010\001\011\001\012\001\013\001\014\001\ +\015\001\000\000\000\000\022\000\000\000\138\000\139\000\000\000\ +\140\000\141\000\000\000\000\000\028\000\000\000\016\001\000\000\ +\000\000\142\000\143\000\000\000\000\000\000\000\000\000\000\000\ +\165\001\166\001\000\000\167\001\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\020\001\021\001\168\001\169\001\024\001\ +\170\001\000\000\000\000\000\000\000\000\000\000\000\000\026\001\ +\000\000\000\000\146\000\027\001\000\000\028\001\044\000\000\000\ +\000\000\000\000\000\000\045\000\000\000\124\003\048\000\147\000\ +\001\001\002\001\003\001\000\000\000\000\000\000\008\000\164\001\ +\000\000\005\001\000\000\000\000\011\000\000\000\000\000\006\001\ +\007\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\008\001\137\000\000\000\015\000\016\000\ +\000\000\000\000\000\000\000\000\009\001\000\000\000\000\000\000\ +\000\000\000\000\010\001\011\001\012\001\013\001\014\001\015\001\ +\000\000\000\000\022\000\000\000\138\000\139\000\000\000\140\000\ +\141\000\000\000\000\000\028\000\000\000\016\001\000\000\000\000\ +\142\000\143\000\000\000\000\000\000\000\000\000\000\000\165\001\ +\166\001\000\000\167\001\000\000\000\000\000\000\000\000\000\000\ 
+\000\000\000\000\020\001\021\001\168\001\169\001\024\001\170\001\ +\000\000\000\000\000\000\000\000\000\000\000\000\026\001\000\000\ +\000\000\146\000\027\001\000\000\028\001\044\000\000\000\000\000\ +\000\000\000\000\045\000\000\000\073\004\048\000\147\000\001\001\ +\002\001\003\001\000\000\000\000\000\000\008\000\164\001\000\000\ +\005\001\000\000\000\000\011\000\000\000\000\000\006\001\007\001\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\008\001\137\000\000\000\015\000\016\000\000\000\ +\000\000\000\000\000\000\009\001\000\000\000\000\000\000\000\000\ +\000\000\010\001\011\001\012\001\013\001\014\001\015\001\000\000\ +\000\000\022\000\000\000\138\000\139\000\000\000\140\000\141\000\ +\000\000\000\000\028\000\000\000\016\001\000\000\000\000\142\000\ +\143\000\000\000\000\000\000\000\000\000\000\000\165\001\166\001\ +\000\000\167\001\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\020\001\021\001\168\001\169\001\024\001\170\001\000\000\ +\000\000\091\003\000\000\000\000\000\000\026\001\000\000\008\000\ +\146\000\027\001\000\000\028\001\044\000\011\000\000\000\000\000\ +\018\003\045\000\000\000\000\000\048\000\147\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\137\000\000\000\015\000\ +\016\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\022\000\000\000\138\000\139\000\000\000\ +\140\000\141\000\000\000\000\000\028\000\000\000\143\002\000\000\ +\000\000\142\000\143\000\000\000\008\000\000\000\000\000\000\000\ +\144\000\000\000\011\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\145\000\000\000\000\000\ +\000\000\000\000\137\000\000\000\015\000\016\000\000\000\092\003\ +\000\000\000\000\146\000\000\000\000\000\000\000\044\000\000\000\ +\000\000\000\000\000\000\045\000\000\000\000\000\048\000\147\000\ +\022\000\000\000\138\000\139\000\000\000\140\000\141\000\000\000\ +\000\000\028\000\000\000\145\002\000\000\000\000\142\000\143\000\ +\000\000\008\000\000\000\000\000\000\000\144\000\000\000\011\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\145\000\000\000\000\000\000\000\000\000\137\000\ +\000\000\015\000\016\000\000\000\000\000\000\000\000\000\146\000\ +\000\000\000\000\000\000\044\000\000\000\000\000\000\000\000\000\ +\045\000\000\000\000\000\048\000\147\000\022\000\000\000\138\000\ +\139\000\000\000\140\000\141\000\000\000\000\000\028\000\000\000\ +\080\004\000\000\000\000\142\000\143\000\000\000\008\000\000\000\ +\000\000\000\000\144\000\000\000\011\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\145\000\ +\000\000\000\000\000\000\000\000\137\000\000\000\015\000\016\000\ +\000\000\000\000\000\000\000\000\146\000\000\000\000\000\000\000\ +\044\000\000\000\000\000\000\000\000\000\045\000\000\000\000\000\ +\048\000\147\000\022\000\000\000\138\000\139\000\000\000\140\000\ +\141\000\000\000\000\000\028\000\000\000\082\004\000\000\000\000\ +\142\000\143\000\000\000\008\000\000\000\000\000\000\000\144\000\ +\000\000\011\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\145\000\000\000\000\000\000\000\ +\000\000\137\000\000\000\015\000\016\000\000\000\000\000\000\000\ +\000\000\146\000\000\000\000\000\000\000\044\000\000\000\000\000\ +\000\000\000\000\045\000\000\000\000\000\048\000\147\000\022\000\ +\000\000\138\000\139\000\000\000\140\000\141\000\000\000\000\000\ 
+\028\000\000\000\084\004\000\000\000\000\142\000\143\000\000\000\ +\008\000\000\000\000\000\000\000\144\000\000\000\011\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\145\000\000\000\000\000\000\000\000\000\137\000\000\000\ +\015\000\016\000\000\000\000\000\000\000\000\000\146\000\000\000\ +\000\000\000\000\044\000\000\000\000\000\000\000\000\000\045\000\ +\000\000\000\000\048\000\147\000\022\000\000\000\138\000\139\000\ +\000\000\140\000\141\000\000\000\000\000\028\000\000\000\000\000\ +\000\000\000\000\142\000\143\000\007\000\008\000\009\000\000\000\ +\000\000\144\000\010\000\011\000\012\000\243\001\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\145\000\000\000\ +\000\000\000\000\000\000\013\000\014\000\015\000\016\000\017\000\ +\018\000\019\000\000\000\146\000\000\000\000\000\020\000\044\000\ +\021\000\000\000\000\000\000\000\045\000\000\000\000\000\048\000\ +\147\000\022\000\000\000\023\000\024\000\025\000\026\000\027\000\ +\000\000\000\000\028\000\029\000\000\000\000\000\030\000\031\000\ +\032\000\000\000\000\000\033\000\034\000\000\000\035\000\036\000\ +\000\000\037\000\000\000\000\000\000\000\038\000\000\000\039\000\ +\000\000\000\000\000\000\040\000\041\000\000\000\042\000\000\000\ +\244\001\000\000\000\000\007\000\008\000\009\000\000\000\043\000\ +\000\000\010\000\011\000\012\000\044\000\000\000\000\000\000\000\ +\000\000\045\000\046\000\047\000\048\000\049\000\050\000\000\000\ +\000\000\051\000\013\000\014\000\015\000\016\000\017\000\018\000\ +\019\000\000\000\000\000\000\000\000\000\020\000\000\000\021\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\022\000\000\000\023\000\024\000\025\000\026\000\027\000\000\000\ +\000\000\028\000\029\000\000\000\000\000\030\000\031\000\032\000\ +\000\000\000\000\033\000\034\000\000\000\035\000\036\000\000\000\ +\037\000\000\000\000\000\000\000\038\000\000\000\039\000\000\000\ +\000\000\000\000\040\000\041\000\000\000\042\000\000\000\000\000\ +\000\000\007\000\008\000\009\000\000\000\000\000\043\000\010\000\ +\011\000\000\000\000\000\044\000\000\000\000\000\000\000\000\000\ +\045\000\046\000\047\000\048\000\049\000\050\000\000\000\000\000\ +\051\000\000\000\015\000\016\000\017\000\018\000\019\000\000\000\ +\000\000\000\000\000\000\020\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\022\000\000\000\ +\023\000\024\000\025\000\026\000\027\000\000\000\000\000\028\000\ +\000\000\000\000\000\000\030\000\031\000\032\000\000\000\000\000\ +\000\000\034\000\000\000\035\000\036\000\000\000\000\000\000\000\ +\000\000\000\000\038\000\000\000\000\000\000\000\000\000\000\000\ +\040\000\041\000\000\000\042\000\000\000\000\000\000\000\000\000\ +\194\000\007\000\008\000\009\000\000\000\000\000\197\000\010\000\ +\011\000\044\000\000\000\000\000\000\000\000\000\045\000\046\000\ +\000\000\048\000\049\000\000\000\000\000\000\000\051\000\000\000\ +\000\000\000\000\015\000\016\000\017\000\018\000\019\000\000\000\ +\000\000\000\000\000\000\020\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\022\000\000\000\ +\023\000\024\000\025\000\026\000\027\000\000\000\000\000\028\000\ +\000\000\000\000\000\000\030\000\031\000\032\000\000\000\000\000\ +\000\000\034\000\000\000\035\000\036\000\000\000\000\000\000\000\ +\000\000\000\000\038\000\000\000\000\000\000\000\000\000\000\000\ +\040\000\041\000\000\000\042\000\000\000\000\000\007\000\008\000\ +\009\000\000\000\000\000\000\000\010\000\011\000\000\000\000\000\ 
+\000\000\044\000\000\000\000\000\000\000\000\000\045\000\046\000\ +\000\000\048\000\049\000\195\001\000\000\000\000\051\000\015\000\ +\016\000\017\000\018\000\019\000\000\000\000\000\000\000\000\000\ +\020\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\022\000\000\000\023\000\024\000\025\000\ +\026\000\027\000\000\000\000\000\028\000\000\000\000\000\000\000\ +\030\000\031\000\032\000\000\000\000\000\000\000\034\000\000\000\ +\035\000\036\000\000\000\000\000\000\000\000\000\000\000\038\000\ +\000\000\000\000\000\000\000\000\000\000\040\000\041\000\000\000\ +\042\000\000\000\000\000\007\000\008\000\009\000\000\000\000\000\ +\000\000\010\000\011\000\000\000\000\000\000\000\044\000\000\000\ +\000\000\000\000\000\000\045\000\046\000\000\000\048\000\049\000\ +\000\000\000\000\000\000\051\000\015\000\016\000\017\000\018\000\ +\019\000\000\000\000\000\000\000\000\000\020\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\022\000\000\000\023\000\024\000\025\000\026\000\027\000\000\000\ +\000\000\028\000\000\000\000\000\000\000\030\000\031\000\032\000\ +\000\000\000\000\000\000\034\000\000\000\035\000\036\000\000\000\ +\000\000\000\000\000\000\000\000\038\000\000\000\000\000\000\000\ +\000\000\054\002\040\000\041\000\000\000\042\000\000\000\000\000\ +\007\000\008\000\009\000\000\000\000\000\000\000\010\000\011\000\ +\000\000\000\000\000\000\044\000\000\000\000\000\000\000\000\000\ +\045\000\046\000\000\000\048\000\049\000\000\000\000\000\000\000\ +\051\000\015\000\016\000\017\000\018\000\019\000\000\000\000\000\ +\000\000\000\000\020\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\022\000\000\000\023\000\ +\024\000\025\000\026\000\027\000\000\000\000\000\028\000\000\000\ +\000\000\000\000\030\000\031\000\032\000\000\000\000\000\000\000\ +\034\000\000\000\035\000\036\000\000\000\000\000\000\000\000\000\ +\000\000\038\000\000\000\000\000\000\000\000\000\000\000\040\000\ +\041\000\000\000\042\000\000\000\000\000\000\000\000\000\014\003\ +\007\000\008\000\009\000\000\000\000\000\016\003\010\000\011\000\ +\044\000\000\000\000\000\000\000\000\000\045\000\046\000\000\000\ +\048\000\049\000\000\000\000\000\000\000\051\000\000\000\000\000\ +\000\000\015\000\016\000\017\000\018\000\019\000\000\000\000\000\ +\000\000\000\000\020\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\022\000\000\000\023\000\ +\024\000\025\000\026\000\027\000\000\000\000\000\028\000\000\000\ +\000\000\000\000\030\000\031\000\032\000\000\000\000\000\000\000\ +\034\000\000\000\035\000\036\000\000\000\000\000\000\000\000\000\ +\000\000\038\000\000\000\000\000\000\000\000\000\000\000\040\000\ +\041\000\000\000\042\000\000\000\000\000\000\000\007\000\008\000\ +\009\000\000\000\000\000\000\000\010\000\011\000\000\000\000\000\ +\044\000\000\000\000\000\000\000\000\000\045\000\046\000\053\004\ +\048\000\049\000\000\000\000\000\000\000\051\000\000\000\015\000\ +\016\000\017\000\018\000\019\000\000\000\000\000\000\000\000\000\ +\020\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\022\000\000\000\023\000\024\000\025\000\ +\026\000\027\000\000\000\000\000\028\000\000\000\000\000\000\000\ +\030\000\031\000\032\000\000\000\000\000\000\000\034\000\000\000\ +\035\000\036\000\000\000\000\000\000\000\000\000\000\000\038\000\ +\000\000\000\000\000\000\000\000\000\000\040\000\041\000\000\000\ +\042\000\000\000\000\000\216\002\216\002\216\002\000\000\000\000\ 
+\000\000\216\002\216\002\000\000\000\000\000\000\044\000\000\000\ +\000\000\000\000\000\000\045\000\046\000\000\000\048\000\049\000\ +\216\002\000\000\000\000\051\000\216\002\216\002\216\002\216\002\ +\216\002\000\000\000\000\000\000\000\000\216\002\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\216\002\000\000\216\002\216\002\216\002\216\002\216\002\000\000\ +\000\000\216\002\000\000\000\000\000\000\216\002\216\002\216\002\ +\000\000\000\000\000\000\216\002\000\000\216\002\216\002\000\000\ +\000\000\000\000\000\000\000\000\216\002\000\000\000\000\000\000\ +\000\000\000\000\216\002\216\002\000\000\216\002\000\000\000\000\ +\007\000\008\000\009\000\000\000\000\000\000\000\010\000\011\000\ +\000\000\000\000\000\000\216\002\000\000\000\000\000\000\000\000\ +\216\002\216\002\000\000\216\002\216\002\000\000\000\000\000\000\ +\216\002\015\000\016\000\017\000\018\000\019\000\000\000\000\000\ +\000\000\000\000\020\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\022\000\000\000\023\000\ +\024\000\025\000\026\000\027\000\000\000\000\000\028\000\000\000\ +\000\000\000\000\030\000\031\000\032\000\000\000\000\000\000\000\ +\034\000\000\000\035\000\036\000\000\000\000\000\000\000\000\000\ +\000\000\038\000\000\000\000\000\000\000\000\000\000\000\040\000\ +\041\000\000\000\042\000\000\000\000\000\216\002\216\002\216\002\ +\000\000\000\000\000\000\216\002\216\002\000\000\000\000\000\000\ +\044\000\000\000\000\000\000\000\000\000\045\000\046\000\000\000\ +\048\000\049\000\000\000\000\000\000\000\051\000\216\002\216\002\ +\216\002\216\002\216\002\000\000\000\000\000\000\000\000\216\002\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\216\002\000\000\216\002\216\002\216\002\216\002\ +\216\002\000\000\000\000\216\002\000\000\000\000\000\000\216\002\ +\216\002\216\002\000\000\000\000\000\000\216\002\000\000\216\002\ +\216\002\000\000\000\000\000\000\000\000\000\000\216\002\000\000\ +\000\000\000\000\000\000\000\000\216\002\216\002\000\000\216\002\ +\000\000\000\000\214\002\214\002\214\002\000\000\000\000\000\000\ +\214\002\214\002\000\000\000\000\000\000\216\002\000\000\000\000\ +\000\000\000\000\216\002\216\002\000\000\216\002\216\002\000\000\ +\000\000\000\000\216\002\214\002\214\002\214\002\214\002\214\002\ +\000\000\000\000\000\000\000\000\214\002\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\214\002\ +\000\000\214\002\214\002\214\002\214\002\214\002\000\000\000\000\ +\214\002\000\000\000\000\000\000\214\002\214\002\214\002\000\000\ +\000\000\008\000\214\002\000\000\214\002\214\002\000\000\011\000\ +\000\000\147\003\000\000\214\002\229\001\000\000\000\000\000\000\ +\000\000\214\002\214\002\000\000\214\002\000\000\148\003\000\000\ +\000\000\015\000\016\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\214\002\000\000\000\000\000\000\000\000\214\002\ +\214\002\000\000\214\002\214\002\000\000\022\000\207\001\214\002\ +\139\000\000\000\140\000\141\000\000\000\000\000\028\000\000\000\ +\000\000\000\000\000\000\142\000\149\003\000\000\008\000\000\000\ +\000\000\000\000\144\000\000\000\011\000\000\000\228\001\000\000\ +\000\000\229\001\000\000\000\000\209\001\000\000\000\000\145\000\ +\000\000\000\000\000\000\148\003\210\001\000\000\015\000\016\000\ +\000\000\008\000\000\000\000\000\146\000\000\000\000\000\011\000\ +\044\000\189\002\000\000\211\001\000\000\045\000\000\000\000\000\ +\048\000\147\000\022\000\207\001\000\000\139\000\000\000\140\000\ 
+\141\000\015\000\016\000\028\000\000\000\000\000\000\000\000\000\ +\142\000\149\003\000\000\000\000\000\000\000\000\000\000\144\000\ +\000\000\000\000\000\000\000\000\000\000\022\000\207\001\000\000\ +\139\000\209\001\140\000\141\000\145\000\000\000\028\000\000\000\ +\000\000\210\001\000\000\142\000\190\002\000\000\000\000\000\000\ +\000\000\146\000\144\000\000\000\191\002\044\000\000\000\000\000\ +\211\001\000\000\045\000\000\000\209\001\048\000\147\000\145\000\ +\000\000\000\000\008\000\000\000\210\001\000\000\000\000\000\000\ +\011\000\000\000\124\005\000\000\146\000\000\000\000\000\000\000\ +\044\000\000\000\000\000\211\001\000\000\045\000\000\000\148\003\ +\048\000\147\000\015\000\016\000\000\000\008\000\000\000\000\000\ +\000\000\000\000\000\000\011\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\022\000\207\001\ +\000\000\139\000\000\000\140\000\141\000\015\000\016\000\028\000\ +\000\000\000\000\000\000\000\000\142\000\149\003\000\000\000\000\ +\000\000\000\000\000\000\144\000\000\000\000\000\000\000\000\000\ +\000\000\022\000\207\001\000\000\139\000\209\001\140\000\141\000\ +\145\000\000\000\028\000\000\000\000\000\210\001\000\000\142\000\ +\208\001\000\000\216\002\000\000\000\000\146\000\144\000\000\000\ +\216\002\044\000\000\000\000\000\211\001\000\000\045\000\000\000\ +\209\001\048\000\147\000\145\000\000\000\000\000\000\000\000\000\ +\210\001\000\000\216\002\216\002\000\000\000\000\000\000\000\000\ +\146\000\000\000\000\000\000\000\044\000\000\000\000\000\211\001\ +\000\000\045\000\000\000\000\000\048\000\147\000\216\002\216\002\ +\000\000\216\002\000\000\216\002\216\002\000\000\000\000\216\002\ +\000\000\000\000\000\000\000\000\216\002\216\002\000\000\000\000\ +\008\000\000\000\000\000\216\002\000\000\000\000\011\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\216\002\000\000\000\000\ +\216\002\000\000\000\000\000\000\000\000\216\002\137\000\000\000\ +\015\000\016\000\000\000\000\000\000\000\216\002\000\000\000\000\ +\000\000\216\002\000\000\000\000\216\002\000\000\216\002\000\000\ +\000\000\216\002\216\002\000\000\022\000\000\000\138\000\139\000\ +\000\000\140\000\141\000\000\000\000\000\028\000\000\000\000\000\ +\000\000\000\000\142\000\143\000\000\000\000\000\000\000\008\000\ +\000\000\144\000\000\000\162\001\000\000\011\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\145\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\137\000\194\000\015\000\ +\016\000\000\000\000\000\146\000\000\000\000\000\000\000\044\000\ +\000\000\000\000\000\000\000\000\045\000\000\000\000\000\048\000\ +\147\000\000\000\000\000\022\000\000\000\138\000\139\000\000\000\ +\140\000\141\000\000\000\000\000\028\000\000\000\000\000\000\000\ +\000\000\142\000\143\000\000\000\008\000\000\000\000\000\000\000\ +\144\000\000\000\011\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\008\000\009\000\000\000\000\000\145\000\010\000\011\000\ +\000\000\000\000\137\000\000\000\015\000\016\000\000\000\000\000\ +\000\000\000\000\146\000\000\000\000\000\000\000\044\000\000\000\ +\000\000\015\000\016\000\045\000\000\000\000\000\048\000\147\000\ +\022\000\000\000\138\000\139\000\000\000\140\000\141\000\000\000\ +\000\000\028\000\000\000\000\000\000\000\022\000\142\000\143\000\ +\024\000\025\000\026\000\027\000\000\000\144\000\028\000\000\000\ +\216\002\000\000\216\002\182\000\032\000\000\000\216\002\000\000\ +\000\000\000\000\145\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\090\003\000\000\000\000\000\000\216\002\146\000\ 
+\216\002\216\002\042\000\044\000\000\000\000\000\000\000\000\000\ +\045\000\000\000\000\000\048\000\147\000\000\000\000\000\000\000\ +\044\000\000\000\000\000\000\000\216\002\045\000\216\002\216\002\ +\048\000\216\002\216\002\000\000\000\000\216\002\000\000\000\000\ +\000\000\000\000\216\002\216\002\000\000\008\000\000\000\000\000\ +\000\000\216\002\000\000\011\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\216\002\000\000\ +\000\000\000\000\000\000\137\000\000\000\015\000\016\000\000\000\ +\000\000\000\000\000\000\216\002\000\000\000\000\000\000\216\002\ +\000\000\000\000\000\000\000\000\216\002\000\000\000\000\216\002\ +\216\002\022\000\000\000\138\000\139\000\000\000\140\000\141\000\ +\000\000\000\000\028\000\000\000\000\000\000\000\000\000\142\000\ +\143\000\000\000\216\002\000\000\000\000\000\000\144\000\000\000\ +\216\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\145\000\000\000\000\000\000\000\000\000\ +\216\002\000\000\216\002\216\002\000\000\216\002\000\000\000\000\ +\146\000\000\000\000\000\216\002\044\000\000\000\000\000\000\000\ +\000\000\045\000\000\000\000\000\048\000\147\000\216\002\000\000\ +\216\002\216\002\000\000\216\002\216\002\216\002\216\002\216\002\ +\000\000\000\000\000\000\000\000\216\002\216\002\000\000\000\000\ +\000\000\000\000\000\000\216\002\000\000\000\000\000\000\000\000\ +\000\000\216\002\000\000\216\002\216\002\000\000\216\002\216\002\ +\216\002\000\000\216\002\000\000\000\000\000\000\000\000\216\002\ +\216\002\000\000\148\002\000\000\000\000\216\002\216\002\000\000\ +\148\002\216\002\000\000\000\000\000\000\000\000\216\002\000\000\ +\000\000\216\002\216\002\216\002\000\000\000\000\000\000\000\000\ +\148\002\000\000\148\002\148\002\216\002\129\002\000\000\000\000\ +\216\002\000\000\000\000\129\002\216\002\000\000\000\000\000\000\ +\000\000\216\002\000\000\000\000\216\002\216\002\148\002\000\000\ +\148\002\148\002\000\000\148\002\148\002\129\002\129\002\148\002\ +\000\000\000\000\000\000\000\000\148\002\148\002\000\000\000\000\ +\000\000\000\000\000\000\148\002\000\000\000\000\000\000\000\000\ +\000\000\129\002\000\000\129\002\129\002\000\000\129\002\129\002\ +\148\002\000\000\129\002\000\000\000\000\000\000\000\000\129\002\ +\129\002\000\000\214\002\000\000\000\000\148\002\129\002\000\000\ +\214\002\148\002\000\000\000\000\000\000\000\000\148\002\000\000\ +\000\000\148\002\148\002\129\002\000\000\000\000\000\000\000\000\ +\000\000\000\000\214\002\214\002\000\000\008\000\000\000\000\000\ +\129\002\000\000\000\000\011\000\129\002\000\000\000\000\000\000\ +\000\000\129\002\000\000\000\000\129\002\129\002\214\002\000\000\ +\214\002\214\002\000\000\214\002\214\002\015\000\016\000\214\002\ +\000\000\000\000\000\000\000\000\214\002\214\002\000\000\000\000\ +\000\000\000\000\000\000\214\002\000\000\000\000\000\000\000\000\ +\000\000\022\000\000\000\000\000\139\000\000\000\140\000\141\000\ +\214\002\000\000\028\000\000\000\000\000\000\000\000\000\142\000\ +\143\000\000\000\216\002\000\000\000\000\214\002\144\000\000\000\ +\216\002\214\002\000\000\000\000\000\000\000\000\214\002\000\000\ +\000\000\214\002\214\002\145\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\216\002\216\002\000\000\000\000\000\000\000\000\ +\146\000\000\000\000\000\000\000\044\000\000\000\000\000\000\000\ +\000\000\045\000\000\000\000\000\048\000\147\000\216\002\000\000\ +\000\000\216\002\000\000\216\002\216\002\000\000\000\000\216\002\ +\000\000\000\000\000\000\000\000\216\002\216\002\000\000\008\000\ 
+\009\000\000\000\000\000\216\002\010\000\011\000\008\000\009\000\ +\000\000\000\000\000\000\010\000\011\000\000\000\000\000\087\001\ +\216\002\000\000\000\000\000\000\000\000\000\000\000\000\015\000\ +\016\000\000\000\000\000\000\000\000\000\216\002\015\000\016\000\ +\000\000\216\002\000\000\000\000\000\000\000\000\216\002\000\000\ +\088\001\216\002\216\002\022\000\089\001\000\000\024\000\025\000\ +\026\000\027\000\022\000\089\001\028\000\024\000\025\000\026\000\ +\027\000\142\000\032\000\028\000\000\000\000\000\000\000\000\000\ +\142\000\032\000\000\000\000\000\000\000\000\000\000\000\216\002\ +\216\002\000\000\090\001\000\000\216\002\216\002\000\000\000\000\ +\042\000\090\001\091\001\000\000\000\000\000\000\000\000\042\000\ +\000\000\091\001\092\001\093\001\000\000\000\000\044\000\216\002\ +\216\002\094\001\000\000\045\000\000\000\044\000\048\000\000\000\ +\094\001\000\000\045\000\000\000\000\000\048\000\000\000\000\000\ +\000\000\000\000\000\000\216\002\000\000\000\000\216\002\216\002\ +\216\002\216\002\000\000\000\000\216\002\000\000\000\000\000\000\ +\000\000\216\002\216\002\000\000\000\000\180\004\049\001\050\001\ +\000\000\000\000\000\000\000\000\000\000\000\000\051\001\000\000\ +\000\000\000\000\000\000\181\004\052\001\053\001\182\004\054\001\ +\216\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\055\001\000\000\000\000\000\000\000\000\000\000\216\002\000\000\ +\000\000\056\001\000\000\216\002\000\000\000\000\216\002\057\001\ +\058\001\059\001\060\001\061\001\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\062\001\000\000\167\002\000\000\000\000\162\000\ +\000\000\000\000\000\000\000\000\063\001\064\001\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\065\001\ +\066\001\067\001\068\001\069\001\000\000\001\001\002\001\003\001\ +\000\000\000\000\000\000\183\004\164\001\000\000\005\001\000\000\ +\000\000\071\001\000\000\000\000\112\000\007\001\113\000\114\000\ +\028\000\000\000\115\000\000\000\000\000\116\000\117\000\000\000\ +\008\001\000\000\000\000\000\000\000\000\000\000\000\000\134\001\ +\000\000\009\001\000\000\000\000\000\000\000\000\118\000\010\001\ +\011\001\012\001\013\001\014\001\015\001\000\000\119\000\120\000\ +\000\000\000\000\000\000\168\002\000\000\000\000\121\000\000\000\ +\000\000\000\000\016\001\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\122\000\123\000\173\002\166\001\000\000\174\002\ +\000\000\000\000\000\000\000\000\224\003\049\001\050\001\020\001\ +\021\001\175\002\169\001\024\001\170\001\051\001\000\000\000\000\ +\000\000\000\000\000\000\052\001\053\001\000\000\054\001\027\001\ +\000\000\028\001\000\000\000\000\000\000\000\000\000\000\055\001\ +\000\000\000\000\000\000\000\000\226\003\049\001\050\001\000\000\ +\056\001\000\000\000\000\000\000\000\000\051\001\057\001\058\001\ +\059\001\060\001\061\001\052\001\053\001\000\000\054\001\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\055\001\ +\000\000\062\001\000\000\000\000\000\000\000\000\162\000\000\000\ +\056\001\000\000\000\000\063\001\064\001\000\000\057\001\058\001\ +\059\001\060\001\061\001\000\000\000\000\000\000\065\001\066\001\ +\067\001\068\001\069\001\000\000\000\000\000\000\000\000\225\003\ +\000\000\062\001\000\000\000\000\000\000\000\000\162\000\000\000\ +\071\001\000\000\000\000\063\001\064\001\000\000\000\000\000\000\ +\000\000\000\000\228\003\049\001\050\001\000\000\065\001\066\001\ +\067\001\068\001\069\001\051\001\000\000\000\000\000\000\000\000\ 
+\227\003\052\001\053\001\000\000\054\001\000\000\000\000\000\000\ +\071\001\000\000\000\000\000\000\000\000\055\001\000\000\000\000\ +\000\000\000\000\224\003\049\001\050\001\000\000\056\001\000\000\ +\000\000\000\000\000\000\051\001\057\001\058\001\059\001\060\001\ +\061\001\052\001\053\001\000\000\054\001\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\055\001\000\000\062\001\ +\000\000\000\000\000\000\000\000\162\000\000\000\056\001\000\000\ +\000\000\063\001\064\001\000\000\057\001\058\001\059\001\060\001\ +\061\001\000\000\000\000\000\000\065\001\066\001\067\001\068\001\ +\069\001\000\000\000\000\000\000\000\000\000\000\000\000\062\001\ +\229\003\000\000\000\000\000\000\162\000\000\000\071\001\000\000\ +\000\000\063\001\064\001\000\000\000\000\000\000\000\000\000\000\ +\226\003\049\001\050\001\000\000\065\001\066\001\067\001\068\001\ +\069\001\051\001\000\000\000\000\000\000\023\004\000\000\052\001\ +\053\001\000\000\054\001\000\000\000\000\000\000\071\001\000\000\ +\000\000\000\000\000\000\055\001\000\000\000\000\000\000\000\000\ +\228\003\049\001\050\001\000\000\056\001\000\000\000\000\000\000\ +\000\000\051\001\057\001\058\001\059\001\060\001\061\001\052\001\ +\053\001\000\000\054\001\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\055\001\000\000\062\001\000\000\000\000\ +\000\000\000\000\162\000\000\000\056\001\000\000\000\000\063\001\ +\064\001\000\000\057\001\058\001\059\001\060\001\061\001\000\000\ +\000\000\000\000\065\001\066\001\067\001\068\001\069\001\000\000\ +\000\000\000\000\000\000\000\000\024\004\062\001\000\000\000\000\ +\000\000\000\000\162\000\000\000\071\001\000\000\000\000\063\001\ +\064\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\065\001\066\001\067\001\068\001\069\001\226\004\ +\049\001\050\001\000\000\000\000\000\000\000\000\025\004\000\000\ +\051\001\000\000\000\000\000\000\071\001\000\000\052\001\053\001\ +\000\000\054\001\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\055\001\000\000\000\000\000\000\000\000\228\004\ +\049\001\050\001\000\000\056\001\000\000\000\000\000\000\000\000\ +\051\001\057\001\058\001\059\001\060\001\061\001\052\001\053\001\ +\000\000\054\001\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\055\001\000\000\062\001\000\000\000\000\000\000\ +\000\000\162\000\000\000\056\001\000\000\000\000\063\001\064\001\ +\000\000\057\001\058\001\059\001\060\001\061\001\000\000\000\000\ +\000\000\065\001\066\001\067\001\068\001\069\001\000\000\000\000\ +\000\000\000\000\227\004\000\000\062\001\000\000\000\000\000\000\ +\000\000\162\000\000\000\071\001\000\000\000\000\063\001\064\001\ +\000\000\000\000\000\000\000\000\000\000\230\004\049\001\050\001\ +\000\000\065\001\066\001\067\001\068\001\069\001\051\001\000\000\ +\000\000\000\000\000\000\229\004\052\001\053\001\000\000\054\001\ +\000\000\000\000\000\000\071\001\000\000\000\000\000\000\000\000\ +\055\001\000\000\000\000\000\000\000\000\226\004\049\001\050\001\ +\000\000\056\001\000\000\000\000\000\000\000\000\051\001\057\001\ +\058\001\059\001\060\001\061\001\052\001\053\001\000\000\054\001\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\055\001\000\000\062\001\000\000\000\000\000\000\000\000\162\000\ +\000\000\056\001\000\000\000\000\063\001\064\001\000\000\057\001\ +\058\001\059\001\060\001\061\001\000\000\000\000\000\000\065\001\ +\066\001\067\001\068\001\069\001\000\000\000\000\000\000\000\000\ +\000\000\000\000\062\001\231\004\000\000\000\000\000\000\162\000\ 
+\000\000\071\001\000\000\000\000\063\001\064\001\000\000\000\000\ +\000\000\000\000\000\000\228\004\049\001\050\001\000\000\065\001\ +\066\001\067\001\068\001\069\001\051\001\000\000\000\000\000\000\ +\249\004\000\000\052\001\053\001\000\000\054\001\000\000\000\000\ +\000\000\071\001\000\000\000\000\000\000\000\000\055\001\000\000\ +\000\000\000\000\000\000\230\004\049\001\050\001\000\000\056\001\ +\000\000\000\000\000\000\000\000\051\001\057\001\058\001\059\001\ +\060\001\061\001\052\001\053\001\000\000\054\001\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\055\001\000\000\ +\062\001\000\000\000\000\000\000\000\000\162\000\000\000\056\001\ +\000\000\000\000\063\001\064\001\000\000\057\001\058\001\059\001\ +\060\001\061\001\000\000\000\000\000\000\065\001\066\001\067\001\ +\068\001\069\001\000\000\000\000\000\000\000\000\000\000\250\004\ +\062\001\049\001\050\001\000\000\000\000\162\000\000\000\071\001\ +\000\000\051\001\063\001\064\001\000\000\000\000\000\000\052\001\ +\053\001\000\000\054\001\000\000\000\000\065\001\066\001\067\001\ +\068\001\069\001\000\000\055\001\000\000\000\000\000\000\000\000\ +\000\000\251\004\000\000\000\000\056\001\000\000\000\000\071\001\ +\000\000\000\000\057\001\058\001\059\001\060\001\061\001\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\062\001\000\000\000\000\ +\000\000\000\000\162\000\000\000\000\000\000\000\000\000\063\001\ +\064\001\049\001\050\001\000\000\000\000\000\000\000\000\000\000\ +\000\000\051\001\065\001\066\001\067\001\068\001\069\001\052\001\ +\053\001\000\000\054\001\000\000\000\000\000\000\000\000\070\001\ +\000\000\057\004\000\000\055\001\071\001\000\000\000\000\000\000\ +\000\000\049\001\050\001\000\000\056\001\000\000\000\000\000\000\ +\000\000\051\001\057\001\058\001\059\001\060\001\061\001\052\001\ +\053\001\000\000\054\001\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\055\001\000\000\062\001\000\000\000\000\ +\000\000\000\000\162\000\000\000\056\001\000\000\000\000\063\001\ +\064\001\000\000\057\001\058\001\059\001\060\001\061\001\000\000\ +\000\000\000\000\065\001\066\001\067\001\068\001\069\001\000\000\ +\000\000\000\000\000\000\000\000\000\000\062\001\049\001\050\001\ +\000\000\000\000\162\000\000\000\071\001\000\000\051\001\063\001\ +\064\001\000\000\000\000\000\000\052\001\000\000\000\000\000\000\ +\000\000\000\000\065\001\066\001\067\001\068\001\069\001\000\000\ +\055\001\000\000\000\000\000\000\000\000\000\000\049\001\050\001\ +\000\000\056\001\000\000\000\000\071\001\000\000\000\000\057\001\ +\058\001\059\001\060\001\061\001\052\001\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\055\001\000\000\062\001\000\000\000\000\000\000\000\000\162\000\ +\000\000\056\001\000\000\000\000\063\001\064\001\000\000\057\001\ +\058\001\059\001\060\001\061\001\012\000\000\000\000\000\065\001\ +\066\001\067\001\068\001\069\001\000\000\000\000\000\000\000\000\ +\000\000\000\000\062\001\089\000\014\000\000\000\000\000\162\000\ +\000\000\071\001\000\000\000\000\063\001\064\001\000\000\000\000\ +\090\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\066\001\067\001\068\001\069\001\000\000\000\000\112\000\000\000\ +\113\000\114\000\028\000\029\000\115\000\000\000\000\000\116\000\ +\117\000\071\001\000\000\033\000\000\000\000\000\000\000\000\000\ +\000\000\091\000\000\000\000\000\000\000\000\000\000\000\039\000\ +\118\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ 
+\119\000\120\000\000\000\000\000\000\000\000\000\000\000\092\000\ +\121\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\093\000\122\000\123\000\050\000" + +let yycheck = "\003\000\ +\002\000\005\000\177\000\177\000\002\000\174\000\112\000\112\000\ +\255\000\185\000\115\000\008\000\112\000\139\000\105\001\061\002\ +\118\000\002\000\092\000\180\000\001\000\133\000\002\000\104\002\ +\233\002\221\001\104\002\114\000\002\000\002\003\104\000\002\000\ +\027\000\034\004\146\000\059\002\002\000\000\000\192\002\003\000\ +\150\001\002\000\002\000\196\000\116\003\198\000\001\000\245\003\ +\101\003\059\000\252\000\164\003\124\003\223\004\210\004\119\004\ +\009\000\008\001\203\004\000\000\198\002\000\000\043\000\121\001\ +\066\001\123\001\000\000\110\004\000\001\093\002\029\000\024\000\ +\019\001\229\002\033\000\027\001\214\004\090\001\022\001\006\001\ +\025\000\022\001\066\001\000\001\000\001\031\001\000\001\108\001\ +\043\000\042\000\092\000\178\000\094\001\000\001\092\000\079\001\ +\000\001\110\001\141\004\040\001\008\001\000\001\104\000\017\001\ +\059\000\037\001\104\000\092\000\112\000\029\001\027\001\115\000\ +\092\000\117\000\118\000\023\001\000\001\000\001\092\000\104\000\ +\067\001\092\000\030\001\120\000\104\000\078\000\092\000\080\000\ +\081\000\000\001\104\000\092\000\092\000\104\000\000\001\000\001\ +\140\000\141\000\104\000\143\000\029\005\092\001\094\001\104\000\ +\104\000\053\001\090\001\055\001\014\001\153\000\154\000\017\001\ +\034\005\117\000\036\003\012\004\022\001\065\001\201\004\066\001\ +\014\001\027\001\066\001\095\001\000\001\092\001\073\001\000\001\ +\000\001\073\001\115\000\004\000\176\000\177\000\091\001\091\001\ +\180\000\094\001\095\001\095\001\094\001\047\001\036\001\000\001\ +\000\001\000\001\019\001\094\001\088\005\000\001\094\001\104\001\ +\091\001\026\001\027\001\027\001\095\001\014\001\106\001\032\000\ +\017\001\109\001\000\001\162\000\163\000\024\001\008\001\091\001\ +\115\001\092\001\064\001\115\001\165\000\000\001\000\001\048\001\ +\049\001\087\001\008\001\000\001\000\001\037\001\000\001\094\001\ +\010\001\091\001\091\001\060\001\181\000\095\001\095\001\097\001\ +\098\001\055\003\067\001\068\001\154\001\070\001\003\001\026\005\ +\027\001\095\001\188\001\097\001\000\001\018\001\027\001\082\002\ +\035\005\115\001\132\003\133\005\093\001\000\000\070\004\109\001\ +\000\001\073\004\094\001\091\001\200\002\088\001\094\001\095\001\ +\161\001\114\001\163\001\000\001\109\001\000\001\000\001\185\005\ +\121\001\187\005\123\001\040\001\091\001\094\001\111\001\095\001\ +\095\001\130\001\131\001\094\001\000\001\112\001\105\001\133\001\ +\066\001\207\001\092\001\036\005\046\001\091\001\212\001\250\000\ +\145\001\252\000\106\005\101\002\000\001\241\002\092\001\149\001\ +\091\001\035\001\092\001\004\001\095\001\095\001\091\001\091\001\ +\201\005\094\001\095\001\000\001\094\001\000\001\000\001\092\001\ +\008\001\000\001\147\003\193\001\181\004\182\004\004\001\027\001\ +\025\001\059\001\008\001\000\001\031\001\023\002\064\001\065\001\ +\249\001\015\001\094\001\172\000\018\001\026\001\045\001\026\001\ +\074\001\000\001\179\000\046\001\230\003\000\001\094\001\000\001\ +\000\001\000\001\003\002\010\001\000\001\105\001\155\004\066\001\ +\000\001\094\001\010\001\094\001\221\004\000\001\114\001\004\001\ +\007\001\099\001\019\001\008\001\018\001\121\001\060\005\123\001\ +\092\001\026\001\015\001\109\001\000\001\018\001\130\001\131\001\ +\035\001\133\001\000\001\132\005\066\001\106\004\010\001\091\001\ 
+\089\001\090\001\000\001\095\001\093\001\145\001\000\001\096\001\ +\049\001\149\001\044\004\092\001\092\001\153\001\154\001\092\001\ +\059\001\092\001\092\001\060\001\247\002\064\001\065\001\094\001\ +\065\001\000\001\014\001\068\001\000\001\070\001\091\001\074\001\ +\067\001\243\001\027\001\010\001\015\001\066\001\178\001\179\001\ +\180\001\017\001\022\001\001\004\073\001\092\001\186\001\015\001\ +\095\001\130\001\131\001\015\001\092\001\092\001\092\001\095\001\ +\099\001\000\001\094\001\000\001\000\001\018\001\094\001\032\001\ +\018\001\092\001\109\001\207\001\208\001\040\003\111\001\040\001\ +\212\001\066\001\043\001\046\003\216\001\237\003\068\003\219\001\ +\092\001\074\005\017\001\095\001\113\001\000\001\092\001\027\001\ +\228\001\229\001\066\001\018\001\000\001\243\004\066\001\171\003\ +\094\001\188\001\018\001\004\001\094\001\070\001\066\001\243\001\ +\244\001\243\001\094\001\003\001\077\001\243\001\004\001\083\001\ +\065\001\253\001\008\001\094\001\101\002\092\001\000\000\003\002\ +\112\002\015\001\243\001\008\001\018\001\131\002\094\001\243\001\ +\228\001\229\001\014\002\113\002\114\002\243\001\011\002\092\001\ +\243\001\094\001\192\002\003\001\094\001\243\001\176\004\243\001\ +\000\001\030\001\243\001\243\001\091\001\043\005\091\001\091\001\ +\247\001\028\002\029\002\095\001\109\001\238\001\014\001\094\001\ +\014\001\017\001\022\001\000\001\135\003\092\001\022\001\094\001\ +\073\001\063\005\055\001\027\001\066\001\057\002\094\001\008\001\ +\091\001\233\002\233\002\233\002\065\001\217\004\019\001\091\001\ +\169\002\143\004\171\002\022\001\023\002\026\001\008\001\047\001\ +\000\001\026\002\014\001\000\001\167\003\030\001\082\002\004\001\ +\152\005\000\001\027\002\008\001\189\002\065\001\014\001\248\002\ +\002\001\014\001\015\001\048\001\030\001\018\001\027\001\054\002\ +\022\001\101\002\008\001\113\001\104\002\106\001\055\001\060\001\ +\109\001\079\001\110\002\111\002\065\001\113\002\114\002\068\001\ +\065\001\070\001\094\001\091\001\035\001\055\001\015\003\095\001\ +\017\003\097\001\098\001\127\002\121\002\065\001\036\001\065\001\ +\132\002\027\001\083\005\031\005\008\001\137\002\022\001\090\001\ +\022\001\065\001\066\001\115\001\059\001\066\001\042\005\147\002\ +\148\002\064\001\065\001\111\002\066\001\005\003\062\001\242\003\ +\066\001\106\001\111\001\074\001\109\001\244\002\249\003\047\001\ +\036\001\094\001\062\005\127\002\090\001\169\002\094\001\171\002\ +\106\001\094\001\001\003\109\001\176\002\137\002\027\001\035\001\ +\094\001\181\002\118\004\065\001\099\001\006\005\092\001\080\003\ +\006\005\189\002\190\002\019\001\192\002\035\001\109\001\182\003\ +\183\003\093\005\050\003\022\001\094\001\030\002\202\002\059\001\ +\008\001\027\001\052\003\052\003\000\001\065\001\000\000\000\001\ +\108\005\097\001\098\001\004\001\176\002\059\001\253\001\008\001\ +\092\001\049\001\064\003\065\001\201\002\039\003\015\001\253\001\ +\073\003\018\001\088\001\115\001\060\001\233\002\026\001\064\002\ +\065\002\152\003\152\003\057\005\068\001\059\005\070\001\179\004\ +\066\001\094\001\102\001\247\002\248\002\066\001\201\002\073\001\ +\094\001\109\001\112\001\111\001\073\001\255\002\000\001\195\004\ +\102\001\255\002\004\001\067\001\008\003\096\004\008\001\109\001\ +\010\001\111\001\004\001\094\001\014\001\015\001\008\001\008\001\ +\018\001\066\001\057\002\055\001\014\001\015\001\014\001\111\001\ +\018\001\027\001\003\001\057\002\064\001\150\005\151\005\000\001\ +\018\001\115\001\014\001\027\001\040\003\039\003\115\001\130\004\ +\236\004\039\003\046\003\014\001\008\003\094\001\207\003\044\003\ 
+\052\003\140\004\019\001\055\003\000\001\035\001\039\003\019\001\ +\027\001\026\001\000\001\039\003\064\003\090\001\047\001\060\003\ +\066\001\039\003\155\002\156\002\039\003\073\003\030\001\073\001\ +\066\001\039\003\067\001\079\003\112\001\059\001\039\003\039\003\ +\049\001\110\001\036\001\065\001\048\001\065\001\066\001\079\001\ +\177\002\091\001\092\001\060\001\094\001\095\001\215\005\055\001\ +\060\001\000\000\067\001\068\001\094\001\070\001\191\002\030\001\ +\068\001\065\001\070\001\072\000\079\001\055\001\000\001\113\001\ +\097\001\098\001\094\001\003\001\064\001\065\001\064\001\014\001\ +\102\001\014\001\064\001\065\001\064\001\129\003\044\004\109\001\ +\055\001\019\001\115\001\135\003\027\001\045\001\046\001\139\003\ +\026\001\102\000\065\001\000\000\022\001\095\001\111\001\147\003\ +\022\001\149\003\106\001\111\001\152\003\109\001\154\003\155\003\ +\156\003\058\004\200\005\159\003\160\003\097\001\048\001\049\001\ +\164\003\007\004\166\003\167\003\000\001\047\001\112\001\064\001\ +\065\001\109\001\060\001\083\001\065\001\066\001\178\003\139\003\ +\009\003\067\001\068\001\106\001\070\001\000\001\109\001\147\003\ +\079\001\189\003\035\004\065\001\100\001\022\003\095\004\014\001\ +\000\001\015\001\004\001\159\003\018\001\194\003\008\001\014\001\ +\019\001\037\001\074\001\207\003\027\001\015\001\000\001\026\001\ +\027\001\003\001\037\001\019\001\027\001\110\001\178\003\097\001\ +\098\001\027\001\026\001\013\001\014\001\111\001\064\001\017\001\ +\000\000\014\001\131\004\008\001\017\001\048\001\049\001\000\001\ +\026\001\027\001\028\001\029\001\108\001\064\001\242\003\105\001\ +\048\001\060\001\065\001\030\001\066\001\249\003\040\001\041\001\ +\067\001\068\001\065\001\070\001\060\001\001\004\000\001\097\001\ +\066\001\026\001\176\004\007\004\068\001\002\004\070\001\000\001\ +\012\004\135\004\060\001\109\001\055\001\063\001\000\001\065\001\ +\066\001\067\001\068\001\022\001\066\001\067\001\065\001\073\001\ +\074\001\147\001\019\001\073\001\022\001\000\001\080\001\035\004\ +\064\001\026\001\066\001\037\001\111\001\206\004\127\003\128\003\ +\044\004\045\004\092\001\075\001\094\001\049\004\096\001\111\001\ +\064\001\035\001\000\001\220\004\141\003\142\003\058\004\026\001\ +\049\001\014\001\108\001\148\003\010\001\111\001\065\001\106\001\ +\064\001\115\001\109\001\060\001\157\003\115\001\000\001\065\001\ +\064\001\059\001\243\004\068\001\067\005\070\001\064\001\065\001\ +\112\001\045\004\047\001\075\001\064\001\049\004\031\004\012\001\ +\074\001\019\001\090\001\095\004\096\004\109\001\098\004\028\001\ +\026\001\000\001\064\001\018\005\018\005\004\001\208\004\208\004\ +\108\004\008\001\031\001\010\001\208\004\109\001\110\001\014\001\ +\015\001\099\001\027\001\018\001\064\001\064\001\111\001\049\001\ +\112\001\064\001\100\001\109\001\027\001\050\001\130\004\131\004\ +\041\005\109\001\060\001\004\001\075\001\066\001\098\004\008\001\ +\140\004\067\001\068\001\143\004\070\001\074\001\015\001\109\001\ +\108\004\018\001\071\001\080\001\061\005\064\001\083\001\027\001\ +\037\001\066\001\027\001\159\004\053\001\064\001\055\001\084\001\ +\157\004\109\001\110\001\066\001\083\005\083\005\086\005\064\001\ +\065\001\112\001\073\001\094\001\176\004\035\001\064\001\040\001\ +\101\001\181\004\182\004\064\001\066\001\111\001\064\001\064\001\ +\064\001\189\004\109\001\110\001\091\001\092\001\066\001\094\001\ +\095\001\066\001\109\001\000\001\053\001\059\001\055\001\056\001\ +\090\001\066\001\109\001\065\001\208\004\209\004\210\004\088\001\ +\065\001\042\004\113\001\000\001\109\001\046\004\019\001\000\000\ 
+\082\002\221\004\051\004\223\004\110\001\026\001\022\001\109\001\ +\000\001\189\004\066\001\109\001\004\001\109\001\019\001\112\001\ +\008\001\073\001\010\001\068\004\069\004\026\001\014\001\064\001\ +\102\001\074\004\018\001\048\001\110\002\209\004\210\004\109\001\ +\027\001\064\001\254\004\027\001\109\001\035\001\094\001\060\001\ +\221\001\064\001\006\005\048\001\000\001\075\001\067\001\068\001\ +\027\001\070\001\099\004\067\001\016\005\035\001\018\005\060\001\ +\022\001\013\001\022\005\115\001\064\001\059\001\067\001\068\001\ +\064\001\070\001\064\001\065\001\109\001\238\001\026\001\066\001\ +\028\001\029\001\254\004\039\005\074\001\059\001\109\001\000\001\ +\004\001\073\001\112\001\065\001\008\001\041\001\109\001\066\001\ +\031\001\000\000\111\001\015\001\016\005\057\005\018\001\059\005\ +\018\001\230\002\022\005\091\001\092\001\099\001\094\001\095\001\ +\060\001\109\001\111\001\050\001\000\001\109\001\074\005\109\001\ +\068\001\246\002\027\002\039\005\037\001\250\002\074\001\083\005\ +\102\001\113\001\027\005\007\000\080\001\030\005\010\000\109\001\ +\177\004\013\000\014\000\027\001\000\000\017\000\018\000\019\000\ +\020\000\021\000\066\001\023\000\096\001\066\001\066\001\192\004\ +\193\004\037\001\030\000\066\001\025\003\113\005\034\000\064\001\ +\108\001\037\000\038\000\111\001\027\001\027\001\083\001\000\001\ +\124\005\086\002\046\000\047\000\083\001\247\002\050\000\051\000\ +\004\001\023\001\066\001\135\005\008\001\035\001\000\001\000\001\ +\081\005\082\005\091\001\084\005\085\005\066\001\018\001\108\001\ +\146\005\026\001\150\005\151\005\146\005\113\005\109\001\027\001\ +\156\005\157\005\019\001\066\001\066\001\059\001\083\001\031\001\ +\026\001\026\001\064\001\065\001\000\000\089\000\090\000\091\000\ +\000\001\093\000\066\001\135\005\074\001\071\001\040\003\179\005\ +\009\005\073\001\050\001\006\001\046\003\185\005\186\005\187\005\ +\049\001\004\001\084\001\191\005\005\000\008\001\066\001\022\001\ +\156\005\157\005\026\001\060\001\015\001\099\001\094\001\018\001\ +\166\003\125\000\064\001\068\001\066\001\070\001\004\001\109\001\ +\212\005\022\001\008\001\215\005\000\000\137\000\138\000\179\005\ +\047\001\221\005\222\005\115\001\018\001\095\001\186\005\189\003\ +\064\001\149\000\088\001\191\005\055\001\027\001\057\001\058\001\ +\059\001\000\001\061\001\075\001\000\001\064\001\065\001\022\001\ +\164\000\053\001\000\001\055\001\189\005\078\005\111\001\066\001\ +\212\005\173\000\112\001\091\001\064\001\065\001\199\005\019\001\ +\022\001\221\005\222\005\026\001\093\001\019\001\026\001\090\001\ +\047\001\210\005\211\005\232\002\026\001\135\003\097\001\004\001\ +\027\001\000\001\000\001\008\001\109\001\053\001\054\001\055\001\ +\056\001\047\001\109\001\110\001\048\001\000\000\251\002\120\005\ +\064\001\065\001\048\001\000\003\093\001\000\001\027\001\128\005\ +\060\001\109\001\164\003\026\001\026\001\167\003\060\001\067\001\ +\068\001\000\001\070\001\094\001\109\001\004\001\068\001\018\001\ +\070\001\008\001\023\003\010\001\022\001\016\001\095\001\014\001\ +\015\001\004\001\109\001\140\000\141\000\008\001\159\005\004\001\ +\027\001\253\000\254\000\008\001\027\001\109\001\040\001\018\001\ +\153\000\154\000\015\001\091\001\049\003\018\001\095\001\095\001\ +\027\001\178\005\000\001\111\001\000\001\065\001\027\001\019\001\ +\004\001\111\001\008\001\071\001\008\001\022\001\010\001\176\000\ +\064\001\065\001\014\001\000\001\065\001\033\001\018\001\071\001\ +\084\001\037\001\093\001\066\001\073\001\010\001\006\001\027\001\ +\242\003\000\001\073\001\014\001\084\001\214\005\017\001\249\003\ 
+\004\001\094\001\090\001\010\001\008\001\066\001\065\001\053\001\ +\027\001\055\001\014\001\015\001\091\001\092\001\018\001\094\001\ +\095\001\077\001\012\004\065\001\113\003\073\001\110\001\053\001\ +\076\001\055\001\053\001\079\001\055\001\081\001\066\001\083\001\ +\064\001\065\001\113\001\065\001\000\001\073\001\065\001\055\001\ +\004\001\057\001\058\001\059\001\008\001\061\001\010\001\003\001\ +\064\001\065\001\014\001\065\001\066\001\067\001\018\001\091\001\ +\092\001\055\001\094\001\095\001\112\001\059\001\066\001\027\001\ +\116\001\063\001\064\001\053\001\054\001\055\001\056\001\064\001\ +\065\001\000\000\090\001\168\003\169\003\113\001\064\001\065\001\ +\078\001\097\001\134\001\135\001\000\001\064\001\053\001\003\001\ +\055\001\016\001\055\001\184\003\064\001\109\001\110\001\022\001\ +\227\001\013\001\065\001\064\001\027\001\092\001\096\004\234\001\ +\197\003\053\001\013\001\055\001\160\001\073\001\026\001\109\001\ +\028\001\029\001\008\001\167\001\000\001\065\001\014\001\171\001\ +\213\003\028\001\029\001\109\001\040\001\041\001\010\001\091\001\ +\092\001\095\001\094\001\095\001\184\001\185\001\041\001\065\001\ +\130\004\189\001\036\001\191\001\064\001\065\001\073\001\073\001\ +\060\001\014\001\140\004\063\001\022\001\113\001\109\001\244\003\ +\068\001\060\001\206\001\090\001\063\001\000\001\074\001\022\001\ +\003\001\068\001\065\001\066\001\080\001\159\004\218\001\074\001\ +\220\001\221\001\013\001\066\001\067\001\080\001\017\001\014\001\ +\092\001\064\001\065\001\022\001\096\001\210\001\211\001\026\001\ +\027\001\028\001\029\001\181\004\182\004\096\001\095\001\008\001\ +\108\001\130\001\131\001\111\001\000\000\000\001\041\001\251\001\ +\037\004\108\001\095\001\055\001\111\001\004\001\023\001\059\001\ +\153\001\008\001\112\001\063\001\064\001\030\001\092\001\103\001\ +\015\001\060\001\014\001\018\001\063\001\022\001\065\001\066\001\ +\067\001\068\001\078\001\221\004\027\001\223\004\073\001\074\001\ +\027\001\178\001\179\001\180\001\053\001\080\001\055\001\092\001\ +\091\001\186\001\092\001\237\004\238\004\109\001\000\000\092\001\ +\065\001\092\001\094\001\094\001\055\001\096\001\057\001\058\001\ +\059\001\109\001\061\001\094\001\092\001\064\001\065\001\014\001\ +\101\004\108\001\103\004\066\001\111\001\115\001\020\001\216\001\ +\115\001\109\001\115\001\064\001\065\001\046\001\081\001\109\001\ +\109\001\062\001\071\001\108\001\002\001\081\002\089\001\090\001\ +\084\002\106\001\086\002\109\001\109\001\109\001\097\001\084\001\ +\073\001\073\001\100\001\244\001\137\004\090\001\027\001\109\001\ +\015\001\142\004\109\001\110\001\001\000\002\000\003\000\004\000\ +\005\000\092\001\055\001\000\001\094\001\064\001\064\001\008\001\ +\109\001\110\001\065\001\109\001\040\001\014\002\001\001\002\001\ +\124\002\000\001\167\004\014\001\018\001\004\001\009\001\062\001\ +\074\005\008\001\062\001\010\001\015\001\016\001\062\001\014\001\ +\092\001\027\001\142\002\064\001\144\002\094\001\146\002\079\001\ +\027\001\014\001\150\002\014\001\027\001\006\001\094\001\073\001\ +\109\001\036\001\199\004\200\004\095\001\064\001\075\001\042\001\ +\043\001\044\001\045\001\046\001\073\001\111\005\022\001\094\001\ +\172\002\092\001\014\001\073\001\027\001\218\004\006\001\040\001\ +\008\001\222\004\061\001\094\001\027\001\014\001\027\001\066\001\ +\021\001\086\001\064\001\062\001\071\001\072\001\194\002\062\001\ +\062\001\003\001\073\001\199\002\200\002\014\001\062\001\082\001\ +\083\001\084\001\085\001\086\001\062\001\086\001\210\002\095\001\ +\212\002\027\001\090\001\073\001\091\001\092\001\003\005\094\001\ 
+\095\001\100\001\091\001\223\002\224\002\027\001\094\001\055\001\ +\101\001\057\001\058\001\059\001\094\001\061\001\234\002\094\001\ +\064\001\065\001\113\001\132\002\088\001\241\002\027\001\185\005\ +\094\001\187\005\014\001\020\001\000\001\015\001\022\001\003\001\ +\252\002\053\001\147\002\148\002\094\001\008\001\043\005\062\001\ +\080\001\013\001\090\001\062\001\092\001\062\001\051\005\094\001\ +\112\001\097\001\112\001\094\001\088\001\065\001\026\001\019\003\ +\028\001\029\001\063\005\021\001\091\001\109\001\110\001\095\001\ +\094\001\014\001\014\001\014\001\181\002\041\001\014\001\027\001\ +\027\001\037\003\019\001\091\001\022\001\112\001\000\001\088\001\ +\014\001\003\001\014\001\014\001\014\001\000\000\000\000\008\001\ +\060\001\092\001\065\001\013\001\092\001\036\001\109\001\017\001\ +\068\001\036\001\062\003\109\001\022\001\065\003\074\001\067\003\ +\026\001\027\001\028\001\029\001\080\001\005\000\006\001\036\001\ +\008\001\064\001\078\003\092\001\092\001\090\001\082\003\041\001\ +\092\001\040\001\064\001\036\001\096\001\089\003\094\001\053\001\ +\024\000\093\003\053\001\064\001\091\001\026\003\000\000\064\001\ +\108\001\064\001\060\001\111\001\064\001\063\001\036\003\065\001\ +\066\001\067\001\068\001\111\003\064\001\064\001\114\003\073\001\ +\074\001\064\001\118\003\186\005\254\004\111\005\080\001\055\001\ +\187\002\057\001\058\001\059\001\120\003\061\001\171\005\026\002\ +\064\001\065\001\092\001\131\002\094\001\000\001\096\001\018\005\ +\096\001\094\001\129\003\143\003\187\001\057\002\063\002\141\000\ +\183\001\166\004\108\001\212\003\155\004\111\001\195\005\196\005\ +\171\002\115\001\090\001\145\001\108\001\006\005\203\005\243\004\ +\255\255\097\001\043\005\136\004\255\255\255\255\170\003\171\003\ +\255\255\255\255\255\255\255\255\255\255\109\001\110\001\220\005\ +\180\003\181\003\255\255\255\255\255\255\015\001\079\003\255\255\ +\255\255\121\000\255\255\255\255\055\001\255\255\057\001\058\001\ +\059\001\197\003\061\001\255\255\255\255\064\001\065\001\255\255\ +\255\255\255\255\255\255\139\000\140\000\141\000\255\255\143\000\ +\064\001\065\001\044\001\045\001\046\001\255\255\081\001\071\001\ +\255\255\153\000\154\000\255\255\255\255\077\001\089\001\090\001\ +\255\255\255\255\255\255\255\255\084\001\233\003\097\001\235\003\ +\129\003\255\255\090\001\255\255\255\255\071\001\072\001\243\003\ +\176\000\177\000\109\001\110\001\180\000\255\255\255\255\255\255\ +\252\003\083\001\084\001\085\001\086\001\109\001\110\001\255\255\ +\255\255\154\003\155\003\156\003\255\255\009\004\255\255\160\003\ +\255\255\255\255\100\001\255\255\255\255\166\003\255\255\255\255\ +\255\255\255\255\255\255\007\000\255\255\255\255\010\000\255\255\ +\255\255\013\000\014\000\255\255\255\255\017\000\018\000\019\000\ +\020\000\021\000\255\255\023\000\189\003\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\034\000\255\255\ +\255\255\037\000\038\000\255\255\255\255\255\255\255\255\255\255\ +\255\255\061\004\046\000\047\000\064\004\255\255\050\000\051\000\ +\055\001\255\255\057\001\058\001\059\001\255\255\061\001\255\255\ +\255\255\064\001\065\001\255\255\255\255\081\004\255\255\083\004\ +\255\255\085\004\255\255\087\004\088\004\255\255\255\255\255\255\ +\092\004\255\255\255\255\255\255\255\255\097\004\000\001\255\255\ +\100\004\003\001\102\004\090\001\255\255\089\000\090\000\091\000\ +\255\255\093\000\097\001\013\001\014\001\255\255\255\255\017\001\ +\255\255\255\255\118\004\255\255\255\255\255\255\109\001\110\001\ +\026\001\027\001\028\001\029\001\255\255\255\255\255\255\255\255\ 
+\255\255\255\255\255\255\255\255\013\001\137\004\040\001\041\001\ +\255\255\255\255\142\004\255\255\255\255\255\255\255\255\255\255\ +\255\255\149\004\255\255\028\001\029\001\137\000\138\000\087\001\ +\255\255\255\255\060\001\255\255\255\255\063\001\255\255\255\255\ +\041\001\067\001\068\001\255\255\255\255\255\255\170\004\073\001\ +\074\001\255\255\174\004\255\255\255\255\255\255\080\001\179\004\ +\255\255\255\255\255\255\060\001\255\255\255\255\255\255\255\255\ +\255\255\173\000\092\001\068\001\094\001\255\255\096\001\195\004\ +\196\004\074\001\198\004\255\255\255\255\255\255\255\255\080\001\ +\255\255\255\255\108\001\000\001\255\255\111\001\255\255\255\255\ +\212\004\115\001\255\255\255\255\255\255\255\255\150\001\096\001\ +\255\255\153\001\154\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\108\001\255\255\255\255\111\001\235\004\ +\236\004\255\255\255\255\255\255\255\255\255\255\242\004\255\255\ +\255\255\255\255\178\001\179\001\180\001\255\255\255\255\255\255\ +\255\255\255\255\186\001\255\255\255\255\001\005\255\255\003\005\ +\255\255\193\001\055\001\255\255\057\001\058\001\059\001\255\255\ +\061\001\253\000\254\000\064\001\065\001\017\005\255\255\207\001\ +\208\001\255\255\255\255\255\255\212\001\255\255\255\255\255\255\ +\216\001\000\000\255\255\219\001\081\001\033\005\255\255\019\001\ +\255\255\255\255\038\005\227\001\089\001\090\001\255\255\255\255\ +\255\255\255\255\234\001\255\255\097\001\033\001\000\000\051\005\ +\255\255\037\001\255\255\255\255\244\001\255\255\255\255\108\001\ +\109\001\110\001\255\255\255\255\255\255\253\001\255\255\255\255\ +\023\001\255\255\255\255\003\002\072\005\255\255\255\255\255\255\ +\255\255\077\005\255\255\255\255\080\005\036\001\014\002\255\255\ +\255\255\017\002\255\255\087\005\255\255\255\255\255\255\091\005\ +\255\255\255\255\026\002\095\005\255\255\255\255\255\255\255\255\ +\055\001\255\255\057\001\058\001\059\001\255\255\061\001\255\255\ +\255\255\064\001\065\001\255\255\112\005\255\255\255\255\005\000\ +\255\255\255\255\255\255\009\000\255\255\255\255\255\255\255\255\ +\255\255\057\002\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\024\000\090\001\255\255\137\005\138\005\255\255\ +\255\255\255\255\097\001\143\005\255\255\255\255\255\255\147\005\ +\255\255\255\255\134\001\255\255\042\000\153\005\109\001\110\001\ +\255\255\255\255\255\255\255\255\255\255\161\005\162\005\255\255\ +\255\255\255\255\255\255\167\005\168\005\169\005\170\005\255\255\ +\005\000\006\001\007\001\255\255\255\255\015\001\011\001\012\001\ +\180\005\181\005\255\255\167\001\255\255\255\255\255\255\255\255\ +\078\000\255\255\080\000\081\000\255\255\193\005\194\005\255\255\ +\196\005\030\001\031\001\131\002\132\002\255\255\255\255\255\255\ +\204\005\043\001\044\001\045\001\046\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\147\002\148\002\050\001\218\005\000\000\ +\053\001\054\001\055\001\056\001\224\005\225\005\059\001\255\255\ +\066\001\255\255\255\255\064\001\065\001\071\001\072\001\255\255\ +\255\255\255\255\170\002\255\255\255\255\255\255\255\255\255\255\ +\255\255\083\001\084\001\085\001\086\001\181\002\140\000\141\000\ +\255\255\143\000\087\001\255\255\255\255\255\255\190\002\255\255\ +\192\002\255\255\100\001\153\000\154\000\255\255\255\255\251\001\ +\101\001\255\255\202\002\255\255\255\255\106\001\255\255\165\000\ +\109\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\000\001\176\000\177\000\003\001\255\255\255\255\181\000\ +\255\255\255\255\255\255\255\255\255\255\255\255\013\001\231\002\ 
+\255\255\233\002\017\001\255\255\255\255\255\255\000\001\140\000\ +\141\000\003\001\143\000\026\001\027\001\028\001\029\001\255\255\ +\248\002\255\255\255\255\013\001\153\000\154\000\255\255\255\255\ +\255\255\255\255\041\001\255\255\255\255\255\255\255\255\255\255\ +\026\001\255\255\028\001\029\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\175\000\176\000\177\000\060\001\040\001\041\001\ +\063\001\255\255\255\255\066\001\067\001\068\001\255\255\255\255\ +\255\255\005\000\073\001\074\001\255\255\009\000\255\255\255\255\ +\255\255\080\001\060\001\255\255\255\255\063\001\004\001\047\003\ +\255\255\067\001\068\001\255\255\024\000\092\001\255\255\094\001\ +\074\001\096\001\255\255\255\255\255\255\255\255\080\001\255\255\ +\255\255\255\255\255\255\025\001\068\003\108\001\042\000\255\255\ +\111\001\255\255\092\001\255\255\115\001\255\255\096\001\079\003\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\108\001\255\255\255\255\111\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\078\000\255\255\080\000\081\000\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\129\003\255\255\089\001\090\001\255\255\255\255\093\001\ +\255\255\000\000\096\001\255\255\112\000\042\001\255\255\255\255\ +\255\255\255\255\255\255\048\001\255\255\149\003\255\255\000\001\ +\152\003\255\255\154\003\155\003\156\003\255\255\255\255\008\001\ +\160\003\255\255\255\255\255\255\013\001\255\255\166\003\255\255\ +\140\000\141\000\255\255\143\000\255\255\255\255\255\255\255\255\ +\255\255\026\001\255\255\028\001\029\001\153\000\154\000\255\255\ +\255\255\255\255\255\255\255\255\255\255\189\003\255\255\255\255\ +\041\001\165\000\255\255\153\001\154\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\176\000\177\000\255\255\207\003\ +\000\001\181\000\255\255\060\001\255\255\255\255\063\001\255\255\ +\255\255\066\001\067\001\068\001\178\001\179\001\180\001\019\003\ +\255\255\074\001\255\255\255\255\186\001\255\255\255\255\080\001\ +\255\255\255\255\255\255\255\255\236\003\255\255\255\255\255\255\ +\255\255\255\255\255\255\092\001\255\255\255\255\255\255\096\001\ +\255\255\207\001\208\001\255\255\153\001\154\001\212\001\255\255\ +\255\255\255\255\216\001\108\001\255\255\255\255\111\001\055\001\ +\255\255\057\001\058\001\059\001\255\255\061\001\255\255\255\255\ +\064\001\065\001\255\255\255\255\177\001\178\001\179\001\180\001\ +\255\255\008\001\255\255\255\255\255\255\186\001\244\001\255\255\ +\004\001\081\001\255\255\255\255\255\255\255\255\255\255\253\001\ +\023\001\089\001\090\001\000\000\044\004\255\255\255\255\030\001\ +\255\255\097\001\207\001\208\001\255\255\025\001\255\255\212\001\ +\014\002\255\255\058\004\216\001\255\255\109\001\110\001\255\255\ +\255\255\255\255\255\255\255\255\026\002\226\001\255\255\255\255\ +\055\001\000\000\057\001\058\001\059\001\255\255\061\001\255\255\ +\255\255\064\001\065\001\255\255\255\255\255\255\255\255\244\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\095\004\ +\253\001\255\255\081\001\057\002\255\255\255\255\255\255\255\255\ +\255\255\088\001\089\001\090\001\255\255\255\255\255\255\255\255\ +\255\255\014\002\097\001\255\255\255\255\089\001\090\001\255\255\ +\255\255\093\001\005\000\106\001\096\001\255\255\109\001\110\001\ +\255\255\255\255\255\255\131\004\255\255\255\255\255\255\135\004\ +\255\255\000\001\255\255\255\255\003\001\255\255\114\001\255\255\ 
+\255\255\255\255\255\255\255\255\255\255\121\001\013\001\123\001\ +\255\255\255\255\255\255\255\255\057\002\255\255\255\255\255\255\ +\255\255\255\255\255\255\026\001\255\255\028\001\029\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\132\002\255\255\ +\176\004\040\001\041\001\255\255\255\255\153\001\154\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\147\002\148\002\255\255\ +\255\255\255\255\255\255\255\255\255\255\060\001\255\255\255\255\ +\063\001\255\255\255\255\255\255\067\001\068\001\178\001\179\001\ +\180\001\255\255\255\255\074\001\170\002\255\255\186\001\255\255\ +\255\255\080\001\255\255\255\255\255\255\255\255\255\255\181\002\ +\255\255\255\255\255\255\255\255\255\255\092\001\255\255\132\002\ +\190\002\096\001\192\002\207\001\208\001\255\255\255\255\255\255\ +\212\001\255\255\255\255\255\255\216\001\108\001\147\002\148\002\ +\111\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\140\000\141\000\255\255\143\000\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\153\000\154\000\ +\244\001\255\255\018\005\233\002\255\255\255\255\255\255\255\255\ +\181\002\253\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\190\002\255\255\192\002\255\255\176\000\177\000\255\255\ +\255\255\255\255\014\002\000\001\255\255\002\001\003\001\004\001\ +\255\255\255\255\255\255\008\001\255\255\255\255\026\002\255\255\ +\013\001\255\255\255\255\255\255\017\001\018\001\019\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\026\001\027\001\028\001\ +\029\001\000\001\255\255\255\255\233\002\255\255\255\255\036\001\ +\255\255\255\255\255\255\083\005\041\001\057\002\013\001\255\255\ +\255\255\255\255\255\255\048\001\049\001\000\000\094\005\255\255\ +\255\255\255\255\255\255\026\001\255\255\028\001\029\001\060\001\ +\255\255\255\255\063\001\064\001\255\255\066\001\067\001\068\001\ +\255\255\070\001\041\001\255\255\073\001\074\001\255\255\255\255\ +\255\255\079\003\255\255\080\001\255\255\255\255\255\255\255\255\ +\255\255\101\002\255\255\255\255\255\255\060\001\091\001\092\001\ +\136\005\094\001\095\001\096\001\097\001\068\001\142\005\100\001\ +\255\255\255\255\255\255\074\001\255\255\255\255\255\255\108\001\ +\109\001\080\001\111\001\255\255\255\255\255\255\115\001\255\255\ +\132\002\255\255\255\255\076\001\255\255\092\001\079\001\255\255\ +\081\001\096\001\083\001\129\003\255\255\255\255\255\255\147\002\ +\148\002\255\255\079\003\255\255\255\255\108\001\255\255\255\255\ +\111\001\255\255\255\255\255\255\255\255\255\255\255\255\149\003\ +\255\255\255\255\152\003\255\255\154\003\155\003\156\003\112\001\ +\026\000\027\000\160\003\116\001\255\255\255\255\255\255\255\255\ +\166\003\181\002\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\190\002\255\255\192\002\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\129\003\255\255\255\255\189\003\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\149\003\255\255\255\255\152\003\153\003\154\003\155\003\156\003\ +\082\000\083\000\255\255\160\003\255\255\233\002\255\255\000\001\ +\255\255\166\003\255\255\255\255\255\255\006\001\153\001\154\001\ +\255\255\255\255\000\000\012\001\189\001\255\255\191\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\189\003\255\255\255\255\028\001\255\255\030\001\031\001\178\001\ +\179\001\180\001\255\255\255\255\255\255\255\255\255\255\186\001\ +\187\001\218\001\255\255\220\001\255\255\255\255\255\255\255\255\ 
+\255\255\050\001\255\255\052\001\053\001\255\255\055\001\056\001\ +\255\255\255\255\059\001\255\255\207\001\208\001\255\255\064\001\ +\065\001\212\001\255\255\255\255\255\255\216\001\071\001\255\255\ +\052\003\255\255\255\255\255\255\255\255\057\003\044\004\255\255\ +\255\255\255\255\255\255\084\001\255\255\000\001\255\255\255\255\ +\003\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\097\001\244\001\013\001\079\003\101\001\255\255\017\001\255\255\ +\255\255\106\001\253\001\255\255\109\001\110\001\255\255\026\001\ +\027\001\028\001\029\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\014\002\255\255\255\255\041\001\255\255\ +\255\255\255\255\255\255\000\000\255\255\255\255\255\255\044\004\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\060\001\255\255\255\255\063\001\129\003\255\255\066\001\ +\067\001\068\001\255\255\255\255\255\255\255\255\073\001\074\001\ +\081\002\255\255\255\255\084\002\255\255\080\001\057\002\255\255\ +\255\255\149\003\255\255\255\255\152\003\255\255\154\003\155\003\ +\156\003\092\001\255\255\094\001\160\003\096\001\255\255\255\255\ +\255\255\255\255\166\003\255\255\255\255\255\255\255\255\255\255\ +\255\255\108\001\255\255\255\255\111\001\255\255\255\255\255\255\ +\115\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\189\003\176\004\255\255\255\255\255\255\255\255\049\001\ +\050\001\051\001\052\001\053\001\054\001\055\001\056\001\057\001\ +\058\001\059\001\060\001\061\001\062\001\063\001\064\001\065\001\ +\066\001\067\001\068\001\069\001\255\255\071\001\255\255\255\255\ +\255\255\132\002\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\172\002\086\001\255\255\000\000\255\255\ +\147\002\148\002\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\099\001\000\001\176\004\002\001\003\001\004\001\255\255\ +\255\255\255\255\008\001\255\003\255\255\255\255\199\002\013\001\ +\255\255\255\255\255\255\017\001\018\001\019\001\255\255\255\255\ +\255\255\255\255\181\002\255\255\026\001\027\001\028\001\029\001\ +\255\255\255\255\255\255\190\002\255\255\192\002\036\001\255\255\ +\255\255\255\255\040\001\041\001\018\005\255\255\255\255\255\255\ +\255\255\255\255\048\001\049\001\255\255\255\255\255\255\255\255\ +\044\004\255\255\255\255\255\255\255\255\255\255\060\001\255\255\ +\255\255\063\001\255\255\255\255\066\001\067\001\068\001\255\255\ +\070\001\255\255\255\255\073\001\074\001\255\255\233\002\255\255\ +\000\000\255\255\080\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\091\001\092\001\255\255\ +\094\001\095\001\096\001\255\255\255\255\018\005\100\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\083\005\108\001\255\255\ +\255\255\111\001\255\255\255\255\255\255\115\001\255\255\255\255\ +\094\005\255\255\255\255\000\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\008\001\255\255\062\003\255\255\255\255\ +\013\001\235\001\255\255\255\255\255\255\255\255\240\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\026\001\255\255\028\001\ +\029\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\136\005\255\255\041\001\255\255\083\005\053\001\ +\255\255\055\001\255\255\057\001\058\001\059\001\255\255\061\001\ +\255\255\255\255\064\001\065\001\079\003\255\255\111\003\060\001\ +\255\255\255\255\028\002\029\002\176\004\066\001\067\001\068\001\ +\255\255\255\255\255\255\255\255\255\255\074\001\000\000\255\255\ +\255\255\255\255\255\255\080\001\090\001\255\255\255\255\255\255\ 
+\255\255\255\255\255\255\097\001\255\255\255\255\143\003\092\001\ +\255\255\255\255\255\255\096\001\208\004\063\002\255\255\109\001\ +\110\001\255\255\068\002\069\002\070\002\255\255\129\003\108\001\ +\255\255\255\255\111\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\000\001\255\255\ +\002\001\003\001\149\003\180\003\181\003\152\003\008\001\154\003\ +\155\003\156\003\255\255\013\001\255\255\160\003\255\255\017\001\ +\018\001\019\001\255\255\166\003\255\255\255\255\255\255\255\255\ +\026\001\027\001\028\001\029\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\036\001\255\255\255\255\255\255\018\005\041\001\ +\255\255\255\255\189\003\255\255\255\255\255\255\048\001\049\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\233\003\255\255\060\001\255\255\255\255\063\001\255\255\255\255\ +\066\001\067\001\068\001\255\255\070\001\159\002\160\002\161\002\ +\074\001\255\255\255\255\252\003\255\255\255\255\080\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\000\000\ +\000\001\091\001\092\001\003\001\094\001\095\001\096\001\255\255\ +\255\255\255\255\255\255\023\001\255\255\013\001\255\255\083\005\ +\255\255\017\001\108\001\197\002\255\255\111\001\255\255\255\255\ +\036\001\115\001\026\001\027\001\028\001\029\001\255\255\255\255\ +\255\255\255\255\255\255\213\002\255\255\255\255\255\255\255\255\ +\255\255\041\001\255\255\055\001\255\255\057\001\058\001\059\001\ +\255\255\061\001\255\255\255\255\064\001\065\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\060\001\255\255\255\255\063\001\ +\255\255\044\004\066\001\067\001\068\001\255\255\255\255\255\255\ +\255\255\073\001\074\001\255\255\255\255\255\255\090\001\255\255\ +\080\001\255\255\255\255\000\000\255\255\097\001\255\255\255\255\ +\255\255\255\255\255\255\100\004\092\001\102\004\094\001\255\255\ +\096\001\109\001\110\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\108\001\255\255\032\003\111\001\ +\255\255\255\255\255\255\115\001\255\255\255\255\000\001\001\001\ +\002\001\003\001\255\255\255\255\006\001\007\001\008\001\009\001\ +\010\001\011\001\012\001\013\001\014\001\015\001\016\001\017\001\ +\018\001\019\001\020\001\021\001\149\004\255\255\024\001\025\001\ +\026\001\027\001\028\001\029\001\030\001\031\001\255\255\255\255\ +\255\255\255\255\036\001\037\001\255\255\255\255\040\001\041\001\ +\042\001\043\001\044\001\045\001\046\001\047\001\255\255\049\001\ +\050\001\051\001\255\255\053\001\054\001\055\001\056\001\255\255\ +\255\255\059\001\060\001\061\001\255\255\063\001\064\001\065\001\ +\066\001\067\001\068\001\255\255\070\001\071\001\072\001\073\001\ +\074\001\255\255\255\255\255\255\255\255\176\004\080\001\081\001\ +\082\001\083\001\084\001\085\001\086\001\087\001\255\255\089\001\ +\000\000\091\001\092\001\133\003\094\001\095\001\096\001\097\001\ +\098\001\255\255\100\001\101\001\255\255\103\001\104\001\105\001\ +\106\001\255\255\108\001\109\001\255\255\111\001\255\255\255\255\ +\255\255\115\001\255\255\055\001\255\255\057\001\058\001\059\001\ +\255\255\061\001\255\255\255\255\064\001\065\001\255\255\255\255\ +\001\005\255\255\255\255\255\255\255\255\255\255\074\001\000\001\ +\255\255\002\001\003\001\004\001\255\255\081\001\255\255\008\001\ +\255\255\255\255\255\255\255\255\013\001\089\001\090\001\255\255\ +\017\001\018\001\019\001\255\255\255\255\097\001\255\255\255\255\ +\255\255\026\001\027\001\028\001\029\001\038\005\255\255\255\255\ +\255\255\109\001\110\001\036\001\255\255\255\255\255\255\018\005\ 
+\041\001\255\255\220\003\221\003\222\003\255\255\255\255\048\001\ +\049\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\060\001\255\255\255\255\063\001\072\005\ +\255\255\066\001\067\001\068\001\255\255\070\001\255\255\255\255\ +\073\001\074\001\255\255\255\255\255\255\255\255\087\005\080\001\ +\255\255\023\001\255\255\000\001\255\255\255\255\095\005\255\255\ +\255\255\255\255\091\001\092\001\255\255\094\001\095\001\096\001\ +\013\001\000\000\020\004\021\004\022\004\255\255\255\255\112\005\ +\083\005\255\255\255\255\108\001\255\255\026\001\111\001\028\001\ +\029\001\055\001\115\001\057\001\058\001\059\001\255\255\061\001\ +\255\255\255\255\064\001\065\001\041\001\255\255\255\255\255\255\ +\137\005\138\005\055\001\255\255\057\001\058\001\059\001\057\004\ +\061\001\255\255\147\005\064\001\065\001\255\255\255\255\060\001\ +\255\255\255\255\063\001\255\255\090\001\066\001\067\001\068\001\ +\255\255\162\005\255\255\097\001\081\001\074\001\167\005\168\005\ +\169\005\170\005\255\255\080\001\089\001\090\001\255\255\109\001\ +\110\001\255\255\255\255\255\255\097\001\255\255\255\255\092\001\ +\255\255\255\255\255\255\096\001\255\255\255\255\255\255\108\001\ +\109\001\110\001\255\255\255\255\255\255\255\255\255\255\108\001\ +\255\255\255\255\111\001\255\255\255\255\255\255\255\255\255\255\ +\122\004\123\004\255\255\255\255\255\255\127\004\128\004\129\004\ +\000\001\001\001\002\001\003\001\255\255\000\000\006\001\007\001\ +\008\001\009\001\010\001\011\001\012\001\013\001\014\001\015\001\ +\016\001\017\001\018\001\019\001\020\001\021\001\022\001\255\255\ +\024\001\025\001\026\001\027\001\028\001\029\001\030\001\031\001\ +\255\255\255\255\255\255\255\255\036\001\037\001\255\255\255\255\ +\040\001\041\001\042\001\043\001\044\001\045\001\046\001\047\001\ +\255\255\049\001\050\001\051\001\255\255\053\001\054\001\055\001\ +\056\001\255\255\255\255\059\001\060\001\061\001\062\001\063\001\ +\064\001\065\001\066\001\067\001\068\001\255\255\070\001\071\001\ +\072\001\073\001\074\001\255\255\255\255\255\255\255\255\255\255\ +\080\001\081\001\082\001\083\001\084\001\085\001\086\001\087\001\ +\255\255\089\001\255\255\091\001\092\001\255\255\094\001\095\001\ +\096\001\097\001\098\001\255\255\100\001\101\001\255\255\103\001\ +\104\001\105\001\106\001\255\255\108\001\109\001\255\255\111\001\ +\255\255\255\255\255\255\115\001\255\255\255\255\255\255\255\255\ +\255\255\000\000\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\000\001\001\001\002\001\003\001\255\255\255\255\006\001\ +\007\001\008\001\009\001\010\001\011\001\012\001\013\001\014\001\ +\015\001\016\001\017\001\018\001\019\001\020\001\021\001\022\001\ +\255\255\024\001\025\001\026\001\027\001\028\001\029\001\030\001\ +\031\001\255\255\052\005\053\005\054\005\036\001\037\001\255\255\ +\255\255\040\001\041\001\042\001\043\001\044\001\045\001\046\001\ +\047\001\255\255\049\001\050\001\051\001\255\255\053\001\054\001\ +\055\001\056\001\255\255\255\255\059\001\060\001\061\001\255\255\ +\063\001\064\001\065\001\066\001\067\001\068\001\255\255\070\001\ +\071\001\072\001\073\001\074\001\255\255\255\255\255\255\255\255\ +\255\255\080\001\081\001\082\001\083\001\084\001\085\001\086\001\ +\087\001\255\255\089\001\255\255\091\001\092\001\000\000\094\001\ +\095\001\096\001\097\001\098\001\255\255\100\001\101\001\255\255\ +\103\001\104\001\105\001\106\001\255\255\108\001\109\001\255\255\ 
+\111\001\255\255\255\255\255\255\115\001\000\001\001\001\002\001\ +\003\001\255\255\255\255\006\001\007\001\008\001\009\001\010\001\ +\011\001\012\001\013\001\014\001\015\001\016\001\017\001\018\001\ +\019\001\020\001\021\001\022\001\255\255\024\001\025\001\026\001\ +\027\001\028\001\029\001\030\001\031\001\255\255\255\255\255\255\ +\255\255\036\001\037\001\255\255\255\255\040\001\041\001\042\001\ +\043\001\044\001\045\001\046\001\047\001\255\255\049\001\050\001\ +\051\001\255\255\053\001\054\001\055\001\056\001\255\255\255\255\ +\059\001\060\001\061\001\255\255\063\001\064\001\065\001\066\001\ +\067\001\068\001\255\255\070\001\071\001\072\001\073\001\074\001\ +\255\255\255\255\255\255\255\255\255\255\080\001\081\001\082\001\ +\083\001\084\001\085\001\086\001\087\001\255\255\089\001\255\255\ +\091\001\092\001\000\000\094\001\095\001\096\001\097\001\098\001\ +\255\255\100\001\101\001\255\255\103\001\104\001\105\001\106\001\ +\255\255\108\001\109\001\255\255\111\001\255\255\255\255\255\255\ +\115\001\000\001\001\001\002\001\003\001\255\255\255\255\006\001\ +\007\001\008\001\009\001\010\001\011\001\012\001\013\001\014\001\ +\015\001\016\001\017\001\018\001\019\001\020\001\021\001\022\001\ +\255\255\024\001\025\001\026\001\027\001\028\001\029\001\030\001\ +\031\001\255\255\255\255\255\255\255\255\036\001\037\001\255\255\ +\255\255\040\001\041\001\042\001\043\001\044\001\045\001\046\001\ +\047\001\255\255\049\001\050\001\051\001\255\255\053\001\054\001\ +\055\001\056\001\255\255\255\255\059\001\060\001\061\001\255\255\ +\063\001\064\001\065\001\066\001\067\001\068\001\255\255\070\001\ +\071\001\072\001\073\001\074\001\255\255\255\255\255\255\255\255\ +\255\255\080\001\081\001\082\001\083\001\084\001\085\001\086\001\ +\087\001\255\255\089\001\255\255\091\001\092\001\000\000\094\001\ +\095\001\096\001\097\001\098\001\255\255\100\001\101\001\255\255\ +\103\001\104\001\105\001\106\001\255\255\108\001\109\001\255\255\ +\111\001\255\255\255\255\255\255\115\001\255\255\000\001\001\001\ +\002\001\003\001\255\255\255\255\006\001\007\001\008\001\009\001\ +\010\001\011\001\012\001\013\001\014\001\015\001\016\001\017\001\ +\018\001\019\001\020\001\021\001\022\001\255\255\024\001\025\001\ +\026\001\027\001\028\001\029\001\030\001\031\001\255\255\255\255\ +\255\255\255\255\036\001\037\001\255\255\255\255\040\001\041\001\ +\042\001\043\001\044\001\045\001\046\001\047\001\255\255\049\001\ +\050\001\051\001\255\255\053\001\054\001\055\001\056\001\255\255\ +\255\255\059\001\060\001\061\001\255\255\063\001\064\001\065\001\ +\066\001\067\001\068\001\255\255\070\001\071\001\072\001\073\001\ +\074\001\255\255\255\255\255\255\255\255\255\255\080\001\081\001\ +\082\001\083\001\084\001\085\001\086\001\087\001\255\255\089\001\ +\255\255\091\001\092\001\000\000\094\001\095\001\096\001\097\001\ +\098\001\255\255\100\001\101\001\255\255\103\001\104\001\105\001\ +\106\001\255\255\108\001\109\001\255\255\111\001\255\255\255\255\ +\255\255\115\001\000\001\001\001\002\001\003\001\255\255\255\255\ +\006\001\007\001\008\001\009\001\010\001\011\001\012\001\013\001\ +\014\001\015\001\016\001\017\001\018\001\019\001\020\001\021\001\ +\022\001\255\255\024\001\025\001\026\001\027\001\028\001\029\001\ +\030\001\031\001\255\255\255\255\255\255\255\255\036\001\037\001\ +\255\255\255\255\040\001\041\001\042\001\043\001\044\001\045\001\ +\046\001\047\001\255\255\049\001\050\001\051\001\255\255\053\001\ +\054\001\055\001\056\001\255\255\255\255\059\001\060\001\061\001\ +\255\255\063\001\064\001\065\001\066\001\067\001\068\001\255\255\ 
+\070\001\071\001\072\001\073\001\074\001\255\255\255\255\255\255\ +\255\255\255\255\080\001\081\001\082\001\083\001\084\001\085\001\ +\086\001\087\001\255\255\089\001\255\255\091\001\092\001\000\000\ +\094\001\095\001\096\001\097\001\098\001\255\255\100\001\101\001\ +\255\255\103\001\104\001\105\001\106\001\255\255\108\001\109\001\ +\255\255\111\001\255\255\255\255\255\255\115\001\000\001\001\001\ +\002\001\003\001\255\255\255\255\006\001\007\001\008\001\009\001\ +\010\001\011\001\012\001\013\001\014\001\015\001\016\001\017\001\ +\018\001\019\001\020\001\021\001\022\001\255\255\024\001\025\001\ +\026\001\027\001\028\001\029\001\030\001\031\001\255\255\255\255\ +\255\255\255\255\036\001\037\001\255\255\255\255\040\001\041\001\ +\042\001\043\001\044\001\045\001\046\001\047\001\255\255\049\001\ +\050\001\051\001\255\255\053\001\054\001\055\001\056\001\255\255\ +\255\255\059\001\060\001\061\001\255\255\063\001\064\001\065\001\ +\066\001\067\001\068\001\255\255\070\001\071\001\072\001\073\001\ +\074\001\255\255\255\255\255\255\255\255\255\255\080\001\081\001\ +\082\001\083\001\084\001\085\001\086\001\087\001\255\255\089\001\ +\255\255\091\001\092\001\000\000\094\001\095\001\096\001\097\001\ +\098\001\255\255\100\001\101\001\255\255\103\001\104\001\105\001\ +\106\001\255\255\108\001\109\001\255\255\111\001\255\255\255\255\ +\255\255\115\001\255\255\000\001\001\001\002\001\003\001\255\255\ +\255\255\006\001\007\001\008\001\009\001\010\001\011\001\012\001\ +\013\001\014\001\015\001\016\001\017\001\018\001\019\001\020\001\ +\021\001\022\001\255\255\024\001\025\001\026\001\027\001\028\001\ +\029\001\030\001\031\001\255\255\255\255\255\255\255\255\036\001\ +\037\001\255\255\255\255\040\001\041\001\042\001\043\001\044\001\ +\045\001\046\001\047\001\255\255\049\001\050\001\051\001\255\255\ +\053\001\054\001\055\001\056\001\255\255\255\255\059\001\060\001\ +\061\001\255\255\063\001\064\001\065\001\066\001\067\001\068\001\ +\255\255\070\001\071\001\072\001\073\001\074\001\255\255\255\255\ +\255\255\255\255\255\255\080\001\081\001\082\001\083\001\084\001\ +\085\001\086\001\087\001\255\255\089\001\255\255\091\001\092\001\ +\000\000\094\001\095\001\096\001\097\001\098\001\255\255\100\001\ +\101\001\255\255\103\001\104\001\105\001\106\001\255\255\108\001\ +\109\001\255\255\111\001\255\255\255\255\255\255\115\001\000\001\ +\001\001\002\001\003\001\255\255\255\255\006\001\007\001\008\001\ +\009\001\010\001\011\001\012\001\013\001\014\001\015\001\016\001\ +\017\001\018\001\019\001\020\001\021\001\022\001\255\255\024\001\ +\025\001\026\001\027\001\028\001\029\001\030\001\031\001\255\255\ +\255\255\255\255\255\255\036\001\037\001\255\255\255\255\040\001\ +\041\001\042\001\043\001\044\001\045\001\046\001\047\001\255\255\ +\049\001\050\001\051\001\255\255\053\001\054\001\055\001\056\001\ +\255\255\255\255\059\001\060\001\061\001\255\255\063\001\064\001\ +\065\001\066\001\067\001\068\001\255\255\070\001\071\001\072\001\ +\073\001\074\001\255\255\255\255\255\255\255\255\255\255\080\001\ +\081\001\082\001\083\001\084\001\085\001\086\001\087\001\255\255\ +\089\001\255\255\091\001\092\001\000\000\094\001\095\001\096\001\ +\097\001\098\001\255\255\100\001\101\001\255\255\103\001\104\001\ +\105\001\106\001\255\255\108\001\109\001\255\255\111\001\255\255\ +\255\255\255\255\115\001\000\001\001\001\002\001\003\001\255\255\ +\255\255\006\001\007\001\008\001\009\001\010\001\011\001\012\001\ +\013\001\014\001\015\001\016\001\017\001\018\001\019\001\020\001\ +\021\001\022\001\255\255\024\001\025\001\026\001\027\001\028\001\ 
+\029\001\030\001\031\001\255\255\255\255\255\255\255\255\036\001\ +\037\001\255\255\255\255\040\001\041\001\042\001\043\001\044\001\ +\045\001\046\001\047\001\255\255\049\001\050\001\051\001\255\255\ +\053\001\054\001\055\001\056\001\255\255\255\255\059\001\060\001\ +\061\001\255\255\063\001\064\001\065\001\066\001\067\001\068\001\ +\255\255\070\001\071\001\072\001\073\001\074\001\255\255\255\255\ +\255\255\255\255\255\255\080\001\081\001\082\001\083\001\084\001\ +\085\001\086\001\087\001\255\255\089\001\255\255\091\001\092\001\ +\000\000\094\001\095\001\096\001\097\001\098\001\255\255\100\001\ +\101\001\255\255\103\001\104\001\105\001\106\001\255\255\108\001\ +\109\001\255\255\111\001\255\255\255\255\255\255\115\001\255\255\ +\000\001\001\001\002\001\003\001\255\255\255\255\006\001\007\001\ +\008\001\009\001\010\001\011\001\012\001\013\001\014\001\015\001\ +\016\001\017\001\018\001\019\001\020\001\021\001\255\255\255\255\ +\024\001\025\001\026\001\027\001\028\001\029\001\030\001\031\001\ +\255\255\255\255\255\255\255\255\036\001\037\001\255\255\255\255\ +\040\001\041\001\042\001\043\001\044\001\045\001\046\001\255\255\ +\255\255\049\001\050\001\051\001\255\255\053\001\054\001\055\001\ +\056\001\255\255\255\255\059\001\060\001\061\001\255\255\063\001\ +\064\001\065\001\066\001\067\001\068\001\255\255\070\001\071\001\ +\072\001\073\001\074\001\255\255\255\255\255\255\255\255\255\255\ +\080\001\081\001\082\001\083\001\084\001\085\001\086\001\087\001\ +\255\255\089\001\255\255\091\001\092\001\000\000\094\001\095\001\ +\096\001\097\001\098\001\255\255\100\001\101\001\255\255\103\001\ +\104\001\105\001\106\001\255\255\108\001\109\001\255\255\111\001\ +\255\255\255\255\255\255\115\001\000\001\001\001\002\001\003\001\ +\255\255\255\255\006\001\007\001\008\001\009\001\010\001\011\001\ +\012\001\013\001\014\001\015\001\016\001\017\001\018\001\019\001\ +\020\001\021\001\255\255\255\255\024\001\025\001\026\001\027\001\ +\028\001\029\001\030\001\031\001\255\255\255\255\255\255\255\255\ +\036\001\037\001\255\255\255\255\040\001\041\001\042\001\043\001\ +\044\001\045\001\046\001\255\255\255\255\049\001\050\001\051\001\ +\255\255\053\001\054\001\055\001\056\001\255\255\255\255\059\001\ +\060\001\061\001\255\255\063\001\064\001\065\001\066\001\067\001\ +\068\001\255\255\070\001\071\001\072\001\073\001\074\001\255\255\ +\255\255\255\255\255\255\255\255\080\001\081\001\082\001\083\001\ +\084\001\085\001\086\001\087\001\255\255\089\001\255\255\091\001\ +\092\001\000\000\094\001\095\001\096\001\255\255\255\255\255\255\ +\100\001\101\001\255\255\103\001\104\001\105\001\106\001\255\255\ +\108\001\109\001\255\255\111\001\255\255\255\255\255\255\115\001\ +\000\001\001\001\002\001\003\001\255\255\255\255\006\001\007\001\ +\008\001\009\001\010\001\011\001\012\001\013\001\014\001\015\001\ +\016\001\017\001\018\001\019\001\020\001\021\001\255\255\255\255\ +\024\001\025\001\026\001\027\001\028\001\029\001\030\001\031\001\ +\255\255\255\255\255\255\255\255\036\001\037\001\255\255\255\255\ +\040\001\041\001\042\001\043\001\044\001\045\001\046\001\255\255\ +\255\255\049\001\050\001\051\001\255\255\053\001\054\001\055\001\ +\056\001\255\255\255\255\059\001\060\001\061\001\255\255\063\001\ +\064\001\065\001\066\001\067\001\068\001\255\255\070\001\071\001\ +\072\001\073\001\074\001\255\255\255\255\255\255\255\255\255\255\ +\080\001\081\001\082\001\083\001\084\001\085\001\086\001\087\001\ +\255\255\089\001\255\255\091\001\092\001\000\000\094\001\095\001\ +\096\001\255\255\255\255\255\255\100\001\101\001\255\255\103\001\ 
+\104\001\105\001\106\001\255\255\108\001\109\001\255\255\111\001\ +\255\255\255\255\255\255\115\001\255\255\000\001\001\001\002\001\ +\003\001\255\255\255\255\006\001\007\001\008\001\009\001\010\001\ +\011\001\012\001\013\001\014\001\015\001\016\001\017\001\018\001\ +\019\001\020\001\021\001\255\255\255\255\024\001\025\001\026\001\ +\027\001\028\001\029\001\030\001\031\001\255\255\255\255\255\255\ +\255\255\036\001\037\001\255\255\255\255\040\001\041\001\042\001\ +\043\001\044\001\045\001\046\001\255\255\255\255\049\001\050\001\ +\051\001\255\255\053\001\054\001\055\001\056\001\255\255\255\255\ +\059\001\060\001\061\001\255\255\063\001\064\001\065\001\066\001\ +\067\001\068\001\255\255\070\001\071\001\072\001\073\001\074\001\ +\255\255\255\255\255\255\255\255\255\255\080\001\081\001\082\001\ +\083\001\084\001\085\001\086\001\087\001\255\255\089\001\255\255\ +\091\001\092\001\000\000\094\001\095\001\096\001\255\255\255\255\ +\255\255\100\001\101\001\255\255\103\001\104\001\105\001\106\001\ +\255\255\108\001\109\001\255\255\111\001\255\255\255\255\255\255\ +\115\001\000\001\001\001\002\001\003\001\255\255\255\255\255\255\ +\255\255\008\001\009\001\010\001\255\255\255\255\013\001\014\001\ +\015\001\016\001\017\001\018\001\019\001\020\001\021\001\022\001\ +\255\255\024\001\025\001\026\001\027\001\028\001\029\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\036\001\037\001\255\255\ +\255\255\040\001\041\001\042\001\043\001\044\001\045\001\046\001\ +\047\001\255\255\049\001\255\255\051\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\060\001\061\001\255\255\ +\063\001\255\255\255\255\066\001\067\001\068\001\255\255\070\001\ +\071\001\072\001\073\001\074\001\255\255\255\255\255\255\255\255\ +\255\255\080\001\081\001\082\001\083\001\084\001\085\001\086\001\ +\255\255\255\255\089\001\255\255\091\001\092\001\000\000\094\001\ +\095\001\096\001\097\001\098\001\255\255\100\001\255\255\255\255\ +\103\001\104\001\105\001\255\255\255\255\108\001\255\255\255\255\ +\111\001\255\255\255\255\255\255\115\001\000\001\001\001\002\001\ +\003\001\255\255\255\255\255\255\255\255\008\001\009\001\010\001\ +\255\255\255\255\013\001\014\001\015\001\016\001\017\001\018\001\ +\019\001\020\001\021\001\022\001\255\255\024\001\025\001\026\001\ +\027\001\028\001\029\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\036\001\037\001\255\255\255\255\040\001\041\001\042\001\ +\043\001\044\001\045\001\046\001\047\001\255\255\049\001\255\255\ +\051\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\060\001\061\001\255\255\063\001\255\255\255\255\066\001\ +\067\001\068\001\255\255\070\001\071\001\072\001\073\001\074\001\ +\255\255\255\255\255\255\255\255\255\255\080\001\081\001\082\001\ +\083\001\084\001\085\001\086\001\255\255\255\255\089\001\255\255\ +\091\001\092\001\000\000\094\001\095\001\096\001\097\001\098\001\ +\255\255\100\001\255\255\255\255\103\001\104\001\105\001\255\255\ +\255\255\108\001\255\255\255\255\111\001\255\255\255\255\255\255\ +\115\001\255\255\000\001\001\001\002\001\003\001\255\255\255\255\ +\255\255\255\255\008\001\009\001\010\001\255\255\255\255\013\001\ +\014\001\015\001\016\001\017\001\018\001\019\001\020\001\021\001\ +\255\255\255\255\024\001\025\001\026\001\027\001\028\001\029\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\036\001\037\001\ +\255\255\255\255\040\001\041\001\042\001\043\001\044\001\045\001\ +\046\001\255\255\255\255\049\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\060\001\061\001\ 
+\255\255\063\001\255\255\255\255\066\001\067\001\068\001\255\255\ +\070\001\071\001\072\001\073\001\074\001\255\255\255\255\255\255\ +\255\255\255\255\080\001\255\255\082\001\083\001\084\001\085\001\ +\086\001\255\255\255\255\255\255\255\255\091\001\092\001\000\000\ +\094\001\095\001\096\001\255\255\255\255\255\255\100\001\255\255\ +\255\255\103\001\255\255\105\001\255\255\255\255\108\001\255\255\ +\255\255\111\001\255\255\255\255\255\255\115\001\000\001\001\001\ +\002\001\003\001\255\255\255\255\255\255\255\255\008\001\009\001\ +\010\001\255\255\255\255\013\001\014\001\015\001\016\001\017\001\ +\018\001\019\001\020\001\021\001\255\255\255\255\024\001\025\001\ +\026\001\027\001\028\001\029\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\036\001\037\001\255\255\255\255\040\001\041\001\ +\042\001\043\001\044\001\045\001\046\001\255\255\255\255\049\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\060\001\061\001\255\255\063\001\255\255\255\255\ +\066\001\067\001\068\001\255\255\070\001\071\001\072\001\073\001\ +\074\001\255\255\255\255\255\255\255\255\255\255\080\001\255\255\ +\082\001\083\001\084\001\085\001\086\001\255\255\255\255\255\255\ +\255\255\091\001\092\001\000\000\094\001\095\001\096\001\255\255\ +\255\255\255\255\100\001\255\255\255\255\103\001\255\255\105\001\ +\255\255\255\255\108\001\255\255\255\255\111\001\255\255\255\255\ +\255\255\115\001\000\001\001\001\002\001\003\001\255\255\255\255\ +\255\255\255\255\008\001\009\001\010\001\255\255\255\255\013\001\ +\014\001\015\001\016\001\017\001\018\001\019\001\020\001\021\001\ +\255\255\255\255\024\001\025\001\026\001\027\001\028\001\029\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\036\001\037\001\ +\255\255\255\255\040\001\041\001\042\001\043\001\044\001\045\001\ +\046\001\255\255\255\255\049\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\060\001\061\001\ +\255\255\063\001\255\255\255\255\066\001\067\001\068\001\255\255\ +\070\001\071\001\072\001\073\001\074\001\255\255\255\255\255\255\ +\255\255\255\255\080\001\255\255\082\001\083\001\084\001\085\001\ +\086\001\255\255\255\255\255\255\255\255\091\001\092\001\000\000\ +\094\001\095\001\096\001\255\255\255\255\255\255\100\001\255\255\ +\255\255\103\001\255\255\105\001\255\255\255\255\108\001\255\255\ +\255\255\111\001\255\255\255\255\255\255\115\001\255\255\000\001\ +\001\001\002\001\003\001\255\255\255\255\255\255\255\255\008\001\ +\009\001\010\001\255\255\255\255\013\001\014\001\015\001\016\001\ +\017\001\018\001\019\001\020\001\021\001\255\255\255\255\024\001\ +\025\001\026\001\027\001\028\001\029\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\036\001\037\001\255\255\255\255\040\001\ +\041\001\042\001\043\001\044\001\045\001\046\001\255\255\255\255\ +\049\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\060\001\061\001\255\255\063\001\255\255\ +\255\255\066\001\067\001\068\001\255\255\070\001\071\001\072\001\ +\073\001\074\001\255\255\255\255\255\255\255\255\255\255\080\001\ +\255\255\082\001\083\001\084\001\085\001\086\001\255\255\255\255\ +\255\255\255\255\091\001\092\001\000\000\094\001\095\001\096\001\ +\255\255\255\255\255\255\100\001\255\255\255\255\103\001\255\255\ +\105\001\255\255\255\255\108\001\255\255\255\255\111\001\255\255\ +\255\255\255\255\115\001\000\001\001\001\002\001\003\001\255\255\ +\255\255\255\255\255\255\008\001\009\001\010\001\255\255\255\255\ +\013\001\014\001\015\001\016\001\017\001\018\001\019\001\020\001\ 
+\021\001\255\255\255\255\024\001\025\001\026\001\027\001\028\001\ +\029\001\255\255\255\255\255\255\255\255\255\255\255\255\036\001\ +\037\001\255\255\255\255\040\001\041\001\042\001\043\001\044\001\ +\045\001\046\001\255\255\255\255\049\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\060\001\ +\061\001\255\255\063\001\255\255\255\255\066\001\067\001\068\001\ +\255\255\070\001\071\001\072\001\073\001\074\001\255\255\255\255\ +\255\255\255\255\255\255\080\001\255\255\082\001\083\001\084\001\ +\085\001\086\001\255\255\255\255\255\255\255\255\091\001\092\001\ +\000\000\094\001\095\001\096\001\255\255\255\255\255\255\100\001\ +\255\255\255\255\103\001\255\255\105\001\255\255\255\255\108\001\ +\255\255\255\255\111\001\255\255\255\255\255\255\115\001\000\001\ +\001\001\002\001\003\001\255\255\255\255\255\255\255\255\008\001\ +\009\001\010\001\255\255\255\255\013\001\014\001\015\001\016\001\ +\017\001\018\001\019\001\020\001\021\001\255\255\255\255\024\001\ +\025\001\026\001\027\001\028\001\029\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\036\001\037\001\255\255\255\255\040\001\ +\041\001\042\001\043\001\044\001\045\001\046\001\255\255\255\255\ +\049\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\060\001\061\001\255\255\063\001\255\255\ +\255\255\066\001\067\001\068\001\255\255\070\001\071\001\072\001\ +\073\001\074\001\255\255\255\255\255\255\255\255\255\255\080\001\ +\255\255\082\001\083\001\084\001\085\001\086\001\255\255\255\255\ +\255\255\255\255\091\001\092\001\000\000\094\001\095\001\096\001\ +\255\255\255\255\255\255\100\001\255\255\255\255\103\001\255\255\ +\105\001\255\255\255\255\108\001\255\255\255\255\111\001\255\255\ +\255\255\255\255\115\001\255\255\000\001\001\001\002\001\003\001\ +\255\255\255\255\255\255\255\255\008\001\009\001\010\001\255\255\ +\255\255\013\001\014\001\015\001\016\001\017\001\255\255\019\001\ +\020\001\021\001\255\255\255\255\024\001\025\001\026\001\027\001\ +\028\001\029\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\036\001\037\001\255\255\255\255\040\001\041\001\042\001\043\001\ +\044\001\045\001\046\001\255\255\255\255\049\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\060\001\061\001\255\255\063\001\255\255\255\255\066\001\067\001\ +\068\001\255\255\070\001\071\001\072\001\073\001\074\001\255\255\ +\255\255\255\255\255\255\255\255\080\001\255\255\082\001\083\001\ +\084\001\085\001\086\001\255\255\255\255\255\255\255\255\091\001\ +\092\001\000\000\094\001\095\001\096\001\255\255\255\255\255\255\ +\100\001\255\255\255\255\103\001\255\255\105\001\255\255\255\255\ +\108\001\255\255\255\255\111\001\255\255\255\255\255\255\115\001\ +\000\001\001\001\002\001\003\001\255\255\255\255\255\255\255\255\ +\008\001\009\001\010\001\255\255\255\255\013\001\014\001\015\001\ +\016\001\017\001\018\001\019\001\020\001\021\001\255\255\255\255\ +\024\001\025\001\026\001\027\001\028\001\029\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\036\001\037\001\255\255\255\255\ +\040\001\041\001\042\001\043\001\044\001\045\001\255\255\255\255\ +\255\255\049\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\060\001\061\001\255\255\063\001\ +\255\255\255\255\066\001\067\001\068\001\255\255\070\001\071\001\ +\072\001\073\001\074\001\255\255\255\255\255\255\255\255\255\255\ +\080\001\255\255\082\001\083\001\084\001\085\001\086\001\255\255\ +\255\255\255\255\255\255\091\001\092\001\000\000\094\001\095\001\ 
+\096\001\255\255\255\255\255\255\100\001\255\255\255\255\103\001\ +\255\255\105\001\255\255\255\255\108\001\255\255\255\255\111\001\ +\255\255\255\255\255\255\115\001\000\001\001\001\002\001\003\001\ +\255\255\255\255\255\255\255\255\008\001\009\001\010\001\255\255\ +\255\255\013\001\014\001\015\001\016\001\017\001\018\001\019\001\ +\020\001\021\001\255\255\255\255\024\001\025\001\026\001\027\001\ +\028\001\029\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\036\001\037\001\255\255\255\255\040\001\041\001\042\001\043\001\ +\044\001\045\001\255\255\255\255\255\255\049\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\060\001\061\001\255\255\063\001\255\255\255\255\066\001\067\001\ +\068\001\255\255\070\001\071\001\072\001\073\001\074\001\255\255\ +\255\255\255\255\255\255\255\255\080\001\255\255\082\001\083\001\ +\084\001\085\001\086\001\255\255\255\255\255\255\255\255\091\001\ +\092\001\000\000\094\001\095\001\096\001\255\255\255\255\255\255\ +\100\001\255\255\255\255\103\001\255\255\105\001\255\255\255\255\ +\108\001\255\255\255\255\111\001\255\255\255\255\255\255\115\001\ +\255\255\000\001\001\001\002\001\003\001\255\255\255\255\255\255\ +\255\255\008\001\009\001\010\001\255\255\255\255\013\001\014\001\ +\015\001\016\001\017\001\018\001\019\001\020\001\021\001\255\255\ +\255\255\024\001\025\001\026\001\027\001\028\001\029\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\036\001\037\001\255\255\ +\255\255\040\001\041\001\042\001\043\001\044\001\045\001\255\255\ +\255\255\255\255\049\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\060\001\061\001\255\255\ +\063\001\255\255\255\255\066\001\067\001\068\001\255\255\070\001\ +\071\001\072\001\073\001\074\001\255\255\255\255\255\255\255\255\ +\255\255\080\001\255\255\082\001\083\001\084\001\085\001\086\001\ +\255\255\255\255\255\255\255\255\091\001\092\001\000\000\094\001\ +\095\001\096\001\255\255\255\255\255\255\100\001\255\255\255\255\ +\103\001\255\255\105\001\255\255\255\255\108\001\255\255\255\255\ +\111\001\255\255\255\255\255\255\115\001\000\001\001\001\002\001\ +\003\001\255\255\255\255\255\255\255\255\008\001\009\001\010\001\ +\255\255\255\255\013\001\014\001\015\001\016\001\017\001\018\001\ +\019\001\020\001\021\001\255\255\255\255\024\001\025\001\026\001\ +\027\001\028\001\029\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\036\001\037\001\255\255\255\255\040\001\041\001\042\001\ +\043\001\044\001\045\001\255\255\255\255\255\255\049\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\060\001\061\001\255\255\063\001\255\255\255\255\066\001\ +\067\001\068\001\255\255\070\001\071\001\072\001\073\001\074\001\ +\255\255\255\255\255\255\255\255\255\255\080\001\255\255\082\001\ +\083\001\084\001\085\001\086\001\255\255\255\255\255\255\255\255\ +\091\001\092\001\000\000\094\001\095\001\096\001\255\255\255\255\ +\255\255\100\001\255\255\255\255\103\001\255\255\105\001\255\255\ +\255\255\108\001\255\255\255\255\111\001\255\255\255\255\255\255\ +\115\001\000\001\001\001\002\001\003\001\255\255\255\255\255\255\ +\255\255\255\255\009\001\010\001\255\255\255\255\013\001\014\001\ +\015\001\016\001\017\001\018\001\019\001\020\001\021\001\255\255\ +\255\255\024\001\025\001\026\001\027\001\028\001\029\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\036\001\037\001\255\255\ +\255\255\040\001\041\001\042\001\043\001\044\001\045\001\046\001\ +\255\255\255\255\049\001\255\255\255\255\255\255\255\255\255\255\ 
+\255\255\255\255\255\255\255\255\255\255\060\001\061\001\255\255\ +\063\001\255\255\255\255\066\001\067\001\068\001\255\255\070\001\ +\071\001\072\001\073\001\074\001\255\255\255\255\255\255\255\255\ +\255\255\080\001\255\255\082\001\083\001\084\001\085\001\086\001\ +\255\255\255\255\255\255\255\255\091\001\092\001\000\000\094\001\ +\095\001\096\001\255\255\255\255\255\255\100\001\255\255\255\255\ +\103\001\255\255\105\001\255\255\255\255\108\001\255\255\255\255\ +\111\001\255\255\255\255\255\255\115\001\255\255\000\001\001\001\ +\002\001\003\001\255\255\255\255\255\255\255\255\255\255\009\001\ +\010\001\255\255\255\255\013\001\014\001\015\001\016\001\017\001\ +\018\001\019\001\020\001\021\001\255\255\255\255\024\001\025\001\ +\026\001\027\001\028\001\029\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\036\001\037\001\255\255\255\255\040\001\041\001\ +\042\001\043\001\044\001\045\001\046\001\255\255\255\255\049\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\060\001\061\001\255\255\063\001\255\255\255\255\ +\066\001\067\001\068\001\255\255\070\001\071\001\072\001\073\001\ +\074\001\255\255\255\255\255\255\255\255\255\255\080\001\255\255\ +\082\001\083\001\084\001\085\001\086\001\255\255\255\255\255\255\ +\255\255\091\001\092\001\000\000\094\001\095\001\096\001\255\255\ +\255\255\255\255\100\001\255\255\255\255\103\001\255\255\105\001\ +\255\255\255\255\108\001\255\255\255\255\111\001\255\255\255\255\ +\255\255\115\001\000\001\001\001\002\001\003\001\255\255\255\255\ +\255\255\255\255\255\255\009\001\010\001\255\255\255\255\013\001\ +\014\001\015\001\016\001\017\001\018\001\019\001\020\001\021\001\ +\255\255\255\255\024\001\025\001\026\001\027\001\028\001\029\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\036\001\037\001\ +\255\255\255\255\040\001\041\001\042\001\043\001\044\001\045\001\ +\046\001\255\255\255\255\049\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\060\001\061\001\ +\255\255\063\001\255\255\255\255\066\001\067\001\068\001\255\255\ +\070\001\071\001\072\001\073\001\074\001\255\255\255\255\255\255\ +\255\255\255\255\080\001\255\255\082\001\083\001\084\001\085\001\ +\086\001\255\255\255\255\255\255\255\255\091\001\092\001\000\000\ +\094\001\095\001\096\001\255\255\255\255\255\255\100\001\255\255\ +\255\255\103\001\255\255\105\001\255\255\255\255\108\001\255\255\ +\255\255\111\001\255\255\255\255\255\255\115\001\000\001\001\001\ +\002\001\003\001\255\255\255\255\255\255\255\255\008\001\009\001\ +\010\001\255\255\255\255\013\001\014\001\015\001\016\001\017\001\ +\018\001\019\001\020\001\021\001\255\255\255\255\024\001\025\001\ +\026\001\027\001\028\001\029\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\036\001\037\001\255\255\255\255\040\001\041\001\ +\042\001\043\001\044\001\255\255\255\255\255\255\255\255\049\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\060\001\061\001\255\255\063\001\255\255\255\255\ +\066\001\067\001\068\001\255\255\070\001\071\001\072\001\073\001\ +\074\001\255\255\255\255\255\255\255\255\255\255\080\001\255\255\ +\082\001\255\255\084\001\085\001\086\001\255\255\255\255\255\255\ +\255\255\091\001\092\001\000\000\094\001\095\001\096\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\103\001\255\255\105\001\ +\255\255\255\255\108\001\255\255\255\255\111\001\255\255\255\255\ +\255\255\115\001\255\255\000\001\001\001\002\001\003\001\255\255\ +\255\255\255\255\255\255\008\001\009\001\010\001\255\255\255\255\ 
+\013\001\014\001\015\001\016\001\017\001\018\001\019\001\020\001\ +\021\001\255\255\255\255\024\001\025\001\026\001\027\001\028\001\ +\029\001\255\255\255\255\255\255\255\255\255\255\255\255\036\001\ +\037\001\255\255\255\255\040\001\041\001\042\001\043\001\044\001\ +\255\255\255\255\255\255\255\255\049\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\060\001\ +\061\001\255\255\063\001\255\255\255\255\066\001\067\001\068\001\ +\255\255\070\001\071\001\072\001\073\001\074\001\255\255\255\255\ +\255\255\255\255\255\255\080\001\255\255\082\001\255\255\084\001\ +\085\001\086\001\255\255\255\255\255\255\255\255\091\001\092\001\ +\000\000\094\001\095\001\096\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\103\001\255\255\105\001\255\255\255\255\108\001\ +\255\255\255\255\111\001\255\255\255\255\255\255\115\001\000\001\ +\001\001\002\001\003\001\255\255\255\255\255\255\255\255\008\001\ +\009\001\010\001\255\255\255\255\013\001\014\001\015\001\016\001\ +\017\001\018\001\019\001\020\001\021\001\255\255\255\255\024\001\ +\025\001\026\001\027\001\028\001\029\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\036\001\037\001\255\255\255\255\040\001\ +\041\001\042\001\043\001\044\001\255\255\255\255\255\255\255\255\ +\049\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\060\001\061\001\255\255\063\001\255\255\ +\255\255\066\001\067\001\068\001\255\255\070\001\071\001\072\001\ +\073\001\074\001\255\255\255\255\255\255\255\255\255\255\080\001\ +\255\255\082\001\255\255\084\001\085\001\086\001\255\255\255\255\ +\255\255\255\255\091\001\092\001\000\000\094\001\095\001\096\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\103\001\255\255\ +\105\001\255\255\255\255\108\001\255\255\255\255\111\001\255\255\ +\255\255\255\255\115\001\000\001\001\001\002\001\003\001\255\255\ +\255\255\255\255\255\255\008\001\009\001\010\001\255\255\255\255\ +\013\001\014\001\015\001\016\001\017\001\018\001\019\001\020\001\ +\021\001\255\255\255\255\024\001\025\001\026\001\027\001\028\001\ +\029\001\255\255\255\255\255\255\255\255\255\255\255\255\036\001\ +\037\001\255\255\255\255\040\001\041\001\042\001\043\001\044\001\ +\255\255\255\255\255\255\255\255\049\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\060\001\ +\061\001\255\255\063\001\255\255\255\255\066\001\067\001\068\001\ +\255\255\070\001\071\001\072\001\073\001\074\001\255\255\255\255\ +\255\255\255\255\255\255\080\001\255\255\082\001\255\255\084\001\ +\085\001\086\001\255\255\255\255\255\255\255\255\091\001\092\001\ +\000\000\094\001\095\001\096\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\103\001\255\255\105\001\255\255\255\255\108\001\ +\255\255\255\255\111\001\255\255\255\255\255\255\115\001\255\255\ +\000\001\001\001\002\001\003\001\255\255\255\255\255\255\255\255\ +\008\001\009\001\010\001\255\255\255\255\013\001\014\001\015\001\ +\016\001\017\001\018\001\019\001\020\001\021\001\255\255\255\255\ +\024\001\025\001\026\001\027\001\028\001\029\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\036\001\037\001\255\255\255\255\ +\040\001\041\001\042\001\043\001\044\001\255\255\255\255\255\255\ +\255\255\049\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\060\001\061\001\255\255\063\001\ +\255\255\255\255\066\001\067\001\068\001\255\255\070\001\071\001\ +\072\001\073\001\074\001\255\255\255\255\255\255\255\255\255\255\ +\080\001\255\255\082\001\255\255\084\001\085\001\086\001\255\255\ 
+\255\255\255\255\255\255\091\001\092\001\000\000\094\001\095\001\ +\096\001\255\255\255\255\255\255\255\255\255\255\255\255\103\001\ +\255\255\105\001\255\255\255\255\108\001\255\255\255\255\111\001\ +\255\255\255\255\255\255\115\001\000\001\001\001\002\001\003\001\ +\255\255\255\255\255\255\255\255\008\001\009\001\010\001\255\255\ +\255\255\013\001\014\001\015\001\016\001\017\001\018\001\019\001\ +\020\001\021\001\255\255\255\255\024\001\025\001\026\001\027\001\ +\028\001\029\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\036\001\037\001\255\255\255\255\040\001\041\001\042\001\043\001\ +\044\001\255\255\255\255\255\255\255\255\049\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\060\001\061\001\255\255\063\001\255\255\255\255\066\001\067\001\ +\068\001\255\255\070\001\071\001\072\001\073\001\074\001\255\255\ +\255\255\255\255\255\255\255\255\080\001\255\255\082\001\255\255\ +\084\001\085\001\086\001\255\255\255\255\255\255\255\255\091\001\ +\092\001\000\000\094\001\095\001\096\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\103\001\255\255\105\001\255\255\255\255\ +\108\001\255\255\255\255\111\001\255\255\255\255\255\255\115\001\ +\000\001\001\001\002\001\003\001\255\255\255\255\255\255\255\255\ +\008\001\009\001\010\001\255\255\255\255\013\001\014\001\015\001\ +\016\001\017\001\018\001\019\001\020\001\021\001\255\255\255\255\ +\024\001\025\001\026\001\027\001\028\001\029\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\036\001\037\001\255\255\255\255\ +\040\001\041\001\042\001\043\001\044\001\045\001\046\001\255\255\ +\255\255\049\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\060\001\061\001\255\255\255\255\ +\255\255\255\255\066\001\067\001\068\001\255\255\070\001\255\255\ +\255\255\073\001\074\001\255\255\255\255\255\255\255\255\255\255\ +\080\001\255\255\082\001\255\255\255\255\255\255\086\001\255\255\ +\255\255\255\255\255\255\091\001\092\001\000\000\094\001\095\001\ +\096\001\255\255\255\255\255\255\100\001\255\255\255\255\103\001\ +\255\255\105\001\255\255\255\255\108\001\255\255\255\255\111\001\ +\255\255\255\255\255\255\115\001\255\255\000\001\001\001\002\001\ +\003\001\255\255\255\255\255\255\255\255\008\001\009\001\010\001\ +\255\255\255\255\013\001\014\001\255\255\016\001\017\001\018\001\ +\019\001\020\001\021\001\255\255\255\255\024\001\025\001\026\001\ +\027\001\028\001\029\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\036\001\037\001\255\255\255\255\040\001\041\001\042\001\ +\043\001\255\255\255\255\255\255\255\255\255\255\049\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\060\001\061\001\255\255\063\001\255\255\255\255\066\001\ +\067\001\068\001\255\255\070\001\255\255\255\255\073\001\074\001\ +\255\255\255\255\255\255\255\255\255\255\080\001\255\255\082\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\091\001\092\001\000\000\094\001\095\001\096\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\103\001\255\255\105\001\255\255\ +\255\255\108\001\255\255\255\255\111\001\255\255\255\255\255\255\ +\115\001\000\001\001\001\002\001\003\001\255\255\255\255\255\255\ +\255\255\008\001\009\001\010\001\255\255\255\255\013\001\014\001\ +\255\255\016\001\017\001\018\001\019\001\020\001\021\001\255\255\ +\255\255\024\001\025\001\026\001\027\001\028\001\029\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\036\001\037\001\255\255\ +\255\255\040\001\041\001\042\001\255\255\255\255\255\255\255\255\ 
+\255\255\255\255\049\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\060\001\061\001\255\255\ +\063\001\255\255\255\255\255\255\067\001\068\001\255\255\070\001\ +\255\255\255\255\073\001\074\001\255\255\255\255\255\255\255\255\ +\255\255\080\001\255\255\082\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\091\001\092\001\000\000\094\001\ +\095\001\096\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\103\001\255\255\105\001\255\255\255\255\108\001\255\255\255\255\ +\111\001\255\255\255\255\255\255\115\001\000\001\001\001\002\001\ +\003\001\255\255\255\255\255\255\255\255\008\001\009\001\010\001\ +\255\255\255\255\013\001\014\001\255\255\016\001\017\001\018\001\ +\019\001\020\001\021\001\255\255\255\255\024\001\025\001\026\001\ +\027\001\028\001\029\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\036\001\037\001\255\255\255\255\040\001\041\001\042\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\049\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\060\001\061\001\255\255\063\001\255\255\255\255\255\255\ +\067\001\068\001\255\255\070\001\255\255\255\255\073\001\074\001\ +\255\255\255\255\255\255\255\255\255\255\080\001\255\255\082\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\091\001\092\001\000\000\094\001\095\001\096\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\103\001\255\255\105\001\255\255\ +\255\255\108\001\255\255\255\255\111\001\255\255\255\255\255\255\ +\115\001\255\255\000\001\001\001\002\001\003\001\255\255\255\255\ +\255\255\255\255\008\001\009\001\010\001\255\255\255\255\013\001\ +\014\001\255\255\016\001\017\001\018\001\019\001\020\001\021\001\ +\255\255\255\255\024\001\025\001\026\001\027\001\028\001\029\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\036\001\037\001\ +\255\255\255\255\040\001\041\001\042\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\049\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\060\001\061\001\ +\255\255\063\001\255\255\255\255\255\255\067\001\068\001\255\255\ +\070\001\255\255\255\255\073\001\074\001\255\255\255\255\255\255\ +\255\255\255\255\080\001\255\255\082\001\255\255\255\255\000\000\ +\255\255\255\255\255\255\255\255\255\255\091\001\092\001\255\255\ +\094\001\095\001\096\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\103\001\255\255\105\001\255\255\255\255\108\001\255\255\ +\255\255\111\001\255\255\255\255\255\255\115\001\000\001\001\001\ +\002\001\003\001\255\255\255\255\255\255\255\255\008\001\009\001\ +\010\001\255\255\255\255\013\001\014\001\255\255\016\001\017\001\ +\018\001\019\001\020\001\021\001\255\255\255\255\024\001\025\001\ +\026\001\027\001\028\001\029\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\036\001\037\001\255\255\255\255\040\001\041\001\ +\042\001\255\255\255\255\255\255\255\255\255\255\255\255\049\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\060\001\061\001\255\255\063\001\255\255\255\255\ +\255\255\067\001\068\001\255\255\070\001\255\255\255\255\073\001\ +\074\001\255\255\255\255\255\255\000\000\255\255\080\001\255\255\ +\082\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\091\001\092\001\255\255\094\001\095\001\096\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\103\001\255\255\105\001\ +\255\255\255\255\108\001\255\255\255\255\111\001\255\255\255\255\ +\255\255\115\001\000\001\001\001\002\001\003\001\255\255\255\255\ 
+\255\255\255\255\008\001\009\001\010\001\255\255\255\255\013\001\ +\014\001\255\255\016\001\017\001\018\001\019\001\020\001\021\001\ +\255\255\255\255\024\001\025\001\026\001\027\001\028\001\029\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\036\001\037\001\ +\255\255\255\255\040\001\041\001\042\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\049\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\060\001\061\001\ +\255\255\063\001\255\255\255\255\000\000\067\001\068\001\255\255\ +\070\001\255\255\255\255\073\001\074\001\255\255\255\255\255\255\ +\255\255\255\255\080\001\255\255\082\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\091\001\092\001\255\255\ +\094\001\095\001\096\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\103\001\255\255\105\001\255\255\255\255\108\001\000\001\ +\255\255\111\001\003\001\255\255\255\255\115\001\255\255\008\001\ +\009\001\010\001\255\255\255\255\013\001\014\001\255\255\016\001\ +\017\001\018\001\019\001\020\001\021\001\255\255\255\255\024\001\ +\025\001\026\001\255\255\028\001\029\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\037\001\255\255\255\255\040\001\ +\041\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\049\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\060\001\000\000\255\255\063\001\255\255\ +\255\255\255\255\067\001\068\001\255\255\070\001\255\255\255\255\ +\073\001\074\001\255\255\255\255\255\255\255\255\255\255\080\001\ +\255\255\082\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\091\001\092\001\255\255\094\001\095\001\096\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\103\001\255\255\ +\105\001\255\255\255\255\108\001\000\001\255\255\111\001\003\001\ +\255\255\255\255\115\001\255\255\008\001\009\001\010\001\255\255\ +\255\255\013\001\014\001\255\255\016\001\017\001\018\001\019\001\ +\020\001\021\001\255\255\255\255\024\001\025\001\026\001\255\255\ +\028\001\029\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\037\001\255\255\255\255\040\001\041\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\049\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\000\000\255\255\255\255\255\255\ +\060\001\255\255\255\255\063\001\255\255\255\255\255\255\067\001\ +\068\001\255\255\070\001\255\255\255\255\073\001\074\001\255\255\ +\255\255\255\255\255\255\255\255\080\001\255\255\082\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\091\001\ +\092\001\255\255\094\001\095\001\096\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\103\001\000\001\105\001\255\255\003\001\ +\108\001\255\255\255\255\111\001\008\001\255\255\010\001\115\001\ +\255\255\013\001\014\001\255\255\016\001\017\001\018\001\019\001\ +\020\001\021\001\255\255\255\255\024\001\025\001\026\001\255\255\ +\028\001\029\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\037\001\255\255\255\255\040\001\041\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\049\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\000\000\255\255\255\255\255\255\ +\060\001\255\255\255\255\063\001\255\255\255\255\255\255\067\001\ +\068\001\255\255\070\001\255\255\255\255\073\001\074\001\255\255\ +\255\255\255\255\255\255\255\255\080\001\255\255\255\255\255\255\ +\255\255\255\255\000\000\255\255\255\255\255\255\255\255\091\001\ +\092\001\255\255\094\001\095\001\096\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\103\001\000\001\105\001\255\255\003\001\ 
+\108\001\255\255\255\255\111\001\008\001\255\255\010\001\115\001\ +\255\255\013\001\014\001\255\255\016\001\017\001\018\001\019\001\ +\020\001\021\001\255\255\255\255\024\001\025\001\026\001\255\255\ +\028\001\029\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\037\001\255\255\255\255\040\001\041\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\049\001\255\255\000\000\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\060\001\255\255\255\255\063\001\255\255\255\255\255\255\067\001\ +\068\001\255\255\070\001\255\255\255\255\073\001\074\001\255\255\ +\255\255\255\255\255\255\255\255\080\001\000\000\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\091\001\ +\092\001\255\255\094\001\095\001\096\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\103\001\000\001\105\001\255\255\003\001\ +\108\001\255\255\255\255\111\001\008\001\255\255\010\001\115\001\ +\255\255\013\001\014\001\255\255\016\001\017\001\018\001\019\001\ +\020\001\021\001\255\255\255\255\024\001\025\001\026\001\255\255\ +\028\001\029\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\037\001\255\255\255\255\040\001\041\001\255\255\255\255\ +\255\255\255\255\000\000\255\255\255\255\049\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\060\001\255\255\255\255\063\001\255\255\255\255\255\255\067\001\ +\068\001\255\255\070\001\255\255\255\255\073\001\074\001\255\255\ +\255\255\255\255\255\255\255\255\080\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\091\001\ +\092\001\255\255\094\001\095\001\096\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\103\001\000\001\105\001\255\255\003\001\ +\108\001\255\255\255\255\111\001\008\001\255\255\010\001\115\001\ +\255\255\013\001\014\001\255\255\016\001\017\001\018\001\019\001\ +\020\001\021\001\255\255\255\255\024\001\025\001\026\001\255\255\ +\028\001\029\001\000\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\037\001\008\001\255\255\040\001\041\001\255\255\013\001\ +\255\255\255\255\000\000\255\255\255\255\049\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\026\001\255\255\028\001\029\001\ +\060\001\255\255\255\255\063\001\255\255\255\255\255\255\067\001\ +\068\001\255\255\070\001\041\001\255\255\073\001\074\001\255\255\ +\000\000\255\255\255\255\255\255\080\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\060\001\091\001\ +\092\001\063\001\094\001\095\001\096\001\067\001\068\001\000\001\ +\255\255\255\255\003\001\103\001\074\001\105\001\255\255\008\001\ +\108\001\010\001\080\001\111\001\013\001\014\001\255\255\115\001\ +\017\001\255\255\019\001\020\001\021\001\255\255\092\001\024\001\ +\025\001\026\001\096\001\028\001\029\001\000\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\037\001\255\255\108\001\040\001\ +\041\001\111\001\013\001\255\255\255\255\000\000\255\255\255\255\ +\049\001\255\255\255\255\255\255\255\255\255\255\255\255\026\001\ +\255\255\028\001\029\001\060\001\255\255\255\255\063\001\255\255\ +\255\255\255\255\067\001\068\001\255\255\070\001\041\001\255\255\ +\073\001\074\001\255\255\000\000\255\255\255\255\255\255\080\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\060\001\091\001\092\001\063\001\094\001\095\001\096\001\ +\067\001\068\001\000\001\255\255\255\255\003\001\103\001\074\001\ +\105\001\255\255\008\001\108\001\010\001\080\001\111\001\013\001\ +\014\001\255\255\115\001\017\001\255\255\019\001\020\001\021\001\ 
+\255\255\092\001\024\001\025\001\026\001\096\001\028\001\029\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\037\001\ +\255\255\108\001\040\001\041\001\111\001\255\255\255\255\255\255\ +\000\000\255\255\255\255\049\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\060\001\255\255\ +\255\255\063\001\255\255\255\255\255\255\067\001\068\001\255\255\ +\070\001\255\255\255\255\073\001\074\001\255\255\255\255\255\255\ +\255\255\255\255\080\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\091\001\092\001\255\255\ +\094\001\095\001\096\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\103\001\000\001\105\001\255\255\003\001\108\001\255\255\ +\255\255\111\001\008\001\255\255\010\001\115\001\255\255\013\001\ +\014\001\255\255\255\255\017\001\255\255\019\001\020\001\021\001\ +\255\255\255\255\024\001\025\001\026\001\255\255\028\001\029\001\ +\000\001\255\255\255\255\255\255\255\255\255\255\255\255\037\001\ +\255\255\255\255\040\001\041\001\255\255\013\001\255\255\255\255\ +\000\000\255\255\255\255\049\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\026\001\255\255\028\001\029\001\060\001\255\255\ +\255\255\063\001\255\255\255\255\255\255\067\001\068\001\255\255\ +\070\001\041\001\255\255\073\001\074\001\255\255\000\000\255\255\ +\255\255\255\255\080\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\060\001\091\001\092\001\063\001\ +\094\001\095\001\096\001\067\001\068\001\000\001\255\255\255\255\ +\003\001\103\001\074\001\105\001\255\255\008\001\108\001\010\001\ +\080\001\111\001\013\001\014\001\255\255\115\001\017\001\255\255\ +\019\001\020\001\021\001\255\255\092\001\024\001\025\001\026\001\ +\096\001\028\001\029\001\000\001\255\255\255\255\003\001\255\255\ +\255\255\255\255\037\001\255\255\108\001\040\001\041\001\111\001\ +\013\001\255\255\255\255\000\000\255\255\255\255\049\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\026\001\255\255\028\001\ +\029\001\060\001\255\255\255\255\063\001\255\255\255\255\255\255\ +\067\001\068\001\255\255\070\001\041\001\255\255\073\001\074\001\ +\255\255\000\000\255\255\255\255\255\255\080\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\060\001\ +\091\001\092\001\063\001\094\001\095\001\096\001\255\255\068\001\ +\000\001\255\255\255\255\003\001\103\001\074\001\105\001\255\255\ +\008\001\108\001\010\001\080\001\111\001\013\001\014\001\255\255\ +\115\001\017\001\255\255\019\001\020\001\021\001\255\255\092\001\ +\024\001\025\001\026\001\096\001\028\001\029\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\037\001\255\255\108\001\ +\040\001\041\001\111\001\255\255\255\255\255\255\000\000\255\255\ +\255\255\049\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\060\001\255\255\255\255\063\001\ +\255\255\255\255\255\255\067\001\068\001\255\255\070\001\255\255\ +\255\255\073\001\074\001\255\255\255\255\255\255\255\255\255\255\ +\080\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\091\001\092\001\255\255\094\001\095\001\ +\096\001\255\255\255\255\255\255\255\255\255\255\255\255\103\001\ +\000\001\105\001\255\255\003\001\108\001\255\255\255\255\111\001\ +\008\001\255\255\010\001\115\001\255\255\013\001\014\001\255\255\ +\255\255\017\001\255\255\019\001\020\001\021\001\255\255\255\255\ +\024\001\025\001\026\001\255\255\028\001\029\001\000\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\037\001\255\255\255\255\ 
+\040\001\041\001\255\255\013\001\255\255\255\255\000\000\255\255\ +\255\255\049\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\026\001\255\255\028\001\029\001\060\001\255\255\255\255\063\001\ +\255\255\255\255\255\255\067\001\068\001\255\255\070\001\041\001\ +\255\255\073\001\074\001\255\255\000\000\255\255\255\255\255\255\ +\080\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\060\001\091\001\092\001\063\001\094\001\095\001\ +\096\001\067\001\068\001\000\001\255\255\255\255\003\001\103\001\ +\074\001\105\001\255\255\008\001\108\001\010\001\080\001\111\001\ +\013\001\014\001\255\255\115\001\017\001\255\255\019\001\020\001\ +\021\001\255\255\092\001\024\001\025\001\026\001\096\001\028\001\ +\029\001\000\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\037\001\255\255\108\001\040\001\041\001\111\001\013\001\255\255\ +\255\255\000\000\255\255\255\255\049\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\026\001\255\255\028\001\029\001\060\001\ +\255\255\255\255\063\001\255\255\255\255\255\255\067\001\068\001\ +\255\255\070\001\041\001\255\255\073\001\074\001\255\255\000\000\ +\255\255\255\255\255\255\080\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\060\001\091\001\092\001\ +\063\001\094\001\095\001\096\001\067\001\068\001\000\001\255\255\ +\255\255\003\001\103\001\074\001\105\001\255\255\008\001\108\001\ +\010\001\080\001\111\001\013\001\014\001\255\255\115\001\017\001\ +\255\255\019\001\020\001\021\001\255\255\092\001\024\001\025\001\ +\026\001\096\001\028\001\029\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\037\001\255\255\108\001\040\001\041\001\ +\111\001\255\255\255\255\255\255\000\000\255\255\255\255\049\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\060\001\255\255\255\255\063\001\255\255\255\255\ +\255\255\067\001\068\001\255\255\070\001\255\255\255\255\073\001\ +\074\001\255\255\255\255\255\255\255\255\255\255\080\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\091\001\092\001\255\255\094\001\095\001\096\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\103\001\000\001\105\001\ +\255\255\003\001\108\001\255\255\255\255\111\001\008\001\255\255\ +\010\001\115\001\255\255\013\001\014\001\255\255\255\255\017\001\ +\255\255\019\001\020\001\021\001\255\255\255\255\024\001\025\001\ +\026\001\255\255\028\001\029\001\000\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\037\001\255\255\255\255\040\001\041\001\ +\255\255\013\001\255\255\255\255\000\000\255\255\255\255\049\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\026\001\255\255\ +\028\001\029\001\060\001\255\255\255\255\063\001\255\255\255\255\ +\255\255\067\001\068\001\255\255\070\001\041\001\255\255\073\001\ +\074\001\255\255\000\000\255\255\255\255\255\255\080\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\060\001\091\001\092\001\063\001\094\001\095\001\096\001\255\255\ +\068\001\000\001\255\255\255\255\003\001\103\001\074\001\105\001\ +\255\255\008\001\108\001\010\001\080\001\111\001\013\001\014\001\ +\255\255\115\001\017\001\255\255\019\001\020\001\021\001\255\255\ +\092\001\024\001\025\001\026\001\096\001\028\001\029\001\000\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\037\001\255\255\ +\108\001\040\001\041\001\111\001\013\001\255\255\255\255\000\000\ +\255\255\255\255\049\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\026\001\255\255\028\001\029\001\060\001\255\255\255\255\ 
+\063\001\255\255\255\255\255\255\067\001\068\001\255\255\070\001\ +\041\001\255\255\073\001\074\001\255\255\255\255\255\255\255\255\ +\255\255\080\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\060\001\091\001\092\001\063\001\094\001\ +\095\001\096\001\255\255\068\001\000\001\255\255\255\255\003\001\ +\103\001\074\001\105\001\255\255\008\001\108\001\010\001\080\001\ +\111\001\013\001\014\001\255\255\115\001\017\001\255\255\019\001\ +\020\001\021\001\255\255\092\001\024\001\025\001\026\001\096\001\ +\028\001\029\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\037\001\255\255\108\001\040\001\041\001\111\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\049\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\000\000\255\255\255\255\ +\060\001\255\255\255\255\063\001\255\255\255\255\255\255\067\001\ +\068\001\255\255\070\001\255\255\255\255\073\001\074\001\255\255\ +\255\255\255\255\255\255\255\255\080\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\091\001\ +\092\001\255\255\094\001\095\001\096\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\103\001\000\001\105\001\255\255\003\001\ +\108\001\255\255\255\255\111\001\008\001\255\255\010\001\115\001\ +\255\255\013\001\014\001\255\255\255\255\017\001\255\255\019\001\ +\020\001\021\001\255\255\255\255\024\001\025\001\026\001\255\255\ +\028\001\029\001\000\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\037\001\255\255\255\255\040\001\041\001\255\255\013\001\ +\255\255\255\255\255\255\255\255\255\255\049\001\255\255\255\255\ +\000\000\255\255\255\255\255\255\026\001\255\255\028\001\029\001\ +\060\001\255\255\255\255\063\001\255\255\255\255\255\255\067\001\ +\068\001\255\255\070\001\041\001\255\255\073\001\074\001\255\255\ +\255\255\255\255\255\255\255\255\080\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\060\001\091\001\ +\092\001\063\001\094\001\095\001\096\001\255\255\068\001\000\001\ +\255\255\255\255\003\001\103\001\074\001\105\001\255\255\008\001\ +\108\001\010\001\080\001\111\001\013\001\014\001\255\255\115\001\ +\017\001\255\255\019\001\020\001\021\001\255\255\092\001\024\001\ +\025\001\026\001\096\001\028\001\029\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\037\001\255\255\108\001\040\001\ +\041\001\111\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\049\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\060\001\255\255\255\255\063\001\255\255\ +\255\255\255\255\067\001\068\001\000\000\070\001\255\255\255\255\ +\073\001\074\001\255\255\255\255\255\255\255\255\255\255\080\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\091\001\092\001\255\255\094\001\255\255\096\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\103\001\255\255\ +\105\001\255\255\255\255\108\001\255\255\000\001\111\001\002\001\ +\003\001\004\001\115\001\255\255\255\255\008\001\255\255\255\255\ +\255\255\255\255\013\001\255\255\255\255\255\255\017\001\018\001\ +\019\001\255\255\255\255\255\255\255\255\255\255\255\255\026\001\ +\027\001\028\001\029\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\036\001\255\255\255\255\255\255\040\001\041\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\048\001\049\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\000\000\ +\255\255\060\001\255\255\255\255\063\001\255\255\255\255\066\001\ +\067\001\068\001\255\255\070\001\255\255\255\255\073\001\074\001\ 
+\255\255\255\255\255\255\255\255\255\255\080\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\091\001\092\001\255\255\094\001\095\001\096\001\255\255\255\255\ +\000\001\100\001\255\255\003\001\255\255\255\255\255\255\255\255\ +\008\001\108\001\010\001\255\255\111\001\013\001\014\001\255\255\ +\115\001\017\001\255\255\019\001\020\001\021\001\255\255\255\255\ +\024\001\255\255\026\001\255\255\028\001\029\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\037\001\255\255\255\255\ +\040\001\041\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\049\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\000\000\255\255\060\001\255\255\255\255\063\001\ +\255\255\255\255\255\255\067\001\068\001\255\255\070\001\255\255\ +\255\255\073\001\074\001\255\255\255\255\255\255\255\255\255\255\ +\080\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\091\001\092\001\255\255\094\001\095\001\ +\096\001\255\255\255\255\255\255\255\255\255\255\255\255\103\001\ +\255\255\105\001\255\255\255\255\108\001\255\255\255\255\111\001\ +\255\255\255\255\255\255\115\001\000\001\255\255\002\001\003\001\ +\004\001\255\255\255\255\255\255\008\001\255\255\255\255\255\255\ +\255\255\013\001\255\255\255\255\255\255\017\001\018\001\019\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\026\001\027\001\ +\028\001\029\001\255\255\255\255\008\001\255\255\255\255\255\255\ +\036\001\255\255\255\255\255\255\255\255\041\001\255\255\000\000\ +\255\255\255\255\255\255\023\001\048\001\049\001\255\255\255\255\ +\255\255\255\255\030\001\255\255\255\255\255\255\255\255\255\255\ +\060\001\255\255\255\255\063\001\255\255\255\255\066\001\067\001\ +\068\001\255\255\070\001\255\255\255\255\073\001\074\001\255\255\ +\255\255\255\255\255\255\055\001\080\001\057\001\058\001\059\001\ +\255\255\061\001\255\255\255\255\064\001\065\001\255\255\091\001\ +\092\001\255\255\094\001\095\001\096\001\255\255\255\255\000\001\ +\255\255\002\001\003\001\004\001\255\255\081\001\255\255\008\001\ +\108\001\255\255\255\255\111\001\013\001\089\001\090\001\115\001\ +\017\001\018\001\019\001\255\255\255\255\097\001\255\255\255\255\ +\255\255\026\001\027\001\028\001\029\001\255\255\106\001\255\255\ +\255\255\109\001\110\001\036\001\255\255\255\255\255\255\255\255\ +\041\001\255\255\000\000\255\255\255\255\255\255\255\255\048\001\ +\049\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\060\001\255\255\255\255\063\001\255\255\ +\255\255\066\001\067\001\068\001\255\255\070\001\255\255\255\255\ +\255\255\074\001\255\255\255\255\255\255\255\255\255\255\080\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\091\001\092\001\255\255\094\001\095\001\096\001\ +\255\255\255\255\000\001\255\255\002\001\003\001\004\001\255\255\ +\255\255\255\255\008\001\108\001\255\255\255\255\111\001\013\001\ +\255\255\255\255\115\001\017\001\018\001\019\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\026\001\027\001\028\001\029\001\ +\255\255\255\255\255\255\255\255\000\000\255\255\036\001\255\255\ +\255\255\255\255\255\255\041\001\255\255\255\255\000\000\255\255\ +\255\255\255\255\048\001\049\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\060\001\255\255\ +\255\255\063\001\255\255\255\255\066\001\067\001\068\001\255\255\ +\070\001\255\255\255\255\255\255\074\001\255\255\255\255\255\255\ +\255\255\255\255\080\001\255\255\255\255\255\255\255\255\255\255\ 
+\255\255\255\255\255\255\255\255\255\255\091\001\092\001\000\000\ +\094\001\095\001\096\001\255\255\255\255\255\255\255\255\000\001\ +\255\255\002\001\003\001\004\001\255\255\255\255\108\001\008\001\ +\255\255\111\001\255\255\255\255\013\001\115\001\255\255\255\255\ +\017\001\018\001\019\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\026\001\027\001\028\001\029\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\036\001\255\255\255\255\255\255\255\255\ +\041\001\255\255\255\255\255\255\255\255\255\255\255\255\048\001\ +\049\001\000\000\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\060\001\255\255\255\255\063\001\255\255\ +\255\255\066\001\067\001\068\001\255\255\070\001\255\255\255\255\ +\255\255\074\001\255\255\255\255\255\255\255\255\055\001\080\001\ +\057\001\058\001\059\001\255\255\061\001\255\255\255\255\064\001\ +\065\001\255\255\091\001\092\001\255\255\094\001\095\001\096\001\ +\255\255\255\255\000\001\255\255\002\001\003\001\004\001\255\255\ +\081\001\255\255\008\001\108\001\255\255\255\255\111\001\013\001\ +\089\001\090\001\115\001\017\001\018\001\019\001\255\255\255\255\ +\097\001\255\255\255\255\255\255\026\001\027\001\028\001\029\001\ +\255\255\255\255\255\255\255\255\109\001\110\001\036\001\255\255\ +\255\255\255\255\255\255\041\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\048\001\049\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\060\001\255\255\ +\255\255\063\001\000\000\255\255\066\001\067\001\068\001\255\255\ +\070\001\255\255\255\255\255\255\074\001\255\255\255\255\255\255\ +\255\255\255\255\080\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\000\001\091\001\092\001\003\001\ +\094\001\095\001\096\001\255\255\255\255\255\255\000\001\255\255\ +\255\255\013\001\255\255\255\255\255\255\017\001\108\001\019\001\ +\255\255\111\001\255\255\013\001\255\255\115\001\026\001\027\001\ +\028\001\029\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\026\001\255\255\028\001\029\001\255\255\041\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\041\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\000\001\ +\060\001\255\255\255\255\063\001\255\255\255\255\255\255\067\001\ +\068\001\255\255\060\001\255\255\013\001\073\001\074\001\255\255\ +\017\001\067\001\068\001\255\255\080\001\255\255\255\255\255\255\ +\074\001\026\001\027\001\028\001\029\001\000\000\080\001\255\255\ +\092\001\255\255\094\001\255\255\096\001\255\255\255\255\255\255\ +\041\001\255\255\092\001\255\255\255\255\255\255\096\001\255\255\ +\108\001\255\255\255\255\111\001\255\255\255\255\255\255\115\001\ +\255\255\000\001\108\001\060\001\003\001\111\001\063\001\255\255\ +\255\255\066\001\067\001\068\001\255\255\255\255\013\001\255\255\ +\073\001\074\001\017\001\255\255\255\255\255\255\255\255\080\001\ +\255\255\255\255\255\255\026\001\027\001\028\001\029\001\255\255\ +\255\255\255\255\255\255\092\001\255\255\094\001\255\255\096\001\ +\255\255\255\255\041\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\108\001\255\255\255\255\111\001\255\255\ +\255\255\255\255\115\001\255\255\255\255\060\001\255\255\255\255\ +\063\001\255\255\255\255\255\255\067\001\068\001\255\255\255\255\ +\255\255\255\255\073\001\074\001\055\001\255\255\057\001\058\001\ +\059\001\080\001\061\001\255\255\000\000\064\001\065\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\092\001\255\255\094\001\ +\255\255\096\001\255\255\255\255\255\255\255\255\081\001\255\255\ 
+\255\255\255\255\255\255\255\255\255\255\108\001\089\001\090\001\ +\111\001\255\255\000\001\255\255\115\001\003\001\097\001\005\001\ +\006\001\007\001\008\001\255\255\255\255\011\001\012\001\013\001\ +\255\255\255\255\109\001\110\001\255\255\019\001\255\255\255\255\ +\255\255\023\001\255\255\255\255\026\001\255\255\028\001\029\001\ +\030\001\031\001\032\001\033\001\034\001\035\001\036\001\255\255\ +\255\255\039\001\040\001\041\001\255\255\255\255\000\000\255\255\ +\255\255\255\255\048\001\049\001\050\001\051\001\052\001\053\001\ +\054\001\055\001\056\001\057\001\058\001\059\001\060\001\061\001\ +\255\255\063\001\064\001\065\001\255\255\067\001\068\001\069\001\ +\070\001\071\001\072\001\255\255\074\001\075\001\255\255\077\001\ +\078\001\255\255\080\001\081\001\255\255\255\255\084\001\085\001\ +\255\255\087\001\088\001\089\001\090\001\091\001\092\001\093\001\ +\255\255\095\001\096\001\097\001\255\255\099\001\255\255\101\001\ +\102\001\255\255\104\001\255\255\106\001\107\001\108\001\109\001\ +\110\001\111\001\112\001\000\000\114\001\000\001\255\255\255\255\ +\255\255\004\001\255\255\006\001\255\255\008\001\255\255\010\001\ +\255\255\012\001\255\255\014\001\015\001\255\255\017\001\018\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\027\001\028\001\255\255\030\001\031\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\050\001\ +\051\001\052\001\053\001\255\255\055\001\056\001\255\255\255\255\ +\059\001\000\000\255\255\255\255\255\255\064\001\065\001\066\001\ +\255\255\255\255\255\255\255\255\071\001\255\255\073\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\081\001\255\255\ +\255\255\084\001\255\255\255\255\255\255\255\255\089\001\255\255\ +\091\001\092\001\255\255\094\001\095\001\255\255\097\001\255\255\ +\255\255\255\255\101\001\255\255\255\255\104\001\255\255\106\001\ +\255\255\255\255\109\001\110\001\000\001\255\255\113\001\255\255\ +\004\001\255\255\006\001\000\000\008\001\255\255\010\001\255\255\ +\012\001\255\255\014\001\015\001\255\255\017\001\018\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\027\001\ +\255\255\255\255\030\001\031\001\255\255\255\255\255\255\255\255\ +\255\255\000\000\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\050\001\051\001\ +\255\255\053\001\255\255\055\001\056\001\255\255\255\255\059\001\ +\255\255\255\255\255\255\255\255\064\001\065\001\066\001\255\255\ +\255\255\255\255\255\255\071\001\255\255\073\001\000\001\255\255\ +\255\255\003\001\255\255\255\255\255\255\081\001\008\001\255\255\ +\084\001\255\255\255\255\013\001\014\001\089\001\255\255\091\001\ +\092\001\019\001\094\001\095\001\022\001\097\001\255\255\255\255\ +\026\001\101\001\028\001\029\001\104\001\255\255\106\001\255\255\ +\255\255\109\001\110\001\255\255\255\255\113\001\255\255\041\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\055\001\ +\000\000\057\001\058\001\059\001\255\255\061\001\255\255\255\255\ +\064\001\065\001\060\001\255\255\255\255\063\001\255\255\065\001\ +\066\001\067\001\068\001\000\001\255\255\255\255\003\001\255\255\ +\074\001\081\001\255\255\008\001\255\255\079\001\080\001\255\255\ +\013\001\089\001\090\001\255\255\255\255\255\255\019\001\255\255\ +\255\255\097\001\092\001\255\255\255\255\026\001\096\001\028\001\ +\029\001\255\255\255\255\255\255\255\255\109\001\110\001\255\255\ +\000\000\255\255\108\001\040\001\041\001\111\001\055\001\255\255\ 
+\057\001\058\001\059\001\255\255\061\001\255\255\255\255\064\001\ +\065\001\255\255\255\255\255\255\255\255\255\255\255\255\060\001\ +\255\255\000\001\063\001\255\255\003\001\066\001\067\001\068\001\ +\081\001\008\001\255\255\255\255\073\001\074\001\013\001\255\255\ +\089\001\090\001\255\255\080\001\019\001\255\255\255\255\255\255\ +\097\001\255\255\255\255\026\001\255\255\028\001\029\001\092\001\ +\000\000\255\255\255\255\096\001\109\001\110\001\255\255\100\001\ +\255\255\040\001\041\001\255\255\255\255\255\255\255\255\108\001\ +\255\255\255\255\111\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\000\001\255\255\060\001\003\001\255\255\ +\063\001\255\255\255\255\066\001\067\001\068\001\255\255\255\255\ +\013\001\255\255\073\001\074\001\255\255\255\255\019\001\255\255\ +\255\255\080\001\255\255\255\255\255\255\026\001\255\255\028\001\ +\029\001\000\001\000\000\255\255\003\001\092\001\255\255\255\255\ +\255\255\096\001\255\255\040\001\041\001\255\255\013\001\255\255\ +\255\255\255\255\017\001\048\001\049\001\108\001\255\255\255\255\ +\111\001\255\255\255\255\026\001\027\001\028\001\029\001\060\001\ +\255\255\255\255\063\001\255\255\255\255\255\255\255\255\068\001\ +\255\255\070\001\041\001\255\255\255\255\074\001\255\255\255\255\ +\255\255\255\255\055\001\080\001\057\001\058\001\059\001\255\255\ +\061\001\255\255\000\000\064\001\065\001\060\001\255\255\092\001\ +\063\001\255\255\255\255\096\001\067\001\068\001\255\255\255\255\ +\255\255\255\255\006\001\074\001\081\001\255\255\255\255\108\001\ +\012\001\080\001\111\001\255\255\089\001\090\001\255\255\255\255\ +\000\001\255\255\255\255\003\001\097\001\092\001\255\255\094\001\ +\008\001\096\001\030\001\031\001\255\255\013\001\255\255\255\255\ +\109\001\110\001\255\255\019\001\255\255\108\001\255\255\000\000\ +\111\001\255\255\026\001\255\255\028\001\029\001\050\001\255\255\ +\052\001\053\001\255\255\055\001\056\001\255\255\255\255\059\001\ +\255\255\041\001\255\255\255\255\064\001\065\001\255\255\255\255\ +\255\255\255\255\255\255\071\001\255\255\255\255\255\255\255\255\ +\000\001\255\255\255\255\003\001\060\001\255\255\255\255\063\001\ +\084\001\255\255\066\001\067\001\068\001\013\001\255\255\255\255\ +\255\255\017\001\074\001\255\255\000\000\097\001\255\255\255\255\ +\080\001\101\001\026\001\027\001\028\001\029\001\106\001\255\255\ +\255\255\109\001\110\001\255\255\092\001\255\255\255\255\255\255\ +\096\001\041\001\255\255\000\000\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\108\001\255\255\255\255\111\001\ +\000\001\255\255\255\255\003\001\060\001\255\255\255\255\063\001\ +\255\255\255\255\255\255\067\001\068\001\013\001\255\255\255\255\ +\255\255\000\000\074\001\019\001\255\255\255\255\255\255\255\255\ +\080\001\255\255\026\001\255\255\028\001\029\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\092\001\255\255\094\001\255\255\ +\096\001\041\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\048\001\255\255\255\255\255\255\108\001\255\255\255\255\111\001\ +\255\255\255\255\000\001\255\255\060\001\003\001\255\255\063\001\ +\255\255\255\255\255\255\067\001\068\001\255\255\070\001\013\001\ +\255\255\000\000\074\001\255\255\255\255\019\001\255\255\255\255\ +\080\001\255\255\255\255\255\255\026\001\255\255\028\001\029\001\ +\255\255\255\255\255\255\255\255\092\001\255\255\255\255\255\255\ +\096\001\255\255\255\255\041\001\255\255\255\255\255\255\000\000\ +\255\255\255\255\255\255\255\255\108\001\255\255\255\255\111\001\ +\255\255\255\255\000\001\255\255\255\255\003\001\060\001\255\255\ 
+\255\255\063\001\008\001\255\255\255\255\067\001\068\001\013\001\ +\255\255\255\255\255\255\255\255\074\001\019\001\255\255\255\255\ +\255\255\255\255\080\001\255\255\026\001\255\255\028\001\029\001\ +\086\001\255\255\255\255\255\255\255\255\255\255\092\001\255\255\ +\255\255\255\255\096\001\041\001\255\255\255\255\255\255\000\000\ +\255\255\255\255\255\255\255\255\255\255\255\255\108\001\000\001\ +\255\255\111\001\003\001\255\255\255\255\255\255\060\001\255\255\ +\000\000\063\001\255\255\255\255\013\001\067\001\068\001\255\255\ +\255\255\255\255\019\001\255\255\074\001\255\255\255\255\255\255\ +\255\255\026\001\080\001\028\001\029\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\092\001\255\255\ +\041\001\255\255\096\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\000\001\255\255\108\001\003\001\ +\255\255\111\001\255\255\060\001\255\255\255\255\063\001\255\255\ +\000\000\013\001\067\001\068\001\255\255\255\255\255\255\019\001\ +\255\255\074\001\255\255\000\001\255\255\255\255\026\001\080\001\ +\028\001\029\001\255\255\008\001\255\255\000\000\255\255\255\255\ +\013\001\255\255\255\255\092\001\255\255\041\001\000\000\096\001\ +\255\255\255\255\255\255\255\255\255\255\026\001\255\255\028\001\ +\029\001\000\001\255\255\108\001\003\001\255\255\111\001\255\255\ +\060\001\255\255\255\255\063\001\041\001\255\255\013\001\067\001\ +\068\001\255\255\255\255\255\255\019\001\255\255\074\001\255\255\ +\255\255\000\000\255\255\026\001\080\001\028\001\029\001\060\001\ +\255\255\255\255\063\001\255\255\255\255\066\001\067\001\068\001\ +\092\001\255\255\041\001\255\255\096\001\074\001\000\000\255\255\ +\255\255\255\255\255\255\080\001\255\255\255\255\255\255\255\255\ +\108\001\000\001\255\255\111\001\003\001\060\001\255\255\092\001\ +\063\001\255\255\255\255\096\001\067\001\068\001\013\001\255\255\ +\255\255\255\255\255\255\074\001\019\001\255\255\255\255\108\001\ +\255\255\080\001\111\001\026\001\255\255\028\001\029\001\000\001\ +\255\255\255\255\003\001\255\255\255\255\092\001\255\255\255\255\ +\255\255\096\001\041\001\255\255\013\001\255\255\255\255\255\255\ +\255\255\255\255\019\001\255\255\255\255\108\001\255\255\255\255\ +\111\001\026\001\255\255\028\001\029\001\060\001\255\255\255\255\ +\063\001\255\255\255\255\255\255\067\001\068\001\255\255\255\255\ +\041\001\255\255\255\255\074\001\255\255\255\255\255\255\255\255\ +\255\255\080\001\255\255\255\255\255\255\255\255\255\255\000\001\ +\255\255\255\255\003\001\060\001\255\255\092\001\063\001\255\255\ +\255\255\096\001\067\001\068\001\013\001\255\255\255\255\255\255\ +\000\001\074\001\019\001\255\255\255\255\108\001\255\255\080\001\ +\111\001\026\001\255\255\028\001\029\001\013\001\255\255\255\255\ +\255\255\255\255\255\255\092\001\255\255\255\255\255\255\096\001\ +\041\001\255\255\026\001\255\255\028\001\029\001\255\255\255\255\ +\255\255\255\255\255\255\108\001\255\255\255\255\111\001\255\255\ +\255\255\041\001\255\255\060\001\255\255\255\255\063\001\255\255\ +\255\255\255\255\067\001\068\001\255\255\255\255\255\255\255\255\ +\000\001\074\001\255\255\255\255\060\001\255\255\255\255\080\001\ +\255\255\255\255\066\001\067\001\068\001\013\001\255\255\255\255\ +\255\255\255\255\074\001\092\001\255\255\000\001\255\255\096\001\ +\080\001\255\255\026\001\255\255\028\001\029\001\000\001\255\255\ +\255\255\255\255\013\001\108\001\092\001\255\255\111\001\255\255\ +\096\001\041\001\255\255\013\001\255\255\255\255\255\255\026\001\ +\255\255\028\001\029\001\255\255\108\001\255\255\255\255\111\001\ 
+\026\001\255\255\028\001\029\001\060\001\255\255\041\001\063\001\ +\255\255\000\001\255\255\255\255\068\001\255\255\255\255\041\001\ +\255\255\255\255\074\001\255\255\255\255\255\255\013\001\255\255\ +\080\001\060\001\255\255\255\255\063\001\255\255\000\001\255\255\ +\255\255\068\001\060\001\026\001\092\001\028\001\029\001\074\001\ +\096\001\255\255\068\001\013\001\255\255\080\001\255\255\255\255\ +\074\001\255\255\041\001\255\255\108\001\255\255\080\001\111\001\ +\026\001\092\001\028\001\029\001\255\255\096\001\255\255\255\255\ +\255\255\255\255\092\001\255\255\255\255\060\001\096\001\041\001\ +\255\255\108\001\255\255\255\255\111\001\068\001\255\255\255\255\ +\255\255\255\255\108\001\074\001\255\255\111\001\255\255\255\255\ +\255\255\080\001\060\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\068\001\255\255\255\255\092\001\255\255\255\255\ +\074\001\096\001\255\255\255\255\255\255\255\255\080\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\108\001\255\255\255\255\ +\111\001\255\255\092\001\000\001\255\255\255\255\096\001\255\255\ +\005\001\006\001\007\001\008\001\255\255\255\255\011\001\012\001\ +\013\001\014\001\108\001\255\255\255\255\111\001\019\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\026\001\255\255\028\001\ +\029\001\030\001\031\001\032\001\033\001\034\001\035\001\255\255\ +\255\255\255\255\039\001\255\255\041\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\049\001\050\001\051\001\052\001\ +\053\001\054\001\055\001\056\001\255\255\255\255\059\001\060\001\ +\255\255\255\255\063\001\064\001\065\001\066\001\255\255\068\001\ +\069\001\070\001\071\001\072\001\255\255\074\001\255\255\255\255\ +\077\001\078\001\255\255\080\001\081\001\255\255\255\255\084\001\ +\085\001\255\255\087\001\255\255\089\001\090\001\255\255\092\001\ +\093\001\255\255\255\255\096\001\097\001\255\255\099\001\255\255\ +\101\001\102\001\255\255\104\001\255\255\106\001\107\001\108\001\ +\109\001\110\001\111\001\112\001\000\001\114\001\255\255\255\255\ +\255\255\005\001\006\001\007\001\008\001\255\255\255\255\011\001\ +\012\001\255\255\255\255\255\255\255\255\255\255\255\255\019\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\026\001\255\255\ +\028\001\255\255\030\001\031\001\032\001\033\001\034\001\035\001\ +\255\255\255\255\255\255\039\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\049\001\050\001\051\001\ +\052\001\053\001\054\001\055\001\056\001\255\255\255\255\059\001\ +\060\001\255\255\255\255\063\001\064\001\065\001\255\255\255\255\ +\068\001\069\001\070\001\071\001\072\001\255\255\074\001\255\255\ +\255\255\077\001\078\001\255\255\255\255\081\001\255\255\255\255\ +\084\001\085\001\255\255\087\001\255\255\089\001\090\001\255\255\ +\255\255\093\001\255\255\255\255\255\255\097\001\255\255\099\001\ +\255\255\101\001\102\001\255\255\104\001\255\255\106\001\107\001\ +\255\255\109\001\110\001\111\001\112\001\255\255\114\001\000\001\ +\001\001\002\001\255\255\255\255\005\001\006\001\007\001\255\255\ +\009\001\255\255\011\001\012\001\255\255\255\255\015\001\016\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\027\001\255\255\255\255\030\001\031\001\032\001\ +\033\001\034\001\255\255\036\001\255\255\255\255\039\001\255\255\ +\255\255\042\001\043\001\044\001\045\001\046\001\047\001\255\255\ +\255\255\050\001\255\255\052\001\053\001\054\001\055\001\056\001\ +\255\255\255\255\059\001\255\255\061\001\255\255\063\001\064\001\ +\065\001\255\255\255\255\255\255\069\001\255\255\071\001\072\001\ 
+\255\255\074\001\255\255\255\255\255\255\078\001\255\255\255\255\ +\255\255\082\001\083\001\084\001\085\001\086\001\087\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\094\001\255\255\255\255\ +\255\255\098\001\255\255\100\001\101\001\255\255\255\255\255\255\ +\255\255\106\001\107\001\255\255\109\001\110\001\000\001\001\001\ +\002\001\114\001\255\255\005\001\006\001\007\001\255\255\009\001\ +\255\255\011\001\012\001\255\255\255\255\015\001\016\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\027\001\255\255\255\255\030\001\031\001\032\001\033\001\ +\034\001\255\255\036\001\255\255\255\255\039\001\255\255\255\255\ +\042\001\043\001\044\001\045\001\046\001\047\001\255\255\255\255\ +\050\001\255\255\052\001\053\001\054\001\055\001\056\001\255\255\ +\255\255\059\001\255\255\061\001\255\255\063\001\064\001\065\001\ +\255\255\255\255\255\255\069\001\255\255\071\001\072\001\255\255\ +\074\001\255\255\255\255\255\255\078\001\255\255\255\255\255\255\ +\082\001\083\001\084\001\085\001\086\001\087\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\094\001\255\255\255\255\255\255\ +\098\001\255\255\100\001\101\001\255\255\255\255\255\255\255\255\ +\106\001\107\001\255\255\109\001\110\001\000\001\255\255\255\255\ +\114\001\255\255\005\001\006\001\007\001\255\255\255\255\255\255\ +\011\001\012\001\013\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\026\001\ +\255\255\028\001\029\001\030\001\031\001\032\001\033\001\034\001\ +\255\255\255\255\255\255\255\255\039\001\255\255\041\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\050\001\ +\255\255\052\001\053\001\054\001\055\001\056\001\255\255\255\255\ +\059\001\060\001\255\255\255\255\063\001\064\001\065\001\255\255\ +\255\255\068\001\069\001\255\255\071\001\072\001\255\255\074\001\ +\255\255\255\255\255\255\078\001\255\255\080\001\255\255\255\255\ +\255\255\084\001\085\001\000\001\087\001\255\255\255\255\255\255\ +\005\001\006\001\007\001\255\255\255\255\096\001\011\001\012\001\ +\255\255\255\255\101\001\255\255\255\255\255\255\255\255\106\001\ +\107\001\108\001\109\001\110\001\111\001\255\255\255\255\114\001\ +\255\255\030\001\031\001\032\001\033\001\034\001\255\255\255\255\ +\255\255\255\255\039\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\050\001\255\255\052\001\ +\053\001\054\001\055\001\056\001\255\255\255\255\059\001\255\255\ +\255\255\255\255\063\001\064\001\065\001\255\255\255\255\255\255\ +\069\001\255\255\071\001\072\001\255\255\255\255\255\255\255\255\ +\255\255\078\001\255\255\255\255\255\255\255\255\255\255\084\001\ +\085\001\000\001\087\001\255\255\255\255\255\255\005\001\006\001\ +\007\001\094\001\255\255\255\255\011\001\012\001\255\255\255\255\ +\101\001\255\255\255\255\255\255\255\255\106\001\107\001\255\255\ +\109\001\110\001\255\255\255\255\255\255\114\001\255\255\030\001\ +\031\001\032\001\033\001\034\001\255\255\255\255\255\255\255\255\ +\039\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\050\001\255\255\052\001\053\001\054\001\ +\055\001\056\001\255\255\255\255\059\001\255\255\255\255\255\255\ +\063\001\064\001\065\001\255\255\255\255\255\255\069\001\255\255\ +\071\001\072\001\255\255\255\255\255\255\255\255\255\255\078\001\ +\255\255\255\255\255\255\255\255\255\255\084\001\085\001\000\001\ +\087\001\255\255\255\255\255\255\005\001\006\001\007\001\094\001\ +\255\255\255\255\011\001\012\001\255\255\255\255\101\001\255\255\ 
+\255\255\255\255\255\255\106\001\107\001\255\255\109\001\110\001\ +\255\255\255\255\255\255\114\001\255\255\030\001\031\001\032\001\ +\033\001\034\001\255\255\255\255\255\255\255\255\039\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\050\001\255\255\052\001\053\001\054\001\055\001\056\001\ +\255\255\255\255\059\001\255\255\255\255\255\255\063\001\064\001\ +\065\001\255\255\255\255\255\255\069\001\255\255\071\001\072\001\ +\255\255\255\255\255\255\255\255\255\255\078\001\255\255\255\255\ +\255\255\255\255\255\255\084\001\085\001\000\001\087\001\255\255\ +\255\255\255\255\005\001\006\001\007\001\094\001\255\255\255\255\ +\011\001\012\001\255\255\255\255\101\001\255\255\255\255\255\255\ +\255\255\106\001\107\001\255\255\109\001\110\001\255\255\255\255\ +\255\255\114\001\255\255\030\001\031\001\032\001\033\001\034\001\ +\255\255\255\255\255\255\255\255\039\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\050\001\ +\255\255\052\001\053\001\054\001\055\001\056\001\255\255\255\255\ +\059\001\255\255\255\255\255\255\063\001\064\001\065\001\255\255\ +\255\255\255\255\069\001\255\255\071\001\072\001\255\255\255\255\ +\255\255\255\255\255\255\078\001\255\255\255\255\255\255\255\255\ +\255\255\084\001\085\001\255\255\087\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\094\001\003\001\004\001\005\001\255\255\ +\255\255\255\255\101\001\255\255\011\001\255\255\013\001\106\001\ +\107\001\255\255\109\001\110\001\019\001\020\001\021\001\114\001\ +\255\255\024\001\025\001\026\001\255\255\028\001\029\001\030\001\ +\255\255\032\001\033\001\034\001\035\001\255\255\255\255\255\255\ +\039\001\040\001\041\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\048\001\049\001\255\255\255\255\052\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\063\001\064\001\255\255\255\255\255\255\000\001\069\001\070\001\ +\255\255\004\001\255\255\074\001\075\001\076\001\077\001\078\001\ +\079\001\080\001\255\255\082\001\255\255\255\255\017\001\255\255\ +\019\001\088\001\255\255\022\001\255\255\255\255\093\001\026\001\ +\027\001\255\255\255\255\255\255\099\001\255\255\255\255\102\001\ +\103\001\036\001\105\001\106\001\107\001\108\001\109\001\255\255\ +\111\001\112\001\113\001\114\001\115\001\048\001\049\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\060\001\255\255\255\255\255\255\064\001\255\255\066\001\ +\067\001\068\001\255\255\070\001\255\255\255\255\073\001\255\255\ +\255\255\255\255\000\001\001\001\002\001\255\255\255\255\255\255\ +\006\001\007\001\255\255\009\001\255\255\255\255\012\001\090\001\ +\091\001\015\001\016\001\255\255\095\001\255\255\097\001\255\255\ +\255\255\100\001\255\255\255\255\255\255\027\001\028\001\255\255\ +\030\001\031\001\109\001\255\255\111\001\255\255\036\001\255\255\ +\255\255\255\255\255\255\255\255\042\001\043\001\044\001\045\001\ +\046\001\047\001\255\255\255\255\050\001\255\255\052\001\053\001\ +\255\255\055\001\056\001\255\255\255\255\059\001\255\255\061\001\ +\255\255\255\255\064\001\065\001\255\255\255\255\255\255\255\255\ +\255\255\071\001\072\001\255\255\074\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\082\001\083\001\084\001\085\001\ +\086\001\087\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\094\001\255\255\255\255\097\001\098\001\255\255\100\001\101\001\ +\255\255\255\255\255\255\255\255\106\001\255\255\108\001\109\001\ +\110\001\000\001\001\001\002\001\255\255\255\255\255\255\006\001\ 
+\007\001\255\255\009\001\255\255\255\255\012\001\255\255\255\255\ +\015\001\016\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\027\001\028\001\255\255\030\001\ +\031\001\255\255\255\255\255\255\255\255\036\001\255\255\255\255\ +\255\255\255\255\255\255\042\001\043\001\044\001\045\001\046\001\ +\047\001\255\255\255\255\050\001\255\255\052\001\053\001\255\255\ +\055\001\056\001\255\255\255\255\059\001\255\255\061\001\255\255\ +\255\255\064\001\065\001\255\255\255\255\255\255\255\255\255\255\ +\071\001\072\001\255\255\074\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\082\001\083\001\084\001\085\001\086\001\ +\087\001\255\255\255\255\255\255\255\255\255\255\255\255\094\001\ +\255\255\255\255\097\001\098\001\255\255\100\001\101\001\255\255\ +\255\255\255\255\255\255\106\001\255\255\108\001\109\001\110\001\ +\000\001\001\001\002\001\255\255\255\255\255\255\006\001\007\001\ +\255\255\009\001\255\255\255\255\012\001\255\255\255\255\015\001\ +\016\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\027\001\028\001\255\255\030\001\031\001\ +\255\255\255\255\255\255\255\255\036\001\255\255\255\255\255\255\ +\255\255\255\255\042\001\043\001\044\001\045\001\046\001\047\001\ +\255\255\255\255\050\001\255\255\052\001\053\001\255\255\055\001\ +\056\001\255\255\255\255\059\001\255\255\061\001\255\255\255\255\ +\064\001\065\001\255\255\255\255\255\255\255\255\255\255\071\001\ +\072\001\255\255\074\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\082\001\083\001\084\001\085\001\086\001\087\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\094\001\255\255\ +\255\255\097\001\098\001\255\255\100\001\101\001\255\255\255\255\ +\255\255\255\255\106\001\255\255\108\001\109\001\110\001\000\001\ +\001\001\002\001\255\255\255\255\255\255\006\001\007\001\255\255\ +\009\001\255\255\255\255\012\001\255\255\255\255\015\001\016\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\027\001\028\001\255\255\030\001\031\001\255\255\ +\255\255\255\255\255\255\036\001\255\255\255\255\255\255\255\255\ +\255\255\042\001\043\001\044\001\045\001\046\001\047\001\255\255\ +\255\255\050\001\255\255\052\001\053\001\255\255\055\001\056\001\ +\255\255\255\255\059\001\255\255\061\001\255\255\255\255\064\001\ +\065\001\255\255\255\255\255\255\255\255\255\255\071\001\072\001\ +\255\255\074\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\082\001\083\001\084\001\085\001\086\001\087\001\255\255\ +\255\255\000\001\255\255\255\255\255\255\094\001\255\255\006\001\ +\097\001\098\001\255\255\100\001\101\001\012\001\255\255\255\255\ +\015\001\106\001\255\255\255\255\109\001\110\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\028\001\255\255\030\001\ +\031\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\050\001\255\255\052\001\053\001\255\255\ +\055\001\056\001\255\255\255\255\059\001\255\255\000\001\255\255\ +\255\255\064\001\065\001\255\255\006\001\255\255\255\255\255\255\ +\071\001\255\255\012\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\084\001\255\255\255\255\ +\255\255\255\255\028\001\255\255\030\001\031\001\255\255\094\001\ +\255\255\255\255\097\001\255\255\255\255\255\255\101\001\255\255\ +\255\255\255\255\255\255\106\001\255\255\255\255\109\001\110\001\ +\050\001\255\255\052\001\053\001\255\255\055\001\056\001\255\255\ 
+\255\255\059\001\255\255\000\001\255\255\255\255\064\001\065\001\ +\255\255\006\001\255\255\255\255\255\255\071\001\255\255\012\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\084\001\255\255\255\255\255\255\255\255\028\001\ +\255\255\030\001\031\001\255\255\255\255\255\255\255\255\097\001\ +\255\255\255\255\255\255\101\001\255\255\255\255\255\255\255\255\ +\106\001\255\255\255\255\109\001\110\001\050\001\255\255\052\001\ +\053\001\255\255\055\001\056\001\255\255\255\255\059\001\255\255\ +\000\001\255\255\255\255\064\001\065\001\255\255\006\001\255\255\ +\255\255\255\255\071\001\255\255\012\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\084\001\ +\255\255\255\255\255\255\255\255\028\001\255\255\030\001\031\001\ +\255\255\255\255\255\255\255\255\097\001\255\255\255\255\255\255\ +\101\001\255\255\255\255\255\255\255\255\106\001\255\255\255\255\ +\109\001\110\001\050\001\255\255\052\001\053\001\255\255\055\001\ +\056\001\255\255\255\255\059\001\255\255\000\001\255\255\255\255\ +\064\001\065\001\255\255\006\001\255\255\255\255\255\255\071\001\ +\255\255\012\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\084\001\255\255\255\255\255\255\ +\255\255\028\001\255\255\030\001\031\001\255\255\255\255\255\255\ +\255\255\097\001\255\255\255\255\255\255\101\001\255\255\255\255\ +\255\255\255\255\106\001\255\255\255\255\109\001\110\001\050\001\ +\255\255\052\001\053\001\255\255\055\001\056\001\255\255\255\255\ +\059\001\255\255\000\001\255\255\255\255\064\001\065\001\255\255\ +\006\001\255\255\255\255\255\255\071\001\255\255\012\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\084\001\255\255\255\255\255\255\255\255\028\001\255\255\ +\030\001\031\001\255\255\255\255\255\255\255\255\097\001\255\255\ +\255\255\255\255\101\001\255\255\255\255\255\255\255\255\106\001\ +\255\255\255\255\109\001\110\001\050\001\255\255\052\001\053\001\ +\255\255\055\001\056\001\255\255\255\255\059\001\255\255\255\255\ +\255\255\255\255\064\001\065\001\005\001\006\001\007\001\255\255\ +\255\255\071\001\011\001\012\001\013\001\014\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\084\001\255\255\ +\255\255\255\255\255\255\028\001\029\001\030\001\031\001\032\001\ +\033\001\034\001\255\255\097\001\255\255\255\255\039\001\101\001\ +\041\001\255\255\255\255\255\255\106\001\255\255\255\255\109\001\ +\110\001\050\001\255\255\052\001\053\001\054\001\055\001\056\001\ +\255\255\255\255\059\001\060\001\255\255\255\255\063\001\064\001\ +\065\001\255\255\255\255\068\001\069\001\255\255\071\001\072\001\ +\255\255\074\001\255\255\255\255\255\255\078\001\255\255\080\001\ +\255\255\255\255\255\255\084\001\085\001\255\255\087\001\255\255\ +\089\001\255\255\255\255\005\001\006\001\007\001\255\255\096\001\ +\255\255\011\001\012\001\013\001\101\001\255\255\255\255\255\255\ +\255\255\106\001\107\001\108\001\109\001\110\001\111\001\255\255\ +\255\255\114\001\028\001\029\001\030\001\031\001\032\001\033\001\ +\034\001\255\255\255\255\255\255\255\255\039\001\255\255\041\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\050\001\255\255\052\001\053\001\054\001\055\001\056\001\255\255\ +\255\255\059\001\060\001\255\255\255\255\063\001\064\001\065\001\ +\255\255\255\255\068\001\069\001\255\255\071\001\072\001\255\255\ +\074\001\255\255\255\255\255\255\078\001\255\255\080\001\255\255\ +\255\255\255\255\084\001\085\001\255\255\087\001\255\255\255\255\ 
+\255\255\005\001\006\001\007\001\255\255\255\255\096\001\011\001\ +\012\001\255\255\255\255\101\001\255\255\255\255\255\255\255\255\ +\106\001\107\001\108\001\109\001\110\001\111\001\255\255\255\255\ +\114\001\255\255\030\001\031\001\032\001\033\001\034\001\255\255\ +\255\255\255\255\255\255\039\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\050\001\255\255\ +\052\001\053\001\054\001\055\001\056\001\255\255\255\255\059\001\ +\255\255\255\255\255\255\063\001\064\001\065\001\255\255\255\255\ +\255\255\069\001\255\255\071\001\072\001\255\255\255\255\255\255\ +\255\255\255\255\078\001\255\255\255\255\255\255\255\255\255\255\ +\084\001\085\001\255\255\087\001\255\255\255\255\255\255\255\255\ +\092\001\005\001\006\001\007\001\255\255\255\255\010\001\011\001\ +\012\001\101\001\255\255\255\255\255\255\255\255\106\001\107\001\ +\255\255\109\001\110\001\255\255\255\255\255\255\114\001\255\255\ +\255\255\255\255\030\001\031\001\032\001\033\001\034\001\255\255\ +\255\255\255\255\255\255\039\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\050\001\255\255\ +\052\001\053\001\054\001\055\001\056\001\255\255\255\255\059\001\ +\255\255\255\255\255\255\063\001\064\001\065\001\255\255\255\255\ +\255\255\069\001\255\255\071\001\072\001\255\255\255\255\255\255\ +\255\255\255\255\078\001\255\255\255\255\255\255\255\255\255\255\ +\084\001\085\001\255\255\087\001\255\255\255\255\005\001\006\001\ +\007\001\255\255\255\255\255\255\011\001\012\001\255\255\255\255\ +\255\255\101\001\255\255\255\255\255\255\255\255\106\001\107\001\ +\255\255\109\001\110\001\026\001\255\255\255\255\114\001\030\001\ +\031\001\032\001\033\001\034\001\255\255\255\255\255\255\255\255\ +\039\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\050\001\255\255\052\001\053\001\054\001\ +\055\001\056\001\255\255\255\255\059\001\255\255\255\255\255\255\ +\063\001\064\001\065\001\255\255\255\255\255\255\069\001\255\255\ +\071\001\072\001\255\255\255\255\255\255\255\255\255\255\078\001\ +\255\255\255\255\255\255\255\255\255\255\084\001\085\001\255\255\ +\087\001\255\255\255\255\005\001\006\001\007\001\255\255\255\255\ +\255\255\011\001\012\001\255\255\255\255\255\255\101\001\255\255\ +\255\255\255\255\255\255\106\001\107\001\255\255\109\001\110\001\ +\255\255\255\255\255\255\114\001\030\001\031\001\032\001\033\001\ +\034\001\255\255\255\255\255\255\255\255\039\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\050\001\255\255\052\001\053\001\054\001\055\001\056\001\255\255\ +\255\255\059\001\255\255\255\255\255\255\063\001\064\001\065\001\ +\255\255\255\255\255\255\069\001\255\255\071\001\072\001\255\255\ +\255\255\255\255\255\255\255\255\078\001\255\255\255\255\255\255\ +\255\255\083\001\084\001\085\001\255\255\087\001\255\255\255\255\ +\005\001\006\001\007\001\255\255\255\255\255\255\011\001\012\001\ +\255\255\255\255\255\255\101\001\255\255\255\255\255\255\255\255\ +\106\001\107\001\255\255\109\001\110\001\255\255\255\255\255\255\ +\114\001\030\001\031\001\032\001\033\001\034\001\255\255\255\255\ +\255\255\255\255\039\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\050\001\255\255\052\001\ +\053\001\054\001\055\001\056\001\255\255\255\255\059\001\255\255\ +\255\255\255\255\063\001\064\001\065\001\255\255\255\255\255\255\ +\069\001\255\255\071\001\072\001\255\255\255\255\255\255\255\255\ +\255\255\078\001\255\255\255\255\255\255\255\255\255\255\084\001\ 
+\085\001\255\255\087\001\255\255\255\255\255\255\255\255\092\001\ +\005\001\006\001\007\001\255\255\255\255\010\001\011\001\012\001\ +\101\001\255\255\255\255\255\255\255\255\106\001\107\001\255\255\ +\109\001\110\001\255\255\255\255\255\255\114\001\255\255\255\255\ +\255\255\030\001\031\001\032\001\033\001\034\001\255\255\255\255\ +\255\255\255\255\039\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\050\001\255\255\052\001\ +\053\001\054\001\055\001\056\001\255\255\255\255\059\001\255\255\ +\255\255\255\255\063\001\064\001\065\001\255\255\255\255\255\255\ +\069\001\255\255\071\001\072\001\255\255\255\255\255\255\255\255\ +\255\255\078\001\255\255\255\255\255\255\255\255\255\255\084\001\ +\085\001\255\255\087\001\255\255\255\255\255\255\005\001\006\001\ +\007\001\255\255\255\255\255\255\011\001\012\001\255\255\255\255\ +\101\001\255\255\255\255\255\255\255\255\106\001\107\001\022\001\ +\109\001\110\001\255\255\255\255\255\255\114\001\255\255\030\001\ +\031\001\032\001\033\001\034\001\255\255\255\255\255\255\255\255\ +\039\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\050\001\255\255\052\001\053\001\054\001\ +\055\001\056\001\255\255\255\255\059\001\255\255\255\255\255\255\ +\063\001\064\001\065\001\255\255\255\255\255\255\069\001\255\255\ +\071\001\072\001\255\255\255\255\255\255\255\255\255\255\078\001\ +\255\255\255\255\255\255\255\255\255\255\084\001\085\001\255\255\ +\087\001\255\255\255\255\005\001\006\001\007\001\255\255\255\255\ +\255\255\011\001\012\001\255\255\255\255\255\255\101\001\255\255\ +\255\255\255\255\255\255\106\001\107\001\255\255\109\001\110\001\ +\026\001\255\255\255\255\114\001\030\001\031\001\032\001\033\001\ +\034\001\255\255\255\255\255\255\255\255\039\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\050\001\255\255\052\001\053\001\054\001\055\001\056\001\255\255\ +\255\255\059\001\255\255\255\255\255\255\063\001\064\001\065\001\ +\255\255\255\255\255\255\069\001\255\255\071\001\072\001\255\255\ +\255\255\255\255\255\255\255\255\078\001\255\255\255\255\255\255\ +\255\255\255\255\084\001\085\001\255\255\087\001\255\255\255\255\ +\005\001\006\001\007\001\255\255\255\255\255\255\011\001\012\001\ +\255\255\255\255\255\255\101\001\255\255\255\255\255\255\255\255\ +\106\001\107\001\255\255\109\001\110\001\255\255\255\255\255\255\ +\114\001\030\001\031\001\032\001\033\001\034\001\255\255\255\255\ +\255\255\255\255\039\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\050\001\255\255\052\001\ +\053\001\054\001\055\001\056\001\255\255\255\255\059\001\255\255\ +\255\255\255\255\063\001\064\001\065\001\255\255\255\255\255\255\ +\069\001\255\255\071\001\072\001\255\255\255\255\255\255\255\255\ +\255\255\078\001\255\255\255\255\255\255\255\255\255\255\084\001\ +\085\001\255\255\087\001\255\255\255\255\005\001\006\001\007\001\ +\255\255\255\255\255\255\011\001\012\001\255\255\255\255\255\255\ +\101\001\255\255\255\255\255\255\255\255\106\001\107\001\255\255\ +\109\001\110\001\255\255\255\255\255\255\114\001\030\001\031\001\ +\032\001\033\001\034\001\255\255\255\255\255\255\255\255\039\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\050\001\255\255\052\001\053\001\054\001\055\001\ +\056\001\255\255\255\255\059\001\255\255\255\255\255\255\063\001\ +\064\001\065\001\255\255\255\255\255\255\069\001\255\255\071\001\ +\072\001\255\255\255\255\255\255\255\255\255\255\078\001\255\255\ 
+\255\255\255\255\255\255\255\255\084\001\085\001\255\255\087\001\ +\255\255\255\255\005\001\006\001\007\001\255\255\255\255\255\255\ +\011\001\012\001\255\255\255\255\255\255\101\001\255\255\255\255\ +\255\255\255\255\106\001\107\001\255\255\109\001\110\001\255\255\ +\255\255\255\255\114\001\030\001\031\001\032\001\033\001\034\001\ +\255\255\255\255\255\255\255\255\039\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\050\001\ +\255\255\052\001\053\001\054\001\055\001\056\001\255\255\255\255\ +\059\001\255\255\255\255\255\255\063\001\064\001\065\001\255\255\ +\255\255\006\001\069\001\255\255\071\001\072\001\255\255\012\001\ +\255\255\014\001\255\255\078\001\017\001\255\255\255\255\255\255\ +\255\255\084\001\085\001\255\255\087\001\255\255\027\001\255\255\ +\255\255\030\001\031\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\101\001\255\255\255\255\255\255\255\255\106\001\ +\107\001\255\255\109\001\110\001\255\255\050\001\051\001\114\001\ +\053\001\255\255\055\001\056\001\255\255\255\255\059\001\255\255\ +\255\255\255\255\255\255\064\001\065\001\255\255\006\001\255\255\ +\255\255\255\255\071\001\255\255\012\001\255\255\014\001\255\255\ +\255\255\017\001\255\255\255\255\081\001\255\255\255\255\084\001\ +\255\255\255\255\255\255\027\001\089\001\255\255\030\001\031\001\ +\255\255\006\001\255\255\255\255\097\001\255\255\255\255\012\001\ +\101\001\014\001\255\255\104\001\255\255\106\001\255\255\255\255\ +\109\001\110\001\050\001\051\001\255\255\053\001\255\255\055\001\ +\056\001\030\001\031\001\059\001\255\255\255\255\255\255\255\255\ +\064\001\065\001\255\255\255\255\255\255\255\255\255\255\071\001\ +\255\255\255\255\255\255\255\255\255\255\050\001\051\001\255\255\ +\053\001\081\001\055\001\056\001\084\001\255\255\059\001\255\255\ +\255\255\089\001\255\255\064\001\065\001\255\255\255\255\255\255\ +\255\255\097\001\071\001\255\255\073\001\101\001\255\255\255\255\ +\104\001\255\255\106\001\255\255\081\001\109\001\110\001\084\001\ +\255\255\255\255\006\001\255\255\089\001\255\255\255\255\255\255\ +\012\001\255\255\014\001\255\255\097\001\255\255\255\255\255\255\ +\101\001\255\255\255\255\104\001\255\255\106\001\255\255\027\001\ +\109\001\110\001\030\001\031\001\255\255\006\001\255\255\255\255\ +\255\255\255\255\255\255\012\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\050\001\051\001\ +\255\255\053\001\255\255\055\001\056\001\030\001\031\001\059\001\ +\255\255\255\255\255\255\255\255\064\001\065\001\255\255\255\255\ +\255\255\255\255\255\255\071\001\255\255\255\255\255\255\255\255\ +\255\255\050\001\051\001\255\255\053\001\081\001\055\001\056\001\ +\084\001\255\255\059\001\255\255\255\255\089\001\255\255\064\001\ +\065\001\255\255\006\001\255\255\255\255\097\001\071\001\255\255\ +\012\001\101\001\255\255\255\255\104\001\255\255\106\001\255\255\ +\081\001\109\001\110\001\084\001\255\255\255\255\255\255\255\255\ +\089\001\255\255\030\001\031\001\255\255\255\255\255\255\255\255\ +\097\001\255\255\255\255\255\255\101\001\255\255\255\255\104\001\ +\255\255\106\001\255\255\255\255\109\001\110\001\050\001\051\001\ +\255\255\053\001\255\255\055\001\056\001\255\255\255\255\059\001\ +\255\255\255\255\255\255\255\255\064\001\065\001\255\255\255\255\ +\006\001\255\255\255\255\071\001\255\255\255\255\012\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\081\001\255\255\255\255\ +\084\001\255\255\255\255\255\255\255\255\089\001\028\001\255\255\ +\030\001\031\001\255\255\255\255\255\255\097\001\255\255\255\255\ 
+\255\255\101\001\255\255\255\255\104\001\255\255\106\001\255\255\ +\255\255\109\001\110\001\255\255\050\001\255\255\052\001\053\001\ +\255\255\055\001\056\001\255\255\255\255\059\001\255\255\255\255\ +\255\255\255\255\064\001\065\001\255\255\255\255\255\255\006\001\ +\255\255\071\001\255\255\010\001\255\255\012\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\084\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\028\001\092\001\030\001\ +\031\001\255\255\255\255\097\001\255\255\255\255\255\255\101\001\ +\255\255\255\255\255\255\255\255\106\001\255\255\255\255\109\001\ +\110\001\255\255\255\255\050\001\255\255\052\001\053\001\255\255\ +\055\001\056\001\255\255\255\255\059\001\255\255\255\255\255\255\ +\255\255\064\001\065\001\255\255\006\001\255\255\255\255\255\255\ +\071\001\255\255\012\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\006\001\007\001\255\255\255\255\084\001\011\001\012\001\ +\255\255\255\255\028\001\255\255\030\001\031\001\255\255\255\255\ +\255\255\255\255\097\001\255\255\255\255\255\255\101\001\255\255\ +\255\255\030\001\031\001\106\001\255\255\255\255\109\001\110\001\ +\050\001\255\255\052\001\053\001\255\255\055\001\056\001\255\255\ +\255\255\059\001\255\255\255\255\255\255\050\001\064\001\065\001\ +\053\001\054\001\055\001\056\001\255\255\071\001\059\001\255\255\ +\006\001\255\255\008\001\064\001\065\001\255\255\012\001\255\255\ +\255\255\255\255\084\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\092\001\255\255\255\255\255\255\028\001\097\001\ +\030\001\031\001\087\001\101\001\255\255\255\255\255\255\255\255\ +\106\001\255\255\255\255\109\001\110\001\255\255\255\255\255\255\ +\101\001\255\255\255\255\255\255\050\001\106\001\052\001\053\001\ +\109\001\055\001\056\001\255\255\255\255\059\001\255\255\255\255\ +\255\255\255\255\064\001\065\001\255\255\006\001\255\255\255\255\ +\255\255\071\001\255\255\012\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\084\001\255\255\ +\255\255\255\255\255\255\028\001\255\255\030\001\031\001\255\255\ +\255\255\255\255\255\255\097\001\255\255\255\255\255\255\101\001\ +\255\255\255\255\255\255\255\255\106\001\255\255\255\255\109\001\ +\110\001\050\001\255\255\052\001\053\001\255\255\055\001\056\001\ +\255\255\255\255\059\001\255\255\255\255\255\255\255\255\064\001\ +\065\001\255\255\006\001\255\255\255\255\255\255\071\001\255\255\ +\012\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\084\001\255\255\255\255\255\255\255\255\ +\028\001\255\255\030\001\031\001\255\255\006\001\255\255\255\255\ +\097\001\255\255\255\255\012\001\101\001\255\255\255\255\255\255\ +\255\255\106\001\255\255\255\255\109\001\110\001\050\001\255\255\ +\052\001\053\001\255\255\055\001\056\001\030\001\031\001\059\001\ +\255\255\255\255\255\255\255\255\064\001\065\001\255\255\255\255\ +\255\255\255\255\255\255\071\001\255\255\255\255\255\255\255\255\ +\255\255\050\001\255\255\052\001\053\001\255\255\055\001\056\001\ +\084\001\255\255\059\001\255\255\255\255\255\255\255\255\064\001\ +\065\001\255\255\006\001\255\255\255\255\097\001\071\001\255\255\ +\012\001\101\001\255\255\255\255\255\255\255\255\106\001\255\255\ +\255\255\109\001\110\001\084\001\255\255\255\255\255\255\255\255\ +\028\001\255\255\030\001\031\001\093\001\006\001\255\255\255\255\ +\097\001\255\255\255\255\012\001\101\001\255\255\255\255\255\255\ +\255\255\106\001\255\255\255\255\109\001\110\001\050\001\255\255\ +\052\001\053\001\255\255\055\001\056\001\030\001\031\001\059\001\ 
+\255\255\255\255\255\255\255\255\064\001\065\001\255\255\255\255\ +\255\255\255\255\255\255\071\001\255\255\255\255\255\255\255\255\ +\255\255\050\001\255\255\052\001\053\001\255\255\055\001\056\001\ +\084\001\255\255\059\001\255\255\255\255\255\255\255\255\064\001\ +\065\001\255\255\006\001\255\255\255\255\097\001\071\001\255\255\ +\012\001\101\001\255\255\255\255\255\255\255\255\106\001\255\255\ +\255\255\109\001\110\001\084\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\030\001\031\001\255\255\006\001\255\255\255\255\ +\097\001\255\255\255\255\012\001\101\001\255\255\255\255\255\255\ +\255\255\106\001\255\255\255\255\109\001\110\001\050\001\255\255\ +\052\001\053\001\255\255\055\001\056\001\030\001\031\001\059\001\ +\255\255\255\255\255\255\255\255\064\001\065\001\255\255\255\255\ +\255\255\255\255\255\255\071\001\255\255\255\255\255\255\255\255\ +\255\255\050\001\255\255\255\255\053\001\255\255\055\001\056\001\ +\084\001\255\255\059\001\255\255\255\255\255\255\255\255\064\001\ +\065\001\255\255\006\001\255\255\255\255\097\001\071\001\255\255\ +\012\001\101\001\255\255\255\255\255\255\255\255\106\001\255\255\ +\255\255\109\001\110\001\084\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\030\001\031\001\255\255\255\255\255\255\255\255\ +\097\001\255\255\255\255\255\255\101\001\255\255\255\255\255\255\ +\255\255\106\001\255\255\255\255\109\001\110\001\050\001\255\255\ +\255\255\053\001\255\255\055\001\056\001\255\255\255\255\059\001\ +\255\255\255\255\255\255\255\255\064\001\065\001\255\255\006\001\ +\007\001\255\255\255\255\071\001\011\001\012\001\006\001\007\001\ +\255\255\255\255\255\255\011\001\012\001\255\255\255\255\022\001\ +\084\001\255\255\255\255\255\255\255\255\255\255\255\255\030\001\ +\031\001\255\255\255\255\255\255\255\255\097\001\030\001\031\001\ +\255\255\101\001\255\255\255\255\255\255\255\255\106\001\255\255\ +\047\001\109\001\110\001\050\001\051\001\255\255\053\001\054\001\ +\055\001\056\001\050\001\051\001\059\001\053\001\054\001\055\001\ +\056\001\064\001\065\001\059\001\255\255\255\255\255\255\255\255\ +\064\001\065\001\255\255\255\255\255\255\255\255\255\255\006\001\ +\007\001\255\255\081\001\255\255\011\001\012\001\255\255\255\255\ +\087\001\081\001\089\001\255\255\255\255\255\255\255\255\087\001\ +\255\255\089\001\097\001\098\001\255\255\255\255\101\001\030\001\ +\031\001\104\001\255\255\106\001\255\255\101\001\109\001\255\255\ +\104\001\255\255\106\001\255\255\255\255\109\001\255\255\255\255\ +\255\255\255\255\255\255\050\001\255\255\255\255\053\001\054\001\ +\055\001\056\001\255\255\255\255\059\001\255\255\255\255\255\255\ +\255\255\064\001\065\001\255\255\255\255\000\001\001\001\002\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\009\001\255\255\ +\255\255\255\255\255\255\014\001\015\001\016\001\017\001\018\001\ +\087\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\027\001\255\255\255\255\255\255\255\255\255\255\101\001\255\255\ +\255\255\036\001\255\255\106\001\255\255\255\255\109\001\042\001\ +\043\001\044\001\045\001\046\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\061\001\255\255\015\001\255\255\255\255\066\001\ +\255\255\255\255\255\255\255\255\071\001\072\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\082\001\ +\083\001\084\001\085\001\086\001\255\255\000\001\001\001\002\001\ +\255\255\255\255\255\255\094\001\007\001\255\255\009\001\255\255\ +\255\255\100\001\255\255\255\255\055\001\016\001\057\001\058\001\ 
+\059\001\255\255\061\001\255\255\255\255\064\001\065\001\255\255\ +\027\001\255\255\255\255\255\255\255\255\255\255\255\255\074\001\ +\255\255\036\001\255\255\255\255\255\255\255\255\081\001\042\001\ +\043\001\044\001\045\001\046\001\047\001\255\255\089\001\090\001\ +\255\255\255\255\255\255\094\001\255\255\255\255\097\001\255\255\ +\255\255\255\255\061\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\109\001\110\001\071\001\072\001\255\255\074\001\ +\255\255\255\255\255\255\255\255\000\001\001\001\002\001\082\001\ +\083\001\084\001\085\001\086\001\087\001\009\001\255\255\255\255\ +\255\255\255\255\255\255\015\001\016\001\255\255\018\001\098\001\ +\255\255\100\001\255\255\255\255\255\255\255\255\255\255\027\001\ +\255\255\255\255\255\255\255\255\000\001\001\001\002\001\255\255\ +\036\001\255\255\255\255\255\255\255\255\009\001\042\001\043\001\ +\044\001\045\001\046\001\015\001\016\001\255\255\018\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\027\001\ +\255\255\061\001\255\255\255\255\255\255\255\255\066\001\255\255\ +\036\001\255\255\255\255\071\001\072\001\255\255\042\001\043\001\ +\044\001\045\001\046\001\255\255\255\255\255\255\082\001\083\001\ +\084\001\085\001\086\001\255\255\255\255\255\255\255\255\091\001\ +\255\255\061\001\255\255\255\255\255\255\255\255\066\001\255\255\ +\100\001\255\255\255\255\071\001\072\001\255\255\255\255\255\255\ +\255\255\255\255\000\001\001\001\002\001\255\255\082\001\083\001\ +\084\001\085\001\086\001\009\001\255\255\255\255\255\255\255\255\ +\092\001\015\001\016\001\255\255\018\001\255\255\255\255\255\255\ +\100\001\255\255\255\255\255\255\255\255\027\001\255\255\255\255\ +\255\255\255\255\000\001\001\001\002\001\255\255\036\001\255\255\ +\255\255\255\255\255\255\009\001\042\001\043\001\044\001\045\001\ +\046\001\015\001\016\001\255\255\018\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\027\001\255\255\061\001\ +\255\255\255\255\255\255\255\255\066\001\255\255\036\001\255\255\ +\255\255\071\001\072\001\255\255\042\001\043\001\044\001\045\001\ +\046\001\255\255\255\255\255\255\082\001\083\001\084\001\085\001\ +\086\001\255\255\255\255\255\255\255\255\255\255\255\255\061\001\ +\094\001\255\255\255\255\255\255\066\001\255\255\100\001\255\255\ +\255\255\071\001\072\001\255\255\255\255\255\255\255\255\255\255\ +\000\001\001\001\002\001\255\255\082\001\083\001\084\001\085\001\ +\086\001\009\001\255\255\255\255\255\255\091\001\255\255\015\001\ +\016\001\255\255\018\001\255\255\255\255\255\255\100\001\255\255\ +\255\255\255\255\255\255\027\001\255\255\255\255\255\255\255\255\ +\000\001\001\001\002\001\255\255\036\001\255\255\255\255\255\255\ +\255\255\009\001\042\001\043\001\044\001\045\001\046\001\015\001\ +\016\001\255\255\018\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\027\001\255\255\061\001\255\255\255\255\ +\255\255\255\255\066\001\255\255\036\001\255\255\255\255\071\001\ +\072\001\255\255\042\001\043\001\044\001\045\001\046\001\255\255\ +\255\255\255\255\082\001\083\001\084\001\085\001\086\001\255\255\ +\255\255\255\255\255\255\255\255\092\001\061\001\255\255\255\255\ +\255\255\255\255\066\001\255\255\100\001\255\255\255\255\071\001\ +\072\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\082\001\083\001\084\001\085\001\086\001\000\001\ +\001\001\002\001\255\255\255\255\255\255\255\255\094\001\255\255\ +\009\001\255\255\255\255\255\255\100\001\255\255\015\001\016\001\ +\255\255\018\001\255\255\255\255\255\255\255\255\255\255\255\255\ 
+\255\255\255\255\027\001\255\255\255\255\255\255\255\255\000\001\ +\001\001\002\001\255\255\036\001\255\255\255\255\255\255\255\255\ +\009\001\042\001\043\001\044\001\045\001\046\001\015\001\016\001\ +\255\255\018\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\027\001\255\255\061\001\255\255\255\255\255\255\ +\255\255\066\001\255\255\036\001\255\255\255\255\071\001\072\001\ +\255\255\042\001\043\001\044\001\045\001\046\001\255\255\255\255\ +\255\255\082\001\083\001\084\001\085\001\086\001\255\255\255\255\ +\255\255\255\255\091\001\255\255\061\001\255\255\255\255\255\255\ +\255\255\066\001\255\255\100\001\255\255\255\255\071\001\072\001\ +\255\255\255\255\255\255\255\255\255\255\000\001\001\001\002\001\ +\255\255\082\001\083\001\084\001\085\001\086\001\009\001\255\255\ +\255\255\255\255\255\255\092\001\015\001\016\001\255\255\018\001\ +\255\255\255\255\255\255\100\001\255\255\255\255\255\255\255\255\ +\027\001\255\255\255\255\255\255\255\255\000\001\001\001\002\001\ +\255\255\036\001\255\255\255\255\255\255\255\255\009\001\042\001\ +\043\001\044\001\045\001\046\001\015\001\016\001\255\255\018\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\027\001\255\255\061\001\255\255\255\255\255\255\255\255\066\001\ +\255\255\036\001\255\255\255\255\071\001\072\001\255\255\042\001\ +\043\001\044\001\045\001\046\001\255\255\255\255\255\255\082\001\ +\083\001\084\001\085\001\086\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\061\001\094\001\255\255\255\255\255\255\066\001\ +\255\255\100\001\255\255\255\255\071\001\072\001\255\255\255\255\ +\255\255\255\255\255\255\000\001\001\001\002\001\255\255\082\001\ +\083\001\084\001\085\001\086\001\009\001\255\255\255\255\255\255\ +\091\001\255\255\015\001\016\001\255\255\018\001\255\255\255\255\ +\255\255\100\001\255\255\255\255\255\255\255\255\027\001\255\255\ +\255\255\255\255\255\255\000\001\001\001\002\001\255\255\036\001\ +\255\255\255\255\255\255\255\255\009\001\042\001\043\001\044\001\ +\045\001\046\001\015\001\016\001\255\255\018\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\027\001\255\255\ +\061\001\255\255\255\255\255\255\255\255\066\001\255\255\036\001\ +\255\255\255\255\071\001\072\001\255\255\042\001\043\001\044\001\ +\045\001\046\001\255\255\255\255\255\255\082\001\083\001\084\001\ +\085\001\086\001\255\255\255\255\255\255\255\255\255\255\092\001\ +\061\001\001\001\002\001\255\255\255\255\066\001\255\255\100\001\ +\255\255\009\001\071\001\072\001\255\255\255\255\255\255\015\001\ +\016\001\255\255\018\001\255\255\255\255\082\001\083\001\084\001\ +\085\001\086\001\255\255\027\001\255\255\255\255\255\255\255\255\ +\255\255\094\001\255\255\255\255\036\001\255\255\255\255\100\001\ +\255\255\255\255\042\001\043\001\044\001\045\001\046\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\061\001\255\255\255\255\ +\255\255\255\255\066\001\255\255\255\255\255\255\255\255\071\001\ +\072\001\001\001\002\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\009\001\082\001\083\001\084\001\085\001\086\001\015\001\ +\016\001\255\255\018\001\255\255\255\255\255\255\255\255\095\001\ +\255\255\025\001\255\255\027\001\100\001\255\255\255\255\255\255\ +\255\255\001\001\002\001\255\255\036\001\255\255\255\255\255\255\ +\255\255\009\001\042\001\043\001\044\001\045\001\046\001\015\001\ +\016\001\255\255\018\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\027\001\255\255\061\001\255\255\255\255\ 
+\255\255\255\255\066\001\255\255\036\001\255\255\255\255\071\001\ +\072\001\255\255\042\001\043\001\044\001\045\001\046\001\255\255\ +\255\255\255\255\082\001\083\001\084\001\085\001\086\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\061\001\001\001\002\001\ +\255\255\255\255\066\001\255\255\100\001\255\255\009\001\071\001\ +\072\001\255\255\255\255\255\255\015\001\255\255\255\255\255\255\ +\255\255\255\255\082\001\083\001\084\001\085\001\086\001\255\255\ +\027\001\255\255\255\255\255\255\255\255\255\255\001\001\002\001\ +\255\255\036\001\255\255\255\255\100\001\255\255\255\255\042\001\ +\043\001\044\001\045\001\046\001\015\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\027\001\255\255\061\001\255\255\255\255\255\255\255\255\066\001\ +\255\255\036\001\255\255\255\255\071\001\072\001\255\255\042\001\ +\043\001\044\001\045\001\046\001\013\001\255\255\255\255\082\001\ +\083\001\084\001\085\001\086\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\061\001\028\001\029\001\255\255\255\255\066\001\ +\255\255\100\001\255\255\255\255\071\001\072\001\255\255\255\255\ +\041\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\083\001\084\001\085\001\086\001\255\255\255\255\055\001\255\255\ +\057\001\058\001\059\001\060\001\061\001\255\255\255\255\064\001\ +\065\001\100\001\255\255\068\001\255\255\255\255\255\255\255\255\ +\255\255\074\001\255\255\255\255\255\255\255\255\255\255\080\001\ +\081\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\089\001\090\001\255\255\255\255\255\255\255\255\255\255\096\001\ +\097\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\108\001\109\001\110\001\111\001" + +let yynames_const = "\ + AMPERAMPER\000\ + AMPERSAND\000\ + AND\000\ + AS\000\ + ASSERT\000\ + BACKQUOTE\000\ + BANG\000\ + BAR\000\ + BARBAR\000\ + BARRBRACKET\000\ + BEGIN\000\ + CLASS\000\ + COLON\000\ + COLONCOLON\000\ + COLONEQUAL\000\ + COLONGREATER\000\ + COMMA\000\ + CONSTRAINT\000\ + DO\000\ + DONE\000\ + DOT\000\ + DOTDOT\000\ + DOWNTO\000\ + ELSE\000\ + END\000\ + EOF\000\ + EQUAL\000\ + EXCEPTION\000\ + EXTERNAL\000\ + FALSE\000\ + FOR\000\ + FUN\000\ + FUNCTION\000\ + FUNCTOR\000\ + GREATER\000\ + GREATERRBRACE\000\ + GREATERRBRACKET\000\ + IF\000\ + IN\000\ + INCLUDE\000\ + INHERIT\000\ + INITIALIZER\000\ + LAZY\000\ + LBRACE\000\ + LBRACELESS\000\ + LBRACKET\000\ + LBRACKETBAR\000\ + LBRACKETLESS\000\ + LBRACKETGREATER\000\ + LBRACKETPERCENT\000\ + LBRACKETPERCENTPERCENT\000\ + LESS\000\ + LESSMINUS\000\ + LET\000\ + LPAREN\000\ + LBRACKETAT\000\ + LBRACKETATAT\000\ + LBRACKETATATAT\000\ + MATCH\000\ + METHOD\000\ + MINUS\000\ + MINUSDOT\000\ + MINUSGREATER\000\ + MODULE\000\ + MUTABLE\000\ + NEW\000\ + NONREC\000\ + OBJECT\000\ + OF\000\ + OPEN\000\ + OR\000\ + PERCENT\000\ + PLUS\000\ + PLUSDOT\000\ + PLUSEQ\000\ + PRIVATE\000\ + QUESTION\000\ + QUOTE\000\ + RBRACE\000\ + RBRACKET\000\ + REC\000\ + RPAREN\000\ + SEMI\000\ + SEMISEMI\000\ + HASH\000\ + SIG\000\ + STAR\000\ + STRUCT\000\ + THEN\000\ + TILDE\000\ + TO\000\ + TRUE\000\ + TRY\000\ + TYPE\000\ + UNDERSCORE\000\ + VAL\000\ + VIRTUAL\000\ + WHEN\000\ + WHILE\000\ + WITH\000\ + EOL\000\ + " + +let yynames_block = "\ + CHAR\000\ + FLOAT\000\ + INFIXOP0\000\ + INFIXOP1\000\ + INFIXOP2\000\ + INFIXOP3\000\ + INFIXOP4\000\ + DOTOP\000\ + INT\000\ + LABEL\000\ + LIDENT\000\ + OPTLABEL\000\ + PREFIXOP\000\ + HASHOP\000\ + STRING\000\ + UIDENT\000\ + COMMENT\000\ + DOCSTRING\000\ + " + +let yyact = [| + (fun _ -> 
failwith "parser") +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'structure) in + Obj.repr( +# 568 "ml/parser.mly" + ( extra_str 1 _1 ) +# 6360 "ml/parser.ml" + : Parsetree.structure)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'signature) in + Obj.repr( +# 571 "ml/parser.mly" + ( extra_sig 1 _1 ) +# 6367 "ml/parser.ml" + : Parsetree.signature)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'core_type) in + Obj.repr( +# 576 "ml/parser.mly" + ( _1 ) +# 6374 "ml/parser.ml" + : Parsetree.core_type)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'seq_expr) in + Obj.repr( +# 579 "ml/parser.mly" + ( _1 ) +# 6381 "ml/parser.ml" + : Parsetree.expression)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'pattern) in + Obj.repr( +# 582 "ml/parser.mly" + ( _1 ) +# 6388 "ml/parser.ml" + : Parsetree.pattern)) +; (fun __caml_parser_env -> + Obj.repr( +# 589 "ml/parser.mly" + ( mkrhs "*" 2, None ) +# 6394 "ml/parser.ml" + : 'functor_arg)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'functor_arg_name) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'module_type) in + Obj.repr( +# 591 "ml/parser.mly" + ( mkrhs _2 2, Some _4 ) +# 6402 "ml/parser.ml" + : 'functor_arg)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 595 "ml/parser.mly" + ( _1 ) +# 6409 "ml/parser.ml" + : 'functor_arg_name)) +; (fun __caml_parser_env -> + Obj.repr( +# 596 "ml/parser.mly" + ( "_" ) +# 6415 "ml/parser.ml" + : 'functor_arg_name)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'functor_args) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'functor_arg) in + Obj.repr( +# 601 "ml/parser.mly" + ( _2 :: _1 ) +# 6423 "ml/parser.ml" + : 'functor_args)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'functor_arg) in + Obj.repr( +# 603 "ml/parser.mly" + ( [ _1 ] ) +# 6430 "ml/parser.ml" + : 'functor_args)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'mod_longident) in + Obj.repr( +# 608 "ml/parser.mly" + ( mkmod(Pmod_ident (mkrhs _1 1)) ) +# 6437 "ml/parser.ml" + : 'module_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'structure) in + Obj.repr( +# 610 "ml/parser.mly" + ( mkmod ~attrs:_2 (Pmod_structure(extra_str 3 _3)) ) +# 6445 "ml/parser.ml" + : 'module_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'structure) in + Obj.repr( +# 612 "ml/parser.mly" + ( unclosed "struct" 1 "end" 4 ) +# 6453 "ml/parser.ml" + : 'module_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'functor_args) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'module_expr) in + Obj.repr( +# 614 "ml/parser.mly" + ( let modexp = + List.fold_left + (fun acc (n, t) -> mkmod(Pmod_functor(n, t, acc))) + _5 _3 + in wrap_mod_attrs modexp _2 ) +# 6466 "ml/parser.ml" + : 'module_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'module_expr) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'paren_module_expr) in + Obj.repr( +# 620 "ml/parser.mly" 
+ ( mkmod(Pmod_apply(_1, _2)) ) +# 6474 "ml/parser.ml" + : 'module_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'module_expr) in + Obj.repr( +# 622 "ml/parser.mly" + ( mkmod(Pmod_apply(_1, mkmod (Pmod_structure []))) ) +# 6481 "ml/parser.ml" + : 'module_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'paren_module_expr) in + Obj.repr( +# 624 "ml/parser.mly" + ( _1 ) +# 6488 "ml/parser.ml" + : 'module_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'module_expr) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'attribute) in + Obj.repr( +# 626 "ml/parser.mly" + ( Mod.attr _1 _2 ) +# 6496 "ml/parser.ml" + : 'module_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'extension) in + Obj.repr( +# 628 "ml/parser.mly" + ( mkmod(Pmod_extension _1) ) +# 6503 "ml/parser.ml" + : 'module_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'module_expr) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'module_type) in + Obj.repr( +# 633 "ml/parser.mly" + ( mkmod(Pmod_constraint(_2, _4)) ) +# 6511 "ml/parser.ml" + : 'paren_module_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'module_expr) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'module_type) in + Obj.repr( +# 635 "ml/parser.mly" + ( unclosed "(" 1 ")" 5 ) +# 6519 "ml/parser.ml" + : 'paren_module_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'module_expr) in + Obj.repr( +# 637 "ml/parser.mly" + ( _2 ) +# 6526 "ml/parser.ml" + : 'paren_module_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'module_expr) in + Obj.repr( +# 639 "ml/parser.mly" + ( unclosed "(" 1 ")" 3 ) +# 6533 "ml/parser.ml" + : 'paren_module_expr)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in + Obj.repr( +# 641 "ml/parser.mly" + ( mkmod ~attrs:_3 (Pmod_unpack _4)) +# 6541 "ml/parser.ml" + : 'paren_module_expr)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : 'expr) in + let _6 = (Parsing.peek_val __caml_parser_env 1 : 'package_type) in + Obj.repr( +# 643 "ml/parser.mly" + ( mkmod ~attrs:_3 + (Pmod_unpack( + ghexp(Pexp_constraint(_4, ghtyp(Ptyp_package _6))))) ) +# 6552 "ml/parser.ml" + : 'paren_module_expr)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 6 : 'attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 5 : 'expr) in + let _6 = (Parsing.peek_val __caml_parser_env 3 : 'package_type) in + let _8 = (Parsing.peek_val __caml_parser_env 1 : 'package_type) in + Obj.repr( +# 648 "ml/parser.mly" + ( mkmod ~attrs:_3 + (Pmod_unpack( + ghexp(Pexp_coerce(_4, Some(ghtyp(Ptyp_package _6)), + ghtyp(Ptyp_package _8))))) ) +# 6565 "ml/parser.ml" + : 'paren_module_expr)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : 'expr) in + let _6 = (Parsing.peek_val __caml_parser_env 1 : 'package_type) in + Obj.repr( +# 653 "ml/parser.mly" + ( mkmod ~attrs:_3 + (Pmod_unpack( + ghexp(Pexp_coerce(_4, None, ghtyp(Ptyp_package _6))))) ) +# 6576 "ml/parser.ml" + : 'paren_module_expr)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val 
__caml_parser_env 3 : 'attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + Obj.repr( +# 657 "ml/parser.mly" + ( unclosed "(" 1 ")" 6 ) +# 6584 "ml/parser.ml" + : 'paren_module_expr)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 3 : 'attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + Obj.repr( +# 659 "ml/parser.mly" + ( unclosed "(" 1 ")" 6 ) +# 6592 "ml/parser.ml" + : 'paren_module_expr)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in + Obj.repr( +# 661 "ml/parser.mly" + ( unclosed "(" 1 ")" 5 ) +# 6600 "ml/parser.ml" + : 'paren_module_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'seq_expr) in + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'post_item_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'structure_tail) in + Obj.repr( +# 666 "ml/parser.mly" + ( mark_rhs_docs 1 2; + (text_str 1) @ mkstrexp _1 _2 :: _3 ) +# 6610 "ml/parser.ml" + : 'structure)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'structure_tail) in + Obj.repr( +# 668 "ml/parser.mly" + ( _1 ) +# 6617 "ml/parser.ml" + : 'structure)) +; (fun __caml_parser_env -> + Obj.repr( +# 671 "ml/parser.mly" + ( [] ) +# 6623 "ml/parser.ml" + : 'structure_tail)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'structure) in + Obj.repr( +# 672 "ml/parser.mly" + ( (text_str 1) @ _2 ) +# 6630 "ml/parser.ml" + : 'structure_tail)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'structure_item) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'structure_tail) in + Obj.repr( +# 673 "ml/parser.mly" + ( (text_str 1) @ _1 :: _2 ) +# 6638 "ml/parser.ml" + : 'structure_tail)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'let_bindings) in + Obj.repr( +# 677 "ml/parser.mly" + ( val_of_let_bindings _1 ) +# 6645 "ml/parser.ml" + : 'structure_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'primitive_declaration) in + Obj.repr( +# 679 "ml/parser.mly" + ( let (body, ext) = _1 in mkstr_ext (Pstr_primitive body) ext ) +# 6652 "ml/parser.ml" + : 'structure_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'value_description) in + Obj.repr( +# 681 "ml/parser.mly" + ( let (body, ext) = _1 in mkstr_ext (Pstr_primitive body) ext ) +# 6659 "ml/parser.ml" + : 'structure_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'type_declarations) in + Obj.repr( +# 683 "ml/parser.mly" + ( let (nr, l, ext ) = _1 in mkstr_ext (Pstr_type (nr, List.rev l)) ext ) +# 6666 "ml/parser.ml" + : 'structure_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'str_type_extension) in + Obj.repr( +# 685 "ml/parser.mly" + ( let (l, ext) = _1 in mkstr_ext (Pstr_typext l) ext ) +# 6673 "ml/parser.ml" + : 'structure_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'str_exception_declaration) in + Obj.repr( +# 687 "ml/parser.mly" + ( let (l, ext) = _1 in mkstr_ext (Pstr_exception l) ext ) +# 6680 "ml/parser.ml" + : 'structure_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'module_binding) in + Obj.repr( +# 689 "ml/parser.mly" + ( let (body, ext) = _1 in mkstr_ext (Pstr_module 
body) ext ) +# 6687 "ml/parser.ml" + : 'structure_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'rec_module_bindings) in + Obj.repr( +# 691 "ml/parser.mly" + ( let (l, ext) = _1 in mkstr_ext (Pstr_recmodule(List.rev l)) ext ) +# 6694 "ml/parser.ml" + : 'structure_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'module_type_declaration) in + Obj.repr( +# 693 "ml/parser.mly" + ( let (body, ext) = _1 in mkstr_ext (Pstr_modtype body) ext ) +# 6701 "ml/parser.ml" + : 'structure_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'open_statement) in + Obj.repr( +# 695 "ml/parser.mly" + ( let (body, ext) = _1 in mkstr_ext (Pstr_open body) ext ) +# 6708 "ml/parser.ml" + : 'structure_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'class_type_declarations) in + Obj.repr( +# 697 "ml/parser.mly" + ( let (l, ext) = _1 in mkstr_ext (Pstr_class_type (List.rev l)) ext ) +# 6715 "ml/parser.ml" + : 'structure_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'str_include_statement) in + Obj.repr( +# 699 "ml/parser.mly" + ( let (body, ext) = _1 in mkstr_ext (Pstr_include body) ext ) +# 6722 "ml/parser.ml" + : 'structure_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'item_extension) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 701 "ml/parser.mly" + ( mkstr(Pstr_extension (_1, (add_docs_attrs (symbol_docs ()) _2))) ) +# 6730 "ml/parser.ml" + : 'structure_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'floating_attribute) in + Obj.repr( +# 703 "ml/parser.mly" + ( mark_symbol_docs (); + mkstr(Pstr_attribute _1) ) +# 6738 "ml/parser.ml" + : 'structure_item)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'module_expr) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 708 "ml/parser.mly" + ( let (ext, attrs) = _2 in + Incl.mk _3 ~attrs:(attrs@_4) + ~loc:(symbol_rloc()) ~docs:(symbol_docs ()) + , ext ) +# 6750 "ml/parser.ml" + : 'str_include_statement)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'module_expr) in + Obj.repr( +# 715 "ml/parser.mly" + ( _2 ) +# 6757 "ml/parser.ml" + : 'module_binding_body)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'module_type) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'module_expr) in + Obj.repr( +# 717 "ml/parser.mly" + ( mkmod(Pmod_constraint(_4, _2)) ) +# 6765 "ml/parser.ml" + : 'module_binding_body)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'functor_arg) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'module_binding_body) in + Obj.repr( +# 719 "ml/parser.mly" + ( mkmod(Pmod_functor(fst _1, snd _1, _2)) ) +# 6773 "ml/parser.ml" + : 'module_binding_body)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 2 : string) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'module_binding_body) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 723 "ml/parser.mly" + ( let (ext, attrs) = _2 in + Mb.mk (mkrhs _3 3) _4 ~attrs:(attrs@_5) + ~loc:(symbol_rloc 
()) ~docs:(symbol_docs ()) + , ext ) +# 6786 "ml/parser.ml" + : 'module_binding)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'rec_module_binding) in + Obj.repr( +# 729 "ml/parser.mly" + ( let (b, ext) = _1 in ([b], ext) ) +# 6793 "ml/parser.ml" + : 'rec_module_bindings)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'rec_module_bindings) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'and_module_binding) in + Obj.repr( +# 731 "ml/parser.mly" + ( let (l, ext) = _1 in (_2 :: l, ext) ) +# 6801 "ml/parser.ml" + : 'rec_module_bindings)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 4 : 'ext_attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 2 : string) in + let _5 = (Parsing.peek_val __caml_parser_env 1 : 'module_binding_body) in + let _6 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 735 "ml/parser.mly" + ( let (ext, attrs) = _2 in + Mb.mk (mkrhs _4 4) _5 ~attrs:(attrs@_6) + ~loc:(symbol_rloc ()) ~docs:(symbol_docs ()) + , ext ) +# 6814 "ml/parser.ml" + : 'rec_module_binding)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 2 : string) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'module_binding_body) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 742 "ml/parser.mly" + ( Mb.mk (mkrhs _3 3) _4 ~attrs:(_2@_5) ~loc:(symbol_rloc ()) + ~text:(symbol_text ()) ~docs:(symbol_docs ()) ) +# 6825 "ml/parser.ml" + : 'and_module_binding)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'mty_longident) in + Obj.repr( +# 750 "ml/parser.mly" + ( mkmty(Pmty_ident (mkrhs _1 1)) ) +# 6832 "ml/parser.ml" + : 'module_type)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'signature) in + Obj.repr( +# 752 "ml/parser.mly" + ( mkmty ~attrs:_2 (Pmty_signature (extra_sig 3 _3)) ) +# 6840 "ml/parser.ml" + : 'module_type)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'signature) in + Obj.repr( +# 754 "ml/parser.mly" + ( unclosed "sig" 1 "end" 4 ) +# 6848 "ml/parser.ml" + : 'module_type)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'functor_args) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'module_type) in + Obj.repr( +# 757 "ml/parser.mly" + ( let mty = + List.fold_left + (fun acc (n, t) -> mkmty(Pmty_functor(n, t, acc))) + _5 _3 + in wrap_mty_attrs mty _2 ) +# 6861 "ml/parser.ml" + : 'module_type)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'module_type) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'module_type) in + Obj.repr( +# 764 "ml/parser.mly" + ( mkmty(Pmty_functor(mknoloc "_", Some _1, _3)) ) +# 6869 "ml/parser.ml" + : 'module_type)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'module_type) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'with_constraints) in + Obj.repr( +# 766 "ml/parser.mly" + ( mkmty(Pmty_with(_1, List.rev _3)) ) +# 6877 "ml/parser.ml" + : 'module_type)) +; (fun __caml_parser_env -> + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'attributes) in + let _5 = 
(Parsing.peek_val __caml_parser_env 0 : 'module_expr) in + Obj.repr( +# 768 "ml/parser.mly" + ( mkmty ~attrs:_4 (Pmty_typeof _5) ) +# 6885 "ml/parser.ml" + : 'module_type)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'module_type) in + Obj.repr( +# 772 "ml/parser.mly" + ( _2 ) +# 6892 "ml/parser.ml" + : 'module_type)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'module_type) in + Obj.repr( +# 774 "ml/parser.mly" + ( unclosed "(" 1 ")" 3 ) +# 6899 "ml/parser.ml" + : 'module_type)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'extension) in + Obj.repr( +# 776 "ml/parser.mly" + ( mkmty(Pmty_extension _1) ) +# 6906 "ml/parser.ml" + : 'module_type)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'module_type) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'attribute) in + Obj.repr( +# 778 "ml/parser.mly" + ( Mty.attr _1 _2 ) +# 6914 "ml/parser.ml" + : 'module_type)) +; (fun __caml_parser_env -> + Obj.repr( +# 781 "ml/parser.mly" + ( [] ) +# 6920 "ml/parser.ml" + : 'signature)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'signature) in + Obj.repr( +# 782 "ml/parser.mly" + ( (text_sig 1) @ _2 ) +# 6927 "ml/parser.ml" + : 'signature)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'signature_item) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'signature) in + Obj.repr( +# 783 "ml/parser.mly" + ( (text_sig 1) @ _1 :: _2 ) +# 6935 "ml/parser.ml" + : 'signature)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'value_description) in + Obj.repr( +# 787 "ml/parser.mly" + ( let (body, ext) = _1 in mksig_ext (Psig_value body) ext ) +# 6942 "ml/parser.ml" + : 'signature_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'primitive_declaration) in + Obj.repr( +# 789 "ml/parser.mly" + ( let (body, ext) = _1 in mksig_ext (Psig_value body) ext) +# 6949 "ml/parser.ml" + : 'signature_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'type_declarations) in + Obj.repr( +# 791 "ml/parser.mly" + ( let (nr, l, ext) = _1 in mksig_ext (Psig_type (nr, List.rev l)) ext ) +# 6956 "ml/parser.ml" + : 'signature_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'sig_type_extension) in + Obj.repr( +# 793 "ml/parser.mly" + ( let (l, ext) = _1 in mksig_ext (Psig_typext l) ext ) +# 6963 "ml/parser.ml" + : 'signature_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'sig_exception_declaration) in + Obj.repr( +# 795 "ml/parser.mly" + ( let (l, ext) = _1 in mksig_ext (Psig_exception l) ext ) +# 6970 "ml/parser.ml" + : 'signature_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'module_declaration) in + Obj.repr( +# 797 "ml/parser.mly" + ( let (body, ext) = _1 in mksig_ext (Psig_module body) ext ) +# 6977 "ml/parser.ml" + : 'signature_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'module_alias) in + Obj.repr( +# 799 "ml/parser.mly" + ( let (body, ext) = _1 in mksig_ext (Psig_module body) ext ) +# 6984 "ml/parser.ml" + : 'signature_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'rec_module_declarations) in + Obj.repr( +# 801 "ml/parser.mly" + ( let (l, ext) = _1 in mksig_ext (Psig_recmodule (List.rev l)) ext ) +# 6991 
"ml/parser.ml" + : 'signature_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'module_type_declaration) in + Obj.repr( +# 803 "ml/parser.mly" + ( let (body, ext) = _1 in mksig_ext (Psig_modtype body) ext ) +# 6998 "ml/parser.ml" + : 'signature_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'open_statement) in + Obj.repr( +# 805 "ml/parser.mly" + ( let (body, ext) = _1 in mksig_ext (Psig_open body) ext ) +# 7005 "ml/parser.ml" + : 'signature_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'sig_include_statement) in + Obj.repr( +# 807 "ml/parser.mly" + ( let (body, ext) = _1 in mksig_ext (Psig_include body) ext ) +# 7012 "ml/parser.ml" + : 'signature_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'class_type_declarations) in + Obj.repr( +# 809 "ml/parser.mly" + ( let (l, ext) = _1 in mksig_ext (Psig_class_type (List.rev l)) ext ) +# 7019 "ml/parser.ml" + : 'signature_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'item_extension) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 811 "ml/parser.mly" + ( mksig(Psig_extension (_1, (add_docs_attrs (symbol_docs ()) _2))) ) +# 7027 "ml/parser.ml" + : 'signature_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'floating_attribute) in + Obj.repr( +# 813 "ml/parser.mly" + ( mark_symbol_docs (); + mksig(Psig_attribute _1) ) +# 7035 "ml/parser.ml" + : 'signature_item)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'override_flag) in + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'ext_attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'mod_longident) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 818 "ml/parser.mly" + ( let (ext, attrs) = _3 in + Opn.mk (mkrhs _4 4) ~override:_2 ~attrs:(attrs@_5) + ~loc:(symbol_rloc()) ~docs:(symbol_docs ()) + , ext) +# 7048 "ml/parser.ml" + : 'open_statement)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'module_type) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 825 "ml/parser.mly" + ( let (ext, attrs) = _2 in + Incl.mk _3 ~attrs:(attrs@_4) + ~loc:(symbol_rloc()) ~docs:(symbol_docs ()) + , ext) +# 7060 "ml/parser.ml" + : 'sig_include_statement)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'module_type) in + Obj.repr( +# 832 "ml/parser.mly" + ( _2 ) +# 7067 "ml/parser.ml" + : 'module_declaration_body)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 4 : string) in + let _4 = (Parsing.peek_val __caml_parser_env 2 : 'module_type) in + let _6 = (Parsing.peek_val __caml_parser_env 0 : 'module_declaration_body) in + Obj.repr( +# 834 "ml/parser.mly" + ( mkmty(Pmty_functor(mkrhs _2 2, Some _4, _6)) ) +# 7076 "ml/parser.ml" + : 'module_declaration_body)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'module_declaration_body) in + Obj.repr( +# 836 "ml/parser.mly" + ( mkmty(Pmty_functor(mkrhs "*" 1, None, _3)) ) +# 7083 "ml/parser.ml" + : 'module_declaration_body)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'ext_attributes) in + let _3 = (Parsing.peek_val 
__caml_parser_env 2 : string) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'module_declaration_body) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 840 "ml/parser.mly" + ( let (ext, attrs) = _2 in + Md.mk (mkrhs _3 3) _4 ~attrs:(attrs@_5) + ~loc:(symbol_rloc()) ~docs:(symbol_docs ()) + , ext ) +# 7096 "ml/parser.ml" + : 'module_declaration)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 4 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _5 = (Parsing.peek_val __caml_parser_env 1 : 'mod_longident) in + let _6 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 847 "ml/parser.mly" + ( let (ext, attrs) = _2 in + Md.mk (mkrhs _3 3) + (Mty.alias ~loc:(rhs_loc 5) (mkrhs _5 5)) ~attrs:(attrs@_6) + ~loc:(symbol_rloc()) ~docs:(symbol_docs ()) + , ext ) +# 7110 "ml/parser.ml" + : 'module_alias)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'rec_module_declaration) in + Obj.repr( +# 855 "ml/parser.mly" + ( let (body, ext) = _1 in ([body], ext) ) +# 7117 "ml/parser.ml" + : 'rec_module_declarations)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'rec_module_declarations) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'and_module_declaration) in + Obj.repr( +# 857 "ml/parser.mly" + ( let (l, ext) = _1 in (_2 :: l, ext) ) +# 7125 "ml/parser.ml" + : 'rec_module_declarations)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 5 : 'ext_attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _6 = (Parsing.peek_val __caml_parser_env 1 : 'module_type) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 861 "ml/parser.mly" + ( let (ext, attrs) = _2 in + Md.mk (mkrhs _4 4) _6 ~attrs:(attrs@_7) + ~loc:(symbol_rloc()) ~docs:(symbol_docs ()) + , ext) +# 7138 "ml/parser.ml" + : 'rec_module_declaration)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 4 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _5 = (Parsing.peek_val __caml_parser_env 1 : 'module_type) in + let _6 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 868 "ml/parser.mly" + ( Md.mk (mkrhs _3 3) _5 ~attrs:(_2@_6) ~loc:(symbol_rloc()) + ~text:(symbol_text()) ~docs:(symbol_docs()) ) +# 7149 "ml/parser.ml" + : 'and_module_declaration)) +; (fun __caml_parser_env -> + Obj.repr( +# 872 "ml/parser.mly" + ( None ) +# 7155 "ml/parser.ml" + : 'module_type_declaration_body)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'module_type) in + Obj.repr( +# 873 "ml/parser.mly" + ( Some _2 ) +# 7162 "ml/parser.ml" + : 'module_type_declaration_body)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 3 : 'ext_attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 2 : 'ident) in + let _5 = (Parsing.peek_val __caml_parser_env 1 : 'module_type_declaration_body) in + let _6 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 878 "ml/parser.mly" + ( let (ext, attrs) = _3 in + Mtd.mk (mkrhs _4 4) ?typ:_5 ~attrs:(attrs@_6) + ~loc:(symbol_rloc()) ~docs:(symbol_docs ()) + , ext ) +# 7175 "ml/parser.ml" + : 'module_type_declaration)) +; (fun __caml_parser_env -> + Obj.repr( +# 886 "ml/parser.mly" + ( [] ) +# 7181 "ml/parser.ml" + : 'class_type_parameters)) +; 
(fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'type_parameter_list) in + Obj.repr( +# 887 "ml/parser.mly" + ( List.rev _2 ) +# 7188 "ml/parser.ml" + : 'class_type_parameters)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'class_self_pattern) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'class_fields) in + Obj.repr( +# 891 "ml/parser.mly" + ( Cstr.mk _1 (extra_cstr 2 (List.rev _2)) ) +# 7196 "ml/parser.ml" + : 'class_structure)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'pattern) in + Obj.repr( +# 895 "ml/parser.mly" + ( reloc_pat _2 ) +# 7203 "ml/parser.ml" + : 'class_self_pattern)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'pattern) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'core_type) in + Obj.repr( +# 897 "ml/parser.mly" + ( mkpat(Ppat_constraint(_2, _4)) ) +# 7211 "ml/parser.ml" + : 'class_self_pattern)) +; (fun __caml_parser_env -> + Obj.repr( +# 899 "ml/parser.mly" + ( ghpat(Ppat_any) ) +# 7217 "ml/parser.ml" + : 'class_self_pattern)) +; (fun __caml_parser_env -> + Obj.repr( +# 903 "ml/parser.mly" + ( [] ) +# 7223 "ml/parser.ml" + : 'class_fields)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'class_fields) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'class_field) in + Obj.repr( +# 905 "ml/parser.mly" + ( _2 :: (text_cstr 2) @ _1 ) +# 7231 "ml/parser.ml" + : 'class_fields)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'value) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 909 "ml/parser.mly" + ( let v, attrs = _2 in + mkcf (Pcf_val v) ~attrs:(attrs@_3) ~docs:(symbol_docs ()) ) +# 7240 "ml/parser.ml" + : 'class_field)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'method_) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 912 "ml/parser.mly" + ( let meth, attrs = _2 in + mkcf (Pcf_method meth) ~attrs:(attrs@_3) ~docs:(symbol_docs ()) ) +# 7249 "ml/parser.ml" + : 'class_field)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'constrain_field) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 915 "ml/parser.mly" + ( mkcf (Pcf_constraint _3) ~attrs:(_2@_4) ~docs:(symbol_docs ()) ) +# 7258 "ml/parser.ml" + : 'class_field)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'seq_expr) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 917 "ml/parser.mly" + ( mkcf (Pcf_initializer _3) ~attrs:(_2@_4) ~docs:(symbol_docs ()) ) +# 7267 "ml/parser.ml" + : 'class_field)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'item_extension) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 919 "ml/parser.mly" + ( mkcf (Pcf_extension _1) ~attrs:_2 ~docs:(symbol_docs ()) ) +# 7275 "ml/parser.ml" + : 'class_field)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'floating_attribute) in + Obj.repr( +# 921 "ml/parser.mly" + ( mark_symbol_docs (); + mkcf (Pcf_attribute _1) ) +# 7283 "ml/parser.ml" + : 'class_field)) +; (fun __caml_parser_env -> + let _1 = 
(Parsing.peek_val __caml_parser_env 6 : 'override_flag) in + let _2 = (Parsing.peek_val __caml_parser_env 5 : 'attributes) in + let _5 = (Parsing.peek_val __caml_parser_env 2 : 'label) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in + Obj.repr( +# 927 "ml/parser.mly" + ( if _1 = Override then syntax_error (); + (mkloc _5 (rhs_loc 5), Mutable, Cfk_virtual _7), _2 ) +# 7294 "ml/parser.ml" + : 'value)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 6 : 'override_flag) in + let _2 = (Parsing.peek_val __caml_parser_env 5 : 'attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : 'mutable_flag) in + let _5 = (Parsing.peek_val __caml_parser_env 2 : 'label) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in + Obj.repr( +# 930 "ml/parser.mly" + ( if _1 = Override then syntax_error (); + (mkrhs _5 5, _4, Cfk_virtual _7), _2 ) +# 7306 "ml/parser.ml" + : 'value)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 5 : 'override_flag) in + let _2 = (Parsing.peek_val __caml_parser_env 4 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 3 : 'mutable_flag) in + let _4 = (Parsing.peek_val __caml_parser_env 2 : 'label) in + let _6 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 933 "ml/parser.mly" + ( (mkrhs _4 4, _3, Cfk_concrete (_1, _6)), _2 ) +# 7317 "ml/parser.ml" + : 'value)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 6 : 'override_flag) in + let _2 = (Parsing.peek_val __caml_parser_env 5 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'mutable_flag) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : 'label) in + let _5 = (Parsing.peek_val __caml_parser_env 2 : 'type_constraint) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 935 "ml/parser.mly" + ( + let e = mkexp_constraint _7 _5 in + (mkrhs _4 4, _3, Cfk_concrete (_1, e)), _2 + ) +# 7332 "ml/parser.ml" + : 'value)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 6 : 'override_flag) in + let _2 = (Parsing.peek_val __caml_parser_env 5 : 'attributes) in + let _5 = (Parsing.peek_val __caml_parser_env 2 : 'label) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'poly_type) in + Obj.repr( +# 943 "ml/parser.mly" + ( if _1 = Override then syntax_error (); + (mkloc _5 (rhs_loc 5), Private, Cfk_virtual _7), _2 ) +# 7343 "ml/parser.ml" + : 'method_)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 6 : 'override_flag) in + let _2 = (Parsing.peek_val __caml_parser_env 5 : 'attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : 'private_flag) in + let _5 = (Parsing.peek_val __caml_parser_env 2 : 'label) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'poly_type) in + Obj.repr( +# 946 "ml/parser.mly" + ( if _1 = Override then syntax_error (); + (mkloc _5 (rhs_loc 5), _4, Cfk_virtual _7), _2 ) +# 7355 "ml/parser.ml" + : 'method_)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'override_flag) in + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'private_flag) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'label) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'strict_binding) in + Obj.repr( +# 949 "ml/parser.mly" + ( (mkloc _4 (rhs_loc 4), _3, + Cfk_concrete (_1, ghexp(Pexp_poly (_5, None)))), _2 ) +# 7367 "ml/parser.ml" + : 'method_)) +; (fun 
__caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 7 : 'override_flag) in + let _2 = (Parsing.peek_val __caml_parser_env 6 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 5 : 'private_flag) in + let _4 = (Parsing.peek_val __caml_parser_env 4 : 'label) in + let _6 = (Parsing.peek_val __caml_parser_env 2 : 'poly_type) in + let _8 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 952 "ml/parser.mly" + ( (mkloc _4 (rhs_loc 4), _3, + Cfk_concrete (_1, ghexp(Pexp_poly(_8, Some _6)))), _2 ) +# 7380 "ml/parser.ml" + : 'method_)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 10 : 'override_flag) in + let _2 = (Parsing.peek_val __caml_parser_env 9 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 8 : 'private_flag) in + let _4 = (Parsing.peek_val __caml_parser_env 7 : 'label) in + let _7 = (Parsing.peek_val __caml_parser_env 4 : 'lident_list) in + let _9 = (Parsing.peek_val __caml_parser_env 2 : 'core_type) in + let _11 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 956 "ml/parser.mly" + ( let exp, poly = wrap_type_annotation _7 _9 _11 in + (mkloc _4 (rhs_loc 4), _3, + Cfk_concrete (_1, ghexp(Pexp_poly(exp, Some poly)))), _2 ) +# 7395 "ml/parser.ml" + : 'method_)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'core_type_comma_list) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'clty_longident) in + Obj.repr( +# 965 "ml/parser.mly" + ( mkcty(Pcty_constr (mkloc _4 (rhs_loc 4), List.rev _2)) ) +# 7403 "ml/parser.ml" + : 'class_signature)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'clty_longident) in + Obj.repr( +# 967 "ml/parser.mly" + ( mkcty(Pcty_constr (mkrhs _1 1, [])) ) +# 7410 "ml/parser.ml" + : 'class_signature)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'class_sig_body) in + Obj.repr( +# 969 "ml/parser.mly" + ( mkcty ~attrs:_2 (Pcty_signature _3) ) +# 7418 "ml/parser.ml" + : 'class_signature)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'class_sig_body) in + Obj.repr( +# 971 "ml/parser.mly" + ( unclosed "object" 1 "end" 4 ) +# 7426 "ml/parser.ml" + : 'class_signature)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'class_signature) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'attribute) in + Obj.repr( +# 973 "ml/parser.mly" + ( Cty.attr _1 _2 ) +# 7434 "ml/parser.ml" + : 'class_signature)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'extension) in + Obj.repr( +# 975 "ml/parser.mly" + ( mkcty(Pcty_extension _1) ) +# 7441 "ml/parser.ml" + : 'class_signature)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'override_flag) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : 'attributes) in + let _5 = (Parsing.peek_val __caml_parser_env 2 : 'mod_longident) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'class_signature) in + Obj.repr( +# 977 "ml/parser.mly" + ( wrap_class_type_attrs (mkcty(Pcty_open(_3, mkrhs _5 5, _7))) _4 ) +# 7451 "ml/parser.ml" + : 'class_signature)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'class_self_type) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'class_sig_fields) in + Obj.repr( +# 981 "ml/parser.mly" 
+ ( Csig.mk _1 (extra_csig 2 (List.rev _2)) ) +# 7459 "ml/parser.ml" + : 'class_sig_body)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'core_type) in + Obj.repr( +# 985 "ml/parser.mly" + ( _2 ) +# 7466 "ml/parser.ml" + : 'class_self_type)) +; (fun __caml_parser_env -> + Obj.repr( +# 987 "ml/parser.mly" + ( mktyp(Ptyp_any) ) +# 7472 "ml/parser.ml" + : 'class_self_type)) +; (fun __caml_parser_env -> + Obj.repr( +# 990 "ml/parser.mly" + ( [] ) +# 7478 "ml/parser.ml" + : 'class_sig_fields)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'class_sig_fields) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'class_sig_field) in + Obj.repr( +# 991 "ml/parser.mly" + ( _2 :: (text_csig 2) @ _1 ) +# 7486 "ml/parser.ml" + : 'class_sig_fields)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'class_signature) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 995 "ml/parser.mly" + ( mkctf (Pctf_inherit _3) ~attrs:(_2@_4) ~docs:(symbol_docs ()) ) +# 7495 "ml/parser.ml" + : 'class_sig_field)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'value_type) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 997 "ml/parser.mly" + ( mkctf (Pctf_val _3) ~attrs:(_2@_4) ~docs:(symbol_docs ()) ) +# 7504 "ml/parser.ml" + : 'class_sig_field)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 5 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'private_virtual_flags) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : 'label) in + let _6 = (Parsing.peek_val __caml_parser_env 1 : 'poly_type) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 1000 "ml/parser.mly" + ( + let (p, v) = _3 in + mkctf (Pctf_method (mkrhs _4 4, p, v, _6)) ~attrs:(_2@_7) ~docs:(symbol_docs ()) + ) +# 7518 "ml/parser.ml" + : 'class_sig_field)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'constrain_field) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 1005 "ml/parser.mly" + ( mkctf (Pctf_constraint _3) ~attrs:(_2@_4) ~docs:(symbol_docs ()) ) +# 7527 "ml/parser.ml" + : 'class_sig_field)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'item_extension) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 1007 "ml/parser.mly" + ( mkctf (Pctf_extension _1) ~attrs:_2 ~docs:(symbol_docs ()) ) +# 7535 "ml/parser.ml" + : 'class_sig_field)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'floating_attribute) in + Obj.repr( +# 1009 "ml/parser.mly" + ( mark_symbol_docs (); + mkctf(Pctf_attribute _1) ) +# 7543 "ml/parser.ml" + : 'class_sig_field)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'mutable_flag) in + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'label) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in + Obj.repr( +# 1014 "ml/parser.mly" + ( mkrhs _3 3, _2, Virtual, _5 ) +# 7552 "ml/parser.ml" + : 'value_type)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 
'virtual_flag) in + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'label) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in + Obj.repr( +# 1016 "ml/parser.mly" + ( mkrhs _3 3, Mutable, _2, _5 ) +# 7561 "ml/parser.ml" + : 'value_type)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'label) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in + Obj.repr( +# 1018 "ml/parser.mly" + ( mkrhs _1 1, Immutable, Concrete, _3 ) +# 7569 "ml/parser.ml" + : 'value_type)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'core_type) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in + Obj.repr( +# 1021 "ml/parser.mly" + ( _1, _3, symbol_rloc() ) +# 7577 "ml/parser.ml" + : 'constrain)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'core_type) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in + Obj.repr( +# 1024 "ml/parser.mly" + ( _1, _3 ) +# 7585 "ml/parser.ml" + : 'constrain_field)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'class_type_declaration) in + Obj.repr( +# 1028 "ml/parser.mly" + ( let (body, ext) = _1 in ([body],ext) ) +# 7592 "ml/parser.ml" + : 'class_type_declarations)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'class_type_declarations) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'and_class_type_declaration) in + Obj.repr( +# 1030 "ml/parser.mly" + ( let (l, ext) = _1 in (_2 :: l, ext) ) +# 7600 "ml/parser.ml" + : 'class_type_declarations)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 6 : 'ext_attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 5 : 'virtual_flag) in + let _5 = (Parsing.peek_val __caml_parser_env 4 : 'class_type_parameters) in + let _6 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _8 = (Parsing.peek_val __caml_parser_env 1 : 'class_signature) in + let _9 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 1035 "ml/parser.mly" + ( let (ext, attrs) = _3 in + Ci.mk (mkrhs _6 6) _8 ~virt:_4 ~params:_5 ~attrs:(attrs@_9) + ~loc:(symbol_rloc ()) ~docs:(symbol_docs ()) + , ext) +# 7615 "ml/parser.ml" + : 'class_type_declaration)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 6 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 5 : 'virtual_flag) in + let _4 = (Parsing.peek_val __caml_parser_env 4 : 'class_type_parameters) in + let _5 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _7 = (Parsing.peek_val __caml_parser_env 1 : 'class_signature) in + let _8 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 1043 "ml/parser.mly" + ( Ci.mk (mkrhs _5 5) _7 ~virt:_3 ~params:_4 + ~attrs:(_2@_8) ~loc:(symbol_rloc ()) + ~text:(symbol_text ()) ~docs:(symbol_docs ()) ) +# 7629 "ml/parser.ml" + : 'and_class_type_declaration)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1051 "ml/parser.mly" + ( _1 ) +# 7636 "ml/parser.ml" + : 'seq_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in + Obj.repr( +# 1052 "ml/parser.mly" + ( _1 ) +# 7643 "ml/parser.ml" + : 'seq_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 1053 "ml/parser.mly" + ( 
mkexp(Pexp_sequence(_1, _3)) ) +# 7651 "ml/parser.ml" + : 'seq_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'expr) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'attr_id) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 1055 "ml/parser.mly" + ( let seq = mkexp(Pexp_sequence (_1, _5)) in + let payload = PStr [mkstrexp seq []] in + mkexp (Pexp_extension (_4, payload)) ) +# 7662 "ml/parser.ml" + : 'seq_expr)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'label_let_pattern) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'opt_default) in + Obj.repr( +# 1061 "ml/parser.mly" + ( (Optional (fst _3), _4, snd _3) ) +# 7670 "ml/parser.ml" + : 'labeled_simple_pattern)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'label_var) in + Obj.repr( +# 1063 "ml/parser.mly" + ( (Optional (fst _2), None, snd _2) ) +# 7677 "ml/parser.ml" + : 'labeled_simple_pattern)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : string) in + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'let_pattern) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'opt_default) in + Obj.repr( +# 1065 "ml/parser.mly" + ( (Optional _1, _4, _3) ) +# 7686 "ml/parser.ml" + : 'labeled_simple_pattern)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : string) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'pattern_var) in + Obj.repr( +# 1067 "ml/parser.mly" + ( (Optional _1, None, _2) ) +# 7694 "ml/parser.ml" + : 'labeled_simple_pattern)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'label_let_pattern) in + Obj.repr( +# 1069 "ml/parser.mly" + ( (Labelled (fst _3), None, snd _3) ) +# 7701 "ml/parser.ml" + : 'labeled_simple_pattern)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'label_var) in + Obj.repr( +# 1071 "ml/parser.mly" + ( (Labelled (fst _2), None, snd _2) ) +# 7708 "ml/parser.ml" + : 'labeled_simple_pattern)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : string) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'simple_pattern) in + Obj.repr( +# 1073 "ml/parser.mly" + ( (Labelled _1, None, _2) ) +# 7716 "ml/parser.ml" + : 'labeled_simple_pattern)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'simple_pattern) in + Obj.repr( +# 1075 "ml/parser.mly" + ( (Nolabel, None, _1) ) +# 7723 "ml/parser.ml" + : 'labeled_simple_pattern)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 1078 "ml/parser.mly" + ( mkpat(Ppat_var (mkrhs _1 1)) ) +# 7730 "ml/parser.ml" + : 'pattern_var)) +; (fun __caml_parser_env -> + Obj.repr( +# 1079 "ml/parser.mly" + ( mkpat Ppat_any ) +# 7736 "ml/parser.ml" + : 'pattern_var)) +; (fun __caml_parser_env -> + Obj.repr( +# 1082 "ml/parser.mly" + ( None ) +# 7742 "ml/parser.ml" + : 'opt_default)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 1083 "ml/parser.mly" + ( Some _2 ) +# 7749 "ml/parser.ml" + : 'opt_default)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'label_var) in + Obj.repr( +# 1087 "ml/parser.mly" + ( _1 ) +# 7756 "ml/parser.ml" + : 'label_let_pattern)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'label_var) in + let _3 = (Parsing.peek_val 
__caml_parser_env 0 : 'core_type) in + Obj.repr( +# 1089 "ml/parser.mly" + ( let (lab, pat) = _1 in (lab, mkpat(Ppat_constraint(pat, _3))) ) +# 7764 "ml/parser.ml" + : 'label_let_pattern)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 1092 "ml/parser.mly" + ( (_1, mkpat(Ppat_var (mkrhs _1 1))) ) +# 7771 "ml/parser.ml" + : 'label_var)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in + Obj.repr( +# 1096 "ml/parser.mly" + ( _1 ) +# 7778 "ml/parser.ml" + : 'let_pattern)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in + Obj.repr( +# 1098 "ml/parser.mly" + ( mkpat(Ppat_constraint(_1, _3)) ) +# 7786 "ml/parser.ml" + : 'let_pattern)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'simple_expr) in + Obj.repr( +# 1102 "ml/parser.mly" + ( _1 ) +# 7793 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'simple_expr) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'simple_labeled_expr_list) in + Obj.repr( +# 1104 "ml/parser.mly" + ( mkexp(Pexp_apply(_1, List.rev _2)) ) +# 7801 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'let_bindings) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 1106 "ml/parser.mly" + ( expr_of_let_bindings _1 _3 ) +# 7809 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'ext_attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _5 = (Parsing.peek_val __caml_parser_env 2 : 'module_binding_body) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 1108 "ml/parser.mly" + ( mkexp_attrs (Pexp_letmodule(mkrhs _4 4, _5, _7)) _3 ) +# 7819 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 3 : 'ext_attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 2 : 'let_exception_declaration) in + let _6 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 1110 "ml/parser.mly" + ( mkexp_attrs (Pexp_letexception(_4, _6)) _3 ) +# 7828 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'override_flag) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : 'ext_attributes) in + let _5 = (Parsing.peek_val __caml_parser_env 2 : 'mod_longident) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 1112 "ml/parser.mly" + ( mkexp_attrs (Pexp_open(_3, mkrhs _5 5, _7)) _4 ) +# 7838 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'opt_bar) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'match_cases) in + Obj.repr( +# 1114 "ml/parser.mly" + ( mkexp_attrs (Pexp_function(List.rev _4)) _2 ) +# 7847 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'labeled_simple_pattern) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'fun_def) in + Obj.repr( +# 1116 "ml/parser.mly" + ( let (l,o,p) = _3 in + mkexp_attrs (Pexp_fun(l, o, p, _4)) _2 ) +# 7857 "ml/parser.ml" + : 'expr)) +; (fun 
__caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 5 : 'ext_attributes) in + let _5 = (Parsing.peek_val __caml_parser_env 2 : 'lident_list) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'fun_def) in + Obj.repr( +# 1119 "ml/parser.mly" + ( mkexp_attrs (mk_newtypes _5 _7).pexp_desc _2 ) +# 7866 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 4 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 3 : 'seq_expr) in + let _5 = (Parsing.peek_val __caml_parser_env 1 : 'opt_bar) in + let _6 = (Parsing.peek_val __caml_parser_env 0 : 'match_cases) in + Obj.repr( +# 1121 "ml/parser.mly" + ( mkexp_attrs (Pexp_match(_3, List.rev _6)) _2 ) +# 7876 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 4 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 3 : 'seq_expr) in + let _5 = (Parsing.peek_val __caml_parser_env 1 : 'opt_bar) in + let _6 = (Parsing.peek_val __caml_parser_env 0 : 'match_cases) in + Obj.repr( +# 1123 "ml/parser.mly" + ( mkexp_attrs (Pexp_try(_3, List.rev _6)) _2 ) +# 7886 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'seq_expr) in + Obj.repr( +# 1125 "ml/parser.mly" + ( syntax_error() ) +# 7894 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'expr_comma_list) in + Obj.repr( +# 1127 "ml/parser.mly" + ( mkexp(Pexp_tuple(List.rev _1)) ) +# 7901 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'constr_longident) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'simple_expr) in + Obj.repr( +# 1129 "ml/parser.mly" + ( mkexp(Pexp_construct(mkrhs _1 1, Some _2)) ) +# 7909 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'name_tag) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'simple_expr) in + Obj.repr( +# 1131 "ml/parser.mly" + ( mkexp(Pexp_variant(_1, Some _2)) ) +# 7917 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 5 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'seq_expr) in + let _5 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1133 "ml/parser.mly" + ( mkexp_attrs(Pexp_ifthenelse(_3, _5, Some _7)) _2 ) +# 7927 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'seq_expr) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1135 "ml/parser.mly" + ( mkexp_attrs (Pexp_ifthenelse(_3, _5, None)) _2 ) +# 7936 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 4 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 3 : 'seq_expr) in + let _5 = (Parsing.peek_val __caml_parser_env 1 : 'seq_expr) in + Obj.repr( +# 1137 "ml/parser.mly" + ( mkexp_attrs (Pexp_while(_3, _5)) _2 ) +# 7945 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 8 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 7 : 'pattern) in + let _5 = (Parsing.peek_val __caml_parser_env 5 : 'seq_expr) in + let _6 
= (Parsing.peek_val __caml_parser_env 4 : 'direction_flag) in + let _7 = (Parsing.peek_val __caml_parser_env 3 : 'seq_expr) in + let _9 = (Parsing.peek_val __caml_parser_env 1 : 'seq_expr) in + Obj.repr( +# 1140 "ml/parser.mly" + ( mkexp_attrs(Pexp_for(_3, _5, _7, _6, _9)) _2 ) +# 7957 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1142 "ml/parser.mly" + ( mkexp_cons (rhs_loc 2) (ghexp(Pexp_tuple[_1;_3])) (symbol_rloc()) ) +# 7965 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1144 "ml/parser.mly" + ( mkinfix _1 _2 _3 ) +# 7974 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1146 "ml/parser.mly" + ( mkinfix _1 _2 _3 ) +# 7983 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1148 "ml/parser.mly" + ( mkinfix _1 _2 _3 ) +# 7992 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1150 "ml/parser.mly" + ( mkinfix _1 _2 _3 ) +# 8001 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1152 "ml/parser.mly" + ( mkinfix _1 _2 _3 ) +# 8010 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1154 "ml/parser.mly" + ( mkinfix _1 "+" _3 ) +# 8018 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1156 "ml/parser.mly" + ( mkinfix _1 "+." _3 ) +# 8026 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1158 "ml/parser.mly" + ( mkinfix _1 "+=" _3 ) +# 8034 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1160 "ml/parser.mly" + ( mkinfix _1 "-" _3 ) +# 8042 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1162 "ml/parser.mly" + ( mkinfix _1 "-." 
_3 ) +# 8050 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1164 "ml/parser.mly" + ( mkinfix _1 "*" _3 ) +# 8058 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1166 "ml/parser.mly" + ( mkinfix _1 "%" _3 ) +# 8066 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1168 "ml/parser.mly" + ( mkinfix _1 "=" _3 ) +# 8074 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1170 "ml/parser.mly" + ( mkinfix _1 "<" _3 ) +# 8082 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1172 "ml/parser.mly" + ( mkinfix _1 ">" _3 ) +# 8090 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1174 "ml/parser.mly" + ( mkinfix _1 "or" _3 ) +# 8098 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1176 "ml/parser.mly" + ( mkinfix _1 "||" _3 ) +# 8106 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1178 "ml/parser.mly" + ( mkinfix _1 "&" _3 ) +# 8114 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1180 "ml/parser.mly" + ( mkinfix _1 "&&" _3 ) +# 8122 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1182 "ml/parser.mly" + ( mkinfix _1 ":=" _3 ) +# 8130 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'subtractive) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1184 "ml/parser.mly" + ( mkuminus _1 _2 ) +# 8138 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'additive) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1186 "ml/parser.mly" + ( mkuplus _1 _2 ) +# 8146 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'simple_expr) in + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'label_longident) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1188 "ml/parser.mly" + ( mkexp(Pexp_setfield(_1, mkrhs _3 3, _5)) ) +# 8155 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 6 : 'simple_expr) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : 'seq_expr) in + let _7 = (Parsing.peek_val 
__caml_parser_env 0 : 'expr) in + Obj.repr( +# 1190 "ml/parser.mly" + ( mkexp(Pexp_apply(ghexp(Pexp_ident(array_function "Array" "set")), + [Nolabel,_1; Nolabel,_4; Nolabel,_7])) ) +# 8165 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 6 : 'simple_expr) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : 'seq_expr) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1193 "ml/parser.mly" + ( mkexp(Pexp_apply(ghexp(Pexp_ident(array_function "String" "set")), + [Nolabel,_1; Nolabel,_4; Nolabel,_7])) ) +# 8175 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 6 : 'simple_expr) in + let _2 = (Parsing.peek_val __caml_parser_env 5 : string) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : 'expr) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1196 "ml/parser.mly" + ( let id = mkexp @@ Pexp_ident( ghloc @@ Lident ("." ^ _2 ^ "[]<-")) in + mkexp @@ Pexp_apply(id , [Nolabel, _1; Nolabel, _4; Nolabel, _7]) ) +# 8186 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 6 : 'simple_expr) in + let _2 = (Parsing.peek_val __caml_parser_env 5 : string) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : 'expr) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1199 "ml/parser.mly" + ( let id = mkexp @@ Pexp_ident( ghloc @@ Lident ("." ^ _2 ^ "()<-")) in + mkexp @@ Pexp_apply(id , [Nolabel, _1; Nolabel, _4; Nolabel, _7]) ) +# 8197 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 6 : 'simple_expr) in + let _2 = (Parsing.peek_val __caml_parser_env 5 : string) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : 'expr) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1202 "ml/parser.mly" + ( let id = mkexp @@ Pexp_ident( ghloc @@ Lident ("." ^ _2 ^ "{}<-")) in + mkexp @@ Pexp_apply(id , [Nolabel, _1; Nolabel, _4; Nolabel, _7]) ) +# 8208 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 8 : 'simple_expr) in + let _3 = (Parsing.peek_val __caml_parser_env 6 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 5 : string) in + let _6 = (Parsing.peek_val __caml_parser_env 3 : 'expr) in + let _9 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1205 "ml/parser.mly" + ( let id = mkexp @@ Pexp_ident( ghloc @@ Ldot(_3,"." ^ _4 ^ "[]<-")) in + mkexp @@ Pexp_apply(id , [Nolabel, _1; Nolabel, _6; Nolabel, _9]) ) +# 8220 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 8 : 'simple_expr) in + let _3 = (Parsing.peek_val __caml_parser_env 6 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 5 : string) in + let _6 = (Parsing.peek_val __caml_parser_env 3 : 'expr) in + let _9 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1208 "ml/parser.mly" + ( let id = mkexp @@ Pexp_ident( ghloc @@ Ldot(_3, "." 
^ _4 ^ "()<-")) in + mkexp @@ Pexp_apply(id , [Nolabel, _1; Nolabel, _6; Nolabel, _9]) ) +# 8232 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 8 : 'simple_expr) in + let _3 = (Parsing.peek_val __caml_parser_env 6 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 5 : string) in + let _6 = (Parsing.peek_val __caml_parser_env 3 : 'expr) in + let _9 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1211 "ml/parser.mly" + ( let id = mkexp @@ Pexp_ident( ghloc @@ Ldot(_3, "." ^ _4 ^ "{}<-")) in + mkexp @@ Pexp_apply(id , [Nolabel, _1; Nolabel, _6; Nolabel, _9]) ) +# 8244 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'label) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1214 "ml/parser.mly" + ( mkexp(Pexp_setinstvar(mkrhs _1 1, _3)) ) +# 8252 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'simple_expr) in + Obj.repr( +# 1216 "ml/parser.mly" + ( mkexp_attrs (Pexp_assert _3) _2 ) +# 8260 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'simple_expr) in + Obj.repr( +# 1218 "ml/parser.mly" + ( mkexp_attrs (Pexp_lazy _3) _2 ) +# 8268 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'class_structure) in + Obj.repr( +# 1220 "ml/parser.mly" + ( mkexp_attrs (Pexp_object _3) _2 ) +# 8276 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'class_structure) in + Obj.repr( +# 1222 "ml/parser.mly" + ( unclosed "object" 1 "end" 4 ) +# 8284 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'attribute) in + Obj.repr( +# 1224 "ml/parser.mly" + ( Exp.attr _1 _2 ) +# 8292 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + Obj.repr( +# 1226 "ml/parser.mly" + ( not_expecting 1 "wildcard \"_\"" ) +# 8298 "ml/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'val_longident) in + Obj.repr( +# 1230 "ml/parser.mly" + ( mkexp(Pexp_ident (mkrhs _1 1)) ) +# 8305 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'constant) in + Obj.repr( +# 1232 "ml/parser.mly" + ( mkexp(Pexp_constant _1) ) +# 8312 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'constr_longident) in + Obj.repr( +# 1234 "ml/parser.mly" + ( mkexp(Pexp_construct(mkrhs _1 1, None)) ) +# 8319 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'name_tag) in + Obj.repr( +# 1236 "ml/parser.mly" + ( mkexp(Pexp_variant(_1, None)) ) +# 8326 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'seq_expr) in + Obj.repr( +# 1238 "ml/parser.mly" + ( reloc_exp _2 ) +# 8333 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val 
__caml_parser_env 1 : 'seq_expr) in + Obj.repr( +# 1240 "ml/parser.mly" + ( unclosed "(" 1 ")" 3 ) +# 8340 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'seq_expr) in + Obj.repr( +# 1242 "ml/parser.mly" + ( wrap_exp_attrs (reloc_exp _3) _2 (* check location *) ) +# 8348 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'ext_attributes) in + Obj.repr( +# 1244 "ml/parser.mly" + ( mkexp_attrs (Pexp_construct (mkloc (Lident "()") (symbol_rloc ()), + None)) _2 ) +# 8356 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'seq_expr) in + Obj.repr( +# 1247 "ml/parser.mly" + ( unclosed "begin" 1 "end" 4 ) +# 8364 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'seq_expr) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'type_constraint) in + Obj.repr( +# 1249 "ml/parser.mly" + ( mkexp_constraint _2 _3 ) +# 8372 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'simple_expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'label_longident) in + Obj.repr( +# 1251 "ml/parser.mly" + ( mkexp(Pexp_field(_1, mkrhs _3 3)) ) +# 8380 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'seq_expr) in + Obj.repr( +# 1253 "ml/parser.mly" + ( mkexp(Pexp_open(Fresh, mkrhs _1 1, _4)) ) +# 8388 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 3 : 'mod_longident) in + Obj.repr( +# 1255 "ml/parser.mly" + ( mkexp(Pexp_open(Fresh, mkrhs _1 1, + mkexp(Pexp_construct(mkrhs (Lident "()") 1, None)))) ) +# 8396 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'seq_expr) in + Obj.repr( +# 1258 "ml/parser.mly" + ( unclosed "(" 3 ")" 5 ) +# 8404 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'simple_expr) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'seq_expr) in + Obj.repr( +# 1260 "ml/parser.mly" + ( mkexp(Pexp_apply(ghexp(Pexp_ident(array_function "Array" "get")), + [Nolabel,_1; Nolabel,_4])) ) +# 8413 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'simple_expr) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'seq_expr) in + Obj.repr( +# 1263 "ml/parser.mly" + ( unclosed "(" 3 ")" 5 ) +# 8421 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'simple_expr) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'seq_expr) in + Obj.repr( +# 1265 "ml/parser.mly" + ( mkexp(Pexp_apply(ghexp(Pexp_ident(array_function "String" "get")), + [Nolabel,_1; Nolabel,_4])) ) +# 8430 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'simple_expr) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'seq_expr) in + Obj.repr( +# 1268 "ml/parser.mly" + ( unclosed "[" 3 "]" 5 ) +# 
8438 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'simple_expr) in + let _2 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in + Obj.repr( +# 1270 "ml/parser.mly" + ( let id = mkexp @@ Pexp_ident( ghloc @@ Lident ("." ^ _2 ^ "[]")) in + mkexp @@ Pexp_apply(id, [Nolabel, _1; Nolabel, _4]) ) +# 8448 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'simple_expr) in + let _2 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in + Obj.repr( +# 1273 "ml/parser.mly" + ( unclosed "[" 3 "]" 5 ) +# 8457 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'simple_expr) in + let _2 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in + Obj.repr( +# 1275 "ml/parser.mly" + ( let id = mkexp @@ Pexp_ident( ghloc @@ Lident ("." ^ _2 ^ "()")) in + mkexp @@ Pexp_apply(id, [Nolabel, _1; Nolabel, _4]) ) +# 8467 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'simple_expr) in + let _2 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in + Obj.repr( +# 1278 "ml/parser.mly" + ( unclosed "(" 3 ")" 5 ) +# 8476 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'simple_expr) in + let _2 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in + Obj.repr( +# 1280 "ml/parser.mly" + ( let id = mkexp @@ Pexp_ident( ghloc @@ Lident ("." ^ _2 ^ "{}")) in + mkexp @@ Pexp_apply(id, [Nolabel, _1; Nolabel, _4]) ) +# 8486 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'simple_expr) in + let _2 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in + Obj.repr( +# 1283 "ml/parser.mly" + ( unclosed "{" 3 "}" 5 ) +# 8495 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 6 : 'simple_expr) in + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _6 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in + Obj.repr( +# 1285 "ml/parser.mly" + ( let id = mkexp @@ Pexp_ident( ghloc @@ Ldot(_3, "." 
^ _4 ^ "[]")) in + mkexp @@ Pexp_apply(id, [Nolabel, _1; Nolabel, _6]) ) +# 8506 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 6 : 'simple_expr) in + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _6 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in + Obj.repr( +# 1288 "ml/parser.mly" + ( unclosed "[" 5 "]" 7 ) +# 8516 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 6 : 'simple_expr) in + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _6 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in + Obj.repr( +# 1290 "ml/parser.mly" + ( let id = mkexp @@ Pexp_ident( ghloc @@ Ldot(_3, "." ^ _4 ^ "()")) in + mkexp @@ Pexp_apply(id, [Nolabel, _1; Nolabel, _6]) ) +# 8527 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 6 : 'simple_expr) in + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _6 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in + Obj.repr( +# 1293 "ml/parser.mly" + ( unclosed "(" 5 ")" 7 ) +# 8537 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 6 : 'simple_expr) in + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _6 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in + Obj.repr( +# 1295 "ml/parser.mly" + ( let id = mkexp @@ Pexp_ident( ghloc @@ Ldot(_3, "." 
^ _4 ^ "{}")) in + mkexp @@ Pexp_apply(id, [Nolabel, _1; Nolabel, _6]) ) +# 8548 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 6 : 'simple_expr) in + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _6 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in + Obj.repr( +# 1298 "ml/parser.mly" + ( unclosed "{" 5 "}" 7 ) +# 8558 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'simple_expr) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'expr_comma_list) in + Obj.repr( +# 1300 "ml/parser.mly" + ( unclosed "{" 3 "}" 5 ) +# 8566 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'record_expr) in + Obj.repr( +# 1302 "ml/parser.mly" + ( let (exten, fields) = _2 in mkexp (Pexp_record(fields, exten)) ) +# 8573 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'record_expr) in + Obj.repr( +# 1304 "ml/parser.mly" + ( unclosed "{" 1 "}" 3 ) +# 8580 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'record_expr) in + Obj.repr( +# 1306 "ml/parser.mly" + ( let (exten, fields) = _4 in + let rec_exp = mkexp(Pexp_record(fields, exten)) in + mkexp(Pexp_open(Fresh, mkrhs _1 1, rec_exp)) ) +# 8590 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'record_expr) in + Obj.repr( +# 1310 "ml/parser.mly" + ( unclosed "{" 3 "}" 5 ) +# 8598 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'expr_semi_list) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'opt_semi) in + Obj.repr( +# 1312 "ml/parser.mly" + ( mkexp (Pexp_array(List.rev _2)) ) +# 8606 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'expr_semi_list) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'opt_semi) in + Obj.repr( +# 1314 "ml/parser.mly" + ( unclosed "[|" 1 "|]" 4 ) +# 8614 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + Obj.repr( +# 1316 "ml/parser.mly" + ( mkexp (Pexp_array []) ) +# 8620 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 5 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 2 : 'expr_semi_list) in + let _5 = (Parsing.peek_val __caml_parser_env 1 : 'opt_semi) in + Obj.repr( +# 1318 "ml/parser.mly" + ( mkexp(Pexp_open(Fresh, mkrhs _1 1, mkexp(Pexp_array(List.rev _4)))) ) +# 8629 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 3 : 'mod_longident) in + Obj.repr( +# 1320 "ml/parser.mly" + ( mkexp(Pexp_open(Fresh, mkrhs _1 1, mkexp(Pexp_array []))) ) +# 8636 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 5 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 2 : 'expr_semi_list) in + let _5 = (Parsing.peek_val __caml_parser_env 1 : 'opt_semi) in + Obj.repr( +# 1322 "ml/parser.mly" + ( unclosed "[|" 3 "|]" 6 ) +# 8645 "ml/parser.ml" + : 'simple_expr)) +; (fun 
__caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'expr_semi_list) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'opt_semi) in + Obj.repr( +# 1324 "ml/parser.mly" + ( reloc_exp (mktailexp (rhs_loc 4) (List.rev _2)) ) +# 8653 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'expr_semi_list) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'opt_semi) in + Obj.repr( +# 1326 "ml/parser.mly" + ( unclosed "[" 1 "]" 4 ) +# 8661 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 5 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 2 : 'expr_semi_list) in + let _5 = (Parsing.peek_val __caml_parser_env 1 : 'opt_semi) in + Obj.repr( +# 1328 "ml/parser.mly" + ( let list_exp = reloc_exp (mktailexp (rhs_loc 6) (List.rev _4)) in + mkexp(Pexp_open(Fresh, mkrhs _1 1, list_exp)) ) +# 8671 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 3 : 'mod_longident) in + Obj.repr( +# 1331 "ml/parser.mly" + ( mkexp(Pexp_open(Fresh, mkrhs _1 1, + mkexp(Pexp_construct(mkrhs (Lident "[]") 1, None)))) ) +# 8679 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 5 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 2 : 'expr_semi_list) in + let _5 = (Parsing.peek_val __caml_parser_env 1 : 'opt_semi) in + Obj.repr( +# 1334 "ml/parser.mly" + ( unclosed "[" 3 "]" 6 ) +# 8688 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : string) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'simple_expr) in + Obj.repr( +# 1336 "ml/parser.mly" + ( mkexp(Pexp_apply(mkoperator _1 1, [Nolabel,_2])) ) +# 8696 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'simple_expr) in + Obj.repr( +# 1338 "ml/parser.mly" + ( mkexp(Pexp_apply(mkoperator "!" 
1, [Nolabel,_2])) ) +# 8703 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'field_expr_list) in + Obj.repr( +# 1340 "ml/parser.mly" + ( mkexp (Pexp_override _2) ) +# 8710 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'field_expr_list) in + Obj.repr( +# 1342 "ml/parser.mly" + ( unclosed "{<" 1 ">}" 3 ) +# 8717 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + Obj.repr( +# 1344 "ml/parser.mly" + ( mkexp (Pexp_override [])) +# 8723 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'field_expr_list) in + Obj.repr( +# 1346 "ml/parser.mly" + ( mkexp(Pexp_open(Fresh, mkrhs _1 1, mkexp (Pexp_override _4)))) +# 8731 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 3 : 'mod_longident) in + Obj.repr( +# 1348 "ml/parser.mly" + ( mkexp(Pexp_open(Fresh, mkrhs _1 1, mkexp (Pexp_override [])))) +# 8738 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'field_expr_list) in + Obj.repr( +# 1350 "ml/parser.mly" + ( unclosed "{<" 3 ">}" 5 ) +# 8746 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'simple_expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'label) in + Obj.repr( +# 1352 "ml/parser.mly" + ( mkexp(Pexp_send(_1, mkrhs _3 3)) ) +# 8754 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'simple_expr) in + let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'simple_expr) in + Obj.repr( +# 1354 "ml/parser.mly" + ( mkinfix _1 _2 _3 ) +# 8763 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'ext_attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'module_expr) in + Obj.repr( +# 1356 "ml/parser.mly" + ( mkexp_attrs (Pexp_pack _4) _3 ) +# 8771 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'ext_attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : 'module_expr) in + let _6 = (Parsing.peek_val __caml_parser_env 1 : 'package_type) in + Obj.repr( +# 1358 "ml/parser.mly" + ( mkexp_attrs (Pexp_constraint (ghexp (Pexp_pack _4), + ghtyp (Ptyp_package _6))) + _3 ) +# 8782 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 3 : 'ext_attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 2 : 'module_expr) in + Obj.repr( +# 1362 "ml/parser.mly" + ( unclosed "(" 1 ")" 6 ) +# 8790 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 8 : 'mod_longident) in + let _5 = (Parsing.peek_val __caml_parser_env 4 : 'ext_attributes) in + let _6 = (Parsing.peek_val __caml_parser_env 3 : 'module_expr) in + let _8 = (Parsing.peek_val __caml_parser_env 1 : 'package_type) in + Obj.repr( +# 1365 "ml/parser.mly" + ( mkexp(Pexp_open(Fresh, mkrhs _1 1, + mkexp_attrs (Pexp_constraint (ghexp (Pexp_pack _6), + ghtyp (Ptyp_package _8))) + _5 )) ) +# 8803 "ml/parser.ml" + : 'simple_expr)) +; (fun 
__caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 7 : 'mod_longident) in + let _5 = (Parsing.peek_val __caml_parser_env 3 : 'ext_attributes) in + let _6 = (Parsing.peek_val __caml_parser_env 2 : 'module_expr) in + Obj.repr( +# 1370 "ml/parser.mly" + ( unclosed "(" 3 ")" 8 ) +# 8812 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'extension) in + Obj.repr( +# 1372 "ml/parser.mly" + ( mkexp (Pexp_extension _1) ) +# 8819 "ml/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'labeled_simple_expr) in + Obj.repr( +# 1376 "ml/parser.mly" + ( [_1] ) +# 8826 "ml/parser.ml" + : 'simple_labeled_expr_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'simple_labeled_expr_list) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'labeled_simple_expr) in + Obj.repr( +# 1378 "ml/parser.mly" + ( _2 :: _1 ) +# 8834 "ml/parser.ml" + : 'simple_labeled_expr_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'simple_expr) in + Obj.repr( +# 1382 "ml/parser.mly" + ( (Nolabel, _1) ) +# 8841 "ml/parser.ml" + : 'labeled_simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'label_expr) in + Obj.repr( +# 1384 "ml/parser.mly" + ( _1 ) +# 8848 "ml/parser.ml" + : 'labeled_simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : string) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'simple_expr) in + Obj.repr( +# 1388 "ml/parser.mly" + ( (Labelled _1, _2) ) +# 8856 "ml/parser.ml" + : 'label_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'label_ident) in + Obj.repr( +# 1390 "ml/parser.mly" + ( (Labelled (fst _2), snd _2) ) +# 8863 "ml/parser.ml" + : 'label_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'label_ident) in + Obj.repr( +# 1392 "ml/parser.mly" + ( (Optional (fst _2), snd _2) ) +# 8870 "ml/parser.ml" + : 'label_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : string) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'simple_expr) in + Obj.repr( +# 1394 "ml/parser.mly" + ( (Optional _1, _2) ) +# 8878 "ml/parser.ml" + : 'label_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 1397 "ml/parser.mly" + ( (_1, mkexp(Pexp_ident(mkrhs (Lident _1) 1))) ) +# 8885 "ml/parser.ml" + : 'label_ident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 1400 "ml/parser.mly" + ( [mkrhs _1 1] ) +# 8892 "ml/parser.ml" + : 'lident_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : string) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'lident_list) in + Obj.repr( +# 1401 "ml/parser.mly" + ( mkrhs _1 1 :: _2 ) +# 8900 "ml/parser.ml" + : 'lident_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'val_ident) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'strict_binding) in + Obj.repr( +# 1405 "ml/parser.mly" + ( (mkpatvar _1 1, _2) ) +# 8908 "ml/parser.ml" + : 'let_binding_body)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 3 : 'val_ident) in + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'type_constraint) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + 
Obj.repr( +# 1407 "ml/parser.mly" + ( let v = mkpatvar _1 1 in (* PR#7344 *) + let t = + match _2 with + Some t, None -> t + | _, Some t -> t + | _ -> assert false + in + (ghpat(Ppat_constraint(v, ghtyp(Ptyp_poly([],t)))), + mkexp_constraint _4 _2) ) +# 8925 "ml/parser.ml" + : 'let_binding_body)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 6 : 'val_ident) in + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'typevar_list) in + let _5 = (Parsing.peek_val __caml_parser_env 2 : 'core_type) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 1417 "ml/parser.mly" + ( (ghpat(Ppat_constraint(mkpatvar _1 1, + ghtyp(Ptyp_poly(List.rev _3,_5)))), + _7) ) +# 8937 "ml/parser.ml" + : 'let_binding_body)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 7 : 'val_ident) in + let _4 = (Parsing.peek_val __caml_parser_env 4 : 'lident_list) in + let _6 = (Parsing.peek_val __caml_parser_env 2 : 'core_type) in + let _8 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 1421 "ml/parser.mly" + ( let exp, poly = wrap_type_annotation _4 _6 _8 in + (ghpat(Ppat_constraint(mkpatvar _1 1, poly)), exp) ) +# 8948 "ml/parser.ml" + : 'let_binding_body)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_no_exn) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 1424 "ml/parser.mly" + ( (_1, _3) ) +# 8956 "ml/parser.ml" + : 'let_binding_body)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'simple_pattern_not_ident) in + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'core_type) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 1426 "ml/parser.mly" + ( (ghpat(Ppat_constraint(_1, _3)), _5) ) +# 8965 "ml/parser.ml" + : 'let_binding_body)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'let_binding) in + Obj.repr( +# 1429 "ml/parser.mly" + ( _1 ) +# 8972 "ml/parser.ml" + : 'let_bindings)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'let_bindings) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'and_let_binding) in + Obj.repr( +# 1430 "ml/parser.mly" + ( addlb _1 _2 ) +# 8980 "ml/parser.ml" + : 'let_bindings)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'rec_flag) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'let_binding_body) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 1434 "ml/parser.mly" + ( let (ext, attr) = _2 in + mklbs ext _3 (mklb true _4 (attr@_5)) ) +# 8991 "ml/parser.ml" + : 'let_binding)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'let_binding_body) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 1439 "ml/parser.mly" + ( mklb false _3 (_2@_4) ) +# 9000 "ml/parser.ml" + : 'and_let_binding)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'strict_binding) in + Obj.repr( +# 1443 "ml/parser.mly" + ( _1 ) +# 9007 "ml/parser.ml" + : 'fun_binding)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'type_constraint) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 1445 "ml/parser.mly" + ( 
mkexp_constraint _3 _1 ) +# 9015 "ml/parser.ml" + : 'fun_binding)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 1449 "ml/parser.mly" + ( _2 ) +# 9022 "ml/parser.ml" + : 'strict_binding)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'labeled_simple_pattern) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'fun_binding) in + Obj.repr( +# 1451 "ml/parser.mly" + ( let (l, o, p) = _1 in ghexp(Pexp_fun(l, o, p, _2)) ) +# 9030 "ml/parser.ml" + : 'strict_binding)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'lident_list) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'fun_binding) in + Obj.repr( +# 1453 "ml/parser.mly" + ( mk_newtypes _3 _5 ) +# 9038 "ml/parser.ml" + : 'strict_binding)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'match_case) in + Obj.repr( +# 1456 "ml/parser.mly" + ( [_1] ) +# 9045 "ml/parser.ml" + : 'match_cases)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'match_cases) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'match_case) in + Obj.repr( +# 1457 "ml/parser.mly" + ( _3 :: _1 ) +# 9053 "ml/parser.ml" + : 'match_cases)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 1461 "ml/parser.mly" + ( Exp.case _1 _3 ) +# 9061 "ml/parser.ml" + : 'match_case)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'pattern) in + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'seq_expr) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 1463 "ml/parser.mly" + ( Exp.case _1 ~guard:_3 _5 ) +# 9070 "ml/parser.ml" + : 'match_case)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in + Obj.repr( +# 1465 "ml/parser.mly" + ( Exp.case _1 (Exp.unreachable ~loc:(rhs_loc 3) ())) +# 9077 "ml/parser.ml" + : 'match_case)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 1469 "ml/parser.mly" + ( _2 ) +# 9084 "ml/parser.ml" + : 'fun_def)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'simple_core_type) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 1471 "ml/parser.mly" + ( mkexp (Pexp_constraint (_4, _2)) ) +# 9092 "ml/parser.ml" + : 'fun_def)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'labeled_simple_pattern) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'fun_def) in + Obj.repr( +# 1474 "ml/parser.mly" + ( + let (l,o,p) = _1 in + ghexp(Pexp_fun(l, o, p, _2)) + ) +# 9103 "ml/parser.ml" + : 'fun_def)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'lident_list) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'fun_def) in + Obj.repr( +# 1479 "ml/parser.mly" + ( mk_newtypes _3 _5 ) +# 9111 "ml/parser.ml" + : 'fun_def)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr_comma_list) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1482 "ml/parser.mly" + ( _3 :: _1 ) +# 9119 "ml/parser.ml" + : 'expr_comma_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1483 
"ml/parser.mly" + ( [_3; _1] ) +# 9127 "ml/parser.ml" + : 'expr_comma_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'simple_expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'lbl_expr_list) in + Obj.repr( +# 1486 "ml/parser.mly" + ( (Some _1, _3) ) +# 9135 "ml/parser.ml" + : 'record_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'lbl_expr_list) in + Obj.repr( +# 1487 "ml/parser.mly" + ( (None, _1) ) +# 9142 "ml/parser.ml" + : 'record_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'lbl_expr) in + Obj.repr( +# 1490 "ml/parser.mly" + ( [_1] ) +# 9149 "ml/parser.ml" + : 'lbl_expr_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'lbl_expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'lbl_expr_list) in + Obj.repr( +# 1491 "ml/parser.mly" + ( _1 :: _3 ) +# 9157 "ml/parser.ml" + : 'lbl_expr_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'lbl_expr) in + Obj.repr( +# 1492 "ml/parser.mly" + ( [_1] ) +# 9164 "ml/parser.ml" + : 'lbl_expr_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 3 : 'label_longident) in + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'opt_type_constraint) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1496 "ml/parser.mly" + ( (mkrhs _1 1, mkexp_opt_constraint _4 _2) ) +# 9173 "ml/parser.ml" + : 'lbl_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'label_longident) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'opt_type_constraint) in + Obj.repr( +# 1498 "ml/parser.mly" + ( (mkrhs _1 1, mkexp_opt_constraint (exp_of_label _1 1) _2) ) +# 9181 "ml/parser.ml" + : 'lbl_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'field_expr) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'opt_semi) in + Obj.repr( +# 1501 "ml/parser.mly" + ( [_1] ) +# 9189 "ml/parser.ml" + : 'field_expr_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'field_expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'field_expr_list) in + Obj.repr( +# 1502 "ml/parser.mly" + ( _1 :: _3 ) +# 9197 "ml/parser.ml" + : 'field_expr_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'label) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1506 "ml/parser.mly" + ( (mkrhs _1 1, _3) ) +# 9205 "ml/parser.ml" + : 'field_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'label) in + Obj.repr( +# 1508 "ml/parser.mly" + ( (mkrhs _1 1, exp_of_label (Lident _1) 1) ) +# 9212 "ml/parser.ml" + : 'field_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1511 "ml/parser.mly" + ( [_1] ) +# 9219 "ml/parser.ml" + : 'expr_semi_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr_semi_list) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1512 "ml/parser.mly" + ( _3 :: _1 ) +# 9227 "ml/parser.ml" + : 'expr_semi_list)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in + Obj.repr( +# 1515 "ml/parser.mly" + ( (Some _2, None) ) +# 9234 "ml/parser.ml" + : 'type_constraint)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 
'core_type) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in + Obj.repr( +# 1516 "ml/parser.mly" + ( (Some _2, Some _4) ) +# 9242 "ml/parser.ml" + : 'type_constraint)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in + Obj.repr( +# 1517 "ml/parser.mly" + ( (None, Some _2) ) +# 9249 "ml/parser.ml" + : 'type_constraint)) +; (fun __caml_parser_env -> + Obj.repr( +# 1518 "ml/parser.mly" + ( syntax_error() ) +# 9255 "ml/parser.ml" + : 'type_constraint)) +; (fun __caml_parser_env -> + Obj.repr( +# 1519 "ml/parser.mly" + ( syntax_error() ) +# 9261 "ml/parser.ml" + : 'type_constraint)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'type_constraint) in + Obj.repr( +# 1522 "ml/parser.mly" + ( Some _1 ) +# 9268 "ml/parser.ml" + : 'opt_type_constraint)) +; (fun __caml_parser_env -> + Obj.repr( +# 1523 "ml/parser.mly" + ( None ) +# 9274 "ml/parser.ml" + : 'opt_type_constraint)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'val_ident) in + Obj.repr( +# 1530 "ml/parser.mly" + ( mkpat(Ppat_alias(_1, mkrhs _3 3)) ) +# 9282 "ml/parser.ml" + : 'pattern)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in + Obj.repr( +# 1532 "ml/parser.mly" + ( expecting 3 "identifier" ) +# 9289 "ml/parser.ml" + : 'pattern)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'pattern_comma_list) in + Obj.repr( +# 1534 "ml/parser.mly" + ( mkpat(Ppat_tuple(List.rev _1)) ) +# 9296 "ml/parser.ml" + : 'pattern)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in + Obj.repr( +# 1536 "ml/parser.mly" + ( mkpat_cons (rhs_loc 2) (ghpat(Ppat_tuple[_1;_3])) (symbol_rloc()) ) +# 9304 "ml/parser.ml" + : 'pattern)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in + Obj.repr( +# 1538 "ml/parser.mly" + ( expecting 3 "pattern" ) +# 9311 "ml/parser.ml" + : 'pattern)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in + Obj.repr( +# 1540 "ml/parser.mly" + ( mkpat(Ppat_or(_1, _3)) ) +# 9319 "ml/parser.ml" + : 'pattern)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in + Obj.repr( +# 1542 "ml/parser.mly" + ( expecting 3 "pattern" ) +# 9326 "ml/parser.ml" + : 'pattern)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in + Obj.repr( +# 1544 "ml/parser.mly" + ( mkpat_attrs (Ppat_exception _3) _2) +# 9334 "ml/parser.ml" + : 'pattern)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'pattern) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'attribute) in + Obj.repr( +# 1546 "ml/parser.mly" + ( Pat.attr _1 _2 ) +# 9342 "ml/parser.ml" + : 'pattern)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'pattern_gen) in + Obj.repr( +# 1547 "ml/parser.mly" + ( _1 ) +# 9349 "ml/parser.ml" + : 'pattern)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_no_exn) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'val_ident) in + Obj.repr( +# 1551 "ml/parser.mly" + ( 
mkpat(Ppat_alias(_1, mkrhs _3 3)) ) +# 9357 "ml/parser.ml" + : 'pattern_no_exn)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_no_exn) in + Obj.repr( +# 1553 "ml/parser.mly" + ( expecting 3 "identifier" ) +# 9364 "ml/parser.ml" + : 'pattern_no_exn)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'pattern_no_exn_comma_list) in + Obj.repr( +# 1555 "ml/parser.mly" + ( mkpat(Ppat_tuple(List.rev _1)) ) +# 9371 "ml/parser.ml" + : 'pattern_no_exn)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_no_exn) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in + Obj.repr( +# 1557 "ml/parser.mly" + ( mkpat_cons (rhs_loc 2) (ghpat(Ppat_tuple[_1;_3])) (symbol_rloc()) ) +# 9379 "ml/parser.ml" + : 'pattern_no_exn)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_no_exn) in + Obj.repr( +# 1559 "ml/parser.mly" + ( expecting 3 "pattern" ) +# 9386 "ml/parser.ml" + : 'pattern_no_exn)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_no_exn) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in + Obj.repr( +# 1561 "ml/parser.mly" + ( mkpat(Ppat_or(_1, _3)) ) +# 9394 "ml/parser.ml" + : 'pattern_no_exn)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_no_exn) in + Obj.repr( +# 1563 "ml/parser.mly" + ( expecting 3 "pattern" ) +# 9401 "ml/parser.ml" + : 'pattern_no_exn)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'pattern_no_exn) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'attribute) in + Obj.repr( +# 1565 "ml/parser.mly" + ( Pat.attr _1 _2 ) +# 9409 "ml/parser.ml" + : 'pattern_no_exn)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'pattern_gen) in + Obj.repr( +# 1566 "ml/parser.mly" + ( _1 ) +# 9416 "ml/parser.ml" + : 'pattern_no_exn)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'simple_pattern) in + Obj.repr( +# 1570 "ml/parser.mly" + ( _1 ) +# 9423 "ml/parser.ml" + : 'pattern_gen)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'constr_longident) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in + Obj.repr( +# 1572 "ml/parser.mly" + ( mkpat(Ppat_construct(mkrhs _1 1, Some _2)) ) +# 9431 "ml/parser.ml" + : 'pattern_gen)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'name_tag) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in + Obj.repr( +# 1574 "ml/parser.mly" + ( mkpat(Ppat_variant(_1, Some _2)) ) +# 9439 "ml/parser.ml" + : 'pattern_gen)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'simple_pattern) in + Obj.repr( +# 1576 "ml/parser.mly" + ( mkpat_attrs (Ppat_lazy _3) _2) +# 9447 "ml/parser.ml" + : 'pattern_gen)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'val_ident) in + Obj.repr( +# 1580 "ml/parser.mly" + ( mkpat(Ppat_var (mkrhs _1 1)) ) +# 9454 "ml/parser.ml" + : 'simple_pattern)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'simple_pattern_not_ident) in + Obj.repr( +# 1581 "ml/parser.mly" + ( _1 ) +# 9461 "ml/parser.ml" + : 'simple_pattern)) +; (fun __caml_parser_env -> + Obj.repr( +# 1585 "ml/parser.mly" + ( mkpat(Ppat_any) ) +# 9467 "ml/parser.ml" + : 
'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'signed_constant) in + Obj.repr( +# 1587 "ml/parser.mly" + ( mkpat(Ppat_constant _1) ) +# 9474 "ml/parser.ml" + : 'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'signed_constant) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'signed_constant) in + Obj.repr( +# 1589 "ml/parser.mly" + ( mkpat(Ppat_interval (_1, _3)) ) +# 9482 "ml/parser.ml" + : 'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'constr_longident) in + Obj.repr( +# 1591 "ml/parser.mly" + ( mkpat(Ppat_construct(mkrhs _1 1, None)) ) +# 9489 "ml/parser.ml" + : 'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'name_tag) in + Obj.repr( +# 1593 "ml/parser.mly" + ( mkpat(Ppat_variant(_1, None)) ) +# 9496 "ml/parser.ml" + : 'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'type_longident) in + Obj.repr( +# 1595 "ml/parser.mly" + ( mkpat(Ppat_type (mkrhs _2 2)) ) +# 9503 "ml/parser.ml" + : 'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'simple_delimited_pattern) in + Obj.repr( +# 1597 "ml/parser.mly" + ( _1 ) +# 9510 "ml/parser.ml" + : 'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'mod_longident) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'simple_delimited_pattern) in + Obj.repr( +# 1599 "ml/parser.mly" + ( mkpat @@ Ppat_open(mkrhs _1 1, _3) ) +# 9518 "ml/parser.ml" + : 'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 3 : 'mod_longident) in + Obj.repr( +# 1601 "ml/parser.mly" + ( mkpat @@ Ppat_open(mkrhs _1 1, mkpat @@ + Ppat_construct ( mkrhs (Lident "[]") 4, None)) ) +# 9526 "ml/parser.ml" + : 'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 3 : 'mod_longident) in + Obj.repr( +# 1604 "ml/parser.mly" + ( mkpat @@ Ppat_open( mkrhs _1 1, mkpat @@ + Ppat_construct ( mkrhs (Lident "()") 4, None) ) ) +# 9534 "ml/parser.ml" + : 'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'pattern) in + Obj.repr( +# 1607 "ml/parser.mly" + ( mkpat @@ Ppat_open (mkrhs _1 1, _4)) +# 9542 "ml/parser.ml" + : 'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'pattern) in + Obj.repr( +# 1609 "ml/parser.mly" + (unclosed "(" 3 ")" 5 ) +# 9550 "ml/parser.ml" + : 'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 3 : 'mod_longident) in + Obj.repr( +# 1611 "ml/parser.mly" + ( expecting 4 "pattern" ) +# 9557 "ml/parser.ml" + : 'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'pattern) in + Obj.repr( +# 1613 "ml/parser.mly" + ( reloc_pat _2 ) +# 9564 "ml/parser.ml" + : 'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'pattern) in + Obj.repr( +# 1615 "ml/parser.mly" + ( unclosed "(" 1 ")" 3 ) +# 9571 "ml/parser.ml" + : 'simple_pattern_not_ident)) +; (fun 
__caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'pattern) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'core_type) in + Obj.repr( +# 1617 "ml/parser.mly" + ( mkpat(Ppat_constraint(_2, _4)) ) +# 9579 "ml/parser.ml" + : 'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'pattern) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'core_type) in + Obj.repr( +# 1619 "ml/parser.mly" + ( unclosed "(" 1 ")" 5 ) +# 9587 "ml/parser.ml" + : 'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in + Obj.repr( +# 1621 "ml/parser.mly" + ( expecting 4 "type" ) +# 9594 "ml/parser.ml" + : 'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'ext_attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : string) in + Obj.repr( +# 1623 "ml/parser.mly" + ( mkpat_attrs (Ppat_unpack (mkrhs _4 4)) _3 ) +# 9602 "ml/parser.ml" + : 'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'ext_attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _6 = (Parsing.peek_val __caml_parser_env 1 : 'package_type) in + Obj.repr( +# 1625 "ml/parser.mly" + ( mkpat_attrs + (Ppat_constraint(mkpat(Ppat_unpack (mkrhs _4 4)), + ghtyp(Ptyp_package _6))) + _3 ) +# 9614 "ml/parser.ml" + : 'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'ext_attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _6 = (Parsing.peek_val __caml_parser_env 1 : 'package_type) in + Obj.repr( +# 1630 "ml/parser.mly" + ( unclosed "(" 1 ")" 7 ) +# 9623 "ml/parser.ml" + : 'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'extension) in + Obj.repr( +# 1632 "ml/parser.mly" + ( mkpat(Ppat_extension _1) ) +# 9630 "ml/parser.ml" + : 'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'lbl_pattern_list) in + Obj.repr( +# 1637 "ml/parser.mly" + ( let (fields, closed) = _2 in mkpat(Ppat_record(fields, closed)) ) +# 9637 "ml/parser.ml" + : 'simple_delimited_pattern)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'lbl_pattern_list) in + Obj.repr( +# 1639 "ml/parser.mly" + ( unclosed "{" 1 "}" 3 ) +# 9644 "ml/parser.ml" + : 'simple_delimited_pattern)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_semi_list) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'opt_semi) in + Obj.repr( +# 1641 "ml/parser.mly" + ( reloc_pat (mktailpat (rhs_loc 4) (List.rev _2)) ) +# 9652 "ml/parser.ml" + : 'simple_delimited_pattern)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_semi_list) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'opt_semi) in + Obj.repr( +# 1643 "ml/parser.mly" + ( unclosed "[" 1 "]" 4 ) +# 9660 "ml/parser.ml" + : 'simple_delimited_pattern)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_semi_list) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'opt_semi) in + Obj.repr( +# 1645 "ml/parser.mly" + ( mkpat(Ppat_array(List.rev _2)) ) +# 9668 "ml/parser.ml" + : 'simple_delimited_pattern)) +; (fun __caml_parser_env -> + Obj.repr( +# 1647 "ml/parser.mly" + ( mkpat(Ppat_array []) ) +# 9674 "ml/parser.ml" + : 
'simple_delimited_pattern)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_semi_list) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'opt_semi) in + Obj.repr( +# 1649 "ml/parser.mly" + ( unclosed "[|" 1 "|]" 4 ) +# 9682 "ml/parser.ml" + : 'simple_delimited_pattern)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_comma_list) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in + Obj.repr( +# 1652 "ml/parser.mly" + ( _3 :: _1 ) +# 9690 "ml/parser.ml" + : 'pattern_comma_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in + Obj.repr( +# 1653 "ml/parser.mly" + ( [_3; _1] ) +# 9698 "ml/parser.ml" + : 'pattern_comma_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in + Obj.repr( +# 1654 "ml/parser.mly" + ( expecting 3 "pattern" ) +# 9705 "ml/parser.ml" + : 'pattern_comma_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_no_exn_comma_list) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in + Obj.repr( +# 1657 "ml/parser.mly" + ( _3 :: _1 ) +# 9713 "ml/parser.ml" + : 'pattern_no_exn_comma_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_no_exn) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in + Obj.repr( +# 1658 "ml/parser.mly" + ( [_3; _1] ) +# 9721 "ml/parser.ml" + : 'pattern_no_exn_comma_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_no_exn) in + Obj.repr( +# 1659 "ml/parser.mly" + ( expecting 3 "pattern" ) +# 9728 "ml/parser.ml" + : 'pattern_no_exn_comma_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in + Obj.repr( +# 1662 "ml/parser.mly" + ( [_1] ) +# 9735 "ml/parser.ml" + : 'pattern_semi_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_semi_list) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in + Obj.repr( +# 1663 "ml/parser.mly" + ( _3 :: _1 ) +# 9743 "ml/parser.ml" + : 'pattern_semi_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'lbl_pattern) in + Obj.repr( +# 1666 "ml/parser.mly" + ( [_1], Closed ) +# 9750 "ml/parser.ml" + : 'lbl_pattern_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'lbl_pattern) in + Obj.repr( +# 1667 "ml/parser.mly" + ( [_1], Closed ) +# 9757 "ml/parser.ml" + : 'lbl_pattern_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 3 : 'lbl_pattern) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'opt_semi) in + Obj.repr( +# 1668 "ml/parser.mly" + ( [_1], Open ) +# 9765 "ml/parser.ml" + : 'lbl_pattern_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'lbl_pattern) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'lbl_pattern_list) in + Obj.repr( +# 1670 "ml/parser.mly" + ( let (fields, closed) = _3 in _1 :: fields, closed ) +# 9773 "ml/parser.ml" + : 'lbl_pattern_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 3 : 'label_longident) in + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'opt_pattern_type_constraint) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in + Obj.repr( +# 1674 "ml/parser.mly" + ( (mkrhs _1 1, 
mkpat_opt_constraint _4 _2) ) +# 9782 "ml/parser.ml" + : 'lbl_pattern)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'label_longident) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'opt_pattern_type_constraint) in + Obj.repr( +# 1676 "ml/parser.mly" + ( (mkrhs _1 1, mkpat_opt_constraint (pat_of_label _1 1) _2) ) +# 9790 "ml/parser.ml" + : 'lbl_pattern)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in + Obj.repr( +# 1679 "ml/parser.mly" + ( Some _2 ) +# 9797 "ml/parser.ml" + : 'opt_pattern_type_constraint)) +; (fun __caml_parser_env -> + Obj.repr( +# 1680 "ml/parser.mly" + ( None ) +# 9803 "ml/parser.ml" + : 'opt_pattern_type_constraint)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 4 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 3 : 'val_ident) in + let _5 = (Parsing.peek_val __caml_parser_env 1 : 'core_type) in + let _6 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 1687 "ml/parser.mly" + ( let (ext, attrs) = _2 in + Val.mk (mkrhs _3 3) _5 ~attrs:(attrs@_6) + ~loc:(symbol_rloc()) ~docs:(symbol_docs ()) + , ext ) +# 9816 "ml/parser.ml" + : 'value_description)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string * string option) in + Obj.repr( +# 1696 "ml/parser.mly" + ( [fst _1] ) +# 9823 "ml/parser.ml" + : 'primitive_declaration_body)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : string * string option) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'primitive_declaration_body) in + Obj.repr( +# 1697 "ml/parser.mly" + ( fst _1 :: _2 ) +# 9831 "ml/parser.ml" + : 'primitive_declaration_body)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 6 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 5 : 'val_ident) in + let _5 = (Parsing.peek_val __caml_parser_env 3 : 'core_type) in + let _7 = (Parsing.peek_val __caml_parser_env 1 : 'primitive_declaration_body) in + let _8 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 1702 "ml/parser.mly" + ( let (ext, attrs) = _2 in + Val.mk (mkrhs _3 3) _5 ~prim:_7 ~attrs:(attrs@_8) + ~loc:(symbol_rloc ()) ~docs:(symbol_docs ()) + , ext ) +# 9845 "ml/parser.ml" + : 'primitive_declaration)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'type_declaration) in + Obj.repr( +# 1712 "ml/parser.mly" + ( let (nonrec_flag, ty, ext) = _1 in (nonrec_flag, [ty], ext) ) +# 9852 "ml/parser.ml" + : 'type_declarations)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'type_declarations) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'and_type_declaration) in + Obj.repr( +# 1714 "ml/parser.mly" + ( let (nonrec_flag, tys, ext) = _1 in (nonrec_flag, _2 :: tys, ext) ) +# 9860 "ml/parser.ml" + : 'type_declarations)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 6 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 5 : 'nonrec_flag) in + let _4 = (Parsing.peek_val __caml_parser_env 4 : 'optional_type_parameters) in + let _5 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _6 = (Parsing.peek_val __caml_parser_env 2 : 'type_kind) in + let _7 = (Parsing.peek_val __caml_parser_env 1 : 'constraints) in + let _8 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 1720 "ml/parser.mly" + ( let (kind, 
priv, manifest) = _6 in + let (ext, attrs) = _2 in + let ty = + Type.mk (mkrhs _5 5) ~params:_4 ~cstrs:(List.rev _7) ~kind + ~priv ?manifest ~attrs:(attrs@_8) + ~loc:(symbol_rloc ()) ~docs:(symbol_docs ()) + in + (_3, ty, ext) ) +# 9880 "ml/parser.ml" + : 'type_declaration)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 5 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'optional_type_parameters) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _5 = (Parsing.peek_val __caml_parser_env 2 : 'type_kind) in + let _6 = (Parsing.peek_val __caml_parser_env 1 : 'constraints) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 1732 "ml/parser.mly" + ( let (kind, priv, manifest) = _5 in + Type.mk (mkrhs _4 4) ~params:_3 ~cstrs:(List.rev _6) + ~kind ~priv ?manifest ~attrs:(_2@_7) ~loc:(symbol_rloc ()) + ~text:(symbol_text ()) ~docs:(symbol_docs ()) ) +# 9895 "ml/parser.ml" + : 'and_type_declaration)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'constraints) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'constrain) in + Obj.repr( +# 1738 "ml/parser.mly" + ( _3 :: _1 ) +# 9903 "ml/parser.ml" + : 'constraints)) +; (fun __caml_parser_env -> + Obj.repr( +# 1739 "ml/parser.mly" + ( [] ) +# 9909 "ml/parser.ml" + : 'constraints)) +; (fun __caml_parser_env -> + Obj.repr( +# 1743 "ml/parser.mly" + ( (Ptype_abstract, Public, None) ) +# 9915 "ml/parser.ml" + : 'type_kind)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in + Obj.repr( +# 1745 "ml/parser.mly" + ( (Ptype_abstract, Public, Some _2) ) +# 9922 "ml/parser.ml" + : 'type_kind)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in + Obj.repr( +# 1747 "ml/parser.mly" + ( (Ptype_abstract, Private, Some _3) ) +# 9929 "ml/parser.ml" + : 'type_kind)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'constructor_declarations) in + Obj.repr( +# 1749 "ml/parser.mly" + ( (Ptype_variant(List.rev _2), Public, None) ) +# 9936 "ml/parser.ml" + : 'type_kind)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'constructor_declarations) in + Obj.repr( +# 1751 "ml/parser.mly" + ( (Ptype_variant(List.rev _3), Private, None) ) +# 9943 "ml/parser.ml" + : 'type_kind)) +; (fun __caml_parser_env -> + Obj.repr( +# 1753 "ml/parser.mly" + ( (Ptype_open, Public, None) ) +# 9949 "ml/parser.ml" + : 'type_kind)) +; (fun __caml_parser_env -> + Obj.repr( +# 1755 "ml/parser.mly" + ( (Ptype_open, Private, None) ) +# 9955 "ml/parser.ml" + : 'type_kind)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'private_flag) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'label_declarations) in + Obj.repr( +# 1757 "ml/parser.mly" + ( (Ptype_record _4, _2, None) ) +# 9963 "ml/parser.ml" + : 'type_kind)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'core_type) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'private_flag) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'constructor_declarations) in + Obj.repr( +# 1759 "ml/parser.mly" + ( (Ptype_variant(List.rev _5), _4, Some _2) ) +# 9972 "ml/parser.ml" + : 'type_kind)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'core_type) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'private_flag) in + Obj.repr( +# 1761 "ml/parser.mly" 
+ ( (Ptype_open, _4, Some _2) ) +# 9980 "ml/parser.ml" + : 'type_kind)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 5 : 'core_type) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : 'private_flag) in + let _6 = (Parsing.peek_val __caml_parser_env 1 : 'label_declarations) in + Obj.repr( +# 1763 "ml/parser.mly" + ( (Ptype_record _6, _4, Some _2) ) +# 9989 "ml/parser.ml" + : 'type_kind)) +; (fun __caml_parser_env -> + Obj.repr( +# 1766 "ml/parser.mly" + ( [] ) +# 9995 "ml/parser.ml" + : 'optional_type_parameters)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'optional_type_parameter) in + Obj.repr( +# 1767 "ml/parser.mly" + ( [_1] ) +# 10002 "ml/parser.ml" + : 'optional_type_parameters)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'optional_type_parameter_list) in + Obj.repr( +# 1768 "ml/parser.mly" + ( List.rev _2 ) +# 10009 "ml/parser.ml" + : 'optional_type_parameters)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'type_variance) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'optional_type_variable) in + Obj.repr( +# 1771 "ml/parser.mly" + ( _2, _1 ) +# 10017 "ml/parser.ml" + : 'optional_type_parameter)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'optional_type_parameter) in + Obj.repr( +# 1774 "ml/parser.mly" + ( [_1] ) +# 10024 "ml/parser.ml" + : 'optional_type_parameter_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'optional_type_parameter_list) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'optional_type_parameter) in + Obj.repr( +# 1775 "ml/parser.mly" + ( _3 :: _1 ) +# 10032 "ml/parser.ml" + : 'optional_type_parameter_list)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'ident) in + Obj.repr( +# 1778 "ml/parser.mly" + ( mktyp(Ptyp_var _2) ) +# 10039 "ml/parser.ml" + : 'optional_type_variable)) +; (fun __caml_parser_env -> + Obj.repr( +# 1779 "ml/parser.mly" + ( mktyp(Ptyp_any) ) +# 10045 "ml/parser.ml" + : 'optional_type_variable)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'type_variance) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'type_variable) in + Obj.repr( +# 1784 "ml/parser.mly" + ( _2, _1 ) +# 10053 "ml/parser.ml" + : 'type_parameter)) +; (fun __caml_parser_env -> + Obj.repr( +# 1787 "ml/parser.mly" + ( Invariant ) +# 10059 "ml/parser.ml" + : 'type_variance)) +; (fun __caml_parser_env -> + Obj.repr( +# 1788 "ml/parser.mly" + ( Covariant ) +# 10065 "ml/parser.ml" + : 'type_variance)) +; (fun __caml_parser_env -> + Obj.repr( +# 1789 "ml/parser.mly" + ( Contravariant ) +# 10071 "ml/parser.ml" + : 'type_variance)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'ident) in + Obj.repr( +# 1792 "ml/parser.mly" + ( mktyp(Ptyp_var _2) ) +# 10078 "ml/parser.ml" + : 'type_variable)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'type_parameter) in + Obj.repr( +# 1795 "ml/parser.mly" + ( [_1] ) +# 10085 "ml/parser.ml" + : 'type_parameter_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'type_parameter_list) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'type_parameter) in + Obj.repr( +# 1796 "ml/parser.mly" + ( _3 :: _1 ) +# 10093 "ml/parser.ml" + : 'type_parameter_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 
'constructor_declaration) in + Obj.repr( +# 1799 "ml/parser.mly" + ( [_1] ) +# 10100 "ml/parser.ml" + : 'constructor_declarations)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'bar_constructor_declaration) in + Obj.repr( +# 1800 "ml/parser.mly" + ( [_1] ) +# 10107 "ml/parser.ml" + : 'constructor_declarations)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'constructor_declarations) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'bar_constructor_declaration) in + Obj.repr( +# 1801 "ml/parser.mly" + ( _2 :: _1 ) +# 10115 "ml/parser.ml" + : 'constructor_declarations)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'constr_ident) in + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'generalized_constructor_arguments) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in + Obj.repr( +# 1805 "ml/parser.mly" + ( + let args,res = _2 in + Type.constructor (mkrhs _1 1) ~args ?res ~attrs:_3 + ~loc:(symbol_rloc()) ~info:(symbol_info ()) + ) +# 10128 "ml/parser.ml" + : 'constructor_declaration)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'constr_ident) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'generalized_constructor_arguments) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in + Obj.repr( +# 1813 "ml/parser.mly" + ( + let args,res = _3 in + Type.constructor (mkrhs _2 2) ~args ?res ~attrs:_4 + ~loc:(symbol_rloc()) ~info:(symbol_info ()) + ) +# 10141 "ml/parser.ml" + : 'bar_constructor_declaration)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'sig_exception_declaration) in + Obj.repr( +# 1820 "ml/parser.mly" + ( _1 ) +# 10148 "ml/parser.ml" + : 'str_exception_declaration)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 5 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'constr_ident) in + let _5 = (Parsing.peek_val __caml_parser_env 2 : 'constr_longident) in + let _6 = (Parsing.peek_val __caml_parser_env 1 : 'attributes) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 1823 "ml/parser.mly" + ( let (ext,attrs) = _2 in + Te.rebind (mkrhs _3 3) (mkrhs _5 5) ~attrs:(attrs @ _6 @ _7) + ~loc:(symbol_rloc()) ~docs:(symbol_docs ()) + , ext ) +# 10162 "ml/parser.ml" + : 'str_exception_declaration)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 4 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 3 : 'constr_ident) in + let _4 = (Parsing.peek_val __caml_parser_env 2 : 'generalized_constructor_arguments) in + let _5 = (Parsing.peek_val __caml_parser_env 1 : 'attributes) in + let _6 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 1831 "ml/parser.mly" + ( let args, res = _4 in + let (ext,attrs) = _2 in + Te.decl (mkrhs _3 3) ~args ?res ~attrs:(attrs @ _5 @ _6) + ~loc:(symbol_rloc()) ~docs:(symbol_docs ()) + , ext ) +# 10177 "ml/parser.ml" + : 'sig_exception_declaration)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'constr_ident) in + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'generalized_constructor_arguments) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in + Obj.repr( +# 1839 "ml/parser.mly" + ( let args, res = _2 in + Te.decl (mkrhs _1 1) ~args ?res ~attrs:_3 ~loc:(symbol_rloc()) ) +# 10187 "ml/parser.ml" + : 'let_exception_declaration)) +; 
(fun __caml_parser_env -> + Obj.repr( +# 1843 "ml/parser.mly" + ( (Pcstr_tuple [],None) ) +# 10193 "ml/parser.ml" + : 'generalized_constructor_arguments)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'constructor_arguments) in + Obj.repr( +# 1844 "ml/parser.mly" + ( (_2,None) ) +# 10200 "ml/parser.ml" + : 'generalized_constructor_arguments)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'constructor_arguments) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'simple_core_type) in + Obj.repr( +# 1846 "ml/parser.mly" + ( (_2,Some _4) ) +# 10208 "ml/parser.ml" + : 'generalized_constructor_arguments)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'simple_core_type) in + Obj.repr( +# 1848 "ml/parser.mly" + ( (Pcstr_tuple [],Some _2) ) +# 10215 "ml/parser.ml" + : 'generalized_constructor_arguments)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'core_type_list) in + Obj.repr( +# 1852 "ml/parser.mly" + ( Pcstr_tuple (List.rev _1) ) +# 10222 "ml/parser.ml" + : 'constructor_arguments)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'label_declarations) in + Obj.repr( +# 1853 "ml/parser.mly" + ( Pcstr_record _2 ) +# 10229 "ml/parser.ml" + : 'constructor_arguments)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'label_declaration) in + Obj.repr( +# 1856 "ml/parser.mly" + ( [_1] ) +# 10236 "ml/parser.ml" + : 'label_declarations)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'label_declaration_semi) in + Obj.repr( +# 1857 "ml/parser.mly" + ( [_1] ) +# 10243 "ml/parser.ml" + : 'label_declarations)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'label_declaration_semi) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'label_declarations) in + Obj.repr( +# 1858 "ml/parser.mly" + ( _1 :: _2 ) +# 10251 "ml/parser.ml" + : 'label_declarations)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'mutable_flag) in + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'label) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'poly_type_no_attr) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in + Obj.repr( +# 1862 "ml/parser.mly" + ( + Type.field (mkrhs _2 2) _4 ~mut:_1 ~attrs:_5 + ~loc:(symbol_rloc()) ~info:(symbol_info ()) + ) +# 10264 "ml/parser.ml" + : 'label_declaration)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 6 : 'mutable_flag) in + let _2 = (Parsing.peek_val __caml_parser_env 5 : 'label) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : 'poly_type_no_attr) in + let _5 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in + Obj.repr( +# 1869 "ml/parser.mly" + ( + let info = + match rhs_info 5 with + | Some _ as info_before_semi -> info_before_semi + | None -> symbol_info () + in + Type.field (mkrhs _2 2) _4 ~mut:_1 ~attrs:(_5 @ _7) + ~loc:(symbol_rloc()) ~info + ) +# 10283 "ml/parser.ml" + : 'label_declaration_semi)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 7 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 6 : 'nonrec_flag) in + let _4 = (Parsing.peek_val __caml_parser_env 5 : 'optional_type_parameters) in + let _5 = (Parsing.peek_val __caml_parser_env 4 : 'type_longident) in + let _7 = (Parsing.peek_val 
__caml_parser_env 2 : 'private_flag) in + let _8 = (Parsing.peek_val __caml_parser_env 1 : 'str_extension_constructors) in + let _9 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 1885 "ml/parser.mly" + ( let (ext, attrs) = _2 in + if _3 <> Recursive then not_expecting 3 "nonrec flag"; + Te.mk (mkrhs _5 5) (List.rev _8) ~params:_4 ~priv:_7 + ~attrs:(attrs@_9) ~docs:(symbol_docs ()) + , ext ) +# 10300 "ml/parser.ml" + : 'str_type_extension)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 7 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 6 : 'nonrec_flag) in + let _4 = (Parsing.peek_val __caml_parser_env 5 : 'optional_type_parameters) in + let _5 = (Parsing.peek_val __caml_parser_env 4 : 'type_longident) in + let _7 = (Parsing.peek_val __caml_parser_env 2 : 'private_flag) in + let _8 = (Parsing.peek_val __caml_parser_env 1 : 'sig_extension_constructors) in + let _9 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 1894 "ml/parser.mly" + ( let (ext, attrs) = _2 in + if _3 <> Recursive then not_expecting 3 "nonrec flag"; + Te.mk (mkrhs _5 5) (List.rev _8) ~params:_4 ~priv:_7 + ~attrs:(attrs @ _9) ~docs:(symbol_docs ()) + , ext ) +# 10317 "ml/parser.ml" + : 'sig_type_extension)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'extension_constructor_declaration) in + Obj.repr( +# 1901 "ml/parser.mly" + ( [_1] ) +# 10324 "ml/parser.ml" + : 'str_extension_constructors)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'bar_extension_constructor_declaration) in + Obj.repr( +# 1902 "ml/parser.mly" + ( [_1] ) +# 10331 "ml/parser.ml" + : 'str_extension_constructors)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'extension_constructor_rebind) in + Obj.repr( +# 1903 "ml/parser.mly" + ( [_1] ) +# 10338 "ml/parser.ml" + : 'str_extension_constructors)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'bar_extension_constructor_rebind) in + Obj.repr( +# 1904 "ml/parser.mly" + ( [_1] ) +# 10345 "ml/parser.ml" + : 'str_extension_constructors)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'str_extension_constructors) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'bar_extension_constructor_declaration) in + Obj.repr( +# 1906 "ml/parser.mly" + ( _2 :: _1 ) +# 10353 "ml/parser.ml" + : 'str_extension_constructors)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'str_extension_constructors) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'bar_extension_constructor_rebind) in + Obj.repr( +# 1908 "ml/parser.mly" + ( _2 :: _1 ) +# 10361 "ml/parser.ml" + : 'str_extension_constructors)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'extension_constructor_declaration) in + Obj.repr( +# 1911 "ml/parser.mly" + ( [_1] ) +# 10368 "ml/parser.ml" + : 'sig_extension_constructors)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'bar_extension_constructor_declaration) in + Obj.repr( +# 1912 "ml/parser.mly" + ( [_1] ) +# 10375 "ml/parser.ml" + : 'sig_extension_constructors)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'sig_extension_constructors) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'bar_extension_constructor_declaration) in + Obj.repr( +# 1914 "ml/parser.mly" + ( _2 :: _1 ) +# 10383 
"ml/parser.ml" + : 'sig_extension_constructors)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'constr_ident) in + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'generalized_constructor_arguments) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in + Obj.repr( +# 1918 "ml/parser.mly" + ( let args, res = _2 in + Te.decl (mkrhs _1 1) ~args ?res ~attrs:_3 + ~loc:(symbol_rloc()) ~info:(symbol_info ()) ) +# 10394 "ml/parser.ml" + : 'extension_constructor_declaration)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'constr_ident) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'generalized_constructor_arguments) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in + Obj.repr( +# 1924 "ml/parser.mly" + ( let args, res = _3 in + Te.decl (mkrhs _2 2) ~args ?res ~attrs:_4 + ~loc:(symbol_rloc()) ~info:(symbol_info ()) ) +# 10405 "ml/parser.ml" + : 'bar_extension_constructor_declaration)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 3 : 'constr_ident) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'constr_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in + Obj.repr( +# 1930 "ml/parser.mly" + ( Te.rebind (mkrhs _1 1) (mkrhs _3 3) ~attrs:_4 + ~loc:(symbol_rloc()) ~info:(symbol_info ()) ) +# 10415 "ml/parser.ml" + : 'extension_constructor_rebind)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'constr_ident) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'constr_longident) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in + Obj.repr( +# 1935 "ml/parser.mly" + ( Te.rebind (mkrhs _2 2) (mkrhs _4 4) ~attrs:_5 + ~loc:(symbol_rloc()) ~info:(symbol_info ()) ) +# 10425 "ml/parser.ml" + : 'bar_extension_constructor_rebind)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'with_constraint) in + Obj.repr( +# 1942 "ml/parser.mly" + ( [_1] ) +# 10432 "ml/parser.ml" + : 'with_constraints)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'with_constraints) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'with_constraint) in + Obj.repr( +# 1943 "ml/parser.mly" + ( _3 :: _1 ) +# 10440 "ml/parser.ml" + : 'with_constraints)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 4 : 'optional_type_parameters) in + let _3 = (Parsing.peek_val __caml_parser_env 3 : 'label_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 2 : 'with_type_binder) in + let _5 = (Parsing.peek_val __caml_parser_env 1 : 'core_type_no_attr) in + let _6 = (Parsing.peek_val __caml_parser_env 0 : 'constraints) in + Obj.repr( +# 1948 "ml/parser.mly" + ( Pwith_type + (mkrhs _3 3, + (Type.mk (mkrhs (Longident.last _3) 3) + ~params:_2 + ~cstrs:(List.rev _6) + ~manifest:_5 + ~priv:_4 + ~loc:(symbol_rloc()))) ) +# 10458 "ml/parser.ml" + : 'with_constraint)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'optional_type_parameters) in + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'label_longident) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'core_type_no_attr) in + Obj.repr( +# 1959 "ml/parser.mly" + ( Pwith_typesubst + (mkrhs _3 3, + (Type.mk (mkrhs (Longident.last _3) 3) + ~params:_2 + ~manifest:_5 + ~loc:(symbol_rloc()))) ) +# 10472 "ml/parser.ml" + : 'with_constraint)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'mod_longident) in + let 
_4 = (Parsing.peek_val __caml_parser_env 0 : 'mod_ext_longident) in + Obj.repr( +# 1966 "ml/parser.mly" + ( Pwith_module (mkrhs _2 2, mkrhs _4 4) ) +# 10480 "ml/parser.ml" + : 'with_constraint)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'mod_ext_longident) in + Obj.repr( +# 1968 "ml/parser.mly" + ( Pwith_modsubst (mkrhs _2 2, mkrhs _4 4) ) +# 10488 "ml/parser.ml" + : 'with_constraint)) +; (fun __caml_parser_env -> + Obj.repr( +# 1971 "ml/parser.mly" + ( Public ) +# 10494 "ml/parser.ml" + : 'with_type_binder)) +; (fun __caml_parser_env -> + Obj.repr( +# 1972 "ml/parser.mly" + ( Private ) +# 10500 "ml/parser.ml" + : 'with_type_binder)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'ident) in + Obj.repr( +# 1978 "ml/parser.mly" + ( [mkrhs _2 2] ) +# 10507 "ml/parser.ml" + : 'typevar_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'typevar_list) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'ident) in + Obj.repr( +# 1979 "ml/parser.mly" + ( mkrhs _3 3 :: _1 ) +# 10515 "ml/parser.ml" + : 'typevar_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in + Obj.repr( +# 1983 "ml/parser.mly" + ( _1 ) +# 10522 "ml/parser.ml" + : 'poly_type)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'typevar_list) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in + Obj.repr( +# 1985 "ml/parser.mly" + ( mktyp(Ptyp_poly(List.rev _1, _3)) ) +# 10530 "ml/parser.ml" + : 'poly_type)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'core_type_no_attr) in + Obj.repr( +# 1989 "ml/parser.mly" + ( _1 ) +# 10537 "ml/parser.ml" + : 'poly_type_no_attr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'typevar_list) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'core_type_no_attr) in + Obj.repr( +# 1991 "ml/parser.mly" + ( mktyp(Ptyp_poly(List.rev _1, _3)) ) +# 10545 "ml/parser.ml" + : 'poly_type_no_attr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'core_type_no_attr) in + Obj.repr( +# 1998 "ml/parser.mly" + ( _1 ) +# 10552 "ml/parser.ml" + : 'core_type)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'core_type) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'attribute) in + Obj.repr( +# 2000 "ml/parser.mly" + ( Typ.attr _1 _2 ) +# 10560 "ml/parser.ml" + : 'core_type)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'core_type2) in + Obj.repr( +# 2004 "ml/parser.mly" + ( _1 ) +# 10567 "ml/parser.ml" + : 'core_type_no_attr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 3 : 'core_type2) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'ident) in + Obj.repr( +# 2006 "ml/parser.mly" + ( mktyp(Ptyp_alias(_1, _4)) ) +# 10575 "ml/parser.ml" + : 'core_type_no_attr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'simple_core_type_or_tuple) in + Obj.repr( +# 2010 "ml/parser.mly" + ( _1 ) +# 10582 "ml/parser.ml" + : 'core_type2)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 4 : string) in + let _4 = (Parsing.peek_val __caml_parser_env 2 : 'core_type2) in + let _6 = (Parsing.peek_val __caml_parser_env 0 : 'core_type2) in + Obj.repr( +# 2012 "ml/parser.mly" + ( let param = 
extra_rhs_core_type _4 ~pos:4 in + mktyp (Ptyp_arrow(Optional _2 , param, _6)) ) +# 10592 "ml/parser.ml" + : 'core_type2)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'core_type2) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'core_type2) in + Obj.repr( +# 2015 "ml/parser.mly" + ( let param = extra_rhs_core_type _2 ~pos:2 in + mktyp(Ptyp_arrow(Optional _1 , param, _4)) + ) +# 10603 "ml/parser.ml" + : 'core_type2)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : string) in + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'core_type2) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'core_type2) in + Obj.repr( +# 2019 "ml/parser.mly" + ( let param = extra_rhs_core_type _3 ~pos:3 in + mktyp(Ptyp_arrow(Labelled _1, param, _5)) ) +# 10613 "ml/parser.ml" + : 'core_type2)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'core_type2) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'core_type2) in + Obj.repr( +# 2022 "ml/parser.mly" + ( let param = extra_rhs_core_type _1 ~pos:1 in + mktyp(Ptyp_arrow(Nolabel, param, _3)) ) +# 10622 "ml/parser.ml" + : 'core_type2)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'simple_core_type2) in + Obj.repr( +# 2028 "ml/parser.mly" + ( _1 ) +# 10629 "ml/parser.ml" + : 'simple_core_type)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'core_type_comma_list) in + Obj.repr( +# 2030 "ml/parser.mly" + ( match _2 with [sty] -> sty | _ -> raise Parse_error ) +# 10636 "ml/parser.ml" + : 'simple_core_type)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'ident) in + Obj.repr( +# 2035 "ml/parser.mly" + ( mktyp(Ptyp_var _2) ) +# 10643 "ml/parser.ml" + : 'simple_core_type2)) +; (fun __caml_parser_env -> + Obj.repr( +# 2037 "ml/parser.mly" + ( mktyp(Ptyp_any) ) +# 10649 "ml/parser.ml" + : 'simple_core_type2)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'type_longident) in + Obj.repr( +# 2039 "ml/parser.mly" + ( mktyp(Ptyp_constr(mkrhs _1 1, [])) ) +# 10656 "ml/parser.ml" + : 'simple_core_type2)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'simple_core_type2) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'type_longident) in + Obj.repr( +# 2041 "ml/parser.mly" + ( mktyp(Ptyp_constr(mkrhs _2 2, [_1])) ) +# 10664 "ml/parser.ml" + : 'simple_core_type2)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'core_type_comma_list) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'type_longident) in + Obj.repr( +# 2043 "ml/parser.mly" + ( mktyp(Ptyp_constr(mkrhs _4 4, List.rev _2)) ) +# 10672 "ml/parser.ml" + : 'simple_core_type2)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'meth_list) in + Obj.repr( +# 2045 "ml/parser.mly" + ( let (f, c) = _2 in mktyp(Ptyp_object (f, c)) ) +# 10679 "ml/parser.ml" + : 'simple_core_type2)) +; (fun __caml_parser_env -> + Obj.repr( +# 2047 "ml/parser.mly" + ( mktyp(Ptyp_object ([], Closed)) ) +# 10685 "ml/parser.ml" + : 'simple_core_type2)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'class_longident) in + Obj.repr( +# 2049 "ml/parser.mly" + ( mktyp(Ptyp_class(mkrhs _2 2, [])) ) +# 10692 "ml/parser.ml" + : 'simple_core_type2)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val 
__caml_parser_env 2 : 'simple_core_type2) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'class_longident) in + Obj.repr( +# 2051 "ml/parser.mly" + ( mktyp(Ptyp_class(mkrhs _3 3, [_1])) ) +# 10700 "ml/parser.ml" + : 'simple_core_type2)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'core_type_comma_list) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'class_longident) in + Obj.repr( +# 2053 "ml/parser.mly" + ( mktyp(Ptyp_class(mkrhs _5 5, List.rev _2)) ) +# 10708 "ml/parser.ml" + : 'simple_core_type2)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'tag_field) in + Obj.repr( +# 2055 "ml/parser.mly" + ( mktyp(Ptyp_variant([_2], Closed, None)) ) +# 10715 "ml/parser.ml" + : 'simple_core_type2)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'row_field_list) in + Obj.repr( +# 2061 "ml/parser.mly" + ( mktyp(Ptyp_variant(List.rev _3, Closed, None)) ) +# 10722 "ml/parser.ml" + : 'simple_core_type2)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'row_field) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'row_field_list) in + Obj.repr( +# 2063 "ml/parser.mly" + ( mktyp(Ptyp_variant(_2 :: List.rev _4, Closed, None)) ) +# 10730 "ml/parser.ml" + : 'simple_core_type2)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'opt_bar) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'row_field_list) in + Obj.repr( +# 2065 "ml/parser.mly" + ( mktyp(Ptyp_variant(List.rev _3, Open, None)) ) +# 10738 "ml/parser.ml" + : 'simple_core_type2)) +; (fun __caml_parser_env -> + Obj.repr( +# 2067 "ml/parser.mly" + ( mktyp(Ptyp_variant([], Open, None)) ) +# 10744 "ml/parser.ml" + : 'simple_core_type2)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'opt_bar) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'row_field_list) in + Obj.repr( +# 2069 "ml/parser.mly" + ( mktyp(Ptyp_variant(List.rev _3, Closed, Some [])) ) +# 10752 "ml/parser.ml" + : 'simple_core_type2)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 4 : 'opt_bar) in + let _3 = (Parsing.peek_val __caml_parser_env 3 : 'row_field_list) in + let _5 = (Parsing.peek_val __caml_parser_env 1 : 'name_tag_list) in + Obj.repr( +# 2071 "ml/parser.mly" + ( mktyp(Ptyp_variant(List.rev _3, Closed, Some (List.rev _5))) ) +# 10761 "ml/parser.ml" + : 'simple_core_type2)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'ext_attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'package_type) in + Obj.repr( +# 2073 "ml/parser.mly" + ( mktyp_attrs (Ptyp_package _4) _3 ) +# 10769 "ml/parser.ml" + : 'simple_core_type2)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'extension) in + Obj.repr( +# 2075 "ml/parser.mly" + ( mktyp (Ptyp_extension _1) ) +# 10776 "ml/parser.ml" + : 'simple_core_type2)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'module_type) in + Obj.repr( +# 2078 "ml/parser.mly" + ( package_type_of_module_type _1 ) +# 10783 "ml/parser.ml" + : 'package_type)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'row_field) in + Obj.repr( +# 2081 "ml/parser.mly" + ( [_1] ) +# 10790 "ml/parser.ml" + : 'row_field_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'row_field_list) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'row_field) in + 
Obj.repr( +# 2082 "ml/parser.mly" + ( _3 :: _1 ) +# 10798 "ml/parser.ml" + : 'row_field_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tag_field) in + Obj.repr( +# 2085 "ml/parser.mly" + ( _1 ) +# 10805 "ml/parser.ml" + : 'row_field)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'simple_core_type) in + Obj.repr( +# 2086 "ml/parser.mly" + ( Rinherit _1 ) +# 10812 "ml/parser.ml" + : 'row_field)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'name_tag) in + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'opt_ampersand) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'amper_type_list) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in + Obj.repr( +# 2090 "ml/parser.mly" + ( Rtag (mkrhs _1 1, add_info_attrs (symbol_info ()) _5, + _3, List.rev _4) ) +# 10823 "ml/parser.ml" + : 'tag_field)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'name_tag) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in + Obj.repr( +# 2093 "ml/parser.mly" + ( Rtag (mkrhs _1 1, add_info_attrs (symbol_info ()) _2, true, []) ) +# 10831 "ml/parser.ml" + : 'tag_field)) +; (fun __caml_parser_env -> + Obj.repr( +# 2096 "ml/parser.mly" + ( true ) +# 10837 "ml/parser.ml" + : 'opt_ampersand)) +; (fun __caml_parser_env -> + Obj.repr( +# 2097 "ml/parser.mly" + ( false ) +# 10843 "ml/parser.ml" + : 'opt_ampersand)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'core_type_no_attr) in + Obj.repr( +# 2100 "ml/parser.mly" + ( [_1] ) +# 10850 "ml/parser.ml" + : 'amper_type_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'amper_type_list) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'core_type_no_attr) in + Obj.repr( +# 2101 "ml/parser.mly" + ( _3 :: _1 ) +# 10858 "ml/parser.ml" + : 'amper_type_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'name_tag) in + Obj.repr( +# 2104 "ml/parser.mly" + ( [_1] ) +# 10865 "ml/parser.ml" + : 'name_tag_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'name_tag_list) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'name_tag) in + Obj.repr( +# 2105 "ml/parser.mly" + ( _2 :: _1 ) +# 10873 "ml/parser.ml" + : 'name_tag_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'simple_core_type) in + Obj.repr( +# 2108 "ml/parser.mly" + ( _1 ) +# 10880 "ml/parser.ml" + : 'simple_core_type_or_tuple)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'simple_core_type) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'core_type_list) in + Obj.repr( +# 2110 "ml/parser.mly" + ( mktyp(Ptyp_tuple(_1 :: List.rev _3)) ) +# 10888 "ml/parser.ml" + : 'simple_core_type_or_tuple)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in + Obj.repr( +# 2113 "ml/parser.mly" + ( [_1] ) +# 10895 "ml/parser.ml" + : 'core_type_comma_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'core_type_comma_list) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in + Obj.repr( +# 2114 "ml/parser.mly" + ( _3 :: _1 ) +# 10903 "ml/parser.ml" + : 'core_type_comma_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'simple_core_type) in + Obj.repr( +# 2117 "ml/parser.mly" + ( [_1] ) +# 10910 "ml/parser.ml" 
+ : 'core_type_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'core_type_list) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'simple_core_type) in + Obj.repr( +# 2118 "ml/parser.mly" + ( _3 :: _1 ) +# 10918 "ml/parser.ml" + : 'core_type_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'field_semi) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'meth_list) in + Obj.repr( +# 2121 "ml/parser.mly" + ( let (f, c) = _2 in (_1 :: f, c) ) +# 10926 "ml/parser.ml" + : 'meth_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'inherit_field_semi) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'meth_list) in + Obj.repr( +# 2122 "ml/parser.mly" + ( let (f, c) = _2 in (_1 :: f, c) ) +# 10934 "ml/parser.ml" + : 'meth_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'field_semi) in + Obj.repr( +# 2123 "ml/parser.mly" + ( [_1], Closed ) +# 10941 "ml/parser.ml" + : 'meth_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'field) in + Obj.repr( +# 2124 "ml/parser.mly" + ( [_1], Closed ) +# 10948 "ml/parser.ml" + : 'meth_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'inherit_field_semi) in + Obj.repr( +# 2125 "ml/parser.mly" + ( [_1], Closed ) +# 10955 "ml/parser.ml" + : 'meth_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'simple_core_type) in + Obj.repr( +# 2126 "ml/parser.mly" + ( [Oinherit _1], Closed ) +# 10962 "ml/parser.ml" + : 'meth_list)) +; (fun __caml_parser_env -> + Obj.repr( +# 2127 "ml/parser.mly" + ( [], Open ) +# 10968 "ml/parser.ml" + : 'meth_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 3 : 'label) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'poly_type_no_attr) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in + Obj.repr( +# 2131 "ml/parser.mly" + ( Otag (mkrhs _1 1, add_info_attrs (symbol_info ()) _4, _3) ) +# 10977 "ml/parser.ml" + : 'field)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 5 : 'label) in + let _3 = (Parsing.peek_val __caml_parser_env 3 : 'poly_type_no_attr) in + let _4 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in + let _6 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in + Obj.repr( +# 2136 "ml/parser.mly" + ( let info = + match rhs_info 4 with + | Some _ as info_before_semi -> info_before_semi + | None -> symbol_info () + in + ( Otag (mkrhs _1 1, add_info_attrs info (_4 @ _6), _3)) ) +# 10992 "ml/parser.ml" + : 'field_semi)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'simple_core_type) in + Obj.repr( +# 2145 "ml/parser.mly" + ( Oinherit _1 ) +# 10999 "ml/parser.ml" + : 'inherit_field_semi)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2148 "ml/parser.mly" + ( _1 ) +# 11006 "ml/parser.ml" + : 'label)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string * char option) in + Obj.repr( +# 2154 "ml/parser.mly" + ( let (n, m) = _1 in Pconst_integer (n, m) ) +# 11013 "ml/parser.ml" + : 'constant)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : char) in + Obj.repr( +# 2155 "ml/parser.mly" + ( Pconst_char (Char.code _1) ) +# 11020 "ml/parser.ml" + : 'constant)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val 
__caml_parser_env 0 : string * string option) in + Obj.repr( +# 2156 "ml/parser.mly" + ( let (s, d) = _1 in Pconst_string (s, d) ) +# 11027 "ml/parser.ml" + : 'constant)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string * char option) in + Obj.repr( +# 2157 "ml/parser.mly" + ( let (f, m) = _1 in Pconst_float (f, m) ) +# 11034 "ml/parser.ml" + : 'constant)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'constant) in + Obj.repr( +# 2160 "ml/parser.mly" + ( _1 ) +# 11041 "ml/parser.ml" + : 'signed_constant)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : string * char option) in + Obj.repr( +# 2161 "ml/parser.mly" + ( let (n, m) = _2 in Pconst_integer("-" ^ n, m) ) +# 11048 "ml/parser.ml" + : 'signed_constant)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : string * char option) in + Obj.repr( +# 2162 "ml/parser.mly" + ( let (f, m) = _2 in Pconst_float("-" ^ f, m) ) +# 11055 "ml/parser.ml" + : 'signed_constant)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : string * char option) in + Obj.repr( +# 2163 "ml/parser.mly" + ( let (n, m) = _2 in Pconst_integer (n, m) ) +# 11062 "ml/parser.ml" + : 'signed_constant)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : string * char option) in + Obj.repr( +# 2164 "ml/parser.mly" + ( let (f, m) = _2 in Pconst_float(f, m) ) +# 11069 "ml/parser.ml" + : 'signed_constant)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2170 "ml/parser.mly" + ( _1 ) +# 11076 "ml/parser.ml" + : 'ident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2171 "ml/parser.mly" + ( _1 ) +# 11083 "ml/parser.ml" + : 'ident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2174 "ml/parser.mly" + ( _1 ) +# 11090 "ml/parser.ml" + : 'val_ident)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'operator) in + Obj.repr( +# 2175 "ml/parser.mly" + ( _2 ) +# 11097 "ml/parser.ml" + : 'val_ident)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'operator) in + Obj.repr( +# 2176 "ml/parser.mly" + ( unclosed "(" 1 ")" 3 ) +# 11104 "ml/parser.ml" + : 'val_ident)) +; (fun __caml_parser_env -> + Obj.repr( +# 2177 "ml/parser.mly" + ( expecting 2 "operator" ) +# 11110 "ml/parser.ml" + : 'val_ident)) +; (fun __caml_parser_env -> + Obj.repr( +# 2178 "ml/parser.mly" + ( expecting 3 "module-expr" ) +# 11116 "ml/parser.ml" + : 'val_ident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2181 "ml/parser.mly" + ( _1 ) +# 11123 "ml/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2182 "ml/parser.mly" + ( _1 ) +# 11130 "ml/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2183 "ml/parser.mly" + ( _1 ) +# 11137 "ml/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2184 "ml/parser.mly" + ( _1 ) +# 11144 "ml/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2185 "ml/parser.mly" + ( _1 ) +# 11151 
"ml/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2186 "ml/parser.mly" + ( _1 ) +# 11158 "ml/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : string) in + Obj.repr( +# 2187 "ml/parser.mly" + ( "."^ _1 ^"()" ) +# 11165 "ml/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 3 : string) in + Obj.repr( +# 2188 "ml/parser.mly" + ( "."^ _1 ^ "()<-" ) +# 11172 "ml/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : string) in + Obj.repr( +# 2189 "ml/parser.mly" + ( "."^ _1 ^"[]" ) +# 11179 "ml/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 3 : string) in + Obj.repr( +# 2190 "ml/parser.mly" + ( "."^ _1 ^ "[]<-" ) +# 11186 "ml/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : string) in + Obj.repr( +# 2191 "ml/parser.mly" + ( "."^ _1 ^"{}" ) +# 11193 "ml/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 3 : string) in + Obj.repr( +# 2192 "ml/parser.mly" + ( "."^ _1 ^ "{}<-" ) +# 11200 "ml/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2193 "ml/parser.mly" + ( _1 ) +# 11207 "ml/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + Obj.repr( +# 2194 "ml/parser.mly" + ( "!" ) +# 11213 "ml/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + Obj.repr( +# 2195 "ml/parser.mly" + ( "+" ) +# 11219 "ml/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + Obj.repr( +# 2196 "ml/parser.mly" + ( "+." ) +# 11225 "ml/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + Obj.repr( +# 2197 "ml/parser.mly" + ( "-" ) +# 11231 "ml/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + Obj.repr( +# 2198 "ml/parser.mly" + ( "-." 
) +# 11237 "ml/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + Obj.repr( +# 2199 "ml/parser.mly" + ( "*" ) +# 11243 "ml/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + Obj.repr( +# 2200 "ml/parser.mly" + ( "=" ) +# 11249 "ml/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + Obj.repr( +# 2201 "ml/parser.mly" + ( "<" ) +# 11255 "ml/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + Obj.repr( +# 2202 "ml/parser.mly" + ( ">" ) +# 11261 "ml/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + Obj.repr( +# 2203 "ml/parser.mly" + ( "or" ) +# 11267 "ml/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + Obj.repr( +# 2204 "ml/parser.mly" + ( "||" ) +# 11273 "ml/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + Obj.repr( +# 2205 "ml/parser.mly" + ( "&" ) +# 11279 "ml/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + Obj.repr( +# 2206 "ml/parser.mly" + ( "&&" ) +# 11285 "ml/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + Obj.repr( +# 2207 "ml/parser.mly" + ( ":=" ) +# 11291 "ml/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + Obj.repr( +# 2208 "ml/parser.mly" + ( "+=" ) +# 11297 "ml/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + Obj.repr( +# 2209 "ml/parser.mly" + ( "%" ) +# 11303 "ml/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2212 "ml/parser.mly" + ( _1 ) +# 11310 "ml/parser.ml" + : 'constr_ident)) +; (fun __caml_parser_env -> + Obj.repr( +# 2213 "ml/parser.mly" + ( "[]" ) +# 11316 "ml/parser.ml" + : 'constr_ident)) +; (fun __caml_parser_env -> + Obj.repr( +# 2214 "ml/parser.mly" + ( "()" ) +# 11322 "ml/parser.ml" + : 'constr_ident)) +; (fun __caml_parser_env -> + Obj.repr( +# 2215 "ml/parser.mly" + ( "::" ) +# 11328 "ml/parser.ml" + : 'constr_ident)) +; (fun __caml_parser_env -> + Obj.repr( +# 2216 "ml/parser.mly" + ( "false" ) +# 11334 "ml/parser.ml" + : 'constr_ident)) +; (fun __caml_parser_env -> + Obj.repr( +# 2217 "ml/parser.mly" + ( "true" ) +# 11340 "ml/parser.ml" + : 'constr_ident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'val_ident) in + Obj.repr( +# 2221 "ml/parser.mly" + ( Lident _1 ) +# 11347 "ml/parser.ml" + : 'val_longident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'mod_longident) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'val_ident) in + Obj.repr( +# 2222 "ml/parser.mly" + ( Ldot(_1, _3) ) +# 11355 "ml/parser.ml" + : 'val_longident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'mod_longident) in + Obj.repr( +# 2225 "ml/parser.mly" + ( _1 ) +# 11362 "ml/parser.ml" + : 'constr_longident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in + Obj.repr( +# 2226 "ml/parser.mly" + ( Ldot(_1,"::") ) +# 11369 "ml/parser.ml" + : 'constr_longident)) +; (fun __caml_parser_env -> + Obj.repr( +# 2227 "ml/parser.mly" + ( Lident "[]" ) +# 11375 "ml/parser.ml" + : 'constr_longident)) +; (fun __caml_parser_env -> + Obj.repr( +# 2228 "ml/parser.mly" + ( Lident "()" ) +# 11381 "ml/parser.ml" + : 'constr_longident)) +; (fun __caml_parser_env -> + Obj.repr( +# 2229 "ml/parser.mly" + ( Lident "::" ) +# 11387 "ml/parser.ml" + : 'constr_longident)) +; (fun __caml_parser_env -> + Obj.repr( +# 2230 "ml/parser.mly" + ( Lident "false" ) +# 11393 "ml/parser.ml" + : 'constr_longident)) +; (fun __caml_parser_env -> + Obj.repr( +# 2231 "ml/parser.mly" + ( Lident 
"true" ) +# 11399 "ml/parser.ml" + : 'constr_longident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2234 "ml/parser.mly" + ( Lident _1 ) +# 11406 "ml/parser.ml" + : 'label_longident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'mod_longident) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2235 "ml/parser.mly" + ( Ldot(_1, _3) ) +# 11414 "ml/parser.ml" + : 'label_longident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2238 "ml/parser.mly" + ( Lident _1 ) +# 11421 "ml/parser.ml" + : 'type_longident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'mod_ext_longident) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2239 "ml/parser.mly" + ( Ldot(_1, _3) ) +# 11429 "ml/parser.ml" + : 'type_longident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2242 "ml/parser.mly" + ( Lident _1 ) +# 11436 "ml/parser.ml" + : 'mod_longident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'mod_longident) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2243 "ml/parser.mly" + ( Ldot(_1, _3) ) +# 11444 "ml/parser.ml" + : 'mod_longident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2246 "ml/parser.mly" + ( Lident _1 ) +# 11451 "ml/parser.ml" + : 'mod_ext_longident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'mod_ext_longident) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2247 "ml/parser.mly" + ( Ldot(_1, _3) ) +# 11459 "ml/parser.ml" + : 'mod_ext_longident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 3 : 'mod_ext_longident) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'mod_ext_longident) in + Obj.repr( +# 2248 "ml/parser.mly" + ( lapply _1 _3 ) +# 11467 "ml/parser.ml" + : 'mod_ext_longident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'ident) in + Obj.repr( +# 2251 "ml/parser.mly" + ( Lident _1 ) +# 11474 "ml/parser.ml" + : 'mty_longident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'mod_ext_longident) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'ident) in + Obj.repr( +# 2252 "ml/parser.mly" + ( Ldot(_1, _3) ) +# 11482 "ml/parser.ml" + : 'mty_longident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2255 "ml/parser.mly" + ( Lident _1 ) +# 11489 "ml/parser.ml" + : 'clty_longident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'mod_ext_longident) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2256 "ml/parser.mly" + ( Ldot(_1, _3) ) +# 11497 "ml/parser.ml" + : 'clty_longident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2259 "ml/parser.mly" + ( Lident _1 ) +# 11504 "ml/parser.ml" + : 'class_longident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'mod_longident) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2260 "ml/parser.mly" + ( Ldot(_1, _3) ) +# 11512 "ml/parser.ml" + : 'class_longident)) +; (fun __caml_parser_env -> + let _2 
= (Parsing.peek_val __caml_parser_env 0 : 'ident) in + Obj.repr( +# 2269 "ml/parser.mly" + ( _2 ) +# 11519 "ml/parser.ml" + : 'name_tag)) +; (fun __caml_parser_env -> + Obj.repr( +# 2272 "ml/parser.mly" + ( Nonrecursive ) +# 11525 "ml/parser.ml" + : 'rec_flag)) +; (fun __caml_parser_env -> + Obj.repr( +# 2273 "ml/parser.mly" + ( Recursive ) +# 11531 "ml/parser.ml" + : 'rec_flag)) +; (fun __caml_parser_env -> + Obj.repr( +# 2276 "ml/parser.mly" + ( Recursive ) +# 11537 "ml/parser.ml" + : 'nonrec_flag)) +; (fun __caml_parser_env -> + Obj.repr( +# 2277 "ml/parser.mly" + ( Nonrecursive ) +# 11543 "ml/parser.ml" + : 'nonrec_flag)) +; (fun __caml_parser_env -> + Obj.repr( +# 2280 "ml/parser.mly" + ( Upto ) +# 11549 "ml/parser.ml" + : 'direction_flag)) +; (fun __caml_parser_env -> + Obj.repr( +# 2281 "ml/parser.mly" + ( Downto ) +# 11555 "ml/parser.ml" + : 'direction_flag)) +; (fun __caml_parser_env -> + Obj.repr( +# 2284 "ml/parser.mly" + ( Public ) +# 11561 "ml/parser.ml" + : 'private_flag)) +; (fun __caml_parser_env -> + Obj.repr( +# 2285 "ml/parser.mly" + ( Private ) +# 11567 "ml/parser.ml" + : 'private_flag)) +; (fun __caml_parser_env -> + Obj.repr( +# 2288 "ml/parser.mly" + ( Immutable ) +# 11573 "ml/parser.ml" + : 'mutable_flag)) +; (fun __caml_parser_env -> + Obj.repr( +# 2289 "ml/parser.mly" + ( Mutable ) +# 11579 "ml/parser.ml" + : 'mutable_flag)) +; (fun __caml_parser_env -> + Obj.repr( +# 2292 "ml/parser.mly" + ( Concrete ) +# 11585 "ml/parser.ml" + : 'virtual_flag)) +; (fun __caml_parser_env -> + Obj.repr( +# 2293 "ml/parser.mly" + ( Virtual ) +# 11591 "ml/parser.ml" + : 'virtual_flag)) +; (fun __caml_parser_env -> + Obj.repr( +# 2296 "ml/parser.mly" + ( Public, Concrete ) +# 11597 "ml/parser.ml" + : 'private_virtual_flags)) +; (fun __caml_parser_env -> + Obj.repr( +# 2297 "ml/parser.mly" + ( Private, Concrete ) +# 11603 "ml/parser.ml" + : 'private_virtual_flags)) +; (fun __caml_parser_env -> + Obj.repr( +# 2298 "ml/parser.mly" + ( Public, Virtual ) +# 11609 "ml/parser.ml" + : 'private_virtual_flags)) +; (fun __caml_parser_env -> + Obj.repr( +# 2299 "ml/parser.mly" + ( Private, Virtual ) +# 11615 "ml/parser.ml" + : 'private_virtual_flags)) +; (fun __caml_parser_env -> + Obj.repr( +# 2300 "ml/parser.mly" + ( Private, Virtual ) +# 11621 "ml/parser.ml" + : 'private_virtual_flags)) +; (fun __caml_parser_env -> + Obj.repr( +# 2303 "ml/parser.mly" + ( Fresh ) +# 11627 "ml/parser.ml" + : 'override_flag)) +; (fun __caml_parser_env -> + Obj.repr( +# 2304 "ml/parser.mly" + ( Override ) +# 11633 "ml/parser.ml" + : 'override_flag)) +; (fun __caml_parser_env -> + Obj.repr( +# 2307 "ml/parser.mly" + ( () ) +# 11639 "ml/parser.ml" + : 'opt_bar)) +; (fun __caml_parser_env -> + Obj.repr( +# 2308 "ml/parser.mly" + ( () ) +# 11645 "ml/parser.ml" + : 'opt_bar)) +; (fun __caml_parser_env -> + Obj.repr( +# 2311 "ml/parser.mly" + ( () ) +# 11651 "ml/parser.ml" + : 'opt_semi)) +; (fun __caml_parser_env -> + Obj.repr( +# 2312 "ml/parser.mly" + ( () ) +# 11657 "ml/parser.ml" + : 'opt_semi)) +; (fun __caml_parser_env -> + Obj.repr( +# 2315 "ml/parser.mly" + ( "-" ) +# 11663 "ml/parser.ml" + : 'subtractive)) +; (fun __caml_parser_env -> + Obj.repr( +# 2316 "ml/parser.mly" + ( "-." ) +# 11669 "ml/parser.ml" + : 'subtractive)) +; (fun __caml_parser_env -> + Obj.repr( +# 2319 "ml/parser.mly" + ( "+" ) +# 11675 "ml/parser.ml" + : 'additive)) +; (fun __caml_parser_env -> + Obj.repr( +# 2320 "ml/parser.mly" + ( "+." 
) +# 11681 "ml/parser.ml" + : 'additive)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2326 "ml/parser.mly" + ( _1 ) +# 11688 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2327 "ml/parser.mly" + ( _1 ) +# 11695 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2328 "ml/parser.mly" + ( "and" ) +# 11701 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2329 "ml/parser.mly" + ( "as" ) +# 11707 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2330 "ml/parser.mly" + ( "assert" ) +# 11713 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2331 "ml/parser.mly" + ( "begin" ) +# 11719 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2332 "ml/parser.mly" + ( "class" ) +# 11725 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2333 "ml/parser.mly" + ( "constraint" ) +# 11731 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2334 "ml/parser.mly" + ( "do" ) +# 11737 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2335 "ml/parser.mly" + ( "done" ) +# 11743 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2336 "ml/parser.mly" + ( "downto" ) +# 11749 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2337 "ml/parser.mly" + ( "else" ) +# 11755 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2338 "ml/parser.mly" + ( "end" ) +# 11761 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2339 "ml/parser.mly" + ( "exception" ) +# 11767 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2340 "ml/parser.mly" + ( "external" ) +# 11773 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2341 "ml/parser.mly" + ( "false" ) +# 11779 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2342 "ml/parser.mly" + ( "for" ) +# 11785 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2343 "ml/parser.mly" + ( "fun" ) +# 11791 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2344 "ml/parser.mly" + ( "function" ) +# 11797 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2345 "ml/parser.mly" + ( "functor" ) +# 11803 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2346 "ml/parser.mly" + ( "if" ) +# 11809 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2347 "ml/parser.mly" + ( "in" ) +# 11815 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2348 "ml/parser.mly" + ( "include" ) +# 11821 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2349 "ml/parser.mly" + ( "inherit" ) +# 11827 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2350 "ml/parser.mly" + ( "initializer" ) +# 11833 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2351 "ml/parser.mly" + ( "lazy" ) +# 11839 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2352 "ml/parser.mly" + ( "let" ) +# 11845 "ml/parser.ml" + : 'single_attr_id)) +; (fun 
__caml_parser_env -> + Obj.repr( +# 2353 "ml/parser.mly" + ( "match" ) +# 11851 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2354 "ml/parser.mly" + ( "method" ) +# 11857 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2355 "ml/parser.mly" + ( "module" ) +# 11863 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2356 "ml/parser.mly" + ( "mutable" ) +# 11869 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2357 "ml/parser.mly" + ( "new" ) +# 11875 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2358 "ml/parser.mly" + ( "nonrec" ) +# 11881 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2359 "ml/parser.mly" + ( "object" ) +# 11887 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2360 "ml/parser.mly" + ( "of" ) +# 11893 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2361 "ml/parser.mly" + ( "open" ) +# 11899 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2362 "ml/parser.mly" + ( "or" ) +# 11905 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2363 "ml/parser.mly" + ( "private" ) +# 11911 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2364 "ml/parser.mly" + ( "rec" ) +# 11917 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2365 "ml/parser.mly" + ( "sig" ) +# 11923 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2366 "ml/parser.mly" + ( "struct" ) +# 11929 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2367 "ml/parser.mly" + ( "then" ) +# 11935 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2368 "ml/parser.mly" + ( "to" ) +# 11941 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2369 "ml/parser.mly" + ( "true" ) +# 11947 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2370 "ml/parser.mly" + ( "try" ) +# 11953 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2371 "ml/parser.mly" + ( "type" ) +# 11959 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2372 "ml/parser.mly" + ( "val" ) +# 11965 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2373 "ml/parser.mly" + ( "virtual" ) +# 11971 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2374 "ml/parser.mly" + ( "when" ) +# 11977 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2375 "ml/parser.mly" + ( "while" ) +# 11983 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2376 "ml/parser.mly" + ( "with" ) +# 11989 "ml/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'single_attr_id) in + Obj.repr( +# 2381 "ml/parser.mly" + ( mkloc _1 (symbol_rloc()) ) +# 11996 "ml/parser.ml" + : 'attr_id)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'single_attr_id) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'attr_id) in + Obj.repr( +# 2382 "ml/parser.mly" + ( mkloc (_1 ^ "." 
^ _3.txt) (symbol_rloc())) +# 12004 "ml/parser.ml" + : 'attr_id)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attr_id) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'payload) in + Obj.repr( +# 2385 "ml/parser.mly" + ( (_2, _3) ) +# 12012 "ml/parser.ml" + : 'attribute)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attr_id) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'payload) in + Obj.repr( +# 2388 "ml/parser.mly" + ( (_2, _3) ) +# 12020 "ml/parser.ml" + : 'post_item_attribute)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attr_id) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'payload) in + Obj.repr( +# 2391 "ml/parser.mly" + ( (_2, _3) ) +# 12028 "ml/parser.ml" + : 'floating_attribute)) +; (fun __caml_parser_env -> + Obj.repr( +# 2394 "ml/parser.mly" + ( [] ) +# 12034 "ml/parser.ml" + : 'post_item_attributes)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'post_item_attribute) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 2395 "ml/parser.mly" + ( _1 :: _2 ) +# 12042 "ml/parser.ml" + : 'post_item_attributes)) +; (fun __caml_parser_env -> + Obj.repr( +# 2398 "ml/parser.mly" + ( [] ) +# 12048 "ml/parser.ml" + : 'attributes)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'attribute) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in + Obj.repr( +# 2399 "ml/parser.mly" + ( _1 :: _2 ) +# 12056 "ml/parser.ml" + : 'attributes)) +; (fun __caml_parser_env -> + Obj.repr( +# 2402 "ml/parser.mly" + ( None, [] ) +# 12062 "ml/parser.ml" + : 'ext_attributes)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'attribute) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in + Obj.repr( +# 2403 "ml/parser.mly" + ( None, _1 :: _2 ) +# 12070 "ml/parser.ml" + : 'ext_attributes)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'attr_id) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in + Obj.repr( +# 2404 "ml/parser.mly" + ( Some _2, _3 ) +# 12078 "ml/parser.ml" + : 'ext_attributes)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attr_id) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'payload) in + Obj.repr( +# 2407 "ml/parser.mly" + ( (_2, _3) ) +# 12086 "ml/parser.ml" + : 'extension)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attr_id) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'payload) in + Obj.repr( +# 2410 "ml/parser.mly" + ( (_2, _3) ) +# 12094 "ml/parser.ml" + : 'item_extension)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'structure) in + Obj.repr( +# 2413 "ml/parser.mly" + ( PStr _1 ) +# 12101 "ml/parser.ml" + : 'payload)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'signature) in + Obj.repr( +# 2414 "ml/parser.mly" + ( PSig _2 ) +# 12108 "ml/parser.ml" + : 'payload)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in + Obj.repr( +# 2415 "ml/parser.mly" + ( PTyp _2 ) +# 12115 "ml/parser.ml" + : 'payload)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in + Obj.repr( +# 2416 "ml/parser.mly" + ( PPat (_2, None) ) +# 12122 "ml/parser.ml" + : 'payload)) +; (fun __caml_parser_env -> + let _2 = 
(Parsing.peek_val __caml_parser_env 2 : 'pattern) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 2417 "ml/parser.mly" + ( PPat (_2, Some _4) ) +# 12130 "ml/parser.ml" + : 'payload)) +(* Entry implementation *) +; (fun __caml_parser_env -> raise (Parsing.YYexit (Parsing.peek_val __caml_parser_env 0))) +(* Entry interface *) +; (fun __caml_parser_env -> raise (Parsing.YYexit (Parsing.peek_val __caml_parser_env 0))) +(* Entry parse_core_type *) +; (fun __caml_parser_env -> raise (Parsing.YYexit (Parsing.peek_val __caml_parser_env 0))) +(* Entry parse_expression *) +; (fun __caml_parser_env -> raise (Parsing.YYexit (Parsing.peek_val __caml_parser_env 0))) +(* Entry parse_pattern *) +; (fun __caml_parser_env -> raise (Parsing.YYexit (Parsing.peek_val __caml_parser_env 0))) +|] +let yytables = + { Parsing.actions=yyact; + Parsing.transl_const=yytransl_const; + Parsing.transl_block=yytransl_block; + Parsing.lhs=yylhs; + Parsing.len=yylen; + Parsing.defred=yydefred; + Parsing.dgoto=yydgoto; + Parsing.sindex=yysindex; + Parsing.rindex=yyrindex; + Parsing.gindex=yygindex; + Parsing.tablesize=yytablesize; + Parsing.table=yytable; + Parsing.check=yycheck; + Parsing.error_function=parse_error; + Parsing.names_const=yynames_const; + Parsing.names_block=yynames_block } + +let implementation (lexfun : Lexing.lexbuf -> token) (lexbuf : Lexing.lexbuf) = + (Parsing.yyparse yytables 1 lexfun lexbuf : Parsetree.structure) +let interface (lexfun : Lexing.lexbuf -> token) (lexbuf : Lexing.lexbuf) = + (Parsing.yyparse yytables 2 lexfun lexbuf : Parsetree.signature) +let toplevel_phrase (lexfun : Lexing.lexbuf -> token) (lexbuf : Lexing.lexbuf) = + (Parsing.yyparse yytables 3 lexfun lexbuf : Parsetree.toplevel_phrase) +let use_file (lexfun : Lexing.lexbuf -> token) (lexbuf : Lexing.lexbuf) = + (Parsing.yyparse yytables 4 lexfun lexbuf : Parsetree.toplevel_phrase list) +let parse_core_type (lexfun : Lexing.lexbuf -> token) (lexbuf : Lexing.lexbuf) = + (Parsing.yyparse yytables 5 lexfun lexbuf : Parsetree.core_type) +let parse_expression (lexfun : Lexing.lexbuf -> token) (lexbuf : Lexing.lexbuf) = + (Parsing.yyparse yytables 6 lexfun lexbuf : Parsetree.expression) +let parse_pattern (lexfun : Lexing.lexbuf -> token) (lexbuf : Lexing.lexbuf) = + (Parsing.yyparse yytables 7 lexfun lexbuf : Parsetree.pattern) +;; diff --git a/res_syntax/compiler-libs-406/parser.mli b/res_syntax/compiler-libs-406/parser.mli new file mode 100644 index 0000000000..444edf0314 --- /dev/null +++ b/res_syntax/compiler-libs-406/parser.mli @@ -0,0 +1,135 @@ +type token = + | AMPERAMPER + | AMPERSAND + | AND + | AS + | ASSERT + | BACKQUOTE + | BANG + | BAR + | BARBAR + | BARRBRACKET + | BEGIN + | CHAR of (char) + | CLASS + | COLON + | COLONCOLON + | COLONEQUAL + | COLONGREATER + | COMMA + | CONSTRAINT + | DO + | DONE + | DOT + | DOTDOT + | DOWNTO + | ELSE + | END + | EOF + | EQUAL + | EXCEPTION + | EXTERNAL + | FALSE + | FLOAT of (string * char option) + | FOR + | FUN + | FUNCTION + | FUNCTOR + | GREATER + | GREATERRBRACE + | GREATERRBRACKET + | IF + | IN + | INCLUDE + | INFIXOP0 of (string) + | INFIXOP1 of (string) + | INFIXOP2 of (string) + | INFIXOP3 of (string) + | INFIXOP4 of (string) + | DOTOP of (string) + | INHERIT + | INITIALIZER + | INT of (string * char option) + | LABEL of (string) + | LAZY + | LBRACE + | LBRACELESS + | LBRACKET + | LBRACKETBAR + | LBRACKETLESS + | LBRACKETGREATER + | LBRACKETPERCENT + | LBRACKETPERCENTPERCENT + | LESS + | LESSMINUS + | LET + | LIDENT of (string) + | LPAREN + | 
LBRACKETAT + | LBRACKETATAT + | LBRACKETATATAT + | MATCH + | METHOD + | MINUS + | MINUSDOT + | MINUSGREATER + | MODULE + | MUTABLE + | NEW + | NONREC + | OBJECT + | OF + | OPEN + | OPTLABEL of (string) + | OR + | PERCENT + | PLUS + | PLUSDOT + | PLUSEQ + | PREFIXOP of (string) + | PRIVATE + | QUESTION + | QUOTE + | RBRACE + | RBRACKET + | REC + | RPAREN + | SEMI + | SEMISEMI + | HASH + | HASHOP of (string) + | SIG + | STAR + | STRING of (string * string option) + | STRUCT + | THEN + | TILDE + | TO + | TRUE + | TRY + | TYPE + | UIDENT of (string) + | UNDERSCORE + | VAL + | VIRTUAL + | WHEN + | WHILE + | WITH + | COMMENT of (string * Location.t) + | DOCSTRING of (Docstrings.docstring) + | EOL + +val implementation : + (Lexing.lexbuf -> token) -> Lexing.lexbuf -> Parsetree.structure +val interface : + (Lexing.lexbuf -> token) -> Lexing.lexbuf -> Parsetree.signature +val toplevel_phrase : + (Lexing.lexbuf -> token) -> Lexing.lexbuf -> Parsetree.toplevel_phrase +val use_file : + (Lexing.lexbuf -> token) -> Lexing.lexbuf -> Parsetree.toplevel_phrase list +val parse_core_type : + (Lexing.lexbuf -> token) -> Lexing.lexbuf -> Parsetree.core_type +val parse_expression : + (Lexing.lexbuf -> token) -> Lexing.lexbuf -> Parsetree.expression +val parse_pattern : + (Lexing.lexbuf -> token) -> Lexing.lexbuf -> Parsetree.pattern diff --git a/res_syntax/compiler-libs-406/parsetree.mli b/res_syntax/compiler-libs-406/parsetree.mli new file mode 100644 index 0000000000..9aad889539 --- /dev/null +++ b/res_syntax/compiler-libs-406/parsetree.mli @@ -0,0 +1,875 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Abstract syntax tree produced by parsing *) + +open Asttypes + +type constant = + Pconst_integer of string * char option + (* 3 3l 3L 3n + + Suffixes [g-z][G-Z] are accepted by the parser. + Suffixes except 'l', 'L' and 'n' are rejected by the typechecker + *) + | Pconst_char of int + (* 'c' *) + | Pconst_string of string * string option + (* "constant" + {delim|other constant|delim} + *) + | Pconst_float of string * char option + (* 3.4 2e5 1.4e-4 + + Suffixes [g-z][G-Z] are accepted by the parser. + Suffixes are rejected by the typechecker. + *) + +(** {1 Extension points} *) + +type attribute = string loc * payload + (* [@id ARG] + [@@id ARG] + + Metadata containers passed around within the AST. + The compiler ignores unknown attributes. + *) + +and extension = string loc * payload + (* [%id ARG] + [%%id ARG] + + Sub-language placeholder -- rejected by the typechecker. + *) + +and attributes = attribute list + +and payload = + | PStr of structure + | PSig of signature (* : SIG *) + | PTyp of core_type (* : T *) + | PPat of pattern * expression option (* ? P or ? P when E *) + +(** {1 Core language} *) + +(* Type expressions *) + +and core_type = + { + ptyp_desc: core_type_desc; + ptyp_loc: Location.t; + ptyp_attributes: attributes; (* ... 
[@id1] [@id2] *) + } + +and core_type_desc = + | Ptyp_any + (* _ *) + | Ptyp_var of string + (* 'a *) + | Ptyp_arrow of arg_label * core_type * core_type + (* T1 -> T2 Simple + ~l:T1 -> T2 Labelled + ?l:T1 -> T2 Optional + *) + | Ptyp_tuple of core_type list + (* T1 * ... * Tn + + Invariant: n >= 2 + *) + | Ptyp_constr of Longident.t loc * core_type list + (* tconstr + T tconstr + (T1, ..., Tn) tconstr + *) + | Ptyp_object of object_field list * closed_flag + (* < l1:T1; ...; ln:Tn > (flag = Closed) + < l1:T1; ...; ln:Tn; .. > (flag = Open) + *) + | Ptyp_class of Longident.t loc * core_type list + (* #tconstr + T #tconstr + (T1, ..., Tn) #tconstr + *) + | Ptyp_alias of core_type * string + (* T as 'a *) + | Ptyp_variant of row_field list * closed_flag * label list option + (* [ `A|`B ] (flag = Closed; labels = None) + [> `A|`B ] (flag = Open; labels = None) + [< `A|`B ] (flag = Closed; labels = Some []) + [< `A|`B > `X `Y ](flag = Closed; labels = Some ["X";"Y"]) + *) + | Ptyp_poly of string loc list * core_type + (* 'a1 ... 'an. T + + Can only appear in the following context: + + - As the core_type of a Ppat_constraint node corresponding + to a constraint on a let-binding: let x : 'a1 ... 'an. T + = e ... + + - Under Cfk_virtual for methods (not values). + + - As the core_type of a Pctf_method node. + + - As the core_type of a Pexp_poly node. + + - As the pld_type field of a label_declaration. + + - As a core_type of a Ptyp_object node. + *) + + | Ptyp_package of package_type + (* (module S) *) + | Ptyp_extension of extension + (* [%id] *) + +and package_type = Longident.t loc * (Longident.t loc * core_type) list + (* + (module S) + (module S with type t1 = T1 and ... and tn = Tn) + *) + +and row_field = + | Rtag of label loc * attributes * bool * core_type list + (* [`A] ( true, [] ) + [`A of T] ( false, [T] ) + [`A of T1 & .. & Tn] ( false, [T1;...Tn] ) + [`A of & T1 & .. & Tn] ( true, [T1;...Tn] ) + + - The 2nd field is true if the tag contains a + constant (empty) constructor. + - '&' occurs when several types are used for the same constructor + (see 4.2 in the manual) + + - TODO: switch to a record representation, and keep location + *) + | Rinherit of core_type + (* [ T ] *) + +and object_field = + | Otag of label loc * attributes * core_type + | Oinherit of core_type + +(* Patterns *) + +and pattern = + { + ppat_desc: pattern_desc; + ppat_loc: Location.t; + ppat_attributes: attributes; (* ... [@id1] [@id2] *) + } + +and pattern_desc = + | Ppat_any + (* _ *) + | Ppat_var of string loc + (* x *) + | Ppat_alias of pattern * string loc + (* P as 'a *) + | Ppat_constant of constant + (* 1, 'a', "true", 1.0, 1l, 1L, 1n *) + | Ppat_interval of constant * constant + (* 'a'..'z' + + Other forms of interval are recognized by the parser + but rejected by the type-checker. 
*) + | Ppat_tuple of pattern list + (* (P1, ..., Pn) + + Invariant: n >= 2 + *) + | Ppat_construct of Longident.t loc * pattern option + (* C None + C P Some P + C (P1, ..., Pn) Some (Ppat_tuple [P1; ...; Pn]) + *) + | Ppat_variant of label * pattern option + (* `A (None) + `A P (Some P) + *) + | Ppat_record of (Longident.t loc * pattern) list * closed_flag + (* { l1=P1; ...; ln=Pn } (flag = Closed) + { l1=P1; ...; ln=Pn; _} (flag = Open) + + Invariant: n > 0 + *) + | Ppat_array of pattern list + (* [| P1; ...; Pn |] *) + | Ppat_or of pattern * pattern + (* P1 | P2 *) + | Ppat_constraint of pattern * core_type + (* (P : T) *) + | Ppat_type of Longident.t loc + (* #tconst *) + | Ppat_lazy of pattern + (* lazy P *) + | Ppat_unpack of string loc + (* (module P) + Note: (module P : S) is represented as + Ppat_constraint(Ppat_unpack, Ptyp_package) + *) + | Ppat_exception of pattern + (* exception P *) + | Ppat_extension of extension + (* [%id] *) + | Ppat_open of Longident.t loc * pattern + (* M.(P) *) + +(* Value expressions *) + +and expression = + { + pexp_desc: expression_desc; + pexp_loc: Location.t; + pexp_attributes: attributes; (* ... [@id1] [@id2] *) + } + +and expression_desc = + | Pexp_ident of Longident.t loc + (* x + M.x + *) + | Pexp_constant of constant + (* 1, 'a', "true", 1.0, 1l, 1L, 1n *) + | Pexp_let of rec_flag * value_binding list * expression + (* let P1 = E1 and ... and Pn = EN in E (flag = Nonrecursive) + let rec P1 = E1 and ... and Pn = EN in E (flag = Recursive) + *) + | Pexp_function of case list + (* function P1 -> E1 | ... | Pn -> En *) + | Pexp_fun of arg_label * expression option * pattern * expression + (* fun P -> E1 (Simple, None) + fun ~l:P -> E1 (Labelled l, None) + fun ?l:P -> E1 (Optional l, None) + fun ?l:(P = E0) -> E1 (Optional l, Some E0) + + Notes: + - If E0 is provided, only Optional is allowed. + - "fun P1 P2 .. Pn -> E1" is represented as nested Pexp_fun. + - "let f P = E" is represented using Pexp_fun. + *) + | Pexp_apply of expression * (arg_label * expression) list + (* E0 ~l1:E1 ... ~ln:En + li can be empty (non labeled argument) or start with '?' + (optional argument). + + Invariant: n > 0 + *) + | Pexp_match of expression * case list + (* match E0 with P1 -> E1 | ... | Pn -> En *) + | Pexp_try of expression * case list + (* try E0 with P1 -> E1 | ... 
| Pn -> En *) + | Pexp_tuple of expression list + (* (E1, ..., En) + + Invariant: n >= 2 + *) + | Pexp_construct of Longident.t loc * expression option + (* C None + C E Some E + C (E1, ..., En) Some (Pexp_tuple[E1;...;En]) + *) + | Pexp_variant of label * expression option + (* `A (None) + `A E (Some E) + *) + | Pexp_record of (Longident.t loc * expression) list * expression option + (* { l1=P1; ...; ln=Pn } (None) + { E0 with l1=P1; ...; ln=Pn } (Some E0) + + Invariant: n > 0 + *) + | Pexp_field of expression * Longident.t loc + (* E.l *) + | Pexp_setfield of expression * Longident.t loc * expression + (* E1.l <- E2 *) + | Pexp_array of expression list + (* [| E1; ...; En |] *) + | Pexp_ifthenelse of expression * expression * expression option + (* if E1 then E2 else E3 *) + | Pexp_sequence of expression * expression + (* E1; E2 *) + | Pexp_while of expression * expression + (* while E1 do E2 done *) + | Pexp_for of + pattern * expression * expression * direction_flag * expression + (* for i = E1 to E2 do E3 done (flag = Upto) + for i = E1 downto E2 do E3 done (flag = Downto) + *) + | Pexp_constraint of expression * core_type + (* (E : T) *) + | Pexp_coerce of expression * core_type option * core_type + (* (E :> T) (None, T) + (E : T0 :> T) (Some T0, T) + *) + | Pexp_send of expression * label loc + (* E # m *) + | Pexp_new of Longident.t loc + (* new M.c *) + | Pexp_setinstvar of label loc * expression + (* x <- 2 *) + | Pexp_override of (label loc * expression) list + (* {< x1 = E1; ...; Xn = En >} *) + | Pexp_letmodule of string loc * module_expr * expression + (* let module M = ME in E *) + | Pexp_letexception of extension_constructor * expression + (* let exception C in E *) + | Pexp_assert of expression + (* assert E + Note: "assert false" is treated in a special way by the + type-checker. *) + | Pexp_lazy of expression + (* lazy E *) + | Pexp_poly of expression * core_type option + (* Used for method bodies. + + Can only be used as the expression under Cfk_concrete + for methods (not values). *) + | Pexp_object of class_structure + (* object ... end *) + | Pexp_newtype of string loc * expression + (* fun (type t) -> E *) + | Pexp_pack of module_expr + (* (module ME) + + (module ME : S) is represented as + Pexp_constraint(Pexp_pack, Ptyp_package S) *) + | Pexp_open of override_flag * Longident.t loc * expression + (* M.(E) + let open M in E + let! open M in E *) + | Pexp_extension of extension + (* [%id] *) + | Pexp_unreachable + (* . *) + +and case = (* (P -> E) or (P when E0 -> E) *) + { + pc_lhs: pattern; + pc_guard: expression option; + pc_rhs: expression; + } + +(* Value descriptions *) + +and value_description = + { + pval_name: string loc; + pval_type: core_type; + pval_prim: string list; + pval_attributes: attributes; (* ... [@@id1] [@@id2] *) + pval_loc: Location.t; + } + +(* + val x: T (prim = []) + external x: T = "s1" ... "sn" (prim = ["s1";..."sn"]) +*) + +(* Type declarations *) + +and type_declaration = + { + ptype_name: string loc; + ptype_params: (core_type * variance) list; + (* ('a1,...'an) t; None represents _*) + ptype_cstrs: (core_type * core_type * Location.t) list; + (* ... constraint T1=T1' ... constraint Tn=Tn' *) + ptype_kind: type_kind; + ptype_private: private_flag; (* = private ... *) + ptype_manifest: core_type option; (* = T *) + ptype_attributes: attributes; (* ... [@@id1] [@@id2] *) + ptype_loc: Location.t; + } + +(* + type t (abstract, no manifest) + type t = T0 (abstract, manifest=T0) + type t = C of T | ... 
(variant, no manifest) + type t = T0 = C of T | ... (variant, manifest=T0) + type t = {l: T; ...} (record, no manifest) + type t = T0 = {l : T; ...} (record, manifest=T0) + type t = .. (open, no manifest) +*) + +and type_kind = + | Ptype_abstract + | Ptype_variant of constructor_declaration list + (* Invariant: non-empty list *) + | Ptype_record of label_declaration list + (* Invariant: non-empty list *) + | Ptype_open + +and label_declaration = + { + pld_name: string loc; + pld_mutable: mutable_flag; + pld_type: core_type; + pld_loc: Location.t; + pld_attributes: attributes; (* l : T [@id1] [@id2] *) + } + +(* { ...; l: T; ... } (mutable=Immutable) + { ...; mutable l: T; ... } (mutable=Mutable) + + Note: T can be a Ptyp_poly. +*) + +and constructor_declaration = + { + pcd_name: string loc; + pcd_args: constructor_arguments; + pcd_res: core_type option; + pcd_loc: Location.t; + pcd_attributes: attributes; (* C of ... [@id1] [@id2] *) + } + +and constructor_arguments = + | Pcstr_tuple of core_type list + | Pcstr_record of label_declaration list + +(* + | C of T1 * ... * Tn (res = None, args = Pcstr_tuple []) + | C: T0 (res = Some T0, args = []) + | C: T1 * ... * Tn -> T0 (res = Some T0, args = Pcstr_tuple) + | C of {...} (res = None, args = Pcstr_record) + | C: {...} -> T0 (res = Some T0, args = Pcstr_record) + | C of {...} as t (res = None, args = Pcstr_record) +*) + +and type_extension = + { + ptyext_path: Longident.t loc; + ptyext_params: (core_type * variance) list; + ptyext_constructors: extension_constructor list; + ptyext_private: private_flag; + ptyext_attributes: attributes; (* ... [@@id1] [@@id2] *) + } +(* + type t += ... +*) + +and extension_constructor = + { + pext_name: string loc; + pext_kind : extension_constructor_kind; + pext_loc : Location.t; + pext_attributes: attributes; (* C of ... [@id1] [@id2] *) + } + +and extension_constructor_kind = + Pext_decl of constructor_arguments * core_type option + (* + | C of T1 * ... * Tn ([T1; ...; Tn], None) + | C: T0 ([], Some T0) + | C: T1 * ... * Tn -> T0 ([T1; ...; Tn], Some T0) + *) + | Pext_rebind of Longident.t loc + (* + | C = D + *) + +(** {1 Class language} *) + +(* Type expressions for the class language *) + +and class_type = + { + pcty_desc: class_type_desc; + pcty_loc: Location.t; + pcty_attributes: attributes; (* ... [@id1] [@id2] *) + } + +and class_type_desc = + | Pcty_constr of Longident.t loc * core_type list + (* c + ['a1, ..., 'an] c *) + | Pcty_signature of class_signature + (* object ... end *) + | Pcty_arrow of arg_label * core_type * class_type + (* T -> CT Simple + ~l:T -> CT Labelled l + ?l:T -> CT Optional l + *) + | Pcty_extension of extension + (* [%id] *) + | Pcty_open of override_flag * Longident.t loc * class_type + (* let open M in CT *) + +and class_signature = + { + pcsig_self: core_type; + pcsig_fields: class_type_field list; + } +(* object('selfpat) ... end + object ... end (self = Ptyp_any) + *) + +and class_type_field = + { + pctf_desc: class_type_field_desc; + pctf_loc: Location.t; + pctf_attributes: attributes; (* ... [@@id1] [@@id2] *) + } + +and class_type_field_desc = + | Pctf_inherit of class_type + (* inherit CT *) + | Pctf_val of (label loc * mutable_flag * virtual_flag * core_type) + (* val x: T *) + | Pctf_method of (label loc * private_flag * virtual_flag * core_type) + (* method x: T + + Note: T can be a Ptyp_poly. 
+ *) + | Pctf_constraint of (core_type * core_type) + (* constraint T1 = T2 *) + | Pctf_attribute of attribute + (* [@@@id] *) + | Pctf_extension of extension + (* [%%id] *) + +and 'a class_infos = + { + pci_virt: virtual_flag; + pci_params: (core_type * variance) list; + pci_name: string loc; + pci_expr: 'a; + pci_loc: Location.t; + pci_attributes: attributes; (* ... [@@id1] [@@id2] *) + } +(* class c = ... + class ['a1,...,'an] c = ... + class virtual c = ... + + Also used for "class type" declaration. +*) + +and class_description = class_type class_infos + +and class_type_declaration = class_type class_infos + +(* Value expressions for the class language *) + +and class_expr = + { + pcl_desc: class_expr_desc; + pcl_loc: Location.t; + pcl_attributes: attributes; (* ... [@id1] [@id2] *) + } + +and class_expr_desc = + | Pcl_constr of Longident.t loc * core_type list + (* c + ['a1, ..., 'an] c *) + | Pcl_structure of class_structure + (* object ... end *) + | Pcl_fun of arg_label * expression option * pattern * class_expr + (* fun P -> CE (Simple, None) + fun ~l:P -> CE (Labelled l, None) + fun ?l:P -> CE (Optional l, None) + fun ?l:(P = E0) -> CE (Optional l, Some E0) + *) + | Pcl_apply of class_expr * (arg_label * expression) list + (* CE ~l1:E1 ... ~ln:En + li can be empty (non labeled argument) or start with '?' + (optional argument). + + Invariant: n > 0 + *) + | Pcl_let of rec_flag * value_binding list * class_expr + (* let P1 = E1 and ... and Pn = EN in CE (flag = Nonrecursive) + let rec P1 = E1 and ... and Pn = EN in CE (flag = Recursive) + *) + | Pcl_constraint of class_expr * class_type + (* (CE : CT) *) + | Pcl_extension of extension + (* [%id] *) + | Pcl_open of override_flag * Longident.t loc * class_expr + (* let open M in CE *) + + +and class_structure = + { + pcstr_self: pattern; + pcstr_fields: class_field list; + } +(* object(selfpat) ... end + object ... end (self = Ppat_any) + *) + +and class_field = + { + pcf_desc: class_field_desc; + pcf_loc: Location.t; + pcf_attributes: attributes; (* ... [@@id1] [@@id2] *) + } + +and class_field_desc = + | Pcf_inherit of override_flag * class_expr * string loc option + (* inherit CE + inherit CE as x + inherit! CE + inherit! CE as x + *) + | Pcf_val of (label loc * mutable_flag * class_field_kind) + (* val x = E + val virtual x: T + *) + | Pcf_method of (label loc * private_flag * class_field_kind) + (* method x = E (E can be a Pexp_poly) + method virtual x: T (T can be a Ptyp_poly) + *) + | Pcf_constraint of (core_type * core_type) + (* constraint T1 = T2 *) + | Pcf_initializer of expression + (* initializer E *) + | Pcf_attribute of attribute + (* [@@@id] *) + | Pcf_extension of extension + (* [%%id] *) + +and class_field_kind = + | Cfk_virtual of core_type + | Cfk_concrete of override_flag * expression + +and class_declaration = class_expr class_infos + +(** {1 Module language} *) + +(* Type expressions for the module language *) + +and module_type = + { + pmty_desc: module_type_desc; + pmty_loc: Location.t; + pmty_attributes: attributes; (* ... [@id1] [@id2] *) + } + +and module_type_desc = + | Pmty_ident of Longident.t loc + (* S *) + | Pmty_signature of signature + (* sig ... end *) + | Pmty_functor of string loc * module_type option * module_type + (* functor(X : MT1) -> MT2 *) + | Pmty_with of module_type * with_constraint list + (* MT with ... 
*) + | Pmty_typeof of module_expr + (* module type of ME *) + | Pmty_extension of extension + (* [%id] *) + | Pmty_alias of Longident.t loc + (* (module M) *) + +and signature = signature_item list + +and signature_item = + { + psig_desc: signature_item_desc; + psig_loc: Location.t; + } + +and signature_item_desc = + | Psig_value of value_description + (* + val x: T + external x: T = "s1" ... "sn" + *) + | Psig_type of rec_flag * type_declaration list + (* type t1 = ... and ... and tn = ... *) + | Psig_typext of type_extension + (* type t1 += ... *) + | Psig_exception of extension_constructor + (* exception C of T *) + | Psig_module of module_declaration + (* module X : MT *) + | Psig_recmodule of module_declaration list + (* module rec X1 : MT1 and ... and Xn : MTn *) + | Psig_modtype of module_type_declaration + (* module type S = MT + module type S *) + | Psig_open of open_description + (* open X *) + | Psig_include of include_description + (* include MT *) + | Psig_class of class_description list + (* class c1 : ... and ... and cn : ... *) + | Psig_class_type of class_type_declaration list + (* class type ct1 = ... and ... and ctn = ... *) + | Psig_attribute of attribute + (* [@@@id] *) + | Psig_extension of extension * attributes + (* [%%id] *) + +and module_declaration = + { + pmd_name: string loc; + pmd_type: module_type; + pmd_attributes: attributes; (* ... [@@id1] [@@id2] *) + pmd_loc: Location.t; + } +(* S : MT *) + +and module_type_declaration = + { + pmtd_name: string loc; + pmtd_type: module_type option; + pmtd_attributes: attributes; (* ... [@@id1] [@@id2] *) + pmtd_loc: Location.t; + } +(* S = MT + S (abstract module type declaration, pmtd_type = None) +*) + +and open_description = + { + popen_lid: Longident.t loc; + popen_override: override_flag; + popen_loc: Location.t; + popen_attributes: attributes; + } +(* open! X - popen_override = Override (silences the 'used identifier + shadowing' warning) + open X - popen_override = Fresh + *) + +and 'a include_infos = + { + pincl_mod: 'a; + pincl_loc: Location.t; + pincl_attributes: attributes; + } + +and include_description = module_type include_infos +(* include MT *) + +and include_declaration = module_expr include_infos +(* include ME *) + +and with_constraint = + | Pwith_type of Longident.t loc * type_declaration + (* with type X.t = ... + + Note: the last component of the longident must match + the name of the type_declaration. *) + | Pwith_module of Longident.t loc * Longident.t loc + (* with module X.Y = Z *) + | Pwith_typesubst of Longident.t loc * type_declaration + (* with type X.t := ..., same format as [Pwith_type] *) + | Pwith_modsubst of Longident.t loc * Longident.t loc + (* with module X.Y := Z *) + +(* Value expressions for the module language *) + +and module_expr = + { + pmod_desc: module_expr_desc; + pmod_loc: Location.t; + pmod_attributes: attributes; (* ... [@id1] [@id2] *) + } + +and module_expr_desc = + | Pmod_ident of Longident.t loc + (* X *) + | Pmod_structure of structure + (* struct ... 
end *) + | Pmod_functor of string loc * module_type option * module_expr + (* functor(X : MT1) -> ME *) + | Pmod_apply of module_expr * module_expr + (* ME1(ME2) *) + | Pmod_constraint of module_expr * module_type + (* (ME : MT) *) + | Pmod_unpack of expression + (* (val E) *) + | Pmod_extension of extension + (* [%id] *) + +and structure = structure_item list + +and structure_item = + { + pstr_desc: structure_item_desc; + pstr_loc: Location.t; + } + +and structure_item_desc = + | Pstr_eval of expression * attributes + (* E *) + | Pstr_value of rec_flag * value_binding list + (* let P1 = E1 and ... and Pn = EN (flag = Nonrecursive) + let rec P1 = E1 and ... and Pn = EN (flag = Recursive) + *) + | Pstr_primitive of value_description + (* val x: T + external x: T = "s1" ... "sn" *) + | Pstr_type of rec_flag * type_declaration list + (* type t1 = ... and ... and tn = ... *) + | Pstr_typext of type_extension + (* type t1 += ... *) + | Pstr_exception of extension_constructor + (* exception C of T + exception C = M.X *) + | Pstr_module of module_binding + (* module X = ME *) + | Pstr_recmodule of module_binding list + (* module rec X1 = ME1 and ... and Xn = MEn *) + | Pstr_modtype of module_type_declaration + (* module type S = MT *) + | Pstr_open of open_description + (* open X *) + | Pstr_class of class_declaration list + (* class c1 = ... and ... and cn = ... *) + | Pstr_class_type of class_type_declaration list + (* class type ct1 = ... and ... and ctn = ... *) + | Pstr_include of include_declaration + (* include ME *) + | Pstr_attribute of attribute + (* [@@@id] *) + | Pstr_extension of extension * attributes + (* [%%id] *) + +and value_binding = + { + pvb_pat: pattern; + pvb_expr: expression; + pvb_attributes: attributes; + pvb_loc: Location.t; + } + +and module_binding = + { + pmb_name: string loc; + pmb_expr: module_expr; + pmb_attributes: attributes; + pmb_loc: Location.t; + } +(* X = ME *) + +(** {1 Toplevel} *) + +(* Toplevel phrases *) + +type toplevel_phrase = + | Ptop_def of structure + | Ptop_dir of string * directive_argument + (* #use, #load ... *) + +and directive_argument = + | Pdir_none + | Pdir_string of string + | Pdir_int of string * char option + | Pdir_ident of Longident.t + | Pdir_bool of bool diff --git a/res_syntax/compiler-libs-406/parsing.ml b/res_syntax/compiler-libs-406/parsing.ml new file mode 100644 index 0000000000..3b779f5c7f --- /dev/null +++ b/res_syntax/compiler-libs-406/parsing.ml @@ -0,0 +1,211 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +(* The parsing engine *) + +open Lexing + +(* Internal interface to the parsing engine *) + +type parser_env = + { mutable s_stack : int array; (* States *) + mutable v_stack : Obj.t array; (* Semantic attributes *) + mutable symb_start_stack : position array; (* Start positions *) + mutable symb_end_stack : position array; (* End positions *) + mutable stacksize : int; (* Size of the stacks *) + mutable stackbase : int; (* Base sp for current parse *) + mutable curr_char : int; (* Last token read *) + mutable lval : Obj.t; (* Its semantic attribute *) + mutable symb_start : position; (* Start pos. of the current symbol*) + mutable symb_end : position; (* End pos. of the current symbol *) + mutable asp : int; (* The stack pointer for attributes *) + mutable rule_len : int; (* Number of rhs items in the rule *) + mutable rule_number : int; (* Rule number to reduce by *) + mutable sp : int; (* Saved sp for parse_engine *) + mutable state : int; (* Saved state for parse_engine *) + mutable errflag : int } (* Saved error flag for parse_engine *) + +type parse_tables = + { actions : (parser_env -> Obj.t) array; + transl_const : int array; + transl_block : int array; + lhs : string; + len : string; + defred : string; + dgoto : string; + sindex : string; + rindex : string; + gindex : string; + tablesize : int; + table : string; + check : string; + error_function : string -> unit; + names_const : string; + names_block : string } + +exception YYexit of Obj.t +exception Parse_error + +type parser_input = + Start + | Token_read + | Stacks_grown_1 + | Stacks_grown_2 + | Semantic_action_computed + | Error_detected + +type parser_output = + Read_token + | Raise_parse_error + | Grow_stacks_1 + | Grow_stacks_2 + | Compute_semantic_action + | Call_error_function + +(* to avoid warnings *) +let _ = [Read_token; Raise_parse_error; Grow_stacks_1; Grow_stacks_2; + Compute_semantic_action; Call_error_function] + +external parse_engine : + parse_tables -> parser_env -> parser_input -> Obj.t -> parser_output + = "caml_parse_engine" + +external set_trace: bool -> bool + = "caml_set_parser_trace" + +let env = + { s_stack = Array.make 100 0; + v_stack = Array.make 100 (Obj.repr ()); + symb_start_stack = Array.make 100 dummy_pos; + symb_end_stack = Array.make 100 dummy_pos; + stacksize = 100; + stackbase = 0; + curr_char = 0; + lval = Obj.repr (); + symb_start = dummy_pos; + symb_end = dummy_pos; + asp = 0; + rule_len = 0; + rule_number = 0; + sp = 0; + state = 0; + errflag = 0 } + +let grow_stacks() = + let oldsize = env.stacksize in + let newsize = oldsize * 2 in + let new_s = Array.make newsize 0 + and new_v = Array.make newsize (Obj.repr ()) + and new_start = Array.make newsize dummy_pos + and new_end = Array.make newsize dummy_pos in + Array.blit env.s_stack 0 new_s 0 oldsize; + env.s_stack <- new_s; + Array.blit env.v_stack 0 new_v 0 oldsize; + env.v_stack <- new_v; + Array.blit env.symb_start_stack 0 new_start 0 oldsize; + env.symb_start_stack <- new_start; + Array.blit env.symb_end_stack 0 new_end 0 oldsize; + env.symb_end_stack <- new_end; + env.stacksize <- newsize + +let clear_parser() = + Array.fill env.v_stack 0 env.stacksize (Obj.repr ()); + env.lval <- Obj.repr () + +let current_lookahead_fun = ref (fun (_ : Obj.t) -> false) + +let yyparse tables start lexer lexbuf = + let rec loop cmd arg = + match parse_engine tables env cmd arg with + Read_token -> + let t = Obj.repr(lexer lexbuf) in + env.symb_start <- 
lexbuf.lex_start_p; + env.symb_end <- lexbuf.lex_curr_p; + loop Token_read t + | Raise_parse_error -> + raise Parse_error + | Compute_semantic_action -> + let (action, value) = + try + (Semantic_action_computed, tables.actions.(env.rule_number) env) + with Parse_error -> + (Error_detected, Obj.repr ()) in + loop action value + | Grow_stacks_1 -> + grow_stacks(); loop Stacks_grown_1 (Obj.repr ()) + | Grow_stacks_2 -> + grow_stacks(); loop Stacks_grown_2 (Obj.repr ()) + | Call_error_function -> + tables.error_function "syntax error"; + loop Error_detected (Obj.repr ()) in + let init_asp = env.asp + and init_sp = env.sp + and init_stackbase = env.stackbase + and init_state = env.state + and init_curr_char = env.curr_char + and init_lval = env.lval + and init_errflag = env.errflag in + env.stackbase <- env.sp + 1; + env.curr_char <- start; + env.symb_end <- lexbuf.lex_curr_p; + try + loop Start (Obj.repr ()) + with exn -> + let curr_char = env.curr_char in + env.asp <- init_asp; + env.sp <- init_sp; + env.stackbase <- init_stackbase; + env.state <- init_state; + env.curr_char <- init_curr_char; + env.lval <- init_lval; + env.errflag <- init_errflag; + match exn with + YYexit v -> + Obj.magic v + | _ -> + current_lookahead_fun := + (fun tok -> + if Obj.is_block tok + then tables.transl_block.(Obj.tag tok) = curr_char + else tables.transl_const.(Obj.magic tok) = curr_char); + raise exn + +let peek_val env n = + Obj.magic env.v_stack.(env.asp - n) + +let symbol_start_pos () = + let rec loop i = + if i <= 0 then env.symb_end_stack.(env.asp) + else begin + let st = env.symb_start_stack.(env.asp - i + 1) in + let en = env.symb_end_stack.(env.asp - i + 1) in + if st <> en then st else loop (i - 1) + end + in + loop env.rule_len + +let symbol_end_pos () = env.symb_end_stack.(env.asp) +let rhs_start_pos n = env.symb_start_stack.(env.asp - (env.rule_len - n)) +let rhs_end_pos n = env.symb_end_stack.(env.asp - (env.rule_len - n)) + +let symbol_start () = (symbol_start_pos ()).pos_cnum +let symbol_end () = (symbol_end_pos ()).pos_cnum +let rhs_start n = (rhs_start_pos n).pos_cnum +let rhs_end n = (rhs_end_pos n).pos_cnum + +let is_current_lookahead tok = + (!current_lookahead_fun)(Obj.repr tok) + +let parse_error (_ : string) = () diff --git a/res_syntax/compiler-libs-406/parsing.mli b/res_syntax/compiler-libs-406/parsing.mli new file mode 100644 index 0000000000..73b9504d4f --- /dev/null +++ b/res_syntax/compiler-libs-406/parsing.mli @@ -0,0 +1,105 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** The run-time library for parsers generated by [ocamlyacc]. *) + +val symbol_start : unit -> int +(** [symbol_start] and {!Parsing.symbol_end} are to be called in the + action part of a grammar rule only. They return the offset of the + string that matches the left-hand side of the rule: [symbol_start()] + returns the offset of the first character; [symbol_end()] returns the + offset after the last character. The first character in a file is at + offset 0. 
*) + +val symbol_end : unit -> int +(** See {!Parsing.symbol_start}. *) + +val rhs_start : int -> int +(** Same as {!Parsing.symbol_start} and {!Parsing.symbol_end}, but + return the offset of the string matching the [n]th item on the + right-hand side of the rule, where [n] is the integer parameter + to [rhs_start] and [rhs_end]. [n] is 1 for the leftmost item. *) + +val rhs_end : int -> int +(** See {!Parsing.rhs_start}. *) + +val symbol_start_pos : unit -> Lexing.position +(** Same as [symbol_start], but return a [position] instead of an offset. *) + +val symbol_end_pos : unit -> Lexing.position +(** Same as [symbol_end], but return a [position] instead of an offset. *) + +val rhs_start_pos : int -> Lexing.position +(** Same as [rhs_start], but return a [position] instead of an offset. *) + +val rhs_end_pos : int -> Lexing.position +(** Same as [rhs_end], but return a [position] instead of an offset. *) + +val clear_parser : unit -> unit +(** Empty the parser stack. Call it just after a parsing function + has returned, to remove all pointers from the parser stack + to structures that were built by semantic actions during parsing. + This is optional, but lowers the memory requirements of the + programs. *) + +exception Parse_error +(** Raised when a parser encounters a syntax error. + Can also be raised from the action part of a grammar rule, + to initiate error recovery. *) + +val set_trace: bool -> bool +(** Control debugging support for [ocamlyacc]-generated parsers. + After [Parsing.set_trace true], the pushdown automaton that + executes the parsers prints a trace of its actions (reading a token, + shifting a state, reducing by a rule) on standard output. + [Parsing.set_trace false] turns this debugging trace off. + The boolean returned is the previous state of the trace flag. + @since 3.11.0 +*) + +(**/**) + +(** {1 } *) + +(** The following definitions are used by the generated parsers only. + They are not intended to be used directly by user programs. *) + +type parser_env + +type parse_tables = + { actions : (parser_env -> Obj.t) array; + transl_const : int array; + transl_block : int array; + lhs : string; + len : string; + defred : string; + dgoto : string; + sindex : string; + rindex : string; + gindex : string; + tablesize : int; + table : string; + check : string; + error_function : string -> unit; + names_const : string; + names_block : string } + +exception YYexit of Obj.t + +val yyparse : + parse_tables -> int -> (Lexing.lexbuf -> 'a) -> Lexing.lexbuf -> 'b +val peek_val : parser_env -> int -> 'a +val is_current_lookahead : 'a -> bool +val parse_error : string -> unit diff --git a/res_syntax/compiler-libs-406/path.ml b/res_syntax/compiler-libs-406/path.ml new file mode 100644 index 0000000000..cff31cb81c --- /dev/null +++ b/res_syntax/compiler-libs-406/path.ml @@ -0,0 +1,109 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +type t = + Pident of Ident.t + | Pdot of t * string * int + | Papply of t * t + +let nopos = -1 + +let rec same p1 p2 = + match (p1, p2) with + (Pident id1, Pident id2) -> Ident.same id1 id2 + | (Pdot(p1, s1, _pos1), Pdot(p2, s2, _pos2)) -> s1 = s2 && same p1 p2 + | (Papply(fun1, arg1), Papply(fun2, arg2)) -> + same fun1 fun2 && same arg1 arg2 + | (_, _) -> false + +let rec compare p1 p2 = + match (p1, p2) with + (Pident id1, Pident id2) -> Ident.compare id1 id2 + | (Pdot(p1, s1, _pos1), Pdot(p2, s2, _pos2)) -> + let h = compare p1 p2 in + if h <> 0 then h else String.compare s1 s2 + | (Papply(fun1, arg1), Papply(fun2, arg2)) -> + let h = compare fun1 fun2 in + if h <> 0 then h else compare arg1 arg2 + | ((Pident _ | Pdot _), (Pdot _ | Papply _)) -> -1 + | ((Pdot _ | Papply _), (Pident _ | Pdot _)) -> 1 + +let rec isfree id = function + Pident id' -> Ident.same id id' + | Pdot(p, _s, _pos) -> isfree id p + | Papply(p1, p2) -> isfree id p1 || isfree id p2 + +let rec binding_time = function + Pident id -> Ident.binding_time id + | Pdot(p, _s, _pos) -> binding_time p + | Papply(p1, p2) -> max (binding_time p1) (binding_time p2) + +let kfalse _ = false + +let rec name ?(paren=kfalse) = function + Pident id -> Ident.name id + | Pdot(p, s, _pos) -> + name ~paren p ^ if paren s then ".( " ^ s ^ " )" else "." ^ s + | Papply(p1, p2) -> name ~paren p1 ^ "(" ^ name ~paren p2 ^ ")" + +let rec head = function + Pident id -> id + | Pdot(p, _s, _pos) -> head p + | Papply _ -> assert false + +let flatten = + let rec flatten acc = function + | Pident id -> `Ok (id, acc) + | Pdot (p, s, _) -> flatten (s :: acc) p + | Papply _ -> `Contains_apply + in + fun t -> flatten [] t + +let heads p = + let rec heads p acc = match p with + | Pident id -> id :: acc + | Pdot (p, _s, _pos) -> heads p acc + | Papply(p1, p2) -> + heads p1 (heads p2 acc) + in heads p [] + +let rec last = function + | Pident id -> Ident.name id + | Pdot(_, s, _) -> s + | Papply(_, p) -> last p + +let is_uident s = + assert (s <> ""); + match s.[0] with + | 'A'..'Z' -> true + | _ -> false + +type typath = + | Regular of t + | Ext of t * string + | LocalExt of Ident.t + | Cstr of t * string + +let constructor_typath = function + | Pident id when is_uident (Ident.name id) -> LocalExt id + | Pdot(ty_path, s, _) when is_uident s -> + if is_uident (last ty_path) then Ext (ty_path, s) + else Cstr (ty_path, s) + | p -> Regular p + +let is_constructor_typath p = + match constructor_typath p with + | Regular _ -> false + | _ -> true diff --git a/res_syntax/compiler-libs-406/path.mli b/res_syntax/compiler-libs-406/path.mli new file mode 100644 index 0000000000..18491462e8 --- /dev/null +++ b/res_syntax/compiler-libs-406/path.mli @@ -0,0 +1,46 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +(* Access paths *) + +type t = + Pident of Ident.t + | Pdot of t * string * int + | Papply of t * t + +val same: t -> t -> bool +val compare: t -> t -> int +val isfree: Ident.t -> t -> bool +val binding_time: t -> int +val flatten : t -> [ `Contains_apply | `Ok of Ident.t * string list ] + +val nopos: int + +val name: ?paren:(string -> bool) -> t -> string + (* [paren] tells whether a path suffix needs parentheses *) +val head: t -> Ident.t + +val heads: t -> Ident.t list + +val last: t -> string + +type typath = + | Regular of t + | Ext of t * string + | LocalExt of Ident.t + | Cstr of t * string + +val constructor_typath: t -> typath +val is_constructor_typath: t -> bool diff --git a/res_syntax/compiler-libs-406/pprintast.ml b/res_syntax/compiler-libs-406/pprintast.ml new file mode 100644 index 0000000000..bfa53cf77e --- /dev/null +++ b/res_syntax/compiler-libs-406/pprintast.ml @@ -0,0 +1,1555 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Thomas Gazagnaire, OCamlPro *) +(* Fabrice Le Fessant, INRIA Saclay *) +(* Hongbo Zhang, University of Pennsylvania *) +(* *) +(* Copyright 2007 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Original Code from Ber-metaocaml, modified for 3.12.0 and fixed *) +(* Printing code expressions *) +(* Authors: Ed Pizzi, Fabrice Le Fessant *) +(* Extensive Rewrite: Hongbo Zhang: University of Pennsylvania *) +(* TODO more fine-grained precedence pretty-printing *) + +open Asttypes +open Format +open Location +open Longident +open Parsetree +open Ast_helper + +let prefix_symbols = [ '!'; '?'; '~' ] ;; +let infix_symbols = [ '='; '<'; '>'; '@'; '^'; '|'; '&'; '+'; '-'; '*'; '/'; + '$'; '%'; '#' ] + +(* type fixity = Infix| Prefix *) +let special_infix_strings = + ["asr"; "land"; "lor"; "lsl"; "lsr"; "lxor"; "mod"; "or"; ":="; "!="; "::" ] + +(* determines if the string is an infix string. + checks backwards, first allowing a renaming postfix ("_102") which + may have resulted from Pexp -> Texp -> Pexp translation, then checking + if all the characters in the beginning of the string are valid infix + characters. *) +let fixity_of_string = function + | s when List.mem s special_infix_strings -> `Infix s + | s when List.mem s.[0] infix_symbols -> `Infix s + | s when List.mem s.[0] prefix_symbols -> `Prefix s + | s when s.[0] = '.' 
-> `Mixfix s + | _ -> `Normal + +let view_fixity_of_exp = function + | {pexp_desc = Pexp_ident {txt=Lident l;_}; pexp_attributes = []} -> + fixity_of_string l + | _ -> `Normal + +let is_infix = function | `Infix _ -> true | _ -> false +let is_mixfix = function `Mixfix _ -> true | _ -> false + +(* which identifiers are in fact operators needing parentheses *) +let needs_parens txt = + let fix = fixity_of_string txt in + is_infix fix + || is_mixfix fix + || List.mem txt.[0] prefix_symbols + +(* some infixes need spaces around parens to avoid clashes with comment + syntax *) +let needs_spaces txt = + txt.[0]='*' || txt.[String.length txt - 1] = '*' + +(* add parentheses to binders when they are in fact infix or prefix operators *) +let protect_ident ppf txt = + let format : (_, _, _) format = + if not (needs_parens txt) then "%s" + else if needs_spaces txt then "(@;%s@;)" + else "(%s)" + in fprintf ppf format txt + +let protect_longident ppf print_longident longprefix txt = + let format : (_, _, _) format = + if not (needs_parens txt) then "%a.%s" + else if needs_spaces txt then "%a.(@;%s@;)" + else "%a.(%s)" in + fprintf ppf format print_longident longprefix txt + +type space_formatter = (unit, Format.formatter, unit) format + +let override = function + | Override -> "!" + | Fresh -> "" + +(* variance encoding: need to sync up with the [parser.mly] *) +let type_variance = function + | Invariant -> "" + | Covariant -> "+" + | Contravariant -> "-" + +type construct = + [ `cons of expression list + | `list of expression list + | `nil + | `normal + | `simple of Longident.t + | `tuple ] + +let view_expr x = + match x.pexp_desc with + | Pexp_construct ( {txt= Lident "()"; _},_) -> `tuple + | Pexp_construct ( {txt= Lident "[]";_},_) -> `nil + | Pexp_construct ( {txt= Lident"::";_},Some _) -> + let rec loop exp acc = match exp with + | {pexp_desc=Pexp_construct ({txt=Lident "[]";_},_); + pexp_attributes = []} -> + (List.rev acc,true) + | {pexp_desc= + Pexp_construct ({txt=Lident "::";_}, + Some ({pexp_desc= Pexp_tuple([e1;e2]); + pexp_attributes = []})); + pexp_attributes = []} + -> + loop e2 (e1::acc) + | e -> (List.rev (e::acc),false) in + let (ls,b) = loop x [] in + if b then + `list ls + else `cons ls + | Pexp_construct (x,None) -> `simple (x.txt) + | _ -> `normal + +let is_simple_construct :construct -> bool = function + | `nil | `tuple | `list _ | `simple _ -> true + | `cons _ | `normal -> false + +let pp = fprintf + +type ctxt = { + pipe : bool; + semi : bool; + ifthenelse : bool; +} + +let reset_ctxt = { pipe=false; semi=false; ifthenelse=false } +let under_pipe ctxt = { ctxt with pipe=true } +let under_semi ctxt = { ctxt with semi=true } +let under_ifthenelse ctxt = { ctxt with ifthenelse=true } +(* +let reset_semi ctxt = { ctxt with semi=false } +let reset_ifthenelse ctxt = { ctxt with ifthenelse=false } +let reset_pipe ctxt = { ctxt with pipe=false } +*) + +let list : 'a . 
?sep:space_formatter -> ?first:space_formatter -> + ?last:space_formatter -> (Format.formatter -> 'a -> unit) -> + Format.formatter -> 'a list -> unit + = fun ?sep ?first ?last fu f xs -> + let first = match first with Some x -> x |None -> ("": _ format6) + and last = match last with Some x -> x |None -> ("": _ format6) + and sep = match sep with Some x -> x |None -> ("@ ": _ format6) in + let aux f = function + | [] -> () + | [x] -> fu f x + | xs -> + let rec loop f = function + | [x] -> fu f x + | x::xs -> fu f x; pp f sep; loop f xs; + | _ -> assert false in begin + pp f first; loop f xs; pp f last; + end in + aux f xs + +let option : 'a. ?first:space_formatter -> ?last:space_formatter -> + (Format.formatter -> 'a -> unit) -> Format.formatter -> 'a option -> unit + = fun ?first ?last fu f a -> + let first = match first with Some x -> x | None -> ("": _ format6) + and last = match last with Some x -> x | None -> ("": _ format6) in + match a with + | None -> () + | Some x -> pp f first; fu f x; pp f last + +let paren: 'a . ?first:space_formatter -> ?last:space_formatter -> + bool -> (Format.formatter -> 'a -> unit) -> Format.formatter -> 'a -> unit + = fun ?(first=("": _ format6)) ?(last=("": _ format6)) b fu f x -> + if b then (pp f "("; pp f first; fu f x; pp f last; pp f ")") + else fu f x + +let rec longident f = function + | Lident s -> protect_ident f s + | Ldot(y,s) -> protect_longident f longident y s + | Lapply (y,s) -> + pp f "%a(%a)" longident y longident s + +let longident_loc f x = pp f "%a" longident x.txt + +let string_of_int_as_char i = + let str = match Char.unsafe_chr i with + | '\'' -> "\\'" + | '\\' -> "\\\\" + | '\n' -> "\\n" + | '\t' -> "\\t" + | '\r' -> "\\r" + | '\b' -> "\\b" + | ' ' .. '~' as c -> + let s = (Bytes.create [@doesNotRaise]) 1 in + Bytes.unsafe_set s 0 c; + Bytes.unsafe_to_string s + | _ -> Printf.sprintf "\\%d" i + in + Printf.sprintf "\'%s\'" str + +let constant f = function + | Pconst_char i -> pp f "%s" (string_of_int_as_char i) + | Pconst_string (i, None) -> pp f "%S" i + | Pconst_string (i, Some delim) -> pp f "{%s|%s|%s}" delim i delim + | Pconst_integer (i, None) -> paren (i.[0]='-') (fun f -> pp f "%s") f i + | Pconst_integer (i, Some m) -> + paren (i.[0]='-') (fun f (i, m) -> pp f "%s%c" i m) f (i,m) + | Pconst_float (i, None) -> paren (i.[0]='-') (fun f -> pp f "%s") f i + | Pconst_float (i, Some m) -> paren (i.[0]='-') (fun f (i,m) -> + pp f "%s%c" i m) f (i,m) + +(* trailing space*) +let mutable_flag f = function + | Immutable -> () + | Mutable -> pp f "mutable@;" +let virtual_flag f = function + | Concrete -> () + | Virtual -> pp f "virtual@;" + +(* trailing space added *) +let rec_flag f rf = + match rf with + | Nonrecursive -> () + | Recursive -> pp f "rec " +let nonrec_flag f rf = + match rf with + | Nonrecursive -> pp f "nonrec " + | Recursive -> () +let direction_flag f = function + | Upto -> pp f "to@ " + | Downto -> pp f "downto@ " +let private_flag f = function + | Public -> () + | Private -> pp f "private@ " + +let constant_string f s = pp f "%S" s +let tyvar f str = pp f "'%s" str +let tyvar_loc f str = pp f "'%s" str.txt +let string_quot f x = pp f "`%s" x + +(* c ['a,'b] *) +let rec class_params_def ctxt f = function + | [] -> () + | l -> + pp f "[%a] " (* space *) + (list (type_param ctxt) ~sep:",") l + +and type_with_label ctxt f (label, c) = + match label with + | Nolabel -> core_type1 ctxt f c (* otherwise parenthesize *) + | Labelled s -> pp f "%s:%a" s (core_type1 ctxt) c + | Optional s -> pp f "?%s:%a" s (core_type1 
ctxt) c + +and core_type ctxt f x = + if x.ptyp_attributes <> [] then begin + pp f "((%a)%a)" (core_type ctxt) {x with ptyp_attributes=[]} + (attributes ctxt) x.ptyp_attributes + end + else match x.ptyp_desc with + | Ptyp_arrow (l, ct1, ct2) -> + pp f "@[<2>%a@;->@;%a@]" (* FIXME remove parens later *) + (type_with_label ctxt) (l,ct1) (core_type ctxt) ct2 + | Ptyp_alias (ct, s) -> + pp f "@[<2>%a@;as@;'%s@]" (core_type1 ctxt) ct s + | Ptyp_poly ([], ct) -> + core_type ctxt f ct + | Ptyp_poly (sl, ct) -> + pp f "@[<2>%a%a@]" + (fun f l -> + pp f "%a" + (fun f l -> match l with + | [] -> () + | _ -> + pp f "%a@;.@;" + (list tyvar_loc ~sep:"@;") l) + l) + sl (core_type ctxt) ct + | _ -> pp f "@[<2>%a@]" (core_type1 ctxt) x + +and core_type1 ctxt f x = + if x.ptyp_attributes <> [] then core_type ctxt f x + else match x.ptyp_desc with + | Ptyp_any -> pp f "_"; + | Ptyp_var s -> tyvar f s; + | Ptyp_tuple l -> pp f "(%a)" (list (core_type1 ctxt) ~sep:"@;*@;") l + | Ptyp_constr (li, l) -> + pp f (* "%a%a@;" *) "%a%a" + (fun f l -> match l with + |[] -> () + |[x]-> pp f "%a@;" (core_type1 ctxt) x + | _ -> list ~first:"(" ~last:")@;" (core_type ctxt) ~sep:",@;" f l) + l longident_loc li + | Ptyp_variant (l, closed, low) -> + let type_variant_helper f x = + match x with + | Rtag (l, attrs, _, ctl) -> + pp f "@[<2>%a%a@;%a@]" string_quot l.txt + (fun f l -> match l with + |[] -> () + | _ -> pp f "@;of@;%a" + (list (core_type ctxt) ~sep:"&") ctl) ctl + (attributes ctxt) attrs + | Rinherit ct -> core_type ctxt f ct in + pp f "@[<2>[%a%a]@]" + (fun f l -> + match l, closed with + | [], Closed -> () + | [], Open -> pp f ">" (* Cf #7200: print [>] correctly *) + | _ -> + pp f "%s@;%a" + (match (closed,low) with + | (Closed,None) -> "" + | (Closed,Some _) -> "<" (* FIXME desugar the syntax sugar*) + | (Open,_) -> ">") + (list type_variant_helper ~sep:"@;<1 -2>| ") l) l + (fun f low -> match low with + |Some [] |None -> () + |Some xs -> + pp f ">@ %a" + (list string_quot) xs) low + | Ptyp_object (l, o) -> + let core_field_type f = function + | Otag (l, attrs, ct) -> + pp f "@[%s: %a@ %a@ @]" l.txt + (core_type ctxt) ct (attributes ctxt) attrs (* Cf #7200 *) + | Oinherit ct -> + pp f "@[%a@ @]" (core_type ctxt) ct + in + let field_var f = function + | Asttypes.Closed -> () + | Asttypes.Open -> + match l with + | [] -> pp f ".." + | _ -> pp f " ;.." 
+ in + pp f "@[<@ %a%a@ > @]" (list core_field_type ~sep:";") l + field_var o (* Cf #7200 *) + | Ptyp_class (li, l) -> (*FIXME*) + pp f "@[%a#%a@]" + (list (core_type ctxt) ~sep:"," ~first:"(" ~last:")") l + longident_loc li + | Ptyp_package (lid, cstrs) -> + let aux f (s, ct) = + pp f "type %a@ =@ %a" longident_loc s (core_type ctxt) ct in + (match cstrs with + |[] -> pp f "@[(module@ %a)@]" longident_loc lid + |_ -> + pp f "@[(module@ %a@ with@ %a)@]" longident_loc lid + (list aux ~sep:"@ and@ ") cstrs) + | Ptyp_extension e -> extension ctxt f e + | _ -> paren true (core_type ctxt) f x + +(********************pattern********************) +(* be cautious when use [pattern], [pattern1] is preferred *) +and pattern ctxt f x = + let rec list_of_pattern acc = function (* only consider ((A|B)|C)*) + | {ppat_desc= Ppat_or (p1,p2); ppat_attributes = []} -> + list_of_pattern (p2::acc) p1 + | x -> x::acc + in + if x.ppat_attributes <> [] then begin + pp f "((%a)%a)" (pattern ctxt) {x with ppat_attributes=[]} + (attributes ctxt) x.ppat_attributes + end + else match x.ppat_desc with + | Ppat_alias (p, s) -> + pp f "@[<2>%a@;as@;%a@]" (pattern ctxt) p protect_ident s.txt (* RA*) + | Ppat_or _ -> (* *) + pp f "@[%a@]" (list ~sep:"@,|" (pattern ctxt)) + (list_of_pattern [] x) + | _ -> pattern1 ctxt f x + +and pattern1 ctxt (f:Format.formatter) (x:pattern) : unit = + let rec pattern_list_helper f = function + | {ppat_desc = + Ppat_construct + ({ txt = Lident("::") ;_}, + Some ({ppat_desc = Ppat_tuple([pat1; pat2]);_})); + ppat_attributes = []} + + -> + pp f "%a::%a" (simple_pattern ctxt) pat1 pattern_list_helper pat2 (*RA*) + | p -> pattern1 ctxt f p + in + if x.ppat_attributes <> [] then pattern ctxt f x + else match x.ppat_desc with + | Ppat_variant (l, Some p) -> + pp f "@[<2>`%s@;%a@]" l (simple_pattern ctxt) p + | Ppat_construct (({txt=Lident("()"|"[]");_}), _) -> simple_pattern ctxt f x + | Ppat_construct (({txt;_} as li), po) -> + (* FIXME The third field always false *) + if txt = Lident "::" then + pp f "%a" pattern_list_helper x + else + (match po with + | Some x -> pp f "%a@;%a" longident_loc li (simple_pattern ctxt) x + | None -> pp f "%a" longident_loc li) + | _ -> simple_pattern ctxt f x + +and simple_pattern ctxt (f:Format.formatter) (x:pattern) : unit = + if x.ppat_attributes <> [] then pattern ctxt f x + else match x.ppat_desc with + | Ppat_construct (({txt=Lident ("()"|"[]" as x);_}), _) -> pp f "%s" x + | Ppat_any -> pp f "_"; + | Ppat_var ({txt = txt;_}) -> protect_ident f txt + | Ppat_array l -> + pp f "@[<2>[|%a|]@]" (list (pattern1 ctxt) ~sep:";") l + | Ppat_unpack (s) -> + pp f "(module@ %s)@ " s.txt + | Ppat_type li -> + pp f "#%a" longident_loc li + | Ppat_record (l, closed) -> + let longident_x_pattern f (li, p) = + match (li,p) with + | ({txt=Lident s;_ }, + {ppat_desc=Ppat_var {txt;_}; + ppat_attributes=[]; _}) + when s = txt -> + pp f "@[<2>%a@]" longident_loc li + | _ -> + pp f "@[<2>%a@;=@;%a@]" longident_loc li (pattern1 ctxt) p + in + begin match closed with + | Closed -> + pp f "@[<2>{@;%a@;}@]" (list longident_x_pattern ~sep:";@;") l + | _ -> + pp f "@[<2>{@;%a;_}@]" (list longident_x_pattern ~sep:";@;") l + end + | Ppat_tuple l -> + pp f "@[<1>(%a)@]" (list ~sep:",@;" (pattern1 ctxt)) l (* level1*) + | Ppat_constant (c) -> pp f "%a" constant c + | Ppat_interval (c1, c2) -> pp f "%a..%a" constant c1 constant c2 + | Ppat_variant (l,None) -> pp f "`%s" l + | Ppat_constraint (p, ct) -> + pp f "@[<2>(%a@;:@;%a)@]" (pattern1 ctxt) p (core_type ctxt) ct + | Ppat_lazy p -> + 
pp f "@[<2>(lazy@;%a)@]" (pattern1 ctxt) p + | Ppat_exception p -> + pp f "@[<2>exception@;%a@]" (pattern1 ctxt) p + | Ppat_extension e -> extension ctxt f e + | Ppat_open (lid, p) -> + let with_paren = + match p.ppat_desc with + | Ppat_array _ | Ppat_record _ + | Ppat_construct (({txt=Lident ("()"|"[]");_}), _) -> false + | _ -> true in + pp f "@[<2>%a.%a @]" longident_loc lid + (paren with_paren @@ pattern1 ctxt) p + | _ -> paren true (pattern ctxt) f x + +and label_exp ctxt f (l,opt,p) = + match l with + | Nolabel -> + (* single case pattern parens needed here *) + pp f "%a@ " (simple_pattern ctxt) p + | Optional rest -> + begin match p with + | {ppat_desc = Ppat_var {txt;_}; ppat_attributes = []} + when txt = rest -> + (match opt with + | Some o -> pp f "?(%s=@;%a)@;" rest (expression ctxt) o + | None -> pp f "?%s@ " rest) + | _ -> + (match opt with + | Some o -> + pp f "?%s:(%a=@;%a)@;" + rest (pattern1 ctxt) p (expression ctxt) o + | None -> pp f "?%s:%a@;" rest (simple_pattern ctxt) p) + end + | Labelled l -> match p with + | {ppat_desc = Ppat_var {txt;_}; ppat_attributes = []} + when txt = l -> + pp f "~%s@;" l + | _ -> pp f "~%s:%a@;" l (simple_pattern ctxt) p + +and sugar_expr ctxt f e = + if e.pexp_attributes <> [] then false + else match e.pexp_desc with + | Pexp_apply ({ pexp_desc = Pexp_ident {txt = id; _}; + pexp_attributes=[]; _}, args) + when List.for_all (fun (lab, _) -> lab = Nolabel) args -> begin + let print_indexop a path_prefix assign left right print_index indices + rem_args = + let print_path ppf = function + | None -> () + | Some m -> pp ppf ".%a" longident m in + match assign, rem_args with + | false, [] -> + pp f "@[%a%a%s%a%s@]" + (simple_expr ctxt) a print_path path_prefix + left (list ~sep:"," print_index) indices right; true + | true, [v] -> + pp f "@[%a%a%s%a%s@ <-@;<1 2>%a@]" + (simple_expr ctxt) a print_path path_prefix + left (list ~sep:"," print_index) indices right + (simple_expr ctxt) v; true + | _ -> false in + match id, List.map snd args with + | Lident "!", [e] -> + pp f "@[!%a@]" (simple_expr ctxt) e; true + | Ldot (path, ("get"|"set" as func)), a :: other_args -> begin + let assign = func = "set" in + let print = print_indexop a None assign in + match path, other_args with + | Lident "Array", i :: rest -> + print ".(" ")" (expression ctxt) [i] rest + | Lident "String", i :: rest -> + print ".[" "]" (expression ctxt) [i] rest + | Ldot (Lident "Bigarray", "Array1"), i1 :: rest -> + print ".{" "}" (simple_expr ctxt) [i1] rest + | Ldot (Lident "Bigarray", "Array2"), i1 :: i2 :: rest -> + print ".{" "}" (simple_expr ctxt) [i1; i2] rest + | Ldot (Lident "Bigarray", "Array3"), i1 :: i2 :: i3 :: rest -> + print ".{" "}" (simple_expr ctxt) [i1; i2; i3] rest + | Ldot (Lident "Bigarray", "Genarray"), + {pexp_desc = Pexp_array indexes; pexp_attributes = []} :: rest -> + print ".{" "}" (simple_expr ctxt) indexes rest + | _ -> false + end + | (Lident s | Ldot(_,s)) , a :: i :: rest + when s.[0] = '.' 
-> + let n = String.length s in + (* extract operator: + assignment operators end with [right_bracket ^ "<-"], + access operators end with [right_bracket] directly + *) + let assign = s.[n - 1] = '-' in + let kind = + (* extract the right end bracket *) + if assign then s.[n - 3] else s.[n - 1] in + let left, right = match kind with + | ')' -> '(', ")" + | ']' -> '[', "]" + | '}' -> '{', "}" + | _ -> assert false in + let path_prefix = match id with + | Ldot(m,_) -> Some m + | _ -> None in + let left = String.sub s 0 (1+String.index s left) in + print_indexop a path_prefix assign left right + (expression ctxt) [i] rest + | _ -> false + end + | _ -> false + +and expression ctxt f x = + if x.pexp_attributes <> [] then + pp f "((%a)@,%a)" (expression ctxt) {x with pexp_attributes=[]} + (attributes ctxt) x.pexp_attributes + else match x.pexp_desc with + | Pexp_function _ | Pexp_fun _ | Pexp_match _ | Pexp_try _ | Pexp_sequence _ + when ctxt.pipe || ctxt.semi -> + paren true (expression reset_ctxt) f x + | Pexp_ifthenelse _ | Pexp_sequence _ when ctxt.ifthenelse -> + paren true (expression reset_ctxt) f x + | Pexp_let _ | Pexp_letmodule _ | Pexp_open _ | Pexp_letexception _ + when ctxt.semi -> + paren true (expression reset_ctxt) f x + | Pexp_fun (l, e0, p, e) -> + pp f "@[<2>fun@;%a->@;%a@]" + (label_exp ctxt) (l, e0, p) + (expression ctxt) e + | Pexp_function l -> + pp f "@[function%a@]" (case_list ctxt) l + | Pexp_match (e, l) -> + pp f "@[@[@[<2>match %a@]@ with@]%a@]" + (expression reset_ctxt) e (case_list ctxt) l + + | Pexp_try (e, l) -> + pp f "@[<0>@[try@ %a@]@ @[<0>with%a@]@]" + (* "try@;@[<2>%a@]@\nwith@\n%a"*) + (expression reset_ctxt) e (case_list ctxt) l + | Pexp_let (rf, l, e) -> + (* pp f "@[<2>let %a%a in@;<1 -2>%a@]" + (*no indentation here, a new line*) *) + (* rec_flag rf *) + pp f "@[<2>%a in@;<1 -2>%a@]" + (bindings reset_ctxt) (rf,l) + (expression ctxt) e + | Pexp_apply (e, l) -> + begin if not (sugar_expr ctxt f x) then + match view_fixity_of_exp e with + | `Infix s -> + begin match l with + | [ (Nolabel, _) as arg1; (Nolabel, _) as arg2 ] -> + (* FIXME associativity label_x_expression_param *) + pp f "@[<2>%a@;%s@;%a@]" + (label_x_expression_param reset_ctxt) arg1 s + (label_x_expression_param ctxt) arg2 + | _ -> + pp f "@[<2>%a %a@]" + (simple_expr ctxt) e + (list (label_x_expression_param ctxt)) l + end + | `Prefix s -> + let s = + if List.mem s ["~+";"~-";"~+.";"~-."] && + (match l with + (* See #7200: avoid turning (~- 1) into (- 1) which is + parsed as an int literal *) + |[(_,{pexp_desc=Pexp_constant _})] -> false + | _ -> true) + then String.sub s 1 (String.length s -1) + else s in + begin match l with + | [(Nolabel, x)] -> + pp f "@[<2>%s@;%a@]" s (simple_expr ctxt) x + | _ -> + pp f "@[<2>%a %a@]" (simple_expr ctxt) e + (list (label_x_expression_param ctxt)) l + end + | _ -> + pp f "@[%a@]" begin fun f (e,l) -> + pp f "%a@ %a" (expression2 ctxt) e + (list (label_x_expression_param reset_ctxt)) l + (* reset here only because [function,match,try,sequence] + are lower priority *) + end (e,l) + end + + | Pexp_construct (li, Some eo) + when not (is_simple_construct (view_expr x))-> (* Not efficient FIXME*) + (match view_expr x with + | `cons ls -> list (simple_expr ctxt) f ls ~sep:"@;::@;" + | `normal -> + pp f "@[<2>%a@;%a@]" longident_loc li + (simple_expr ctxt) eo + | _ -> assert false) + | Pexp_setfield (e1, li, e2) -> + pp f "@[<2>%a.%a@ <-@ %a@]" + (simple_expr ctxt) e1 longident_loc li (simple_expr ctxt) e2 + | Pexp_ifthenelse (e1, e2, eo) -> + (* @;@[<2>else@ 
%a@]@] *) + let fmt:(_,_,_)format ="@[@[<2>if@ %a@]@;@[<2>then@ %a@]%a@]" in + let expression_under_ifthenelse = expression (under_ifthenelse ctxt) in + pp f fmt expression_under_ifthenelse e1 expression_under_ifthenelse e2 + (fun f eo -> match eo with + | Some x -> + pp f "@;@[<2>else@;%a@]" (expression (under_semi ctxt)) x + | None -> () (* pp f "()" *)) eo + | Pexp_sequence _ -> + let rec sequence_helper acc = function + | {pexp_desc=Pexp_sequence(e1,e2); pexp_attributes = []} -> + sequence_helper (e1::acc) e2 + | v -> List.rev (v::acc) in + let lst = sequence_helper [] x in + pp f "@[%a@]" + (list (expression (under_semi ctxt)) ~sep:";@;") lst + | Pexp_new (li) -> + pp f "@[new@ %a@]" longident_loc li; + | Pexp_setinstvar (s, e) -> + pp f "@[%s@ <-@ %a@]" s.txt (expression ctxt) e + | Pexp_override l -> (* FIXME *) + let string_x_expression f (s, e) = + pp f "@[%s@ =@ %a@]" s.txt (expression ctxt) e in + pp f "@[{<%a>}@]" + (list string_x_expression ~sep:";" ) l; + | Pexp_letmodule (s, me, e) -> + pp f "@[let@ module@ %s@ =@ %a@ in@ %a@]" s.txt + (module_expr reset_ctxt) me (expression ctxt) e + | Pexp_letexception (cd, e) -> + pp f "@[let@ exception@ %a@ in@ %a@]" + (extension_constructor ctxt) cd + (expression ctxt) e + | Pexp_assert e -> + pp f "@[assert@ %a@]" (simple_expr ctxt) e + | Pexp_lazy (e) -> + pp f "@[lazy@ %a@]" (simple_expr ctxt) e + (* Pexp_poly: impossible but we should print it anyway, rather than + assert false *) + | Pexp_poly (e, None) -> + pp f "@[!poly!@ %a@]" (simple_expr ctxt) e + | Pexp_poly (e, Some ct) -> + pp f "@[(!poly!@ %a@ : %a)@]" + (simple_expr ctxt) e (core_type ctxt) ct + | Pexp_open (ovf, lid, e) -> + pp f "@[<2>let open%s %a in@;%a@]" (override ovf) longident_loc lid + (expression ctxt) e + | Pexp_variant (l,Some eo) -> + pp f "@[<2>`%s@;%a@]" l (simple_expr ctxt) eo + | Pexp_extension e -> extension ctxt f e + | Pexp_unreachable -> pp f "." 
+ | _ -> expression1 ctxt f x + +and expression1 ctxt f x = + if x.pexp_attributes <> [] then expression ctxt f x + else match x.pexp_desc with + | Pexp_object cs -> pp f "%a" (class_structure ctxt) cs + | _ -> expression2 ctxt f x +(* used in [Pexp_apply] *) + +and expression2 ctxt f x = + if x.pexp_attributes <> [] then expression ctxt f x + else match x.pexp_desc with + | Pexp_field (e, li) -> + pp f "@[%a.%a@]" (simple_expr ctxt) e longident_loc li + | Pexp_send (e, s) -> pp f "@[%a#%s@]" (simple_expr ctxt) e s.txt + + | _ -> simple_expr ctxt f x + +and simple_expr ctxt f x = + if x.pexp_attributes <> [] then expression ctxt f x + else match x.pexp_desc with + | Pexp_construct _ when is_simple_construct (view_expr x) -> + (match view_expr x with + | `nil -> pp f "[]" + | `tuple -> pp f "()" + | `list xs -> + pp f "@[[%a]@]" + (list (expression (under_semi ctxt)) ~sep:";@;") xs + | `simple x -> longident f x + | _ -> assert false) + | Pexp_ident li -> + longident_loc f li + (* (match view_fixity_of_exp x with *) + (* |`Normal -> longident_loc f li *) + (* | `Prefix _ | `Infix _ -> pp f "( %a )" longident_loc li) *) + | Pexp_constant c -> constant f c; + | Pexp_pack me -> + pp f "(module@;%a)" (module_expr ctxt) me + | Pexp_newtype (lid, e) -> + pp f "fun@;(type@;%s)@;->@;%a" lid.txt (expression ctxt) e + | Pexp_tuple l -> + pp f "@[(%a)@]" (list (simple_expr ctxt) ~sep:",@;") l + | Pexp_constraint (e, ct) -> + pp f "(%a : %a)" (expression ctxt) e (core_type ctxt) ct + | Pexp_coerce (e, cto1, ct) -> + pp f "(%a%a :> %a)" (expression ctxt) e + (option (core_type ctxt) ~first:" : " ~last:" ") cto1 (* no sep hint*) + (core_type ctxt) ct + | Pexp_variant (l, None) -> pp f "`%s" l + | Pexp_record (l, eo) -> + let longident_x_expression f ( li, e) = + match e with + | {pexp_desc=Pexp_ident {txt;_}; + pexp_attributes=[]; _} when li.txt = txt -> + pp f "@[%a@]" longident_loc li + | _ -> + pp f "@[%a@;=@;%a@]" longident_loc li (simple_expr ctxt) e + in + pp f "@[@[{@;%a%a@]@;}@]"(* "@[{%a%a}@]" *) + (option ~last:" with@;" (simple_expr ctxt)) eo + (list longident_x_expression ~sep:";@;") l + | Pexp_array (l) -> + pp f "@[<0>@[<2>[|%a|]@]@]" + (list (simple_expr (under_semi ctxt)) ~sep:";") l + | Pexp_while (e1, e2) -> + let fmt : (_,_,_) format = "@[<2>while@;%a@;do@;%a@;done@]" in + pp f fmt (expression ctxt) e1 (expression ctxt) e2 + | Pexp_for (s, e1, e2, df, e3) -> + let fmt:(_,_,_)format = + "@[@[@[<2>for %a =@;%a@;%a%a@;do@]@;%a@]@;done@]" in + let expression = expression ctxt in + pp f fmt (pattern ctxt) s expression e1 direction_flag + df expression e2 expression e3 + | _ -> paren true (expression ctxt) f x + +and attributes ctxt f l = + List.iter (attribute ctxt f) l + +and item_attributes ctxt f l = + List.iter (item_attribute ctxt f) l + +and attribute ctxt f (s, e) = + pp f "@[<2>[@@%s@ %a]@]" s.txt (payload ctxt) e + +and item_attribute ctxt f (s, e) = + pp f "@[<2>[@@@@%s@ %a]@]" s.txt (payload ctxt) e + +and floating_attribute ctxt f (s, e) = + pp f "@[<2>[@@@@@@%s@ %a]@]" s.txt (payload ctxt) e + +and value_description ctxt f x = + (* note: value_description has an attribute field, + but they're already printed by the callers this method *) + pp f "@[%a%a@]" (core_type ctxt) x.pval_type + (fun f x -> + if x.pval_prim <> [] + then pp f "@ =@ %a" (list constant_string) x.pval_prim + ) x + +and extension ctxt f (s, e) = + pp f "@[<2>[%%%s@ %a]@]" s.txt (payload ctxt) e + +and item_extension ctxt f (s, e) = + pp f "@[<2>[%%%%%s@ %a]@]" s.txt (payload ctxt) e + +and 
exception_declaration ctxt f ext = + pp f "@[exception@ %a@]" (extension_constructor ctxt) ext + +and class_signature ctxt f { pcsig_self = ct; pcsig_fields = l ;_} = + let class_type_field f x = + match x.pctf_desc with + | Pctf_inherit (ct) -> + pp f "@[<2>inherit@ %a@]%a" (class_type ctxt) ct + (item_attributes ctxt) x.pctf_attributes + | Pctf_val (s, mf, vf, ct) -> + pp f "@[<2>val @ %a%a%s@ :@ %a@]%a" + mutable_flag mf virtual_flag vf s.txt (core_type ctxt) ct + (item_attributes ctxt) x.pctf_attributes + | Pctf_method (s, pf, vf, ct) -> + pp f "@[<2>method %a %a%s :@;%a@]%a" + private_flag pf virtual_flag vf s.txt (core_type ctxt) ct + (item_attributes ctxt) x.pctf_attributes + | Pctf_constraint (ct1, ct2) -> + pp f "@[<2>constraint@ %a@ =@ %a@]%a" + (core_type ctxt) ct1 (core_type ctxt) ct2 + (item_attributes ctxt) x.pctf_attributes + | Pctf_attribute a -> floating_attribute ctxt f a + | Pctf_extension e -> + item_extension ctxt f e; + item_attributes ctxt f x.pctf_attributes + in + pp f "@[@[object@[<1>%a@]@ %a@]@ end@]" + (fun f -> function + {ptyp_desc=Ptyp_any; ptyp_attributes=[]; _} -> () + | ct -> pp f " (%a)" (core_type ctxt) ct) ct + (list class_type_field ~sep:"@;") l + +(* call [class_signature] called by [class_signature] *) +and class_type ctxt f x = + match x.pcty_desc with + | Pcty_signature cs -> + class_signature ctxt f cs; + attributes ctxt f x.pcty_attributes + | Pcty_constr (li, l) -> + pp f "%a%a%a" + (fun f l -> match l with + | [] -> () + | _ -> pp f "[%a]@ " (list (core_type ctxt) ~sep:"," ) l) l + longident_loc li + (attributes ctxt) x.pcty_attributes + | Pcty_arrow (l, co, cl) -> + pp f "@[<2>%a@;->@;%a@]" (* FIXME remove parens later *) + (type_with_label ctxt) (l,co) + (class_type ctxt) cl + | Pcty_extension e -> + extension ctxt f e; + attributes ctxt f x.pcty_attributes + | Pcty_open (ovf, lid, e) -> + pp f "@[<2>let open%s %a in@;%a@]" (override ovf) longident_loc lid + (class_type ctxt) e + +(* [class type a = object end] *) +and class_type_declaration_list ctxt f l = + let class_type_declaration kwd f x = + let { pci_params=ls; pci_name={ txt; _ }; _ } = x in + pp f "@[<2>%s %a%a%s@ =@ %a@]%a" kwd + virtual_flag x.pci_virt + (class_params_def ctxt) ls txt + (class_type ctxt) x.pci_expr + (item_attributes ctxt) x.pci_attributes + in + match l with + | [] -> () + | [x] -> class_type_declaration "class type" f x + | x :: xs -> + pp f "@[%a@,%a@]" + (class_type_declaration "class type") x + (list ~sep:"@," (class_type_declaration "and")) xs + +and class_field ctxt f x = + match x.pcf_desc with + | Pcf_inherit (ovf, ce, so) -> + pp f "@[<2>inherit@ %s@ %a%a@]%a" (override ovf) + (class_expr ctxt) ce + (fun f so -> match so with + | None -> (); + | Some (s) -> pp f "@ as %s" s.txt ) so + (item_attributes ctxt) x.pcf_attributes + | Pcf_val (s, mf, Cfk_concrete (ovf, e)) -> + pp f "@[<2>val%s %a%s =@;%a@]%a" (override ovf) + mutable_flag mf s.txt + (expression ctxt) e + (item_attributes ctxt) x.pcf_attributes + | Pcf_method (s, pf, Cfk_virtual ct) -> + pp f "@[<2>method virtual %a %s :@;%a@]%a" + private_flag pf s.txt + (core_type ctxt) ct + (item_attributes ctxt) x.pcf_attributes + | Pcf_val (s, mf, Cfk_virtual ct) -> + pp f "@[<2>val virtual %a%s :@ %a@]%a" + mutable_flag mf s.txt + (core_type ctxt) ct + (item_attributes ctxt) x.pcf_attributes + | Pcf_method (s, pf, Cfk_concrete (ovf, e)) -> + let bind e = + binding ctxt f + {pvb_pat= + {ppat_desc=Ppat_var s;ppat_loc=Location.none;ppat_attributes=[]}; + pvb_expr=e; + pvb_attributes=[]; + 
pvb_loc=Location.none; + } + in + pp f "@[<2>method%s %a%a@]%a" + (override ovf) + private_flag pf + (fun f -> function + | {pexp_desc=Pexp_poly (e, Some ct); pexp_attributes=[]; _} -> + pp f "%s :@;%a=@;%a" + s.txt (core_type ctxt) ct (expression ctxt) e + | {pexp_desc=Pexp_poly (e, None); pexp_attributes=[]; _} -> + bind e + | _ -> bind e) e + (item_attributes ctxt) x.pcf_attributes + | Pcf_constraint (ct1, ct2) -> + pp f "@[<2>constraint %a =@;%a@]%a" + (core_type ctxt) ct1 + (core_type ctxt) ct2 + (item_attributes ctxt) x.pcf_attributes + | Pcf_initializer (e) -> + pp f "@[<2>initializer@ %a@]%a" + (expression ctxt) e + (item_attributes ctxt) x.pcf_attributes + | Pcf_attribute a -> floating_attribute ctxt f a + | Pcf_extension e -> + item_extension ctxt f e; + item_attributes ctxt f x.pcf_attributes + +and class_structure ctxt f { pcstr_self = p; pcstr_fields = l } = + pp f "@[@[object%a@;%a@]@;end@]" + (fun f p -> match p.ppat_desc with + | Ppat_any -> () + | Ppat_constraint _ -> pp f " %a" (pattern ctxt) p + | _ -> pp f " (%a)" (pattern ctxt) p) p + (list (class_field ctxt)) l + +and class_expr ctxt f x = + if x.pcl_attributes <> [] then begin + pp f "((%a)%a)" (class_expr ctxt) {x with pcl_attributes=[]} + (attributes ctxt) x.pcl_attributes + end else + match x.pcl_desc with + | Pcl_structure (cs) -> class_structure ctxt f cs + | Pcl_fun (l, eo, p, e) -> + pp f "fun@ %a@ ->@ %a" + (label_exp ctxt) (l,eo,p) + (class_expr ctxt) e + | Pcl_let (rf, l, ce) -> + pp f "%a@ in@ %a" + (bindings ctxt) (rf,l) + (class_expr ctxt) ce + | Pcl_apply (ce, l) -> + pp f "((%a)@ %a)" (* Cf: #7200 *) + (class_expr ctxt) ce + (list (label_x_expression_param ctxt)) l + | Pcl_constr (li, l) -> + pp f "%a%a" + (fun f l-> if l <>[] then + pp f "[%a]@ " + (list (core_type ctxt) ~sep:",") l) l + longident_loc li + | Pcl_constraint (ce, ct) -> + pp f "(%a@ :@ %a)" + (class_expr ctxt) ce + (class_type ctxt) ct + | Pcl_extension e -> extension ctxt f e + | Pcl_open (ovf, lid, e) -> + pp f "@[<2>let open%s %a in@;%a@]" (override ovf) longident_loc lid + (class_expr ctxt) e + +and module_type ctxt f x = + if x.pmty_attributes <> [] then begin + pp f "((%a)%a)" (module_type ctxt) {x with pmty_attributes=[]} + (attributes ctxt) x.pmty_attributes + end else + match x.pmty_desc with + | Pmty_ident li -> + pp f "%a" longident_loc li; + | Pmty_alias li -> + pp f "(module %a)" longident_loc li; + | Pmty_signature (s) -> + pp f "@[@[sig@ %a@]@ end@]" (* "@[sig@ %a@ end@]" *) + (list (signature_item ctxt)) s (* FIXME wrong indentation*) + | Pmty_functor (_, None, mt2) -> + pp f "@[functor () ->@ %a@]" (module_type ctxt) mt2 + | Pmty_functor (s, Some mt1, mt2) -> + if s.txt = "_" then + pp f "@[%a@ ->@ %a@]" + (module_type ctxt) mt1 (module_type ctxt) mt2 + else + pp f "@[functor@ (%s@ :@ %a)@ ->@ %a@]" s.txt + (module_type ctxt) mt1 (module_type ctxt) mt2 + | Pmty_with (mt, l) -> + let with_constraint f = function + | Pwith_type (li, ({ptype_params= ls ;_} as td)) -> + let ls = List.map fst ls in + pp f "type@ %a %a =@ %a" + (list (core_type ctxt) ~sep:"," ~first:"(" ~last:")") + ls longident_loc li (type_declaration ctxt) td + | Pwith_module (li, li2) -> + pp f "module %a =@ %a" longident_loc li longident_loc li2; + | Pwith_typesubst (li, ({ptype_params=ls;_} as td)) -> + let ls = List.map fst ls in + pp f "type@ %a %a :=@ %a" + (list (core_type ctxt) ~sep:"," ~first:"(" ~last:")") + ls longident_loc li + (type_declaration ctxt) td + | Pwith_modsubst (li, li2) -> + pp f "module %a :=@ %a" longident_loc li longident_loc 
li2 in + (match l with + | [] -> pp f "@[%a@]" (module_type ctxt) mt + | _ -> pp f "@[(%a@ with@ %a)@]" + (module_type ctxt) mt (list with_constraint ~sep:"@ and@ ") l) + | Pmty_typeof me -> + pp f "@[module@ type@ of@ %a@]" (module_expr ctxt) me + | Pmty_extension e -> extension ctxt f e + +and signature ctxt f x = list ~sep:"@\n" (signature_item ctxt) f x + +and signature_item ctxt f x : unit = + match x.psig_desc with + | Psig_type (rf, l) -> + type_def_list ctxt f (rf, l) + | Psig_value vd -> + let intro = if vd.pval_prim = [] then "val" else "external" in + pp f "@[<2>%s@ %a@ :@ %a@]%a" intro + protect_ident vd.pval_name.txt + (value_description ctxt) vd + (item_attributes ctxt) vd.pval_attributes + | Psig_typext te -> + type_extension ctxt f te + | Psig_exception ed -> + exception_declaration ctxt f ed + | Psig_class l -> + let class_description kwd f ({pci_params=ls;pci_name={txt;_};_} as x) = + pp f "@[<2>%s %a%a%s@;:@;%a@]%a" kwd + virtual_flag x.pci_virt + (class_params_def ctxt) ls txt + (class_type ctxt) x.pci_expr + (item_attributes ctxt) x.pci_attributes + in begin + match l with + | [] -> () + | [x] -> class_description "class" f x + | x :: xs -> + pp f "@[%a@,%a@]" + (class_description "class") x + (list ~sep:"@," (class_description "and")) xs + end + | Psig_module ({pmd_type={pmty_desc=Pmty_alias alias; + pmty_attributes=[]; _};_} as pmd) -> + pp f "@[module@ %s@ =@ %a@]%a" pmd.pmd_name.txt + longident_loc alias + (item_attributes ctxt) pmd.pmd_attributes + | Psig_module pmd -> + pp f "@[module@ %s@ :@ %a@]%a" + pmd.pmd_name.txt + (module_type ctxt) pmd.pmd_type + (item_attributes ctxt) pmd.pmd_attributes + | Psig_open od -> + pp f "@[open%s@ %a@]%a" + (override od.popen_override) + longident_loc od.popen_lid + (item_attributes ctxt) od.popen_attributes + | Psig_include incl -> + pp f "@[include@ %a@]%a" + (module_type ctxt) incl.pincl_mod + (item_attributes ctxt) incl.pincl_attributes + | Psig_modtype {pmtd_name=s; pmtd_type=md; pmtd_attributes=attrs} -> + pp f "@[module@ type@ %s%a@]%a" + s.txt + (fun f md -> match md with + | None -> () + | Some mt -> + pp_print_space f () ; + pp f "@ =@ %a" (module_type ctxt) mt + ) md + (item_attributes ctxt) attrs + | Psig_class_type (l) -> class_type_declaration_list ctxt f l + | Psig_recmodule decls -> + let rec string_x_module_type_list f ?(first=true) l = + match l with + | [] -> () ; + | pmd :: tl -> + if not first then + pp f "@ @[and@ %s:@ %a@]%a" pmd.pmd_name.txt + (module_type ctxt) pmd.pmd_type + (item_attributes ctxt) pmd.pmd_attributes + else + pp f "@[module@ rec@ %s:@ %a@]%a" pmd.pmd_name.txt + (module_type ctxt) pmd.pmd_type + (item_attributes ctxt) pmd.pmd_attributes; + string_x_module_type_list f ~first:false tl + in + string_x_module_type_list f decls + | Psig_attribute a -> floating_attribute ctxt f a + | Psig_extension(e, a) -> + item_extension ctxt f e; + item_attributes ctxt f a + +and module_expr ctxt f x = + if x.pmod_attributes <> [] then + pp f "((%a)%a)" (module_expr ctxt) {x with pmod_attributes=[]} + (attributes ctxt) x.pmod_attributes + else match x.pmod_desc with + | Pmod_structure (s) -> + pp f "@[struct@;@[<0>%a@]@;<1 -2>end@]" + (list (structure_item ctxt) ~sep:"@\n") s; + | Pmod_constraint (me, mt) -> + pp f "@[(%a@ :@ %a)@]" + (module_expr ctxt) me + (module_type ctxt) mt + | Pmod_ident (li) -> + pp f "%a" longident_loc li; + | Pmod_functor (_, None, me) -> + pp f "functor ()@;->@;%a" (module_expr ctxt) me + | Pmod_functor (s, Some mt, me) -> + pp f "functor@ (%s@ :@ %a)@;->@;%a" + s.txt 
(module_type ctxt) mt (module_expr ctxt) me + | Pmod_apply (me1, me2) -> + pp f "(%a)(%a)" (module_expr ctxt) me1 (module_expr ctxt) me2 + (* Cf: #7200 *) + | Pmod_unpack e -> + pp f "(val@ %a)" (expression ctxt) e + | Pmod_extension e -> extension ctxt f e + +and structure ctxt f x = list ~sep:"@\n" (structure_item ctxt) f x + +and payload ctxt f = function + | PStr [{pstr_desc = Pstr_eval (e, attrs)}] -> + pp f "@[<2>%a@]%a" + (expression ctxt) e + (item_attributes ctxt) attrs + | PStr x -> structure ctxt f x + | PTyp x -> pp f ":"; core_type ctxt f x + | PSig x -> pp f ":"; signature ctxt f x + | PPat (x, None) -> pp f "?"; pattern ctxt f x + | PPat (x, Some e) -> + pp f "?"; pattern ctxt f x; + pp f " when "; expression ctxt f e + +(* transform [f = fun g h -> ..] to [f g h = ... ] could be improved *) +and binding ctxt f {pvb_pat=p; pvb_expr=x; _} = + (* .pvb_attributes have already been printed by the caller, #bindings *) + let rec pp_print_pexp_function f x = + if x.pexp_attributes <> [] then pp f "=@;%a" (expression ctxt) x + else match x.pexp_desc with + | Pexp_fun (label, eo, p, e) -> + if label=Nolabel then + pp f "%a@ %a" (simple_pattern ctxt) p pp_print_pexp_function e + else + pp f "%a@ %a" + (label_exp ctxt) (label,eo,p) pp_print_pexp_function e + | Pexp_newtype (str,e) -> + pp f "(type@ %s)@ %a" str.txt pp_print_pexp_function e + | _ -> pp f "=@;%a" (expression ctxt) x + in + let tyvars_str tyvars = List.map (fun v -> v.txt) tyvars in + let is_desugared_gadt p e = + let gadt_pattern = + match p with + | {ppat_desc=Ppat_constraint({ppat_desc=Ppat_var _} as pat, + {ptyp_desc=Ptyp_poly (args_tyvars, rt)}); + ppat_attributes=[]}-> + Some (pat, args_tyvars, rt) + | _ -> None in + let rec gadt_exp tyvars e = + match e with + | {pexp_desc=Pexp_newtype (tyvar, e); pexp_attributes=[]} -> + gadt_exp (tyvar :: tyvars) e + | {pexp_desc=Pexp_constraint (e, ct); pexp_attributes=[]} -> + Some (List.rev tyvars, e, ct) + | _ -> None in + let gadt_exp = gadt_exp [] e in + match gadt_pattern, gadt_exp with + | Some (p, pt_tyvars, pt_ct), Some (e_tyvars, e, e_ct) + when tyvars_str pt_tyvars = tyvars_str e_tyvars -> + let ety = Typ.varify_constructors e_tyvars e_ct in + if ety = pt_ct then + Some (p, pt_tyvars, e_ct, e) else None + | _ -> None in + if x.pexp_attributes <> [] + then pp f "%a@;=@;%a" (pattern ctxt) p (expression ctxt) x else + match is_desugared_gadt p x with + | Some (p, [], ct, e) -> + pp f "%a@;: %a@;=@;%a" + (simple_pattern ctxt) p (core_type ctxt) ct (expression ctxt) e + | Some (p, tyvars, ct, e) -> begin + pp f "%a@;: type@;%a.@;%a@;=@;%a" + (simple_pattern ctxt) p (list pp_print_string ~sep:"@;") + (tyvars_str tyvars) (core_type ctxt) ct (expression ctxt) e + end + | None -> begin + match p with + | {ppat_desc=Ppat_constraint(p ,ty); + ppat_attributes=[]} -> (* special case for the first*) + begin match ty with + | {ptyp_desc=Ptyp_poly _; ptyp_attributes=[]} -> + pp f "%a@;:@;%a@;=@;%a" (simple_pattern ctxt) p + (core_type ctxt) ty (expression ctxt) x + | _ -> + pp f "(%a@;:@;%a)@;=@;%a" (simple_pattern ctxt) p + (core_type ctxt) ty (expression ctxt) x + end + | {ppat_desc=Ppat_var _; ppat_attributes=[]} -> + pp f "%a@ %a" (simple_pattern ctxt) p pp_print_pexp_function x + | _ -> + pp f "%a@;=@;%a" (pattern ctxt) p (expression ctxt) x + end + +(* [in] is not printed *) +and bindings ctxt f (rf,l) = + let binding kwd rf f x = + pp f "@[<2>%s %a%a@]%a" kwd rec_flag rf + (binding ctxt) x (item_attributes ctxt) x.pvb_attributes + in + match l with + | [] -> () + | [x] -> 
binding "let" rf f x + | x::xs -> + pp f "@[%a@,%a@]" + (binding "let" rf) x + (list ~sep:"@," (binding "and" Nonrecursive)) xs + +and structure_item ctxt f x = + match x.pstr_desc with + | Pstr_eval (e, attrs) -> + pp f "@[;;%a@]%a" + (expression ctxt) e + (item_attributes ctxt) attrs + | Pstr_type (_, []) -> assert false + | Pstr_type (rf, l) -> type_def_list ctxt f (rf, l) + | Pstr_value (rf, l) -> + (* pp f "@[let %a%a@]" rec_flag rf bindings l *) + pp f "@[<2>%a@]" (bindings ctxt) (rf,l) + | Pstr_typext te -> type_extension ctxt f te + | Pstr_exception ed -> exception_declaration ctxt f ed + | Pstr_module x -> + let rec module_helper = function + | {pmod_desc=Pmod_functor(s,mt,me'); pmod_attributes = []} -> + if mt = None then pp f "()" + else Misc.may (pp f "(%s:%a)" s.txt (module_type ctxt)) mt; + module_helper me' + | me -> me + in + pp f "@[module %s%a@]%a" + x.pmb_name.txt + (fun f me -> + let me = module_helper me in + match me with + | {pmod_desc= + Pmod_constraint + (me', + ({pmty_desc=(Pmty_ident (_) + | Pmty_signature (_));_} as mt)); + pmod_attributes = []} -> + pp f " :@;%a@;=@;%a@;" + (module_type ctxt) mt (module_expr ctxt) me' + | _ -> pp f " =@ %a" (module_expr ctxt) me + ) x.pmb_expr + (item_attributes ctxt) x.pmb_attributes + | Pstr_open od -> + pp f "@[<2>open%s@;%a@]%a" + (override od.popen_override) + longident_loc od.popen_lid + (item_attributes ctxt) od.popen_attributes + | Pstr_modtype {pmtd_name=s; pmtd_type=md; pmtd_attributes=attrs} -> + pp f "@[module@ type@ %s%a@]%a" + s.txt + (fun f md -> match md with + | None -> () + | Some mt -> + pp_print_space f () ; + pp f "@ =@ %a" (module_type ctxt) mt + ) md + (item_attributes ctxt) attrs + | Pstr_class l -> + let extract_class_args cl = + let rec loop acc = function + | {pcl_desc=Pcl_fun (l, eo, p, cl'); pcl_attributes = []} -> + loop ((l,eo,p) :: acc) cl' + | cl -> List.rev acc, cl + in + let args, cl = loop [] cl in + let constr, cl = + match cl with + | {pcl_desc=Pcl_constraint (cl', ct); pcl_attributes = []} -> + Some ct, cl' + | _ -> None, cl + in + args, constr, cl + in + let class_constraint f ct = pp f ": @[%a@] " (class_type ctxt) ct in + let class_declaration kwd f + ({pci_params=ls; pci_name={txt;_}; _} as x) = + let args, constr, cl = extract_class_args x.pci_expr in + pp f "@[<2>%s %a%a%s %a%a=@;%a@]%a" kwd + virtual_flag x.pci_virt + (class_params_def ctxt) ls txt + (list (label_exp ctxt)) args + (option class_constraint) constr + (class_expr ctxt) cl + (item_attributes ctxt) x.pci_attributes + in begin + match l with + | [] -> () + | [x] -> class_declaration "class" f x + | x :: xs -> + pp f "@[%a@,%a@]" + (class_declaration "class") x + (list ~sep:"@," (class_declaration "and")) xs + end + | Pstr_class_type l -> class_type_declaration_list ctxt f l + | Pstr_primitive vd -> + pp f "@[external@ %a@ :@ %a@]%a" + protect_ident vd.pval_name.txt + (value_description ctxt) vd + (item_attributes ctxt) vd.pval_attributes + | Pstr_include incl -> + pp f "@[include@ %a@]%a" + (module_expr ctxt) incl.pincl_mod + (item_attributes ctxt) incl.pincl_attributes + | Pstr_recmodule decls -> (* 3.07 *) + let aux f = function + | ({pmb_expr={pmod_desc=Pmod_constraint (expr, typ)}} as pmb) -> + pp f "@[@ and@ %s:%a@ =@ %a@]%a" pmb.pmb_name.txt + (module_type ctxt) typ + (module_expr ctxt) expr + (item_attributes ctxt) pmb.pmb_attributes + | _ -> assert false + in + begin match decls with + | ({pmb_expr={pmod_desc=Pmod_constraint (expr, typ)}} as pmb) :: l2 -> + pp f "@[@[module@ rec@ %s:%a@ =@ %a@]%a@ %a@]" + 
pmb.pmb_name.txt + (module_type ctxt) typ + (module_expr ctxt) expr + (item_attributes ctxt) pmb.pmb_attributes + (fun f l2 -> List.iter (aux f) l2) l2 + | _ -> assert false + end + | Pstr_attribute a -> floating_attribute ctxt f a + | Pstr_extension(e, a) -> + item_extension ctxt f e; + item_attributes ctxt f a + +and type_param ctxt f (ct, a) = + pp f "%s%a" (type_variance a) (core_type ctxt) ct + +and type_params ctxt f = function + | [] -> () + | l -> pp f "%a " (list (type_param ctxt) ~first:"(" ~last:")" ~sep:",@;") l + +and type_def_list ctxt f (rf, l) = + let type_decl kwd rf f x = + let eq = + if (x.ptype_kind = Ptype_abstract) + && (x.ptype_manifest = None) then "" + else " =" + in + pp f "@[<2>%s %a%a%s%s%a@]%a" kwd + nonrec_flag rf + (type_params ctxt) x.ptype_params + x.ptype_name.txt eq + (type_declaration ctxt) x + (item_attributes ctxt) x.ptype_attributes + in + match l with + | [] -> assert false + | [x] -> type_decl "type" rf f x + | x :: xs -> pp f "@[%a@,%a@]" + (type_decl "type" rf) x + (list ~sep:"@," (type_decl "and" Recursive)) xs + +and record_declaration ctxt f lbls = + let type_record_field f pld = + pp f "@[<2>%a%s:@;%a@;%a@]" + mutable_flag pld.pld_mutable + pld.pld_name.txt + (core_type ctxt) pld.pld_type + (attributes ctxt) pld.pld_attributes + in + pp f "{@\n%a}" + (list type_record_field ~sep:";@\n" ) lbls + +and type_declaration ctxt f x = + (* type_declaration has an attribute field, + but it's been printed by the caller of this method *) + let priv f = + match x.ptype_private with + | Public -> () + | Private -> pp f "@;private" + in + let manifest f = + match x.ptype_manifest with + | None -> () + | Some y -> + if x.ptype_kind = Ptype_abstract then + pp f "%t@;%a" priv (core_type ctxt) y + else + pp f "@;%a" (core_type ctxt) y + in + let constructor_declaration f pcd = + pp f "|@;"; + constructor_declaration ctxt f + (pcd.pcd_name.txt, pcd.pcd_args, pcd.pcd_res, pcd.pcd_attributes) + in + let repr f = + let intro f = + if x.ptype_manifest = None then () + else pp f "@;=" + in + match x.ptype_kind with + | Ptype_variant xs -> + pp f "%t%t@\n%a" intro priv + (list ~sep:"@\n" constructor_declaration) xs + | Ptype_abstract -> () + | Ptype_record l -> + pp f "%t%t@;%a" intro priv (record_declaration ctxt) l + | Ptype_open -> pp f "%t%t@;.." 
intro priv + in + let constraints f = + List.iter + (fun (ct1,ct2,_) -> + pp f "@[@ constraint@ %a@ =@ %a@]" + (core_type ctxt) ct1 (core_type ctxt) ct2) + x.ptype_cstrs + in + pp f "%t%t%t" manifest repr constraints + +and type_extension ctxt f x = + let extension_constructor f x = + pp f "@\n|@;%a" (extension_constructor ctxt) x + in + pp f "@[<2>type %a%a += %a@ %a@]%a" + (fun f -> function + | [] -> () + | l -> + pp f "%a@;" (list (type_param ctxt) ~first:"(" ~last:")" ~sep:",") l) + x.ptyext_params + longident_loc x.ptyext_path + private_flag x.ptyext_private (* Cf: #7200 *) + (list ~sep:"" extension_constructor) + x.ptyext_constructors + (item_attributes ctxt) x.ptyext_attributes + +and constructor_declaration ctxt f (name, args, res, attrs) = + let name = + match name with + | "::" -> "(::)" + | s -> s in + match res with + | None -> + pp f "%s%a@;%a" name + (fun f -> function + | Pcstr_tuple [] -> () + | Pcstr_tuple l -> + pp f "@;of@;%a" (list (core_type1 ctxt) ~sep:"@;*@;") l + | Pcstr_record l -> pp f "@;of@;%a" (record_declaration ctxt) l + ) args + (attributes ctxt) attrs + | Some r -> + pp f "%s:@;%a@;%a" name + (fun f -> function + | Pcstr_tuple [] -> core_type1 ctxt f r + | Pcstr_tuple l -> pp f "%a@;->@;%a" + (list (core_type1 ctxt) ~sep:"@;*@;") l + (core_type1 ctxt) r + | Pcstr_record l -> + pp f "%a@;->@;%a" (record_declaration ctxt) l (core_type1 ctxt) r + ) + args + (attributes ctxt) attrs + +and extension_constructor ctxt f x = + (* Cf: #7200 *) + match x.pext_kind with + | Pext_decl(l, r) -> + constructor_declaration ctxt f (x.pext_name.txt, l, r, x.pext_attributes) + | Pext_rebind li -> + pp f "%s%a@;=@;%a" x.pext_name.txt + (attributes ctxt) x.pext_attributes + longident_loc li + +and case_list ctxt f l : unit = + let aux f {pc_lhs; pc_guard; pc_rhs} = + pp f "@;| @[<2>%a%a@;->@;%a@]" + (pattern ctxt) pc_lhs (option (expression ctxt) ~first:"@;when@;") + pc_guard (expression (under_pipe ctxt)) pc_rhs + in + list aux f l ~sep:"" + +and label_x_expression_param ctxt f (l,e) = + let simple_name = match e with + | {pexp_desc=Pexp_ident {txt=Lident l;_}; + pexp_attributes=[]} -> Some l + | _ -> None + in match l with + | Nolabel -> expression2 ctxt f e (* level 2*) + | Optional str -> + if Some str = simple_name then + pp f "?%s" str + else + pp f "?%s:%a" str (simple_expr ctxt) e + | Labelled lbl -> + if Some lbl = simple_name then + pp f "~%s" lbl + else + pp f "~%s:%a" lbl (simple_expr ctxt) e + +and directive_argument f x = + match x with + | Pdir_none -> () + | Pdir_string (s) -> pp f "@ %S" s + | Pdir_int (n, None) -> pp f "@ %s" n + | Pdir_int (n, Some m) -> pp f "@ %s%c" n m + | Pdir_ident (li) -> pp f "@ %a" longident li + | Pdir_bool (b) -> pp f "@ %s" (string_of_bool b) + +let toplevel_phrase f x = + match x with + | Ptop_def (s) ->pp f "@[%a@]" (list (structure_item reset_ctxt)) s + (* pp_open_hvbox f 0; *) + (* pp_print_list structure_item f s ; *) + (* pp_close_box f (); *) + | Ptop_dir (s, da) -> + pp f "@[#%s@ %a@]" s directive_argument da + (* pp f "@[#%s@ %a@]" s directive_argument da *) + +let expression f x = + pp f "@[%a@]" (expression reset_ctxt) x + +let string_of_expression x = + ignore (flush_str_formatter ()) ; + let f = str_formatter in + expression f x; + flush_str_formatter () + +let string_of_structure x = + ignore (flush_str_formatter ()); + let f = str_formatter in + structure reset_ctxt f x; + flush_str_formatter () + +let top_phrase f x = + pp_print_newline f (); + toplevel_phrase f x; + pp f ";;"; + pp_print_newline f () + +let 
core_type = core_type reset_ctxt +let pattern = pattern reset_ctxt +let signature = signature reset_ctxt +let structure = structure reset_ctxt diff --git a/res_syntax/compiler-libs-406/pprintast.mli b/res_syntax/compiler-libs-406/pprintast.mli new file mode 100644 index 0000000000..74779dc90d --- /dev/null +++ b/res_syntax/compiler-libs-406/pprintast.mli @@ -0,0 +1,27 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Hongbo Zhang (University of Pennsylvania) *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +type space_formatter = (unit, Format.formatter, unit) format + +val toplevel_phrase : Format.formatter -> Parsetree.toplevel_phrase -> unit +val expression : Format.formatter -> Parsetree.expression -> unit +val string_of_expression : Parsetree.expression -> string +val top_phrase: Format.formatter -> Parsetree.toplevel_phrase -> unit +val core_type: Format.formatter -> Parsetree.core_type -> unit +val pattern: Format.formatter -> Parsetree.pattern -> unit +val signature: Format.formatter -> Parsetree.signature -> unit +val structure: Format.formatter -> Parsetree.structure -> unit +val string_of_structure: Parsetree.structure -> string +val string_of_int_as_char: int -> string diff --git a/res_syntax/compiler-libs-406/predef.ml b/res_syntax/compiler-libs-406/predef.ml new file mode 100644 index 0000000000..e00df7febd --- /dev/null +++ b/res_syntax/compiler-libs-406/predef.ml @@ -0,0 +1,258 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +(* Predefined type constructors (with special typing rules in typecore) *) + +open Path +open Types +open Btype + +let builtin_idents = ref [] + +let wrap create s = + let id = create s in + builtin_idents := (s, id) :: !builtin_idents; + id + +let ident_create = wrap Ident.create +let ident_create_predef_exn = wrap Ident.create_predef_exn + +let ident_int = ident_create "int" +and ident_char = ident_create "char" +and ident_bytes = ident_create "bytes" +and ident_float = ident_create "float" +and ident_bool = ident_create "bool" +and ident_unit = ident_create "unit" +and ident_exn = ident_create "exn" +and ident_array = ident_create "array" +and ident_list = ident_create "list" +and ident_option = ident_create "option" +and ident_nativeint = ident_create "nativeint" +and ident_int32 = ident_create "int32" +and ident_int64 = ident_create "int64" +and ident_lazy_t = ident_create "lazy_t" +and ident_string = ident_create "string" +and ident_extension_constructor = ident_create "extension_constructor" +and ident_floatarray = ident_create "floatarray" + +let path_int = Pident ident_int +and path_char = Pident ident_char +and path_bytes = Pident ident_bytes +and path_float = Pident ident_float +and path_bool = Pident ident_bool +and path_unit = Pident ident_unit +and path_exn = Pident ident_exn +and path_array = Pident ident_array +and path_list = Pident ident_list +and path_option = Pident ident_option +and path_nativeint = Pident ident_nativeint +and path_int32 = Pident ident_int32 +and path_int64 = Pident ident_int64 +and path_lazy_t = Pident ident_lazy_t +and path_string = Pident ident_string +and path_extension_constructor = Pident ident_extension_constructor +and path_floatarray = Pident ident_floatarray + +let type_int = newgenty (Tconstr(path_int, [], ref Mnil)) +and type_char = newgenty (Tconstr(path_char, [], ref Mnil)) +and type_bytes = newgenty (Tconstr(path_bytes, [], ref Mnil)) +and type_float = newgenty (Tconstr(path_float, [], ref Mnil)) +and type_bool = newgenty (Tconstr(path_bool, [], ref Mnil)) +and type_unit = newgenty (Tconstr(path_unit, [], ref Mnil)) +and type_exn = newgenty (Tconstr(path_exn, [], ref Mnil)) +and type_array t = newgenty (Tconstr(path_array, [t], ref Mnil)) +and type_list t = newgenty (Tconstr(path_list, [t], ref Mnil)) +and type_option t = newgenty (Tconstr(path_option, [t], ref Mnil)) +and type_nativeint = newgenty (Tconstr(path_nativeint, [], ref Mnil)) +and type_int32 = newgenty (Tconstr(path_int32, [], ref Mnil)) +and type_int64 = newgenty (Tconstr(path_int64, [], ref Mnil)) +and type_lazy_t t = newgenty (Tconstr(path_lazy_t, [t], ref Mnil)) +and type_string = newgenty (Tconstr(path_string, [], ref Mnil)) +and type_extension_constructor = + newgenty (Tconstr(path_extension_constructor, [], ref Mnil)) +and type_floatarray = newgenty (Tconstr(path_floatarray, [], ref Mnil)) + +let ident_match_failure = ident_create_predef_exn "Match_failure" +and ident_out_of_memory = ident_create_predef_exn "Out_of_memory" +and ident_invalid_argument = ident_create_predef_exn "Invalid_argument" +and ident_failure = ident_create_predef_exn "Failure" +and ident_not_found = ident_create_predef_exn "Not_found" +and ident_sys_error = ident_create_predef_exn "Sys_error" +and ident_end_of_file = ident_create_predef_exn "End_of_file" +and ident_division_by_zero = ident_create_predef_exn "Division_by_zero" +and ident_stack_overflow = ident_create_predef_exn "Stack_overflow" +and 
ident_sys_blocked_io = ident_create_predef_exn "Sys_blocked_io" +and ident_assert_failure = ident_create_predef_exn "Assert_failure" +and ident_undefined_recursive_module = + ident_create_predef_exn "Undefined_recursive_module" + +let all_predef_exns = [ + ident_match_failure; + ident_out_of_memory; + ident_invalid_argument; + ident_failure; + ident_not_found; + ident_sys_error; + ident_end_of_file; + ident_division_by_zero; + ident_stack_overflow; + ident_sys_blocked_io; + ident_assert_failure; + ident_undefined_recursive_module; +] + +let path_match_failure = Pident ident_match_failure +and path_assert_failure = Pident ident_assert_failure +and path_undefined_recursive_module = Pident ident_undefined_recursive_module + +let decl_abstr = + {type_params = []; + type_arity = 0; + type_kind = Type_abstract; + type_loc = Location.none; + type_private = Asttypes.Public; + type_manifest = None; + type_variance = []; + type_newtype_level = None; + type_attributes = []; + type_immediate = false; + type_unboxed = unboxed_false_default_false; + } + +let decl_abstr_imm = {decl_abstr with type_immediate = true} + +let cstr id args = + { + cd_id = id; + cd_args = Cstr_tuple args; + cd_res = None; + cd_loc = Location.none; + cd_attributes = []; + } + +let ident_false = ident_create "false" +and ident_true = ident_create "true" +and ident_void = ident_create "()" +and ident_nil = ident_create "[]" +and ident_cons = ident_create "::" +and ident_none = ident_create "None" +and ident_some = ident_create "Some" +let common_initial_env add_type add_extension empty_env = + let decl_bool = + {decl_abstr with + type_kind = Type_variant([cstr ident_false []; cstr ident_true []]); + type_immediate = true} + and decl_unit = + {decl_abstr with + type_kind = Type_variant([cstr ident_void []]); + type_immediate = true} + and decl_exn = + {decl_abstr with + type_kind = Type_open} + and decl_array = + let tvar = newgenvar() in + {decl_abstr with + type_params = [tvar]; + type_arity = 1; + type_variance = [Variance.full]} + and decl_list = + let tvar = newgenvar() in + {decl_abstr with + type_params = [tvar]; + type_arity = 1; + type_kind = + Type_variant([cstr ident_nil []; cstr ident_cons [tvar; type_list tvar]]); + type_variance = [Variance.covariant]} + and decl_option = + let tvar = newgenvar() in + {decl_abstr with + type_params = [tvar]; + type_arity = 1; + type_kind = Type_variant([cstr ident_none []; cstr ident_some [tvar]]); + type_variance = [Variance.covariant]} + and decl_lazy_t = + let tvar = newgenvar() in + {decl_abstr with + type_params = [tvar]; + type_arity = 1; + type_variance = [Variance.covariant]} + in + + let add_extension id l = + add_extension id + { ext_type_path = path_exn; + ext_type_params = []; + ext_args = Cstr_tuple l; + ext_ret_type = None; + ext_private = Asttypes.Public; + ext_loc = Location.none; + ext_attributes = [{Asttypes.txt="ocaml.warn_on_literal_pattern"; + loc=Location.none}, + Parsetree.PStr[]] } + in + add_extension ident_match_failure + [newgenty (Ttuple[type_string; type_int; type_int])] ( + add_extension ident_out_of_memory [] ( + add_extension ident_stack_overflow [] ( + add_extension ident_invalid_argument [type_string] ( + add_extension ident_failure [type_string] ( + add_extension ident_not_found [] ( + add_extension ident_sys_blocked_io [] ( + add_extension ident_sys_error [type_string] ( + add_extension ident_end_of_file [] ( + add_extension ident_division_by_zero [] ( + add_extension ident_assert_failure + [newgenty (Ttuple[type_string; type_int; type_int])] ( + 
add_extension ident_undefined_recursive_module + [newgenty (Ttuple[type_string; type_int; type_int])] ( + add_type ident_int64 decl_abstr ( + add_type ident_int32 decl_abstr ( + add_type ident_nativeint decl_abstr ( + add_type ident_lazy_t decl_lazy_t ( + add_type ident_option decl_option ( + add_type ident_list decl_list ( + add_type ident_array decl_array ( + add_type ident_exn decl_exn ( + add_type ident_unit decl_unit ( + add_type ident_bool decl_bool ( + add_type ident_float decl_abstr ( + add_type ident_string decl_abstr ( + add_type ident_char decl_abstr_imm ( + add_type ident_int decl_abstr_imm ( + add_type ident_extension_constructor decl_abstr ( + add_type ident_floatarray decl_abstr ( + empty_env)))))))))))))))))))))))))))) + +let build_initial_env add_type add_exception empty_env = + let common = common_initial_env add_type add_exception empty_env in + let safe_string = add_type ident_bytes decl_abstr common in + let decl_bytes_unsafe = {decl_abstr with type_manifest = Some type_string} in + let unsafe_string = add_type ident_bytes decl_bytes_unsafe common in + (safe_string, unsafe_string) + +let builtin_values = + List.map (fun id -> Ident.make_global id; (Ident.name id, id)) + [ident_match_failure; ident_out_of_memory; ident_stack_overflow; + ident_invalid_argument; + ident_failure; ident_not_found; ident_sys_error; ident_end_of_file; + ident_division_by_zero; ident_sys_blocked_io; + ident_assert_failure; ident_undefined_recursive_module ] + +(* Start non-predef identifiers at 1000. This way, more predefs can + be defined in this file (above!) without breaking .cmi + compatibility. *) + +let _ = Ident.set_current_time 999 +let builtin_idents = List.rev !builtin_idents diff --git a/res_syntax/compiler-libs-406/predef.mli b/res_syntax/compiler-libs-406/predef.mli new file mode 100644 index 0000000000..878dc6eb9f --- /dev/null +++ b/res_syntax/compiler-libs-406/predef.mli @@ -0,0 +1,79 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +(* Predefined type constructors (with special typing rules in typecore) *) + +open Types + +val type_int: type_expr +val type_char: type_expr +val type_string: type_expr +val type_bytes: type_expr +val type_float: type_expr +val type_bool: type_expr +val type_unit: type_expr +val type_exn: type_expr +val type_array: type_expr -> type_expr +val type_list: type_expr -> type_expr +val type_option: type_expr -> type_expr +val type_nativeint: type_expr +val type_int32: type_expr +val type_int64: type_expr +val type_lazy_t: type_expr -> type_expr +val type_extension_constructor:type_expr +val type_floatarray:type_expr + +val path_int: Path.t +val path_char: Path.t +val path_string: Path.t +val path_bytes: Path.t +val path_float: Path.t +val path_bool: Path.t +val path_unit: Path.t +val path_exn: Path.t +val path_array: Path.t +val path_list: Path.t +val path_option: Path.t +val path_nativeint: Path.t +val path_int32: Path.t +val path_int64: Path.t +val path_lazy_t: Path.t +val path_extension_constructor: Path.t +val path_floatarray: Path.t + +val path_match_failure: Path.t +val path_assert_failure : Path.t +val path_undefined_recursive_module : Path.t + +(* To build the initial environment. Since there is a nasty mutual + recursion between predef and env, we break it by parameterizing + over Env.t, Env.add_type and Env.add_extension. *) + +val build_initial_env: + (Ident.t -> type_declaration -> 'a -> 'a) -> + (Ident.t -> extension_constructor -> 'a -> 'a) -> + 'a -> 'a * 'a + +(* To initialize linker tables *) + +val builtin_values: (string * Ident.t) list +val builtin_idents: (string * Ident.t) list + +(** All predefined exceptions, exposed as [Ident.t] for flambda (for + building value approximations). + The [Ident.t] for division by zero is also exported explicitly + so flambda can generate code to raise it. *) +val ident_division_by_zero: Ident.t +val all_predef_exns : Ident.t list diff --git a/res_syntax/compiler-libs-406/primitive.ml b/res_syntax/compiler-libs-406/primitive.ml new file mode 100644 index 0000000000..4d0a070d55 --- /dev/null +++ b/res_syntax/compiler-libs-406/primitive.ml @@ -0,0 +1,222 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Description of primitive functions *) + +open Misc +open Parsetree + +type boxed_integer = Pnativeint | Pint32 | Pint64 + +type native_repr = + | Same_as_ocaml_repr + | Unboxed_float + | Unboxed_integer of boxed_integer + | Untagged_int + +type description = + { prim_name: string; (* Name of primitive or C function *) + prim_arity: int; (* Number of arguments *) + prim_alloc: bool; (* Does it allocates or raise? *) + prim_native_name: string; (* Name of C function for the nat. code gen. 
*) + prim_native_repr_args: native_repr list; + prim_native_repr_res: native_repr } + +type error = + | Old_style_float_with_native_repr_attribute + | Old_style_noalloc_with_noalloc_attribute + | No_native_primitive_with_repr_attribute + +exception Error of Location.t * error + +let is_ocaml_repr = function + | Same_as_ocaml_repr -> true + | Unboxed_float + | Unboxed_integer _ + | Untagged_int -> false + +let is_unboxed = function + | Same_as_ocaml_repr + | Untagged_int -> false + | Unboxed_float + | Unboxed_integer _ -> true + +let is_untagged = function + | Untagged_int -> true + | Same_as_ocaml_repr + | Unboxed_float + | Unboxed_integer _ -> false + +let rec make_native_repr_args arity x = + if arity = 0 then + [] + else + x :: make_native_repr_args (arity - 1) x + +let simple ~name ~arity ~alloc = + {prim_name = name; + prim_arity = arity; + prim_alloc = alloc; + prim_native_name = ""; + prim_native_repr_args = make_native_repr_args arity Same_as_ocaml_repr; + prim_native_repr_res = Same_as_ocaml_repr} + +let make ~name ~alloc ~native_name ~native_repr_args ~native_repr_res = + {prim_name = name; + prim_arity = List.length native_repr_args; + prim_alloc = alloc; + prim_native_name = native_name; + prim_native_repr_args = native_repr_args; + prim_native_repr_res = native_repr_res} + +let parse_declaration valdecl ~native_repr_args ~native_repr_res = + let arity = List.length native_repr_args in + let name, native_name, old_style_noalloc, old_style_float = + match valdecl.pval_prim with + | name :: "noalloc" :: name2 :: "float" :: _ -> (name, name2, true, true) + | name :: "noalloc" :: name2 :: _ -> (name, name2, true, false) + | name :: name2 :: "float" :: _ -> (name, name2, false, true) + | name :: "noalloc" :: _ -> (name, "", true, false) + | name :: name2 :: _ -> (name, name2, false, false) + | name :: _ -> (name, "", false, false) + | [] -> + fatal_error "Primitive.parse_declaration" + in + let noalloc_attribute = + Attr_helper.has_no_payload_attribute ["noalloc"; "ocaml.noalloc"] + valdecl.pval_attributes + in + if old_style_float && + not (List.for_all is_ocaml_repr native_repr_args && + is_ocaml_repr native_repr_res) then + raise (Error (valdecl.pval_loc, + Old_style_float_with_native_repr_attribute)); + if old_style_noalloc && noalloc_attribute then + raise (Error (valdecl.pval_loc, + Old_style_noalloc_with_noalloc_attribute)); + (* The compiler used to assume "noalloc" with "float", we just make this + explicit now (GPR#167): *) + let old_style_noalloc = old_style_noalloc || old_style_float in + if old_style_float then + Location.deprecated valdecl.pval_loc + "[@@unboxed] + [@@noalloc] should be used instead of \"float\"" + else if old_style_noalloc then + Location.deprecated valdecl.pval_loc + "[@@noalloc] should be used instead of \"noalloc\""; + if native_name = "" && + not (List.for_all is_ocaml_repr native_repr_args && + is_ocaml_repr native_repr_res) then + raise (Error (valdecl.pval_loc, + No_native_primitive_with_repr_attribute)); + let noalloc = old_style_noalloc || noalloc_attribute in + let native_repr_args, native_repr_res = + if old_style_float then + (make_native_repr_args arity Unboxed_float, Unboxed_float) + else + (native_repr_args, native_repr_res) + in + {prim_name = name; + prim_arity = arity; + prim_alloc = not noalloc; + prim_native_name = native_name; + prim_native_repr_args = native_repr_args; + prim_native_repr_res = native_repr_res} + +open Outcometree + +let rec add_native_repr_attributes ty attrs = + match ty, attrs with + | Otyp_arrow (label, a, b), 
attr_opt :: rest -> + let b = add_native_repr_attributes b rest in + let a = + match attr_opt with + | None -> a + | Some attr -> Otyp_attribute (a, attr) + in + Otyp_arrow (label, a, b) + | _, [Some attr] -> Otyp_attribute (ty, attr) + | _ -> + assert (List.for_all (fun x -> x = None) attrs); + ty + +let oattr_unboxed = { oattr_name = "unboxed" } +let oattr_untagged = { oattr_name = "untagged" } +let oattr_noalloc = { oattr_name = "noalloc" } + +let print p osig_val_decl = + let prims = + if p.prim_native_name <> "" then + [p.prim_name; p.prim_native_name] + else + [p.prim_name] + in + let for_all f = + List.for_all f p.prim_native_repr_args && f p.prim_native_repr_res + in + let all_unboxed = for_all is_unboxed in + let all_untagged = for_all is_untagged in + let attrs = if p.prim_alloc then [] else [oattr_noalloc] in + let attrs = + if all_unboxed then + oattr_unboxed :: attrs + else if all_untagged then + oattr_untagged :: attrs + else + attrs + in + let attr_of_native_repr = function + | Same_as_ocaml_repr -> None + | Unboxed_float + | Unboxed_integer _ -> if all_unboxed then None else Some oattr_unboxed + | Untagged_int -> if all_untagged then None else Some oattr_untagged + in + let type_attrs = + List.map attr_of_native_repr p.prim_native_repr_args @ + [attr_of_native_repr p.prim_native_repr_res] + in + { osig_val_decl with + oval_prims = prims; + oval_type = add_native_repr_attributes osig_val_decl.oval_type type_attrs; + oval_attributes = attrs } + +let native_name p = + if p.prim_native_name <> "" + then p.prim_native_name + else p.prim_name + +let byte_name p = + p.prim_name + +let report_error ppf err = + match err with + | Old_style_float_with_native_repr_attribute -> + Format.fprintf ppf "Cannot use \"float\" in conjunction with \ + [%@unboxed]/[%@untagged]" + | Old_style_noalloc_with_noalloc_attribute -> + Format.fprintf ppf "Cannot use \"noalloc\" in conjunction with \ + [%@%@noalloc]" + | No_native_primitive_with_repr_attribute -> + Format.fprintf ppf + "The native code version of the primitive is mandatory when \ + attributes [%@untagged] or [%@unboxed] are present" + +let () = + Location.register_error_of_exn + (function + | Error (loc, err) -> + Some (Location.error_of_printer loc report_error err) + | _ -> + None + ) diff --git a/res_syntax/compiler-libs-406/primitive.mli b/res_syntax/compiler-libs-406/primitive.mli new file mode 100644 index 0000000000..02ece7d96c --- /dev/null +++ b/res_syntax/compiler-libs-406/primitive.mli @@ -0,0 +1,71 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +(* Description of primitive functions *) + +type boxed_integer = Pnativeint | Pint32 | Pint64 + +(* Representation of arguments/result for the native code version + of a primitive *) +type native_repr = + | Same_as_ocaml_repr + | Unboxed_float + | Unboxed_integer of boxed_integer + | Untagged_int + +type description = private + { prim_name: string; (* Name of primitive or C function *) + prim_arity: int; (* Number of arguments *) + prim_alloc: bool; (* Does it allocates or raise? *) + prim_native_name: string; (* Name of C function for the nat. code gen. *) + prim_native_repr_args: native_repr list; + prim_native_repr_res: native_repr } + +(* Invariant [List.length d.prim_native_repr_args = d.prim_arity] *) + +val simple + : name:string + -> arity:int + -> alloc:bool + -> description + +val make + : name:string + -> alloc:bool + -> native_name:string + -> native_repr_args: native_repr list + -> native_repr_res: native_repr + -> description + +val parse_declaration + : Parsetree.value_description + -> native_repr_args:native_repr list + -> native_repr_res:native_repr + -> description + +val print + : description + -> Outcometree.out_val_decl + -> Outcometree.out_val_decl + +val native_name: description -> string +val byte_name: description -> string + +type error = + | Old_style_float_with_native_repr_attribute + | Old_style_noalloc_with_noalloc_attribute + | No_native_primitive_with_repr_attribute + +exception Error of Location.t * error diff --git a/res_syntax/compiler-libs-406/printast.ml b/res_syntax/compiler-libs-406/printast.ml new file mode 100644 index 0000000000..70abefcd20 --- /dev/null +++ b/res_syntax/compiler-libs-406/printast.ml @@ -0,0 +1,923 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Damien Doligez, projet Para, INRIA Rocquencourt *) +(* *) +(* Copyright 1999 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +open Asttypes;; +open Format;; +open Lexing;; +open Location;; +open Parsetree;; + +let fmt_position with_name f l = + let fname = if with_name then l.pos_fname else "" in + if l.pos_lnum = -1 + then fprintf f "%s[%d]" fname l.pos_cnum + else fprintf f "%s[%d,%d+%d]" fname l.pos_lnum l.pos_bol + (l.pos_cnum - l.pos_bol) +;; + +let fmt_location f loc = + let p_2nd_name = loc.loc_start.pos_fname <> loc.loc_end.pos_fname in + fprintf f "(%a..%a)" (fmt_position true) loc.loc_start + (fmt_position p_2nd_name) loc.loc_end; + if loc.loc_ghost then fprintf f " ghost"; +;; + +let rec fmt_longident_aux f x = + match x with + | Longident.Lident (s) -> fprintf f "%s" s; + | Longident.Ldot (y, s) -> fprintf f "%a.%s" fmt_longident_aux y s; + | Longident.Lapply (y, z) -> + fprintf f "%a(%a)" fmt_longident_aux y fmt_longident_aux z; +;; + +let fmt_longident f x = fprintf f "\"%a\"" fmt_longident_aux x;; + +let fmt_longident_loc f (x : Longident.t loc) = + fprintf f "\"%a\" %a" fmt_longident_aux x.txt fmt_location x.loc; +;; + +let fmt_string_loc f (x : string loc) = + fprintf f "\"%s\" %a" x.txt fmt_location x.loc; +;; + +let fmt_char_option f = function + | None -> fprintf f "None" + | Some c -> fprintf f "Some %c" c + +let fmt_constant f x = + match x with + | Pconst_integer (i,m) -> fprintf f "PConst_int (%s,%a)" i fmt_char_option m; + | Pconst_char (i) -> fprintf f "PConst_char %02x" i; + | Pconst_string (s, None) -> fprintf f "PConst_string(%S,None)" s; + | Pconst_string (s, Some delim) -> + fprintf f "PConst_string (%S,Some %S)" s delim; + | Pconst_float (s,m) -> fprintf f "PConst_float (%s,%a)" s fmt_char_option m; +;; + +let fmt_mutable_flag f x = + match x with + | Immutable -> fprintf f "Immutable"; + | Mutable -> fprintf f "Mutable"; +;; + +let fmt_virtual_flag f x = + match x with + | Virtual -> fprintf f "Virtual"; + | Concrete -> fprintf f "Concrete"; +;; + +let fmt_override_flag f x = + match x with + | Override -> fprintf f "Override"; + | Fresh -> fprintf f "Fresh"; +;; + +let fmt_closed_flag f x = + match x with + | Closed -> fprintf f "Closed" + | Open -> fprintf f "Open" + +let fmt_rec_flag f x = + match x with + | Nonrecursive -> fprintf f "Nonrec"; + | Recursive -> fprintf f "Rec"; +;; + +let fmt_direction_flag f x = + match x with + | Upto -> fprintf f "Up"; + | Downto -> fprintf f "Down"; +;; + +let fmt_private_flag f x = + match x with + | Public -> fprintf f "Public"; + | Private -> fprintf f "Private"; +;; + +let line i f s (*...*) = + fprintf f "%s" (String.make ((2*i) mod 72) ' '); + fprintf f s (*...*) +;; + +let list i f ppf l = + match l with + | [] -> line i ppf "[]\n"; + | _ :: _ -> + line i ppf "[\n"; + List.iter (f (i+1) ppf) l; + line i ppf "]\n"; +;; + +let option i f ppf x = + match x with + | None -> line i ppf "None\n"; + | Some x -> + line i ppf "Some\n"; + f (i+1) ppf x; +;; + +let longident_loc i ppf li = line i ppf "%a\n" fmt_longident_loc li;; +let string i ppf s = line i ppf "\"%s\"\n" s;; +let string_loc i ppf s = line i ppf "%a\n" fmt_string_loc s;; +let arg_label i ppf = function + | Nolabel -> line i ppf "Nolabel\n" + | Optional s -> line i ppf "Optional \"%s\"\n" s + | Labelled s -> line i ppf "Labelled \"%s\"\n" s +;; + +let rec core_type i ppf x = + line i ppf "core_type %a\n" fmt_location x.ptyp_loc; + attributes i ppf x.ptyp_attributes; + let i = i+1 in + match x.ptyp_desc with + | Ptyp_any -> line i ppf "Ptyp_any\n"; + | Ptyp_var (s) -> line i ppf "Ptyp_var 
%s\n" s; + | Ptyp_arrow (l, ct1, ct2) -> + line i ppf "Ptyp_arrow\n"; + arg_label i ppf l; + core_type i ppf ct1; + core_type i ppf ct2; + | Ptyp_tuple l -> + line i ppf "Ptyp_tuple\n"; + list i core_type ppf l; + | Ptyp_constr (li, l) -> + line i ppf "Ptyp_constr %a\n" fmt_longident_loc li; + list i core_type ppf l; + | Ptyp_variant (l, closed, low) -> + line i ppf "Ptyp_variant closed=%a\n" fmt_closed_flag closed; + list i label_x_bool_x_core_type_list ppf l; + option i (fun i -> list i string) ppf low + | Ptyp_object (l, c) -> + line i ppf "Ptyp_object %a\n" fmt_closed_flag c; + let i = i + 1 in + List.iter ( + function + | Otag (l, attrs, t) -> + line i ppf "method %s\n" l.txt; + attributes i ppf attrs; + core_type (i + 1) ppf t + | Oinherit ct -> + line i ppf "Oinherit\n"; + core_type (i + 1) ppf ct + ) l + | Ptyp_class (li, l) -> + line i ppf "Ptyp_class %a\n" fmt_longident_loc li; + list i core_type ppf l + | Ptyp_alias (ct, s) -> + line i ppf "Ptyp_alias \"%s\"\n" s; + core_type i ppf ct; + | Ptyp_poly (sl, ct) -> + line i ppf "Ptyp_poly%a\n" + (fun ppf -> List.iter (fun x -> fprintf ppf " '%s" x.txt)) sl; + core_type i ppf ct; + | Ptyp_package (s, l) -> + line i ppf "Ptyp_package %a\n" fmt_longident_loc s; + list i package_with ppf l; + | Ptyp_extension (s, arg) -> + line i ppf "Ptyp_extension \"%s\"\n" s.txt; + payload i ppf arg + +and package_with i ppf (s, t) = + line i ppf "with type %a\n" fmt_longident_loc s; + core_type i ppf t + +and pattern i ppf x = + line i ppf "pattern %a\n" fmt_location x.ppat_loc; + attributes i ppf x.ppat_attributes; + let i = i+1 in + match x.ppat_desc with + | Ppat_any -> line i ppf "Ppat_any\n"; + | Ppat_var (s) -> line i ppf "Ppat_var %a\n" fmt_string_loc s; + | Ppat_alias (p, s) -> + line i ppf "Ppat_alias %a\n" fmt_string_loc s; + pattern i ppf p; + | Ppat_constant (c) -> line i ppf "Ppat_constant %a\n" fmt_constant c; + | Ppat_interval (c1, c2) -> + line i ppf "Ppat_interval %a..%a\n" fmt_constant c1 fmt_constant c2; + | Ppat_tuple (l) -> + line i ppf "Ppat_tuple\n"; + list i pattern ppf l; + | Ppat_construct (li, po) -> + line i ppf "Ppat_construct %a\n" fmt_longident_loc li; + option i pattern ppf po; + | Ppat_variant (l, po) -> + line i ppf "Ppat_variant \"%s\"\n" l; + option i pattern ppf po; + | Ppat_record (l, c) -> + line i ppf "Ppat_record %a\n" fmt_closed_flag c; + list i longident_x_pattern ppf l; + | Ppat_array (l) -> + line i ppf "Ppat_array\n"; + list i pattern ppf l; + | Ppat_or (p1, p2) -> + line i ppf "Ppat_or\n"; + pattern i ppf p1; + pattern i ppf p2; + | Ppat_lazy p -> + line i ppf "Ppat_lazy\n"; + pattern i ppf p; + | Ppat_constraint (p, ct) -> + line i ppf "Ppat_constraint\n"; + pattern i ppf p; + core_type i ppf ct; + | Ppat_type (li) -> + line i ppf "Ppat_type\n"; + longident_loc i ppf li + | Ppat_unpack s -> + line i ppf "Ppat_unpack %a\n" fmt_string_loc s; + | Ppat_exception p -> + line i ppf "Ppat_exception\n"; + pattern i ppf p + | Ppat_open (m,p) -> + line i ppf "Ppat_open \"%a\"\n" fmt_longident_loc m; + pattern i ppf p + | Ppat_extension (s, arg) -> + line i ppf "Ppat_extension \"%s\"\n" s.txt; + payload i ppf arg + +and expression i ppf x = + line i ppf "expression %a\n" fmt_location x.pexp_loc; + attributes i ppf x.pexp_attributes; + let i = i+1 in + match x.pexp_desc with + | Pexp_ident (li) -> line i ppf "Pexp_ident %a\n" fmt_longident_loc li; + | Pexp_constant (c) -> line i ppf "Pexp_constant %a\n" fmt_constant c; + | Pexp_let (rf, l, e) -> + line i ppf "Pexp_let %a\n" fmt_rec_flag rf; + list i value_binding 
ppf l; + expression i ppf e; + | Pexp_function l -> + line i ppf "Pexp_function\n"; + list i case ppf l; + | Pexp_fun (l, eo, p, e) -> + line i ppf "Pexp_fun\n"; + arg_label i ppf l; + option i expression ppf eo; + pattern i ppf p; + expression i ppf e; + | Pexp_apply (e, l) -> + line i ppf "Pexp_apply\n"; + expression i ppf e; + list i label_x_expression ppf l; + | Pexp_match (e, l) -> + line i ppf "Pexp_match\n"; + expression i ppf e; + list i case ppf l; + | Pexp_try (e, l) -> + line i ppf "Pexp_try\n"; + expression i ppf e; + list i case ppf l; + | Pexp_tuple (l) -> + line i ppf "Pexp_tuple\n"; + list i expression ppf l; + | Pexp_construct (li, eo) -> + line i ppf "Pexp_construct %a\n" fmt_longident_loc li; + option i expression ppf eo; + | Pexp_variant (l, eo) -> + line i ppf "Pexp_variant \"%s\"\n" l; + option i expression ppf eo; + | Pexp_record (l, eo) -> + line i ppf "Pexp_record\n"; + list i longident_x_expression ppf l; + option i expression ppf eo; + | Pexp_field (e, li) -> + line i ppf "Pexp_field\n"; + expression i ppf e; + longident_loc i ppf li; + | Pexp_setfield (e1, li, e2) -> + line i ppf "Pexp_setfield\n"; + expression i ppf e1; + longident_loc i ppf li; + expression i ppf e2; + | Pexp_array (l) -> + line i ppf "Pexp_array\n"; + list i expression ppf l; + | Pexp_ifthenelse (e1, e2, eo) -> + line i ppf "Pexp_ifthenelse\n"; + expression i ppf e1; + expression i ppf e2; + option i expression ppf eo; + | Pexp_sequence (e1, e2) -> + line i ppf "Pexp_sequence\n"; + expression i ppf e1; + expression i ppf e2; + | Pexp_while (e1, e2) -> + line i ppf "Pexp_while\n"; + expression i ppf e1; + expression i ppf e2; + | Pexp_for (p, e1, e2, df, e3) -> + line i ppf "Pexp_for %a\n" fmt_direction_flag df; + pattern i ppf p; + expression i ppf e1; + expression i ppf e2; + expression i ppf e3; + | Pexp_constraint (e, ct) -> + line i ppf "Pexp_constraint\n"; + expression i ppf e; + core_type i ppf ct; + | Pexp_coerce (e, cto1, cto2) -> + line i ppf "Pexp_coerce\n"; + expression i ppf e; + option i core_type ppf cto1; + core_type i ppf cto2; + | Pexp_send (e, s) -> + line i ppf "Pexp_send \"%s\"\n" s.txt; + expression i ppf e; + | Pexp_new (li) -> line i ppf "Pexp_new %a\n" fmt_longident_loc li; + | Pexp_setinstvar (s, e) -> + line i ppf "Pexp_setinstvar %a\n" fmt_string_loc s; + expression i ppf e; + | Pexp_override (l) -> + line i ppf "Pexp_override\n"; + list i string_x_expression ppf l; + | Pexp_letmodule (s, me, e) -> + line i ppf "Pexp_letmodule %a\n" fmt_string_loc s; + module_expr i ppf me; + expression i ppf e; + | Pexp_letexception (cd, e) -> + line i ppf "Pexp_letexception\n"; + extension_constructor i ppf cd; + expression i ppf e; + | Pexp_assert (e) -> + line i ppf "Pexp_assert\n"; + expression i ppf e; + | Pexp_lazy (e) -> + line i ppf "Pexp_lazy\n"; + expression i ppf e; + | Pexp_poly (e, cto) -> + line i ppf "Pexp_poly\n"; + expression i ppf e; + option i core_type ppf cto; + | Pexp_object s -> + line i ppf "Pexp_object\n"; + class_structure i ppf s + | Pexp_newtype (s, e) -> + line i ppf "Pexp_newtype \"%s\"\n" s.txt; + expression i ppf e + | Pexp_pack me -> + line i ppf "Pexp_pack\n"; + module_expr i ppf me + | Pexp_open (ovf, m, e) -> + line i ppf "Pexp_open %a \"%a\"\n" fmt_override_flag ovf + fmt_longident_loc m; + expression i ppf e + | Pexp_extension (s, arg) -> + line i ppf "Pexp_extension \"%s\"\n" s.txt; + payload i ppf arg + | Pexp_unreachable -> + line i ppf "Pexp_unreachable" + +and value_description i ppf x = + line i ppf "value_description %a %a\n" 
fmt_string_loc + x.pval_name fmt_location x.pval_loc; + attributes i ppf x.pval_attributes; + core_type (i+1) ppf x.pval_type; + list (i+1) string ppf x.pval_prim + +and type_parameter i ppf (x, _variance) = core_type i ppf x + +and type_declaration i ppf x = + line i ppf "type_declaration %a %a\n" fmt_string_loc x.ptype_name + fmt_location x.ptype_loc; + attributes i ppf x.ptype_attributes; + let i = i+1 in + line i ppf "ptype_params =\n"; + list (i+1) type_parameter ppf x.ptype_params; + line i ppf "ptype_cstrs =\n"; + list (i+1) core_type_x_core_type_x_location ppf x.ptype_cstrs; + line i ppf "ptype_kind =\n"; + type_kind (i+1) ppf x.ptype_kind; + line i ppf "ptype_private = %a\n" fmt_private_flag x.ptype_private; + line i ppf "ptype_manifest =\n"; + option (i+1) core_type ppf x.ptype_manifest + +and attributes i ppf l = + let i = i + 1 in + List.iter + (fun ({Location.loc; txt}, arg) -> + line i ppf "attribute \"%s\" %a\n" txt fmt_location loc; + payload (i + 1) ppf arg; + ) + l + +and payload i ppf = function + | PStr x -> structure i ppf x + | PSig x -> signature i ppf x + | PTyp x -> core_type i ppf x + | PPat (x, None) -> pattern i ppf x + | PPat (x, Some g) -> + pattern i ppf x; + line i ppf "\n"; + expression (i + 1) ppf g + + +and type_kind i ppf x = + match x with + | Ptype_abstract -> + line i ppf "Ptype_abstract\n" + | Ptype_variant l -> + line i ppf "Ptype_variant\n"; + list (i+1) constructor_decl ppf l; + | Ptype_record l -> + line i ppf "Ptype_record\n"; + list (i+1) label_decl ppf l; + | Ptype_open -> + line i ppf "Ptype_open\n"; + +and type_extension i ppf x = + line i ppf "type_extension\n"; + attributes i ppf x.ptyext_attributes; + let i = i+1 in + line i ppf "ptyext_path = %a\n" fmt_longident_loc x.ptyext_path; + line i ppf "ptyext_params =\n"; + list (i+1) type_parameter ppf x.ptyext_params; + line i ppf "ptyext_constructors =\n"; + list (i+1) extension_constructor ppf x.ptyext_constructors; + line i ppf "ptyext_private = %a\n" fmt_private_flag x.ptyext_private; + +and extension_constructor i ppf x = + line i ppf "extension_constructor %a\n" fmt_location x.pext_loc; + attributes i ppf x.pext_attributes; + let i = i + 1 in + line i ppf "pext_name = \"%s\"\n" x.pext_name.txt; + line i ppf "pext_kind =\n"; + extension_constructor_kind (i + 1) ppf x.pext_kind; + +and extension_constructor_kind i ppf x = + match x with + Pext_decl(a, r) -> + line i ppf "Pext_decl\n"; + constructor_arguments (i+1) ppf a; + option (i+1) core_type ppf r; + | Pext_rebind li -> + line i ppf "Pext_rebind\n"; + line (i+1) ppf "%a\n" fmt_longident_loc li; + +and class_type i ppf x = + line i ppf "class_type %a\n" fmt_location x.pcty_loc; + attributes i ppf x.pcty_attributes; + let i = i+1 in + match x.pcty_desc with + | Pcty_constr (li, l) -> + line i ppf "Pcty_constr %a\n" fmt_longident_loc li; + list i core_type ppf l; + | Pcty_signature (cs) -> + line i ppf "Pcty_signature\n"; + class_signature i ppf cs; + | Pcty_arrow (l, co, cl) -> + line i ppf "Pcty_arrow\n"; + arg_label i ppf l; + core_type i ppf co; + class_type i ppf cl; + | Pcty_extension (s, arg) -> + line i ppf "Pcty_extension \"%s\"\n" s.txt; + payload i ppf arg + | Pcty_open (ovf, m, e) -> + line i ppf "Pcty_open %a \"%a\"\n" fmt_override_flag ovf + fmt_longident_loc m; + class_type i ppf e + +and class_signature i ppf cs = + line i ppf "class_signature\n"; + core_type (i+1) ppf cs.pcsig_self; + list (i+1) class_type_field ppf cs.pcsig_fields; + +and class_type_field i ppf x = + line i ppf "class_type_field %a\n" fmt_location 
x.pctf_loc; + let i = i+1 in + attributes i ppf x.pctf_attributes; + match x.pctf_desc with + | Pctf_inherit (ct) -> + line i ppf "Pctf_inherit\n"; + class_type i ppf ct; + | Pctf_val (s, mf, vf, ct) -> + line i ppf "Pctf_val \"%s\" %a %a\n" s.txt fmt_mutable_flag mf + fmt_virtual_flag vf; + core_type (i+1) ppf ct; + | Pctf_method (s, pf, vf, ct) -> + line i ppf "Pctf_method \"%s\" %a %a\n" s.txt fmt_private_flag pf + fmt_virtual_flag vf; + core_type (i+1) ppf ct; + | Pctf_constraint (ct1, ct2) -> + line i ppf "Pctf_constraint\n"; + core_type (i+1) ppf ct1; + core_type (i+1) ppf ct2; + | Pctf_attribute (s, arg) -> + line i ppf "Pctf_attribute \"%s\"\n" s.txt; + payload i ppf arg + | Pctf_extension (s, arg) -> + line i ppf "Pctf_extension \"%s\"\n" s.txt; + payload i ppf arg + +and class_description i ppf x = + line i ppf "class_description %a\n" fmt_location x.pci_loc; + attributes i ppf x.pci_attributes; + let i = i+1 in + line i ppf "pci_virt = %a\n" fmt_virtual_flag x.pci_virt; + line i ppf "pci_params =\n"; + list (i+1) type_parameter ppf x.pci_params; + line i ppf "pci_name = %a\n" fmt_string_loc x.pci_name; + line i ppf "pci_expr =\n"; + class_type (i+1) ppf x.pci_expr; + +and class_type_declaration i ppf x = + line i ppf "class_type_declaration %a\n" fmt_location x.pci_loc; + attributes i ppf x.pci_attributes; + let i = i+1 in + line i ppf "pci_virt = %a\n" fmt_virtual_flag x.pci_virt; + line i ppf "pci_params =\n"; + list (i+1) type_parameter ppf x.pci_params; + line i ppf "pci_name = %a\n" fmt_string_loc x.pci_name; + line i ppf "pci_expr =\n"; + class_type (i+1) ppf x.pci_expr; + +and class_expr i ppf x = + line i ppf "class_expr %a\n" fmt_location x.pcl_loc; + attributes i ppf x.pcl_attributes; + let i = i+1 in + match x.pcl_desc with + | Pcl_constr (li, l) -> + line i ppf "Pcl_constr %a\n" fmt_longident_loc li; + list i core_type ppf l; + | Pcl_structure (cs) -> + line i ppf "Pcl_structure\n"; + class_structure i ppf cs; + | Pcl_fun (l, eo, p, e) -> + line i ppf "Pcl_fun\n"; + arg_label i ppf l; + option i expression ppf eo; + pattern i ppf p; + class_expr i ppf e; + | Pcl_apply (ce, l) -> + line i ppf "Pcl_apply\n"; + class_expr i ppf ce; + list i label_x_expression ppf l; + | Pcl_let (rf, l, ce) -> + line i ppf "Pcl_let %a\n" fmt_rec_flag rf; + list i value_binding ppf l; + class_expr i ppf ce; + | Pcl_constraint (ce, ct) -> + line i ppf "Pcl_constraint\n"; + class_expr i ppf ce; + class_type i ppf ct; + | Pcl_extension (s, arg) -> + line i ppf "Pcl_extension \"%s\"\n" s.txt; + payload i ppf arg + | Pcl_open (ovf, m, e) -> + line i ppf "Pcl_open %a \"%a\"\n" fmt_override_flag ovf + fmt_longident_loc m; + class_expr i ppf e + +and class_structure i ppf { pcstr_self = p; pcstr_fields = l } = + line i ppf "class_structure\n"; + pattern (i+1) ppf p; + list (i+1) class_field ppf l; + +and class_field i ppf x = + line i ppf "class_field %a\n" fmt_location x.pcf_loc; + let i = i + 1 in + attributes i ppf x.pcf_attributes; + match x.pcf_desc with + | Pcf_inherit (ovf, ce, so) -> + line i ppf "Pcf_inherit %a\n" fmt_override_flag ovf; + class_expr (i+1) ppf ce; + option (i+1) string_loc ppf so; + | Pcf_val (s, mf, k) -> + line i ppf "Pcf_val %a\n" fmt_mutable_flag mf; + line (i+1) ppf "%a\n" fmt_string_loc s; + class_field_kind (i+1) ppf k + | Pcf_method (s, pf, k) -> + line i ppf "Pcf_method %a\n" fmt_private_flag pf; + line (i+1) ppf "%a\n" fmt_string_loc s; + class_field_kind (i+1) ppf k + | Pcf_constraint (ct1, ct2) -> + line i ppf "Pcf_constraint\n"; + core_type (i+1) ppf ct1; + 
core_type (i+1) ppf ct2; + | Pcf_initializer (e) -> + line i ppf "Pcf_initializer\n"; + expression (i+1) ppf e; + | Pcf_attribute (s, arg) -> + line i ppf "Pcf_attribute \"%s\"\n" s.txt; + payload i ppf arg + | Pcf_extension (s, arg) -> + line i ppf "Pcf_extension \"%s\"\n" s.txt; + payload i ppf arg + +and class_field_kind i ppf = function + | Cfk_concrete (o, e) -> + line i ppf "Concrete %a\n" fmt_override_flag o; + expression i ppf e + | Cfk_virtual t -> + line i ppf "Virtual\n"; + core_type i ppf t + +and class_declaration i ppf x = + line i ppf "class_declaration %a\n" fmt_location x.pci_loc; + attributes i ppf x.pci_attributes; + let i = i+1 in + line i ppf "pci_virt = %a\n" fmt_virtual_flag x.pci_virt; + line i ppf "pci_params =\n"; + list (i+1) type_parameter ppf x.pci_params; + line i ppf "pci_name = %a\n" fmt_string_loc x.pci_name; + line i ppf "pci_expr =\n"; + class_expr (i+1) ppf x.pci_expr; + +and module_type i ppf x = + line i ppf "module_type %a\n" fmt_location x.pmty_loc; + attributes i ppf x.pmty_attributes; + let i = i+1 in + match x.pmty_desc with + | Pmty_ident li -> line i ppf "Pmty_ident %a\n" fmt_longident_loc li; + | Pmty_alias li -> line i ppf "Pmty_alias %a\n" fmt_longident_loc li; + | Pmty_signature (s) -> + line i ppf "Pmty_signature\n"; + signature i ppf s; + | Pmty_functor (s, mt1, mt2) -> + line i ppf "Pmty_functor %a\n" fmt_string_loc s; + Misc.may (module_type i ppf) mt1; + module_type i ppf mt2; + | Pmty_with (mt, l) -> + line i ppf "Pmty_with\n"; + module_type i ppf mt; + list i with_constraint ppf l; + | Pmty_typeof m -> + line i ppf "Pmty_typeof\n"; + module_expr i ppf m; + | Pmty_extension (s, arg) -> + line i ppf "Pmod_extension \"%s\"\n" s.txt; + payload i ppf arg + +and signature i ppf x = list i signature_item ppf x + +and signature_item i ppf x = + line i ppf "signature_item %a\n" fmt_location x.psig_loc; + let i = i+1 in + match x.psig_desc with + | Psig_value vd -> + line i ppf "Psig_value\n"; + value_description i ppf vd; + | Psig_type (rf, l) -> + line i ppf "Psig_type %a\n" fmt_rec_flag rf; + list i type_declaration ppf l; + | Psig_typext te -> + line i ppf "Psig_typext\n"; + type_extension i ppf te + | Psig_exception ext -> + line i ppf "Psig_exception\n"; + extension_constructor i ppf ext; + | Psig_module pmd -> + line i ppf "Psig_module %a\n" fmt_string_loc pmd.pmd_name; + attributes i ppf pmd.pmd_attributes; + module_type i ppf pmd.pmd_type + | Psig_recmodule decls -> + line i ppf "Psig_recmodule\n"; + list i module_declaration ppf decls; + | Psig_modtype x -> + line i ppf "Psig_modtype %a\n" fmt_string_loc x.pmtd_name; + attributes i ppf x.pmtd_attributes; + modtype_declaration i ppf x.pmtd_type + | Psig_open od -> + line i ppf "Psig_open %a %a\n" + fmt_override_flag od.popen_override + fmt_longident_loc od.popen_lid; + attributes i ppf od.popen_attributes + | Psig_include incl -> + line i ppf "Psig_include\n"; + module_type i ppf incl.pincl_mod; + attributes i ppf incl.pincl_attributes + | Psig_class (l) -> + line i ppf "Psig_class\n"; + list i class_description ppf l; + | Psig_class_type (l) -> + line i ppf "Psig_class_type\n"; + list i class_type_declaration ppf l; + | Psig_extension ((s, arg), attrs) -> + line i ppf "Psig_extension \"%s\"\n" s.txt; + attributes i ppf attrs; + payload i ppf arg + | Psig_attribute (s, arg) -> + line i ppf "Psig_attribute \"%s\"\n" s.txt; + payload i ppf arg + +and modtype_declaration i ppf = function + | None -> line i ppf "#abstract" + | Some mt -> module_type (i+1) ppf mt + +and with_constraint i 
ppf x = + match x with + | Pwith_type (lid, td) -> + line i ppf "Pwith_type %a\n" fmt_longident_loc lid; + type_declaration (i+1) ppf td; + | Pwith_typesubst (lid, td) -> + line i ppf "Pwith_typesubst %a\n" fmt_longident_loc lid; + type_declaration (i+1) ppf td; + | Pwith_module (lid1, lid2) -> + line i ppf "Pwith_module %a = %a\n" + fmt_longident_loc lid1 + fmt_longident_loc lid2; + | Pwith_modsubst (lid1, lid2) -> + line i ppf "Pwith_modsubst %a = %a\n" + fmt_longident_loc lid1 + fmt_longident_loc lid2; + +and module_expr i ppf x = + line i ppf "module_expr %a\n" fmt_location x.pmod_loc; + attributes i ppf x.pmod_attributes; + let i = i+1 in + match x.pmod_desc with + | Pmod_ident (li) -> line i ppf "Pmod_ident %a\n" fmt_longident_loc li; + | Pmod_structure (s) -> + line i ppf "Pmod_structure\n"; + structure i ppf s; + | Pmod_functor (s, mt, me) -> + line i ppf "Pmod_functor %a\n" fmt_string_loc s; + Misc.may (module_type i ppf) mt; + module_expr i ppf me; + | Pmod_apply (me1, me2) -> + line i ppf "Pmod_apply\n"; + module_expr i ppf me1; + module_expr i ppf me2; + | Pmod_constraint (me, mt) -> + line i ppf "Pmod_constraint\n"; + module_expr i ppf me; + module_type i ppf mt; + | Pmod_unpack (e) -> + line i ppf "Pmod_unpack\n"; + expression i ppf e; + | Pmod_extension (s, arg) -> + line i ppf "Pmod_extension \"%s\"\n" s.txt; + payload i ppf arg + +and structure i ppf x = list i structure_item ppf x + +and structure_item i ppf x = + line i ppf "structure_item %a\n" fmt_location x.pstr_loc; + let i = i+1 in + match x.pstr_desc with + | Pstr_eval (e, attrs) -> + line i ppf "Pstr_eval\n"; + attributes i ppf attrs; + expression i ppf e; + | Pstr_value (rf, l) -> + line i ppf "Pstr_value %a\n" fmt_rec_flag rf; + list i value_binding ppf l; + | Pstr_primitive vd -> + line i ppf "Pstr_primitive\n"; + value_description i ppf vd; + | Pstr_type (rf, l) -> + line i ppf "Pstr_type %a\n" fmt_rec_flag rf; + list i type_declaration ppf l; + | Pstr_typext te -> + line i ppf "Pstr_typext\n"; + type_extension i ppf te + | Pstr_exception ext -> + line i ppf "Pstr_exception\n"; + extension_constructor i ppf ext; + | Pstr_module x -> + line i ppf "Pstr_module\n"; + module_binding i ppf x + | Pstr_recmodule bindings -> + line i ppf "Pstr_recmodule\n"; + list i module_binding ppf bindings; + | Pstr_modtype x -> + line i ppf "Pstr_modtype %a\n" fmt_string_loc x.pmtd_name; + attributes i ppf x.pmtd_attributes; + modtype_declaration i ppf x.pmtd_type + | Pstr_open od -> + line i ppf "Pstr_open %a %a\n" + fmt_override_flag od.popen_override + fmt_longident_loc od.popen_lid; + attributes i ppf od.popen_attributes + | Pstr_class (l) -> + line i ppf "Pstr_class\n"; + list i class_declaration ppf l; + | Pstr_class_type (l) -> + line i ppf "Pstr_class_type\n"; + list i class_type_declaration ppf l; + | Pstr_include incl -> + line i ppf "Pstr_include"; + attributes i ppf incl.pincl_attributes; + module_expr i ppf incl.pincl_mod + | Pstr_extension ((s, arg), attrs) -> + line i ppf "Pstr_extension \"%s\"\n" s.txt; + attributes i ppf attrs; + payload i ppf arg + | Pstr_attribute (s, arg) -> + line i ppf "Pstr_attribute \"%s\"\n" s.txt; + payload i ppf arg + +and module_declaration i ppf pmd = + string_loc i ppf pmd.pmd_name; + attributes i ppf pmd.pmd_attributes; + module_type (i+1) ppf pmd.pmd_type; + +and module_binding i ppf x = + string_loc i ppf x.pmb_name; + attributes i ppf x.pmb_attributes; + module_expr (i+1) ppf x.pmb_expr + +and core_type_x_core_type_x_location i ppf (ct1, ct2, l) = + line i ppf " %a\n" 
fmt_location l; + core_type (i+1) ppf ct1; + core_type (i+1) ppf ct2; + +and constructor_decl i ppf + {pcd_name; pcd_args; pcd_res; pcd_loc; pcd_attributes} = + line i ppf "%a\n" fmt_location pcd_loc; + line (i+1) ppf "%a\n" fmt_string_loc pcd_name; + attributes i ppf pcd_attributes; + constructor_arguments (i+1) ppf pcd_args; + option (i+1) core_type ppf pcd_res + +and constructor_arguments i ppf = function + | Pcstr_tuple l -> list i core_type ppf l + | Pcstr_record l -> list i label_decl ppf l + +and label_decl i ppf {pld_name; pld_mutable; pld_type; pld_loc; pld_attributes}= + line i ppf "%a\n" fmt_location pld_loc; + attributes i ppf pld_attributes; + line (i+1) ppf "%a\n" fmt_mutable_flag pld_mutable; + line (i+1) ppf "%a" fmt_string_loc pld_name; + core_type (i+1) ppf pld_type + +and longident_x_pattern i ppf (li, p) = + line i ppf "%a\n" fmt_longident_loc li; + pattern (i+1) ppf p; + +and case i ppf {pc_lhs; pc_guard; pc_rhs} = + line i ppf "\n"; + pattern (i+1) ppf pc_lhs; + begin match pc_guard with + | None -> () + | Some g -> line (i+1) ppf "\n"; expression (i + 2) ppf g + end; + expression (i+1) ppf pc_rhs; + +and value_binding i ppf x = + line i ppf "\n"; + attributes (i+1) ppf x.pvb_attributes; + pattern (i+1) ppf x.pvb_pat; + expression (i+1) ppf x.pvb_expr + +and string_x_expression i ppf (s, e) = + line i ppf " %a\n" fmt_string_loc s; + expression (i+1) ppf e; + +and longident_x_expression i ppf (li, e) = + line i ppf "%a\n" fmt_longident_loc li; + expression (i+1) ppf e; + +and label_x_expression i ppf (l,e) = + line i ppf "\n"; + arg_label i ppf l; + expression (i+1) ppf e; + +and label_x_bool_x_core_type_list i ppf x = + match x with + Rtag (l, attrs, b, ctl) -> + line i ppf "Rtag \"%s\" %s\n" l.txt (string_of_bool b); + attributes (i+1) ppf attrs; + list (i+1) core_type ppf ctl + | Rinherit (ct) -> + line i ppf "Rinherit\n"; + core_type (i+1) ppf ct +;; + +let rec toplevel_phrase i ppf x = + match x with + | Ptop_def (s) -> + line i ppf "Ptop_def\n"; + structure (i+1) ppf s; + | Ptop_dir (s, da) -> + line i ppf "Ptop_dir \"%s\"\n" s; + directive_argument i ppf da; + +and directive_argument i ppf x = + match x with + | Pdir_none -> line i ppf "Pdir_none\n" + | Pdir_string (s) -> line i ppf "Pdir_string \"%s\"\n" s; + | Pdir_int (n, None) -> line i ppf "Pdir_int %s\n" n; + | Pdir_int (n, Some m) -> line i ppf "Pdir_int %s%c\n" n m; + | Pdir_ident (li) -> line i ppf "Pdir_ident %a\n" fmt_longident li; + | Pdir_bool (b) -> line i ppf "Pdir_bool %s\n" (string_of_bool b); +;; + +let interface ppf x = list 0 signature_item ppf x;; + +let implementation ppf x = list 0 structure_item ppf x;; + +let top_phrase ppf x = toplevel_phrase 0 ppf x;; diff --git a/res_syntax/compiler-libs-406/printast.mli b/res_syntax/compiler-libs-406/printast.mli new file mode 100644 index 0000000000..b77a2ca5a7 --- /dev/null +++ b/res_syntax/compiler-libs-406/printast.mli @@ -0,0 +1,25 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Damien Doligez, projet Para, INRIA Rocquencourt *) +(* *) +(* Copyright 1999 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +open Parsetree;; +open Format;; + +val interface : formatter -> signature_item list -> unit;; +val implementation : formatter -> structure_item list -> unit;; +val top_phrase : formatter -> toplevel_phrase -> unit;; + +val expression: int -> formatter -> expression -> unit +val structure: int -> formatter -> structure -> unit +val payload: int -> formatter -> payload -> unit diff --git a/res_syntax/compiler-libs-406/printtyp.ml b/res_syntax/compiler-libs-406/printtyp.ml new file mode 100644 index 0000000000..6b0c4ce4d0 --- /dev/null +++ b/res_syntax/compiler-libs-406/printtyp.ml @@ -0,0 +1,1631 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy and Jerome Vouillon, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Printing functions *) + +open Misc +open Ctype +open Format +open Longident +open Path +open Asttypes +open Types +open Btype +open Outcometree + +(* Print a long identifier *) + +let rec longident ppf = function + | Lident s -> pp_print_string ppf s + | Ldot(p, s) -> fprintf ppf "%a.%s" longident p s + | Lapply(p1, p2) -> fprintf ppf "%a(%a)" longident p1 longident p2 + +(* Print an identifier *) + +let unique_names = ref Ident.empty + +let ident_name id = + try Ident.find_same id !unique_names with Not_found -> Ident.name id + +let add_unique id = + try ignore (Ident.find_same id !unique_names) + with Not_found -> + unique_names := Ident.add id (Ident.unique_toplevel_name id) !unique_names + +let ident ppf id = pp_print_string ppf (ident_name id) + +(* Print a path *) + +let ident_pervasives = Ident.create_persistent "Pervasives" +let printing_env = ref Env.empty +let non_shadowed_pervasive = function + | Pdot(Pident id, s, _pos) as path -> + Ident.same id ident_pervasives && + (try Path.same path (Env.lookup_type (Lident s) !printing_env) + with Not_found -> true) + | _ -> false + +let rec tree_of_path = function + | Pident id -> + Oide_ident (ident_name id) + | Pdot(_, s, _pos) as path when non_shadowed_pervasive path -> + Oide_ident s + | Pdot(p, s, _pos) -> + Oide_dot (tree_of_path p, s) + | Papply(p1, p2) -> + Oide_apply (tree_of_path p1, tree_of_path p2) + +let rec path ppf = function + | Pident id -> + ident ppf id + | Pdot(_, s, _pos) as path when non_shadowed_pervasive path -> + pp_print_string ppf s + | Pdot(p, s, _pos) -> + path ppf p; + pp_print_char ppf '.'; + pp_print_string ppf s + | Papply(p1, p2) -> + fprintf ppf "%a(%a)" path p1 path p2 + +let rec string_of_out_ident = function + | Oide_ident s -> s + | Oide_dot (id, s) -> String.concat "." 
[string_of_out_ident id; s] + | Oide_apply (id1, id2) -> + String.concat "" + [string_of_out_ident id1; "("; string_of_out_ident id2; ")"] + +let string_of_path p = string_of_out_ident (tree_of_path p) + +(* Print a recursive annotation *) + +let tree_of_rec = function + | Trec_not -> Orec_not + | Trec_first -> Orec_first + | Trec_next -> Orec_next + +(* Print a raw type expression, with sharing *) + +let raw_list pr ppf = function + [] -> fprintf ppf "[]" + | a :: l -> + fprintf ppf "@[<1>[%a%t]@]" pr a + (fun ppf -> List.iter (fun x -> fprintf ppf ";@,%a" pr x) l) + +let kind_vars = ref [] +let kind_count = ref 0 + +let rec safe_kind_repr v = function + Fvar {contents=Some k} -> + if List.memq k v then "Fvar loop" else + safe_kind_repr (k::v) k + | Fvar r -> + let vid = + try List.assq r !kind_vars + with Not_found -> + let c = incr kind_count; !kind_count in + kind_vars := (r,c) :: !kind_vars; + c + in + Printf.sprintf "Fvar {None}@%d" vid + | Fpresent -> "Fpresent" + | Fabsent -> "Fabsent" + +let rec safe_commu_repr v = function + Cok -> "Cok" + | Cunknown -> "Cunknown" + | Clink r -> + if List.memq r v then "Clink loop" else + safe_commu_repr (r::v) !r + +let rec safe_repr v = function + {desc = Tlink t} when not (List.memq t v) -> + safe_repr (t::v) t + | t -> t + +let rec list_of_memo = function + Mnil -> [] + | Mcons (_priv, p, _t1, _t2, rem) -> p :: list_of_memo rem + | Mlink rem -> list_of_memo !rem + +let print_name ppf = function + None -> fprintf ppf "None" + | Some name -> fprintf ppf "\"%s\"" name + +let string_of_label = function + Nolabel -> "" + | Labelled s -> s + | Optional s -> "?"^s + +let visited = ref [] +let rec raw_type ppf ty = + let ty = safe_repr [] ty in + if List.memq ty !visited then fprintf ppf "{id=%d}" ty.id else begin + visited := ty :: !visited; + fprintf ppf "@[<1>{id=%d;level=%d;desc=@,%a}@]" ty.id ty.level + raw_type_desc ty.desc + end +and raw_type_list tl = raw_list raw_type tl +and raw_type_desc ppf = function + Tvar name -> fprintf ppf "Tvar %a" print_name name + | Tarrow(l,t1,t2,c) -> + fprintf ppf "@[Tarrow(\"%s\",@,%a,@,%a,@,%s)@]" + (string_of_label l) raw_type t1 raw_type t2 + (safe_commu_repr [] c) + | Ttuple tl -> + fprintf ppf "@[<1>Ttuple@,%a@]" raw_type_list tl + | Tconstr (p, tl, abbrev) -> + fprintf ppf "@[Tconstr(@,%a,@,%a,@,%a)@]" path p + raw_type_list tl + (raw_list path) (list_of_memo !abbrev) + | Tobject (t, nm) -> + fprintf ppf "@[Tobject(@,%a,@,@[<1>ref%t@])@]" raw_type t + (fun ppf -> + match !nm with None -> fprintf ppf " None" + | Some(p,tl) -> + fprintf ppf "(Some(@,%a,@,%a))" path p raw_type_list tl) + | Tfield (f, k, t1, t2) -> + fprintf ppf "@[Tfield(@,%s,@,%s,@,%a,@;<0 -1>%a)@]" f + (safe_kind_repr [] k) + raw_type t1 raw_type t2 + | Tnil -> fprintf ppf "Tnil" + | Tlink t -> fprintf ppf "@[<1>Tlink@,%a@]" raw_type t + | Tsubst t -> fprintf ppf "@[<1>Tsubst@,%a@]" raw_type t + | Tunivar name -> fprintf ppf "Tunivar %a" print_name name + | Tpoly (t, tl) -> + fprintf ppf "@[Tpoly(@,%a,@,%a)@]" + raw_type t + raw_type_list tl + | Tvariant row -> + fprintf ppf + "@[{@[%s@,%a;@]@ @[%s@,%a;@]@ %s%B;@ %s%B;@ @[<1>%s%t@]}@]" + "row_fields=" + (raw_list (fun ppf (l, f) -> + fprintf ppf "@[%s,@ %a@]" l raw_field f)) + row.row_fields + "row_more=" raw_type row.row_more + "row_closed=" row.row_closed + "row_fixed=" row.row_fixed + "row_name=" + (fun ppf -> + match row.row_name with None -> fprintf ppf "None" + | Some(p,tl) -> + fprintf ppf "Some(@,%a,@,%a)" path p raw_type_list tl) + | Tpackage (p, _, tl) -> + fprintf ppf 
"@[Tpackage(@,%a@,%a)@]" path p + raw_type_list tl + +and raw_field ppf = function + Rpresent None -> fprintf ppf "Rpresent None" + | Rpresent (Some t) -> fprintf ppf "@[<1>Rpresent(Some@,%a)@]" raw_type t + | Reither (c,tl,m,e) -> + fprintf ppf "@[Reither(%B,@,%a,@,%B,@,@[<1>ref%t@])@]" c + raw_type_list tl m + (fun ppf -> + match !e with None -> fprintf ppf " None" + | Some f -> fprintf ppf "@,@[<1>(%a)@]" raw_field f) + | Rabsent -> fprintf ppf "Rabsent" + +let raw_type_expr ppf t = + visited := []; kind_vars := []; kind_count := 0; + raw_type ppf t; + visited := []; kind_vars := [] + +let () = Btype.print_raw := raw_type_expr + +(* Normalize paths *) + +type param_subst = Id | Nth of int | Map of int list + +let is_nth = function + Nth _ -> true + | _ -> false + +let compose l1 = function + | Id -> Map l1 + | Map l2 -> Map (List.map (List.nth l1) l2) + | Nth n -> Nth (List.nth l1 n) + +let apply_subst s1 tyl = + if tyl = [] then [] + (* cf. PR#7543: Typemod.type_package doesn't respect type constructor arity *) + else + match s1 with + Nth n1 -> [List.nth tyl n1] + | Map l1 -> List.map (List.nth tyl) l1 + | Id -> tyl + +type best_path = Paths of Path.t list | Best of Path.t + +let printing_depth = ref 0 +let printing_cont = ref ([] : Env.iter_cont list) +let printing_old = ref Env.empty +let printing_pers = ref Concr.empty +module PathMap = Map.Make(Path) +let printing_map = ref PathMap.empty + +let same_type t t' = repr t == repr t' + +let rec index l x = + match l with + [] -> raise Not_found + | a :: l -> if x == a then 0 else 1 + index l x + +let rec uniq = function + [] -> true + | a :: l -> not (List.memq a l) && uniq l + +let rec normalize_type_path ?(cache=false) env p = + try + let (params, ty, _) = Env.find_type_expansion p env in + let params = List.map repr params in + match repr ty with + {desc = Tconstr (p1, tyl, _)} -> + let tyl = List.map repr tyl in + if List.length params = List.length tyl + && List.for_all2 (==) params tyl + then normalize_type_path ~cache env p1 + else if cache || List.length params <= List.length tyl + || not (uniq tyl) then (p, Id) + else + let l1 = List.map (index params) tyl in + let (p2, s2) = normalize_type_path ~cache env p1 in + (p2, compose l1 s2) + | ty -> + (p, Nth (index params ty)) + with + Not_found -> + (Env.normalize_path None env p, Id) + +let penalty s = + if s <> "" && s.[0] = '_' then + 10 + else + try + for i = 0 to String.length s - 2 do + if s.[i] = '_' && s.[i + 1] = '_' then + raise Exit + done; + 1 + with Exit -> 10 + +let rec path_size = function + Pident id -> + penalty (Ident.name id), -Ident.binding_time id + | Pdot (p, _, _) -> + let (l, b) = path_size p in (1+l, b) + | Papply (p1, p2) -> + let (l, b) = path_size p1 in + (l + fst (path_size p2), b) + +let same_printing_env env = + let used_pers = Env.used_persistent () in + Env.same_types !printing_old env && Concr.equal !printing_pers used_pers + +let set_printing_env env = + printing_env := env; + if !Clflags.real_paths + || !printing_env == Env.empty || same_printing_env env then () else + begin + (* printf "Reset printing_map@."; *) + printing_old := env; + printing_pers := Env.used_persistent (); + printing_map := PathMap.empty; + printing_depth := 0; + (* printf "Recompute printing_map.@."; *) + let cont = + Env.iter_types + (fun p (p', _decl) -> + let (p1, s1) = normalize_type_path env p' ~cache:true in + (* Format.eprintf "%a -> %a = %a@." 
path p path p' path p1 *) + if s1 = Id then + try + let r = PathMap.find p1 !printing_map in + match !r with + Paths l -> r := Paths (p :: l) + | Best p' -> r := Paths [p; p'] (* assert false *) + with Not_found -> + printing_map := PathMap.add p1 (ref (Paths [p])) !printing_map) + env in + printing_cont := [cont]; + end + +let wrap_printing_env env f = + set_printing_env env; + try_finally f (fun () -> set_printing_env Env.empty) + +let wrap_printing_env env f = + Env.without_cmis (wrap_printing_env env) f + +let is_unambiguous path env = + let l = Env.find_shadowed_types path env in + List.exists (Path.same path) l || (* concrete paths are ok *) + match l with + [] -> true + | p :: rem -> + (* allow also coherent paths: *) + let normalize p = fst (normalize_type_path ~cache:true env p) in + let p' = normalize p in + List.for_all (fun p -> Path.same (normalize p) p') rem || + (* also allow repeatedly defining and opening (for toplevel) *) + let id = lid_of_path p in + List.for_all (fun p -> lid_of_path p = id) rem && + Path.same p (Env.lookup_type id env) + +let rec get_best_path r = + match !r with + Best p' -> p' + | Paths [] -> raise Not_found + | Paths l -> + r := Paths []; + List.iter + (fun p -> + (* Format.eprintf "evaluating %a@." path p; *) + match !r with + Best p' when path_size p >= path_size p' -> () + | _ -> if is_unambiguous p !printing_env then r := Best p) + (* else Format.eprintf "%a ignored as ambiguous@." path p *) + l; + get_best_path r + +let best_type_path p = + if !Clflags.real_paths || !printing_env == Env.empty + then (p, Id) + else + let (p', s) = normalize_type_path !printing_env p in + let get_path () = get_best_path (PathMap.find p' !printing_map) in + while !printing_cont <> [] && + try fst (path_size (get_path ())) > !printing_depth with Not_found -> true + do + printing_cont := List.map snd (Env.run_iter_cont !printing_cont); + incr printing_depth; + done; + let p'' = try get_path () with Not_found -> p' in + (* Format.eprintf "%a = %a -> %a@." path p path p' path p''; *) + (p'', s) + +(* Print a type expression *) + +let names = ref ([] : (type_expr * string) list) +let name_counter = ref 0 +let named_vars = ref ([] : string list) + +let weak_counter = ref 1 +let weak_var_map = ref TypeMap.empty +let named_weak_vars = ref StringSet.empty + +let reset_names () = names := []; name_counter := 0; named_vars := [] +let add_named_var ty = + match ty.desc with + Tvar (Some name) | Tunivar (Some name) -> + if List.mem name !named_vars then () else + named_vars := name :: !named_vars + | _ -> () + +let name_is_already_used name = + List.mem name !named_vars + || List.exists (fun (_, name') -> name = name') !names + || StringSet.mem name !named_weak_vars + +let rec new_name () = + let name = + if !name_counter < 26 + then String.make 1 (Char.chr(97 + !name_counter)) + else String.make 1 (Char.chr(97 + !name_counter mod 26)) ^ + string_of_int(!name_counter / 26) in + incr name_counter; + if name_is_already_used name then new_name () else name + +let rec new_weak_name ty () = + let name = "weak" ^ string_of_int !weak_counter in + incr weak_counter; + if name_is_already_used name then new_weak_name ty () + else begin + named_weak_vars := StringSet.add name !named_weak_vars; + weak_var_map := TypeMap.add ty name !weak_var_map; + name + end + +let name_of_type name_generator t = + (* We've already been through repr at this stage, so t is our representative + of the union-find class. 
*) + try List.assq t !names with Not_found -> + try TypeMap.find t !weak_var_map with Not_found -> + let name = + match t.desc with + Tvar (Some name) | Tunivar (Some name) -> + (* Some part of the type we've already printed has assigned another + * unification variable to that name. We want to keep the name, so try + * adding a number until we find a name that's not taken. *) + let current_name = ref name in + let i = ref 0 in + while List.exists (fun (_, name') -> !current_name = name') !names do + current_name := name ^ (string_of_int !i); + i := !i + 1; + done; + !current_name + | _ -> + (* No name available, create a new one *) + name_generator () + in + (* Exception for type declarations *) + if name <> "_" then names := (t, name) :: !names; + name + +let check_name_of_type t = ignore(name_of_type new_name t) + +let remove_names tyl = + let tyl = List.map repr tyl in + names := List.filter (fun (ty,_) -> not (List.memq ty tyl)) !names + +let visited_objects = ref ([] : type_expr list) +let aliased = ref ([] : type_expr list) +let delayed = ref ([] : type_expr list) + +let add_delayed t = + if not (List.memq t !delayed) then delayed := t :: !delayed + +let is_aliased ty = List.memq (proxy ty) !aliased +let add_alias ty = + let px = proxy ty in + if not (is_aliased px) then begin + aliased := px :: !aliased; + add_named_var px + end + +let aliasable ty = + match ty.desc with + Tvar _ | Tunivar _ | Tpoly _ -> false + | Tconstr (p, _, _) -> + not (is_nth (snd (best_type_path p))) + | _ -> true + +let namable_row row = + row.row_name <> None && + List.for_all + (fun (_, f) -> + match row_field_repr f with + | Reither(c, l, _, _) -> + row.row_closed && if c then l = [] else List.length l = 1 + | _ -> true) + row.row_fields + +let rec mark_loops_rec visited ty = + let ty = repr ty in + let px = proxy ty in + if List.memq px visited && aliasable ty then add_alias px else + let visited = px :: visited in + match ty.desc with + | Tvar _ -> add_named_var ty + | Tarrow(_, ty1, ty2, _) -> + mark_loops_rec visited ty1; mark_loops_rec visited ty2 + | Ttuple tyl -> List.iter (mark_loops_rec visited) tyl + | Tconstr(p, tyl, _) -> + let (_p', s) = best_type_path p in + List.iter (mark_loops_rec visited) (apply_subst s tyl) + | Tpackage (_, _, tyl) -> + List.iter (mark_loops_rec visited) tyl + | Tvariant row -> + if List.memq px !visited_objects then add_alias px else + begin + let row = row_repr row in + if not (static_row row) then + visited_objects := px :: !visited_objects; + match row.row_name with + | Some(_p, tyl) when namable_row row -> + List.iter (mark_loops_rec visited) tyl + | _ -> + iter_row (mark_loops_rec visited) row + end + | Tobject (fi, nm) -> + if List.memq px !visited_objects then add_alias px else + begin + if opened_object ty then + visited_objects := px :: !visited_objects; + begin match !nm with + | None -> + let fields, _ = flatten_fields fi in + List.iter + (fun (_, kind, ty) -> + if field_kind_repr kind = Fpresent then + mark_loops_rec visited ty) + fields + | Some (_, l) -> + List.iter (mark_loops_rec visited) (List.tl l) + end + end + | Tfield(_, kind, ty1, ty2) when field_kind_repr kind = Fpresent -> + mark_loops_rec visited ty1; mark_loops_rec visited ty2 + | Tfield(_, _, _, ty2) -> + mark_loops_rec visited ty2 + | Tnil -> () + | Tsubst ty -> mark_loops_rec visited ty + | Tlink _ -> fatal_error "Printtyp.mark_loops_rec (2)" + | Tpoly (ty, tyl) -> + List.iter (fun t -> add_alias t) tyl; + mark_loops_rec visited ty + | Tunivar _ -> add_named_var ty + +let mark_loops ty = + 
normalize_type Env.empty ty; + mark_loops_rec [] ty;; + +let reset_loop_marks () = + visited_objects := []; aliased := []; delayed := [] + +let reset () = + unique_names := Ident.empty; reset_names (); reset_loop_marks () + +let reset_and_mark_loops ty = + reset (); mark_loops ty + +let reset_and_mark_loops_list tyl = + reset (); List.iter mark_loops tyl + +(* Disabled in classic mode when printing an unification error *) +let print_labels = ref true + +let rec tree_of_typexp sch ty = + let ty = repr ty in + let px = proxy ty in + if List.mem_assq px !names && not (List.memq px !delayed) then + let mark = is_non_gen sch ty in + let name = name_of_type (if mark then new_weak_name ty else new_name) px in + Otyp_var (mark, name) else + + let pr_typ () = + match ty.desc with + | Tvar _ -> + (*let lev = + if is_non_gen sch ty then "/" ^ string_of_int ty.level else "" in*) + let non_gen = is_non_gen sch ty in + let name_gen = if non_gen then new_weak_name ty else new_name in + Otyp_var (non_gen, name_of_type name_gen ty) + | Tarrow(l, ty1, ty2, _) -> + let pr_arrow l ty1 ty2 = + let lab = + if !print_labels || is_optional l then string_of_label l else "" + in + let t1 = + if is_optional l then + match (repr ty1).desc with + | Tconstr(path, [ty], _) + when Path.same path Predef.path_option -> + tree_of_typexp sch ty + | _ -> Otyp_stuff "" + else tree_of_typexp sch ty1 in + Otyp_arrow (lab, t1, tree_of_typexp sch ty2) in + pr_arrow l ty1 ty2 + | Ttuple tyl -> + Otyp_tuple (tree_of_typlist sch tyl) + | Tconstr(p, tyl, _abbrev) -> + let p', s = best_type_path p in + let tyl' = apply_subst s tyl in + if is_nth s && not (tyl'=[]) then tree_of_typexp sch (List.hd tyl') else + Otyp_constr (tree_of_path p', tree_of_typlist sch tyl') + | Tvariant row -> + let row = row_repr row in + let fields = + if row.row_closed then + List.filter (fun (_, f) -> row_field_repr f <> Rabsent) + row.row_fields + else row.row_fields in + let present = + List.filter + (fun (_, f) -> + match row_field_repr f with + | Rpresent _ -> true + | _ -> false) + fields in + let all_present = List.length present = List.length fields in + begin match row.row_name with + | Some(p, tyl) when namable_row row -> + let (p', s) = best_type_path p in + let id = tree_of_path p' in + let args = tree_of_typlist sch (apply_subst s tyl) in + let out_variant = + if is_nth s then List.hd args else Otyp_constr (id, args) in + if row.row_closed && all_present then + out_variant + else + let non_gen = is_non_gen sch px in + let tags = + if all_present then None else Some (List.map fst present) in + Otyp_variant (non_gen, Ovar_typ out_variant, row.row_closed, tags) + | _ -> + let non_gen = + not (row.row_closed && all_present) && is_non_gen sch px in + let fields = List.map (tree_of_row_field sch) fields in + let tags = + if all_present then None else Some (List.map fst present) in + Otyp_variant (non_gen, Ovar_fields fields, row.row_closed, tags) + end + | Tobject (fi, nm) -> + tree_of_typobject sch fi !nm + | Tnil | Tfield _ -> + tree_of_typobject sch ty None + | Tsubst ty -> + tree_of_typexp sch ty + | Tlink _ -> + fatal_error "Printtyp.tree_of_typexp" + | Tpoly (ty, []) -> + tree_of_typexp sch ty + | Tpoly (ty, tyl) -> + (*let print_names () = + List.iter (fun (_, name) -> prerr_string (name ^ " ")) !names; + prerr_string "; " in *) + let tyl = List.map repr tyl in + if tyl = [] then tree_of_typexp sch ty else begin + let old_delayed = !delayed in + (* Make the names delayed, so that the real type is + printed once when used as proxy *) + List.iter 
add_delayed tyl; + let tl = List.map (name_of_type new_name) tyl in + let tr = Otyp_poly (tl, tree_of_typexp sch ty) in + (* Forget names when we leave scope *) + remove_names tyl; + delayed := old_delayed; tr + end + | Tunivar _ -> + Otyp_var (false, name_of_type new_name ty) + | Tpackage (p, n, tyl) -> + let n = + List.map (fun li -> String.concat "." (Longident.flatten li)) n in + Otyp_module (Path.name p, n, tree_of_typlist sch tyl) + in + if List.memq px !delayed then delayed := List.filter ((!=) px) !delayed; + if is_aliased px && aliasable ty then begin + check_name_of_type px; + Otyp_alias (pr_typ (), name_of_type new_name px) end + else pr_typ () + +and tree_of_row_field sch (l, f) = + match row_field_repr f with + | Rpresent None | Reither(true, [], _, _) -> (l, false, []) + | Rpresent(Some ty) -> (l, false, [tree_of_typexp sch ty]) + | Reither(c, tyl, _, _) -> + if c (* contradiction: constant constructor with an argument *) + then (l, true, tree_of_typlist sch tyl) + else (l, false, tree_of_typlist sch tyl) + | Rabsent -> (l, false, [] (* actually, an error *)) + +and tree_of_typlist sch tyl = + List.map (tree_of_typexp sch) tyl + +and tree_of_typobject sch fi nm = + begin match nm with + | None -> + let pr_fields fi = + let (fields, rest) = flatten_fields fi in + let present_fields = + List.fold_right + (fun (n, k, t) l -> + match field_kind_repr k with + | Fpresent -> (n, t) :: l + | _ -> l) + fields [] in + let sorted_fields = + List.sort + (fun (n, _) (n', _) -> String.compare n n') present_fields in + tree_of_typfields sch rest sorted_fields in + let (fields, rest) = pr_fields fi in + Otyp_object (fields, rest) + | Some (p, ty :: tyl) -> + let non_gen = is_non_gen sch (repr ty) in + let args = tree_of_typlist sch tyl in + let (p', s) = best_type_path p in + assert (s = Id); + Otyp_class (non_gen, tree_of_path p', args) + | _ -> + fatal_error "Printtyp.tree_of_typobject" + end + +and is_non_gen sch ty = + sch && is_Tvar ty && ty.level <> generic_level + +and tree_of_typfields sch rest = function + | [] -> + let rest = + match rest.desc with + | Tvar _ | Tunivar _ -> Some (is_non_gen sch rest) + | Tconstr _ -> Some false + | Tnil -> None + | _ -> fatal_error "typfields (1)" + in + ([], rest) + | (s, t) :: l -> + let field = (s, tree_of_typexp sch t) in + let (fields, rest) = tree_of_typfields sch rest l in + (field :: fields, rest) + +let typexp sch ppf ty = + !Oprint.out_type ppf (tree_of_typexp sch ty) + +let type_expr ppf ty = typexp false ppf ty + +and type_sch ppf ty = typexp true ppf ty + +and type_scheme ppf ty = reset_and_mark_loops ty; typexp true ppf ty + +(* Maxence *) +let type_scheme_max ?(b_reset_names=true) ppf ty = + if b_reset_names then reset_names () ; + typexp true ppf ty +(* End Maxence *) + +let tree_of_type_scheme ty = reset_and_mark_loops ty; tree_of_typexp true ty + +(* Print one type declaration *) + +let tree_of_constraints params = + List.fold_right + (fun ty list -> + let ty' = unalias ty in + if proxy ty != proxy ty' then + let tr = tree_of_typexp true ty in + (tr, tree_of_typexp true ty') :: list + else list) + params [] + +let filter_params tyl = + let params = + List.fold_left + (fun tyl ty -> + let ty = repr ty in + if List.memq ty tyl then Btype.newgenty (Tsubst ty) :: tyl + else ty :: tyl) + [] tyl + in List.rev params + +let mark_loops_constructor_arguments = function + | Cstr_tuple l -> List.iter mark_loops l + | Cstr_record l -> List.iter (fun l -> mark_loops l.ld_type) l + +let rec tree_of_type_decl id decl = + + reset(); + + let 
params = filter_params decl.type_params in + + begin match decl.type_manifest with + | Some ty -> + let vars = free_variables ty in + List.iter + (function {desc = Tvar (Some "_")} as ty -> + if List.memq ty vars then ty.desc <- Tvar None + | _ -> ()) + params + | None -> () + end; + + List.iter add_alias params; + List.iter mark_loops params; + List.iter check_name_of_type (List.map proxy params); + let ty_manifest = + match decl.type_manifest with + | None -> None + | Some ty -> + let ty = + (* Special hack to hide variant name *) + match repr ty with {desc=Tvariant row} -> + let row = row_repr row in + begin match row.row_name with + Some (Pident id', _) when Ident.same id id' -> + newgenty (Tvariant {row with row_name = None}) + | _ -> ty + end + | _ -> ty + in + mark_loops ty; + Some ty + in + begin match decl.type_kind with + | Type_abstract -> () + | Type_variant cstrs -> + List.iter + (fun c -> + mark_loops_constructor_arguments c.cd_args; + may mark_loops c.cd_res) + cstrs + | Type_record(l, _rep) -> + List.iter (fun l -> mark_loops l.ld_type) l + | Type_open -> () + end; + + let type_param = + function + | Otyp_var (_, id) -> id + | _ -> "?" + in + let type_defined decl = + let abstr = + match decl.type_kind with + Type_abstract -> + decl.type_manifest = None || decl.type_private = Private + | Type_record _ -> + decl.type_private = Private + | Type_variant tll -> + decl.type_private = Private || + List.exists (fun cd -> cd.cd_res <> None) tll + | Type_open -> + decl.type_manifest = None + in + let vari = + List.map2 + (fun ty v -> + if abstr || not (is_Tvar (repr ty)) then Variance.get_upper v + else (true,true)) + decl.type_params decl.type_variance + in + (Ident.name id, + List.map2 (fun ty cocn -> type_param (tree_of_typexp false ty), cocn) + params vari) + in + let tree_of_manifest ty1 = + match ty_manifest with + | None -> ty1 + | Some ty -> Otyp_manifest (tree_of_typexp false ty, ty1) + in + let (name, args) = type_defined decl in + let constraints = tree_of_constraints params in + let ty, priv = + match decl.type_kind with + | Type_abstract -> + begin match ty_manifest with + | None -> (Otyp_abstract, Public) + | Some ty -> + tree_of_typexp false ty, decl.type_private + end + | Type_variant cstrs -> + tree_of_manifest (Otyp_sum (List.map tree_of_constructor cstrs)), + decl.type_private + | Type_record(lbls, _rep) -> + tree_of_manifest (Otyp_record (List.map tree_of_label lbls)), + decl.type_private + | Type_open -> + tree_of_manifest Otyp_open, + decl.type_private + in + let immediate = + Builtin_attributes.immediate decl.type_attributes + in + { otype_name = name; + otype_params = args; + otype_type = ty; + otype_private = priv; + otype_immediate = immediate; + otype_unboxed = decl.type_unboxed.unboxed; + otype_cstrs = constraints } + +and tree_of_constructor_arguments = function + | Cstr_tuple l -> tree_of_typlist false l + | Cstr_record l -> [ Otyp_record (List.map tree_of_label l) ] + +and tree_of_constructor cd = + let name = Ident.name cd.cd_id in + let arg () = tree_of_constructor_arguments cd.cd_args in + match cd.cd_res with + | None -> (name, arg (), None) + | Some res -> + let nm = !names in + names := []; + let ret = tree_of_typexp false res in + let args = arg () in + names := nm; + (name, args, Some ret) + +and tree_of_label l = + let opt = l.ld_attributes |> List.exists (fun ({txt}, _) -> txt = "ns.optional") in + let typ = match l.ld_type.desc with + | Tconstr (p, [t1], _) when opt && Path.same p Predef.path_option -> t1 + | _ -> l.ld_type in + (Ident.name 
l.ld_id, l.ld_mutable = Mutable, opt, tree_of_typexp false typ) + +let tree_of_type_declaration id decl rs = + Osig_type (tree_of_type_decl id decl, tree_of_rec rs) + +let type_declaration id ppf decl = + !Oprint.out_sig_item ppf (tree_of_type_declaration id decl Trec_first) + +let constructor_arguments ppf a = + let tys = tree_of_constructor_arguments a in + !Oprint.out_type ppf (Otyp_tuple tys) + +(* Print an extension declaration *) + +let tree_of_extension_constructor id ext es = + reset (); + let ty_name = Path.name ext.ext_type_path in + let ty_params = filter_params ext.ext_type_params in + List.iter add_alias ty_params; + List.iter mark_loops ty_params; + List.iter check_name_of_type (List.map proxy ty_params); + mark_loops_constructor_arguments ext.ext_args; + may mark_loops ext.ext_ret_type; + let type_param = + function + | Otyp_var (_, id) -> id + | _ -> "?" + in + let ty_params = + List.map (fun ty -> type_param (tree_of_typexp false ty)) ty_params + in + let name = Ident.name id in + let args, ret = + match ext.ext_ret_type with + | None -> (tree_of_constructor_arguments ext.ext_args, None) + | Some res -> + let nm = !names in + names := []; + let ret = tree_of_typexp false res in + let args = tree_of_constructor_arguments ext.ext_args in + names := nm; + (args, Some ret) + in + let ext = + { oext_name = name; + oext_type_name = ty_name; + oext_type_params = ty_params; + oext_args = args; + oext_ret_type = ret; + oext_private = ext.ext_private } + in + let es = + match es with + Text_first -> Oext_first + | Text_next -> Oext_next + | Text_exception -> Oext_exception + in + Osig_typext (ext, es) + +let extension_constructor id ppf ext = + !Oprint.out_sig_item ppf (tree_of_extension_constructor id ext Text_first) + +(* Print a value declaration *) + +let tree_of_value_description id decl = + (* Format.eprintf "@[%a@]@." 
raw_type_expr decl.val_type; *) + let id = Ident.name id in + let ty = tree_of_type_scheme decl.val_type in + let vd = + { oval_name = id; + oval_type = ty; + oval_prims = []; + oval_attributes = [] } + in + let vd = + match decl.val_kind with + | Val_prim p -> Primitive.print p vd + | _ -> vd + in + Osig_value vd + +let value_description id ppf decl = + !Oprint.out_sig_item ppf (tree_of_value_description id decl) + +(* Print a class type *) + +let method_type (_, kind, ty) = + match field_kind_repr kind, repr ty with + Fpresent, {desc=Tpoly(ty, tyl)} -> (ty, tyl) + | _ , ty -> (ty, []) + +let tree_of_metho sch concrete csil (lab, kind, ty) = + if lab <> dummy_method then begin + let kind = field_kind_repr kind in + let priv = kind <> Fpresent in + let virt = not (Concr.mem lab concrete) in + let (ty, tyl) = method_type (lab, kind, ty) in + let tty = tree_of_typexp sch ty in + remove_names tyl; + Ocsg_method (lab, priv, virt, tty) :: csil + end + else csil + +let rec prepare_class_type params = function + | Cty_constr (_p, tyl, cty) -> + let sty = Ctype.self_type cty in + if List.memq (proxy sty) !visited_objects + || not (List.for_all is_Tvar params) + || List.exists (deep_occur sty) tyl + then prepare_class_type params cty + else List.iter mark_loops tyl + | Cty_signature sign -> + let sty = repr sign.csig_self in + (* Self may have a name *) + let px = proxy sty in + if List.memq px !visited_objects then add_alias sty + else visited_objects := px :: !visited_objects; + let (fields, _) = + Ctype.flatten_fields (Ctype.object_fields sign.csig_self) + in + List.iter (fun met -> mark_loops (fst (method_type met))) fields; + Vars.iter (fun _ (_, _, ty) -> mark_loops ty) sign.csig_vars + | Cty_arrow (_, ty, cty) -> + mark_loops ty; + prepare_class_type params cty + +let rec tree_of_class_type sch params = + function + | Cty_constr (p', tyl, cty) -> + let sty = Ctype.self_type cty in + if List.memq (proxy sty) !visited_objects + || not (List.for_all is_Tvar params) + then + tree_of_class_type sch params cty + else + Octy_constr (tree_of_path p', tree_of_typlist true tyl) + | Cty_signature sign -> + let sty = repr sign.csig_self in + let self_ty = + if is_aliased sty then + Some (Otyp_var (false, name_of_type new_name (proxy sty))) + else None + in + let (fields, _) = + Ctype.flatten_fields (Ctype.object_fields sign.csig_self) + in + let csil = [] in + let csil = + List.fold_left + (fun csil (ty1, ty2) -> Ocsg_constraint (ty1, ty2) :: csil) + csil (tree_of_constraints params) + in + let all_vars = + Vars.fold (fun l (m, v, t) all -> (l, m, v, t) :: all) sign.csig_vars [] + in + (* Consequence of PR#3607: order of Map.fold has changed! 
*) + let all_vars = List.rev all_vars in + let csil = + List.fold_left + (fun csil (l, m, v, t) -> + Ocsg_value (l, m = Mutable, v = Virtual, tree_of_typexp sch t) + :: csil) + csil all_vars + in + let csil = + List.fold_left (tree_of_metho sch sign.csig_concr) csil fields + in + Octy_signature (self_ty, List.rev csil) + | Cty_arrow (l, ty, cty) -> + let lab = + if !print_labels || is_optional l then string_of_label l else "" + in + let ty = + if is_optional l then + match (repr ty).desc with + | Tconstr(path, [ty], _) when Path.same path Predef.path_option -> ty + | _ -> newconstr (Path.Pident(Ident.create "")) [] + else ty in + let tr = tree_of_typexp sch ty in + Octy_arrow (lab, tr, tree_of_class_type sch params cty) + +let class_type ppf cty = + reset (); + prepare_class_type [] cty; + !Oprint.out_class_type ppf (tree_of_class_type false [] cty) + +let tree_of_class_param param variance = + (match tree_of_typexp true param with + Otyp_var (_, s) -> s + | _ -> "?"), + if is_Tvar (repr param) then (true, true) else variance + +let class_variance = + List.map Variance.(fun v -> mem May_pos v, mem May_neg v) + +let tree_of_class_declaration id cl rs = + let params = filter_params cl.cty_params in + + reset (); + List.iter add_alias params; + prepare_class_type params cl.cty_type; + let sty = Ctype.self_type cl.cty_type in + List.iter mark_loops params; + + List.iter check_name_of_type (List.map proxy params); + if is_aliased sty then check_name_of_type (proxy sty); + + let vir_flag = cl.cty_new = None in + Osig_class + (vir_flag, Ident.name id, + List.map2 tree_of_class_param params (class_variance cl.cty_variance), + tree_of_class_type true params cl.cty_type, + tree_of_rec rs) + +let class_declaration id ppf cl = + !Oprint.out_sig_item ppf (tree_of_class_declaration id cl Trec_first) + +let tree_of_cltype_declaration id cl rs = + let params = List.map repr cl.clty_params in + + reset (); + List.iter add_alias params; + prepare_class_type params cl.clty_type; + let sty = Ctype.self_type cl.clty_type in + List.iter mark_loops params; + + List.iter check_name_of_type (List.map proxy params); + if is_aliased sty then check_name_of_type (proxy sty); + + let sign = Ctype.signature_of_class_type cl.clty_type in + + let virt = + let (fields, _) = + Ctype.flatten_fields (Ctype.object_fields sign.csig_self) in + List.exists + (fun (lab, _, _) -> + not (lab = dummy_method || Concr.mem lab sign.csig_concr)) + fields + || Vars.fold (fun _ (_,vr,_) b -> vr = Virtual || b) sign.csig_vars false + in + + Osig_class_type + (virt, Ident.name id, + List.map2 tree_of_class_param params (class_variance cl.clty_variance), + tree_of_class_type true params cl.clty_type, + tree_of_rec rs) + +let cltype_declaration id ppf cl = + !Oprint.out_sig_item ppf (tree_of_cltype_declaration id cl Trec_first) + +(* Print a module type *) + +let wrap_env fenv ftree arg = + let env = !printing_env in + set_printing_env (fenv env); + let tree = ftree arg in + set_printing_env env; + tree + +let filter_rem_sig item rem = + match item, rem with + | Sig_class _, ctydecl :: tydecl1 :: tydecl2 :: rem -> + ([ctydecl; tydecl1; tydecl2], rem) + | Sig_class_type _, tydecl1 :: tydecl2 :: rem -> + ([tydecl1; tydecl2], rem) + | _ -> + ([], rem) + +let dummy = + { type_params = []; type_arity = 0; type_kind = Type_abstract; + type_private = Public; type_manifest = None; type_variance = []; + type_newtype_level = None; type_loc = Location.none; + type_attributes = []; + type_immediate = false; + type_unboxed = unboxed_false_default_false; + 
} + +let hide_rec_items = function + | Sig_type(id, _decl, rs) ::rem + when rs = Trec_first && not !Clflags.real_paths -> + let rec get_ids = function + Sig_type (id, _, Trec_next) :: rem -> + id :: get_ids rem + | _ -> [] + in + let ids = id :: get_ids rem in + set_printing_env + (List.fold_right + (fun id -> Env.add_type ~check:false (Ident.rename id) dummy) + ids !printing_env) + | _ -> () + +let rec tree_of_modtype ?(ellipsis=false) = function + | Mty_ident p -> + Omty_ident (tree_of_path p) + | Mty_signature sg -> + Omty_signature (if ellipsis then [Osig_ellipsis] + else tree_of_signature sg) + | Mty_functor(param, ty_arg, ty_res) -> + let res = + match ty_arg with None -> tree_of_modtype ~ellipsis ty_res + | Some mty -> + wrap_env (Env.add_module ~arg:true param mty) + (tree_of_modtype ~ellipsis) ty_res + in + Omty_functor (Ident.name param, + may_map (tree_of_modtype ~ellipsis:false) ty_arg, res) + | Mty_alias(_, p) -> + Omty_alias (tree_of_path p) + +and tree_of_signature sg = + wrap_env (fun env -> env) (tree_of_signature_rec !printing_env false) sg + +and tree_of_signature_rec env' in_type_group = function + [] -> [] + | item :: rem as items -> + let in_type_group = + match in_type_group, item with + true, Sig_type (_, _, Trec_next) -> true + | _, Sig_type (_, _, (Trec_not | Trec_first)) -> + set_printing_env env'; true + | _ -> set_printing_env env'; false + in + let (sg, rem) = filter_rem_sig item rem in + hide_rec_items items; + let trees = trees_of_sigitem item in + let env' = Env.add_signature (item :: sg) env' in + trees @ tree_of_signature_rec env' in_type_group rem + +and trees_of_sigitem = function + | Sig_value(id, decl) -> + [tree_of_value_description id decl] + | Sig_type(id, _, _) when is_row_name (Ident.name id) -> + [] + | Sig_type(id, decl, rs) -> + [tree_of_type_declaration id decl rs] + | Sig_typext(id, ext, es) -> + [tree_of_extension_constructor id ext es] + | Sig_module(id, md, rs) -> + let ellipsis = + List.exists (function ({txt="..."}, Parsetree.PStr []) -> true + | _ -> false) + md.md_attributes in + [tree_of_module id md.md_type rs ~ellipsis] + | Sig_modtype(id, decl) -> + [tree_of_modtype_declaration id decl] + | Sig_class(id, decl, rs) -> + [tree_of_class_declaration id decl rs] + | Sig_class_type(id, decl, rs) -> + [tree_of_cltype_declaration id decl rs] + +and tree_of_modtype_declaration id decl = + let mty = + match decl.mtd_type with + | None -> Omty_abstract + | Some mty -> tree_of_modtype mty + in + Osig_modtype (Ident.name id, mty) + +and tree_of_module id ?ellipsis mty rs = + Osig_module (Ident.name id, tree_of_modtype ?ellipsis mty, tree_of_rec rs) + +let modtype ppf mty = !Oprint.out_module_type ppf (tree_of_modtype mty) +let modtype_declaration id ppf decl = + !Oprint.out_sig_item ppf (tree_of_modtype_declaration id decl) + +(* For the toplevel: merge with tree_of_signature? 
*) + +(* Refresh weak variable map in the toplevel *) +let refresh_weak () = + let refresh t name (m,s) = + if is_non_gen true (repr t) then + begin + TypeMap.add t name m, + StringSet.add name s + end + else m, s in + let m, s = + TypeMap.fold refresh !weak_var_map (TypeMap.empty ,StringSet.empty) in + named_weak_vars := s; + weak_var_map := m + +let print_items showval env x = + refresh_weak(); + let rec print showval env = function + | [] -> [] + | item :: rem as items -> + let (_sg, rem) = filter_rem_sig item rem in + hide_rec_items items; + let trees = trees_of_sigitem item in + List.map (fun d -> (d, showval env item)) trees @ + print showval env rem in + print showval env x + +(* Print a signature body (used by -i when compiling a .ml) *) + +let print_signature ppf tree = + fprintf ppf "@[%a@]" !Oprint.out_signature tree + +let signature ppf sg = + fprintf ppf "%a" print_signature (tree_of_signature sg) + +(* Print an unification error *) + +let same_path t t' = + let t = repr t and t' = repr t' in + t == t' || + match t.desc, t'.desc with + Tconstr(p,tl,_), Tconstr(p',tl',_) -> + let (p1, s1) = best_type_path p and (p2, s2) = best_type_path p' in + begin match s1, s2 with + Nth n1, Nth n2 when n1 = n2 -> true + | (Id | Map _), (Id | Map _) when Path.same p1 p2 -> + let tl = apply_subst s1 tl and tl' = apply_subst s2 tl' in + List.length tl = List.length tl' && + List.for_all2 same_type tl tl' + | _ -> false + end + | _ -> + false + +let type_expansion t ppf t' = + if same_path t t' + then begin add_delayed (proxy t); type_expr ppf t end + else + let t' = if proxy t == proxy t' then unalias t' else t' in + fprintf ppf "@[<2>%a@ =@ %a@]" type_expr t type_expr t' + +let type_path_expansion tp ppf tp' = + if Path.same tp tp' then path ppf tp else + fprintf ppf "@[<2>%a@ =@ %a@]" path tp path tp' + +let rec trace fst txt ppf = function + | (t1, t1') :: (t2, t2') :: rem -> + if not fst then fprintf ppf "@,"; + fprintf ppf "@[Type@;<1 2>%a@ %s@;<1 2>%a@] %a" + (type_expansion t1) t1' txt (type_expansion t2) t2' + (trace false txt) rem + | _ -> () + +let rec filter_trace keep_last = function + | (_, t1') :: (_, t2') :: [] when is_Tvar t1' || is_Tvar t2' -> + [] + | (t1, t1') :: (t2, t2') :: rem -> + let rem' = filter_trace keep_last rem in + if is_constr_row ~allow_ident:true t1' + || is_constr_row ~allow_ident:true t2' + || same_path t1 t1' && same_path t2 t2' && not (keep_last && rem' = []) + then rem' + else (t1, t1') :: (t2, t2') :: rem' + | _ -> [] + +let rec type_path_list ppf = function + | [tp, tp'] -> type_path_expansion tp ppf tp' + | (tp, tp') :: rem -> + fprintf ppf "%a@;<2 0>%a" + (type_path_expansion tp) tp' + type_path_list rem + | [] -> () + +(* Hide variant name and var, to force printing the expanded type *) +let hide_variant_name t = + match repr t with + | {desc = Tvariant row} as t when (row_repr row).row_name <> None -> + newty2 t.level + (Tvariant {(row_repr row) with row_name = None; + row_more = newvar2 (row_more row).level}) + | _ -> t + +let prepare_expansion (t, t') = + let t' = hide_variant_name t' in + mark_loops t; + if not (same_path t t') then mark_loops t'; + (t, t') + +let may_prepare_expansion compact (t, t') = + match (repr t').desc with + Tvariant _ | Tobject _ when compact -> + mark_loops t; (t, t) + | _ -> prepare_expansion (t, t') + +let print_tags ppf fields = + match fields with [] -> () + | (t, _) :: fields -> + fprintf ppf "`%s" t; + List.iter (fun (t, _) -> fprintf ppf ",@ `%s" t) fields + +let has_explanation t3 t4 = + match t3.desc, t4.desc 
with + Tfield _, (Tnil|Tconstr _) | (Tnil|Tconstr _), Tfield _ + | Tnil, Tconstr _ | Tconstr _, Tnil + | _, Tvar _ | Tvar _, _ + | Tvariant _, Tvariant _ -> true + | Tfield (l,_,_,{desc=Tnil}), Tfield (l',_,_,{desc=Tnil}) -> l = l' + | _ -> false + +let rec mismatch = function + (_, t) :: (_, t') :: rem -> + begin match mismatch rem with + Some _ as m -> m + | None -> + if has_explanation t t' then Some(t,t') else None + end + | [] -> None + | _ -> assert false + +let explanation unif t3 t4 ppf = + match t3.desc, t4.desc with + | Ttuple [], Tvar _ | Tvar _, Ttuple [] -> + fprintf ppf "@,Self type cannot escape its class" + | Tconstr (p, _, _), Tvar _ + when unif && t4.level < Path.binding_time p -> + fprintf ppf + "@,@[The type constructor@;<1 2>%a@ would escape its scope@]" + path p + | Tvar _, Tconstr (p, _, _) + when unif && t3.level < Path.binding_time p -> + fprintf ppf + "@,@[The type constructor@;<1 2>%a@ would escape its scope@]" + path p + | Tvar _, Tunivar _ | Tunivar _, Tvar _ -> + fprintf ppf "@,The universal variable %a would escape its scope" + type_expr (if is_Tunivar t3 then t3 else t4) + | Tvar _, _ | _, Tvar _ -> + let t, t' = if is_Tvar t3 then (t3, t4) else (t4, t3) in + if occur_in Env.empty t t' then + fprintf ppf "@,@[The type variable %a occurs inside@ %a@]" + type_expr t type_expr t' + else + fprintf ppf "@,@[This instance of %a is ambiguous:@ %s@]" + type_expr t' + "it would escape the scope of its equation" + | Tfield (lab, _, _, _), _ when lab = dummy_method -> + fprintf ppf + "@,Self type cannot be unified with a closed object type" + | _, Tfield (lab, _, _, _) when lab = dummy_method -> + fprintf ppf + "@,Self type cannot be unified with a closed object type" + | Tfield (l,_,_,{desc=Tnil}), Tfield (l',_,_,{desc=Tnil}) when l = l' -> + fprintf ppf "@,Types for method %s are incompatible" l + | (Tnil|Tconstr _), Tfield (l, _, _, _) -> + fprintf ppf + "@,@[The first object type has no method %s@]" l + | Tfield (l, _, _, _), (Tnil|Tconstr _) -> + fprintf ppf + "@,@[The second object type has no method %s@]" l + | Tnil, Tconstr _ | Tconstr _, Tnil -> + fprintf ppf + "@,@[The %s object type has an abstract row, it cannot be closed@]" + (if t4.desc = Tnil then "first" else "second") + | Tvariant row1, Tvariant row2 -> + let row1 = row_repr row1 and row2 = row_repr row2 in + begin match + row1.row_fields, row1.row_closed, row2.row_fields, row2.row_closed with + | [], true, [], true -> + fprintf ppf "@,These two variant types have no intersection" + | [], true, (_::_ as fields), _ -> + fprintf ppf + "@,@[The first variant type does not allow tag(s)@ @[%a@]@]" + print_tags fields + | (_::_ as fields), _, [], true -> + fprintf ppf + "@,@[The second variant type does not allow tag(s)@ @[%a@]@]" + print_tags fields + | [l1,_], true, [l2,_], true when l1 = l2 -> + fprintf ppf "@,Types for tag `%s are incompatible" l1 + | _ -> () + end + | _ -> () + + +let warn_on_missing_def env ppf t = + match t.desc with + | Tconstr (p,_,_) -> + begin + try + ignore(Env.find_type p env : Types.type_declaration) + with Not_found -> + fprintf ppf + "@,@[%a is abstract because no corresponding cmi file was found \ + in path.@]" path p + end + | _ -> () + +let explanation unif mis ppf = + match mis with + None -> () + | Some (t3, t4) -> explanation unif t3 t4 ppf + +let ident_same_name id1 id2 = + if Ident.equal id1 id2 && not (Ident.same id1 id2) then begin + add_unique id1; add_unique id2 + end + +let rec path_same_name p1 p2 = + match p1, p2 with + Pident id1, Pident id2 -> ident_same_name 
id1 id2 + | Pdot (p1, s1, _), Pdot (p2, s2, _) when s1 = s2 -> path_same_name p1 p2 + | Papply (p1, p1'), Papply (p2, p2') -> + path_same_name p1 p2; path_same_name p1' p2' + | _ -> () + +let type_same_name t1 t2 = + match (repr t1).desc, (repr t2).desc with + Tconstr (p1, _, _), Tconstr (p2, _, _) -> + path_same_name (fst (best_type_path p1)) (fst (best_type_path p2)) + | _ -> () + +let rec trace_same_names = function + (t1, t1') :: (t2, t2') :: rem -> + type_same_name t1 t2; type_same_name t1' t2'; trace_same_names rem + | _ -> () + +let unification_error env unif tr txt1 ppf txt2 = + reset (); + trace_same_names tr; + let tr = List.map (fun (t, t') -> (t, hide_variant_name t')) tr in + let mis = mismatch tr in + match tr with + | [] | _ :: [] -> assert false + | t1 :: t2 :: tr -> + try + let tr = filter_trace (mis = None) tr in + let t1, t1' = may_prepare_expansion (tr = []) t1 + and t2, t2' = may_prepare_expansion (tr = []) t2 in + print_labels := not !Clflags.classic; + let tr = List.map prepare_expansion tr in + fprintf ppf + "@[\ + @[%t@;<1 2>%a@ \ + %t@;<1 2>%a\ + @]%a%t\ + @]" + txt1 (type_expansion t1) t1' + txt2 (type_expansion t2) t2' + (trace false "is not compatible with type") tr + (explanation unif mis); + if env <> Env.empty + then begin + warn_on_missing_def env ppf t1; + warn_on_missing_def env ppf t2 + end; + print_labels := true + with exn -> + print_labels := true; + raise exn + +let report_unification_error ppf env ?(unif=true) + tr txt1 txt2 = + wrap_printing_env env (fun () -> unification_error env unif tr txt1 ppf txt2) +;; + +let trace fst keep_last txt ppf tr = + print_labels := not !Clflags.classic; + trace_same_names tr; + try match tr with + t1 :: t2 :: tr' -> + if fst then trace fst txt ppf (t1 :: t2 :: filter_trace keep_last tr') + else trace fst txt ppf (filter_trace keep_last tr); + print_labels := true + | _ -> () + with exn -> + print_labels := true; + raise exn + +let report_subtyping_error ppf env tr1 txt1 tr2 = + wrap_printing_env env (fun () -> + reset (); + let tr1 = List.map prepare_expansion tr1 + and tr2 = List.map prepare_expansion tr2 in + fprintf ppf "@[%a" (trace true (tr2 = []) txt1) tr1; + if tr2 = [] then fprintf ppf "@]" else + let mis = mismatch tr2 in + fprintf ppf "%a%t@]" + (trace false (mis = None) "is not compatible with type") tr2 + (explanation true mis)) + +let report_ambiguous_type_error ppf env (tp0, tp0') tpl txt1 txt2 txt3 = + wrap_printing_env env (fun () -> + reset (); + List.iter + (fun (tp, tp') -> path_same_name tp0 tp; path_same_name tp0' tp') + tpl; + match tpl with + [] -> assert false + | [tp, tp'] -> + fprintf ppf + "@[%t@;<1 2>%a@ \ + %t@;<1 2>%a\ + @]" + txt1 (type_path_expansion tp) tp' + txt3 (type_path_expansion tp0) tp0' + | _ -> + fprintf ppf + "@[%t@;<1 2>@[%a@]\ + @ %t@;<1 2>%a\ + @]" + txt2 type_path_list tpl + txt3 (type_path_expansion tp0) tp0') diff --git a/res_syntax/compiler-libs-406/printtyp.mli b/res_syntax/compiler-libs-406/printtyp.mli new file mode 100644 index 0000000000..8fd027ec60 --- /dev/null +++ b/res_syntax/compiler-libs-406/printtyp.mli @@ -0,0 +1,90 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. 
This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Printing functions *) + +open Format +open Types +open Outcometree + +val longident: formatter -> Longident.t -> unit +val ident: formatter -> Ident.t -> unit +val tree_of_path: Path.t -> out_ident +val path: formatter -> Path.t -> unit +val string_of_path: Path.t -> string +val raw_type_expr: formatter -> type_expr -> unit +val string_of_label: Asttypes.arg_label -> string + +val wrap_printing_env: Env.t -> (unit -> 'a) -> 'a + (* Call the function using the environment for type path shortening *) + (* This affects all the printing functions below *) + +val reset: unit -> unit +val mark_loops: type_expr -> unit +val reset_and_mark_loops: type_expr -> unit +val reset_and_mark_loops_list: type_expr list -> unit +val type_expr: formatter -> type_expr -> unit +val constructor_arguments: formatter -> constructor_arguments -> unit +val tree_of_type_scheme: type_expr -> out_type +val type_sch : formatter -> type_expr -> unit +val type_scheme: formatter -> type_expr -> unit +(* Maxence *) +val reset_names: unit -> unit +val type_scheme_max: ?b_reset_names: bool -> + formatter -> type_expr -> unit +(* End Maxence *) +val tree_of_value_description: Ident.t -> value_description -> out_sig_item +val value_description: Ident.t -> formatter -> value_description -> unit +val tree_of_type_declaration: + Ident.t -> type_declaration -> rec_status -> out_sig_item +val type_declaration: Ident.t -> formatter -> type_declaration -> unit +val tree_of_extension_constructor: + Ident.t -> extension_constructor -> ext_status -> out_sig_item +val extension_constructor: + Ident.t -> formatter -> extension_constructor -> unit +val tree_of_module: + Ident.t -> ?ellipsis:bool -> module_type -> rec_status -> out_sig_item +val modtype: formatter -> module_type -> unit +val signature: formatter -> signature -> unit +val tree_of_modtype_declaration: + Ident.t -> modtype_declaration -> out_sig_item +val tree_of_signature: Types.signature -> out_sig_item list +val tree_of_typexp: bool -> type_expr -> out_type +val modtype_declaration: Ident.t -> formatter -> modtype_declaration -> unit +val class_type: formatter -> class_type -> unit +val tree_of_class_declaration: + Ident.t -> class_declaration -> rec_status -> out_sig_item +val class_declaration: Ident.t -> formatter -> class_declaration -> unit +val tree_of_cltype_declaration: + Ident.t -> class_type_declaration -> rec_status -> out_sig_item +val cltype_declaration: Ident.t -> formatter -> class_type_declaration -> unit +val type_expansion: type_expr -> Format.formatter -> type_expr -> unit +val prepare_expansion: type_expr * type_expr -> type_expr * type_expr +val trace: + bool -> bool-> string -> formatter -> (type_expr * type_expr) list -> unit +val report_unification_error: + formatter -> Env.t -> ?unif:bool -> (type_expr * type_expr) list -> + (formatter -> unit) -> (formatter -> unit) -> + unit +val report_subtyping_error: + formatter -> Env.t -> (type_expr * type_expr) list -> + string -> (type_expr * type_expr) list -> unit +val report_ambiguous_type_error: + formatter -> Env.t -> (Path.t * Path.t) -> (Path.t * Path.t) list -> + (formatter -> unit) -> (formatter -> unit) -> (formatter -> unit) -> unit + +(* for toploop *) +val print_items: (Env.t -> signature_item -> 'a option) -> + Env.t -> 
signature_item list -> (out_sig_item * 'a option) list diff --git a/res_syntax/compiler-libs-406/profile.ml b/res_syntax/compiler-libs-406/profile.ml new file mode 100644 index 0000000000..59d67a124f --- /dev/null +++ b/res_syntax/compiler-libs-406/profile.ml @@ -0,0 +1,334 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Pierre Chambart, OCamlPro *) +(* *) +(* Copyright 2015 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +[@@@ocaml.warning "+a-18-40-42-48"] + +type file = string + +external time_include_children: bool -> float = "caml_sys_time_include_children" +let cpu_time () = time_include_children true + +module Measure = struct + type t = { + time : float; + allocated_words : float; + top_heap_words : int; + } + let create () = + let stat = Gc.quick_stat () in + { + time = cpu_time (); + allocated_words = stat.minor_words +. stat.major_words; + top_heap_words = stat.top_heap_words; + } + let zero = { time = 0.; allocated_words = 0.; top_heap_words = 0 } +end + +module Measure_diff = struct + let timestamp = let r = ref (-1) in fun () -> incr r; !r + type t = { + timestamp : int; + duration : float; + allocated_words : float; + top_heap_words_increase : int; + } + let zero () = { + timestamp = timestamp (); + duration = 0.; + allocated_words = 0.; + top_heap_words_increase = 0; + } + let accumulate t (m1 : Measure.t) (m2 : Measure.t) = { + timestamp = t.timestamp; + duration = t.duration +. (m2.time -. m1.time); + allocated_words = + t.allocated_words +. (m2.allocated_words -. m1.allocated_words); + top_heap_words_increase = + t.top_heap_words_increase + (m2.top_heap_words - m1.top_heap_words); + } + let of_diff m1 m2 = + accumulate (zero ()) m1 m2 +end + +type hierarchy = + | E of (string, Measure_diff.t * hierarchy) Hashtbl.t +[@@unboxed] + +let create () = E (Hashtbl.create 2) +let hierarchy = ref (create ()) +let initial_measure = ref None +let reset () = hierarchy := create (); initial_measure := None + +let record_call ?(accumulate = false) name f = + let E prev_hierarchy = !hierarchy in + let start_measure = Measure.create () in + if !initial_measure = None then initial_measure := Some start_measure; + let this_measure_diff, this_table = + (* We allow the recording of multiple categories by the same name, for tools + like ocamldoc that use the compiler libs but don't care about profile + information, and so may record, say, "parsing" multiple times. 
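The `Measure` and `Measure_diff` records above snapshot CPU time, words allocated, and top-heap growth, and subtract two snapshots to get the cost of a pass. A minimal standalone sketch of the same idea, using `Sys.time` in place of the `caml_sys_time_include_children` primitive:

```ocaml
(* Minimal sketch of the Measure/Measure_diff idea, with Sys.time standing in
   for the compiler's caml_sys_time_include_children primitive. *)
type measure = { time : float; allocated_words : float; top_heap_words : int }

let take () =
  let stat = Gc.quick_stat () in
  { time = Sys.time ();
    allocated_words = stat.Gc.minor_words +. stat.Gc.major_words;
    top_heap_words = stat.Gc.top_heap_words }

let cost_of f x =
  let before = take () in
  let result = f x in
  let after = take () in
  ( result,
    after.time -. before.time,
    after.allocated_words -. before.allocated_words,
    after.top_heap_words - before.top_heap_words )
```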
*) + if accumulate + then + match Hashtbl.find prev_hierarchy name with + | exception Not_found -> Measure_diff.zero (), Hashtbl.create 2 + | measure_diff, E table -> + Hashtbl.remove prev_hierarchy name; + measure_diff, table + else Measure_diff.zero (), Hashtbl.create 2 + in + hierarchy := E this_table; + Misc.try_finally f + (fun () -> + hierarchy := E prev_hierarchy; + let end_measure = Measure.create () in + let measure_diff = + Measure_diff.accumulate this_measure_diff start_measure end_measure in + Hashtbl.add prev_hierarchy name (measure_diff, E this_table)) + +let record ?accumulate pass f x = record_call ?accumulate pass (fun () -> f x) + +type display = { + to_string : max:float -> width:int -> string; + worth_displaying : max:float -> bool; +} + +let time_display v : display = + (* Because indentation is meaningful, and because the durations are + the first element of each row, we can't pad them with spaces. *) + let to_string_without_unit v ~width = Printf.sprintf "%0*.03f" width v in + let to_string ~max:_ ~width = + to_string_without_unit v ~width:(width - 1) ^ "s" in + let worth_displaying ~max:_ = + float_of_string (to_string_without_unit v ~width:0) <> 0. in + { to_string; worth_displaying } + +let memory_word_display = + (* To make memory numbers easily comparable across rows, we choose a single + scale for an entire column. To keep the display compact and not overly + precise (no one cares about the exact number of bytes), we pick the largest + scale we can and we only show 3 digits. Avoiding showing tiny numbers also + allows us to avoid displaying passes that barely allocate compared to the + rest of the compiler. *) + let bytes_of_words words = words *. float_of_int (Sys.word_size / 8) in + let to_string_without_unit v ~width scale = + let precision = 3 and precision_power = 1e3 in + let v_rescaled = bytes_of_words v /. scale in + let v_rounded = + floor (v_rescaled *. precision_power +. 0.5) /. precision_power in + let v_str = Printf.sprintf "%.*f" precision v_rounded in + let index_of_dot = String.index v_str '.' in + let v_str_truncated = + String.sub v_str 0 + (if index_of_dot >= precision + then index_of_dot + else precision + 1) + in + Printf.sprintf "%*s" width v_str_truncated + in + let choose_memory_scale = + let units = [|"B"; "kB"; "MB"; "GB"|] in + fun words -> + let bytes = bytes_of_words words in + let scale = ref (Array.length units - 1) in + while !scale > 0 && bytes < 1024. ** float_of_int !scale do + decr scale + done; + 1024. ** float_of_int !scale, units.(!scale) + in + fun ?previous v : display -> + let to_string ~max ~width = + let scale, scale_str = choose_memory_scale max in + let width = width - String.length scale_str in + to_string_without_unit v ~width scale ^ scale_str + in + let worth_displaying ~max = + let scale, _ = choose_memory_scale max in + float_of_string (to_string_without_unit v ~width:0 scale) <> 0. + && match previous with + | None -> true + | Some p -> + (* This branch is for numbers that represent absolute quantity, rather + than differences. It allows us to skip displaying the same absolute + quantity many times in a row. 
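`memory_word_display` picks one 1024-based unit per column so that numbers within a column stay comparable. A standalone sketch of that scale selection, taking a byte count directly rather than a word count:

```ocaml
(* Sketch of the choose_memory_scale logic above, on a byte count: pick the
   largest 1024-based unit that still leaves a value >= 1. *)
let choose_scale bytes =
  let units = [| "B"; "kB"; "MB"; "GB" |] in
  let scale = ref (Array.length units - 1) in
  while !scale > 0 && bytes < 1024. ** float_of_int !scale do
    decr scale
  done;
  (1024. ** float_of_int !scale, units.(!scale))

let () =
  let scale, unit_name = choose_scale 3.5e6 in
  Printf.printf "%.3f %s\n" (3.5e6 /. scale) unit_name  (* prints 3.338 MB *)
```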
*) + to_string_without_unit p ~width:0 scale + <> to_string_without_unit v ~width:0 scale + in + { to_string; worth_displaying } + +let profile_list (E table) = + let l = Hashtbl.fold (fun k d l -> (k, d) :: l) table [] in + List.sort (fun (_, (p1, _)) (_, (p2, _)) -> + compare p1.Measure_diff.timestamp p2.Measure_diff.timestamp) l + +let compute_other_category (E table : hierarchy) (total : Measure_diff.t) = + let r = ref total in + Hashtbl.iter (fun _pass ((p2 : Measure_diff.t), _) -> + let p1 = !r in + r := { + timestamp = p1.timestamp; + duration = p1.duration -. p2.duration; + allocated_words = p1.allocated_words -. p2.allocated_words; + top_heap_words_increase = + p1.top_heap_words_increase - p2.top_heap_words_increase; + } + ) table; + !r + +type row = R of string * (float * display) list * row list +type column = [ `Time | `Alloc | `Top_heap | `Abs_top_heap ] + +let rec rows_of_hierarchy ~nesting make_row name measure_diff hierarchy env = + let rows = + rows_of_hierarchy_list + ~nesting:(nesting + 1) make_row hierarchy measure_diff env in + let values, env = + make_row env measure_diff ~toplevel_other:(nesting = 0 && name = "other") in + R (name, values, rows), env + +and rows_of_hierarchy_list ~nesting make_row hierarchy total env = + let list = profile_list hierarchy in + let list = + if list <> [] || nesting = 0 + then list @ [ "other", (compute_other_category hierarchy total, create ()) ] + else [] + in + let env = ref env in + List.map (fun (name, (measure_diff, hierarchy)) -> + let a, env' = + rows_of_hierarchy ~nesting make_row name measure_diff hierarchy !env in + env := env'; + a + ) list + +let rows_of_hierarchy hierarchy measure_diff initial_measure columns = + (* Computing top heap size is a bit complicated: if the compiler applies a + list of passes n times (rather than applying pass1 n times, then pass2 n + times etc), we only show one row for that pass but what does "top heap + size at the end of that pass" even mean? + It seems the only sensible answer is to pretend the compiler applied pass1 + n times, pass2 n times by accumulating all the heap size increases that + happened during each pass, and then compute what the heap size would have + been. So that's what we do. + There's a bit of extra complication, which is that the heap can increase in + between measurements. So the heap sizes can be a bit off until the "other" + rows account for what's missing. We special case the toplevel "other" row + so that any increases that happened before the start of the compilation is + correctly reported, as a lot of code may run before the start of the + compilation (eg functor applications). *) + let make_row prev_top_heap_words (p : Measure_diff.t) ~toplevel_other = + let top_heap_words = + prev_top_heap_words + + p.top_heap_words_increase + - if toplevel_other + then initial_measure.Measure.top_heap_words + else 0 + in + let make value ~f = value, f value in + List.map (function + | `Time -> + make p.duration ~f:time_display + | `Alloc -> + make p.allocated_words ~f:memory_word_display + | `Top_heap -> + make (float_of_int p.top_heap_words_increase) ~f:memory_word_display + | `Abs_top_heap -> + make (float_of_int top_heap_words) + ~f:(memory_word_display ~previous:(float_of_int prev_top_heap_words)) + ) columns, + top_heap_words + in + rows_of_hierarchy_list ~nesting:0 make_row hierarchy measure_diff + initial_measure.top_heap_words + +let max_by_column ~n_columns rows = + let a = Array.make n_columns 0. 
in + let rec loop (R (_, values, rows)) = + List.iteri (fun i (v, _) -> a.(i) <- max a.(i) v) values; + List.iter loop rows + in + List.iter loop rows; + a + +let width_by_column ~n_columns ~display_cell rows = + let a = Array.make n_columns 1 in + let rec loop (R (_, values, rows)) = + List.iteri (fun i cell -> + let _, str = display_cell i cell ~width:0 in + a.(i) <- max a.(i) (String.length str) + ) values; + List.iter loop rows; + in + List.iter loop rows; + a + +let display_rows ppf rows = + let n_columns = + match rows with + | [] -> 0 + | R (_, values, _) :: _ -> List.length values + in + let maxs = max_by_column ~n_columns rows in + let display_cell i (_, c) ~width = + let display_cell = c.worth_displaying ~max:maxs.(i) in + display_cell, if display_cell + then c.to_string ~max:maxs.(i) ~width + else String.make width '-' + in + let widths = width_by_column ~n_columns ~display_cell rows in + let rec loop (R (name, values, rows)) ~indentation = + let worth_displaying, cell_strings = + values + |> List.mapi (fun i cell -> display_cell i cell ~width:widths.(i)) + |> List.split + in + if List.exists (fun b -> b) worth_displaying then + Format.fprintf ppf "%s%s %s@\n" + indentation (String.concat " " cell_strings) name; + List.iter (loop ~indentation:(" " ^ indentation)) rows; + in + List.iter (loop ~indentation:"") rows + +let print ppf columns = + match columns with + | [] -> () + | _ :: _ -> + let initial_measure = + match !initial_measure with + | Some v -> v + | None -> Measure.zero + in + let total = Measure_diff.of_diff Measure.zero (Measure.create ()) in + display_rows ppf (rows_of_hierarchy !hierarchy total initial_measure columns) + +let column_mapping = [ + "time", `Time; + "alloc", `Alloc; + "top-heap", `Top_heap; + "absolute-top-heap", `Abs_top_heap; +] + +let column_names = List.map fst column_mapping + +let options_doc = + Printf.sprintf + " Print performance information for each pass\ + \n The columns are: %s." + (String.concat " " column_names) + +let all_columns = List.map snd column_mapping + +let generate = "generate" +let transl = "transl" +let typing = "typing" diff --git a/res_syntax/compiler-libs-406/profile.mli b/res_syntax/compiler-libs-406/profile.mli new file mode 100644 index 0000000000..83a8252412 --- /dev/null +++ b/res_syntax/compiler-libs-406/profile.mli @@ -0,0 +1,44 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Pierre Chambart, OCamlPro *) +(* *) +(* Copyright 2015 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Compiler performance recording *) + +type file = string + +val reset : unit -> unit +(** erase all recorded profile information *) + +val record_call : ?accumulate:bool -> string -> (unit -> 'a) -> 'a +(** [record_call pass f] calls [f] and records its profile information. *) + +val record : ?accumulate:bool -> string -> ('a -> 'b) -> 'a -> 'b +(** [record pass f arg] records the profile information of [f arg] *) + +type column = [ `Time | `Alloc | `Top_heap | `Abs_top_heap ] + +val print : Format.formatter -> column list -> unit +(** Prints the selected recorded profiling information to the formatter. 
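The interface in `profile.mli` boils down to `record`/`record_call` to wrap a pass and `print` to emit the selected columns. A hypothetical usage sketch, assuming the file is compiled as a module named `Profile`; the pass names and the workload below are made up:

```ocaml
(* Hypothetical usage sketch of the Profile interface above: wrap two dummy
   passes, then print the recorded time and allocation columns. *)
let busy_pass n =
  (* stand-in workload, not a real compiler pass *)
  let r = ref 0 in
  for i = 1 to n do r := !r + i done;
  !r

let () =
  Profile.reset ();
  let _ = Profile.record "pass-one" busy_pass 1_000_000 in
  let _ = Profile.record "pass-two" busy_pass 2_000_000 in
  Profile.print Format.err_formatter [`Time; `Alloc]
```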
*) + +(** Command line flags *) + +val options_doc : string +val all_columns : column list + +(** A few pass names that are needed in several places, and shared to + avoid typos. *) + +val generate : string +val transl : string +val typing : string diff --git a/res_syntax/compiler-libs-406/queue.ml b/res_syntax/compiler-libs-406/queue.ml new file mode 100644 index 0000000000..ffda7a4672 --- /dev/null +++ b/res_syntax/compiler-libs-406/queue.ml @@ -0,0 +1,132 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Francois Pottier, projet Cristal, INRIA Rocquencourt *) +(* Jeremie Dimino, Jane Street Europe *) +(* *) +(* Copyright 2002 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +exception Empty + +type 'a cell = + | Nil + | Cons of { content: 'a; mutable next: 'a cell } + +type 'a t = { + mutable length: int; + mutable first: 'a cell; + mutable last: 'a cell +} + +let create () = { + length = 0; + first = Nil; + last = Nil +} + +let clear q = + q.length <- 0; + q.first <- Nil; + q.last <- Nil + +let add x q = + let cell = Cons { + content = x; + next = Nil + } in + match q.last with + | Nil -> + q.length <- 1; + q.first <- cell; + q.last <- cell + | Cons last -> + q.length <- q.length + 1; + last.next <- cell; + q.last <- cell + +let push = + add + +let peek q = + match q.first with + | Nil -> raise Empty + | Cons { content } -> content + +let top = + peek + +let take q = + match q.first with + | Nil -> raise Empty + | Cons { content; next = Nil } -> + clear q; + content + | Cons { content; next } -> + q.length <- q.length - 1; + q.first <- next; + content + +let pop = + take + +let copy = + let rec copy q_res prev cell = + match cell with + | Nil -> q_res.last <- prev; q_res + | Cons { content; next } -> + let res = Cons { content; next = Nil } in + begin match prev with + | Nil -> q_res.first <- res + | Cons p -> p.next <- res + end; + copy q_res res next + in + fun q -> copy { length = q.length; first = Nil; last = Nil } Nil q.first + +let is_empty q = + q.length = 0 + +let length q = + q.length + +let iter = + let rec iter f cell = + match cell with + | Nil -> () + | Cons { content; next } -> + f content; + iter f next + in + fun f q -> iter f q.first + +let fold = + let rec fold f accu cell = + match cell with + | Nil -> accu + | Cons { content; next } -> + let accu = f accu content in + fold f accu next + in + fun f accu q -> fold f accu q.first + +let transfer q1 q2 = + if q1.length > 0 then + match q2.last with + | Nil -> + q2.length <- q1.length; + q2.first <- q1.first; + q2.last <- q1.last; + clear q1 + | Cons last -> + q2.length <- q2.length + q1.length; + last.next <- q1.first; + q2.last <- q1.last; + clear q1 diff --git a/res_syntax/compiler-libs-406/queue.mli b/res_syntax/compiler-libs-406/queue.mli new file mode 100644 index 0000000000..46e48fd051 --- /dev/null +++ b/res_syntax/compiler-libs-406/queue.mli @@ -0,0 +1,82 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. 
*) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** First-in first-out queues. + + This module implements queues (FIFOs), with in-place modification. + + {b Warning} This module is not thread-safe: each {!Queue.t} value + must be protected from concurrent access (e.g. with a [Mutex.t]). + Failure to do so can lead to a crash. +*) + +type 'a t +(** The type of queues containing elements of type ['a]. *) + + +exception Empty +(** Raised when {!Queue.take} or {!Queue.peek} is applied to an empty queue. *) + + +val create : unit -> 'a t +(** Return a new queue, initially empty. *) + +val add : 'a -> 'a t -> unit +(** [add x q] adds the element [x] at the end of the queue [q]. *) + +val push : 'a -> 'a t -> unit +(** [push] is a synonym for [add]. *) + +val take : 'a t -> 'a +(** [take q] removes and returns the first element in queue [q], + or raises {!Empty} if the queue is empty. *) + +val pop : 'a t -> 'a +(** [pop] is a synonym for [take]. *) + +val peek : 'a t -> 'a +(** [peek q] returns the first element in queue [q], without removing + it from the queue, or raises {!Empty} if the queue is empty. *) + +val top : 'a t -> 'a +(** [top] is a synonym for [peek]. *) + +val clear : 'a t -> unit +(** Discard all elements from a queue. *) + +val copy : 'a t -> 'a t +(** Return a copy of the given queue. *) + +val is_empty : 'a t -> bool +(** Return [true] if the given queue is empty, [false] otherwise. *) + +val length : 'a t -> int +(** Return the number of elements in a queue. *) + +val iter : ('a -> unit) -> 'a t -> unit +(** [iter f q] applies [f] in turn to all elements of [q], + from the least recently entered to the most recently entered. + The queue itself is unchanged. *) + +val fold : ('b -> 'a -> 'b) -> 'b -> 'a t -> 'b +(** [fold f accu q] is equivalent to [List.fold_left f accu l], + where [l] is the list of [q]'s elements. The queue remains + unchanged. *) + +val transfer : 'a t -> 'a t -> unit +(** [transfer q1 q2] adds all of [q1]'s elements at the end of + the queue [q2], then clears [q1]. It is equivalent to the + sequence [iter (fun x -> add x q2) q1; clear q1], but runs + in constant time. *) diff --git a/res_syntax/compiler-libs-406/random.ml b/res_syntax/compiler-libs-406/random.ml new file mode 100644 index 0000000000..1c33e5e6e6 --- /dev/null +++ b/res_syntax/compiler-libs-406/random.ml @@ -0,0 +1,277 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Damien Doligez, projet Para, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Pseudo-random number generator + This is a lagged-Fibonacci F(55, 24, +) with a modified addition + function to enhance the mixing of bits. + If we use normal addition, the low-order bit fails tests 1 and 7 + of the Diehard test suite, and bits 1 and 2 also fail test 7. + If we use multiplication as suggested by Marsaglia, it doesn't fare + much better. 
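For the queue interface above, a short usage sketch (assuming the file is compiled as a stdlib-compatible `Queue` module) showing FIFO order and the constant-time `transfer`:

```ocaml
(* Usage sketch of the queue API above: elements come out in insertion order,
   and transfer moves everything from one queue to another in O(1). *)
let () =
  let q = Queue.create () in
  List.iter (fun x -> Queue.add x q) [1; 2; 3];
  Printf.printf "first=%d length=%d\n" (Queue.peek q) (Queue.length q);
  let q2 = Queue.create () in
  Queue.transfer q q2;                  (* q is now empty; q2 holds 1, 2, 3 *)
  Queue.iter (Printf.printf "%d ") q2;
  print_newline ()
```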
+ By mixing the bits of one of the numbers before addition (XOR the + 5 high-order bits into the low-order bits), we get a generator that + passes all the Diehard tests. +*) + +external random_seed: unit -> int array = "caml_sys_random_seed" + +module State = struct + + type t = { st : int array; mutable idx : int } + + let new_state () = { st = Array.make 55 0; idx = 0 } + let assign st1 st2 = + Array.blit st2.st 0 st1.st 0 55; + st1.idx <- st2.idx + + + let full_init s seed = + let combine accu x = Digest.string (accu ^ string_of_int x) in + let extract d = + Char.code d.[0] + (Char.code d.[1] lsl 8) + (Char.code d.[2] lsl 16) + + (Char.code d.[3] lsl 24) + in + let seed = if Array.length seed = 0 then [| 0 |] else seed in + let l = Array.length seed in + for i = 0 to 54 do + s.st.(i) <- i; + done; + let accu = ref "x" in + for i = 0 to 54 + max 55 l do + let j = i mod 55 in + let k = i mod l in + accu := combine !accu seed.(k); + s.st.(j) <- (s.st.(j) lxor extract !accu) land 0x3FFFFFFF; (* PR#5575 *) + done; + s.idx <- 0 + + + let make seed = + let result = new_state () in + full_init result seed; + result + + + let make_self_init () = make (random_seed ()) + + let copy s = + let result = new_state () in + assign result s; + result + + + (* Returns 30 random bits as an integer 0 <= x < 1073741824 *) + let bits s = + s.idx <- (s.idx + 1) mod 55; + let curval = s.st.(s.idx) in + let newval = s.st.((s.idx + 24) mod 55) + + (curval lxor ((curval lsr 25) land 0x1F)) in + let newval30 = newval land 0x3FFFFFFF in (* PR#5575 *) + s.st.(s.idx) <- newval30; + newval30 + + + let rec intaux s n = + let r = bits s in + let v = r mod n in + if r - v > 0x3FFFFFFF - n + 1 then intaux s n else v + + let int s bound = + if bound > 0x3FFFFFFF || bound <= 0 + then invalid_arg "Random.int" + else intaux s bound + + + let rec int32aux s n = + let b1 = Int32.of_int (bits s) in + let b2 = Int32.shift_left (Int32.of_int (bits s land 1)) 30 in + let r = Int32.logor b1 b2 in + let v = Int32.rem r n in + if Int32.sub r v > Int32.add (Int32.sub Int32.max_int n) 1l + then int32aux s n + else v + + let int32 s bound = + if bound <= 0l + then invalid_arg "Random.int32" + else int32aux s bound + + + let rec int64aux s n = + let b1 = Int64.of_int (bits s) in + let b2 = Int64.shift_left (Int64.of_int (bits s)) 30 in + let b3 = Int64.shift_left (Int64.of_int (bits s land 7)) 60 in + let r = Int64.logor b1 (Int64.logor b2 b3) in + let v = Int64.rem r n in + if Int64.sub r v > Int64.add (Int64.sub Int64.max_int n) 1L + then int64aux s n + else v + + let int64 s bound = + if bound <= 0L + then invalid_arg "Random.int64" + else int64aux s bound + + + let nativeint = + if Nativeint.size = 32 + then fun s bound -> Nativeint.of_int32 (int32 s (Nativeint.to_int32 bound)) + else fun s bound -> Int64.to_nativeint (int64 s (Int64.of_nativeint bound)) + + + (* Returns a float 0 <= x <= 1 with at most 60 bits of precision. *) + let rawfloat s = + let scale = 1073741824.0 (* 2^30 *) + and r1 = Stdlib.float (bits s) + and r2 = Stdlib.float (bits s) + in (r1 /. scale +. r2) /. scale + + + let float s bound = rawfloat s *. bound + + let bool s = (bits s land 1 = 0) + +end + +(* This is the state you get with [init 27182818] and then applying + the "land 0x3FFFFFFF" filter to them. See #5575, #5793, #5977. 
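`intaux`, `int32aux` and `int64aux` above all rely on the same rejection-sampling trick: draw raw bits, and retry whenever the draw falls into the biased tail, so the result stays uniform on `[0, n)`. A standalone restatement with the bit source passed in as a parameter:

```ocaml
(* The rejection-sampling pattern used by intaux above. [gen_bits] stands in
   for State.bits and must return a uniform value in [0, 2^30); draws in the
   biased tail are retried so the result is uniform on [0, n). *)
let rec bounded_int gen_bits n =
  let r = gen_bits () in
  let v = r mod n in
  if r - v > 0x3FFFFFFF - n + 1 then bounded_int gen_bits n else v

let _ = bounded_int (fun () -> 123456789) 10  (* constant stub: returns 9 *)
```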
*) +let default = { + State.st = [| + 0x3ae2522b; 0x1d8d4634; 0x15b4fad0; 0x18b14ace; 0x12f8a3c4; 0x3b086c47; + 0x16d467d6; 0x101d91c7; 0x321df177; 0x0176c193; 0x1ff72bf1; 0x1e889109; + 0x0b464b18; 0x2b86b97c; 0x0891da48; 0x03137463; 0x085ac5a1; 0x15d61f2f; + 0x3bced359; 0x29c1c132; 0x3a86766e; 0x366d8c86; 0x1f5b6222; 0x3ce1b59f; + 0x2ebf78e1; 0x27cd1b86; 0x258f3dc3; 0x389a8194; 0x02e4c44c; 0x18c43f7d; + 0x0f6e534f; 0x1e7df359; 0x055d0b7e; 0x10e84e7e; 0x126198e4; 0x0e7722cb; + 0x1cbede28; 0x3391b964; 0x3d40e92a; 0x0c59933d; 0x0b8cd0b7; 0x24efff1c; + 0x2803fdaa; 0x08ebc72e; 0x0f522e32; 0x05398edc; 0x2144a04c; 0x0aef3cbd; + 0x01ad4719; 0x35b93cd6; 0x2a559d4f; 0x1e6fd768; 0x26e27f36; 0x186f18c3; + 0x2fbf967a; + |]; + State.idx = 0; +} + +let bits () = State.bits default +let int bound = State.int default bound +let int32 bound = State.int32 default bound +let nativeint bound = State.nativeint default bound +let int64 bound = State.int64 default bound +let float scale = State.float default scale +let bool () = State.bool default + +let full_init seed = State.full_init default seed +let init seed = State.full_init default [| seed |] +let self_init () = full_init (random_seed()) + +(* Manipulating the current state. *) + +let get_state () = State.copy default +let set_state s = State.assign default s + +(******************** + +(* Test functions. Not included in the library. + The [chisquare] function should be called with n > 10r. + It returns a triple (low, actual, high). + If low <= actual <= high, the [g] function passed the test, + otherwise it failed. + + Some results: + +init 27182818; chisquare int 100000 1000 +init 27182818; chisquare int 100000 100 +init 27182818; chisquare int 100000 5000 +init 27182818; chisquare int 1000000 1000 +init 27182818; chisquare int 100000 1024 +init 299792643; chisquare int 100000 1024 +init 14142136; chisquare int 100000 1024 +init 27182818; init_diff 1024; chisquare diff 100000 1024 +init 27182818; init_diff 100; chisquare diff 100000 100 +init 27182818; init_diff2 1024; chisquare diff2 100000 1024 +init 27182818; init_diff2 100; chisquare diff2 100000 100 +init 14142136; init_diff2 100; chisquare diff2 100000 100 +init 299792643; init_diff2 100; chisquare diff2 100000 100 +- : float * float * float = (936.754446796632465, 997.5, 1063.24555320336754) +# - : float * float * float = (80., 89.7400000000052387, 120.) +# - : float * float * float = (4858.57864376269, 5045.5, 5141.42135623731) +# - : float * float * float = +(936.754446796632465, 944.805999999982305, 1063.24555320336754) +# - : float * float * float = (960., 1019.19744000000355, 1088.) +# - : float * float * float = (960., 1059.31776000000536, 1088.) +# - : float * float * float = (960., 1039.98463999999512, 1088.) +# - : float * float * float = (960., 1054.38207999999577, 1088.) +# - : float * float * float = (80., 90.096000000005, 120.) +# - : float * float * float = (960., 1076.78720000000612, 1088.) +# - : float * float * float = (80., 85.1760000000067521, 120.) +# - : float * float * float = (80., 85.2160000000003492, 120.) +# - : float * float * float = (80., 80.6220000000030268, 120.) + +*) + +(* Return the sum of the squares of v[i0,i1[ *) +let rec sumsq v i0 i1 = + if i0 >= i1 then 0.0 + else if i1 = i0 + 1 then Stdlib.float v.(i0) *. Stdlib.float v.(i0) + else sumsq v i0 ((i0+i1)/2) +. 
sumsq v ((i0+i1)/2) i1 + + +let chisquare g n r = + if n <= 10 * r then invalid_arg "chisquare"; + let f = Array.make r 0 in + for i = 1 to n do + let t = g r in + f.(t) <- f.(t) + 1 + done; + let t = sumsq f 0 r + and r = Stdlib.float r + and n = Stdlib.float n in + let sr = 2.0 *. sqrt r in + (r -. sr, (r *. t /. n) -. n, r +. sr) + + +(* This is to test for linear dependencies between successive random numbers. +*) +let st = ref 0 +let init_diff r = st := int r +let diff r = + let x1 = !st + and x2 = int r + in + st := x2; + if x1 >= x2 then + x1 - x2 + else + r + x1 - x2 + + +let st1 = ref 0 +and st2 = ref 0 + + +(* This is to test for quadratic dependencies between successive random + numbers. +*) +let init_diff2 r = st1 := int r; st2 := int r +let diff2 r = + let x1 = !st1 + and x2 = !st2 + and x3 = int r + in + st1 := x2; + st2 := x3; + (x3 - x2 - x2 + x1 + 2*r) mod r + + +********************) diff --git a/res_syntax/compiler-libs-406/random.mli b/res_syntax/compiler-libs-406/random.mli new file mode 100644 index 0000000000..f8eae5fac9 --- /dev/null +++ b/res_syntax/compiler-libs-406/random.mli @@ -0,0 +1,107 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Damien Doligez, projet Para, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Pseudo-random number generators (PRNG). *) + +(** {1 Basic functions} *) + +val init : int -> unit +(** Initialize the generator, using the argument as a seed. + The same seed will always yield the same sequence of numbers. *) + +val full_init : int array -> unit +(** Same as {!Random.init} but takes more data as seed. *) + +val self_init : unit -> unit +(** Initialize the generator with a random seed chosen + in a system-dependent way. If [/dev/urandom] is available on + the host machine, it is used to provide a highly random initial + seed. Otherwise, a less random seed is computed from system + parameters (current time, process IDs). *) + +val bits : unit -> int +(** Return 30 random bits in a nonnegative integer. + @before 3.12.0 used a different algorithm (affects all the following + functions) +*) + +val int : int -> int +(** [Random.int bound] returns a random integer between 0 (inclusive) + and [bound] (exclusive). [bound] must be greater than 0 and less + than 2{^30}. *) + +val int32 : Int32.t -> Int32.t +(** [Random.int32 bound] returns a random integer between 0 (inclusive) + and [bound] (exclusive). [bound] must be greater than 0. *) + +val nativeint : Nativeint.t -> Nativeint.t +(** [Random.nativeint bound] returns a random integer between 0 (inclusive) + and [bound] (exclusive). [bound] must be greater than 0. *) + +val int64 : Int64.t -> Int64.t +(** [Random.int64 bound] returns a random integer between 0 (inclusive) + and [bound] (exclusive). [bound] must be greater than 0. *) + +val float : float -> float +(** [Random.float bound] returns a random floating-point number + between 0 and [bound] (inclusive). If [bound] is + negative, the result is negative or zero. If [bound] is 0, + the result is 0. *) + +val bool : unit -> bool +(** [Random.bool ()] returns [true] or [false] with probability 0.5 each. 
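A usage sketch for the basic functions above, assuming the file is compiled as a stdlib-compatible `Random` module; the exact numbers depend on the seeding algorithm, but the same seed always yields the same sequence:

```ocaml
(* Usage sketch of the basic Random interface above: seed explicitly so the
   run is reproducible, then draw a few bounded values. *)
let () =
  Random.init 42;                        (* same seed => same sequence *)
  let xs = Array.init 5 (fun _ -> Random.int 100) in
  Array.iter (Printf.printf "%d ") xs;
  Printf.printf "| %.3f %b\n" (Random.float 1.0) (Random.bool ())
```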
*) + + +(** {1 Advanced functions} *) + +(** The functions from module {!State} manipulate the current state + of the random generator explicitly. + This allows using one or several deterministic PRNGs, + even in a multi-threaded program, without interference from + other parts of the program. +*) + +module State : sig + type t + (** The type of PRNG states. *) + + val make : int array -> t + (** Create a new state and initialize it with the given seed. *) + + val make_self_init : unit -> t + (** Create a new state and initialize it with a system-dependent + low-entropy seed. *) + + val copy : t -> t + (** Return a copy of the given state. *) + + val bits : t -> int + val int : t -> int -> int + val int32 : t -> Int32.t -> Int32.t + val nativeint : t -> Nativeint.t -> Nativeint.t + val int64 : t -> Int64.t -> Int64.t + val float : t -> float -> float + val bool : t -> bool + (** These functions are the same as the basic functions, except that they + use (and update) the given PRNG state instead of the default one. + *) +end + + +val get_state : unit -> State.t +(** Return the current state of the generator used by the basic functions. *) + +val set_state : State.t -> unit +(** Set the state of the generator used by the basic functions. *) diff --git a/res_syntax/compiler-libs-406/set.ml b/res_syntax/compiler-libs-406/set.ml new file mode 100644 index 0000000000..b3cbda47d2 --- /dev/null +++ b/res_syntax/compiler-libs-406/set.ml @@ -0,0 +1,526 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +(* Sets over ordered types *) + +module type OrderedType = + sig + type t + val compare: t -> t -> int + end + +module type S = + sig + type elt + type t + val empty: t + val is_empty: t -> bool + val mem: elt -> t -> bool + val add: elt -> t -> t + val singleton: elt -> t + val remove: elt -> t -> t + val union: t -> t -> t + val inter: t -> t -> t + val diff: t -> t -> t + val compare: t -> t -> int + val equal: t -> t -> bool + val subset: t -> t -> bool + val iter: (elt -> unit) -> t -> unit + val map: (elt -> elt) -> t -> t + val fold: (elt -> 'a -> 'a) -> t -> 'a -> 'a + val for_all: (elt -> bool) -> t -> bool + val exists: (elt -> bool) -> t -> bool + val filter: (elt -> bool) -> t -> t + val partition: (elt -> bool) -> t -> t * t + val cardinal: t -> int + val elements: t -> elt list + val min_elt: t -> elt + val min_elt_opt: t -> elt option + val max_elt: t -> elt + val max_elt_opt: t -> elt option + val choose: t -> elt + val choose_opt: t -> elt option + val split: elt -> t -> t * bool * t + val find: elt -> t -> elt + val find_opt: elt -> t -> elt option + val find_first: (elt -> bool) -> t -> elt + val find_first_opt: (elt -> bool) -> t -> elt option + val find_last: (elt -> bool) -> t -> elt + val find_last_opt: (elt -> bool) -> t -> elt option + val of_list: elt list -> t + end + +module Make(Ord: OrderedType) = + struct + type elt = Ord.t + type t = Empty | Node of {l:t; v:elt; r:t; h:int} + + (* Sets are represented by balanced binary trees (the heights of the + children differ by at most 2 *) + + let height = function + Empty -> 0 + | Node {h} -> h + + (* Creates a new node with left son l, value v and right son r. + We must have all elements of l < v < all elements of r. + l and r must be balanced and | height l - height r | <= 2. + Inline expansion of height for better speed. *) + + let create l v r = + let hl = match l with Empty -> 0 | Node {h} -> h in + let hr = match r with Empty -> 0 | Node {h} -> h in + Node{l; v; r; h=(if hl >= hr then hl + 1 else hr + 1)} + + (* Same as create, but performs one step of rebalancing if necessary. + Assumes l and r balanced and | height l - height r | <= 3. + Inline expansion of create for better speed in the most frequent case + where no rebalancing is required. 
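The comments above state the representation invariant: cached heights are consistent, and the heights of siblings differ by at most 2. A standalone checker for that invariant, useful only as a testing aid:

```ocaml
(* Testing-aid sketch of the balance invariant described above: every cached
   height is 1 + max of the children, and sibling heights differ by <= 2. *)
type 'a tree = Empty | Node of { l : 'a tree; v : 'a; r : 'a tree; h : int }

let height = function Empty -> 0 | Node { h; _ } -> h

let rec well_formed = function
  | Empty -> true
  | Node { l; r; h; _ } ->
    let hl = height l and hr = height r in
    h = 1 + max hl hr && abs (hl - hr) <= 2 && well_formed l && well_formed r
```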
*) + + let bal l v r = + let hl = match l with Empty -> 0 | Node {h} -> h in + let hr = match r with Empty -> 0 | Node {h} -> h in + if hl > hr + 2 then begin + match l with + Empty -> invalid_arg "Set.bal" + | Node{l=ll; v=lv; r=lr} -> + if height ll >= height lr then + create ll lv (create lr v r) + else begin + match lr with + Empty -> invalid_arg "Set.bal" + | Node{l=lrl; v=lrv; r=lrr}-> + create (create ll lv lrl) lrv (create lrr v r) + end + end else if hr > hl + 2 then begin + match r with + Empty -> invalid_arg "Set.bal" + | Node{l=rl; v=rv; r=rr} -> + if height rr >= height rl then + create (create l v rl) rv rr + else begin + match rl with + Empty -> invalid_arg "Set.bal" + | Node{l=rll; v=rlv; r=rlr} -> + create (create l v rll) rlv (create rlr rv rr) + end + end else + Node{l; v; r; h=(if hl >= hr then hl + 1 else hr + 1)} + + (* Insertion of one element *) + + let rec add x = function + Empty -> Node{l=Empty; v=x; r=Empty; h=1} + | Node{l; v; r} as t -> + let c = Ord.compare x v in + if c = 0 then t else + if c < 0 then + let ll = add x l in + if l == ll then t else bal ll v r + else + let rr = add x r in + if r == rr then t else bal l v rr + + let singleton x = Node{l=Empty; v=x; r=Empty; h=1} + + (* Beware: those two functions assume that the added v is *strictly* + smaller (or bigger) than all the present elements in the tree; it + does not test for equality with the current min (or max) element. + Indeed, they are only used during the "join" operation which + respects this precondition. + *) + + let rec add_min_element x = function + | Empty -> singleton x + | Node {l; v; r} -> + bal (add_min_element x l) v r + + let rec add_max_element x = function + | Empty -> singleton x + | Node {l; v; r} -> + bal l v (add_max_element x r) + + (* Same as create and bal, but no assumptions are made on the + relative heights of l and r. *) + + let rec join l v r = + match (l, r) with + (Empty, _) -> add_min_element v r + | (_, Empty) -> add_max_element v l + | (Node{l=ll; v=lv; r=lr; h=lh}, Node{l=rl; v=rv; r=rr; h=rh}) -> + if lh > rh + 2 then bal ll lv (join lr v r) else + if rh > lh + 2 then bal (join l v rl) rv rr else + create l v r + + (* Smallest and greatest element of a set *) + + let rec min_elt = function + Empty -> raise Not_found + | Node{l=Empty; v} -> v + | Node{l} -> min_elt l + + let rec min_elt_opt = function + Empty -> None + | Node{l=Empty; v} -> Some v + | Node{l} -> min_elt_opt l + + let rec max_elt = function + Empty -> raise Not_found + | Node{v; r=Empty} -> v + | Node{r} -> max_elt r + + let rec max_elt_opt = function + Empty -> None + | Node{v; r=Empty} -> Some v + | Node{r} -> max_elt_opt r + + (* Remove the smallest element of the given set *) + + let rec remove_min_elt = function + Empty -> invalid_arg "Set.remove_min_elt" + | Node{l=Empty; r} -> r + | Node{l; v; r} -> bal (remove_min_elt l) v r + + (* Merge two trees l and r into one. + All elements of l must precede the elements of r. + Assume | height l - height r | <= 2. *) + + let merge t1 t2 = + match (t1, t2) with + (Empty, t) -> t + | (t, Empty) -> t + | (_, _) -> bal t1 (min_elt t2) (remove_min_elt t2) + + (* Merge two trees l and r into one. + All elements of l must precede the elements of r. + No assumption on the heights of l and r. *) + + let concat t1 t2 = + match (t1, t2) with + (Empty, t) -> t + | (t, Empty) -> t + | (_, _) -> join t1 (min_elt t2) (remove_min_elt t2) + + (* Splitting. 
split x s returns a triple (l, present, r) where + - l is the set of elements of s that are < x + - r is the set of elements of s that are > x + - present is false if s contains no element equal to x, + or true if s contains an element equal to x. *) + + let rec split x = function + Empty -> + (Empty, false, Empty) + | Node{l; v; r} -> + let c = Ord.compare x v in + if c = 0 then (l, true, r) + else if c < 0 then + let (ll, pres, rl) = split x l in (ll, pres, join rl v r) + else + let (lr, pres, rr) = split x r in (join l v lr, pres, rr) + + (* Implementation of the set operations *) + + let empty = Empty + + let is_empty = function Empty -> true | _ -> false + + let rec mem x = function + Empty -> false + | Node{l; v; r} -> + let c = Ord.compare x v in + c = 0 || mem x (if c < 0 then l else r) + + let rec remove x = function + Empty -> Empty + | (Node{l; v; r} as t) -> + let c = Ord.compare x v in + if c = 0 then merge l r + else + if c < 0 then + let ll = remove x l in + if l == ll then t + else bal ll v r + else + let rr = remove x r in + if r == rr then t + else bal l v rr + + let rec union s1 s2 = + match (s1, s2) with + (Empty, t2) -> t2 + | (t1, Empty) -> t1 + | (Node{l=l1; v=v1; r=r1; h=h1}, Node{l=l2; v=v2; r=r2; h=h2}) -> + if h1 >= h2 then + if h2 = 1 then add v2 s1 else begin + let (l2, _, r2) = split v1 s2 in + join (union l1 l2) v1 (union r1 r2) + end + else + if h1 = 1 then add v1 s2 else begin + let (l1, _, r1) = split v2 s1 in + join (union l1 l2) v2 (union r1 r2) + end + + let rec inter s1 s2 = + match (s1, s2) with + (Empty, _) -> Empty + | (_, Empty) -> Empty + | (Node{l=l1; v=v1; r=r1}, t2) -> + match split v1 t2 with + (l2, false, r2) -> + concat (inter l1 l2) (inter r1 r2) + | (l2, true, r2) -> + join (inter l1 l2) v1 (inter r1 r2) + + let rec diff s1 s2 = + match (s1, s2) with + (Empty, _) -> Empty + | (t1, Empty) -> t1 + | (Node{l=l1; v=v1; r=r1}, t2) -> + match split v1 t2 with + (l2, false, r2) -> + join (diff l1 l2) v1 (diff r1 r2) + | (l2, true, r2) -> + concat (diff l1 l2) (diff r1 r2) + + type enumeration = End | More of elt * t * enumeration + + let rec cons_enum s e = + match s with + Empty -> e + | Node{l; v; r} -> cons_enum l (More(v, r, e)) + + let rec compare_aux e1 e2 = + match (e1, e2) with + (End, End) -> 0 + | (End, _) -> -1 + | (_, End) -> 1 + | (More(v1, r1, e1), More(v2, r2, e2)) -> + let c = Ord.compare v1 v2 in + if c <> 0 + then c + else compare_aux (cons_enum r1 e1) (cons_enum r2 e2) + + let compare s1 s2 = + compare_aux (cons_enum s1 End) (cons_enum s2 End) + + let equal s1 s2 = + compare s1 s2 = 0 + + let rec subset s1 s2 = + match (s1, s2) with + Empty, _ -> + true + | _, Empty -> + false + | Node {l=l1; v=v1; r=r1}, (Node {l=l2; v=v2; r=r2} as t2) -> + let c = Ord.compare v1 v2 in + if c = 0 then + subset l1 l2 && subset r1 r2 + else if c < 0 then + subset (Node {l=l1; v=v1; r=Empty; h=0}) l2 && subset r1 t2 + else + subset (Node {l=Empty; v=v1; r=r1; h=0}) r2 && subset l1 t2 + + let rec iter f = function + Empty -> () + | Node{l; v; r} -> iter f l; f v; iter f r + + let rec fold f s accu = + match s with + Empty -> accu + | Node{l; v; r} -> fold f r (f v (fold f l accu)) + + let rec for_all p = function + Empty -> true + | Node{l; v; r} -> p v && for_all p l && for_all p r + + let rec exists p = function + Empty -> false + | Node{l; v; r} -> p v || exists p l || exists p r + + let rec filter p = function + Empty -> Empty + | (Node{l; v; r}) as t -> + (* call [p] in the expected left-to-right order *) + let l' = filter p l in + let pv = p 
v in + let r' = filter p r in + if pv then + if l==l' && r==r' then t else join l' v r' + else concat l' r' + + let rec partition p = function + Empty -> (Empty, Empty) + | Node{l; v; r} -> + (* call [p] in the expected left-to-right order *) + let (lt, lf) = partition p l in + let pv = p v in + let (rt, rf) = partition p r in + if pv + then (join lt v rt, concat lf rf) + else (concat lt rt, join lf v rf) + + let rec cardinal = function + Empty -> 0 + | Node{l; r} -> cardinal l + 1 + cardinal r + + let rec elements_aux accu = function + Empty -> accu + | Node{l; v; r} -> elements_aux (v :: elements_aux accu r) l + + let elements s = + elements_aux [] s + + let choose = min_elt + + let choose_opt = min_elt_opt + + let rec find x = function + Empty -> raise Not_found + | Node{l; v; r} -> + let c = Ord.compare x v in + if c = 0 then v + else find x (if c < 0 then l else r) + + let rec find_first_aux v0 f = function + Empty -> + v0 + | Node{l; v; r} -> + if f v then + find_first_aux v f l + else + find_first_aux v0 f r + + let rec find_first f = function + Empty -> + raise Not_found + | Node{l; v; r} -> + if f v then + find_first_aux v f l + else + find_first f r + + let rec find_first_opt_aux v0 f = function + Empty -> + Some v0 + | Node{l; v; r} -> + if f v then + find_first_opt_aux v f l + else + find_first_opt_aux v0 f r + + let rec find_first_opt f = function + Empty -> + None + | Node{l; v; r} -> + if f v then + find_first_opt_aux v f l + else + find_first_opt f r + + let rec find_last_aux v0 f = function + Empty -> + v0 + | Node{l; v; r} -> + if f v then + find_last_aux v f r + else + find_last_aux v0 f l + + let rec find_last f = function + Empty -> + raise Not_found + | Node{l; v; r} -> + if f v then + find_last_aux v f r + else + find_last f l + + let rec find_last_opt_aux v0 f = function + Empty -> + Some v0 + | Node{l; v; r} -> + if f v then + find_last_opt_aux v f r + else + find_last_opt_aux v0 f l + + let rec find_last_opt f = function + Empty -> + None + | Node{l; v; r} -> + if f v then + find_last_opt_aux v f r + else + find_last_opt f l + + let rec find_opt x = function + Empty -> None + | Node{l; v; r} -> + let c = Ord.compare x v in + if c = 0 then Some v + else find_opt x (if c < 0 then l else r) + + let try_join l v r = + (* [join l v r] can only be called when (elements of l < v < + elements of r); use [try_join l v r] when this property may + not hold, but you hope it does hold in the common case *) + if (l = Empty || Ord.compare (max_elt l) v < 0) + && (r = Empty || Ord.compare v (min_elt r) < 0) + then join l v r + else union l (add v r) + + let rec map f = function + | Empty -> Empty + | Node{l; v; r} as t -> + (* enforce left-to-right evaluation order *) + let l' = map f l in + let v' = f v in + let r' = map f r in + if l == l' && v == v' && r == r' then t + else try_join l' v' r' + + let of_sorted_list l = + let rec sub n l = + match n, l with + | 0, l -> Empty, l + | 1, x0 :: l -> Node {l=Empty; v=x0; r=Empty; h=1}, l + | 2, x0 :: x1 :: l -> + Node{l=Node{l=Empty; v=x0; r=Empty; h=1}; v=x1; r=Empty; h=2}, l + | 3, x0 :: x1 :: x2 :: l -> + Node{l=Node{l=Empty; v=x0; r=Empty; h=1}; v=x1; + r=Node{l=Empty; v=x2; r=Empty; h=1}; h=2}, l + | n, l -> + let nl = n / 2 in + let left, l = sub nl l in + match l with + | [] -> assert false + | mid :: l -> + let right, l = sub (n - nl - 1) l in + create left mid right, l + in + fst (sub (List.length l) l) + + let of_list l = + match l with + | [] -> empty + | [x0] -> singleton x0 + | [x0; x1] -> add x1 (singleton x0) + | [x0; 
x1; x2] -> add x2 (add x1 (singleton x0)) + | [x0; x1; x2; x3] -> add x3 (add x2 (add x1 (singleton x0))) + | [x0; x1; x2; x3; x4] -> add x4 (add x3 (add x2 (add x1 (singleton x0)))) + | _ -> of_sorted_list (List.sort_uniq Ord.compare l) + end diff --git a/res_syntax/compiler-libs-406/set.mli b/res_syntax/compiler-libs-406/set.mli new file mode 100644 index 0000000000..a20cfe5386 --- /dev/null +++ b/res_syntax/compiler-libs-406/set.mli @@ -0,0 +1,266 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Sets over ordered types. + + This module implements the set data structure, given a total ordering + function over the set elements. All operations over sets + are purely applicative (no side-effects). + The implementation uses balanced binary trees, and is therefore + reasonably efficient: insertion and membership take time + logarithmic in the size of the set, for instance. + + The {!Make} functor constructs implementations for any type, given a + [compare] function. + For instance: + {[ + module IntPairs = + struct + type t = int * int + let compare (x0,y0) (x1,y1) = + match Stdlib.compare x0 x1 with + 0 -> Stdlib.compare y0 y1 + | c -> c + end + + module PairsSet = Set.Make(IntPairs) + + let m = PairsSet.(empty |> add (2,3) |> add (5,7) |> add (11,13)) + ]} + + This creates a new module [PairsSet], with a new type [PairsSet.t] + of sets of [int * int]. +*) + +module type OrderedType = + sig + type t + (** The type of the set elements. *) + + val compare : t -> t -> int + (** A total ordering function over the set elements. + This is a two-argument function [f] such that + [f e1 e2] is zero if the elements [e1] and [e2] are equal, + [f e1 e2] is strictly negative if [e1] is smaller than [e2], + and [f e1 e2] is strictly positive if [e1] is greater than [e2]. + Example: a suitable ordering function is the generic structural + comparison function {!Stdlib.compare}. *) + end +(** Input signature of the functor {!Set.Make}. *) + +module type S = + sig + type elt + (** The type of the set elements. *) + + type t + (** The type of sets. *) + + val empty: t + (** The empty set. *) + + val is_empty: t -> bool + (** Test whether a set is empty or not. *) + + val mem: elt -> t -> bool + (** [mem x s] tests whether [x] belongs to the set [s]. *) + + val add: elt -> t -> t + (** [add x s] returns a set containing all elements of [s], + plus [x]. If [x] was already in [s], [s] is returned unchanged + (the result of the function is then physically equal to [s]). + @before 4.03 Physical equality was not ensured. *) + + val singleton: elt -> t + (** [singleton x] returns the one-element set containing only [x]. *) + + val remove: elt -> t -> t + (** [remove x s] returns a set containing all elements of [s], + except [x]. If [x] was not in [s], [s] is returned unchanged + (the result of the function is then physically equal to [s]). + @before 4.03 Physical equality was not ensured. *) + + val union: t -> t -> t + (** Set union. *) + + val inter: t -> t -> t + (** Set intersection. 
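The `of_sorted_list` helper shown a little earlier builds a balanced tree directly from a sorted, de-duplicated list by splitting it in half recursively, which is why `of_list` can sort once and then construct in linear time. A standalone sketch of that construction:

```ocaml
(* Sketch of the of_sorted_list strategy above: split a sorted, duplicate-free
   list in half recursively, so the tree is balanced by construction. *)
type 'a bin_tree = Leaf | Branch of 'a bin_tree * 'a * 'a bin_tree

let balanced_of_sorted_list l =
  let rec sub n l =
    if n = 0 then (Leaf, l)
    else
      let nl = n / 2 in
      let left, rest = sub nl l in
      match rest with
      | [] -> assert false
      | mid :: rest ->
        let right, rest = sub (n - nl - 1) rest in
        (Branch (left, mid, right), rest)
  in
  fst (sub (List.length l) l)

let _ = balanced_of_sorted_list [1; 2; 3; 4; 5]
```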
*) + + val diff: t -> t -> t + (** Set difference. *) + + val compare: t -> t -> int + (** Total ordering between sets. Can be used as the ordering function + for doing sets of sets. *) + + val equal: t -> t -> bool + (** [equal s1 s2] tests whether the sets [s1] and [s2] are + equal, that is, contain equal elements. *) + + val subset: t -> t -> bool + (** [subset s1 s2] tests whether the set [s1] is a subset of + the set [s2]. *) + + val iter: (elt -> unit) -> t -> unit + (** [iter f s] applies [f] in turn to all elements of [s]. + The elements of [s] are presented to [f] in increasing order + with respect to the ordering over the type of the elements. *) + + val map: (elt -> elt) -> t -> t + (** [map f s] is the set whose elements are [f a0],[f a1]... [f + aN], where [a0],[a1]...[aN] are the elements of [s]. + + The elements are passed to [f] in increasing order + with respect to the ordering over the type of the elements. + + If no element of [s] is changed by [f], [s] is returned + unchanged. (If each output of [f] is physically equal to its + input, the returned set is physically equal to [s].) + @since 4.04.0 *) + + val fold: (elt -> 'a -> 'a) -> t -> 'a -> 'a + (** [fold f s a] computes [(f xN ... (f x2 (f x1 a))...)], + where [x1 ... xN] are the elements of [s], in increasing order. *) + + val for_all: (elt -> bool) -> t -> bool + (** [for_all p s] checks if all elements of the set + satisfy the predicate [p]. *) + + val exists: (elt -> bool) -> t -> bool + (** [exists p s] checks if at least one element of + the set satisfies the predicate [p]. *) + + val filter: (elt -> bool) -> t -> t + (** [filter p s] returns the set of all elements in [s] + that satisfy predicate [p]. If [p] satisfies every element in [s], + [s] is returned unchanged (the result of the function is then + physically equal to [s]). + @before 4.03 Physical equality was not ensured.*) + + val partition: (elt -> bool) -> t -> t * t + (** [partition p s] returns a pair of sets [(s1, s2)], where + [s1] is the set of all the elements of [s] that satisfy the + predicate [p], and [s2] is the set of all the elements of + [s] that do not satisfy [p]. *) + + val cardinal: t -> int + (** Return the number of elements of a set. *) + + val elements: t -> elt list + (** Return the list of all elements of the given set. + The returned list is sorted in increasing order with respect + to the ordering [Ord.compare], where [Ord] is the argument + given to {!Set.Make}. *) + + val min_elt: t -> elt + (** Return the smallest element of the given set + (with respect to the [Ord.compare] ordering), or raise + [Not_found] if the set is empty. *) + + val min_elt_opt: t -> elt option + (** Return the smallest element of the given set + (with respect to the [Ord.compare] ordering), or [None] + if the set is empty. + @since 4.05 + *) + + val max_elt: t -> elt + (** Same as {!Set.S.min_elt}, but returns the largest element of the + given set. *) + + val max_elt_opt: t -> elt option + (** Same as {!Set.S.min_elt_opt}, but returns the largest element of the + given set. + @since 4.05 + *) + + val choose: t -> elt + (** Return one element of the given set, or raise [Not_found] if + the set is empty. Which element is chosen is unspecified, + but equal elements will be chosen for equal sets. *) + + val choose_opt: t -> elt option + (** Return one element of the given set, or [None] if + the set is empty. Which element is chosen is unspecified, + but equal elements will be chosen for equal sets. 
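A usage sketch for the functor interface above, assuming it is compiled as a stdlib-compatible `Set` module; `IntSet` and the values below are made up:

```ocaml
(* Usage sketch of the Set functor above: build an int set, then query it
   with filter, cardinal, min_elt and fold. *)
module IntSet = Set.Make (struct
  type t = int
  let compare = compare
end)

let () =
  let s = IntSet.of_list [3; 1; 4; 1; 5; 9; 2; 6] in
  let evens = IntSet.filter (fun x -> x mod 2 = 0) s in
  Printf.printf "cardinal=%d min=%d sum=%d\n"
    (IntSet.cardinal evens) (IntSet.min_elt evens)
    (IntSet.fold ( + ) evens 0)
```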
+ @since 4.05 + *) + + val split: elt -> t -> t * bool * t + (** [split x s] returns a triple [(l, present, r)], where + [l] is the set of elements of [s] that are + strictly less than [x]; + [r] is the set of elements of [s] that are + strictly greater than [x]; + [present] is [false] if [s] contains no element equal to [x], + or [true] if [s] contains an element equal to [x]. *) + + val find: elt -> t -> elt + (** [find x s] returns the element of [s] equal to [x] (according + to [Ord.compare]), or raise [Not_found] if no such element + exists. + @since 4.01.0 *) + + val find_opt: elt -> t -> elt option + (** [find_opt x s] returns the element of [s] equal to [x] (according + to [Ord.compare]), or [None] if no such element + exists. + @since 4.05 *) + + val find_first: (elt -> bool) -> t -> elt + (** [find_first f s], where [f] is a monotonically increasing function, + returns the lowest element [e] of [s] such that [f e], + or raises [Not_found] if no such element exists. + + For example, [find_first (fun e -> Ord.compare e x >= 0) s] will return + the first element [e] of [s] where [Ord.compare e x >= 0] (intuitively: + [e >= x]), or raise [Not_found] if [x] is greater than any element of + [s]. + + @since 4.05 + *) + + val find_first_opt: (elt -> bool) -> t -> elt option + (** [find_first_opt f s], where [f] is a monotonically increasing function, + returns an option containing the lowest element [e] of [s] such that + [f e], or [None] if no such element exists. + @since 4.05 + *) + + val find_last: (elt -> bool) -> t -> elt + (** [find_last f s], where [f] is a monotonically decreasing function, + returns the highest element [e] of [s] such that [f e], + or raises [Not_found] if no such element exists. + @since 4.05 + *) + + val find_last_opt: (elt -> bool) -> t -> elt option + (** [find_last_opt f s], where [f] is a monotonically decreasing function, + returns an option containing the highest element [e] of [s] such that + [f e], or [None] if no such element exists. + @since 4.05 + *) + + val of_list: elt list -> t + (** [of_list l] creates a set from a list of elements. + This is usually more efficient than folding [add] over the list, + except perhaps for lists with many duplicated elements. + @since 4.02.0 *) + end +(** Output signature of the functor {!Set.Make}. *) + +module Make (Ord : OrderedType) : S with type elt = Ord.t +(** Functor building an implementation of the set structure + given a totally ordered type. *) diff --git a/res_syntax/compiler-libs-406/sort.ml b/res_syntax/compiler-libs-406/sort.ml new file mode 100644 index 0000000000..3e3b12e06d --- /dev/null +++ b/res_syntax/compiler-libs-406/sort.ml @@ -0,0 +1,99 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +(* Merging and sorting *) + +open Array + +let rec merge order l1 l2 = + match l1 with + [] -> l2 + | h1 :: t1 -> + match l2 with + [] -> l1 + | h2 :: t2 -> + if order h1 h2 + then h1 :: merge order t1 l2 + else h2 :: merge order l1 t2 + +let list order l = + let rec initlist = function + [] -> [] + | [e] -> [[e]] + | e1::e2::rest -> + (if order e1 e2 then [e1;e2] else [e2;e1]) :: initlist rest in + let rec merge2 = function + l1::l2::rest -> merge order l1 l2 :: merge2 rest + | x -> x in + let rec mergeall = function + [] -> [] + | [l] -> l + | llist -> mergeall (merge2 llist) in + mergeall(initlist l) + +let swap arr i j = + let tmp = unsafe_get arr i in + unsafe_set arr i (unsafe_get arr j); + unsafe_set arr j tmp + +(* There is a known performance bug in the code below. If you find + it, don't bother reporting it. You're not supposed to use this + module anyway. *) +let array cmp arr = + let rec qsort lo hi = + if hi - lo >= 6 then begin + let mid = (lo + hi) lsr 1 in + (* Select median value from among LO, MID, and HI. Rearrange + LO and HI so the three values are sorted. This lowers the + probability of picking a pathological pivot. It also + avoids extra comparisons on i and j in the two tight "while" + loops below. *) + if cmp (unsafe_get arr mid) (unsafe_get arr lo) then swap arr mid lo; + if cmp (unsafe_get arr hi) (unsafe_get arr mid) then begin + swap arr mid hi; + if cmp (unsafe_get arr mid) (unsafe_get arr lo) then swap arr mid lo + end; + let pivot = unsafe_get arr mid in + let i = ref (lo + 1) and j = ref (hi - 1) in + if not (cmp pivot (unsafe_get arr hi)) + || not (cmp (unsafe_get arr lo) pivot) + then raise (Invalid_argument "Sort.array"); + while !i < !j do + while not (cmp pivot (unsafe_get arr !i)) do incr i done; + while not (cmp (unsafe_get arr !j) pivot) do decr j done; + if !i < !j then swap arr !i !j; + incr i; decr j + done; + (* Recursion on smaller half, tail-call on larger half *) + if !j - lo <= hi - !i then begin + qsort lo !j; qsort !i hi + end else begin + qsort !i hi; qsort lo !j + end + end in + qsort 0 (Array.length arr - 1); + (* Finish sorting by insertion sort *) + for i = 1 to Array.length arr - 1 do + let val_i = (unsafe_get arr i) in + if not (cmp (unsafe_get arr (i - 1)) val_i) then begin + unsafe_set arr i (unsafe_get arr (i - 1)); + let j = ref (i - 1) in + while !j >= 1 && not (cmp (unsafe_get arr (!j - 1)) val_i) do + unsafe_set arr !j (unsafe_get arr (!j - 1)); + decr j + done; + unsafe_set arr !j val_i + end + done diff --git a/res_syntax/compiler-libs-406/sort.mli b/res_syntax/compiler-libs-406/sort.mli new file mode 100644 index 0000000000..80ebad2600 --- /dev/null +++ b/res_syntax/compiler-libs-406/sort.mli @@ -0,0 +1,44 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Sorting and merging lists. + + @deprecated This module is obsolete and exists only for backward + compatibility. 
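`Sort.list` above is a bottom-up merge sort driven by a boolean ordering predicate: pair up the elements, then repeatedly merge adjacent runs until a single sorted run remains. A standalone sketch of that scheme, with `le` as the predicate (true when its first argument should come first):

```ocaml
(* Sketch of the bottom-up merge sort behind Sort.list above. *)
let rec merge le l1 l2 =
  match l1, l2 with
  | [], l | l, [] -> l
  | h1 :: t1, h2 :: t2 ->
    if le h1 h2 then h1 :: merge le t1 l2 else h2 :: merge le l1 t2

let sort le l =
  let rec runs = function
    | [] -> []
    | [x] -> [[x]]
    | x :: y :: rest -> (if le x y then [x; y] else [y; x]) :: runs rest
  in
  let rec pass = function
    | r1 :: r2 :: rest -> merge le r1 r2 :: pass rest
    | rs -> rs
  in
  let rec all_merged = function
    | [] -> []
    | [r] -> r
    | rs -> all_merged (pass rs)
  in
  all_merged (runs l)

let _ = sort ( <= ) [5; 3; 8; 1]   (* = [1; 3; 5; 8] *)
```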
+ The sorting functions in {!Array} and {!List} should be used instead. + The new functions are faster and use less memory. +*) + +val list : ('a -> 'a -> bool) -> 'a list -> 'a list + [@@ocaml.deprecated "Use List.sort instead."] +(** Sort a list in increasing order according to an ordering predicate. + The predicate should return [true] if its first argument is + less than or equal to its second argument. *) + +val array : ('a -> 'a -> bool) -> 'a array -> unit + [@@ocaml.deprecated "Use Array.sort instead."] +(** Sort an array in increasing order according to an + ordering predicate. + The predicate should return [true] if its first argument is + less than or equal to its second argument. + The array is sorted in place. *) + +val merge : ('a -> 'a -> bool) -> 'a list -> 'a list -> 'a list + [@@ocaml.deprecated "Use List.merge instead."] +(** Merge two lists according to the given predicate. + Assuming the two argument lists are sorted according to the + predicate, [merge] returns a sorted list containing the elements + from the two lists. The behavior is undefined if the two + argument lists were not sorted. *) diff --git a/res_syntax/compiler-libs-406/spacetime.ml b/res_syntax/compiler-libs-406/spacetime.ml new file mode 100644 index 0000000000..3e8abe1d09 --- /dev/null +++ b/res_syntax/compiler-libs-406/spacetime.ml @@ -0,0 +1,91 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Mark Shinwell and Leo White, Jane Street Europe *) +(* *) +(* Copyright 2015--2016 Jane Street Group LLC *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
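
A brief sketch contrasting the deprecated Sort functions above with the replacement the documentation recommends (illustrative values; calling Sort triggers a deprecation warning, not an error):

  let _old = Sort.list (<=) [3; 1; 2]        (* [1; 2; 3], using a boolean "less or equal" predicate *)
  let _new = List.sort compare [3; 1; 2]     (* preferred: same result with a three-way comparison *)
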
*) +(* *) +(**************************************************************************) + +external spacetime_enabled : unit -> bool + = "caml_spacetime_enabled" [@@noalloc] + +let enabled = spacetime_enabled () + +let if_spacetime_enabled f = + if enabled then f () else () + +module Series = struct + type t = { + channel : out_channel; + mutable closed : bool; + } + + external write_magic_number : out_channel -> unit + = "caml_spacetime_only_works_for_native_code" + "caml_spacetime_write_magic_number" + + external register_channel_for_spacetime : out_channel -> unit + = "caml_register_channel_for_spacetime" + + let create ~path = + if spacetime_enabled () then begin + let channel = open_out path in + register_channel_for_spacetime channel; + let t = + { channel = channel; + closed = false; + } + in + write_magic_number t.channel; + t + end else begin + { channel = stdout; (* arbitrary value *) + closed = true; + } + end + + external save_event : ?time:float -> out_channel -> event_name:string -> unit + = "caml_spacetime_only_works_for_native_code" + "caml_spacetime_save_event" + + let save_event ?time t ~event_name = + if_spacetime_enabled (fun () -> + save_event ?time t.channel ~event_name) + + external save_trie : ?time:float -> out_channel -> unit + = "caml_spacetime_only_works_for_native_code" + "caml_spacetime_save_trie" + + let save_and_close ?time t = + if_spacetime_enabled (fun () -> + if t.closed then failwith "Series is closed"; + save_trie ?time t.channel; + close_out t.channel; + t.closed <- true) +end + +module Snapshot = struct + external take : ?time:float -> out_channel -> unit + = "caml_spacetime_only_works_for_native_code" + "caml_spacetime_take_snapshot" + + let take ?time { Series.closed; channel } = + if_spacetime_enabled (fun () -> + if closed then failwith "Series is closed"; + Gc.minor (); + take ?time channel) +end + +external save_event_for_automatic_snapshots : event_name:string -> unit + = "caml_spacetime_only_works_for_native_code" + "caml_spacetime_save_event_for_automatic_snapshots" + +let save_event_for_automatic_snapshots ~event_name = + if_spacetime_enabled (fun () -> + save_event_for_automatic_snapshots ~event_name) diff --git a/res_syntax/compiler-libs-406/spacetime.mli b/res_syntax/compiler-libs-406/spacetime.mli new file mode 100644 index 0000000000..1f770905d7 --- /dev/null +++ b/res_syntax/compiler-libs-406/spacetime.mli @@ -0,0 +1,99 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Mark Shinwell and Leo White, Jane Street Europe *) +(* *) +(* Copyright 2015--2016 Jane Street Group LLC *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Profiling of a program's space behaviour over time. + Currently only supported on x86-64 platforms running 64-bit code. + + To use the functions in this module you must: + - configure the compiler with "-spacetime"; + - compile to native code. + Without these conditions being satisfied the functions in this module + will have no effect. + + Instead of manually taking profiling heap snapshots with this module it is + possible to use an automatic snapshot facility that writes profiling + information at fixed intervals to a file. 
To enable this, all that needs to + be done is to build the relevant program using a compiler configured with + -spacetime; and set the environment variable OCAML_SPACETIME_INTERVAL to an + integer number of milliseconds giving the interval between profiling heap + snapshots. This interval should not be made excessively small relative to + the running time of the program. A typical interval to start with might be + 1/100 of the running time of the program. The program must exit "normally" + (i.e. by calling [exit], with whatever exit code, rather than being + abnormally terminated by a signal) so that the snapshot file is + correctly completed. + + When using the automatic snapshot mode the profiling output is written + to a file called "spacetime-" where is the process ID of the + program. (If the program forks and continues executing then multiple + files may be produced with different pid numbers.) The profiling output + is by default written to the current working directory when the program + starts. This may be customised by setting the OCAML_SPACETIME_SNAPSHOT_DIR + environment variable to the name of the desired directory. + + If using automatic snapshots the presence of the + [save_event_for_automatic_snapshots] function, below, should be noted. + + The functions in this module are thread safe. + + For functions to decode the information recorded by the profiler, + see the Spacetime offline library in otherlibs/. *) + +(** [enabled] is [true] if the compiler is configured with spacetime and [false] + otherwise *) +val enabled : bool + +module Series : sig + (** Type representing a file that will hold a series of heap snapshots + together with additional information required to interpret those + snapshots. *) + type t + + (** [create ~path] creates a series file at [path]. *) + val create : path:string -> t + + (** [save_event] writes an event, which is an arbitrary string, into the + given series file. This may be used for identifying particular points + during program execution when analysing the profile. + The optional [time] parameter is as for {!Snapshot.take}. + *) + val save_event : ?time:float -> t -> event_name:string -> unit + + (** [save_and_close series] writes information into [series] required for + interpreting the snapshots that [series] contains and then closes the + [series] file. This function must be called to produce a valid series + file. + The optional [time] parameter is as for {!Snapshot.take}. + *) + val save_and_close : ?time:float -> t -> unit +end + +module Snapshot : sig + (** [take series] takes a snapshot of the profiling annotations on the values + in the minor and major heaps, together with GC stats, and write the + result to the [series] file. This function triggers a minor GC but does + not allocate any memory itself. + If the optional [time] is specified, it will be used instead of the + result of {!Sys.time} as the timestamp of the snapshot. Such [time]s + should start from zero and be monotonically increasing. This parameter + is intended to be used so that snapshots can be correlated against wall + clock time (which is not supported in the standard library) rather than + elapsed CPU time. + *) + val take : ?time:float -> Series.t -> unit +end + +(** Like {!Series.save_event}, but writes to the automatic snapshot file. + This function is a no-op if OCAML_SPACETIME_INTERVAL was not set. 
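
A hedged sketch of driving the Series/Snapshot interface documented above (the path "profile.spacetime" and the event name are illustrative; every call is a no-op unless the program was built with a -spacetime-configured native compiler):

  let () =
    if Spacetime.enabled then begin
      let series = Spacetime.Series.create ~path:"profile.spacetime" in
      Spacetime.Snapshot.take series;
      Spacetime.Series.save_event series ~event_name:"after-first-snapshot";
      Spacetime.Series.save_and_close series
    end
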
*) +val save_event_for_automatic_snapshots : event_name:string -> unit diff --git a/res_syntax/compiler-libs-406/stack.ml b/res_syntax/compiler-libs-406/stack.ml new file mode 100644 index 0000000000..21dad3e848 --- /dev/null +++ b/res_syntax/compiler-libs-406/stack.ml @@ -0,0 +1,44 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +type 'a t = { mutable c : 'a list; mutable len : int; } + +exception Empty + +let create () = { c = []; len = 0; } + +let clear s = s.c <- []; s.len <- 0 + +let copy s = { c = s.c; len = s.len; } + +let push x s = s.c <- x :: s.c; s.len <- s.len + 1 + +let pop s = + match s.c with + | hd::tl -> s.c <- tl; s.len <- s.len - 1; hd + | [] -> raise Empty + +let top s = + match s.c with + | hd::_ -> hd + | [] -> raise Empty + +let is_empty s = (s.c = []) + +let length s = s.len + +let iter f s = List.iter f s.c + +let fold f acc s = List.fold_left f acc s.c diff --git a/res_syntax/compiler-libs-406/stack.mli b/res_syntax/compiler-libs-406/stack.mli new file mode 100644 index 0000000000..4ce899536c --- /dev/null +++ b/res_syntax/compiler-libs-406/stack.mli @@ -0,0 +1,63 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Last-in first-out stacks. + + This module implements stacks (LIFOs), with in-place modification. +*) + +type 'a t +(** The type of stacks containing elements of type ['a]. *) + +exception Empty +(** Raised when {!Stack.pop} or {!Stack.top} is applied to an empty stack. *) + + +val create : unit -> 'a t +(** Return a new stack, initially empty. *) + +val push : 'a -> 'a t -> unit +(** [push x s] adds the element [x] at the top of stack [s]. *) + +val pop : 'a t -> 'a +(** [pop s] removes and returns the topmost element in stack [s], + or raises {!Empty} if the stack is empty. *) + +val top : 'a t -> 'a +(** [top s] returns the topmost element in stack [s], + or raises {!Empty} if the stack is empty. *) + +val clear : 'a t -> unit +(** Discard all elements from a stack. *) + +val copy : 'a t -> 'a t +(** Return a copy of the given stack. *) + +val is_empty : 'a t -> bool +(** Return [true] if the given stack is empty, [false] otherwise. *) + +val length : 'a t -> int +(** Return the number of elements in a stack. Time complexity O(1) *) + +val iter : ('a -> unit) -> 'a t -> unit +(** [iter f s] applies [f] in turn to all elements of [s], + from the element at the top of the stack to the element at the + bottom of the stack. The stack itself is unchanged. 
*) + +val fold : ('b -> 'a -> 'b) -> 'b -> 'a t -> 'b +(** [fold f accu s] is [(f (... (f (f accu x1) x2) ...) xn)] + where [x1] is the top of the stack, [x2] the second element, + and [xn] the bottom element. The stack is unchanged. + @since 4.03 *) diff --git a/res_syntax/compiler-libs-406/stdLabels.ml b/res_syntax/compiler-libs-406/stdLabels.ml new file mode 100644 index 0000000000..664472b180 --- /dev/null +++ b/res_syntax/compiler-libs-406/stdLabels.ml @@ -0,0 +1,24 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Jacques Garrigue, Kyoto University RIMS *) +(* *) +(* Copyright 2001 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Module [StdLabels]: meta-module for labelled libraries *) + +module Array = ArrayLabels + +module List = ListLabels + +module String = StringLabels + +module Bytes = BytesLabels diff --git a/res_syntax/compiler-libs-406/stdLabels.mli b/res_syntax/compiler-libs-406/stdLabels.mli new file mode 100644 index 0000000000..4b24fd2b5f --- /dev/null +++ b/res_syntax/compiler-libs-406/stdLabels.mli @@ -0,0 +1,29 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Jacques Garrigue, Kyoto University RIMS *) +(* *) +(* Copyright 2001 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Standard labeled libraries. + + This meta-module provides labelized version of the {!Array}, + {!Bytes}, {!List} and {!String} modules. + + They only differ by their labels. Detailed interfaces can be found + in [arrayLabels.mli], [bytesLabels.mli], [listLabels.mli] + and [stringLabels.mli]. +*) + +module Array = ArrayLabels +module Bytes = BytesLabels +module List = ListLabels +module String = StringLabels diff --git a/res_syntax/compiler-libs-406/std_exit.ml b/res_syntax/compiler-libs-406/std_exit.ml new file mode 100644 index 0000000000..1b97652da7 --- /dev/null +++ b/res_syntax/compiler-libs-406/std_exit.ml @@ -0,0 +1,18 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
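
A minimal sketch of the Stack and StdLabels interfaces documented above (illustrative values only):

  let s = Stack.create ()
  let () = Stack.push 1 s; Stack.push 2 s
  let _top = Stack.top s                               (* 2 *)
  let _sum = Stack.fold (+) 0 s                        (* 3: folds from the top element down to the bottom *)
  let () = assert (Stack.pop s = 2 && Stack.length s = 1)
  let _mapped = StdLabels.List.map ~f:succ [1; 2; 3]   (* [2; 3; 4] via the labelled List module *)
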
*) +(* *) +(**************************************************************************) + +(* Ensure that [at_exit] functions are called at the end of every program *) + +let _ = do_at_exit() diff --git a/res_syntax/compiler-libs-406/stream.ml b/res_syntax/compiler-libs-406/stream.ml new file mode 100644 index 0000000000..e9b5e6113c --- /dev/null +++ b/res_syntax/compiler-libs-406/stream.ml @@ -0,0 +1,233 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Daniel de Rauglaudre, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1997 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +type 'a t = 'a cell option +and 'a cell = { mutable count : int; mutable data : 'a data } +and 'a data = + Sempty + | Scons of 'a * 'a data + | Sapp of 'a data * 'a data + | Slazy of 'a data Lazy.t + | Sgen of 'a gen + | Sbuffio : buffio -> char data +and 'a gen = { mutable curr : 'a option option; func : int -> 'a option } +and buffio = + { ic : in_channel; buff : bytes; mutable len : int; mutable ind : int } + +exception Failure +exception Error of string + +let count = function + | None -> 0 + | Some { count } -> count +let data = function + | None -> Sempty + | Some { data } -> data + +let fill_buff b = + b.len <- input b.ic b.buff 0 (Bytes.length b.buff); b.ind <- 0 + + +let rec get_data : type v. int -> v data -> v data = fun count d -> match d with + (* Returns either Sempty or Scons(a, _) even when d is a generator + or a buffer. In those cases, the item a is seen as extracted from + the generator/buffer. + The count parameter is used for calling `Sgen-functions'. *) + Sempty | Scons (_, _) -> d + | Sapp (d1, d2) -> + begin match get_data count d1 with + Scons (a, d11) -> Scons (a, Sapp (d11, d2)) + | Sempty -> get_data count d2 + | _ -> assert false + end + | Sgen {curr = Some None} -> Sempty + | Sgen ({curr = Some(Some a)} as g) -> + g.curr <- None; Scons(a, d) + | Sgen g -> + begin match g.func count with + None -> g.curr <- Some(None); Sempty + | Some a -> Scons(a, d) + (* Warning: anyone using g thinks that an item has been read *) + end + | Sbuffio b -> + if b.ind >= b.len then fill_buff b; + if b.len == 0 then Sempty else + let r = Bytes.unsafe_get b.buff b.ind in + (* Warning: anyone using g thinks that an item has been read *) + b.ind <- succ b.ind; Scons(r, d) + | Slazy f -> get_data count (Lazy.force f) + + +let rec peek_data : type v. v cell -> v option = fun s -> + (* consult the first item of s *) + match s.data with + Sempty -> None + | Scons (a, _) -> Some a + | Sapp (_, _) -> + begin match get_data s.count s.data with + Scons(a, _) as d -> s.data <- d; Some a + | Sempty -> None + | _ -> assert false + end + | Slazy f -> s.data <- (Lazy.force f); peek_data s + | Sgen {curr = Some a} -> a + | Sgen g -> let x = g.func s.count in g.curr <- Some x; x + | Sbuffio b -> + if b.ind >= b.len then fill_buff b; + if b.len == 0 then begin s.data <- Sempty; None end + else Some (Bytes.unsafe_get b.buff b.ind) + + +let peek = function + | None -> None + | Some s -> peek_data s + + +let rec junk_data : type v. 
v cell -> unit = fun s -> + match s.data with + Scons (_, d) -> s.count <- (succ s.count); s.data <- d + | Sgen ({curr = Some _} as g) -> s.count <- (succ s.count); g.curr <- None + | Sbuffio b -> s.count <- (succ s.count); b.ind <- succ b.ind + | _ -> + match peek_data s with + None -> () + | Some _ -> junk_data s + + +let junk = function + | None -> () + | Some data -> junk_data data + +let rec nget_data n s = + if n <= 0 then [], s.data, 0 + else + match peek_data s with + Some a -> + junk_data s; + let (al, d, k) = nget_data (pred n) s in a :: al, Scons (a, d), succ k + | None -> [], s.data, 0 + + +let npeek_data n s = + let (al, d, len) = nget_data n s in + s.count <- (s.count - len); + s.data <- d; + al + + +let npeek n = function + | None -> [] + | Some d -> npeek_data n d + +let next s = + match peek s with + Some a -> junk s; a + | None -> raise Failure + + +let empty s = + match peek s with + Some _ -> raise Failure + | None -> () + + +let iter f strm = + let rec do_rec () = + match peek strm with + Some a -> junk strm; ignore(f a); do_rec () + | None -> () + in + do_rec () + + +(* Stream building functions *) + +let from f = Some {count = 0; data = Sgen {curr = None; func = f}} + +let of_list l = + Some {count = 0; data = List.fold_right (fun x l -> Scons (x, l)) l Sempty} + + +let of_string s = + let count = ref 0 in + from (fun _ -> + (* We cannot use the index passed by the [from] function directly + because it returns the current stream count, with absolutely no + guarantee that it will start from 0. For example, in the case + of [Stream.icons 'c' (Stream.from_string "ab")], the first + access to the string will be made with count [1] already. + *) + let c = !count in + if c < String.length s + then (incr count; Some s.[c]) + else None) + + +let of_bytes s = + let count = ref 0 in + from (fun _ -> + let c = !count in + if c < Bytes.length s + then (incr count; Some (Bytes.get s c)) + else None) + + +let of_channel ic = + Some {count = 0; + data = Sbuffio {ic = ic; buff = Bytes.create 4096; len = 0; ind = 0}} + + +(* Stream expressions builders *) + +let iapp i s = Some {count = 0; data = Sapp (data i, data s)} +let icons i s = Some {count = 0; data = Scons (i, data s)} +let ising i = Some {count = 0; data = Scons (i, Sempty)} + +let lapp f s = + Some {count = 0; data = Slazy (lazy(Sapp (data (f ()), data s)))} + +let lcons f s = Some {count = 0; data = Slazy (lazy(Scons (f (), data s)))} +let lsing f = Some {count = 0; data = Slazy (lazy(Scons (f (), Sempty)))} + +let sempty = None +let slazy f = Some {count = 0; data = Slazy (lazy(data (f ())))} + +(* For debugging use *) + +let rec dump : type v. (v -> unit) -> v t -> unit = fun f s -> + print_string "{count = "; + print_int (count s); + print_string "; data = "; + dump_data f (data s); + print_string "}"; + print_newline () +and dump_data : type v. 
(v -> unit) -> v data -> unit = fun f -> + function + Sempty -> print_string "Sempty" + | Scons (a, d) -> + print_string "Scons ("; + f a; + print_string ", "; + dump_data f d; + print_string ")" + | Sapp (d1, d2) -> + print_string "Sapp ("; + dump_data f d1; + print_string ", "; + dump_data f d2; + print_string ")" + | Slazy _ -> print_string "Slazy" + | Sgen _ -> print_string "Sgen" + | Sbuffio _ -> print_string "Sbuffio" diff --git a/res_syntax/compiler-libs-406/stream.mli b/res_syntax/compiler-libs-406/stream.mli new file mode 100644 index 0000000000..e52bab8961 --- /dev/null +++ b/res_syntax/compiler-libs-406/stream.mli @@ -0,0 +1,110 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Daniel de Rauglaudre, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1997 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Streams and parsers. *) + +type 'a t +(** The type of streams holding values of type ['a]. *) + +exception Failure +(** Raised by parsers when none of the first components of the stream + patterns is accepted. *) + +exception Error of string +(** Raised by parsers when the first component of a stream pattern is + accepted, but one of the following components is rejected. *) + + +(** {1 Stream builders} *) + +val from : (int -> 'a option) -> 'a t +(** [Stream.from f] returns a stream built from the function [f]. + To create a new stream element, the function [f] is called with + the current stream count. The user function [f] must return either + [Some ] for a value or [None] to specify the end of the + stream. + + Do note that the indices passed to [f] may not start at [0] in the + general case. For example, [[< '0; '1; Stream.from f >]] would call + [f] the first time with count [2]. +*) + +val of_list : 'a list -> 'a t +(** Return the stream holding the elements of the list in the same + order. *) + +val of_string : string -> char t +(** Return the stream of the characters of the string parameter. *) + +val of_bytes : bytes -> char t +(** Return the stream of the characters of the bytes parameter. + @since 4.02.0 *) + +val of_channel : in_channel -> char t +(** Return the stream of the characters read from the input channel. *) + + +(** {1 Stream iterator} *) + +val iter : ('a -> unit) -> 'a t -> unit +(** [Stream.iter f s] scans the whole stream s, applying function [f] + in turn to each stream element encountered. *) + + +(** {1 Predefined parsers} *) + +val next : 'a t -> 'a +(** Return the first element of the stream and remove it from the + stream. Raise {!Stream.Failure} if the stream is empty. *) + +val empty : 'a t -> unit +(** Return [()] if the stream is empty, else raise {!Stream.Failure}. *) + + +(** {1 Useful functions} *) + +val peek : 'a t -> 'a option +(** Return [Some] of "the first element" of the stream, or [None] if + the stream is empty. *) + +val junk : 'a t -> unit +(** Remove the first element of the stream, possibly unfreezing + it before. *) + +val count : 'a t -> int +(** Return the current count of the stream elements, i.e. the number + of the stream elements discarded. 
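
A small sketch of the Stream API documented above (illustrative values; peek leaves the element in the stream, junk discards it, npeek does not consume):

  let s = Stream.of_list [10; 20; 30]
  let _first = Stream.peek s                 (* Some 10 *)
  let () = Stream.junk s                     (* discard 10 *)
  let _next_two = Stream.npeek 2 s           (* [20; 30] *)
  let _consumed = Stream.count s             (* 1 element discarded so far *)
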
*) + +val npeek : int -> 'a t -> 'a list +(** [npeek n] returns the list of the [n] first elements of + the stream, or all its remaining elements if less than [n] + elements are available. *) + +(**/**) + +(* The following is for system use only. Do not call directly. *) + +val iapp : 'a t -> 'a t -> 'a t +val icons : 'a -> 'a t -> 'a t +val ising : 'a -> 'a t + +val lapp : (unit -> 'a t) -> 'a t -> 'a t +val lcons : (unit -> 'a) -> 'a t -> 'a t +val lsing : (unit -> 'a) -> 'a t + +val sempty : 'a t +val slazy : (unit -> 'a t) -> 'a t + +val dump : ('a -> unit) -> 'a t -> unit diff --git a/res_syntax/compiler-libs-406/stypes.ml b/res_syntax/compiler-libs-406/stypes.ml new file mode 100644 index 0000000000..8435669ec6 --- /dev/null +++ b/res_syntax/compiler-libs-406/stypes.ml @@ -0,0 +1,210 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Damien Doligez, projet Moscova, INRIA Rocquencourt *) +(* *) +(* Copyright 2003 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Recording and dumping (partial) type information *) + +(* + We record all types in a list as they are created. + This means we can dump type information even if type inference fails, + which is extremely important, since type information is most + interesting in case of errors. +*) + +open Annot;; +open Lexing;; +open Location;; +open Typedtree;; + +let output_int oc i = output_string oc (string_of_int i) + +type annotation = + | Ti_pat of pattern + | Ti_expr of expression + | Ti_class of class_expr + | Ti_mod of module_expr + | An_call of Location.t * Annot.call + | An_ident of Location.t * string * Annot.ident +;; + +let get_location ti = + match ti with + Ti_pat p -> p.pat_loc + | Ti_expr e -> e.exp_loc + | Ti_class c -> c.cl_loc + | Ti_mod m -> m.mod_loc + | An_call (l, _k) -> l + | An_ident (l, _s, _k) -> l +;; + +let annotations = ref ([] : annotation list);; +let phrases = ref ([] : Location.t list);; + +let record ti = + if !Clflags.annotations && not (get_location ti).Location.loc_ghost then + annotations := ti :: !annotations +;; + +let record_phrase loc = + if !Clflags.annotations then phrases := loc :: !phrases; +;; + +(* comparison order: + the intervals are sorted by order of increasing upper bound + same upper bound -> sorted by decreasing lower bound +*) +let cmp_loc_inner_first loc1 loc2 = + match compare loc1.loc_end.pos_cnum loc2.loc_end.pos_cnum with + | 0 -> compare loc2.loc_start.pos_cnum loc1.loc_start.pos_cnum + | x -> x +;; +let cmp_ti_inner_first ti1 ti2 = + cmp_loc_inner_first (get_location ti1) (get_location ti2) +;; + +let print_position pp pos = + if pos = dummy_pos then + output_string pp "--" + else begin + output_char pp '\"'; + output_string pp (String.escaped pos.pos_fname); + output_string pp "\" "; + output_int pp pos.pos_lnum; + output_char pp ' '; + output_int pp pos.pos_bol; + output_char pp ' '; + output_int pp pos.pos_cnum; + end +;; + +let print_location pp loc = + print_position pp loc.loc_start; + output_char pp ' '; + print_position pp loc.loc_end; +;; + +let sort_filter_phrases () = + let ph = List.sort (fun x y -> cmp_loc_inner_first y x) !phrases in + let rec loop accu cur l = + match l with + | 
[] -> accu + | loc :: t -> + if cur.loc_start.pos_cnum <= loc.loc_start.pos_cnum + && cur.loc_end.pos_cnum >= loc.loc_end.pos_cnum + then loop accu cur t + else loop (loc :: accu) loc t + in + phrases := loop [] Location.none ph; +;; + +let rec printtyp_reset_maybe loc = + match !phrases with + | cur :: t when cur.loc_start.pos_cnum <= loc.loc_start.pos_cnum -> + Printtyp.reset (); + phrases := t; + printtyp_reset_maybe loc; + | _ -> () +;; + +let call_kind_string k = + match k with + | Tail -> "tail" + | Stack -> "stack" + | Inline -> "inline" +;; + +let print_ident_annot pp str k = + match k with + | Idef l -> + output_string pp "def "; + output_string pp str; + output_char pp ' '; + print_location pp l; + output_char pp '\n' + | Iref_internal l -> + output_string pp "int_ref "; + output_string pp str; + output_char pp ' '; + print_location pp l; + output_char pp '\n' + | Iref_external -> + output_string pp "ext_ref "; + output_string pp str; + output_char pp '\n' +;; + +(* The format of the annotation file is documented in emacs/caml-types.el. *) + +let print_info pp prev_loc ti = + match ti with + | Ti_class _ | Ti_mod _ -> prev_loc + | Ti_pat {pat_loc = loc; pat_type = typ; pat_env = env} + | Ti_expr {exp_loc = loc; exp_type = typ; exp_env = env} -> + if loc <> prev_loc then begin + print_location pp loc; + output_char pp '\n' + end; + output_string pp "type(\n"; + printtyp_reset_maybe loc; + Printtyp.mark_loops typ; + Format.pp_print_string Format.str_formatter " "; + Printtyp.wrap_printing_env env + (fun () -> Printtyp.type_sch Format.str_formatter typ); + Format.pp_print_newline Format.str_formatter (); + let s = Format.flush_str_formatter () in + output_string pp s; + output_string pp ")\n"; + loc + | An_call (loc, k) -> + if loc <> prev_loc then begin + print_location pp loc; + output_char pp '\n' + end; + output_string pp "call(\n "; + output_string pp (call_kind_string k); + output_string pp "\n)\n"; + loc + | An_ident (loc, str, k) -> + if loc <> prev_loc then begin + print_location pp loc; + output_char pp '\n' + end; + output_string pp "ident(\n "; + print_ident_annot pp str k; + output_string pp ")\n"; + loc +;; + +let get_info () = + let info = List.fast_sort cmp_ti_inner_first !annotations in + annotations := []; + info +;; + +let dump filename = + if !Clflags.annotations then begin + let do_dump _temp_filename pp = + let info = get_info () in + sort_filter_phrases (); + ignore (List.fold_left (print_info pp) Location.none info) in + begin match filename with + | None -> do_dump "" stdout + | Some filename -> + Misc.output_to_file_via_temporary ~mode:[Open_text] filename do_dump + end; + phrases := []; + end else begin + annotations := []; + end; +;; diff --git a/res_syntax/compiler-libs-406/stypes.mli b/res_syntax/compiler-libs-406/stypes.mli new file mode 100644 index 0000000000..46df1ce69d --- /dev/null +++ b/res_syntax/compiler-libs-406/stypes.mli @@ -0,0 +1,36 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Damien Doligez, projet Moscova, INRIA Rocquencourt *) +(* *) +(* Copyright 2003 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +(* Recording and dumping (partial) type information *) + +(* Clflags.save_types must be true *) + +open Typedtree;; + +type annotation = + | Ti_pat of pattern + | Ti_expr of expression + | Ti_class of class_expr + | Ti_mod of module_expr + | An_call of Location.t * Annot.call + | An_ident of Location.t * string * Annot.ident +;; + +val record : annotation -> unit;; +val record_phrase : Location.t -> unit;; +val dump : string option -> unit;; + +val get_location : annotation -> Location.t;; +val get_info : unit -> annotation list;; diff --git a/res_syntax/compiler-libs-406/subst.ml b/res_syntax/compiler-libs-406/subst.ml new file mode 100644 index 0000000000..fb5f901945 --- /dev/null +++ b/res_syntax/compiler-libs-406/subst.ml @@ -0,0 +1,491 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Substitutions *) + +open Misc +open Path +open Types +open Btype + +type type_replacement = + | Path of Path.t + | Type_function of { params : type_expr list; body : type_expr } + +module PathMap = Map.Make(Path) + +type t = + { types: type_replacement PathMap.t; + modules: Path.t PathMap.t; + modtypes: (Ident.t, module_type) Tbl.t; + for_saving: bool; + } + +let identity = + { types = PathMap.empty; + modules = PathMap.empty; + modtypes = Tbl.empty; + for_saving = false; + } + +let add_type_path id p s = { s with types = PathMap.add id (Path p) s.types } +let add_type id p s = add_type_path (Pident id) p s + +let add_type_function id ~params ~body s = + { s with types = PathMap.add id (Type_function { params; body }) s.types } + +let add_module_path id p s = { s with modules = PathMap.add id p s.modules } +let add_module id p s = add_module_path (Pident id) p s + +let add_modtype id ty s = { s with modtypes = Tbl.add id ty s.modtypes } + +let for_saving s = { s with for_saving = true } + +let loc s x = + if s.for_saving && not !Clflags.keep_locs then Location.none else x + +let remove_loc = + let open Ast_mapper in + {default_mapper with location = (fun _this _loc -> Location.none)} + +let is_not_doc = function + | ({Location.txt = "ocaml.doc"}, _) -> false + | ({Location.txt = "ocaml.text"}, _) -> false + | ({Location.txt = "doc"}, _) -> false + | ({Location.txt = "text"}, _) -> false + | _ -> true + +let attrs s x = + let x = + if s.for_saving && not !Clflags.keep_docs then + List.filter is_not_doc x + else x + in + if s.for_saving && not !Clflags.keep_locs + then remove_loc.Ast_mapper.attributes remove_loc x + else x + +let rec module_path s path = + try PathMap.find path s.modules + with Not_found -> + match path with + | Pident _ -> path + | Pdot(p, n, pos) -> + Pdot(module_path s p, n, pos) + | Papply(p1, p2) -> + Papply(module_path s p1, module_path s p2) + +let modtype_path s = function + Pident id as p -> + begin try + match Tbl.find id s.modtypes with + | Mty_ident p -> p + | _ -> fatal_error "Subst.modtype_path" + with Not_found -> p end + | Pdot(p, n, pos) -> + Pdot(module_path s p, n, pos) + 
| Papply _ -> + fatal_error "Subst.modtype_path" + +let type_path s path = + match PathMap.find path s.types with + | Path p -> p + | Type_function _ -> assert false + | exception Not_found -> + match path with + | Pident _ -> path + | Pdot(p, n, pos) -> + Pdot(module_path s p, n, pos) + | Papply _ -> + fatal_error "Subst.type_path" + +let type_path s p = + match Path.constructor_typath p with + | Regular p -> type_path s p + | Cstr (ty_path, cstr) -> Pdot(type_path s ty_path, cstr, nopos) + | LocalExt _ -> type_path s p + | Ext (p, cstr) -> Pdot(module_path s p, cstr, nopos) + +let to_subst_by_type_function s p = + match PathMap.find p s.types with + | Path _ -> false + | Type_function _ -> true + | exception Not_found -> false + +(* Special type ids for saved signatures *) + +let new_id = ref (-1) +let reset_for_saving () = new_id := -1 + +let newpersty desc = + decr new_id; + { desc = desc; level = generic_level; id = !new_id } + +(* ensure that all occurrences of 'Tvar None' are physically shared *) +let tvar_none = Tvar None +let tunivar_none = Tunivar None +let norm = function + | Tvar None -> tvar_none + | Tunivar None -> tunivar_none + | d -> d + +let ctype_apply_env_empty = ref (fun _ -> assert false) + +(* Similar to [Ctype.nondep_type_rec]. *) +let rec typexp s ty = + let ty = repr ty in + match ty.desc with + Tvar _ | Tunivar _ as desc -> + if s.for_saving || ty.id < 0 then + let ty' = + if s.for_saving then newpersty (norm desc) + else newty2 ty.level desc + in + save_desc ty desc; ty.desc <- Tsubst ty'; ty' + else ty + | Tsubst ty -> + ty + | Tfield (m, k, _t1, _t2) when not s.for_saving && m = dummy_method + && field_kind_repr k <> Fabsent && (repr ty).level < generic_level -> + (* do not copy the type of self when it is not generalized *) + ty +(* cannot do it, since it would omit substitution + | Tvariant row when not (static_row row) -> + ty +*) + | _ -> + let desc = ty.desc in + save_desc ty desc; + let tm = row_of_type ty in + let has_fixed_row = + not (is_Tconstr ty) && is_constr_row ~allow_ident:false tm in + (* Make a stub *) + let ty' = if s.for_saving then newpersty (Tvar None) else newgenvar () in + ty.desc <- Tsubst ty'; + ty'.desc <- + begin if has_fixed_row then + match tm.desc with (* PR#7348 *) + Tconstr (Pdot(m,i,pos), tl, _abbrev) -> + let i' = String.sub i 0 (String.length i - 4) in + Tconstr(type_path s (Pdot(m,i',pos)), tl, ref Mnil) + | _ -> assert false + else match desc with + | Tconstr (p, args, _abbrev) -> + let args = List.map (typexp s) args in + begin match PathMap.find p s.types with + | exception Not_found -> Tconstr(type_path s p, args, ref Mnil) + | Path _ -> Tconstr(type_path s p, args, ref Mnil) + | Type_function { params; body } -> + (!ctype_apply_env_empty params body args).desc + end + | Tpackage(p, n, tl) -> + Tpackage(modtype_path s p, n, List.map (typexp s) tl) + | Tobject (t1, name) -> + Tobject (typexp s t1, + ref (match !name with + None -> None + | Some (p, tl) -> + if to_subst_by_type_function s p + then None + else Some (type_path s p, List.map (typexp s) tl))) + | Tvariant row -> + let row = row_repr row in + let more = repr row.row_more in + (* We must substitute in a subtle way *) + (* Tsubst takes a tuple containing the row var and the variant *) + begin match more.desc with + Tsubst {desc = Ttuple [_;ty2]} -> + (* This variant type has been already copied *) + ty.desc <- Tsubst ty2; (* avoid Tlink in the new type *) + Tlink ty2 + | _ -> + let dup = + s.for_saving || more.level = generic_level || static_row row || + match 
more.desc with Tconstr _ -> true | _ -> false in + (* Various cases for the row variable *) + let more' = + match more.desc with + Tsubst ty -> ty + | Tconstr _ | Tnil -> typexp s more + | Tunivar _ | Tvar _ -> + save_desc more more.desc; + if s.for_saving then newpersty (norm more.desc) else + if dup && is_Tvar more then newgenty more.desc else more + | _ -> assert false + in + (* Register new type first for recursion *) + more.desc <- Tsubst(newgenty(Ttuple[more';ty'])); + (* Return a new copy *) + let row = + copy_row (typexp s) true row (not dup) more' in + match row.row_name with + | Some (p, tl) -> + Tvariant {row with row_name = + if to_subst_by_type_function s p + then None + else Some (type_path s p, tl)} + | None -> + Tvariant row + end + | Tfield(_label, kind, _t1, t2) when field_kind_repr kind = Fabsent -> + Tlink (typexp s t2) + | _ -> copy_type_desc (typexp s) desc + end; + ty' + +(* + Always make a copy of the type. If this is not done, type levels + might not be correct. +*) +let type_expr s ty = + let ty' = typexp s ty in + cleanup_types (); + ty' + +let label_declaration s l = + { + ld_id = l.ld_id; + ld_mutable = l.ld_mutable; + ld_type = typexp s l.ld_type; + ld_loc = loc s l.ld_loc; + ld_attributes = attrs s l.ld_attributes; + } + +let constructor_arguments s = function + | Cstr_tuple l -> + Cstr_tuple (List.map (typexp s) l) + | Cstr_record l -> + Cstr_record (List.map (label_declaration s) l) + +let constructor_declaration s c = + { + cd_id = c.cd_id; + cd_args = constructor_arguments s c.cd_args; + cd_res = may_map (typexp s) c.cd_res; + cd_loc = loc s c.cd_loc; + cd_attributes = attrs s c.cd_attributes; + } + +let type_declaration s decl = + let decl = + { type_params = List.map (typexp s) decl.type_params; + type_arity = decl.type_arity; + type_kind = + begin match decl.type_kind with + Type_abstract -> Type_abstract + | Type_variant cstrs -> + Type_variant (List.map (constructor_declaration s) cstrs) + | Type_record(lbls, rep) -> + Type_record (List.map (label_declaration s) lbls, rep) + | Type_open -> Type_open + end; + type_manifest = + begin + match decl.type_manifest with + None -> None + | Some ty -> Some(typexp s ty) + end; + type_private = decl.type_private; + type_variance = decl.type_variance; + type_newtype_level = None; + type_loc = loc s decl.type_loc; + type_attributes = attrs s decl.type_attributes; + type_immediate = decl.type_immediate; + type_unboxed = decl.type_unboxed; + } + in + cleanup_types (); + decl + +let class_signature s sign = + { csig_self = typexp s sign.csig_self; + csig_vars = + Vars.map (function (m, v, t) -> (m, v, typexp s t)) sign.csig_vars; + csig_concr = sign.csig_concr; + csig_inher = + List.map (fun (p, tl) -> (type_path s p, List.map (typexp s) tl)) + sign.csig_inher; + } + +let rec class_type s = + function + Cty_constr (p, tyl, cty) -> + Cty_constr (type_path s p, List.map (typexp s) tyl, class_type s cty) + | Cty_signature sign -> + Cty_signature (class_signature s sign) + | Cty_arrow (l, ty, cty) -> + Cty_arrow (l, typexp s ty, class_type s cty) + +let class_declaration s decl = + let decl = + { cty_params = List.map (typexp s) decl.cty_params; + cty_variance = decl.cty_variance; + cty_type = class_type s decl.cty_type; + cty_path = type_path s decl.cty_path; + cty_new = + begin match decl.cty_new with + None -> None + | Some ty -> Some (typexp s ty) + end; + cty_loc = loc s decl.cty_loc; + cty_attributes = attrs s decl.cty_attributes; + } + in + (* Do not clean up if saving: next is cltype_declaration *) + if not 
s.for_saving then cleanup_types (); + decl + +let cltype_declaration s decl = + let decl = + { clty_params = List.map (typexp s) decl.clty_params; + clty_variance = decl.clty_variance; + clty_type = class_type s decl.clty_type; + clty_path = type_path s decl.clty_path; + clty_loc = loc s decl.clty_loc; + clty_attributes = attrs s decl.clty_attributes; + } + in + (* Do clean up even if saving: type_declaration may be recursive *) + cleanup_types (); + decl + +let class_type s cty = + let cty = class_type s cty in + cleanup_types (); + cty + +let value_description s descr = + { val_type = type_expr s descr.val_type; + val_kind = descr.val_kind; + val_loc = loc s descr.val_loc; + val_attributes = attrs s descr.val_attributes; + } + +let extension_constructor s ext = + let ext = + { ext_type_path = type_path s ext.ext_type_path; + ext_type_params = List.map (typexp s) ext.ext_type_params; + ext_args = constructor_arguments s ext.ext_args; + ext_ret_type = may_map (typexp s) ext.ext_ret_type; + ext_private = ext.ext_private; + ext_attributes = attrs s ext.ext_attributes; + ext_loc = if s.for_saving then Location.none else ext.ext_loc; } + in + cleanup_types (); + ext + +let rec rename_bound_idents s idents = function + [] -> (List.rev idents, s) + | Sig_type(id, _, _) :: sg -> + let id' = Ident.rename id in + rename_bound_idents (add_type id (Pident id') s) (id' :: idents) sg + | Sig_module(id, _, _) :: sg -> + let id' = Ident.rename id in + rename_bound_idents (add_module id (Pident id') s) (id' :: idents) sg + | Sig_modtype(id, _) :: sg -> + let id' = Ident.rename id in + rename_bound_idents (add_modtype id (Mty_ident(Pident id')) s) + (id' :: idents) sg + | (Sig_class(id, _, _) | Sig_class_type(id, _, _)) :: sg -> + (* cheat and pretend they are types cf. PR#6650 *) + let id' = Ident.rename id in + rename_bound_idents (add_type id (Pident id') s) (id' :: idents) sg + | (Sig_value(id, _) | Sig_typext(id, _, _)) :: sg -> + let id' = Ident.rename id in + rename_bound_idents s (id' :: idents) sg + +let rec modtype s = function + Mty_ident p as mty -> + begin match p with + Pident id -> + begin try Tbl.find id s.modtypes with Not_found -> mty end + | Pdot(p, n, pos) -> + Mty_ident(Pdot(module_path s p, n, pos)) + | Papply _ -> + fatal_error "Subst.modtype" + end + | Mty_signature sg -> + Mty_signature(signature s sg) + | Mty_functor(id, arg, res) -> + let id' = Ident.rename id in + Mty_functor(id', may_map (modtype s) arg, + modtype (add_module id (Pident id') s) res) + | Mty_alias(pres, p) -> + Mty_alias(pres, module_path s p) + +and signature s sg = + (* Components of signature may be mutually recursive (e.g. type declarations + or class and type declarations), so first build global renaming + substitution... *) + let (new_idents, s') = rename_bound_idents s [] sg in + (* ... 
then apply it to each signature component in turn *) + List.map2 (signature_component s') sg new_idents + +and signature_component s comp newid = + match comp with + Sig_value(_id, d) -> + Sig_value(newid, value_description s d) + | Sig_type(_id, d, rs) -> + Sig_type(newid, type_declaration s d, rs) + | Sig_typext(_id, ext, es) -> + Sig_typext(newid, extension_constructor s ext, es) + | Sig_module(_id, d, rs) -> + Sig_module(newid, module_declaration s d, rs) + | Sig_modtype(_id, d) -> + Sig_modtype(newid, modtype_declaration s d) + | Sig_class(_id, d, rs) -> + Sig_class(newid, class_declaration s d, rs) + | Sig_class_type(_id, d, rs) -> + Sig_class_type(newid, cltype_declaration s d, rs) + +and module_declaration s decl = + { + md_type = modtype s decl.md_type; + md_attributes = attrs s decl.md_attributes; + md_loc = loc s decl.md_loc; + } + +and modtype_declaration s decl = + { + mtd_type = may_map (modtype s) decl.mtd_type; + mtd_attributes = attrs s decl.mtd_attributes; + mtd_loc = loc s decl.mtd_loc; + } + +(* For every binding k |-> d of m1, add k |-> f d to m2 + and return resulting merged map. *) + +let merge_tbls f m1 m2 = + Tbl.fold (fun k d accu -> Tbl.add k (f d) accu) m1 m2 + +let merge_path_maps f m1 m2 = + PathMap.fold (fun k d accu -> PathMap.add k (f d) accu) m1 m2 + +let type_replacement s = function + | Path p -> Path (type_path s p) + | Type_function { params; body } -> + let params = List.map (typexp s) params in + let body = typexp s body in + Type_function { params; body } + +(* Composition of substitutions: + apply (compose s1 s2) x = apply s2 (apply s1 x) *) + +let compose s1 s2 = + { types = merge_path_maps (type_replacement s2) s1.types s2.types; + modules = merge_path_maps (module_path s2) s1.modules s2.modules; + modtypes = merge_tbls (modtype s2) s1.modtypes s2.modtypes; + for_saving = s1.for_saving || s2.for_saving; + } diff --git a/res_syntax/compiler-libs-406/subst.mli b/res_syntax/compiler-libs-406/subst.mli new file mode 100644 index 0000000000..f81cb4da56 --- /dev/null +++ b/res_syntax/compiler-libs-406/subst.mli @@ -0,0 +1,70 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Substitutions *) + +open Types + +type t + +(* + Substitutions are used to translate a type from one context to + another. This requires substituting paths for identifiers, and + possibly also lowering the level of non-generic variables so that + they are inferior to the maximum level of the new context. + + Substitutions can also be used to create a "clean" copy of a type. + Indeed, non-variable node of a type are duplicated, with their + levels set to generic level. That way, the resulting type is + well-formed (decreasing levels), even if the original one was not. 
+*) + +val identity: t + +val add_type: Ident.t -> Path.t -> t -> t +val add_type_path: Path.t -> Path.t -> t -> t +val add_type_function: + Path.t -> params:type_expr list -> body:type_expr -> t -> t +val add_module: Ident.t -> Path.t -> t -> t +val add_module_path: Path.t -> Path.t -> t -> t +val add_modtype: Ident.t -> module_type -> t -> t +val for_saving: t -> t +val reset_for_saving: unit -> unit + +val module_path: t -> Path.t -> Path.t +val type_path: t -> Path.t -> Path.t + +val type_expr: t -> type_expr -> type_expr +val class_type: t -> class_type -> class_type +val value_description: t -> value_description -> value_description +val type_declaration: t -> type_declaration -> type_declaration +val extension_constructor: + t -> extension_constructor -> extension_constructor +val class_declaration: t -> class_declaration -> class_declaration +val cltype_declaration: t -> class_type_declaration -> class_type_declaration +val modtype: t -> module_type -> module_type +val signature: t -> signature -> signature +val modtype_declaration: t -> modtype_declaration -> modtype_declaration +val module_declaration: t -> module_declaration -> module_declaration +val typexp : t -> Types.type_expr -> Types.type_expr +val class_signature: t -> class_signature -> class_signature + +(* Composition of substitutions: + apply (compose s1 s2) x = apply s2 (apply s1 x) *) +val compose: t -> t -> t + +(* A forward reference to be filled in ctype.ml. *) +val ctype_apply_env_empty: + (type_expr list -> type_expr -> type_expr list -> type_expr) ref diff --git a/res_syntax/compiler-libs-406/syntaxerr.ml b/res_syntax/compiler-libs-406/syntaxerr.ml new file mode 100644 index 0000000000..0bb55ab676 --- /dev/null +++ b/res_syntax/compiler-libs-406/syntaxerr.ml @@ -0,0 +1,87 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1997 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Auxiliary type for reporting syntax errors *) + +type error = + Unclosed of Location.t * string * Location.t * string + | Expecting of Location.t * string + | Not_expecting of Location.t * string + | Applicative_path of Location.t + | Variable_in_scope of Location.t * string + | Other of Location.t + | Ill_formed_ast of Location.t * string + | Invalid_package_type of Location.t * string + +exception Error of error +exception Escape_error + +let prepare_error = function + | Unclosed(opening_loc, opening, closing_loc, closing) -> + Location.errorf ~loc:closing_loc + ~sub:[ + Location.errorf ~loc:opening_loc + "This '%s' might be unmatched" opening + ] + ~if_highlight: + (Printf.sprintf "Syntax error: '%s' expected, \ + the highlighted '%s' might be unmatched" + closing opening) + "Syntax error: '%s' expected" closing + + | Expecting (loc, nonterm) -> + Location.errorf ~loc "Syntax error: %s expected." nonterm + | Not_expecting (loc, nonterm) -> + Location.errorf ~loc "Syntax error: %s not expected." nonterm + | Applicative_path loc -> + Location.errorf ~loc + "Syntax error: applicative paths of the form F(X).t \ + are not supported when the option -no-app-func is set." 
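
A hedged sketch of building and composing a substitution with the Subst interface above (the identifiers "a" and "b" are made up for illustration):

  let id_a = Ident.create "a"
  let id_b = Ident.create "b"
  let s = Subst.add_type id_a (Path.Pident id_b) Subst.identity
  let _s' = Subst.compose s Subst.identity   (* apply _s' x = apply identity (apply s x) *)
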
+ | Variable_in_scope (loc, var) -> + Location.errorf ~loc + "In this scoped type, variable '%s \ + is reserved for the local type %s." + var var + | Other loc -> + Location.errorf ~loc "Syntax error" + | Ill_formed_ast (loc, s) -> + Location.errorf ~loc "broken invariant in parsetree: %s" s + | Invalid_package_type (loc, s) -> + Location.errorf ~loc "invalid package type: %s" s + +let () = + Location.register_error_of_exn + (function + | Error err -> Some (prepare_error err) + | _ -> None + ) + + +let report_error ppf err = + Location.report_error ppf (prepare_error err) + +let location_of_error = function + | Unclosed(l,_,_,_) + | Applicative_path l + | Variable_in_scope(l,_) + | Other l + | Not_expecting (l, _) + | Ill_formed_ast (l, _) + | Invalid_package_type (l, _) + | Expecting (l, _) -> l + + +let ill_formed_ast loc s = + raise (Error (Ill_formed_ast (loc, s))) diff --git a/res_syntax/compiler-libs-406/syntaxerr.mli b/res_syntax/compiler-libs-406/syntaxerr.mli new file mode 100644 index 0000000000..319eb57948 --- /dev/null +++ b/res_syntax/compiler-libs-406/syntaxerr.mli @@ -0,0 +1,37 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1997 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Auxiliary type for reporting syntax errors *) + +open Format + +type error = + Unclosed of Location.t * string * Location.t * string + | Expecting of Location.t * string + | Not_expecting of Location.t * string + | Applicative_path of Location.t + | Variable_in_scope of Location.t * string + | Other of Location.t + | Ill_formed_ast of Location.t * string + | Invalid_package_type of Location.t * string + +exception Error of error +exception Escape_error + +val report_error: formatter -> error -> unit + (** @deprecated Use {!Location.error_of_exn}, {!Location.report_error}. *) + +val location_of_error: error -> Location.t +val ill_formed_ast: Location.t -> string -> 'a diff --git a/res_syntax/compiler-libs-406/tast_mapper.ml b/res_syntax/compiler-libs-406/tast_mapper.ml new file mode 100644 index 0000000000..36e33e3f2f --- /dev/null +++ b/res_syntax/compiler-libs-406/tast_mapper.ml @@ -0,0 +1,700 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Alain Frisch, LexiFi *) +(* *) +(* Copyright 2015 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
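
A minimal sketch of the Syntaxerr error type above (Location.none stands in for a real source location; the error value is illustrative):

  let err = Syntaxerr.Expecting (Location.none, "expression")
  let _loc = Syntaxerr.location_of_error err
  let () = try raise (Syntaxerr.Error err) with Syntaxerr.Error _ -> ()
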
*) +(* *) +(**************************************************************************) + +open Asttypes +open Typedtree + +(* TODO: add 'methods' for location, attribute, extension, + open_description, include_declaration, include_description *) + +type mapper = + { + case: mapper -> case -> case; + cases: mapper -> case list -> case list; + class_declaration: mapper -> class_declaration -> class_declaration; + class_description: mapper -> class_description -> class_description; + class_expr: mapper -> class_expr -> class_expr; + class_field: mapper -> class_field -> class_field; + class_signature: mapper -> class_signature -> class_signature; + class_structure: mapper -> class_structure -> class_structure; + class_type: mapper -> class_type -> class_type; + class_type_declaration: mapper -> class_type_declaration -> + class_type_declaration; + class_type_field: mapper -> class_type_field -> class_type_field; + env: mapper -> Env.t -> Env.t; + expr: mapper -> expression -> expression; + extension_constructor: mapper -> extension_constructor -> + extension_constructor; + module_binding: mapper -> module_binding -> module_binding; + module_coercion: mapper -> module_coercion -> module_coercion; + module_declaration: mapper -> module_declaration -> module_declaration; + module_expr: mapper -> module_expr -> module_expr; + module_type: mapper -> module_type -> module_type; + module_type_declaration: + mapper -> module_type_declaration -> module_type_declaration; + package_type: mapper -> package_type -> package_type; + pat: mapper -> pattern -> pattern; + row_field: mapper -> row_field -> row_field; + object_field: mapper -> object_field -> object_field; + signature: mapper -> signature -> signature; + signature_item: mapper -> signature_item -> signature_item; + structure: mapper -> structure -> structure; + structure_item: mapper -> structure_item -> structure_item; + typ: mapper -> core_type -> core_type; + type_declaration: mapper -> type_declaration -> type_declaration; + type_declarations: mapper -> (rec_flag * type_declaration list) -> + (rec_flag * type_declaration list); + type_extension: mapper -> type_extension -> type_extension; + type_kind: mapper -> type_kind -> type_kind; + value_binding: mapper -> value_binding -> value_binding; + value_bindings: mapper -> (rec_flag * value_binding list) -> + (rec_flag * value_binding list); + value_description: mapper -> value_description -> value_description; + with_constraint: mapper -> with_constraint -> with_constraint; + } + +let id x = x +let tuple2 f1 f2 (x, y) = (f1 x, f2 y) +let tuple3 f1 f2 f3 (x, y, z) = (f1 x, f2 y, f3 z) +let opt f = function None -> None | Some x -> Some (f x) + +let structure sub {str_items; str_type; str_final_env} = + { + str_items = List.map (sub.structure_item sub) str_items; + str_final_env = sub.env sub str_final_env; + str_type; + } + +let class_infos sub f x = + {x with + ci_params = List.map (tuple2 (sub.typ sub) id) x.ci_params; + ci_expr = f x.ci_expr; + } + +let module_type_declaration sub x = + let mtd_type = opt (sub.module_type sub) x.mtd_type in + {x with mtd_type} + +let module_declaration sub x = + let md_type = sub.module_type sub x.md_type in + {x with md_type} + +let include_infos f x = {x with incl_mod = f x.incl_mod} + +let class_type_declaration sub x = + class_infos sub (sub.class_type sub) x + +let class_declaration sub x = + class_infos sub (sub.class_expr sub) x + +let structure_item sub {str_desc; str_loc; str_env} = + let str_env = sub.env sub str_env in + let str_desc = + match 
str_desc with + | Tstr_eval (exp, attrs) -> Tstr_eval (sub.expr sub exp, attrs) + | Tstr_value (rec_flag, list) -> + let (rec_flag, list) = sub.value_bindings sub (rec_flag, list) in + Tstr_value (rec_flag, list) + | Tstr_primitive v -> Tstr_primitive (sub.value_description sub v) + | Tstr_type (rec_flag, list) -> + let (rec_flag, list) = sub.type_declarations sub (rec_flag, list) in + Tstr_type (rec_flag, list) + | Tstr_typext te -> Tstr_typext (sub.type_extension sub te) + | Tstr_exception ext -> Tstr_exception (sub.extension_constructor sub ext) + | Tstr_module mb -> Tstr_module (sub.module_binding sub mb) + | Tstr_recmodule list -> + Tstr_recmodule (List.map (sub.module_binding sub) list) + | Tstr_modtype x -> Tstr_modtype (sub.module_type_declaration sub x) + | Tstr_class list -> + Tstr_class + (List.map (tuple2 (sub.class_declaration sub) id) list) + | Tstr_class_type list -> + Tstr_class_type + (List.map (tuple3 id id (sub.class_type_declaration sub)) list) + | Tstr_include incl -> + Tstr_include (include_infos (sub.module_expr sub) incl) + | Tstr_open _ + | Tstr_attribute _ as d -> d + in + {str_desc; str_env; str_loc} + +let value_description sub x = + let val_desc = sub.typ sub x.val_desc in + {x with val_desc} + +let label_decl sub x = + let ld_type = sub.typ sub x.ld_type in + {x with ld_type} + +let constructor_args sub = function + | Cstr_tuple l -> Cstr_tuple (List.map (sub.typ sub) l) + | Cstr_record l -> Cstr_record (List.map (label_decl sub) l) + +let constructor_decl sub cd = + let cd_args = constructor_args sub cd.cd_args in + let cd_res = opt (sub.typ sub) cd.cd_res in + {cd with cd_args; cd_res} + +let type_kind sub = function + | Ttype_abstract -> Ttype_abstract + | Ttype_variant list -> Ttype_variant (List.map (constructor_decl sub) list) + | Ttype_record list -> Ttype_record (List.map (label_decl sub) list) + | Ttype_open -> Ttype_open + +let type_declaration sub x = + let typ_cstrs = + List.map + (tuple3 (sub.typ sub) (sub.typ sub) id) + x.typ_cstrs + in + let typ_kind = sub.type_kind sub x.typ_kind in + let typ_manifest = opt (sub.typ sub) x.typ_manifest in + let typ_params = List.map (tuple2 (sub.typ sub) id) x.typ_params in + {x with typ_cstrs; typ_kind; typ_manifest; typ_params} + +let type_declarations sub (rec_flag, list) = + (rec_flag, List.map (sub.type_declaration sub) list) + +let type_extension sub x = + let tyext_params = List.map (tuple2 (sub.typ sub) id) x.tyext_params in + let tyext_constructors = + List.map (sub.extension_constructor sub) x.tyext_constructors + in + {x with tyext_constructors; tyext_params} + +let extension_constructor sub x = + let ext_kind = + match x.ext_kind with + Text_decl(ctl, cto) -> + Text_decl(constructor_args sub ctl, opt (sub.typ sub) cto) + | Text_rebind _ as d -> d + in + {x with ext_kind} + +let pat sub x = + let extra = function + | Tpat_type _ + | Tpat_unpack as d -> d + | Tpat_open (path,loc,env) -> Tpat_open (path, loc, sub.env sub env) + | Tpat_constraint ct -> Tpat_constraint (sub.typ sub ct) + in + let pat_env = sub.env sub x.pat_env in + let pat_extra = List.map (tuple3 extra id id) x.pat_extra in + let pat_desc = + match x.pat_desc with + | Tpat_any + | Tpat_var _ + | Tpat_constant _ as d -> d + | Tpat_tuple l -> Tpat_tuple (List.map (sub.pat sub) l) + | Tpat_construct (loc, cd, l) -> + Tpat_construct (loc, cd, List.map (sub.pat sub) l) + | Tpat_variant (l, po, rd) -> Tpat_variant (l, opt (sub.pat sub) po, rd) + | Tpat_record (l, closed) -> + Tpat_record (List.map (tuple3 id id (sub.pat sub)) l, closed) + | 
Tpat_array l -> Tpat_array (List.map (sub.pat sub) l) + | Tpat_or (p1, p2, rd) -> + Tpat_or (sub.pat sub p1, sub.pat sub p2, rd) + | Tpat_alias (p, id, s) -> Tpat_alias (sub.pat sub p, id, s) + | Tpat_lazy p -> Tpat_lazy (sub.pat sub p) + in + {x with pat_extra; pat_desc; pat_env} + +let expr sub x = + let extra = function + | Texp_constraint cty -> + Texp_constraint (sub.typ sub cty) + | Texp_coerce (cty1, cty2) -> + Texp_coerce (opt (sub.typ sub) cty1, sub.typ sub cty2) + | Texp_open (ovf, path, loc, env) -> + Texp_open (ovf, path, loc, sub.env sub env) + | Texp_newtype _ as d -> d + | Texp_poly cto -> Texp_poly (opt (sub.typ sub) cto) + in + let exp_extra = List.map (tuple3 extra id id) x.exp_extra in + let exp_env = sub.env sub x.exp_env in + let exp_desc = + match x.exp_desc with + | Texp_ident _ + | Texp_constant _ as d -> d + | Texp_let (rec_flag, list, exp) -> + let (rec_flag, list) = sub.value_bindings sub (rec_flag, list) in + Texp_let (rec_flag, list, sub.expr sub exp) + | Texp_function { arg_label; param; cases; partial; } -> + Texp_function { arg_label; param; cases = sub.cases sub cases; + partial; } + | Texp_apply (exp, list) -> + Texp_apply ( + sub.expr sub exp, + List.map (tuple2 id (opt (sub.expr sub))) list + ) + | Texp_match (exp, cases, exn_cases, p) -> + Texp_match ( + sub.expr sub exp, + sub.cases sub cases, + sub.cases sub exn_cases, + p + ) + | Texp_try (exp, cases) -> + Texp_try ( + sub.expr sub exp, + sub.cases sub cases + ) + | Texp_tuple list -> + Texp_tuple (List.map (sub.expr sub) list) + | Texp_construct (lid, cd, args) -> + Texp_construct (lid, cd, List.map (sub.expr sub) args) + | Texp_variant (l, expo) -> + Texp_variant (l, opt (sub.expr sub) expo) + | Texp_record { fields; representation; extended_expression } -> + let fields = Array.map (function + | label, Kept t -> label, Kept t + | label, Overridden (lid, exp) -> + label, Overridden (lid, sub.expr sub exp)) + fields + in + Texp_record { + fields; representation; + extended_expression = opt (sub.expr sub) extended_expression; + } + | Texp_field (exp, lid, ld) -> + Texp_field (sub.expr sub exp, lid, ld) + | Texp_setfield (exp1, lid, ld, exp2) -> + Texp_setfield ( + sub.expr sub exp1, + lid, + ld, + sub.expr sub exp2 + ) + | Texp_array list -> + Texp_array (List.map (sub.expr sub) list) + | Texp_ifthenelse (exp1, exp2, expo) -> + Texp_ifthenelse ( + sub.expr sub exp1, + sub.expr sub exp2, + opt (sub.expr sub) expo + ) + | Texp_sequence (exp1, exp2) -> + Texp_sequence ( + sub.expr sub exp1, + sub.expr sub exp2 + ) + | Texp_while (exp1, exp2) -> + Texp_while ( + sub.expr sub exp1, + sub.expr sub exp2 + ) + | Texp_for (id, p, exp1, exp2, dir, exp3) -> + Texp_for ( + id, + p, + sub.expr sub exp1, + sub.expr sub exp2, + dir, + sub.expr sub exp3 + ) + | Texp_send (exp, meth, expo) -> + Texp_send + ( + sub.expr sub exp, + meth, + opt (sub.expr sub) expo + ) + | Texp_new _ + | Texp_instvar _ as d -> d + | Texp_setinstvar (path1, path2, id, exp) -> + Texp_setinstvar ( + path1, + path2, + id, + sub.expr sub exp + ) + | Texp_override (path, list) -> + Texp_override ( + path, + List.map (tuple3 id id (sub.expr sub)) list + ) + | Texp_letmodule (id, s, mexpr, exp) -> + Texp_letmodule ( + id, + s, + sub.module_expr sub mexpr, + sub.expr sub exp + ) + | Texp_letexception (cd, exp) -> + Texp_letexception ( + sub.extension_constructor sub cd, + sub.expr sub exp + ) + | Texp_assert exp -> + Texp_assert (sub.expr sub exp) + | Texp_lazy exp -> + Texp_lazy (sub.expr sub exp) + | Texp_object (cl, sl) -> + Texp_object 
(sub.class_structure sub cl, sl) + | Texp_pack mexpr -> + Texp_pack (sub.module_expr sub mexpr) + | Texp_unreachable -> + Texp_unreachable + | Texp_extension_constructor _ as e -> + e + in + {x with exp_extra; exp_desc; exp_env} + + +let package_type sub x = + let pack_fields = List.map (tuple2 id (sub.typ sub)) x.pack_fields in + {x with pack_fields} + +let signature sub x = + let sig_final_env = sub.env sub x.sig_final_env in + let sig_items = List.map (sub.signature_item sub) x.sig_items in + {x with sig_items; sig_final_env} + +let signature_item sub x = + let sig_env = sub.env sub x.sig_env in + let sig_desc = + match x.sig_desc with + | Tsig_value v -> + Tsig_value (sub.value_description sub v) + | Tsig_type (rec_flag, list) -> + let (rec_flag, list) = sub.type_declarations sub (rec_flag, list) in + Tsig_type (rec_flag, list) + | Tsig_typext te -> + Tsig_typext (sub.type_extension sub te) + | Tsig_exception ext -> + Tsig_exception (sub.extension_constructor sub ext) + | Tsig_module x -> + Tsig_module (sub.module_declaration sub x) + | Tsig_recmodule list -> + Tsig_recmodule (List.map (sub.module_declaration sub) list) + | Tsig_modtype x -> + Tsig_modtype (sub.module_type_declaration sub x) + | Tsig_include incl -> + Tsig_include (include_infos (sub.module_type sub) incl) + | Tsig_class list -> + Tsig_class (List.map (sub.class_description sub) list) + | Tsig_class_type list -> + Tsig_class_type + (List.map (sub.class_type_declaration sub) list) + | Tsig_open _ + | Tsig_attribute _ as d -> d + in + {x with sig_desc; sig_env} + +let class_description sub x = + class_infos sub (sub.class_type sub) x + +let module_type sub x = + let mty_env = sub.env sub x.mty_env in + let mty_desc = + match x.mty_desc with + | Tmty_ident _ + | Tmty_alias _ as d -> d + | Tmty_signature sg -> Tmty_signature (sub.signature sub sg) + | Tmty_functor (id, s, mtype1, mtype2) -> + Tmty_functor ( + id, + s, + opt (sub.module_type sub) mtype1, + sub.module_type sub mtype2 + ) + | Tmty_with (mtype, list) -> + Tmty_with ( + sub.module_type sub mtype, + List.map (tuple3 id id (sub.with_constraint sub)) list + ) + | Tmty_typeof mexpr -> + Tmty_typeof (sub.module_expr sub mexpr) + in + {x with mty_desc; mty_env} + +let with_constraint sub = function + | Twith_type decl -> Twith_type (sub.type_declaration sub decl) + | Twith_typesubst decl -> Twith_typesubst (sub.type_declaration sub decl) + | Twith_module _ + | Twith_modsubst _ as d -> d + +let module_coercion sub = function + | Tcoerce_none -> Tcoerce_none + | Tcoerce_functor (c1,c2) -> + Tcoerce_functor (sub.module_coercion sub c1, sub.module_coercion sub c2) + | Tcoerce_alias (p, c1) -> + Tcoerce_alias (p, sub.module_coercion sub c1) + | Tcoerce_structure (l1, l2) -> + let l1' = List.map (fun (i,c) -> i, sub.module_coercion sub c) l1 in + let l2' = + List.map (fun (id,i,c) -> id, i, sub.module_coercion sub c) l2 + in + Tcoerce_structure (l1', l2') + | Tcoerce_primitive pc -> + Tcoerce_primitive {pc with pc_env = sub.env sub pc.pc_env} + +let module_expr sub x = + let mod_env = sub.env sub x.mod_env in + let mod_desc = + match x.mod_desc with + | Tmod_ident _ as d -> d + | Tmod_structure st -> Tmod_structure (sub.structure sub st) + | Tmod_functor (id, s, mtype, mexpr) -> + Tmod_functor ( + id, + s, + opt (sub.module_type sub) mtype, + sub.module_expr sub mexpr + ) + | Tmod_apply (mexp1, mexp2, c) -> + Tmod_apply ( + sub.module_expr sub mexp1, + sub.module_expr sub mexp2, + sub.module_coercion sub c + ) + | Tmod_constraint (mexpr, mt, Tmodtype_implicit, c) -> + 
Tmod_constraint (sub.module_expr sub mexpr, mt, Tmodtype_implicit, + sub.module_coercion sub c) + | Tmod_constraint (mexpr, mt, Tmodtype_explicit mtype, c) -> + Tmod_constraint ( + sub.module_expr sub mexpr, + mt, + Tmodtype_explicit (sub.module_type sub mtype), + sub.module_coercion sub c + ) + | Tmod_unpack (exp, mty) -> + Tmod_unpack + ( + sub.expr sub exp, + mty + ) + in + {x with mod_desc; mod_env} + +let module_binding sub x = + let mb_expr = sub.module_expr sub x.mb_expr in + {x with mb_expr} + +let class_expr sub x = + let cl_env = sub.env sub x.cl_env in + let cl_desc = + match x.cl_desc with + | Tcl_constraint (cl, clty, vals, meths, concrs) -> + Tcl_constraint ( + sub.class_expr sub cl, + opt (sub.class_type sub) clty, + vals, + meths, + concrs + ) + | Tcl_structure clstr -> + Tcl_structure (sub.class_structure sub clstr) + | Tcl_fun (label, pat, priv, cl, partial) -> + Tcl_fun ( + label, + sub.pat sub pat, + List.map (tuple3 id id (sub.expr sub)) priv, + sub.class_expr sub cl, + partial + ) + | Tcl_apply (cl, args) -> + Tcl_apply ( + sub.class_expr sub cl, + List.map (tuple2 id (opt (sub.expr sub))) args + ) + | Tcl_let (rec_flag, value_bindings, ivars, cl) -> + let (rec_flag, value_bindings) = + sub.value_bindings sub (rec_flag, value_bindings) + in + Tcl_let ( + rec_flag, + value_bindings, + List.map (tuple3 id id (sub.expr sub)) ivars, + sub.class_expr sub cl + ) + | Tcl_ident (path, lid, tyl) -> + Tcl_ident (path, lid, List.map (sub.typ sub) tyl) + | Tcl_open (ovf, p, lid, env, e) -> + Tcl_open (ovf, p, lid, sub.env sub env, sub.class_expr sub e) + in + {x with cl_desc; cl_env} + +let class_type sub x = + let cltyp_env = sub.env sub x.cltyp_env in + let cltyp_desc = + match x.cltyp_desc with + | Tcty_signature csg -> Tcty_signature (sub.class_signature sub csg) + | Tcty_constr (path, lid, list) -> + Tcty_constr ( + path, + lid, + List.map (sub.typ sub) list + ) + | Tcty_arrow (label, ct, cl) -> + Tcty_arrow + (label, + sub.typ sub ct, + sub.class_type sub cl + ) + | Tcty_open (ovf, p, lid, env, e) -> + Tcty_open (ovf, p, lid, sub.env sub env, sub.class_type sub e) + in + {x with cltyp_desc; cltyp_env} + +let class_signature sub x = + let csig_self = sub.typ sub x.csig_self in + let csig_fields = List.map (sub.class_type_field sub) x.csig_fields in + {x with csig_self; csig_fields} + +let class_type_field sub x = + let ctf_desc = + match x.ctf_desc with + | Tctf_inherit ct -> + Tctf_inherit (sub.class_type sub ct) + | Tctf_val (s, mut, virt, ct) -> + Tctf_val (s, mut, virt, sub.typ sub ct) + | Tctf_method (s, priv, virt, ct) -> + Tctf_method (s, priv, virt, sub.typ sub ct) + | Tctf_constraint (ct1, ct2) -> + Tctf_constraint (sub.typ sub ct1, sub.typ sub ct2) + | Tctf_attribute _ as d -> d + in + {x with ctf_desc} + +let typ sub x = + let ctyp_env = sub.env sub x.ctyp_env in + let ctyp_desc = + match x.ctyp_desc with + | Ttyp_any + | Ttyp_var _ as d -> d + | Ttyp_arrow (label, ct1, ct2) -> + Ttyp_arrow (label, sub.typ sub ct1, sub.typ sub ct2) + | Ttyp_tuple list -> Ttyp_tuple (List.map (sub.typ sub) list) + | Ttyp_constr (path, lid, list) -> + Ttyp_constr (path, lid, List.map (sub.typ sub) list) + | Ttyp_object (list, closed) -> + Ttyp_object ((List.map (sub.object_field sub) list), closed) + | Ttyp_class (path, lid, list) -> + Ttyp_class + (path, + lid, + List.map (sub.typ sub) list + ) + | Ttyp_alias (ct, s) -> + Ttyp_alias (sub.typ sub ct, s) + | Ttyp_variant (list, closed, labels) -> + Ttyp_variant (List.map (sub.row_field sub) list, closed, labels) + | Ttyp_poly (sl, 
ct) -> + Ttyp_poly (sl, sub.typ sub ct) + | Ttyp_package pack -> + Ttyp_package (sub.package_type sub pack) + in + {x with ctyp_desc; ctyp_env} + +let class_structure sub x = + let cstr_self = sub.pat sub x.cstr_self in + let cstr_fields = List.map (sub.class_field sub) x.cstr_fields in + {x with cstr_self; cstr_fields} + +let row_field sub = function + | Ttag (label, attrs, b, list) -> + Ttag (label, attrs, b, List.map (sub.typ sub) list) + | Tinherit ct -> Tinherit (sub.typ sub ct) + +let object_field sub = function + | OTtag (label, attrs, ct) -> + OTtag (label, attrs, (sub.typ sub ct)) + | OTinherit ct -> OTinherit (sub.typ sub ct) + +let class_field_kind sub = function + | Tcfk_virtual ct -> Tcfk_virtual (sub.typ sub ct) + | Tcfk_concrete (ovf, e) -> Tcfk_concrete (ovf, sub.expr sub e) + +let class_field sub x = + let cf_desc = + match x.cf_desc with + | Tcf_inherit (ovf, cl, super, vals, meths) -> + Tcf_inherit (ovf, sub.class_expr sub cl, super, vals, meths) + | Tcf_constraint (cty, cty') -> + Tcf_constraint ( + sub.typ sub cty, + sub.typ sub cty' + ) + | Tcf_val (s, mf, id, k, b) -> + Tcf_val (s, mf, id, class_field_kind sub k, b) + | Tcf_method (s, priv, k) -> + Tcf_method (s, priv, class_field_kind sub k) + | Tcf_initializer exp -> + Tcf_initializer (sub.expr sub exp) + | Tcf_attribute _ as d -> d + in + {x with cf_desc} + +let value_bindings sub (rec_flag, list) = + (rec_flag, List.map (sub.value_binding sub) list) + +let cases sub l = + List.map (sub.case sub) l + +let case sub {c_lhs; c_guard; c_rhs} = + { + c_lhs = sub.pat sub c_lhs; + c_guard = opt (sub.expr sub) c_guard; + c_rhs = sub.expr sub c_rhs; + } + +let value_binding sub x = + let vb_pat = sub.pat sub x.vb_pat in + let vb_expr = sub.expr sub x.vb_expr in + {x with vb_pat; vb_expr} + +let env _sub x = x + +let default = + { + case; + cases; + class_declaration; + class_description; + class_expr; + class_field; + class_signature; + class_structure; + class_type; + class_type_declaration; + class_type_field; + env; + expr; + extension_constructor; + module_binding; + module_coercion; + module_declaration; + module_expr; + module_type; + module_type_declaration; + package_type; + pat; + row_field; + object_field; + signature; + signature_item; + structure; + structure_item; + typ; + type_declaration; + type_declarations; + type_extension; + type_kind; + value_binding; + value_bindings; + value_description; + with_constraint; + } diff --git a/res_syntax/compiler-libs-406/tast_mapper.mli b/res_syntax/compiler-libs-406/tast_mapper.mli new file mode 100644 index 0000000000..2251fa5709 --- /dev/null +++ b/res_syntax/compiler-libs-406/tast_mapper.mli @@ -0,0 +1,68 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Alain Frisch, LexiFi *) +(* *) +(* Copyright 2015 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +open Asttypes +open Typedtree + +(** {1 A generic Typedtree mapper} *) + +type mapper = + { + case: mapper -> case -> case; + cases: mapper -> case list -> case list; + class_declaration: mapper -> class_declaration -> class_declaration; + class_description: mapper -> class_description -> class_description; + class_expr: mapper -> class_expr -> class_expr; + class_field: mapper -> class_field -> class_field; + class_signature: mapper -> class_signature -> class_signature; + class_structure: mapper -> class_structure -> class_structure; + class_type: mapper -> class_type -> class_type; + class_type_declaration: mapper -> class_type_declaration -> + class_type_declaration; + class_type_field: mapper -> class_type_field -> class_type_field; + env: mapper -> Env.t -> Env.t; + expr: mapper -> expression -> expression; + extension_constructor: mapper -> extension_constructor -> + extension_constructor; + module_binding: mapper -> module_binding -> module_binding; + module_coercion: mapper -> module_coercion -> module_coercion; + module_declaration: mapper -> module_declaration -> module_declaration; + module_expr: mapper -> module_expr -> module_expr; + module_type: mapper -> module_type -> module_type; + module_type_declaration: + mapper -> module_type_declaration -> module_type_declaration; + package_type: mapper -> package_type -> package_type; + pat: mapper -> pattern -> pattern; + row_field: mapper -> row_field -> row_field; + object_field: mapper -> object_field -> object_field; + signature: mapper -> signature -> signature; + signature_item: mapper -> signature_item -> signature_item; + structure: mapper -> structure -> structure; + structure_item: mapper -> structure_item -> structure_item; + typ: mapper -> core_type -> core_type; + type_declaration: mapper -> type_declaration -> type_declaration; + type_declarations: mapper -> (rec_flag * type_declaration list) -> + (rec_flag * type_declaration list); + type_extension: mapper -> type_extension -> type_extension; + type_kind: mapper -> type_kind -> type_kind; + value_binding: mapper -> value_binding -> value_binding; + value_bindings: mapper -> (rec_flag * value_binding list) -> + (rec_flag * value_binding list); + value_description: mapper -> value_description -> value_description; + with_constraint: mapper -> with_constraint -> with_constraint; + } + + +val default: mapper diff --git a/res_syntax/compiler-libs-406/tbl.ml b/res_syntax/compiler-libs-406/tbl.ml new file mode 100644 index 0000000000..fa278b43bb --- /dev/null +++ b/res_syntax/compiler-libs-406/tbl.ml @@ -0,0 +1,123 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +type ('k, 'v) t = + Empty + | Node of ('k, 'v) t * 'k * 'v * ('k, 'v) t * int + +let empty = Empty + +let height = function + Empty -> 0 + | Node(_,_,_,_,h) -> h + +let create l x d r = + let hl = height l and hr = height r in + Node(l, x, d, r, (if hl >= hr then hl + 1 else hr + 1)) + +let bal l x d r = + let hl = height l and hr = height r in + if hl > hr + 1 then + match l with + | Node (ll, lv, ld, lr, _) when height ll >= height lr -> + create ll lv ld (create lr x d r) + | Node (ll, lv, ld, Node (lrl, lrv, lrd, lrr, _), _) -> + create (create ll lv ld lrl) lrv lrd (create lrr x d r) + | _ -> assert false + else if hr > hl + 1 then + match r with + | Node (rl, rv, rd, rr, _) when height rr >= height rl -> + create (create l x d rl) rv rd rr + | Node (Node (rll, rlv, rld, rlr, _), rv, rd, rr, _) -> + create (create l x d rll) rlv rld (create rlr rv rd rr) + | _ -> assert false + else + create l x d r + +let rec add x data = function + Empty -> + Node(Empty, x, data, Empty, 1) + | Node(l, v, d, r, h) -> + let c = compare x v in + if c = 0 then + Node(l, x, data, r, h) + else if c < 0 then + bal (add x data l) v d r + else + bal l v d (add x data r) + +let rec find x = function + Empty -> + raise Not_found + | Node(l, v, d, r, _) -> + let c = compare x v in + if c = 0 then d + else find x (if c < 0 then l else r) + +let rec find_str (x : string) = function + Empty -> + raise Not_found + | Node(l, v, d, r, _) -> + let c = compare x v in + if c = 0 then d + else find_str x (if c < 0 then l else r) + +let rec mem x = function + Empty -> false + | Node(l, v, _d, r, _) -> + let c = compare x v in + c = 0 || mem x (if c < 0 then l else r) + +let rec merge t1 t2 = + match (t1, t2) with + (Empty, t) -> t + | (t, Empty) -> t + | (Node(l1, v1, d1, r1, _h1), Node(l2, v2, d2, r2, _h2)) -> + bal l1 v1 d1 (bal (merge r1 l2) v2 d2 r2) + +let rec remove x = function + Empty -> + Empty + | Node(l, v, d, r, _h) -> + let c = compare x v in + if c = 0 then + merge l r + else if c < 0 then + bal (remove x l) v d r + else + bal l v d (remove x r) + +let rec iter f = function + Empty -> () + | Node(l, v, d, r, _) -> + iter f l; f v d; iter f r + +let rec map f = function + Empty -> Empty + | Node(l, v, d, r, h) -> Node(map f l, v, f v d, map f r, h) + +let rec fold f m accu = + match m with + | Empty -> accu + | Node(l, v, d, r, _) -> + fold f r (f v d (fold f l accu)) + +open Format + +let print print_key print_data ppf tbl = + let print_tbl ppf tbl = + iter (fun k d -> fprintf ppf "@[<2>%a ->@ %a;@]@ " print_key k print_data d) + tbl in + fprintf ppf "@[[[%a]]@]" print_tbl tbl diff --git a/res_syntax/compiler-libs-406/tbl.mli b/res_syntax/compiler-libs-406/tbl.mli new file mode 100644 index 0000000000..d23b959c72 --- /dev/null +++ b/res_syntax/compiler-libs-406/tbl.mli @@ -0,0 +1,34 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Association tables from any ordered type to any type. 
+ We use the generic ordering to compare keys. *) + +type ('k, 'v) t + +val empty: ('k, 'v) t +val add: 'k -> 'v -> ('k, 'v) t -> ('k, 'v) t +val find: 'k -> ('k, 'v) t -> 'v +val find_str: string -> (string, 'v) t -> 'v +val mem: 'k -> ('k, 'v) t -> bool +val remove: 'k -> ('k, 'v) t -> ('k, 'v) t +val iter: ('k -> 'v -> unit) -> ('k, 'v) t -> unit +val map: ('k -> 'v1 -> 'v2) -> ('k, 'v1) t -> ('k, 'v2) t +val fold: ('k -> 'v -> 'acc -> 'acc) -> ('k, 'v) t -> 'acc -> 'acc + +open Format + +val print: (formatter -> 'k -> unit) -> (formatter -> 'v -> unit) -> + formatter -> ('k, 'v) t -> unit diff --git a/res_syntax/compiler-libs-406/typeclass.ml b/res_syntax/compiler-libs-406/typeclass.ml new file mode 100644 index 0000000000..371d2ad7d2 --- /dev/null +++ b/res_syntax/compiler-libs-406/typeclass.ml @@ -0,0 +1,1988 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Jerome Vouillon, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +open Parsetree +open Asttypes +open Path +open Types +open Typecore +open Typetexp +open Format + +type 'a class_info = { + cls_id : Ident.t; + cls_id_loc : string loc; + cls_decl : class_declaration; + cls_ty_id : Ident.t; + cls_ty_decl : class_type_declaration; + cls_obj_id : Ident.t; + cls_obj_abbr : type_declaration; + cls_typesharp_id : Ident.t; + cls_abbr : type_declaration; + cls_arity : int; + cls_pub_methods : string list; + cls_info : 'a; +} + +type class_type_info = { + clsty_ty_id : Ident.t; + clsty_id_loc : string loc; + clsty_ty_decl : class_type_declaration; + clsty_obj_id : Ident.t; + clsty_obj_abbr : type_declaration; + clsty_typesharp_id : Ident.t; + clsty_abbr : type_declaration; + clsty_info : Typedtree.class_type_declaration; +} + +type error = + Unconsistent_constraint of (type_expr * type_expr) list + | Field_type_mismatch of string * string * (type_expr * type_expr) list + | Structure_expected of class_type + | Cannot_apply of class_type + | Apply_wrong_label of arg_label + | Pattern_type_clash of type_expr + | Repeated_parameter + | Unbound_class_2 of Longident.t + | Unbound_class_type_2 of Longident.t + | Abbrev_type_clash of type_expr * type_expr * type_expr + | Constructor_type_mismatch of string * (type_expr * type_expr) list + | Virtual_class of bool * bool * string list * string list + | Parameter_arity_mismatch of Longident.t * int * int + | Parameter_mismatch of (type_expr * type_expr) list + | Bad_parameters of Ident.t * type_expr * type_expr + | Class_match_failure of Ctype.class_match_failure list + | Unbound_val of string + | Unbound_type_var of (formatter -> unit) * Ctype.closed_class_failure + | Make_nongen_seltype of type_expr + | Non_generalizable_class of Ident.t * Types.class_declaration + | Cannot_coerce_self of type_expr + | Non_collapsable_conjunction of + Ident.t * Types.class_declaration * (type_expr * type_expr) list + | Final_self_clash of (type_expr * type_expr) list + | Mutability_mismatch of string * mutable_flag + | No_overriding of string * string + | Duplicate of string * string + +exception Error of Location.t * Env.t * error +exception Error_forward of 
Location.error + +open Typedtree + +let ctyp desc typ env loc = + { ctyp_desc = desc; ctyp_type = typ; ctyp_loc = loc; ctyp_env = env; + ctyp_attributes = [] } + + (**********************) + (* Useful constants *) + (**********************) + + +(* + Self type have a dummy private method, thus preventing it to become + closed. +*) +let dummy_method = Btype.dummy_method + +(* + Path associated to the temporary class type of a class being typed + (its constructor is not available). +*) +let unbound_class = Path.Pident (Ident.create "*undef*") + + + (************************************) + (* Some operations on class types *) + (************************************) + + +(* Fully expand the head of a class type *) +let rec scrape_class_type = + function + Cty_constr (_, _, cty) -> scrape_class_type cty + | cty -> cty + +(* Generalize a class type *) +let rec generalize_class_type gen = + function + Cty_constr (_, params, cty) -> + List.iter gen params; + generalize_class_type gen cty + | Cty_signature {csig_self = sty; csig_vars = vars; csig_inher = inher} -> + gen sty; + Vars.iter (fun _ (_, _, ty) -> gen ty) vars; + List.iter (fun (_,tl) -> List.iter gen tl) inher + | Cty_arrow (_, ty, cty) -> + gen ty; + generalize_class_type gen cty + +let generalize_class_type vars = + let gen = if vars then Ctype.generalize else Ctype.generalize_structure in + generalize_class_type gen + +(* Return the virtual methods of a class type *) +let virtual_methods sign = + let (fields, _) = + Ctype.flatten_fields (Ctype.object_fields sign.Types.csig_self) + in + List.fold_left + (fun virt (lab, _, _) -> + if lab = dummy_method then virt else + if Concr.mem lab sign.csig_concr then virt else + lab::virt) + [] fields + +(* Return the constructor type associated to a class type *) +let rec constructor_type constr cty = + match cty with + Cty_constr (_, _, cty) -> + constructor_type constr cty + | Cty_signature _ -> + constr + | Cty_arrow (l, ty, cty) -> + Ctype.newty (Tarrow (l, ty, constructor_type constr cty, Cok)) + +let rec class_body cty = + match cty with + Cty_constr _ -> + cty (* Only class bodies can be abbreviated *) + | Cty_signature _ -> + cty + | Cty_arrow (_, _, cty) -> + class_body cty + +let extract_constraints cty = + let sign = Ctype.signature_of_class_type cty in + (Vars.fold (fun lab _ vars -> lab :: vars) sign.csig_vars [], + begin let (fields, _) = + Ctype.flatten_fields (Ctype.object_fields sign.csig_self) + in + List.fold_left + (fun meths (lab, _, _) -> + if lab = dummy_method then meths else lab::meths) + [] fields + end, + sign.csig_concr) + +let rec abbreviate_class_type path params cty = + match cty with + Cty_constr (_, _, _) | Cty_signature _ -> + Cty_constr (path, params, cty) + | Cty_arrow (l, ty, cty) -> + Cty_arrow (l, ty, abbreviate_class_type path params cty) + +(* Check that all type variables are generalizable *) +(* Use Env.empty to prevent expansion of recursively defined object types; + cf. 
typing-poly/poly.ml *) +let rec closed_class_type = + function + Cty_constr (_, params, _) -> + List.for_all (Ctype.closed_schema Env.empty) params + | Cty_signature sign -> + Ctype.closed_schema Env.empty sign.csig_self + && + Vars.fold (fun _ (_, _, ty) cc -> Ctype.closed_schema Env.empty ty && cc) + sign.csig_vars + true + | Cty_arrow (_, ty, cty) -> + Ctype.closed_schema Env.empty ty + && + closed_class_type cty + +let closed_class cty = + List.for_all (Ctype.closed_schema Env.empty) cty.cty_params + && + closed_class_type cty.cty_type + +let rec limited_generalize rv = + function + Cty_constr (_path, params, cty) -> + List.iter (Ctype.limited_generalize rv) params; + limited_generalize rv cty + | Cty_signature sign -> + Ctype.limited_generalize rv sign.csig_self; + Vars.iter (fun _ (_, _, ty) -> Ctype.limited_generalize rv ty) + sign.csig_vars; + List.iter (fun (_, tl) -> List.iter (Ctype.limited_generalize rv) tl) + sign.csig_inher + | Cty_arrow (_, ty, cty) -> + Ctype.limited_generalize rv ty; + limited_generalize rv cty + +(* Record a class type *) +let rc node = + Cmt_format.add_saved_type (Cmt_format.Partial_class_expr node); + Stypes.record (Stypes.Ti_class node); (* moved to genannot *) + node + + + (***********************************) + (* Primitives for typing classes *) + (***********************************) + + +(* Enter a value in the method environment only *) +let enter_met_env ?check loc lab kind ty val_env met_env par_env = + let (id, val_env) = + Env.enter_value lab {val_type = ty; val_kind = Val_unbound; + val_attributes = []; + Types.val_loc = loc} val_env + in + (id, val_env, + Env.add_value ?check id {val_type = ty; val_kind = kind; + val_attributes = []; + Types.val_loc = loc} met_env, + Env.add_value id {val_type = ty; val_kind = Val_unbound; + val_attributes = []; + Types.val_loc = loc} par_env) + +(* Enter an instance variable in the environment *) +let enter_val cl_num vars inh lab mut virt ty val_env met_env par_env loc = + let instance = Ctype.instance val_env in + let (id, virt) = + try + let (id, mut', virt', ty') = Vars.find lab !vars in + if mut' <> mut then + raise (Error(loc, val_env, Mutability_mismatch(lab, mut))); + Ctype.unify val_env (instance ty) (instance ty'); + (if not inh then Some id else None), + (if virt' = Concrete then virt' else virt) + with + Ctype.Unify tr -> + raise (Error(loc, val_env, + Field_type_mismatch("instance variable", lab, tr))) + | Not_found -> None, virt + in + let (id, _, _, _) as result = + match id with Some id -> (id, val_env, met_env, par_env) + | None -> + enter_met_env Location.none lab (Val_ivar (mut, cl_num)) + ty val_env met_env par_env + in + vars := Vars.add lab (id, mut, virt, ty) !vars; + result + +let concr_vals vars = + Vars.fold + (fun id (_, vf, _) s -> if vf = Virtual then s else Concr.add id s) + vars Concr.empty + +let inheritance self_type env ovf concr_meths warn_vals loc parent = + match scrape_class_type parent with + Cty_signature cl_sig -> + + (* Methods *) + begin try + Ctype.unify env self_type cl_sig.csig_self + with Ctype.Unify trace -> + match trace with + _::_::_::({desc = Tfield(n, _, _, _)}, _)::rem -> + raise(Error(loc, env, Field_type_mismatch ("method", n, rem))) + | _ -> + assert false + end; + + (* Overriding *) + let over_meths = Concr.inter cl_sig.csig_concr concr_meths in + let concr_vals = concr_vals cl_sig.csig_vars in + let over_vals = Concr.inter concr_vals warn_vals in + begin match ovf with + Some Fresh -> + let cname = + match parent with + Cty_constr (p, _, _) -> 
Path.name p + | _ -> "inherited" + in + if not (Concr.is_empty over_meths) then + Location.prerr_warning loc + (Warnings.Method_override (cname :: Concr.elements over_meths)); + if not (Concr.is_empty over_vals) then + Location.prerr_warning loc + (Warnings.Instance_variable_override + (cname :: Concr.elements over_vals)); + | Some Override + when Concr.is_empty over_meths && Concr.is_empty over_vals -> + raise (Error(loc, env, No_overriding ("",""))) + | _ -> () + end; + + let concr_meths = Concr.union cl_sig.csig_concr concr_meths + and warn_vals = Concr.union concr_vals warn_vals in + + (cl_sig, concr_meths, warn_vals) + + | _ -> + raise(Error(loc, env, Structure_expected parent)) + +let virtual_method val_env meths self_type lab priv sty loc = + let (_, ty') = + Ctype.filter_self_method val_env lab priv meths self_type + in + let sty = Ast_helper.Typ.force_poly sty in + let cty = transl_simple_type val_env false sty in + let ty = cty.ctyp_type in + begin + try Ctype.unify val_env ty ty' with Ctype.Unify trace -> + raise(Error(loc, val_env, Field_type_mismatch ("method", lab, trace))); + end; + cty + +let delayed_meth_specs = ref [] + +let declare_method val_env meths self_type lab priv sty loc = + let (_, ty') = + Ctype.filter_self_method val_env lab priv meths self_type + in + let unif ty = + try Ctype.unify val_env ty ty' with Ctype.Unify trace -> + raise(Error(loc, val_env, Field_type_mismatch ("method", lab, trace))) + in + let sty = Ast_helper.Typ.force_poly sty in + match sty.ptyp_desc, priv with + Ptyp_poly ([],sty'), Public -> +(* TODO: we moved the [transl_simple_type_univars] outside of the lazy, +so that we can get an immediate value. Is that correct ? Ask Jacques. *) + let returned_cty = ctyp Ttyp_any (Ctype.newty Tnil) val_env loc in + delayed_meth_specs := + Warnings.mk_lazy (fun () -> + let cty = transl_simple_type_univars val_env sty' in + let ty = cty.ctyp_type in + unif ty; + returned_cty.ctyp_desc <- Ttyp_poly ([], cty); + returned_cty.ctyp_type <- ty; + ) :: + !delayed_meth_specs; + returned_cty + | _ -> + let cty = transl_simple_type val_env false sty in + let ty = cty.ctyp_type in + unif ty; + cty + +let type_constraint val_env sty sty' loc = + let cty = transl_simple_type val_env false sty in + let ty = cty.ctyp_type in + let cty' = transl_simple_type val_env false sty' in + let ty' = cty'.ctyp_type in + begin + try Ctype.unify val_env ty ty' with Ctype.Unify trace -> + raise(Error(loc, val_env, Unconsistent_constraint trace)); + end; + (cty, cty') + +let make_method loc cl_num expr = + let open Ast_helper in + let mkid s = mkloc s loc in + Exp.fun_ ~loc:expr.pexp_loc Nolabel None + (Pat.alias ~loc (Pat.var ~loc (mkid "self-*")) (mkid ("self-" ^ cl_num))) + expr + +(*******************************) + +let add_val lab (mut, virt, ty) val_sig = + let virt = + try + let (_mut', virt', _ty') = Vars.find lab val_sig in + if virt' = Concrete then virt' else virt + with Not_found -> virt + in + Vars.add lab (mut, virt, ty) val_sig + +let rec class_type_field env self_type meths arg ctf = + Builtin_attributes.warning_scope ctf.pctf_attributes + (fun () -> class_type_field_aux env self_type meths arg ctf) + +and class_type_field_aux env self_type meths + (fields, val_sig, concr_meths, inher) ctf = + + let loc = ctf.pctf_loc in + let mkctf desc = + { ctf_desc = desc; ctf_loc = loc; ctf_attributes = ctf.pctf_attributes } + in + match ctf.pctf_desc with + Pctf_inherit sparent -> + let parent = class_type env sparent in + let inher = + match parent.cltyp_type with + 
Cty_constr (p, tl, _) -> (p, tl) :: inher + | _ -> inher + in + let (cl_sig, concr_meths, _) = + inheritance self_type env None concr_meths Concr.empty sparent.pcty_loc + parent.cltyp_type + in + let val_sig = + Vars.fold add_val cl_sig.csig_vars val_sig in + (mkctf (Tctf_inherit parent) :: fields, + val_sig, concr_meths, inher) + + | Pctf_val ({txt=lab}, mut, virt, sty) -> + let cty = transl_simple_type env false sty in + let ty = cty.ctyp_type in + (mkctf (Tctf_val (lab, mut, virt, cty)) :: fields, + add_val lab (mut, virt, ty) val_sig, concr_meths, inher) + + | Pctf_method ({txt=lab}, priv, virt, sty) -> + let cty = + declare_method env meths self_type lab priv sty ctf.pctf_loc in + let concr_meths = + match virt with + | Concrete -> Concr.add lab concr_meths + | Virtual -> concr_meths + in + (mkctf (Tctf_method (lab, priv, virt, cty)) :: fields, + val_sig, concr_meths, inher) + + | Pctf_constraint (sty, sty') -> + let (cty, cty') = type_constraint env sty sty' ctf.pctf_loc in + (mkctf (Tctf_constraint (cty, cty')) :: fields, + val_sig, concr_meths, inher) + + | Pctf_attribute x -> + Builtin_attributes.warning_attribute x; + (mkctf (Tctf_attribute x) :: fields, + val_sig, concr_meths, inher) + + | Pctf_extension ext -> + raise (Error_forward (Builtin_attributes.error_of_extension ext)) + +and class_signature env {pcsig_self=sty; pcsig_fields=sign} = + let meths = ref Meths.empty in + let self_cty = transl_simple_type env false sty in + let self_cty = { self_cty with + ctyp_type = Ctype.expand_head env self_cty.ctyp_type } in + let self_type = self_cty.ctyp_type in + + (* Check that the binder is a correct type, and introduce a dummy + method preventing self type from being closed. *) + let dummy_obj = Ctype.newvar () in + Ctype.unify env (Ctype.filter_method env dummy_method Private dummy_obj) + (Ctype.newty (Ttuple [])); + begin try + Ctype.unify env self_type dummy_obj + with Ctype.Unify _ -> + raise(Error(sty.ptyp_loc, env, Pattern_type_clash self_type)) + end; + + (* Class type fields *) + let (rev_fields, val_sig, concr_meths, inher) = + Builtin_attributes.warning_scope [] + (fun () -> + List.fold_left (class_type_field env self_type meths) + ([], Vars.empty, Concr.empty, []) + sign + ) + in + let cty = {csig_self = self_type; + csig_vars = val_sig; + csig_concr = concr_meths; + csig_inher = inher} + in + { csig_self = self_cty; + csig_fields = List.rev rev_fields; + csig_type = cty; + } + +and class_type env scty = + Builtin_attributes.warning_scope scty.pcty_attributes + (fun () -> class_type_aux env scty) + +and class_type_aux env scty = + let cltyp desc typ = + { + cltyp_desc = desc; + cltyp_type = typ; + cltyp_loc = scty.pcty_loc; + cltyp_env = env; + cltyp_attributes = scty.pcty_attributes; + } + in + match scty.pcty_desc with + Pcty_constr (lid, styl) -> + let (path, decl) = Typetexp.find_class_type env scty.pcty_loc lid.txt in + if Path.same decl.clty_path unbound_class then + raise(Error(scty.pcty_loc, env, Unbound_class_type_2 lid.txt)); + let (params, clty) = + Ctype.instance_class decl.clty_params decl.clty_type + in + if List.length params <> List.length styl then + raise(Error(scty.pcty_loc, env, + Parameter_arity_mismatch (lid.txt, List.length params, + List.length styl))); + let ctys = List.map2 + (fun sty ty -> + let cty' = transl_simple_type env false sty in + let ty' = cty'.ctyp_type in + begin + try Ctype.unify env ty' ty with Ctype.Unify trace -> + raise(Error(sty.ptyp_loc, env, Parameter_mismatch trace)) + end; + cty' + ) styl params + in + let typ = 
Cty_constr (path, params, clty) in + cltyp (Tcty_constr ( path, lid , ctys)) typ + + | Pcty_signature pcsig -> + let clsig = class_signature env pcsig in + let typ = Cty_signature clsig.csig_type in + cltyp (Tcty_signature clsig) typ + + | Pcty_arrow (l, sty, scty) -> + let cty = transl_simple_type env false sty in + let ty = cty.ctyp_type in + let ty = + if Btype.is_optional l + then Ctype.newty (Tconstr(Predef.path_option,[ty], ref Mnil)) + else ty in + let clty = class_type env scty in + let typ = Cty_arrow (l, ty, clty.cltyp_type) in + cltyp (Tcty_arrow (l, cty, clty)) typ + + | Pcty_open (ovf, lid, e) -> + let (path, newenv) = !Typecore.type_open ovf env scty.pcty_loc lid in + let clty = class_type newenv e in + cltyp (Tcty_open (ovf, path, lid, newenv, clty)) clty.cltyp_type + + | Pcty_extension ext -> + raise (Error_forward (Builtin_attributes.error_of_extension ext)) + +let class_type env scty = + delayed_meth_specs := []; + let cty = class_type env scty in + List.iter Lazy.force (List.rev !delayed_meth_specs); + delayed_meth_specs := []; + cty + +(*******************************) + +let rec class_field self_loc cl_num self_type meths vars arg cf = + Builtin_attributes.warning_scope cf.pcf_attributes + (fun () -> class_field_aux self_loc cl_num self_type meths vars arg cf) + +and class_field_aux self_loc cl_num self_type meths vars + (val_env, met_env, par_env, fields, concr_meths, warn_vals, inher, + local_meths, local_vals) cf = + let loc = cf.pcf_loc in + let mkcf desc = + { cf_desc = desc; cf_loc = loc; cf_attributes = cf.pcf_attributes } + in + match cf.pcf_desc with + Pcf_inherit (ovf, sparent, super) -> + let parent = class_expr cl_num val_env par_env sparent in + let inher = + match parent.cl_type with + Cty_constr (p, tl, _) -> (p, tl) :: inher + | _ -> inher + in + let (cl_sig, concr_meths, warn_vals) = + inheritance self_type val_env (Some ovf) concr_meths warn_vals + sparent.pcl_loc parent.cl_type + in + (* Variables *) + let (val_env, met_env, par_env, inh_vars) = + Vars.fold + (fun lab info (val_env, met_env, par_env, inh_vars) -> + let mut, vr, ty = info in + let (id, val_env, met_env, par_env) = + enter_val cl_num vars true lab mut vr ty val_env met_env par_env + sparent.pcl_loc + in + (val_env, met_env, par_env, (lab, id) :: inh_vars)) + cl_sig.csig_vars (val_env, met_env, par_env, []) + in + (* Inherited concrete methods *) + let inh_meths = + Concr.fold (fun lab rem -> (lab, Ident.create lab)::rem) + cl_sig.csig_concr [] + in + (* Super *) + let (val_env, met_env, par_env,super) = + match super with + None -> + (val_env, met_env, par_env,None) + | Some {txt=name} -> + let (_id, val_env, met_env, par_env) = + enter_met_env ~check:(fun s -> Warnings.Unused_ancestor s) + sparent.pcl_loc name (Val_anc (inh_meths, cl_num)) self_type + val_env met_env par_env + in + (val_env, met_env, par_env,Some name) + in + (val_env, met_env, par_env, + lazy (mkcf (Tcf_inherit (ovf, parent, super, inh_vars, inh_meths))) + :: fields, + concr_meths, warn_vals, inher, local_meths, local_vals) + + | Pcf_val (lab, mut, Cfk_virtual styp) -> + if !Clflags.principal then Ctype.begin_def (); + let cty = Typetexp.transl_simple_type val_env false styp in + let ty = cty.ctyp_type in + if !Clflags.principal then begin + Ctype.end_def (); + Ctype.generalize_structure ty + end; + let (id, val_env, met_env', par_env) = + enter_val cl_num vars false lab.txt mut Virtual ty + val_env met_env par_env loc + in + (val_env, met_env', par_env, + lazy (mkcf (Tcf_val (lab, mut, id, Tcfk_virtual cty, + 
met_env == met_env'))) + :: fields, + concr_meths, warn_vals, inher, local_meths, local_vals) + + | Pcf_val (lab, mut, Cfk_concrete (ovf, sexp)) -> + if Concr.mem lab.txt local_vals then + raise(Error(loc, val_env, Duplicate ("instance variable", lab.txt))); + if Concr.mem lab.txt warn_vals then begin + if ovf = Fresh then + Location.prerr_warning lab.loc + (Warnings.Instance_variable_override[lab.txt]) + end else begin + if ovf = Override then + raise(Error(loc, val_env, + No_overriding ("instance variable", lab.txt))) + end; + if !Clflags.principal then Ctype.begin_def (); + let exp = + try type_exp val_env sexp with Ctype.Unify [(ty, _)] -> + raise(Error(loc, val_env, Make_nongen_seltype ty)) + in + if !Clflags.principal then begin + Ctype.end_def (); + Ctype.generalize_structure exp.exp_type + end; + let (id, val_env, met_env', par_env) = + enter_val cl_num vars false lab.txt mut Concrete exp.exp_type + val_env met_env par_env loc + in + (val_env, met_env', par_env, + lazy (mkcf (Tcf_val (lab, mut, id, + Tcfk_concrete (ovf, exp), met_env == met_env'))) + :: fields, + concr_meths, Concr.add lab.txt warn_vals, inher, local_meths, + Concr.add lab.txt local_vals) + + | Pcf_method (lab, priv, Cfk_virtual sty) -> + let cty = virtual_method val_env meths self_type lab.txt priv sty loc in + (val_env, met_env, par_env, + lazy (mkcf(Tcf_method (lab, priv, Tcfk_virtual cty))) + ::fields, + concr_meths, warn_vals, inher, local_meths, local_vals) + + | Pcf_method (lab, priv, Cfk_concrete (ovf, expr)) -> + let expr = + match expr.pexp_desc with + | Pexp_poly _ -> expr + | _ -> Ast_helper.Exp.poly ~loc:expr.pexp_loc expr None + in + if Concr.mem lab.txt local_meths then + raise(Error(loc, val_env, Duplicate ("method", lab.txt))); + if Concr.mem lab.txt concr_meths then begin + if ovf = Fresh then + Location.prerr_warning loc (Warnings.Method_override [lab.txt]) + end else begin + if ovf = Override then + raise(Error(loc, val_env, No_overriding("method", lab.txt))) + end; + let (_, ty) = + Ctype.filter_self_method val_env lab.txt priv meths self_type + in + begin try match expr.pexp_desc with + Pexp_poly (sbody, sty) -> + begin match sty with None -> () + | Some sty -> + let sty = Ast_helper.Typ.force_poly sty in + let cty' = Typetexp.transl_simple_type val_env false sty in + let ty' = cty'.ctyp_type in + Ctype.unify val_env ty' ty + end; + begin match (Ctype.repr ty).desc with + Tvar _ -> + let ty' = Ctype.newvar () in + Ctype.unify val_env (Ctype.newty (Tpoly (ty', []))) ty; + Ctype.unify val_env (type_approx val_env sbody) ty' + | Tpoly (ty1, tl) -> + let _, ty1' = Ctype.instance_poly false tl ty1 in + let ty2 = type_approx val_env sbody in + Ctype.unify val_env ty2 ty1' + | _ -> assert false + end + | _ -> assert false + with Ctype.Unify trace -> + raise(Error(loc, val_env, + Field_type_mismatch ("method", lab.txt, trace))) + end; + let meth_expr = make_method self_loc cl_num expr in + (* backup variables for Pexp_override *) + let vars_local = !vars in + + let field = + Warnings.mk_lazy + (fun () -> + (* Read the generalized type *) + let (_, ty) = Meths.find lab.txt !meths in + let meth_type = + Btype.newgenty (Tarrow(Nolabel, self_type, ty, Cok)) in + Ctype.raise_nongen_level (); + vars := vars_local; + let texp = type_expect met_env meth_expr meth_type in + Ctype.end_def (); + mkcf (Tcf_method (lab, priv, Tcfk_concrete (ovf, texp))) + ) + in + (val_env, met_env, par_env, field::fields, + Concr.add lab.txt concr_meths, warn_vals, inher, + Concr.add lab.txt local_meths, local_vals) + + | 
Pcf_constraint (sty, sty') -> + let (cty, cty') = type_constraint val_env sty sty' loc in + (val_env, met_env, par_env, + lazy (mkcf (Tcf_constraint (cty, cty'))) :: fields, + concr_meths, warn_vals, inher, local_meths, local_vals) + + | Pcf_initializer expr -> + let expr = make_method self_loc cl_num expr in + let vars_local = !vars in + let field = + lazy begin + Ctype.raise_nongen_level (); + let meth_type = + Ctype.newty + (Tarrow (Nolabel, self_type, + Ctype.instance_def Predef.type_unit, Cok)) in + vars := vars_local; + let texp = type_expect met_env expr meth_type in + Ctype.end_def (); + mkcf (Tcf_initializer texp) + end in + (val_env, met_env, par_env, field::fields, concr_meths, warn_vals, + inher, local_meths, local_vals) + | Pcf_attribute x -> + Builtin_attributes.warning_attribute x; + (val_env, met_env, par_env, + lazy (mkcf (Tcf_attribute x)) :: fields, + concr_meths, warn_vals, inher, local_meths, local_vals) + | Pcf_extension ext -> + raise (Error_forward (Builtin_attributes.error_of_extension ext)) + +and class_structure cl_num final val_env met_env loc + { pcstr_self = spat; pcstr_fields = str } = + (* Environment for substructures *) + let par_env = met_env in + + (* Location of self. Used for locations of self arguments *) + let self_loc = {spat.ppat_loc with Location.loc_ghost = true} in + + (* Self type, with a dummy method preventing it from being closed/escaped. *) + let self_type = Ctype.newvar () in + Ctype.unify val_env + (Ctype.filter_method val_env dummy_method Private self_type) + (Ctype.newty (Ttuple [])); + + (* Private self is used for private method calls *) + let private_self = if final then Ctype.newvar () else self_type in + + (* Self binder *) + let (pat, meths, vars, val_env, meth_env, par_env) = + type_self_pattern cl_num private_self val_env met_env par_env spat + in + let public_self = pat.pat_type in + + (* Check that the binder has a correct type *) + let ty = + if final then Ctype.newty (Tobject (Ctype.newvar(), ref None)) + else self_type in + begin try Ctype.unify val_env public_self ty with + Ctype.Unify _ -> + raise(Error(spat.ppat_loc, val_env, Pattern_type_clash public_self)) + end; + let get_methods ty = + (fst (Ctype.flatten_fields + (Ctype.object_fields (Ctype.expand_head val_env ty)))) in + if final then begin + (* Copy known information to still empty self_type *) + List.iter + (fun (lab,kind,ty) -> + let k = + if Btype.field_kind_repr kind = Fpresent then Public else Private in + try Ctype.unify val_env ty + (Ctype.filter_method val_env lab k self_type) + with _ -> assert false) + (get_methods public_self) + end; + + (* Typing of class fields *) + let (_, _, _, fields, concr_meths, _, inher, _local_meths, _local_vals) = + Builtin_attributes.warning_scope [] + (fun () -> + List.fold_left (class_field self_loc cl_num self_type meths vars) + (val_env, meth_env, par_env, [], Concr.empty, Concr.empty, [], + Concr.empty, Concr.empty) + str + ) + in + Ctype.unify val_env self_type (Ctype.newvar ()); + let sign = + {csig_self = public_self; + csig_vars = Vars.map (fun (_id, mut, vr, ty) -> (mut, vr, ty)) !vars; + csig_concr = concr_meths; + csig_inher = inher} in + let methods = get_methods self_type in + let priv_meths = + List.filter (fun (_,kind,_) -> Btype.field_kind_repr kind <> Fpresent) + methods in + if final then begin + (* Unify private_self and a copy of self_type. 
self_type will not + be modified after this point *) + Ctype.close_object self_type; + let mets = virtual_methods {sign with csig_self = self_type} in + let vals = + Vars.fold + (fun name (_mut, vr, _ty) l -> if vr = Virtual then name :: l else l) + sign.csig_vars [] in + if mets <> [] || vals <> [] then + raise(Error(loc, val_env, Virtual_class(true, final, mets, vals))); + let self_methods = + List.fold_right + (fun (lab,kind,ty) rem -> + if lab = dummy_method then + (* allow public self and private self to be unified *) + match Btype.field_kind_repr kind with + Fvar r -> Btype.set_kind r Fabsent; rem + | _ -> rem + else + Ctype.newty(Tfield(lab, Btype.copy_kind kind, ty, rem))) + methods (Ctype.newty Tnil) in + begin try + Ctype.unify val_env private_self + (Ctype.newty (Tobject(self_methods, ref None))); + Ctype.unify val_env public_self self_type + with Ctype.Unify trace -> raise(Error(loc, val_env, Final_self_clash trace)) + end; + end; + + (* Typing of method bodies *) + (* if !Clflags.principal then *) begin + let ms = !meths in + (* Generalize the spine of methods accessed through self *) + Meths.iter (fun _ (_,ty) -> Ctype.generalize_spine ty) ms; + meths := + Meths.map (fun (id,ty) -> (id, Ctype.generic_instance val_env ty)) ms; + (* But keep levels correct on the type of self *) + Meths.iter (fun _ (_,ty) -> Ctype.unify val_env ty (Ctype.newvar ())) ms + end; + let fields = List.map Lazy.force (List.rev fields) in + let meths = Meths.map (function (id, _ty) -> id) !meths in + + (* Check for private methods made public *) + let pub_meths' = + List.filter (fun (_,kind,_) -> Btype.field_kind_repr kind = Fpresent) + (get_methods public_self) in + let names = List.map (fun (x,_,_) -> x) in + let l1 = names priv_meths and l2 = names pub_meths' in + let added = List.filter (fun x -> List.mem x l1) l2 in + if added <> [] then + Location.prerr_warning loc (Warnings.Implicit_public_methods added); + let sign = if final then sign else + {sign with Types.csig_self = Ctype.expand_head val_env public_self} in + { + cstr_self = pat; + cstr_fields = fields; + cstr_type = sign; + cstr_meths = meths}, sign (* redondant, since already in cstr_type *) + +and class_expr cl_num val_env met_env scl = + Builtin_attributes.warning_scope scl.pcl_attributes + (fun () -> class_expr_aux cl_num val_env met_env scl) + +and class_expr_aux cl_num val_env met_env scl = + match scl.pcl_desc with + Pcl_constr (lid, styl) -> + let (path, decl) = Typetexp.find_class val_env scl.pcl_loc lid.txt in + if Path.same decl.cty_path unbound_class then + raise(Error(scl.pcl_loc, val_env, Unbound_class_2 lid.txt)); + let tyl = List.map + (fun sty -> transl_simple_type val_env false sty) + styl + in + let (params, clty) = + Ctype.instance_class decl.cty_params decl.cty_type + in + let clty' = abbreviate_class_type path params clty in + if List.length params <> List.length tyl then + raise(Error(scl.pcl_loc, val_env, + Parameter_arity_mismatch (lid.txt, List.length params, + List.length tyl))); + List.iter2 + (fun cty' ty -> + let ty' = cty'.ctyp_type in + try Ctype.unify val_env ty' ty with Ctype.Unify trace -> + raise(Error(cty'.ctyp_loc, val_env, Parameter_mismatch trace))) + tyl params; + let cl = + rc {cl_desc = Tcl_ident (path, lid, tyl); + cl_loc = scl.pcl_loc; + cl_type = clty'; + cl_env = val_env; + cl_attributes = scl.pcl_attributes; + } + in + let (vals, meths, concrs) = extract_constraints clty in + rc {cl_desc = Tcl_constraint (cl, None, vals, meths, concrs); + cl_loc = scl.pcl_loc; + cl_type = clty'; + cl_env = 
val_env; + cl_attributes = []; (* attributes are kept on the inner cl node *) + } + | Pcl_structure cl_str -> + let (desc, ty) = + class_structure cl_num false val_env met_env scl.pcl_loc cl_str in + rc {cl_desc = Tcl_structure desc; + cl_loc = scl.pcl_loc; + cl_type = Cty_signature ty; + cl_env = val_env; + cl_attributes = scl.pcl_attributes; + } + | Pcl_fun (l, Some default, spat, sbody) -> + let loc = default.pexp_loc in + let open Ast_helper in + let scases = [ + Exp.case + (Pat.construct ~loc + (mknoloc (Longident.(Ldot (Lident "*predef*", "Some")))) + (Some (Pat.var ~loc (mknoloc "*sth*")))) + (Exp.ident ~loc (mknoloc (Longident.Lident "*sth*"))); + + Exp.case + (Pat.construct ~loc + (mknoloc (Longident.(Ldot (Lident "*predef*", "None")))) + None) + default; + ] + in + let smatch = + Exp.match_ ~loc (Exp.ident ~loc (mknoloc (Longident.Lident "*opt*"))) + scases + in + let sfun = + Cl.fun_ ~loc:scl.pcl_loc + l None + (Pat.var ~loc (mknoloc "*opt*")) + (Cl.let_ ~loc:scl.pcl_loc Nonrecursive [Vb.mk spat smatch] sbody) + (* Note: we don't put the '#default' attribute, as it + is not detected for class-level let bindings. See #5975.*) + in + class_expr cl_num val_env met_env sfun + | Pcl_fun (l, None, spat, scl') -> + if !Clflags.principal then Ctype.begin_def (); + let (pat, pv, val_env', met_env) = + Typecore.type_class_arg_pattern cl_num val_env met_env l spat + in + if !Clflags.principal then begin + Ctype.end_def (); + iter_pattern (fun {pat_type=ty} -> Ctype.generalize_structure ty) pat + end; + let pv = + List.map + begin fun (id, id_loc, id', _ty) -> + let path = Pident id' in + (* do not mark the value as being used *) + let vd = Env.find_value path val_env' in + (id, id_loc, + {exp_desc = + Texp_ident(path, mknoloc (Longident.Lident (Ident.name id)), vd); + exp_loc = Location.none; exp_extra = []; + exp_type = Ctype.instance val_env' vd.val_type; + exp_attributes = []; (* check *) + exp_env = val_env'}) + end + pv + in + let not_function = function + Cty_arrow _ -> false + | _ -> true + in + let partial = + let dummy = type_exp val_env (Ast_helper.Exp.unreachable ()) in + Typecore.check_partial val_env pat.pat_type pat.pat_loc + [{c_lhs = pat; c_guard = None; c_rhs = dummy}] + in + Ctype.raise_nongen_level (); + let cl = class_expr cl_num val_env' met_env scl' in + Ctype.end_def (); + if Btype.is_optional l && not_function cl.cl_type then + Location.prerr_warning pat.pat_loc + Warnings.Unerasable_optional_argument; + rc {cl_desc = Tcl_fun (l, pat, pv, cl, partial); + cl_loc = scl.pcl_loc; + cl_type = Cty_arrow + (l, Ctype.instance_def pat.pat_type, cl.cl_type); + cl_env = val_env; + cl_attributes = scl.pcl_attributes; + } + | Pcl_apply (scl', sargs) -> + assert (sargs <> []); + if !Clflags.principal then Ctype.begin_def (); + let cl = class_expr cl_num val_env met_env scl' in + if !Clflags.principal then begin + Ctype.end_def (); + generalize_class_type false cl.cl_type; + end; + let rec nonopt_labels ls ty_fun = + match ty_fun with + | Cty_arrow (l, _, ty_res) -> + if Btype.is_optional l then nonopt_labels ls ty_res + else nonopt_labels (l::ls) ty_res + | _ -> ls + in + let ignore_labels = + !Clflags.classic || + let labels = nonopt_labels [] cl.cl_type in + List.length labels = List.length sargs && + List.for_all (fun (l,_) -> l = Nolabel) sargs && + List.exists (fun l -> l <> Nolabel) labels && + begin + Location.prerr_warning + cl.cl_loc + (Warnings.Labels_omitted + (List.map Printtyp.string_of_label + (List.filter ((<>) Nolabel) labels))); + true + end + in + let rec 
type_args args omitted ty_fun ty_fun0 sargs more_sargs = + match ty_fun, ty_fun0 with + | Cty_arrow (l, ty, ty_fun), Cty_arrow (_, ty0, ty_fun0) + when sargs <> [] || more_sargs <> [] -> + let name = Btype.label_name l + and optional = Btype.is_optional l in + let sargs, more_sargs, arg = + if ignore_labels && not (Btype.is_optional l) then begin + match sargs, more_sargs with + (l', sarg0)::_, _ -> + raise(Error(sarg0.pexp_loc, val_env, Apply_wrong_label l')) + | _, (l', sarg0)::more_sargs -> + if l <> l' && l' <> Nolabel then + raise(Error(sarg0.pexp_loc, val_env, + Apply_wrong_label l')) + else ([], more_sargs, + Some (type_argument val_env sarg0 ty ty0)) + | _ -> + assert false + end else try + let (l', sarg0, sargs, more_sargs) = + try + let (l', sarg0, sargs1, sargs2) = + Btype.extract_label name sargs + in (l', sarg0, sargs1 @ sargs2, more_sargs) + with Not_found -> + let (l', sarg0, sargs1, sargs2) = + Btype.extract_label name more_sargs + in (l', sarg0, sargs @ sargs1, sargs2) + in + if not optional && Btype.is_optional l' then + Location.prerr_warning sarg0.pexp_loc + (Warnings.Nonoptional_label (Printtyp.string_of_label l)); + sargs, more_sargs, + if not optional || Btype.is_optional l' then + Some (type_argument val_env sarg0 ty ty0) + else + let ty' = extract_option_type val_env ty + and ty0' = extract_option_type val_env ty0 in + let arg = type_argument val_env sarg0 ty' ty0' in + Some (option_some arg) + with Not_found -> + sargs, more_sargs, + if Btype.is_optional l + && (List.mem_assoc Nolabel sargs + || List.mem_assoc Nolabel more_sargs) + then + Some (option_none ty0 Location.none) + else None + in + let omitted = if arg = None then (l,ty0) :: omitted else omitted in + type_args ((l,arg)::args) omitted ty_fun ty_fun0 + sargs more_sargs + | _ -> + match sargs @ more_sargs with + (l, sarg0)::_ -> + if omitted <> [] then + raise(Error(sarg0.pexp_loc, val_env, Apply_wrong_label l)) + else + raise(Error(cl.cl_loc, val_env, Cannot_apply cl.cl_type)) + | [] -> + (List.rev args, + List.fold_left + (fun ty_fun (l,ty) -> Cty_arrow(l,ty,ty_fun)) + ty_fun0 omitted) + in + let (args, cty) = + let (_, ty_fun0) = Ctype.instance_class [] cl.cl_type in + if ignore_labels then + type_args [] [] cl.cl_type ty_fun0 [] sargs + else + type_args [] [] cl.cl_type ty_fun0 sargs [] + in + rc {cl_desc = Tcl_apply (cl, args); + cl_loc = scl.pcl_loc; + cl_type = cty; + cl_env = val_env; + cl_attributes = scl.pcl_attributes; + } + | Pcl_let (rec_flag, sdefs, scl') -> + let (defs, val_env) = + try + Typecore.type_let val_env rec_flag sdefs None + with Ctype.Unify [(ty, _)] -> + raise(Error(scl.pcl_loc, val_env, Make_nongen_seltype ty)) + in + let (vals, met_env) = + List.fold_right + (fun (id, id_loc) (vals, met_env) -> + let path = Pident id in + (* do not mark the value as used *) + let vd = Env.find_value path val_env in + Ctype.begin_def (); + let expr = + {exp_desc = + Texp_ident(path, mknoloc(Longident.Lident (Ident.name id)),vd); + exp_loc = Location.none; exp_extra = []; + exp_type = Ctype.instance val_env vd.val_type; + exp_attributes = []; + exp_env = val_env; + } + in + Ctype.end_def (); + Ctype.generalize expr.exp_type; + let desc = + {val_type = expr.exp_type; val_kind = Val_ivar (Immutable, + cl_num); + val_attributes = []; + Types.val_loc = vd.Types.val_loc; + } + in + let id' = Ident.create (Ident.name id) in + ((id', id_loc, expr) + :: vals, + Env.add_value id' desc met_env)) + (let_bound_idents_with_loc defs) + ([], met_env) + in + let cl = class_expr cl_num val_env met_env scl' in + 
let () = if rec_flag = Recursive then + check_recursive_bindings val_env defs + in + rc {cl_desc = Tcl_let (rec_flag, defs, vals, cl); + cl_loc = scl.pcl_loc; + cl_type = cl.cl_type; + cl_env = val_env; + cl_attributes = scl.pcl_attributes; + } + | Pcl_constraint (scl', scty) -> + Ctype.begin_class_def (); + let context = Typetexp.narrow () in + let cl = class_expr cl_num val_env met_env scl' in + Typetexp.widen context; + let context = Typetexp.narrow () in + let clty = class_type val_env scty in + Typetexp.widen context; + Ctype.end_def (); + + limited_generalize (Ctype.row_variable (Ctype.self_type cl.cl_type)) + cl.cl_type; + limited_generalize (Ctype.row_variable (Ctype.self_type clty.cltyp_type)) + clty.cltyp_type; + + begin match + Includeclass.class_types val_env cl.cl_type clty.cltyp_type + with + [] -> () + | error -> raise(Error(cl.cl_loc, val_env, Class_match_failure error)) + end; + let (vals, meths, concrs) = extract_constraints clty.cltyp_type in + rc {cl_desc = Tcl_constraint (cl, Some clty, vals, meths, concrs); + cl_loc = scl.pcl_loc; + cl_type = snd (Ctype.instance_class [] clty.cltyp_type); + cl_env = val_env; + cl_attributes = scl.pcl_attributes; + } + | Pcl_open (ovf, lid, e) -> + let used_slot = ref false in + let (path, new_val_env) = !Typecore.type_open ~used_slot ovf val_env scl.pcl_loc lid in + let (_path, new_met_env) = !Typecore.type_open ~used_slot ovf met_env scl.pcl_loc lid in + let cl = class_expr cl_num new_val_env new_met_env e in + rc {cl_desc = Tcl_open (ovf, path, lid, new_val_env, cl); + cl_loc = scl.pcl_loc; + cl_type = cl.cl_type; + cl_env = val_env; + cl_attributes = scl.pcl_attributes; + } + | Pcl_extension ext -> + raise (Error_forward (Builtin_attributes.error_of_extension ext)) + +(*******************************) + +(* Approximate the type of the constructor to allow recursive use *) +(* of optional parameters *) + +let var_option = Predef.type_option (Btype.newgenvar ()) + +let rec approx_declaration cl = + match cl.pcl_desc with + Pcl_fun (l, _, _, cl) -> + let arg = + if Btype.is_optional l then Ctype.instance_def var_option + else Ctype.newvar () in + Ctype.newty (Tarrow (l, arg, approx_declaration cl, Cok)) + | Pcl_let (_, _, cl) -> + approx_declaration cl + | Pcl_constraint (cl, _) -> + approx_declaration cl + | _ -> Ctype.newvar () + +let rec approx_description ct = + match ct.pcty_desc with + Pcty_arrow (l, _, ct) -> + let arg = + if Btype.is_optional l then Ctype.instance_def var_option + else Ctype.newvar () in + Ctype.newty (Tarrow (l, arg, approx_description ct, Cok)) + | _ -> Ctype.newvar () + +(*******************************) + +let temp_abbrev loc env id arity = + let params = ref [] in + for _i = 1 to arity do + params := Ctype.newvar () :: !params + done; + let ty = Ctype.newobj (Ctype.newvar ()) in + let env = + Env.add_type ~check:true id + {type_params = !params; + type_arity = arity; + type_kind = Type_abstract; + type_private = Public; + type_manifest = Some ty; + type_variance = Misc.replicate_list Variance.full arity; + type_newtype_level = None; + type_loc = loc; + type_attributes = []; (* or keep attrs from the class decl? 
*) + type_immediate = false; + type_unboxed = unboxed_false_default_false; + } + env + in + (!params, ty, env) + +let initial_env define_class approx + (res, env) (cl, id, ty_id, obj_id, cl_id) = + (* Temporary abbreviations *) + let arity = List.length cl.pci_params in + let (obj_params, obj_ty, env) = temp_abbrev cl.pci_loc env obj_id arity in + let (cl_params, cl_ty, env) = temp_abbrev cl.pci_loc env cl_id arity in + + (* Temporary type for the class constructor *) + let constr_type = approx cl.pci_expr in + if !Clflags.principal then Ctype.generalize_spine constr_type; + let dummy_cty = + Cty_signature + { csig_self = Ctype.newvar (); + csig_vars = Vars.empty; + csig_concr = Concr.empty; + csig_inher = [] } + in + let dummy_class = + {Types.cty_params = []; (* Dummy value *) + cty_variance = []; + cty_type = dummy_cty; (* Dummy value *) + cty_path = unbound_class; + cty_new = + begin match cl.pci_virt with + | Virtual -> None + | Concrete -> Some constr_type + end; + cty_loc = Location.none; + cty_attributes = []; + } + in + let env = + Env.add_cltype ty_id + {clty_params = []; (* Dummy value *) + clty_variance = []; + clty_type = dummy_cty; (* Dummy value *) + clty_path = unbound_class; + clty_loc = Location.none; + clty_attributes = []; + } + ( + if define_class then + Env.add_class id dummy_class env + else + env + ) + in + ((cl, id, ty_id, + obj_id, obj_params, obj_ty, + cl_id, cl_params, cl_ty, + constr_type, dummy_class)::res, + env) + +let class_infos define_class kind + (cl, id, ty_id, + obj_id, obj_params, obj_ty, + cl_id, cl_params, cl_ty, + constr_type, dummy_class) + (res, env) = + + reset_type_variables (); + Ctype.begin_class_def (); + + (* Introduce class parameters *) + let ci_params = + let make_param (sty, v) = + try + (transl_type_param env sty, v) + with Already_bound -> + raise(Error(sty.ptyp_loc, env, Repeated_parameter)) + in + List.map make_param cl.pci_params + in + let params = List.map (fun (cty, _) -> cty.ctyp_type) ci_params in + + (* Allow self coercions (only for class declarations) *) + let coercion_locs = ref [] in + + (* Type the class expression *) + let (expr, typ) = + try + Typecore.self_coercion := + (Path.Pident obj_id, coercion_locs) :: !Typecore.self_coercion; + let res = kind env cl.pci_expr in + Typecore.self_coercion := List.tl !Typecore.self_coercion; + res + with exn -> + Typecore.self_coercion := []; raise exn + in + + Ctype.end_def (); + + let sty = Ctype.self_type typ in + + (* First generalize the type of the dummy method (cf PR#6123) *) + let (fields, _) = Ctype.flatten_fields (Ctype.object_fields sty) in + List.iter (fun (met, _, ty) -> if met = dummy_method then Ctype.generalize ty) + fields; + (* Generalize the row variable *) + let rv = Ctype.row_variable sty in + List.iter (Ctype.limited_generalize rv) params; + limited_generalize rv typ; + + (* Check the abbreviation for the object type *) + let (obj_params', obj_type) = Ctype.instance_class params typ in + let constr = Ctype.newconstr (Path.Pident obj_id) obj_params in + begin + let ty = Ctype.self_type obj_type in + Ctype.hide_private_methods ty; + Ctype.close_object ty; + begin try + List.iter2 (Ctype.unify env) obj_params obj_params' + with Ctype.Unify _ -> + raise(Error(cl.pci_loc, env, + Bad_parameters (obj_id, constr, + Ctype.newconstr (Path.Pident obj_id) + obj_params'))) + end; + begin try + Ctype.unify env ty constr + with Ctype.Unify _ -> + raise(Error(cl.pci_loc, env, + Abbrev_type_clash (constr, ty, Ctype.expand_head env constr))) + end + end; + + (* Check the other 
temporary abbreviation (#-type) *) + begin + let (cl_params', cl_type) = Ctype.instance_class params typ in + let ty = Ctype.self_type cl_type in + Ctype.hide_private_methods ty; + Ctype.set_object_name obj_id (Ctype.row_variable ty) cl_params ty; + begin try + List.iter2 (Ctype.unify env) cl_params cl_params' + with Ctype.Unify _ -> + raise(Error(cl.pci_loc, env, + Bad_parameters (cl_id, + Ctype.newconstr (Path.Pident cl_id) + cl_params, + Ctype.newconstr (Path.Pident cl_id) + cl_params'))) + end; + begin try + Ctype.unify env ty cl_ty + with Ctype.Unify _ -> + let constr = Ctype.newconstr (Path.Pident cl_id) params in + raise(Error(cl.pci_loc, env, Abbrev_type_clash (constr, ty, cl_ty))) + end + end; + + (* Type of the class constructor *) + begin try + Ctype.unify env + (constructor_type constr obj_type) + (Ctype.instance env constr_type) + with Ctype.Unify trace -> + raise(Error(cl.pci_loc, env, + Constructor_type_mismatch (cl.pci_name.txt, trace))) + end; + + (* Class and class type temporary definitions *) + let cty_variance = List.map (fun _ -> Variance.full) params in + let cltydef = + {clty_params = params; clty_type = class_body typ; + clty_variance = cty_variance; + clty_path = Path.Pident obj_id; + clty_loc = cl.pci_loc; + clty_attributes = cl.pci_attributes; + } + and clty = + {cty_params = params; cty_type = typ; + cty_variance = cty_variance; + cty_path = Path.Pident obj_id; + cty_new = + begin match cl.pci_virt with + | Virtual -> None + | Concrete -> Some constr_type + end; + cty_loc = cl.pci_loc; + cty_attributes = cl.pci_attributes; + } + in + dummy_class.cty_type <- typ; + let env = + Env.add_cltype ty_id cltydef ( + if define_class then Env.add_class id clty env else env) + in + + if cl.pci_virt = Concrete then begin + let sign = Ctype.signature_of_class_type typ in + let mets = virtual_methods sign in + let vals = + Vars.fold + (fun name (_mut, vr, _ty) l -> if vr = Virtual then name :: l else l) + sign.csig_vars [] in + if mets <> [] || vals <> [] then + raise(Error(cl.pci_loc, env, Virtual_class(define_class, false, mets, + vals))); + end; + + (* Misc. *) + let arity = Ctype.class_type_arity typ in + let pub_meths = + let (fields, _) = + Ctype.flatten_fields (Ctype.object_fields (Ctype.expand_head env obj_ty)) + in + List.map (function (lab, _, _) -> lab) fields + in + + (* Final definitions *) + let (params', typ') = Ctype.instance_class params typ in + let cltydef = + {clty_params = params'; clty_type = class_body typ'; + clty_variance = cty_variance; + clty_path = Path.Pident obj_id; + clty_loc = cl.pci_loc; + clty_attributes = cl.pci_attributes; + } + and clty = + {cty_params = params'; cty_type = typ'; + cty_variance = cty_variance; + cty_path = Path.Pident obj_id; + cty_new = + begin match cl.pci_virt with + | Virtual -> None + | Concrete -> Some (Ctype.instance env constr_type) + end; + cty_loc = cl.pci_loc; + cty_attributes = cl.pci_attributes; + } + in + let obj_abbr = + {type_params = obj_params; + type_arity = List.length obj_params; + type_kind = Type_abstract; + type_private = Public; + type_manifest = Some obj_ty; + type_variance = List.map (fun _ -> Variance.full) obj_params; + type_newtype_level = None; + type_loc = cl.pci_loc; + type_attributes = []; (* or keep attrs from cl? 
*) + type_immediate = false; + type_unboxed = unboxed_false_default_false; + } + in + let (cl_params, cl_ty) = + Ctype.instance_parameterized_type params (Ctype.self_type typ) + in + Ctype.hide_private_methods cl_ty; + Ctype.set_object_name obj_id (Ctype.row_variable cl_ty) cl_params cl_ty; + let cl_abbr = + {type_params = cl_params; + type_arity = List.length cl_params; + type_kind = Type_abstract; + type_private = Public; + type_manifest = Some cl_ty; + type_variance = List.map (fun _ -> Variance.full) cl_params; + type_newtype_level = None; + type_loc = cl.pci_loc; + type_attributes = []; (* or keep attrs from cl? *) + type_immediate = false; + type_unboxed = unboxed_false_default_false; + } + in + ((cl, id, clty, ty_id, cltydef, obj_id, obj_abbr, cl_id, cl_abbr, ci_params, + arity, pub_meths, List.rev !coercion_locs, expr) :: res, + env) + +let final_decl env define_class + (cl, id, clty, ty_id, cltydef, obj_id, obj_abbr, cl_id, cl_abbr, ci_params, + arity, pub_meths, coe, expr) = + + begin try Ctype.collapse_conj_params env clty.cty_params + with Ctype.Unify trace -> + raise(Error(cl.pci_loc, env, Non_collapsable_conjunction (id, clty, trace))) + end; + + List.iter Ctype.generalize clty.cty_params; + generalize_class_type true clty.cty_type; + Misc.may Ctype.generalize clty.cty_new; + List.iter Ctype.generalize obj_abbr.type_params; + Misc.may Ctype.generalize obj_abbr.type_manifest; + List.iter Ctype.generalize cl_abbr.type_params; + Misc.may Ctype.generalize cl_abbr.type_manifest; + + if not (closed_class clty) then + raise(Error(cl.pci_loc, env, Non_generalizable_class (id, clty))); + + begin match + Ctype.closed_class clty.cty_params + (Ctype.signature_of_class_type clty.cty_type) + with + None -> () + | Some reason -> + let printer = + if define_class + then function ppf -> Printtyp.class_declaration id ppf clty + else function ppf -> Printtyp.cltype_declaration id ppf cltydef + in + raise(Error(cl.pci_loc, env, Unbound_type_var(printer, reason))) + end; + + (id, cl.pci_name, clty, ty_id, cltydef, obj_id, obj_abbr, cl_id, cl_abbr, + arity, pub_meths, coe, expr, + { ci_loc = cl.pci_loc; + ci_virt = cl.pci_virt; + ci_params = ci_params; +(* TODO : check that we have the correct use of identifiers *) + ci_id_name = cl.pci_name; + ci_id_class = id; + ci_id_class_type = ty_id; + ci_id_object = obj_id; + ci_id_typehash = cl_id; + ci_expr = expr; + ci_decl = clty; + ci_type_decl = cltydef; + ci_attributes = cl.pci_attributes; + }) +(* (cl.pci_variance, cl.pci_loc)) *) + +let class_infos define_class kind + (cl, id, ty_id, + obj_id, obj_params, obj_ty, + cl_id, cl_params, cl_ty, + constr_type, dummy_class) + (res, env) = + Builtin_attributes.warning_scope cl.pci_attributes + (fun () -> + class_infos define_class kind + (cl, id, ty_id, + obj_id, obj_params, obj_ty, + cl_id, cl_params, cl_ty, + constr_type, dummy_class) + (res, env) + ) + +let extract_type_decls + (_id, _id_loc, clty, _ty_id, cltydef, obj_id, obj_abbr, _cl_id, cl_abbr, + _arity, _pub_meths, _coe, _expr, required) decls = + (obj_id, obj_abbr, cl_abbr, clty, cltydef, required) :: decls + +let merge_type_decls + (id, id_loc, _clty, ty_id, _cltydef, obj_id, _obj_abbr, cl_id, _cl_abbr, + arity, pub_meths, coe, expr, req) (obj_abbr, cl_abbr, clty, cltydef) = + (id, id_loc, clty, ty_id, cltydef, obj_id, obj_abbr, cl_id, cl_abbr, + arity, pub_meths, coe, expr, req) + +let final_env define_class env + (id, _id_loc, clty, ty_id, cltydef, obj_id, obj_abbr, cl_id, cl_abbr, + _arity, _pub_meths, _coe, _expr, _req) = + (* Add 
definitions after cleaning them *) + Env.add_type ~check:true obj_id + (Subst.type_declaration Subst.identity obj_abbr) ( + Env.add_type ~check:true cl_id + (Subst.type_declaration Subst.identity cl_abbr) ( + Env.add_cltype ty_id (Subst.cltype_declaration Subst.identity cltydef) ( + if define_class then + Env.add_class id (Subst.class_declaration Subst.identity clty) env + else env))) + +(* Check that #c is coercible to c if there is a self-coercion *) +let check_coercions env + (id, id_loc, clty, ty_id, cltydef, obj_id, obj_abbr, cl_id, cl_abbr, + arity, pub_meths, coercion_locs, _expr, req) = + begin match coercion_locs with [] -> () + | loc :: _ -> + let cl_ty, obj_ty = + match cl_abbr.type_manifest, obj_abbr.type_manifest with + Some cl_ab, Some obj_ab -> + let cl_params, cl_ty = + Ctype.instance_parameterized_type cl_abbr.type_params cl_ab + and obj_params, obj_ty = + Ctype.instance_parameterized_type obj_abbr.type_params obj_ab + in + List.iter2 (Ctype.unify env) cl_params obj_params; + cl_ty, obj_ty + | _ -> assert false + in + begin try Ctype.subtype env cl_ty obj_ty () + with Ctype.Subtype (tr1, tr2) -> + raise(Typecore.Error(loc, env, Typecore.Not_subtype(tr1, tr2))) + end; + if not (Ctype.opened_object cl_ty) then + raise(Error(loc, env, Cannot_coerce_self obj_ty)) + end; + {cls_id = id; + cls_id_loc = id_loc; + cls_decl = clty; + cls_ty_id = ty_id; + cls_ty_decl = cltydef; + cls_obj_id = obj_id; + cls_obj_abbr = obj_abbr; + cls_typesharp_id = cl_id; + cls_abbr = cl_abbr; + cls_arity = arity; + cls_pub_methods = pub_meths; + cls_info=req} + +(*******************************) + +let type_classes define_class approx kind env cls = + let cls = + List.map + (function cl -> + (cl, + Ident.create cl.pci_name.txt, Ident.create cl.pci_name.txt, + Ident.create cl.pci_name.txt, Ident.create ("#" ^ cl.pci_name.txt))) + cls + in + Ctype.init_def (Ident.current_time ()); + Ctype.begin_class_def (); + let (res, env) = + List.fold_left (initial_env define_class approx) ([], env) cls + in + let (res, env) = + List.fold_right (class_infos define_class kind) res ([], env) + in + Ctype.end_def (); + let res = List.rev_map (final_decl env define_class) res in + let decls = List.fold_right extract_type_decls res [] in + let decls = Typedecl.compute_variance_decls env decls in + let res = List.map2 merge_type_decls res decls in + let env = List.fold_left (final_env define_class) env res in + let res = List.map (check_coercions env) res in + (res, env) + +let class_num = ref 0 +let class_declaration env sexpr = + incr class_num; + let expr = class_expr (string_of_int !class_num) env env sexpr in + (expr, expr.cl_type) + +let class_description env sexpr = + let expr = class_type env sexpr in + (expr, expr.cltyp_type) + +let class_declarations env cls = + let info, env = + type_classes true approx_declaration class_declaration env cls + in + let ids, exprs = + List.split + (List.map + (fun ci -> ci.cls_id, ci.cls_info.ci_expr) + info) + in + check_recursive_class_bindings env ids exprs; + info, env + +let class_descriptions env cls = + type_classes true approx_description class_description env cls + +let class_type_declarations env cls = + let (decls, env) = + type_classes false approx_description class_description env cls + in + (List.map + (fun decl -> + {clsty_ty_id = decl.cls_ty_id; + clsty_id_loc = decl.cls_id_loc; + clsty_ty_decl = decl.cls_ty_decl; + clsty_obj_id = decl.cls_obj_id; + clsty_obj_abbr = decl.cls_obj_abbr; + clsty_typesharp_id = decl.cls_typesharp_id; + clsty_abbr = decl.cls_abbr; + 
clsty_info = decl.cls_info}) + decls, + env) + +let rec unify_parents env ty cl = + match cl.cl_desc with + Tcl_ident (p, _, _) -> + begin try + let decl = Env.find_class p env in + let _, body = Ctype.find_cltype_for_path env decl.cty_path in + Ctype.unify env ty (Ctype.instance env body) + with + Not_found -> () + | _exn -> assert false + end + | Tcl_structure st -> unify_parents_struct env ty st + | Tcl_open (_, _, _, _, cl) + | Tcl_fun (_, _, _, cl, _) + | Tcl_apply (cl, _) + | Tcl_let (_, _, _, cl) + | Tcl_constraint (cl, _, _, _, _) -> unify_parents env ty cl +and unify_parents_struct env ty st = + List.iter + (function + | {cf_desc = Tcf_inherit (_, cl, _, _, _)} -> + unify_parents env ty cl + | _ -> ()) + st.cstr_fields + +let type_object env loc s = + incr class_num; + let (desc, sign) = + class_structure (string_of_int !class_num) true env env loc s in + let sty = Ctype.expand_head env sign.csig_self in + Ctype.hide_private_methods sty; + let (fields, _) = Ctype.flatten_fields (Ctype.object_fields sty) in + let meths = List.map (fun (s,_,_) -> s) fields in + unify_parents_struct env sign.csig_self desc; + (desc, sign, meths) + +let () = + Typecore.type_object := type_object + +(*******************************) + +(* Approximate the class declaration as class ['params] id = object end *) +let approx_class sdecl = + let open Ast_helper in + let self' = Typ.any () in + let clty' = Cty.signature ~loc:sdecl.pci_expr.pcty_loc (Csig.mk self' []) in + { sdecl with pci_expr = clty' } + +let approx_class_declarations env sdecls = + fst (class_type_declarations env (List.map approx_class sdecls)) + +(*******************************) + +(* Error report *) + +open Format + +let report_error env ppf = function + | Repeated_parameter -> + fprintf ppf "A type parameter occurs several times" + | Unconsistent_constraint trace -> + fprintf ppf "The class constraints are not consistent.@."; + Printtyp.report_unification_error ppf env trace + (fun ppf -> fprintf ppf "Type") + (fun ppf -> fprintf ppf "is not compatible with type") + | Field_type_mismatch (k, m, trace) -> + Printtyp.report_unification_error ppf env trace + (function ppf -> + fprintf ppf "The %s %s@ has type" k m) + (function ppf -> + fprintf ppf "but is expected to have type") + | Structure_expected clty -> + fprintf ppf + "@[This class expression is not a class structure; it has type@ %a@]" + Printtyp.class_type clty + | Cannot_apply _ -> + fprintf ppf + "This class expression is not a class function, it cannot be applied" + | Apply_wrong_label l -> + let mark_label = function + | Nolabel -> "out label" + | l -> sprintf " label %s" (Btype.prefixed_label_name l) in + fprintf ppf "This argument cannot be applied with%s" (mark_label l) + | Pattern_type_clash ty -> + (* XXX Trace *) + (* XXX Revoir message d'erreur | Improve error message *) + Printtyp.reset_and_mark_loops ty; + fprintf ppf "@[%s@ %a@]" + "This pattern cannot match self: it only matches values of type" + Printtyp.type_expr ty + | Unbound_class_2 cl -> + fprintf ppf "@[The class@ %a@ is not yet completely defined@]" + Printtyp.longident cl + | Unbound_class_type_2 cl -> + fprintf ppf "@[The class type@ %a@ is not yet completely defined@]" + Printtyp.longident cl + | Abbrev_type_clash (abbrev, actual, expected) -> + (* XXX Afficher une trace ? | Print a trace? 
*) + Printtyp.reset_and_mark_loops_list [abbrev; actual; expected]; + fprintf ppf "@[The abbreviation@ %a@ expands to type@ %a@ \ + but is used with type@ %a@]" + Printtyp.type_expr abbrev + Printtyp.type_expr actual + Printtyp.type_expr expected + | Constructor_type_mismatch (c, trace) -> + Printtyp.report_unification_error ppf env trace + (function ppf -> + fprintf ppf "The expression \"new %s\" has type" c) + (function ppf -> + fprintf ppf "but is used with type") + | Virtual_class (cl, imm, mets, vals) -> + let print_mets ppf mets = + List.iter (function met -> fprintf ppf "@ %s" met) mets in + let missings = + match mets, vals with + [], _ -> "variables" + | _, [] -> "methods" + | _ -> "methods and variables" + in + let print_msg ppf = + if imm then fprintf ppf "This object has virtual %s" missings + else if cl then fprintf ppf "This class should be virtual" + else fprintf ppf "This class type should be virtual" + in + fprintf ppf + "@[%t.@ @[<2>The following %s are undefined :%a@]@]" + print_msg missings print_mets (mets @ vals) + | Parameter_arity_mismatch(lid, expected, provided) -> + fprintf ppf + "@[The class constructor %a@ expects %i type argument(s),@ \ + but is here applied to %i type argument(s)@]" + Printtyp.longident lid expected provided + | Parameter_mismatch trace -> + Printtyp.report_unification_error ppf env trace + (function ppf -> + fprintf ppf "The type parameter") + (function ppf -> + fprintf ppf "does not meet its constraint: it should be") + | Bad_parameters (id, params, cstrs) -> + Printtyp.reset_and_mark_loops_list [params; cstrs]; + fprintf ppf + "@[The abbreviation %a@ is used with parameters@ %a@ \ + which are incompatible with constraints@ %a@]" + Printtyp.ident id Printtyp.type_expr params Printtyp.type_expr cstrs + | Class_match_failure error -> + Includeclass.report_error ppf error + | Unbound_val lab -> + fprintf ppf "Unbound instance variable %s" lab + | Unbound_type_var (printer, reason) -> + let print_common ppf kind ty0 real lab ty = + let ty1 = + if real then ty0 else Btype.newgenty(Tobject(ty0, ref None)) in + List.iter Printtyp.mark_loops [ty; ty1]; + fprintf ppf + "The %s %s@ has type@;<1 2>%a@ where@ %a@ is unbound" + kind lab Printtyp.type_expr ty Printtyp.type_expr ty0 + in + let print_reason ppf = function + | Ctype.CC_Method (ty0, real, lab, ty) -> + print_common ppf "method" ty0 real lab ty + | Ctype.CC_Value (ty0, real, lab, ty) -> + print_common ppf "instance variable" ty0 real lab ty + in + Printtyp.reset (); + fprintf ppf + "@[@[Some type variables are unbound in this type:@;<1 2>%t@]@ \ + @[%a@]@]" + printer print_reason reason + | Make_nongen_seltype ty -> + fprintf ppf + "@[@[Self type should not occur in the non-generic type@;<1 2>\ + %a@]@,\ + It would escape the scope of its class@]" + Printtyp.type_scheme ty + | Non_generalizable_class (id, clty) -> + fprintf ppf + "@[The type of this class,@ %a,@ \ + contains type variables that cannot be generalized@]" + (Printtyp.class_declaration id) clty + | Cannot_coerce_self ty -> + fprintf ppf + "@[The type of self cannot be coerced to@ \ + the type of the current class:@ %a.@.\ + Some occurrences are contravariant@]" + Printtyp.type_scheme ty + | Non_collapsable_conjunction (id, clty, trace) -> + fprintf ppf + "@[The type of this class,@ %a,@ \ + contains non-collapsible conjunctive types in constraints@]" + (Printtyp.class_declaration id) clty; + Printtyp.report_unification_error ppf env trace + (fun ppf -> fprintf ppf "Type") + (fun ppf -> fprintf ppf "is not compatible with type") + 
| Final_self_clash trace -> + Printtyp.report_unification_error ppf env trace + (function ppf -> + fprintf ppf "This object is expected to have type") + (function ppf -> + fprintf ppf "but actually has type") + | Mutability_mismatch (_lab, mut) -> + let mut1, mut2 = + if mut = Immutable then "mutable", "immutable" + else "immutable", "mutable" in + fprintf ppf + "@[The instance variable is %s;@ it cannot be redefined as %s@]" + mut1 mut2 + | No_overriding (_, "") -> + fprintf ppf "@[This inheritance does not override any method@ %s@]" + "instance variable" + | No_overriding (kind, name) -> + fprintf ppf "@[The %s `%s'@ has no previous definition@]" kind name + | Duplicate (kind, name) -> + fprintf ppf "@[The %s `%s'@ has multiple definitions in this object@]" + kind name + +let report_error env ppf err = + Printtyp.wrap_printing_env env (fun () -> report_error env ppf err) + +let () = + Location.register_error_of_exn + (function + | Error (loc, env, err) -> + Some (Location.error_of_printer loc (report_error env) err) + | Error_forward err -> + Some err + | _ -> + None + ) diff --git a/res_syntax/compiler-libs-406/typeclass.mli b/res_syntax/compiler-libs-406/typeclass.mli new file mode 100644 index 0000000000..1735bf9e9a --- /dev/null +++ b/res_syntax/compiler-libs-406/typeclass.mli @@ -0,0 +1,124 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Jerome Vouillon, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +open Asttypes +open Types +open Format + +type 'a class_info = { + cls_id : Ident.t; + cls_id_loc : string loc; + cls_decl : class_declaration; + cls_ty_id : Ident.t; + cls_ty_decl : class_type_declaration; + cls_obj_id : Ident.t; + cls_obj_abbr : type_declaration; + cls_typesharp_id : Ident.t; + cls_abbr : type_declaration; + cls_arity : int; + cls_pub_methods : string list; + cls_info : 'a; +} + +type class_type_info = { + clsty_ty_id : Ident.t; + clsty_id_loc : string loc; + clsty_ty_decl : class_type_declaration; + clsty_obj_id : Ident.t; + clsty_obj_abbr : type_declaration; + clsty_typesharp_id : Ident.t; + clsty_abbr : type_declaration; + clsty_info : Typedtree.class_type_declaration; +} + +val class_declarations: + Env.t -> Parsetree.class_declaration list -> + Typedtree.class_declaration class_info list * Env.t + +(* +and class_declaration = + (class_expr, Types.class_declaration) class_infos +*) + +val class_descriptions: + Env.t -> Parsetree.class_description list -> + Typedtree.class_description class_info list * Env.t + +(* +and class_description = + (class_type, unit) class_infos +*) + +val class_type_declarations: + Env.t -> Parsetree.class_description list -> class_type_info list * Env.t + +(* +and class_type_declaration = + (class_type, Types.class_type_declaration) class_infos +*) + +val approx_class_declarations: + Env.t -> Parsetree.class_description list -> class_type_info list + +val virtual_methods: Types.class_signature -> label list + +(* +val type_classes : + bool -> + ('a -> Types.type_expr) -> + (Env.t -> 'a -> 'b * Types.class_type) -> + Env.t -> + 'a Parsetree.class_infos list -> + ( Ident.t * Types.class_declaration * + Ident.t * Types.class_type_declaration * + Ident.t * Types.type_declaration * + Ident.t * Types.type_declaration * + int * string list * 'b * 'b Typedtree.class_infos) + list * Env.t +*) + +type error = + Unconsistent_constraint of (type_expr * type_expr) list + | Field_type_mismatch of string * string * (type_expr * type_expr) list + | Structure_expected of class_type + | Cannot_apply of class_type + | Apply_wrong_label of arg_label + | Pattern_type_clash of type_expr + | Repeated_parameter + | Unbound_class_2 of Longident.t + | Unbound_class_type_2 of Longident.t + | Abbrev_type_clash of type_expr * type_expr * type_expr + | Constructor_type_mismatch of string * (type_expr * type_expr) list + | Virtual_class of bool * bool * string list * string list + | Parameter_arity_mismatch of Longident.t * int * int + | Parameter_mismatch of (type_expr * type_expr) list + | Bad_parameters of Ident.t * type_expr * type_expr + | Class_match_failure of Ctype.class_match_failure list + | Unbound_val of string + | Unbound_type_var of (formatter -> unit) * Ctype.closed_class_failure + | Make_nongen_seltype of type_expr + | Non_generalizable_class of Ident.t * Types.class_declaration + | Cannot_coerce_self of type_expr + | Non_collapsable_conjunction of + Ident.t * Types.class_declaration * (type_expr * type_expr) list + | Final_self_clash of (type_expr * type_expr) list + | Mutability_mismatch of string * mutable_flag + | No_overriding of string * string + | Duplicate of string * string + +exception Error of Location.t * Env.t * error +exception Error_forward of Location.error + +val report_error : Env.t -> formatter -> error -> unit diff --git a/res_syntax/compiler-libs-406/typecore.ml b/res_syntax/compiler-libs-406/typecore.ml new file mode 100644 index 
0000000000..d7a4b3f36a --- /dev/null +++ b/res_syntax/compiler-libs-406/typecore.ml @@ -0,0 +1,5154 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Typechecking for the core language *) + +open Misc +open Asttypes +open Parsetree +open Types +open Typedtree +open Btype +open Ctype + +type error = + Polymorphic_label of Longident.t + | Constructor_arity_mismatch of Longident.t * int * int + | Label_mismatch of Longident.t * (type_expr * type_expr) list + | Pattern_type_clash of (type_expr * type_expr) list + | Or_pattern_type_clash of Ident.t * (type_expr * type_expr) list + | Multiply_bound_variable of string + | Orpat_vars of Ident.t * Ident.t list + | Expr_type_clash of (type_expr * type_expr) list + | Apply_non_function of type_expr + | Apply_wrong_label of arg_label * type_expr + | Label_multiply_defined of string + | Label_missing of Ident.t list + | Label_not_mutable of Longident.t + | Wrong_name of string * type_expr * string * Path.t * string * string list + | Name_type_mismatch of + string * Longident.t * (Path.t * Path.t) * (Path.t * Path.t) list + | Invalid_format of string + | Undefined_method of type_expr * string * string list option + | Undefined_inherited_method of string * string list + | Virtual_class of Longident.t + | Private_type of type_expr + | Private_label of Longident.t * type_expr + | Unbound_instance_variable of string * string list + | Instance_variable_not_mutable of bool * string + | Not_subtype of (type_expr * type_expr) list * (type_expr * type_expr) list + | Outside_class + | Value_multiply_overridden of string + | Coercion_failure of + type_expr * type_expr * (type_expr * type_expr) list * bool + | Too_many_arguments of bool * type_expr + | Abstract_wrong_label of arg_label * type_expr + | Scoping_let_module of string * type_expr + | Masked_instance_variable of Longident.t + | Not_a_variant_type of Longident.t + | Incoherent_label_order + | Less_general of string * (type_expr * type_expr) list + | Modules_not_allowed + | Cannot_infer_signature + | Not_a_packed_module of type_expr + | Recursive_local_constraint of (type_expr * type_expr) list + | Unexpected_existential + | Unqualified_gadt_pattern of Path.t * string + | Invalid_interval + | Invalid_for_loop_index + | No_value_clauses + | Exception_pattern_below_toplevel + | Inlined_record_escape + | Inlined_record_expected + | Unrefuted_pattern of pattern + | Invalid_extension_constructor_payload + | Not_an_extension_constructor + | Literal_overflow of string + | Unknown_literal of string * char + | Illegal_letrec_pat + | Illegal_letrec_expr + | Illegal_class_expr + +exception Error of Location.t * Env.t * error +exception Error_forward of Location.error + +(* Forward declaration, to be filled in by Typemod.type_module *) + +let type_module = + ref ((fun _env _md -> assert false) : + Env.t -> Parsetree.module_expr -> Typedtree.module_expr) + +(* Forward declaration, to be filled in by Typemod.type_open *) + +let type_open : + (?used_slot:bool ref -> override_flag -> Env.t -> Location.t -> + 
Longident.t loc -> Path.t * Env.t) + ref = + ref (fun ?used_slot:_ _ -> assert false) + +(* Forward declaration, to be filled in by Typemod.type_package *) + +let type_package = + ref (fun _ -> assert false) + +(* Forward declaration, to be filled in by Typeclass.class_structure *) +let type_object = + ref (fun _env _s -> assert false : + Env.t -> Location.t -> Parsetree.class_structure -> + Typedtree.class_structure * Types.class_signature * string list) + +(* + Saving and outputting type information. + We keep these function names short, because they have to be + called each time we create a record of type [Typedtree.expression] + or [Typedtree.pattern] that will end up in the typed AST. +*) +let re node = + Cmt_format.add_saved_type (Cmt_format.Partial_expression node); + Stypes.record (Stypes.Ti_expr node); + node +;; +let rp node = + Cmt_format.add_saved_type (Cmt_format.Partial_pattern node); + Stypes.record (Stypes.Ti_pat node); + node +;; + + +type recarg = + | Allowed + | Required + | Rejected + + +let case lhs rhs = + {c_lhs = lhs; c_guard = None; c_rhs = rhs} + +(* Upper approximation of free identifiers on the parse tree *) + +let iter_expression f e = + + let rec expr e = + f e; + match e.pexp_desc with + | Pexp_extension _ (* we don't iterate under extension point *) + | Pexp_ident _ + | Pexp_new _ + | Pexp_constant _ -> () + | Pexp_function pel -> List.iter case pel + | Pexp_fun (_, eo, _, e) -> may expr eo; expr e + | Pexp_apply (e, lel) -> expr e; List.iter (fun (_, e) -> expr e) lel + | Pexp_let (_, pel, e) -> expr e; List.iter binding pel + | Pexp_match (e, pel) + | Pexp_try (e, pel) -> expr e; List.iter case pel + | Pexp_array el + | Pexp_tuple el -> List.iter expr el + | Pexp_construct (_, eo) + | Pexp_variant (_, eo) -> may expr eo + | Pexp_record (iel, eo) -> + may expr eo; List.iter (fun (_, e) -> expr e) iel + | Pexp_open (_, _, e) + | Pexp_newtype (_, e) + | Pexp_poly (e, _) + | Pexp_lazy e + | Pexp_assert e + | Pexp_setinstvar (_, e) + | Pexp_send (e, _) + | Pexp_constraint (e, _) + | Pexp_coerce (e, _, _) + | Pexp_letexception (_, e) + | Pexp_field (e, _) -> expr e + | Pexp_while (e1, e2) + | Pexp_sequence (e1, e2) + | Pexp_setfield (e1, _, e2) -> expr e1; expr e2 + | Pexp_ifthenelse (e1, e2, eo) -> expr e1; expr e2; may expr eo + | Pexp_for (_, e1, e2, _, e3) -> expr e1; expr e2; expr e3 + | Pexp_override sel -> List.iter (fun (_, e) -> expr e) sel + | Pexp_letmodule (_, me, e) -> expr e; module_expr me + | Pexp_object { pcstr_fields = fs } -> List.iter class_field fs + | Pexp_pack me -> module_expr me + | Pexp_unreachable -> () + + and case {pc_lhs = _; pc_guard; pc_rhs} = + may expr pc_guard; + expr pc_rhs + + and binding x = + expr x.pvb_expr + + and module_expr me = + match me.pmod_desc with + | Pmod_extension _ + | Pmod_ident _ -> () + | Pmod_structure str -> List.iter structure_item str + | Pmod_constraint (me, _) + | Pmod_functor (_, _, me) -> module_expr me + | Pmod_apply (me1, me2) -> module_expr me1; module_expr me2 + | Pmod_unpack e -> expr e + + + and structure_item str = + match str.pstr_desc with + | Pstr_eval (e, _) -> expr e + | Pstr_value (_, pel) -> List.iter binding pel + | Pstr_primitive _ + | Pstr_type _ + | Pstr_typext _ + | Pstr_exception _ + | Pstr_modtype _ + | Pstr_open _ + | Pstr_class_type _ + | Pstr_attribute _ + | Pstr_extension _ -> () + | Pstr_include {pincl_mod = me} + | Pstr_module {pmb_expr = me} -> module_expr me + | Pstr_recmodule l -> List.iter (fun x -> module_expr x.pmb_expr) l + | Pstr_class cdl -> List.iter (fun c -> 
class_expr c.pci_expr) cdl + + and class_expr ce = + match ce.pcl_desc with + | Pcl_constr _ -> () + | Pcl_structure { pcstr_fields = fs } -> List.iter class_field fs + | Pcl_fun (_, eo, _, ce) -> may expr eo; class_expr ce + | Pcl_apply (ce, lel) -> + class_expr ce; List.iter (fun (_, e) -> expr e) lel + | Pcl_let (_, pel, ce) -> + List.iter binding pel; class_expr ce + | Pcl_open (_, _, ce) + | Pcl_constraint (ce, _) -> class_expr ce + | Pcl_extension _ -> () + + and class_field cf = + match cf.pcf_desc with + | Pcf_inherit (_, ce, _) -> class_expr ce + | Pcf_val (_, _, Cfk_virtual _) + | Pcf_method (_, _, Cfk_virtual _ ) | Pcf_constraint _ -> () + | Pcf_val (_, _, Cfk_concrete (_, e)) + | Pcf_method (_, _, Cfk_concrete (_, e)) -> expr e + | Pcf_initializer e -> expr e + | Pcf_attribute _ | Pcf_extension _ -> () + + in + expr e + + +let all_idents_cases el = + let idents = Hashtbl.create 8 in + let f = function + | {pexp_desc=Pexp_ident { txt = Longident.Lident id; _ }; _} -> + Hashtbl.replace idents id () + | _ -> () + in + List.iter + (fun cp -> + may (iter_expression f) cp.pc_guard; + iter_expression f cp.pc_rhs + ) + el; + Hashtbl.fold (fun x () rest -> x :: rest) idents [] + + +(* Typing of constants *) + +let type_constant = function + Const_int _ -> instance_def Predef.type_int + | Const_char _ -> instance_def Predef.type_char + | Const_string _ -> instance_def Predef.type_string + | Const_float _ -> instance_def Predef.type_float + | Const_int32 _ -> instance_def Predef.type_int32 + | Const_int64 _ -> instance_def Predef.type_int64 + | Const_nativeint _ -> instance_def Predef.type_nativeint + +let constant : Parsetree.constant -> (Asttypes.constant, error) result = + function + | Pconst_integer (i,None) -> + begin + try Ok (Const_int (Misc.Int_literal_converter.int i)) + with Failure _ -> Error (Literal_overflow "int") + end + | Pconst_integer (i,Some 'l') -> + begin + try Ok (Const_int32 (Misc.Int_literal_converter.int32 i)) + with Failure _ -> Error (Literal_overflow "int32") + end + | Pconst_integer (i,Some 'L') -> + begin + try Ok (Const_int64 (Misc.Int_literal_converter.int64 i)) + with Failure _ -> Error (Literal_overflow "int64") + end + | Pconst_integer (i,Some 'n') -> + begin + try Ok (Const_nativeint (Misc.Int_literal_converter.nativeint i)) + with Failure _ -> Error (Literal_overflow "nativeint") + end + | Pconst_integer (i,Some c) -> Error (Unknown_literal (i, c)) + | Pconst_char c -> Ok (Const_char c) + | Pconst_string (s,d) -> Ok (Const_string (s,d)) + | Pconst_float (f,None)-> Ok (Const_float f) + | Pconst_float (f,Some c) -> Error (Unknown_literal (f, c)) + +let constant_or_raise env loc cst = + match constant cst with + | Ok c -> c + | Error err -> raise (Error (loc, env, err)) + +(* Specific version of type_option, using newty rather than newgenty *) + +let type_option ty = + newty (Tconstr(Predef.path_option,[ty], ref Mnil)) + +let mkexp exp_desc exp_type exp_loc exp_env = + { exp_desc; exp_type; exp_loc; exp_env; exp_extra = []; exp_attributes = [] } + +let option_none ty loc = + let lid = Longident.Lident "None" + and env = Env.initial_safe_string in + let cnone = Env.lookup_constructor lid env in + mkexp (Texp_construct(mknoloc lid, cnone, [])) ty loc env + +let option_some texp = + let lid = Longident.Lident "Some" in + let csome = Env.lookup_constructor lid Env.initial_safe_string in + mkexp ( Texp_construct(mknoloc lid , csome, [texp]) ) + (type_option texp.exp_type) texp.exp_loc texp.exp_env + +let extract_option_type env ty = + match expand_head env ty 
with {desc = Tconstr(path, [ty], _)} + when Path.same path Predef.path_option -> ty + | _ -> assert false + +let extract_concrete_record env ty = + match extract_concrete_typedecl env ty with + (p0, p, {type_kind=Type_record (fields, _)}) -> (p0, p, fields) + | _ -> raise Not_found + +let extract_concrete_variant env ty = + match extract_concrete_typedecl env ty with + (p0, p, {type_kind=Type_variant cstrs}) -> (p0, p, cstrs) + | (p0, p, {type_kind=Type_open}) -> (p0, p, []) + | _ -> raise Not_found + +let extract_label_names env ty = + try + let (_, _,fields) = extract_concrete_record env ty in + List.map (fun l -> l.Types.ld_id) fields + with Not_found -> + assert false + +(* Typing of patterns *) + +(* unification inside type_pat*) +let unify_pat_types loc env ty ty' = + try + unify env ty ty' + with + Unify trace -> + raise(Error(loc, env, Pattern_type_clash(trace))) + | Tags(l1,l2) -> + raise(Typetexp.Error(loc, env, Typetexp.Variant_tags (l1, l2))) + +(* unification inside type_exp and type_expect *) +let unify_exp_types loc env ty expected_ty = + (* Format.eprintf "@[%a@ %a@]@." Printtyp.raw_type_expr exp.exp_type + Printtyp.raw_type_expr expected_ty; *) + try + unify env ty expected_ty + with + Unify trace -> + raise(Error(loc, env, Expr_type_clash(trace))) + | Tags(l1,l2) -> + raise(Typetexp.Error(loc, env, Typetexp.Variant_tags (l1, l2))) + +(* level at which to create the local type declarations *) +let newtype_level = ref None +let get_newtype_level () = + match !newtype_level with + Some y -> y + | None -> assert false + +let unify_pat_types_gadt loc env ty ty' = + let newtype_level = + match !newtype_level with + | None -> assert false + | Some x -> x + in + try + unify_gadt ~newtype_level env ty ty' + with + Unify trace -> + raise(Error(loc, !env, Pattern_type_clash(trace))) + | Tags(l1,l2) -> + raise(Typetexp.Error(loc, !env, Typetexp.Variant_tags (l1, l2))) + | Unification_recursive_abbrev trace -> + raise(Error(loc, !env, Recursive_local_constraint trace)) + + +(* Creating new conjunctive types is not allowed when typing patterns *) + +let unify_pat env pat expected_ty = + unify_pat_types pat.pat_loc env pat.pat_type expected_ty + +(* make all Reither present in open variants *) +let finalize_variant pat = + match pat.pat_desc with + Tpat_variant(tag, opat, r) -> + let row = + match expand_head pat.pat_env pat.pat_type with + {desc = Tvariant row} -> r := row; row_repr row + | _ -> assert false + in + begin match row_field tag row with + | Rabsent -> () (* assert false *) + | Reither (true, [], _, e) when not row.row_closed -> + set_row_field e (Rpresent None) + | Reither (false, ty::tl, _, e) when not row.row_closed -> + set_row_field e (Rpresent (Some ty)); + begin match opat with None -> assert false + | Some pat -> List.iter (unify_pat pat.pat_env pat) (ty::tl) + end + | Reither (c, _l, true, e) when not (row_fixed row) -> + set_row_field e (Reither (c, [], false, ref None)) + | _ -> () + end; + (* Force check of well-formedness WHY? 
*) + (* unify_pat pat.pat_env pat + (newty(Tvariant{row_fields=[]; row_more=newvar(); row_closed=false; + row_bound=(); row_fixed=false; row_name=None})); *) + | _ -> () + +let rec iter_pattern f p = + f p; + iter_pattern_desc (iter_pattern f) p.pat_desc + +let has_variants p = + try + iter_pattern (function {pat_desc=Tpat_variant _} -> raise Exit | _ -> ()) + p; + false + with Exit -> + true + + +(* pattern environment *) +let pattern_variables = ref ([] : + (Ident.t * type_expr * string loc * Location.t * bool (* as-variable *)) list) +let pattern_force = ref ([] : (unit -> unit) list) +let pattern_scope = ref (None : Annot.ident option);; +let allow_modules = ref false +let module_variables = ref ([] : (string loc * Location.t) list) +let reset_pattern scope allow = + pattern_variables := []; + pattern_force := []; + pattern_scope := scope; + allow_modules := allow; + module_variables := []; +;; + +let enter_variable ?(is_module=false) ?(is_as_variable=false) loc name ty = + if List.exists (fun (id, _, _, _, _) -> Ident.name id = name.txt) + !pattern_variables + then raise(Error(loc, Env.empty, Multiply_bound_variable name.txt)); + let id = Ident.create name.txt in + pattern_variables := + (id, ty, name, loc, is_as_variable) :: !pattern_variables; + if is_module then begin + (* Note: unpack patterns enter a variable of the same name *) + if not !allow_modules then + raise (Error (loc, Env.empty, Modules_not_allowed)); + module_variables := (name, loc) :: !module_variables + end else + (* moved to genannot *) + may (fun s -> Stypes.record (Stypes.An_ident (name.loc, name.txt, s))) + !pattern_scope; + id + +let sort_pattern_variables vs = + List.sort + (fun (x,_,_,_,_) (y,_,_,_,_) -> + compare (Ident.name x) (Ident.name y)) + vs + +let enter_orpat_variables loc env p1_vs p2_vs = + (* unify_vars operate on sorted lists *) + + let p1_vs = sort_pattern_variables p1_vs + and p2_vs = sort_pattern_variables p2_vs in + + let rec unify_vars p1_vs p2_vs = + let vars vs = List.map (fun (x,_t,_,_l,_a) -> x) vs in + match p1_vs, p2_vs with + | (x1,t1,_,_l1,_a1)::rem1, (x2,t2,_,_l2,_a2)::rem2 + when Ident.equal x1 x2 -> + if x1==x2 then + unify_vars rem1 rem2 + else begin + begin try + unify env t1 t2 + with + | Unify trace -> + raise(Error(loc, env, Or_pattern_type_clash(x1, trace))) + end; + (x2,x1)::unify_vars rem1 rem2 + end + | [],[] -> [] + | (x,_,_,_,_)::_, [] -> raise (Error (loc, env, Orpat_vars (x, []))) + | [],(y,_,_,_,_)::_ -> raise (Error (loc, env, Orpat_vars (y, []))) + | (x,_,_,_,_)::_, (y,_,_,_,_)::_ -> + let err = + if Ident.name x < Ident.name y + then Orpat_vars (x, vars p2_vs) + else Orpat_vars (y, vars p1_vs) in + raise (Error (loc, env, err)) in + unify_vars p1_vs p2_vs + +let rec build_as_type env p = + match p.pat_desc with + Tpat_alias(p1,_, _) -> build_as_type env p1 + | Tpat_tuple pl -> + let tyl = List.map (build_as_type env) pl in + newty (Ttuple tyl) + | Tpat_construct(_, cstr, pl) -> + let keep = cstr.cstr_private = Private || cstr.cstr_existentials <> [] in + if keep then p.pat_type else + let tyl = List.map (build_as_type env) pl in + let ty_args, ty_res = instance_constructor cstr in + List.iter2 (fun (p,ty) -> unify_pat env {p with pat_type = ty}) + (List.combine pl tyl) ty_args; + ty_res + | Tpat_variant(l, p', _) -> + let ty = may_map (build_as_type env) p' in + newty (Tvariant{row_fields=[l, Rpresent ty]; row_more=newvar(); + row_bound=(); row_name=None; + row_fixed=false; row_closed=false}) + | Tpat_record (lpl,_) -> + let lbl = snd3 (List.hd lpl) in + if 
lbl.lbl_private = Private then p.pat_type else + let ty = newvar () in + let ppl = List.map (fun (_, l, p) -> l.lbl_pos, p) lpl in + let do_label lbl = + let _, ty_arg, ty_res = instance_label false lbl in + unify_pat env {p with pat_type = ty} ty_res; + let refinable = + lbl.lbl_mut = Immutable && List.mem_assoc lbl.lbl_pos ppl && + match (repr lbl.lbl_arg).desc with Tpoly _ -> false | _ -> true in + if refinable then begin + let arg = List.assoc lbl.lbl_pos ppl in + unify_pat env {arg with pat_type = build_as_type env arg} ty_arg + end else begin + let _, ty_arg', ty_res' = instance_label false lbl in + unify env ty_arg ty_arg'; + unify_pat env p ty_res' + end in + Array.iter do_label lbl.lbl_all; + ty + | Tpat_or(p1, p2, row) -> + begin match row with + None -> + let ty1 = build_as_type env p1 and ty2 = build_as_type env p2 in + unify_pat env {p2 with pat_type = ty2} ty1; + ty1 + | Some row -> + let row = row_repr row in + newty (Tvariant{row with row_closed=false; row_more=newvar()}) + end + | Tpat_any | Tpat_var _ | Tpat_constant _ + | Tpat_array _ | Tpat_lazy _ -> p.pat_type + +let build_or_pat env loc lid = + let path, decl = Typetexp.find_type env lid.loc lid.txt + in + let tyl = List.map (fun _ -> newvar()) decl.type_params in + let row0 = + let ty = expand_head env (newty(Tconstr(path, tyl, ref Mnil))) in + match ty.desc with + Tvariant row when static_row row -> row + | _ -> raise(Error(lid.loc, env, Not_a_variant_type lid.txt)) + in + let pats, fields = + List.fold_left + (fun (pats,fields) (l,f) -> + match row_field_repr f with + Rpresent None -> + (l,None) :: pats, + (l, Reither(true,[], true, ref None)) :: fields + | Rpresent (Some ty) -> + (l, Some {pat_desc=Tpat_any; pat_loc=Location.none; pat_env=env; + pat_type=ty; pat_extra=[]; pat_attributes=[]}) + :: pats, + (l, Reither(false, [ty], true, ref None)) :: fields + | _ -> pats, fields) + ([],[]) (row_repr row0).row_fields in + let row = + { row_fields = List.rev fields; row_more = newvar(); row_bound = (); + row_closed = false; row_fixed = false; row_name = Some (path, tyl) } + in + let ty = newty (Tvariant row) in + let gloc = {loc with Location.loc_ghost=true} in + let row' = ref {row with row_more=newvar()} in + let pats = + List.map + (fun (l,p) -> + {pat_desc=Tpat_variant(l,p,row'); pat_loc=gloc; + pat_env=env; pat_type=ty; pat_extra=[]; pat_attributes=[]}) + pats + in + match pats with + [] -> raise(Error(lid.loc, env, Not_a_variant_type lid.txt)) + | pat :: pats -> + let r = + List.fold_left + (fun pat pat0 -> + {pat_desc=Tpat_or(pat0,pat,Some row0); pat_extra=[]; + pat_loc=gloc; pat_env=env; pat_type=ty; pat_attributes=[]}) + pat pats in + (path, rp { r with pat_loc = loc },ty) + +(* Type paths *) + +let rec expand_path env p = + let decl = + try Some (Env.find_type p env) with Not_found -> None + in + match decl with + Some {type_manifest = Some ty} -> + begin match repr ty with + {desc=Tconstr(p,_,_)} -> expand_path env p + | _ -> p + (* PR#6394: recursive module may introduce incoherent manifest *) + end + | _ -> + let p' = Env.normalize_path None env p in + if Path.same p p' then p else expand_path env p' + +let compare_type_path env tpath1 tpath2 = + Path.same (expand_path env tpath1) (expand_path env tpath2) + +(* Records *) +let label_of_kind kind = + if kind = "record" then "field" else "constructor" + +module NameChoice(Name : sig + type t + val type_kind: string + val get_name: t -> string + val get_type: t -> type_expr + val get_descrs: Env.type_descriptions -> t list + val unbound_name_error: Env.t -> 
Longident.t loc -> 'a + val in_env: t -> bool +end) = struct + open Name + + let get_type_path d = + match (repr (get_type d)).desc with + | Tconstr(p, _, _) -> p + | _ -> assert false + + let lookup_from_type env tpath lid = + let descrs = get_descrs (Env.find_type_descrs tpath env) in + Env.mark_type_used env (Path.last tpath) (Env.find_type tpath env); + match lid.txt with + Longident.Lident s -> begin + try + List.find (fun nd -> get_name nd = s) descrs + with Not_found -> + let names = List.map get_name descrs in + raise (Error (lid.loc, env, + Wrong_name ("", newvar (), type_kind, tpath, s, names))) + end + | _ -> raise Not_found + + let rec unique eq acc = function + [] -> List.rev acc + | x :: rem -> + if List.exists (eq x) acc then unique eq acc rem + else unique eq (x :: acc) rem + + let ambiguous_types env lbl others = + let tpath = get_type_path lbl in + let others = + List.map (fun (lbl, _) -> get_type_path lbl) others in + let tpaths = unique (compare_type_path env) [tpath] others in + match tpaths with + [_] -> [] + | _ -> List.map Printtyp.string_of_path tpaths + + let disambiguate_by_type env tpath lbls = + let check_type (lbl, _) = + let lbl_tpath = get_type_path lbl in + compare_type_path env tpath lbl_tpath + in + List.find check_type lbls + + let disambiguate ?(warn=Location.prerr_warning) ?(check_lk=fun _ _ -> ()) + ?scope lid env opath lbls = + let scope = match scope with None -> lbls | Some l -> l in + let lbl = match opath with + None -> + begin match lbls with + [] -> unbound_name_error env lid + | (lbl, use) :: rest -> + use (); + let paths = ambiguous_types env lbl rest in + if paths <> [] then + warn lid.loc + (Warnings.Ambiguous_name ([Longident.last lid.txt], + paths, false)); + lbl + end + | Some(tpath0, tpath, pr) -> + let warn_pr () = + let label = label_of_kind type_kind in + warn lid.loc + (Warnings.Not_principal + ("this type-based " ^ label ^ " disambiguation")) + in + try + let lbl, use = disambiguate_by_type env tpath scope in + use (); + if not pr then begin + (* Check if non-principal type is affecting result *) + match lbls with + [] -> warn_pr () + | (lbl', _use') :: rest -> + let lbl_tpath = get_type_path lbl' in + if not (compare_type_path env tpath lbl_tpath) then warn_pr () + else + let paths = ambiguous_types env lbl rest in + if paths <> [] then + warn lid.loc + (Warnings.Ambiguous_name ([Longident.last lid.txt], + paths, false)) + end; + lbl + with Not_found -> try + let lbl = lookup_from_type env tpath lid in + check_lk tpath lbl; + if in_env lbl then + begin + let s = Printtyp.string_of_path tpath in + warn lid.loc + (Warnings.Name_out_of_scope (s, [Longident.last lid.txt], false)); + end; + if not pr then warn_pr (); + lbl + with Not_found -> + if lbls = [] then unbound_name_error env lid else + let tp = (tpath0, expand_path env tpath) in + let tpl = + List.map + (fun (lbl, _) -> + let tp0 = get_type_path lbl in + let tp = expand_path env tp0 in + (tp0, tp)) + lbls + in + raise (Error (lid.loc, env, + Name_type_mismatch (type_kind, lid.txt, tp, tpl))) + in + if in_env lbl then + begin match scope with + (lab1,_)::_ when lab1 == lbl -> () + | _ -> + Location.prerr_warning lid.loc + (Warnings.Disambiguated_name(get_name lbl)) + end; + lbl +end + +let wrap_disambiguate kind ty f x = + try f x with Error (loc, env, Wrong_name ("",_,tk,tp,name,valid_names)) -> + raise (Error (loc, env, Wrong_name (kind,ty,tk,tp,name,valid_names))) + +module Label = NameChoice (struct + type t = label_description + let type_kind = "record" + let get_name lbl 
= lbl.lbl_name + let get_type lbl = lbl.lbl_res + let get_descrs = snd + let unbound_name_error = Typetexp.unbound_label_error + let in_env lbl = + match lbl.lbl_repres with + | Record_regular | Record_float | Record_unboxed false -> true + | Record_unboxed true | Record_inlined _ | Record_extension -> false +end) + +let disambiguate_label_by_ids keep closed ids labels = + let check_ids (lbl, _) = + let lbls = Hashtbl.create 8 in + Array.iter (fun lbl -> Hashtbl.add lbls lbl.lbl_name ()) lbl.lbl_all; + List.for_all (Hashtbl.mem lbls) ids + and check_closed (lbl, _) = + (not closed || List.length ids = Array.length lbl.lbl_all) + in + let labels' = List.filter check_ids labels in + if keep && labels' = [] then (false, labels) else + let labels'' = List.filter check_closed labels' in + if keep && labels'' = [] then (false, labels') else (true, labels'') + +(* Only issue warnings once per record constructor/pattern *) +let disambiguate_lid_a_list loc closed env opath lid_a_list = + let ids = List.map (fun (lid, _) -> Longident.last lid.txt) lid_a_list in + let w_pr = ref false and w_amb = ref [] + and w_scope = ref [] and w_scope_ty = ref "" in + let warn loc msg = + let open Warnings in + match msg with + | Not_principal _ -> w_pr := true + | Ambiguous_name([s], l, _) -> w_amb := (s, l) :: !w_amb + | Name_out_of_scope(ty, [s], _) -> + w_scope := s :: !w_scope; w_scope_ty := ty + | _ -> Location.prerr_warning loc msg + in + let process_label lid = + (* Strategy for each field: + * collect all the labels in scope for that name + * if the type is known and principal, just eventually warn + if the real label was not in scope + * fail if there is no known type and no label found + * otherwise use other fields to reduce the list of candidates + * if there is no known type reduce it incrementally, so that + there is still at least one candidate (for error message) + * if the reduced list is valid, call Label.disambiguate + *) + let scope = Typetexp.find_all_labels env lid.loc lid.txt in + if opath = None && scope = [] then + Typetexp.unbound_label_error env lid; + let (ok, labels) = + match opath with + Some (_, _, true) -> (true, scope) (* disambiguate only checks scope *) + | _ -> disambiguate_label_by_ids (opath=None) closed ids scope + in + if ok then Label.disambiguate lid env opath labels ~warn ~scope + else fst (List.hd labels) (* will fail later *) + in + let lbl_a_list = + List.map (fun (lid,a) -> lid, process_label lid, a) lid_a_list in + if !w_pr then + Location.prerr_warning loc + (Warnings.Not_principal "this type-based record disambiguation") + else begin + match List.rev !w_amb with + (_,types)::_ as amb -> + let paths = + List.map (fun (_,lbl,_) -> Label.get_type_path lbl) lbl_a_list in + let path = List.hd paths in + if List.for_all (compare_type_path env path) (List.tl paths) then + Location.prerr_warning loc + (Warnings.Ambiguous_name (List.map fst amb, types, true)) + else + List.iter + (fun (s,l) -> Location.prerr_warning loc + (Warnings.Ambiguous_name ([s],l,false))) + amb + | _ -> () + end; + if !w_scope <> [] then + Location.prerr_warning loc + (Warnings.Name_out_of_scope (!w_scope_ty, List.rev !w_scope, true)); + lbl_a_list + +let rec find_record_qual = function + | [] -> None + | ({ txt = Longident.Ldot (modname, _) }, _) :: _ -> Some modname + | _ :: rest -> find_record_qual rest + +let map_fold_cont f xs k = + List.fold_right (fun x k ys -> f x (fun y -> k (y :: ys))) + xs (fun ys -> k (List.rev ys)) [] + +let type_label_a_list ?labels loc closed env type_lbl_a opath 
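`find_record_qual` and `disambiguate_label_by_ids` implement two conveniences visible at the surface: a single module-qualified label qualifies the remaining labels of the same record, and the set of labels actually written narrows the candidate record types. A small sketch of the first behaviour:

```ocaml
module Geo = struct
  type point = { x : float; y : float }
end

(* Qualifying one label is enough; [y] is looked up under the same path. *)
let origin = { Geo.x = 0.; y = 0. }

let swap { Geo.x; y } = { Geo.x = y; y = x }
```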
lid_a_list k = + let lbl_a_list = + match lid_a_list, labels with + ({txt=Longident.Lident s}, _)::_, Some labels when Hashtbl.mem labels s -> + (* Special case for rebuilt syntax trees *) + List.map + (function lid, a -> match lid.txt with + Longident.Lident s -> lid, Hashtbl.find labels s, a + | _ -> assert false) + lid_a_list + | _ -> + let lid_a_list = + match find_record_qual lid_a_list with + None -> lid_a_list + | Some modname -> + List.map + (fun (lid, a as lid_a) -> + match lid.txt with Longident.Lident s -> + {lid with txt=Longident.Ldot (modname, s)}, a + | _ -> lid_a) + lid_a_list + in + disambiguate_lid_a_list loc closed env opath lid_a_list + in + (* Invariant: records are sorted in the typed tree *) + let lbl_a_list = + List.sort + (fun (_,lbl1,_) (_,lbl2,_) -> compare lbl1.lbl_pos lbl2.lbl_pos) + lbl_a_list + in + map_fold_cont type_lbl_a lbl_a_list k +;; + +(* Checks over the labels mentioned in a record pattern: + no duplicate definitions (error); properly closed (warning) *) + +let check_recordpat_labels loc lbl_pat_list closed = + match lbl_pat_list with + | [] -> () (* should not happen *) + | (_, label1, _) :: _ -> + let all = label1.lbl_all in + let defined = Array.make (Array.length all) false in + let check_defined (_, label, _) = + if defined.(label.lbl_pos) + then raise(Error(loc, Env.empty, Label_multiply_defined label.lbl_name)) + else defined.(label.lbl_pos) <- true in + List.iter check_defined lbl_pat_list; + if closed = Closed + && Warnings.is_active (Warnings.Non_closed_record_pattern "") + then begin + let undefined = ref [] in + for i = 0 to Array.length all - 1 do + if not defined.(i) then undefined := all.(i).lbl_name :: !undefined + done; + if !undefined <> [] then begin + let u = String.concat ", " (List.rev !undefined) in + Location.prerr_warning loc (Warnings.Non_closed_record_pattern u) + end + end + +(* Constructors *) + +module Constructor = NameChoice (struct + type t = constructor_description + let type_kind = "variant" + let get_name cstr = cstr.cstr_name + let get_type cstr = cstr.cstr_res + let get_descrs = fst + let unbound_name_error = Typetexp.unbound_constructor_error + let in_env _ = true +end) + +(* unification of a type with a tconstr with + freshly created arguments *) +let unify_head_only loc env ty constr = + let (_, ty_res) = instance_constructor constr in + match (repr ty_res).desc with + | Tconstr(p,args,m) -> + ty_res.desc <- Tconstr(p,List.map (fun _ -> newvar ()) args,m); + enforce_constraints env ty_res; + unify_pat_types loc env ty_res ty + | _ -> assert false + +(* Typing of patterns *) + +(* Remember current state for backtracking. + No variable information, as we only backtrack on + patterns without variables (cf. assert statements). *) +type state = + { snapshot: Btype.snapshot; + levels: Ctype.levels; + env: Env.t; } +let save_state env = + { snapshot = Btype.snapshot (); + levels = Ctype.save_levels (); + env = !env; } +let set_state s env = + Btype.backtrack s.snapshot; + Ctype.set_levels s.levels; + env := s.env + +(* type_pat does not generate local constraints inside or patterns *) +type type_pat_mode = + | Normal + | Splitting_or (* splitting an or-pattern *) + | Inside_or (* inside a non-split or-pattern *) + | Split_or (* always split or-patterns *) + +exception Need_backtrack + +(* type_pat propagates the expected type as well as maps for + constructors and labels. + Unification may update the typing environment. 
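`check_recordpat_labels` enforces two properties of record patterns: a label may be bound at most once, and a closed pattern that omits labels triggers the non-closed-record-pattern warning (warning 9, off by default), which can be silenced by ending the pattern with `; _`. Hedged sketch:

```ocaml
type range = { lo : int; hi : int; step : int }

(* Rejected: the label [lo] is matched twice. *)
(* let bad { lo; lo } = lo *)

(* With warning 9 enabled, omitting [hi] and [step] would be reported;
   the trailing "; _" states the omission explicitly. *)
let lower { lo; _ } = lo
```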
*) +(* constrs <> None => called from parmatch: backtrack on or-patterns + explode > 0 => explode Ppat_any for gadts *) +let rec type_pat ~constrs ~labels ~no_existentials ~mode ~explode ~env + sp expected_ty k = + Builtin_attributes.warning_scope sp.ppat_attributes + (fun () -> + type_pat_aux ~constrs ~labels ~no_existentials ~mode ~explode ~env + sp expected_ty k + ) + +and type_pat_aux ~constrs ~labels ~no_existentials ~mode ~explode ~env + sp expected_ty k = + let mode' = if mode = Splitting_or then Normal else mode in + let type_pat ?(constrs=constrs) ?(labels=labels) ?(mode=mode') + ?(explode=explode) ?(env=env) = + type_pat ~constrs ~labels ~no_existentials ~mode ~explode ~env in + let loc = sp.ppat_loc in + let rp k x : pattern = if constrs = None then k (rp x) else k x in + match sp.ppat_desc with + Ppat_any -> + let k' d = rp k { + pat_desc = d; + pat_loc = loc; pat_extra=[]; + pat_type = expected_ty; + pat_attributes = sp.ppat_attributes; + pat_env = !env } + in + if explode > 0 then + let (sp, constrs, labels) = Parmatch.ppat_of_type !env expected_ty in + if sp.ppat_desc = Parsetree.Ppat_any then k' Tpat_any else + if mode = Inside_or then raise Need_backtrack else + let explode = + match sp.ppat_desc with + Parsetree.Ppat_or _ -> explode - 5 + | _ -> explode - 1 + in + type_pat ~constrs:(Some constrs) ~labels:(Some labels) + ~explode sp expected_ty k + else k' Tpat_any + | Ppat_var name -> + let id = (* PR#7330 *) + if name.txt = "*extension*" then Ident.create name.txt else + enter_variable loc name expected_ty + in + rp k { + pat_desc = Tpat_var (id, name); + pat_loc = loc; pat_extra=[]; + pat_type = expected_ty; + pat_attributes = sp.ppat_attributes; + pat_env = !env } + | Ppat_unpack name -> + assert (constrs = None); + let id = enter_variable loc name expected_ty ~is_module:true in + rp k { + pat_desc = Tpat_var (id, name); + pat_loc = sp.ppat_loc; + pat_extra=[Tpat_unpack, loc, sp.ppat_attributes]; + pat_type = expected_ty; + pat_attributes = []; + pat_env = !env } + | Ppat_constraint({ppat_desc=Ppat_var name; ppat_loc=lloc}, + ({ptyp_desc=Ptyp_poly _} as sty)) -> + (* explicitly polymorphic type *) + assert (constrs = None); + let cty, force = Typetexp.transl_simple_type_delayed !env sty in + let ty = cty.ctyp_type in + unify_pat_types lloc !env ty expected_ty; + pattern_force := force :: !pattern_force; + begin match ty.desc with + | Tpoly (body, tyl) -> + begin_def (); + let _, ty' = instance_poly ~keep_names:true false tyl body in + end_def (); + generalize ty'; + let id = enter_variable lloc name ty' in + rp k { + pat_desc = Tpat_var (id, name); + pat_loc = lloc; + pat_extra = [Tpat_constraint cty, loc, sp.ppat_attributes]; + pat_type = ty; + pat_attributes = []; + pat_env = !env + } + | _ -> assert false + end + | Ppat_alias(sq, name) -> + assert (constrs = None); + type_pat sq expected_ty (fun q -> + begin_def (); + let ty_var = build_as_type !env q in + end_def (); + generalize ty_var; + let id = enter_variable ~is_as_variable:true loc name ty_var in + rp k { + pat_desc = Tpat_alias(q, id, name); + pat_loc = loc; pat_extra=[]; + pat_type = q.pat_type; + pat_attributes = sp.ppat_attributes; + pat_env = !env }) + | Ppat_constant cst -> + let cst = constant_or_raise !env loc cst in + unify_pat_types loc !env (type_constant cst) expected_ty; + rp k { + pat_desc = Tpat_constant cst; + pat_loc = loc; pat_extra=[]; + pat_type = expected_ty; + pat_attributes = sp.ppat_attributes; + pat_env = !env } + | Ppat_interval (Pconst_char c1, Pconst_char c2) -> + let open 
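The `Ppat_unpack` case lets a first-class module be bound directly in a pattern; the identifier is recorded as a module variable and reintroduced with `let module` later (see `wrap_unpacks` further down). A standard-shaped sketch, not taken from the patch:

```ocaml
module type Show = sig
  type t
  val show : t -> string
end

(* (module M : Show with type t = a) is an unpack pattern carrying a
   package-type constraint. *)
let print (type a) (module M : Show with type t = a) (v : a) =
  print_endline (M.show v)

module Int_show = struct type t = int let show = string_of_int end
let () = print (module Int_show : Show with type t = int) 42
```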
Ast_helper.Pat in + let gloc = {loc with Location.loc_ghost=true} in + let rec loop c1 c2 = + if c1 = c2 then constant ~loc:gloc (Pconst_char c1) + else + or_ ~loc:gloc + (constant ~loc:gloc (Pconst_char c1)) + (loop (c1 + 1) c2) + in + let p = if c1 <= c2 then loop c1 c2 else loop c2 c1 in + let p = {p with ppat_loc=loc} in + type_pat ~explode:0 p expected_ty k + (* TODO: record 'extra' to remember about interval *) + | Ppat_interval _ -> + raise (Error (loc, !env, Invalid_interval)) + | Ppat_tuple spl -> + assert (List.length spl >= 2); + let spl_ann = List.map (fun p -> (p,newvar ())) spl in + let ty = newty (Ttuple(List.map snd spl_ann)) in + unify_pat_types loc !env ty expected_ty; + map_fold_cont (fun (p,t) -> type_pat p t) spl_ann (fun pl -> + rp k { + pat_desc = Tpat_tuple pl; + pat_loc = loc; pat_extra=[]; + pat_type = expected_ty; + pat_attributes = sp.ppat_attributes; + pat_env = !env }) + | Ppat_construct(lid, sarg) -> + let opath = + try + let (p0, p, _) = extract_concrete_variant !env expected_ty in + Some (p0, p, true) + with Not_found -> None + in + let candidates = + match lid.txt, constrs with + Longident.Lident s, Some constrs when Hashtbl.mem constrs s -> + [Hashtbl.find constrs s, (fun () -> ())] + | _ -> Typetexp.find_all_constructors !env lid.loc lid.txt + in + let check_lk tpath constr = + if constr.cstr_generalized then + raise (Error (lid.loc, !env, + Unqualified_gadt_pattern (tpath, constr.cstr_name))) + in + let constr = + wrap_disambiguate "This variant pattern is expected to have" expected_ty + (Constructor.disambiguate lid !env opath ~check_lk) candidates + in + if constr.cstr_generalized && constrs <> None && mode = Inside_or + then raise Need_backtrack; + Env.mark_constructor Env.Pattern !env (Longident.last lid.txt) constr; + Builtin_attributes.check_deprecated loc constr.cstr_attributes + constr.cstr_name; + if no_existentials && constr.cstr_existentials <> [] then + raise (Error (loc, !env, Unexpected_existential)); + (* if constructor is gadt, we must verify that the expected type has the + correct head *) + if constr.cstr_generalized then + unify_head_only loc !env expected_ty constr; + let sargs = + match sarg with + None -> [] + | Some {ppat_desc = Ppat_tuple spl} when + constr.cstr_arity > 1 || + Builtin_attributes.explicit_arity sp.ppat_attributes + -> spl + | Some({ppat_desc = Ppat_any} as sp) when constr.cstr_arity <> 1 -> + if constr.cstr_arity = 0 then + Location.prerr_warning sp.ppat_loc + Warnings.Wildcard_arg_to_constant_constr; + replicate_list sp constr.cstr_arity + | Some sp -> [sp] in + begin match sargs with + | [{ppat_desc = Ppat_constant _} as sp] + when Builtin_attributes.warn_on_literal_pattern + constr.cstr_attributes -> + Location.prerr_warning sp.ppat_loc + Warnings.Fragile_literal_pattern + | _ -> () + end; + if List.length sargs <> constr.cstr_arity then + raise(Error(loc, !env, Constructor_arity_mismatch(lid.txt, + constr.cstr_arity, List.length sargs))); + let (ty_args, ty_res) = + instance_constructor ~in_pattern:(env, get_newtype_level ()) constr + in + (* PR#7214: do not use gadt unification for toplevel lets *) + if not constr.cstr_generalized || mode = Inside_or || no_existentials + then unify_pat_types loc !env ty_res expected_ty + else unify_pat_types_gadt loc env ty_res expected_ty; + + let rec check_non_escaping p = + match p.ppat_desc with + | Ppat_or (p1, p2) -> + check_non_escaping p1; + check_non_escaping p2 + | Ppat_alias (p, _) -> + check_non_escaping p + | Ppat_constraint _ -> + raise (Error (p.ppat_loc, !env, 
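Character interval patterns are pure sugar: the `Ppat_interval` case rebuilds `'a'..'z'` as an or-pattern over every character of the range before typing it, and any non-character interval is rejected with `Invalid_interval`. For example:

```ocaml
(* '0'..'9' is elaborated into '0' | '1' | ... | '9'. *)
let is_hex_digit = function
  | '0'..'9' | 'a'..'f' | 'A'..'F' -> true
  | _ -> false
```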
Inlined_record_escape)) + | _ -> + () + in + if constr.cstr_inlined <> None then List.iter check_non_escaping sargs; + + map_fold_cont (fun (p,t) -> type_pat p t) (List.combine sargs ty_args) + (fun args -> + rp k { + pat_desc=Tpat_construct(lid, constr, args); + pat_loc = loc; pat_extra=[]; + pat_type = expected_ty; + pat_attributes = sp.ppat_attributes; + pat_env = !env }) + | Ppat_variant(l, sarg) -> + let arg_type = match sarg with None -> [] | Some _ -> [newvar()] in + let row = { row_fields = + [l, Reither(sarg = None, arg_type, true, ref None)]; + row_bound = (); + row_closed = false; + row_more = newvar (); + row_fixed = false; + row_name = None } in + (* PR#7404: allow some_other_tag blindly, as it would not unify with + the abstract row variable *) + if l = Parmatch.some_other_tag then assert (constrs <> None) + else unify_pat_types loc !env (newty (Tvariant row)) expected_ty; + let k arg = + rp k { + pat_desc = Tpat_variant(l, arg, ref {row with row_more = newvar()}); + pat_loc = loc; pat_extra=[]; + pat_type = expected_ty; + pat_attributes = sp.ppat_attributes; + pat_env = !env } + in begin + (* PR#6235: propagate type information *) + match sarg, arg_type with + Some p, [ty] -> type_pat p ty (fun p -> k (Some p)) + | _ -> k None + end + | Ppat_record(lid_sp_list, closed) -> + assert (lid_sp_list <> []); + let opath, record_ty = + try + let (p0, p,_) = extract_concrete_record !env expected_ty in + Some (p0, p, true), expected_ty + with Not_found -> None, newvar () + in + let type_label_pat (label_lid, label, sarg) k = + begin_def (); + let (vars, ty_arg, ty_res) = instance_label false label in + if vars = [] then end_def (); + begin try + unify_pat_types loc !env ty_res record_ty + with Unify trace -> + raise(Error(label_lid.loc, !env, + Label_mismatch(label_lid.txt, trace))) + end; + type_pat sarg ty_arg (fun arg -> + if vars <> [] then begin + end_def (); + generalize ty_arg; + List.iter generalize vars; + let instantiated tv = + let tv = expand_head !env tv in + not (is_Tvar tv) || tv.level <> generic_level in + if List.exists instantiated vars then + raise + (Error(label_lid.loc, !env, Polymorphic_label label_lid.txt)) + end; + k (label_lid, label, arg)) + in + let k' k lbl_pat_list = + check_recordpat_labels loc lbl_pat_list closed; + unify_pat_types loc !env record_ty expected_ty; + rp k { + pat_desc = Tpat_record (lbl_pat_list, closed); + pat_loc = loc; pat_extra=[]; + pat_type = expected_ty; + pat_attributes = sp.ppat_attributes; + pat_env = !env } + in + if constrs = None then + k (wrap_disambiguate "This record pattern is expected to have" + expected_ty + (type_label_a_list ?labels loc false !env type_label_pat opath + lid_sp_list) + (k' (fun x -> x))) + else + type_label_a_list ?labels loc false !env type_label_pat opath + lid_sp_list (k' k) + | Ppat_array spl -> + let ty_elt = newvar() in + unify_pat_types + loc !env (instance_def (Predef.type_array ty_elt)) expected_ty; + let spl_ann = List.map (fun p -> (p,newvar())) spl in + map_fold_cont (fun (p,_) -> type_pat p ty_elt) spl_ann (fun pl -> + rp k { + pat_desc = Tpat_array pl; + pat_loc = loc; pat_extra=[]; + pat_type = expected_ty; + pat_attributes = sp.ppat_attributes; + pat_env = !env }) + | Ppat_or(sp1, sp2) -> + let state = save_state env in + begin match + if mode = Split_or || mode = Splitting_or then raise Need_backtrack; + let initial_pattern_variables = !pattern_variables in + let initial_module_variables = !module_variables in + let p1 = + try Some (type_pat ~mode:Inside_or sp1 expected_ty (fun x -> x)) 
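A polymorphic variant pattern contributes a one-field row (`Reither`) with a fresh row variable, so the argument type of a match lists at most the tags that actually appear. Illustrative sketch:

```ocaml
(* Inferred type: [< `Circle of float | `Square of float ] -> float *)
let area = function
  | `Circle r -> 3.14159 *. r *. r
  | `Square s -> s *. s
```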
+ with Need_backtrack -> None in + let p1_variables = !pattern_variables in + let p1_module_variables = !module_variables in + pattern_variables := initial_pattern_variables; + module_variables := initial_module_variables; + let p2 = + try Some (type_pat ~mode:Inside_or sp2 expected_ty (fun x -> x)) + with Need_backtrack -> None in + let p2_variables = !pattern_variables in + match p1, p2 with + None, None -> raise Need_backtrack + | Some p, None | None, Some p -> p (* no variables in this case *) + | Some p1, Some p2 -> + let alpha_env = + enter_orpat_variables loc !env p1_variables p2_variables in + pattern_variables := p1_variables; + module_variables := p1_module_variables; + { pat_desc = Tpat_or(p1, alpha_pat alpha_env p2, None); + pat_loc = loc; pat_extra=[]; + pat_type = expected_ty; + pat_attributes = sp.ppat_attributes; + pat_env = !env } + with + p -> rp k p + | exception Need_backtrack when mode <> Inside_or -> + assert (constrs <> None); + set_state state env; + let mode = + if mode = Split_or then mode else Splitting_or in + try type_pat ~mode sp1 expected_ty k with Error _ -> + set_state state env; + type_pat ~mode sp2 expected_ty k + end + | Ppat_lazy sp1 -> + let nv = newvar () in + unify_pat_types loc !env (instance_def (Predef.type_lazy_t nv)) + expected_ty; + (* do not explode under lazy: PR#7421 *) + type_pat ~explode:0 sp1 nv (fun p1 -> + rp k { + pat_desc = Tpat_lazy p1; + pat_loc = loc; pat_extra=[]; + pat_type = expected_ty; + pat_attributes = sp.ppat_attributes; + pat_env = !env }) + | Ppat_constraint(sp, sty) -> + (* Separate when not already separated by !principal *) + let separate = true in + if separate then begin_def(); + let cty, force = Typetexp.transl_simple_type_delayed !env sty in + let ty = cty.ctyp_type in + let ty, expected_ty' = + if separate then begin + end_def(); + generalize_structure ty; + instance !env ty, instance !env ty + end else ty, ty + in + unify_pat_types loc !env ty expected_ty; + type_pat sp expected_ty' (fun p -> + (*Format.printf "%a@.%a@." 
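For `Ppat_or`, both branches are typed against the same expected type, and `enter_orpat_variables` then requires them to bind exactly the same variables at compatible types; the right branch is alpha-renamed onto the left branch's identifiers. Sketch (diagnostic wording approximate):

```ocaml
(* Accepted: both branches bind [x] at the same type. *)
let untag = function `Left x | `Right x -> x

(* Rejected: a variable bound on one side only, reported along the lines of
   "Variable x must occur on both sides of this | pattern". *)
(* let bad = function (x, _) | (_, y) -> x + y *)
```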
+ Printtyp.raw_type_expr ty + Printtyp.raw_type_expr p.pat_type;*) + pattern_force := force :: !pattern_force; + let extra = (Tpat_constraint cty, loc, sp.ppat_attributes) in + let p = + if not separate then p else + match p.pat_desc with + Tpat_var (id,s) -> + {p with pat_type = ty; + pat_desc = Tpat_alias + ({p with pat_desc = Tpat_any; pat_attributes = []}, id,s); + pat_extra = [extra]; + } + | _ -> {p with pat_type = ty; + pat_extra = extra :: p.pat_extra} + in k p) + | Ppat_type lid -> + let (path, p,ty) = build_or_pat !env loc lid in + unify_pat_types loc !env ty expected_ty; + k { p with pat_extra = + (Tpat_type (path, lid), loc, sp.ppat_attributes) :: p.pat_extra } + | Ppat_open (lid,p) -> + let path, new_env = + !type_open Asttypes.Fresh !env sp.ppat_loc lid in + let new_env = ref new_env in + type_pat ~env:new_env p expected_ty ( fun p -> + env := Env.copy_local !env ~from:!new_env; + k { p with pat_extra =( Tpat_open (path,lid,!new_env), + loc, sp.ppat_attributes) :: p.pat_extra } + ) + | Ppat_exception _ -> + raise (Error (loc, !env, Exception_pattern_below_toplevel)) + | Ppat_extension ext -> + raise (Error_forward (Builtin_attributes.error_of_extension ext)) + +let type_pat ?(allow_existentials=false) ?constrs ?labels ?(mode=Normal) + ?(explode=0) ?(lev=get_current_level()) env sp expected_ty = + newtype_level := Some lev; + try + let r = + type_pat ~no_existentials:(not allow_existentials) ~constrs ~labels + ~mode ~explode ~env sp expected_ty (fun x -> x) in + iter_pattern (fun p -> p.pat_env <- !env) r; + newtype_level := None; + r + with e -> + newtype_level := None; + raise e + + +(* this function is passed to Partial.parmatch + to type check gadt nonexhaustiveness *) +let partial_pred ~lev ?mode ?explode env expected_ty constrs labels p = + let env = ref env in + let state = save_state env in + try + reset_pattern None true; + let typed_p = + Ctype.with_passive_variants + (type_pat ~allow_existentials:true ~lev + ~constrs ~labels ?mode ?explode env p) + expected_ty + in + set_state state env; + (* types are invalidated but we don't need them here *) + Some typed_p + with Error _ -> + set_state state env; + None + +let check_partial ?(lev=get_current_level ()) env expected_ty loc cases = + let explode = match cases with [_] -> 5 | _ -> 0 in + Parmatch.check_partial_gadt + (partial_pred ~lev ~explode env expected_ty) loc cases + +let check_unused ?(lev=get_current_level ()) env expected_ty cases = + Parmatch.check_unused + (fun refute constrs labels spat -> + match + partial_pred ~lev ~mode:Split_or ~explode:5 + env expected_ty constrs labels spat + with + Some pat when refute -> + raise (Error (spat.ppat_loc, env, Unrefuted_pattern pat)) + | r -> r) + cases + +let add_pattern_variables ?check ?check_as env = + let pv = get_ref pattern_variables in + (List.fold_right + (fun (id, ty, _name, loc, as_var) env -> + let check = if as_var then check_as else check in + Env.add_value ?check id + {val_type = ty; val_kind = Val_reg; Types.val_loc = loc; + val_attributes = []; + } env + ) + pv env, + get_ref module_variables) + +let type_pattern ~lev env spat scope expected_ty = + reset_pattern scope true; + let new_env = ref env in + let pat = type_pat ~allow_existentials:true ~lev new_env spat expected_ty in + let new_env, unpacks = + add_pattern_variables !new_env + ~check:(fun s -> Warnings.Unused_var_strict s) + ~check_as:(fun s -> Warnings.Unused_var s) in + (pat, new_env, get_ref pattern_force, unpacks) + +let type_pattern_list env spatl scope expected_tys allow = + 
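`Ppat_type` (`#t` patterns) goes through `build_or_pat`, which expands the abbreviation into an or-pattern covering every tag of the closed polymorphic variant. Sketch:

```ocaml
type primary = [ `Red | `Green | `Blue ]
type color = [ primary | `Gray of int ]

(* #primary stands for the or-pattern `Red | `Green | `Blue. *)
let is_primary (c : color) =
  match c with
  | #primary -> true
  | `Gray _ -> false
```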
reset_pattern scope allow; + let new_env = ref env in + let type_pat (attrs, pat) ty = + Builtin_attributes.warning_scope ~ppwarning:false attrs + (fun () -> + type_pat new_env pat ty + ) + in + let patl = List.map2 type_pat spatl expected_tys in + let new_env, unpacks = add_pattern_variables !new_env in + (patl, new_env, get_ref pattern_force, unpacks) + +let type_class_arg_pattern cl_num val_env met_env l spat = + reset_pattern None false; + let nv = newvar () in + let pat = type_pat (ref val_env) spat nv in + if has_variants pat then begin + Parmatch.pressure_variants val_env [pat]; + iter_pattern finalize_variant pat + end; + List.iter (fun f -> f()) (get_ref pattern_force); + if is_optional l then unify_pat val_env pat (type_option (newvar ())); + let (pv, met_env) = + List.fold_right + (fun (id, ty, name, loc, as_var) (pv, env) -> + let check s = + if as_var then Warnings.Unused_var s + else Warnings.Unused_var_strict s in + let id' = Ident.create (Ident.name id) in + ((id', name, id, ty)::pv, + Env.add_value id' {val_type = ty; + val_kind = Val_ivar (Immutable, cl_num); + val_attributes = []; + Types.val_loc = loc; + } ~check + env)) + !pattern_variables ([], met_env) + in + let val_env, _ = add_pattern_variables val_env in + (pat, pv, val_env, met_env) + +let type_self_pattern cl_num privty val_env met_env par_env spat = + let open Ast_helper in + let spat = + Pat.mk (Ppat_alias (Pat.mk(Ppat_alias (spat, mknoloc "selfpat-*")), + mknoloc ("selfpat-" ^ cl_num))) + in + reset_pattern None false; + let nv = newvar() in + let pat = type_pat (ref val_env) spat nv in + List.iter (fun f -> f()) (get_ref pattern_force); + let meths = ref Meths.empty in + let vars = ref Vars.empty in + let pv = !pattern_variables in + pattern_variables := []; + let (val_env, met_env, par_env) = + List.fold_right + (fun (id, ty, _name, loc, as_var) (val_env, met_env, par_env) -> + (Env.add_value id {val_type = ty; + val_kind = Val_unbound; + val_attributes = []; + Types.val_loc = loc; + } val_env, + Env.add_value id {val_type = ty; + val_kind = Val_self (meths, vars, cl_num, privty); + val_attributes = []; + Types.val_loc = loc; + } + ~check:(fun s -> if as_var then Warnings.Unused_var s + else Warnings.Unused_var_strict s) + met_env, + Env.add_value id {val_type = ty; val_kind = Val_unbound; + val_attributes = []; + Types.val_loc = loc; + } par_env)) + pv (val_env, met_env, par_env) + in + (pat, meths, vars, val_env, met_env, par_env) + +let delayed_checks = ref [] +let reset_delayed_checks () = delayed_checks := [] +let add_delayed_check f = + delayed_checks := (f, Warnings.backup ()) :: !delayed_checks + +let force_delayed_checks () = + (* checks may change type levels *) + let snap = Btype.snapshot () in + let w_old = Warnings.backup () in + List.iter + (fun (f, w) -> Warnings.restore w; f ()) + (List.rev !delayed_checks); + Warnings.restore w_old; + reset_delayed_checks (); + Btype.backtrack snap + +let rec final_subexpression sexp = + match sexp.pexp_desc with + Pexp_let (_, _, e) + | Pexp_sequence (_, e) + | Pexp_try (e, _) + | Pexp_ifthenelse (_, e, _) + | Pexp_match (_, {pc_rhs=e} :: _) + -> final_subexpression e + | _ -> sexp + +(* Generalization criterion for expressions *) + +let rec is_nonexpansive exp = + match exp.exp_desc with + Texp_ident(_,_,_) -> true + | Texp_constant _ -> true + | Texp_let(_rec_flag, pat_exp_list, body) -> + List.for_all (fun vb -> is_nonexpansive vb.vb_expr) pat_exp_list && + is_nonexpansive body + | Texp_function _ -> true + | Texp_apply(e, (_,None)::el) -> + 
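`type_self_pattern` wraps the binder written after `object` in the internal `selfpat-*` aliases so that method bodies can refer to the object itself while instance variables keep their `Val_ivar`/`Val_self` kinds. At the surface level this is just the optional self binder:

```ocaml
class counter = object (self)
  val mutable n = 0
  method bump = n <- n + 1
  method bump_twice = self#bump; self#bump
  method value = n
end
```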
is_nonexpansive e && List.for_all is_nonexpansive_opt (List.map snd el) + | Texp_match(e, cases, [], _) -> + is_nonexpansive e && + List.for_all + (fun {c_lhs = _; c_guard; c_rhs} -> + is_nonexpansive_opt c_guard && is_nonexpansive c_rhs + ) cases + | Texp_tuple el -> + List.for_all is_nonexpansive el + | Texp_construct( _, _, el) -> + List.for_all is_nonexpansive el + | Texp_variant(_, arg) -> is_nonexpansive_opt arg + | Texp_record { fields; extended_expression } -> + Array.for_all + (fun (lbl, definition) -> + match definition with + | Overridden (_, exp) -> + lbl.lbl_mut = Immutable && is_nonexpansive exp + | Kept _ -> true) + fields + && is_nonexpansive_opt extended_expression + | Texp_field(exp, _, _) -> is_nonexpansive exp + | Texp_array [] -> true + | Texp_ifthenelse(_cond, ifso, ifnot) -> + is_nonexpansive ifso && is_nonexpansive_opt ifnot + | Texp_sequence (_e1, e2) -> is_nonexpansive e2 (* PR#4354 *) + | Texp_new (_, _, cl_decl) when Ctype.class_type_arity cl_decl.cty_type > 0 -> + true + (* Note: nonexpansive only means no _observable_ side effects *) + | Texp_lazy e -> is_nonexpansive e + | Texp_object ({cstr_fields=fields; cstr_type = { csig_vars=vars}}, _) -> + let count = ref 0 in + List.for_all + (fun field -> match field.cf_desc with + Tcf_method _ -> true + | Tcf_val (_, _, _, Tcfk_concrete (_, e), _) -> + incr count; is_nonexpansive e + | Tcf_val (_, _, _, Tcfk_virtual _, _) -> + incr count; true + | Tcf_initializer e -> is_nonexpansive e + | Tcf_constraint _ -> true + | Tcf_inherit _ -> false + | Tcf_attribute _ -> true) + fields && + Vars.fold (fun _ (mut,_,_) b -> decr count; b && mut = Immutable) + vars true && + !count = 0 + | Texp_letmodule (_, _, mexp, e) -> + is_nonexpansive_mod mexp && is_nonexpansive e + | Texp_pack mexp -> + is_nonexpansive_mod mexp + (* Computations which raise exceptions are nonexpansive, since (raise e) is equivalent + to (raise e; diverge), and a nonexpansive "diverge" can be produced using lazy values + or the relaxed value restriction. 
See GPR#1142 *) + | Texp_assert exp -> + is_nonexpansive exp + | Texp_apply ( + { exp_desc = Texp_ident (_, _, {val_kind = + Val_prim {Primitive.prim_name = "%raise"}}) }, + [Nolabel, Some e]) -> + is_nonexpansive e + | _ -> false + +and is_nonexpansive_mod mexp = + match mexp.mod_desc with + | Tmod_ident _ -> true + | Tmod_functor _ -> true + | Tmod_unpack (e, _) -> is_nonexpansive e + | Tmod_constraint (m, _, _, _) -> is_nonexpansive_mod m + | Tmod_structure str -> + List.for_all + (fun item -> match item.str_desc with + | Tstr_eval _ | Tstr_primitive _ | Tstr_type _ + | Tstr_modtype _ | Tstr_open _ | Tstr_class_type _ -> true + | Tstr_value (_, pat_exp_list) -> + List.for_all (fun vb -> is_nonexpansive vb.vb_expr) pat_exp_list + | Tstr_module {mb_expr=m;_} + | Tstr_include {incl_mod=m;_} -> is_nonexpansive_mod m + | Tstr_recmodule id_mod_list -> + List.for_all (fun {mb_expr=m;_} -> is_nonexpansive_mod m) + id_mod_list + | Tstr_exception {ext_kind = Text_decl _} -> + false (* true would be unsound *) + | Tstr_exception {ext_kind = Text_rebind _} -> true + | Tstr_typext te -> + List.for_all + (function {ext_kind = Text_decl _} -> false + | {ext_kind = Text_rebind _} -> true) + te.tyext_constructors + | Tstr_class _ -> false (* could be more precise *) + | Tstr_attribute _ -> true + ) + str.str_items + | Tmod_apply _ -> false + +and is_nonexpansive_opt = function + None -> true + | Some e -> is_nonexpansive e + +module Env' = Env +module Rec_context = +struct + type access = + Dereferenced + (** [Dereferenced] indicates that the value (not just the address) of a + variable is accessed *) + + | Guarded + (** [Guarded] indicates that the address of a variable is used in a + guarded context, i.e. under a constructor. A variable that is + dereferenced within a function body or lazy context is also considered + guarded. *) + + | Unguarded + (** [Unguarded] indicates that the address of a variable is used in an + unguarded context, i.e. not under a constructor. *) + + (** [guard] represents guarded contexts such as [C -] and [{l = -}] *) + let guard : access -> access = function + | Dereferenced -> Dereferenced + | Guarded -> Guarded + | Unguarded -> Guarded + + (** [inspect] represents elimination contexts such as [match - with cases], + [e -] and [- e] *) + let inspect : access -> access = function + | Dereferenced -> Dereferenced + | Guarded -> Dereferenced + | Unguarded -> Dereferenced + + (** [delay] represents contexts that delay evaluation such as [fun p -> -] + or [lazy -] *) + let delay : access -> access = function + | Dereferenced -> Guarded + | Guarded -> Guarded + | Unguarded -> Guarded + + module Use : + sig + type t + val guard : t -> t + (** An expression appears in a guarded context *) + + val discard : t -> t + (** The address of a subexpression is not used, but may be bound *) + + val inspect : t -> t + (** The value of a subexpression is inspected with match, application, etc. 
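`is_nonexpansive` above is the syntactic test behind the (relaxed) value restriction: only bindings whose right-hand side is non-expansive (identifiers, functions, constructors and immutable records of non-expansive parts, `lazy`, `assert`, a bare raise, ...) are generalized, while anything that may create mutable state keeps weak type variables. Sketch:

```ocaml
let id = fun x -> x   (* nonexpansive: generalized to 'a -> 'a *)
let pair = (id, [])   (* built from nonexpansive parts: still polymorphic *)
let cache = ref []    (* expansive: a weak type such as '_weak1 list ref *)
```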
*) + + val delay : t -> t + (** An expression appears under 'fun p ->' or 'lazy' *) + + val join : t -> t -> t + (** Combine the access information of two expressions *) + + val single : Ident.t -> access -> t + (** Combine the access information of two expressions *) + + val empty : t + (** No variables are accessed in an expression; it might be a + constant or a global identifier *) + + val unguarded : t -> Ident.t list + (** The list of identifiers that are used in an unguarded context *) + + val dependent : t -> Ident.t list + (** The list of all used identifiers *) + end = + struct + module M = Map.Make(Ident) + + (** A "t" maps each rec-bound variable to an access status *) + type t = access M.t + + let map f tbl = M.map f tbl + let guard t = map guard t + let inspect t = map inspect t + let delay t = map delay t + let discard = guard + + let prec x y = + match x, y with + | Dereferenced, _ + | _, Dereferenced -> Dereferenced + | Unguarded, _ + | _, Unguarded -> Unguarded + | _ -> Guarded + + let join x y = + M.fold + (fun id v tbl -> + let v' = try M.find id tbl with Not_found -> Guarded in + M.add id (prec v v') tbl) + x y + + let single id access = M.add id access M.empty + + let empty = M.empty + + let list_matching p t = + let r = ref [] in + M.iter (fun id v -> if p v then r := id :: !r) t; + !r + + let unguarded = + list_matching (function Unguarded | Dereferenced -> true | _ -> false) + + let dependent = + list_matching (function _ -> true) + end + + module Env = + struct + (* A typing environment maps identifiers to types *) + type env = Use.t Ident.tbl + + let empty = Ident.empty + + let join x y = + let r = + Ident.fold_all + (fun id v tbl -> + let v' = try Ident.find_same id tbl with Not_found -> Use.empty in + Ident.add id (Use.join v v') tbl) + x + y + in + r + end +end + +let rec pattern_variables : Typedtree.pattern -> Ident.t list = + fun pat -> match pat.pat_desc with + | Tpat_any -> [] + | Tpat_var (id, _) -> [id] + | Tpat_alias (pat, id, _) -> id :: pattern_variables pat + | Tpat_constant _ -> [] + | Tpat_tuple pats -> List.concat (List.map pattern_variables pats) + | Tpat_construct (_, _, pats) -> + List.concat (List.map pattern_variables pats) + | Tpat_variant (_, Some pat, _) -> pattern_variables pat + | Tpat_variant (_, None, _) -> [] + | Tpat_record (fields, _) -> + List.concat (List.map (fun (_,_,p) -> pattern_variables p) fields) + | Tpat_array pats -> + List.concat (List.map pattern_variables pats) + | Tpat_or (l,r,_) -> + pattern_variables l @ pattern_variables r + | Tpat_lazy p -> + pattern_variables p + +module Rec_check = +struct + open Rec_context + + let build_unguarded_env : Ident.t list -> Env.env = fun idlist -> + List.fold_left + (fun env id -> Ident.add id (Use.single id Unguarded) env) + Env.empty + idlist + + let is_ref : Types.value_description -> bool = function + | { Types.val_kind = + Types.Val_prim { Primitive.prim_name = "%makemutable"; + prim_arity = 1 } } -> + true + | _ -> false + + let scrape env ty = + (Ctype.repr (Ctype.expand_head_opt env (Ctype.correct_levels ty))).desc + + let array_element_kind env ty = + match scrape env ty with + | Tvar _ | Tunivar _ -> + `Pgenarray + | Tconstr(p, _, _) -> + if Path.same p Predef.path_int || Path.same p Predef.path_char then + `Pintarray + else if Path.same p Predef.path_float then + `Pfloatarray + else if Path.same p Predef.path_string + || Path.same p Predef.path_array + || Path.same p Predef.path_nativeint + || Path.same p Predef.path_int32 + || Path.same p Predef.path_int64 then + 
`Paddrarray + else begin + try + match Env'.find_type p env with + {type_kind = Type_abstract} -> + `Pgenarray + | {type_kind = Type_variant cstrs} + when List.for_all (fun c -> c.Types.cd_args = Types.Cstr_tuple []) + cstrs -> + `Pintarray + | {type_kind = _} -> + `Paddrarray + with Not_found -> + (* This can happen due to e.g. missing -I options, + causing some .cmi files to be unavailable. + Maybe we should emit a warning. *) + `Pgenarray + end + | _ -> + `Paddrarray + + let array_type_kind env ty = + match scrape env ty with + | Tconstr(p, [elt_ty], _) | Tpoly({desc = Tconstr(p, [elt_ty], _)}, _) + when Path.same p Predef.path_array -> + array_element_kind env elt_ty + | _ -> + (* This can happen with e.g. Obj.field *) + `Pgenarray + + let array_kind exp = array_type_kind exp.exp_env exp.exp_type + + let has_concrete_element_type : Typedtree.expression -> bool = + fun e -> array_kind e <> `Pgenarray + + type sd = Static | Dynamic + + let rec classify_expression : Typedtree.expression -> sd = + fun exp -> match exp.exp_desc with + | Texp_let (_, _, e) + | Texp_letmodule (_, _, _, e) + | Texp_sequence (_, e) + | Texp_letexception (_, e) -> classify_expression e + | Texp_ident _ + | Texp_for _ + | Texp_constant _ + | Texp_new _ + | Texp_instvar _ + | Texp_tuple _ + | Texp_array _ + | Texp_construct _ + | Texp_variant _ + | Texp_record _ + | Texp_setfield _ + | Texp_while _ + | Texp_setinstvar _ + | Texp_pack _ + | Texp_object _ + | Texp_function _ + | Texp_lazy _ + | Texp_unreachable + | Texp_extension_constructor _ -> Static + | Texp_apply ({exp_desc = Texp_ident (_, _, vd)}, _) + when is_ref vd -> Static + | Texp_apply _ + | Texp_match _ + | Texp_ifthenelse _ + | Texp_send _ + | Texp_field _ + | Texp_assert _ + | Texp_try _ + | Texp_override _ -> Dynamic + + let rec expression : Env.env -> Typedtree.expression -> Use.t = + fun env exp -> match exp.exp_desc with + | Texp_ident (pth, _, _) -> + (path env pth) + | Texp_let (rec_flag, bindings, body) -> + let env', ty = value_bindings rec_flag env bindings in + (* Here and in other binding constructs 'discard' is used in a + similar way to the way it's used in sequence: uses are + propagated, but unguarded access are not. 
*) + Use.join (Use.discard ty) (expression (Env.join env env') body) + | Texp_letmodule (x, _, m, e) -> + let ty = modexp env m in + Use.join (Use.discard ty) (expression (Ident.add x ty env) e) + | Texp_match (e, val_cases, exn_cases, _) -> + let t = expression env e in + let exn_case env {Typedtree.c_rhs} = expression env c_rhs in + let cs = list (case ~scrutinee:t) env val_cases + and es = list exn_case env exn_cases in + Use.(join cs es) + | Texp_for (_, _, e1, e2, _, e3) -> + Use.(join + (join + (inspect (expression env e1)) + (inspect (expression env e2))) + (* The body is evaluated, but not used, and not available + for inclusion in another value *) + (discard (expression env e3))) + + | Texp_constant _ -> + Use.empty + | Texp_new (pth, _, _) -> + Use.inspect (path env pth) + | Texp_instvar _ -> + Use.empty + | Texp_apply ({exp_desc = Texp_ident (_, _, vd)}, [_, Some arg]) + when is_ref vd -> + Use.guard (expression env arg) + | Texp_apply (e, args) -> + let arg env (_, eo) = option expression env eo in + Use.(join + (inspect (expression env e)) + (inspect (list arg env args))) + | Texp_tuple exprs -> + Use.guard (list expression env exprs) + | Texp_array exprs when array_kind exp = `Pfloatarray -> + Use.inspect (list expression env exprs) + | Texp_array exprs when has_concrete_element_type exp -> + Use.guard (list expression env exprs) + | Texp_array exprs -> + (* This is counted as a use, because constructing a generic array + involves inspecting the elements (PR#6939). *) + Use.inspect (list expression env exprs) + | Texp_construct (_, desc, exprs) -> + let access_constructor = + match desc.cstr_tag with + | Cstr_extension (pth, _) -> Use.inspect (path env pth) + | _ -> Use.empty + in + let use = match desc.cstr_tag with + | Cstr_unboxed -> (fun x -> x) + | Cstr_constant _ | Cstr_block _ | Cstr_extension _ -> Use.guard + in + Use.join access_constructor (use (list expression env exprs)) + | Texp_variant (_, eo) -> + Use.guard (option expression env eo) + | Texp_record { fields = es; extended_expression = eo; + representation = rep } -> + let use = match rep with + | Record_float -> Use.inspect + | Record_unboxed _ -> (fun x -> x) + | Record_regular | Record_inlined _ + | Record_extension -> Use.guard + in + let field env = function + _, Kept _ -> Use.empty + | _, Overridden (_, e) -> expression env e + in + Use.join + (use (array field env es)) + (option expression env eo) + | Texp_ifthenelse (cond, ifso, ifnot) -> + Use.(join (inspect (expression env cond)) + (join + (expression env ifso) + (option expression env ifnot))) + | Texp_setfield (e1, _, _, e2) -> + Use.(join (inspect (expression env e1)) + (inspect (expression env e2))) + | Texp_sequence (e1, e2) -> + Use.(join (discard (expression env e1)) + (expression env e2)) + | Texp_while (e1, e2) -> + Use.(join (inspect (expression env e1)) + (discard (expression env e2))) + | Texp_send (e1, _, eo) -> + Use.(join (inspect (expression env e1)) + (inspect (option expression env eo))) + | Texp_field (e, _, _) -> + Use.(inspect (expression env e)) + | Texp_setinstvar (_,_,_,e) -> + Use.(inspect (expression env e)) + | Texp_letexception (_, e) -> + expression env e + | Texp_assert e -> + Use.inspect (expression env e) + | Texp_pack m -> + modexp env m + | Texp_object (clsstrct, _) -> + class_structure env clsstrct + | Texp_try (e, cases) -> + (* This is more permissive than the old check. 
*) + let case env {Typedtree.c_rhs} = expression env c_rhs in + Use.join (expression env e) + (list case env cases) + | Texp_override (_, fields) -> + let field env (_, _, e) = expression env e in + Use.inspect (list field env fields) + | Texp_function { cases } -> + Use.delay (list (case ~scrutinee:Use.empty) env cases) + | Texp_lazy e -> + begin match Typeopt.classify_lazy_argument e with + | `Constant_or_function + | `Identifier _ + | `Float -> + expression env e + | `Other -> + Use.delay (expression env e) + end + | Texp_unreachable -> + Use.empty + | Texp_extension_constructor _ -> + Use.empty + and option : 'a. (Env.env -> 'a -> Use.t) -> Env.env -> 'a option -> Use.t = + fun f env -> Misc.Stdlib.Option.value_default (f env) ~default:Use.empty + and list : 'a. (Env.env -> 'a -> Use.t) -> Env.env -> 'a list -> Use.t = + fun f env -> + List.fold_left (fun typ item -> Use.join (f env item) typ) Use.empty + and array : 'a. (Env.env -> 'a -> Use.t) -> Env.env -> 'a array -> Use.t = + fun f env -> + Array.fold_left (fun typ item -> Use.join (f env item) typ) Use.empty + and class_structure : Env.env -> Typedtree.class_structure -> Use.t = + fun env cs -> Use.(inspect (list class_field env cs.cstr_fields)) + and class_field : Env.env -> Typedtree.class_field -> Use.t = + fun env cf -> match cf.cf_desc with + | Tcf_inherit (_, ce, _super, _inh_vars, _inh_meths) -> + Use.inspect (class_expr env ce) + | Tcf_val (_lab, _mut, _, cfk, _) -> + class_field_kind env cfk + | Tcf_method (_, _, cfk) -> + class_field_kind env cfk + | Tcf_constraint _ -> + Use.empty + | Tcf_initializer e -> + Use.inspect (expression env e) + | Tcf_attribute _ -> + Use.empty + and class_field_kind : Env.env -> Typedtree.class_field_kind -> Use.t = + fun env cfk -> match cfk with + | Tcfk_virtual _ -> + Use.empty + | Tcfk_concrete (_, e) -> + Use.inspect (expression env e) + and modexp : Env.env -> Typedtree.module_expr -> Use.t = + fun env m -> match m.mod_desc with + | Tmod_ident (pth, _) -> + (path env pth) + | Tmod_structure s -> + structure env s + | Tmod_functor (_, _, _, e) -> + Use.delay (modexp env e) + | Tmod_apply (f, p, _) -> + Use.(join + (inspect (modexp env f)) + (inspect (modexp env p))) + | Tmod_constraint (m, _, _, Tcoerce_none) -> + modexp env m + | Tmod_constraint (m, _, _, _) -> + Use.inspect (modexp env m) + | Tmod_unpack (e, _) -> + expression env e + and path : Env.env -> Path.t -> Use.t = + fun env pth -> match pth with + | Path.Pident x -> + (try Ident.find_same x env with Not_found -> Use.empty) + | Path.Pdot (t, _, _) -> + Use.inspect (path env t) + | Path.Papply (f, p) -> + Use.(inspect (join (path env f) (path env p))) + and structure : Env.env -> Typedtree.structure -> Use.t = + fun env s -> + let _, ty = + List.fold_left + (fun (env, ty) item -> + let env', ty' = structure_item env item in + Env.join env env', Use.join ty ty') + (env, Use.empty) + s.str_items + in + Use.guard ty + and structure_item : Env.env -> Typedtree.structure_item -> Env.env * Use.t = + fun env s -> match s.str_desc with + | Tstr_eval (e, _) -> + Env.empty, expression env e + | Tstr_value (rec_flag, valbinds) -> + value_bindings rec_flag env valbinds + | Tstr_module {mb_id; mb_expr} -> + let ty = modexp env mb_expr in + Ident.add mb_id ty Env.empty, ty + | Tstr_recmodule mbs -> + let modbind env {mb_expr} = modexp env mb_expr in + (* Over-approximate: treat any access as a use *) + Env.empty, Use.inspect (list modbind env mbs) + | Tstr_primitive _ -> + Env.empty, Use.empty + | Tstr_type _ -> + Env.empty, Use.empty + | 
Tstr_typext _ -> + Env.empty, Use.empty + | Tstr_exception _ -> + Env.empty, Use.empty + | Tstr_modtype _ -> + Env.empty, Use.empty + | Tstr_open _ -> + Env.empty, Use.empty + | Tstr_class classes -> + (* Any occurrence in a class definition is counted as a use, + so there's no need to add anything to the environment. *) + let cls env ({ci_expr=ce}, _) = class_expr env ce in + Env.empty, Use.inspect (list cls env classes) + | Tstr_class_type _ -> + Env.empty, Use.empty + | Tstr_include inc -> + (* This is a kind of projection. There's no need to add + anything to the environment because everything is used in + the type component already *) + Env.empty, Use.inspect (modexp env inc.incl_mod) + | Tstr_attribute _ -> + Env.empty, Use.empty + and class_expr : Env.env -> Typedtree.class_expr -> Use.t = + fun env ce -> match ce.cl_desc with + | Tcl_ident (pth, _, _) -> + Use.inspect (path env pth) + | Tcl_structure cs -> + class_structure env cs + | Tcl_fun (_, _, args, ce, _) -> + let arg env (_, _, e) = expression env e in + Use.inspect (Use.join (list arg env args) + (class_expr env ce)) + | Tcl_apply (ce, args) -> + let arg env (_, eo) = option expression env eo in + Use.inspect (Use.join (class_expr env ce) + (list arg env args)) + | Tcl_let (rec_flag, valbinds, _, ce) -> + let _, ty = value_bindings rec_flag env valbinds in + Use.(inspect (join ty (class_expr env ce))) + | Tcl_constraint (ce, _, _, _, _) -> + class_expr env ce + | Tcl_open (_, _, _, _, ce) -> + class_expr env ce + and case : Env.env -> Typedtree.case -> scrutinee:Use.t -> Use.t = + fun env { Typedtree.c_lhs; c_guard; c_rhs } ~scrutinee:ty -> + let ty = + if is_destructuring_pattern c_lhs then Use.inspect ty + else Use.discard ty (* as in 'let' *) + in + let vars = pattern_variables c_lhs in + let env = + List.fold_left + (fun env id -> Ident.add id ty env) + env + vars + in + Use.(join ty + (join (expression env c_rhs) + (inspect (option expression env c_guard)))) + and value_bindings : rec_flag -> Env.env -> Typedtree.value_binding list -> Env.env * Use.t = + fun rec_flag env bindings -> + match rec_flag with + | Recursive -> + (* Approximation: + let rec y = + let rec x1 = e1 + and x2 = e2 + in e + treated as + let rec y = + let rec x = (e1, e2)[x1:=fst x, x2:=snd x] in + e[x1:=fst x, x2:=snd x] + Further, use the fact that x1,x2 cannot occur unguarded in e1, e2 + to avoid recursive trickiness. 
+ *) + let ids, ty = + List.fold_left + (fun (pats, tys) {vb_pat=p; vb_expr=e} -> + (pattern_variables p @ pats, + Use.join (expression env e) tys)) + ([], Use.empty) + bindings + in + (List.fold_left (fun (env : Env.env) (id : Ident.t) -> + Ident.add id ty env) Env.empty ids, + ty) + | Nonrecursive -> + List.fold_left + (fun (env2, ty) binding -> + let env', ty' = value_binding env binding in + (Env.join env2 env', Use.join ty ty')) + (Env.empty, Use.empty) + bindings + and value_binding : Env.env -> Typedtree.value_binding -> Env.env * Use.t = + (* NB: returns new environment only *) + fun env { vb_pat; vb_expr } -> + let vars = pattern_variables vb_pat in + let ty = expression env vb_expr in + let ty = if is_destructuring_pattern vb_pat then Use.inspect ty else ty in + (List.fold_left + (fun env id -> Ident.add id ty env) + Env.empty + vars, + ty) + and is_destructuring_pattern : Typedtree.pattern -> bool = + fun pat -> match pat.pat_desc with + | Tpat_any -> false + | Tpat_var (_, _) -> false + | Tpat_alias (pat, _, _) -> is_destructuring_pattern pat + | Tpat_constant _ -> true + | Tpat_tuple _ -> true + | Tpat_construct (_, _, _) -> true + | Tpat_variant _ -> true + | Tpat_record (_, _) -> true + | Tpat_array _ -> true + | Tpat_or (l,r,_) -> is_destructuring_pattern l || is_destructuring_pattern r + | Tpat_lazy _ -> true + + let check_recursive_expression env idlist expr = + let ty = expression (build_unguarded_env idlist) expr in + match Use.unguarded ty, Use.dependent ty, classify_expression expr with + | _ :: _, _, _ (* The expression inspects rec-bound variables *) + | _, _ :: _, Dynamic -> (* The expression depends on rec-bound variables + and its size is unknown *) + raise(Error(expr.exp_loc, env, Illegal_letrec_expr)) + | [], _, Static (* The expression has known size *) + | [], [], Dynamic -> (* The expression has unknown size, + but does not depend on rec-bound variables *) + () + let check_class_expr env idlist ce = + let rec class_expr : Env.env -> Typedtree.class_expr -> Use.t = + fun env ce -> match ce.cl_desc with + | Tcl_ident (_, _, _) -> Use.empty + | Tcl_structure _ -> Use.empty + | Tcl_fun (_, _, _, _, _) -> Use.empty + | Tcl_apply (_, _) -> Use.empty + | Tcl_let (rec_flag, valbinds, _, ce) -> + let _, ty = value_bindings rec_flag env valbinds in + Use.join ty (class_expr env ce) + | Tcl_constraint (ce, _, _, _, _) -> + class_expr env ce + | Tcl_open (_, _, _, _, ce) -> + class_expr env ce + in + match Use.unguarded (class_expr (build_unguarded_env idlist) ce) with + | [] -> () + | _ :: _ -> raise(Error(ce.cl_loc, env, Illegal_class_expr)) +end + +let check_recursive_bindings env valbinds = + let ids = List.concat + (List.map (fun b -> pattern_variables b.vb_pat) valbinds) in + List.iter + (fun {vb_expr} -> + Rec_check.check_recursive_expression env ids vb_expr) + valbinds + +let check_recursive_class_bindings env ids exprs = + List.iter + (fun expr -> + Rec_check.check_class_expr env ids expr) + exprs + +(* Approximate the type of an expression, for better recursion *) + +let rec approx_type env sty = + match sty.ptyp_desc with + Ptyp_arrow (p, _, sty) -> + let ty1 = if is_optional p then type_option (newvar ()) else newvar () in + newty (Tarrow (p, ty1, approx_type env sty, Cok)) + | Ptyp_tuple args -> + newty (Ttuple (List.map (approx_type env) args)) + | Ptyp_constr (lid, ctl) -> + begin try + let path = Env.lookup_type lid.txt env in + let decl = Env.find_type path env in + if List.length ctl <> decl.type_arity then raise Not_found; + let tyl = List.map 
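`check_recursive_expression` combines the access analysis (unguarded and dependent uses) with `classify_expression` (statically sized vs. dynamically sized right-hand sides): a `let rec` binding is rejected if it inspects a rec-bound variable, or if it depends on one while its own size is not statically known. An annotated sketch of the intended verdicts:

```ocaml
(* Accepted: the recursive occurrence is guarded under a constructor and
   the right-hand side has a statically known shape. *)
let rec ones = 1 :: ones

type node = { label : string; mutable next : node }
let rec start = { label = "start"; next = start }

(* Accepted: the occurrence is delayed under a function abstraction. *)
let rec forever () = forever ()

(* Rejected: [fibs] is passed to an application, i.e. dereferenced before
   the binding is complete. *)
(* let rec fibs = 0 :: 1 :: List.map2 (+) fibs (List.tl fibs) *)
```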
(approx_type env) ctl in + newconstr path tyl + with Not_found -> newvar () + end + | Ptyp_poly (_, sty) -> + approx_type env sty + | _ -> newvar () + +let rec type_approx env sexp = + match sexp.pexp_desc with + Pexp_let (_, _, e) -> type_approx env e + | Pexp_fun (p, _, _, e) -> + let ty = if is_optional p then type_option (newvar ()) else newvar () in + newty (Tarrow(p, ty, type_approx env e, Cok)) + | Pexp_function ({pc_rhs=e}::_) -> + newty (Tarrow(Nolabel, newvar (), type_approx env e, Cok)) + | Pexp_match (_, {pc_rhs=e}::_) -> type_approx env e + | Pexp_try (e, _) -> type_approx env e + | Pexp_tuple l -> newty (Ttuple(List.map (type_approx env) l)) + | Pexp_ifthenelse (_,e,_) -> type_approx env e + | Pexp_sequence (_,e) -> type_approx env e + | Pexp_constraint (e, sty) -> + let ty = type_approx env e in + let ty1 = approx_type env sty in + begin try unify env ty ty1 with Unify trace -> + raise(Error(sexp.pexp_loc, env, Expr_type_clash trace)) + end; + ty1 + | Pexp_coerce (e, sty1, sty2) -> + let approx_ty_opt = function + | None -> newvar () + | Some sty -> approx_type env sty + in + let ty = type_approx env e + and ty1 = approx_ty_opt sty1 + and ty2 = approx_type env sty2 in + begin try unify env ty ty1 with Unify trace -> + raise(Error(sexp.pexp_loc, env, Expr_type_clash trace)) + end; + ty2 + | _ -> newvar () + +(* List labels in a function type, and whether return type is a variable *) +let rec list_labels_aux env visited ls ty_fun = + let ty = expand_head env ty_fun in + if List.memq ty visited then + List.rev ls, false + else match ty.desc with + Tarrow (l, _, ty_res, _) -> + list_labels_aux env (ty::visited) (l::ls) ty_res + | _ -> + List.rev ls, is_Tvar ty + +let list_labels env ty = + wrap_trace_gadt_instances env (list_labels_aux env [] []) ty + +(* Check that all univars are safe in a type *) +let check_univars env expans kind exp ty_expected vars = + if expans && not (is_nonexpansive exp) then + generalize_expansive env exp.exp_type; + (* need to expand twice? cf. Ctype.unify2 *) + let vars = List.map (expand_head env) vars in + let vars = List.map (expand_head env) vars in + let vars' = + List.filter + (fun t -> + let t = repr t in + generalize t; + match t.desc with + Tvar name when t.level = generic_level -> + log_type t; t.desc <- Tunivar name; true + | _ -> false) + vars in + if List.length vars = List.length vars' then () else + let ty = newgenty (Tpoly(repr exp.exp_type, vars')) + and ty_expected = repr ty_expected in + raise (Error (exp.exp_loc, env, + Less_general(kind, [ty, ty; ty_expected, ty_expected]))) + +(* Check that a type is not a function *) +let check_application_result env statement exp = + let loc = exp.exp_loc in + match (expand_head env exp.exp_type).desc with + | Tarrow _ -> + Location.prerr_warning exp.exp_loc Warnings.Partial_application + | Tvar _ -> () + | Tconstr (p, _, _) when Path.same p Predef.path_unit -> () + | _ -> + if statement then + Location.prerr_warning loc Warnings.Statement_type + +(* Check that a type is generalizable at some level *) +let generalizable level ty = + let rec check ty = + let ty = repr ty in + if ty.level < lowest_level then () else + if ty.level <= level then raise Exit else + (mark_type_node ty; iter_type_expr check ty) + in + try check ty; unmark_type ty; true + with Exit -> unmark_type ty; false + +(* Hack to allow coercion of self. Will clean-up later. *) +let self_coercion = ref ([] : (Path.t * Location.t list ref) list) + +(* Helpers for packaged modules. 
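`check_application_result` is what reports a partially applied function whose result is discarded in statement position (warning 5), falling back to the statement-type warning for other non-unit statements. For example:

```ocaml
let add x y = x + y

let () =
  add 1;                  (* warning 5: this function application is partial *)
  print_int (add 1 2)
```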
*) +let create_package_type loc env (p, l) = + let s = !Typetexp.transl_modtype_longident loc env p in + let fields = List.map (fun (name, ct) -> + name, Typetexp.transl_simple_type env false ct) l in + let ty = newty (Tpackage (s, + List.map fst l, + List.map (fun (_, cty) -> cty.ctyp_type) fields)) + in + (s, fields, ty) + + let wrap_unpacks sexp unpacks = + let open Ast_helper in + List.fold_left + (fun sexp (name, loc) -> + Exp.letmodule ~loc:sexp.pexp_loc ~attrs:[mknoloc "#modulepat",PStr []] + name + (Mod.unpack ~loc + (Exp.ident ~loc:name.loc (mkloc (Longident.Lident name.txt) + name.loc))) + sexp + ) + sexp unpacks + +(* Helpers for type_cases *) + +let contains_variant_either ty = + let rec loop ty = + let ty = repr ty in + if ty.level >= lowest_level then begin + mark_type_node ty; + match ty.desc with + Tvariant row -> + let row = row_repr row in + if not row.row_fixed then + List.iter + (fun (_,f) -> + match row_field_repr f with Reither _ -> raise Exit | _ -> ()) + row.row_fields; + iter_row loop row + | _ -> + iter_type_expr loop ty + end + in + try loop ty; unmark_type ty; false + with Exit -> unmark_type ty; true + +let iter_ppat f p = + match p.ppat_desc with + | Ppat_any | Ppat_var _ | Ppat_constant _ | Ppat_interval _ + | Ppat_extension _ + | Ppat_type _ | Ppat_unpack _ -> () + | Ppat_array pats -> List.iter f pats + | Ppat_or (p1,p2) -> f p1; f p2 + | Ppat_variant (_, arg) | Ppat_construct (_, arg) -> may f arg + | Ppat_tuple lst -> List.iter f lst + | Ppat_exception p | Ppat_alias (p,_) + | Ppat_open (_,p) + | Ppat_constraint (p,_) | Ppat_lazy p -> f p + | Ppat_record (args, _flag) -> List.iter (fun (_,p) -> f p) args + +let contains_polymorphic_variant p = + let rec loop p = + match p.ppat_desc with + Ppat_variant _ | Ppat_type _ -> raise Exit + | _ -> iter_ppat loop p + in + try loop p; false with Exit -> true + +let contains_gadt env p = + let rec loop env p = + match p.ppat_desc with + | Ppat_construct (lid, _) -> + begin try + let cstrs = Env.lookup_all_constructors lid.txt env in + List.iter (fun (cstr,_) -> if cstr.cstr_generalized then raise Exit) + cstrs + with Not_found -> () + end; iter_ppat (loop env) p + | Ppat_open (lid,sub_p) -> + let _, new_env = !type_open Asttypes.Override env p.ppat_loc lid in + loop new_env sub_p + | _ -> iter_ppat (loop env) p + in + try loop env p; false with Exit -> true + +let check_absent_variant env = + iter_pattern + (function {pat_desc = Tpat_variant (s, arg, row)} as pat -> + let row = row_repr !row in + if List.exists (fun (s',fi) -> s = s' && row_field_repr fi <> Rabsent) + row.row_fields + || not row.row_fixed && not (static_row row) (* same as Ctype.poly *) + then () else + let ty_arg = + match arg with None -> [] | Some p -> [correct_levels p.pat_type] in + let row' = {row_fields = [s, Reither(arg=None,ty_arg,true,ref None)]; + row_more = newvar (); row_bound = (); + row_closed = false; row_fixed = false; row_name = None} in + (* Should fail *) + unify_pat env {pat with pat_type = newty (Tvariant row')} + (correct_levels pat.pat_type) + | _ -> ()) + +(* Duplicate types of values in the environment *) +(* XXX Should we do something about global type variables too? *) + +let duplicate_ident_types caselist env = + let caselist = + List.filter (fun {pc_lhs} -> contains_gadt env pc_lhs) caselist in + Env.copy_types (all_idents_cases caselist) env + +(* Getting proper location of already typed expressions. + + Used to avoid confusing locations on type error messages in presence of + type constraints. 
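`create_package_type` and `wrap_unpacks` handle the expression side of first-class modules: `(module M : S)` packs a structure as a value of package type `(module S)`, and bindings of unpack patterns are rewritten into a `let module` wrapped around the body. Sketch:

```ocaml
module type S = sig val v : int end

(* Packing a structure as a value of type (module S)... *)
let pack n = (module struct let v = n end : S)

(* ...and unpacking it again with [let module]. *)
let unpack (m : (module S)) =
  let module M = (val m : S) in
  M.v
```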
+ For example: + + (* Before patch *) + # let x : string = (5 : int);; + ^ + (* After patch *) + # let x : string = (5 : int);; + ^^^^^^^^^ +*) +let proper_exp_loc exp = + let rec aux = function + | [] -> exp.exp_loc + | ((Texp_constraint _ | Texp_coerce _), loc, _) :: _ -> loc + | _ :: rest -> aux rest + in + aux exp.exp_extra + +(* To find reasonable names for let-bound and lambda-bound idents *) + +let rec name_pattern default = function + [] -> Ident.create default + | {c_lhs=p; _} :: rem -> + match p.pat_desc with + Tpat_var (id, _) -> id + | Tpat_alias(_, id, _) -> id + | _ -> name_pattern default rem + +(* Typing of expressions *) + +let unify_exp env exp expected_ty = + let loc = proper_exp_loc exp in + unify_exp_types loc env exp.exp_type expected_ty + +let rec type_exp ?recarg env sexp = + (* We now delegate everything to type_expect *) + type_expect ?recarg env sexp (newvar ()) + +(* Typing of an expression with an expected type. + This provide better error messages, and allows controlled + propagation of return type information. + In the principal case, [type_expected'] may be at generic_level. + *) + +and type_expect ?in_function ?recarg env sexp ty_expected = + let previous_saved_types = Cmt_format.get_saved_types () in + let exp = + Builtin_attributes.warning_scope sexp.pexp_attributes + (fun () -> + type_expect_ ?in_function ?recarg env sexp ty_expected + ) + in + Cmt_format.set_saved_types + (Cmt_format.Partial_expression exp :: previous_saved_types); + exp + +and type_expect_ ?in_function ?(recarg=Rejected) env sexp ty_expected = + let loc = sexp.pexp_loc in + (* Record the expression type before unifying it with the expected type *) + let rue exp = + unify_exp env (re exp) (instance env ty_expected); + exp + in + match sexp.pexp_desc with + | Pexp_ident lid -> + begin + let (path, desc) = Typetexp.find_value env lid.loc lid.txt in + if !Clflags.annotations then begin + let dloc = desc.Types.val_loc in + let annot = + if dloc.Location.loc_ghost then Annot.Iref_external + else Annot.Iref_internal dloc + in + let name = Path.name ~paren:Oprint.parenthesized_ident path in + Stypes.record (Stypes.An_ident (loc, name, annot)) + end; + let is_recarg = + match (repr desc.val_type).desc with + | Tconstr(p, _, _) -> Path.is_constructor_typath p + | _ -> false + in + + begin match is_recarg, recarg, (repr desc.val_type).desc with + | _, Allowed, _ + | true, Required, _ + | false, Rejected, _ + -> () + | true, Rejected, _ + | false, Required, (Tvar _ | Tconstr _) -> + raise (Error (loc, env, Inlined_record_escape)) + | false, Required, _ -> + () (* will fail later *) + end; + rue { + exp_desc = + begin match desc.val_kind with + Val_ivar (_, cl_num) -> + let (self_path, _) = + Env.lookup_value (Longident.Lident ("self-" ^ cl_num)) env + in + Texp_instvar(self_path, path, + match lid.txt with + Longident.Lident txt -> { txt; loc = lid.loc } + | _ -> assert false) + | Val_self (_, _, cl_num, _) -> + let (path, _) = + Env.lookup_value (Longident.Lident ("self-" ^ cl_num)) env + in + Texp_ident(path, lid, desc) + | Val_unbound -> + raise(Error(loc, env, Masked_instance_variable lid.txt)) + (*| Val_prim _ -> + let p = Env.normalize_path (Some loc) env path in + Env.add_required_global (Path.head p); + Texp_ident(path, lid, desc)*) + | _ -> + Texp_ident(path, lid, desc) + end; + exp_loc = loc; exp_extra = []; + exp_type = instance env desc.val_type; + exp_attributes = sexp.pexp_attributes; + exp_env = env } + end + | Pexp_constant(Pconst_string (str, _) as cst) -> ( + let cst = 
constant_or_raise env loc cst in + (* Terrible hack for format strings *) + let ty_exp = expand_head env ty_expected in + let fmt6_path = + Path.(Pdot (Pident (Ident.create_persistent "CamlinternalFormatBasics"), + "format6", 0)) in + let is_format = match ty_exp.desc with + | Tconstr(path, _, _) when Path.same path fmt6_path -> + if !Clflags.principal && ty_exp.level <> generic_level then + Location.prerr_warning loc + (Warnings.Not_principal "this coercion to format6"); + true + | _ -> false + in + if is_format then + let format_parsetree = + { (type_format loc str env) with pexp_loc = sexp.pexp_loc } in + type_expect ?in_function env format_parsetree ty_expected + else + rue { + exp_desc = Texp_constant cst; + exp_loc = loc; exp_extra = []; + exp_type = instance_def Predef.type_string; + exp_attributes = sexp.pexp_attributes; + exp_env = env } + ) + | Pexp_constant cst -> + let cst = constant_or_raise env loc cst in + rue { + exp_desc = Texp_constant cst; + exp_loc = loc; exp_extra = []; + exp_type = type_constant cst; + exp_attributes = sexp.pexp_attributes; + exp_env = env } + | Pexp_let(Nonrecursive, + [{pvb_pat=spat; pvb_expr=sval; pvb_attributes=[]}], sbody) + when contains_gadt env spat -> + (* TODO: allow non-empty attributes? *) + type_expect ?in_function env + {sexp with + pexp_desc = Pexp_match (sval, [Ast_helper.Exp.case spat sbody])} + ty_expected + | Pexp_let(rec_flag, spat_sexp_list, sbody) -> + let scp = + match sexp.pexp_attributes, rec_flag with + | [{txt="#default"},_], _ -> None + | _, Recursive -> Some (Annot.Idef loc) + | _, Nonrecursive -> Some (Annot.Idef sbody.pexp_loc) + in + let (pat_exp_list, new_env, unpacks) = + type_let env rec_flag spat_sexp_list scp true in + let body = + type_expect new_env (wrap_unpacks sbody unpacks) ty_expected in + let () = + if rec_flag = Recursive then + check_recursive_bindings env pat_exp_list + in + re { + exp_desc = Texp_let(rec_flag, pat_exp_list, body); + exp_loc = loc; exp_extra = []; + exp_type = body.exp_type; + exp_attributes = sexp.pexp_attributes; + exp_env = env } + | Pexp_fun (l, Some default, spat, sbody) -> + assert(is_optional l); (* default allowed only with optional argument *) + let open Ast_helper in + let default_loc = default.pexp_loc in + let scases = [ + Exp.case + (Pat.construct ~loc:default_loc + (mknoloc (Longident.(Ldot (Lident "*predef*", "Some")))) + (Some (Pat.var ~loc:default_loc (mknoloc "*sth*")))) + (Exp.ident ~loc:default_loc (mknoloc (Longident.Lident "*sth*"))); + + Exp.case + (Pat.construct ~loc:default_loc + (mknoloc (Longident.(Ldot (Lident "*predef*", "None")))) + None) + default; + ] + in + let sloc = + { Location.loc_start = spat.ppat_loc.Location.loc_start; + loc_end = default_loc.Location.loc_end; + loc_ghost = true } + in + let smatch = + Exp.match_ ~loc:sloc + (Exp.ident ~loc (mknoloc (Longident.Lident "*opt*"))) + scases + in + let pat = Pat.var ~loc:sloc (mknoloc "*opt*") in + let body = + Exp.let_ ~loc Nonrecursive ~attrs:[mknoloc "#default",PStr []] + [Vb.mk spat smatch] sbody + in + type_function ?in_function loc sexp.pexp_attributes env ty_expected + l [Exp.case pat body] + | Pexp_fun (l, None, spat, sbody) -> + type_function ?in_function loc sexp.pexp_attributes env ty_expected + l [Ast_helper.Exp.case spat sbody] + | Pexp_function caselist -> + type_function ?in_function + loc sexp.pexp_attributes env ty_expected Nolabel caselist + | Pexp_apply(sfunct, sargs) -> + assert (sargs <> []); + begin_def (); (* one more level for non-returning functions *) + if !Clflags.principal 
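Illustrative aside (not part of the patch): the `Pexp_fun (l, Some default, ...)` branch above elaborates a defaulted optional argument into a `match` over the implicit option value before typing it. A minimal source-level sketch, with hypothetical names (`greet`, `greet_desugared`):

```ocaml
(* What the user writes: *)
let greet ?(name = "world") () = "hello, " ^ name

(* Roughly what the branch above builds before typing; the real code uses
   internal identifiers spelled *opt* and *sth* instead of [name] and [s]. *)
let greet_desugared ?name () =
  let name = match name with Some s -> s | None -> "world" in
  "hello, " ^ name
```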
then begin_def (); + let funct = type_exp env sfunct in + if !Clflags.principal then begin + end_def (); + generalize_structure funct.exp_type + end; + let rec lower_args seen ty_fun = + let ty = expand_head env ty_fun in + if List.memq ty seen then () else + match ty.desc with + Tarrow (_l, ty_arg, ty_fun, _com) -> + (try unify_var env (newvar()) ty_arg with Unify _ -> assert false); + lower_args (ty::seen) ty_fun + | _ -> () + in + let ty = instance env funct.exp_type in + end_def (); + wrap_trace_gadt_instances env (lower_args []) ty; + begin_def (); + let (args, ty_res) = type_application env funct sargs in + end_def (); + unify_var env (newvar()) funct.exp_type; + rue { + exp_desc = Texp_apply(funct, args); + exp_loc = loc; exp_extra = []; + exp_type = ty_res; + exp_attributes = sexp.pexp_attributes; + exp_env = env } + | Pexp_match(sarg, caselist) -> + begin_def (); + let arg = type_exp env sarg in + end_def (); + if not (is_nonexpansive arg) then generalize_expansive env arg.exp_type; + generalize arg.exp_type; + let rec split_cases vc ec = function + | [] -> List.rev vc, List.rev ec + | {pc_lhs = {ppat_desc=Ppat_exception p}} as c :: rest -> + split_cases vc ({c with pc_lhs = p} :: ec) rest + | c :: rest -> + split_cases (c :: vc) ec rest + in + let val_caselist, exn_caselist = split_cases [] [] caselist in + if val_caselist = [] && exn_caselist <> [] then + raise (Error (loc, env, No_value_clauses)); + (* Note: val_caselist = [] and exn_caselist = [], i.e. a fully + empty pattern matching can be generated by Camlp4 with its + revised syntax. Let's accept it for backward compatibility. *) + let val_cases, partial = + type_cases env arg.exp_type ty_expected true loc val_caselist in + let exn_cases, _ = + type_cases env Predef.type_exn ty_expected false loc exn_caselist in + re { + exp_desc = Texp_match(arg, val_cases, exn_cases, partial); + exp_loc = loc; exp_extra = []; + exp_type = instance env ty_expected; + exp_attributes = sexp.pexp_attributes; + exp_env = env } + | Pexp_try(sbody, caselist) -> + let body = type_expect env sbody ty_expected in + let cases, _ = + type_cases env Predef.type_exn ty_expected false loc caselist in + re { + exp_desc = Texp_try(body, cases); + exp_loc = loc; exp_extra = []; + exp_type = body.exp_type; + exp_attributes = sexp.pexp_attributes; + exp_env = env } + | Pexp_tuple sexpl -> + assert (List.length sexpl >= 2); + let subtypes = List.map (fun _ -> newgenvar ()) sexpl in + let to_unify = newgenty (Ttuple subtypes) in + unify_exp_types loc env to_unify ty_expected; + let expl = + List.map2 (fun body ty -> type_expect env body ty) sexpl subtypes + in + re { + exp_desc = Texp_tuple expl; + exp_loc = loc; exp_extra = []; + (* Keep sharing *) + exp_type = newty (Ttuple (List.map (fun e -> e.exp_type) expl)); + exp_attributes = sexp.pexp_attributes; + exp_env = env } + | Pexp_construct(lid, sarg) -> + type_construct env loc lid sarg ty_expected sexp.pexp_attributes + | Pexp_variant(l, sarg) -> + (* Keep sharing *) + let ty_expected0 = instance env ty_expected in + begin try match + sarg, expand_head env ty_expected, expand_head env ty_expected0 with + | Some sarg, {desc = Tvariant row}, {desc = Tvariant row0} -> + let row = row_repr row in + begin match row_field_repr (List.assoc l row.row_fields), + row_field_repr (List.assoc l row0.row_fields) with + Rpresent (Some ty), Rpresent (Some ty0) -> + let arg = type_argument env sarg ty ty0 in + re { exp_desc = Texp_variant(l, Some arg); + exp_loc = loc; exp_extra = []; + exp_type = ty_expected0; + 
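Illustrative aside (not part of the patch): `split_cases` above separates ordinary cases from `exception` cases, typing the former against the scrutinee type and the latter against `exn`; this is what makes the following everyday pattern typecheck (hypothetical function name):

```ocaml
(* Read one line, turning End_of_file into None. *)
let input_line_opt ic =
  match input_line ic with
  | line -> Some line                 (* value case: typed against string *)
  | exception End_of_file -> None     (* exception case: typed against exn *)
```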
exp_attributes = sexp.pexp_attributes; + exp_env = env } + | _ -> raise Not_found + end + | _ -> raise Not_found + with Not_found -> + let arg = may_map (type_exp env) sarg in + let arg_type = may_map (fun arg -> arg.exp_type) arg in + rue { + exp_desc = Texp_variant(l, arg); + exp_loc = loc; exp_extra = []; + exp_type= newty (Tvariant{row_fields = [l, Rpresent arg_type]; + row_more = newvar (); + row_bound = (); + row_closed = false; + row_fixed = false; + row_name = None}); + exp_attributes = sexp.pexp_attributes; + exp_env = env } + end + | Pexp_record(lid_sexp_list, opt_sexp) -> + assert (lid_sexp_list <> []); + let opt_exp = + match opt_sexp with + None -> None + | Some sexp -> + if !Clflags.principal then begin_def (); + let exp = type_exp ~recarg env sexp in + if !Clflags.principal then begin + end_def (); + generalize_structure exp.exp_type + end; + Some exp + in + let ty_record, opath = + let get_path ty = + try + let (p0, p,_) = extract_concrete_record env ty in + (* XXX level may be wrong *) + Some (p0, p, ty.level = generic_level || not !Clflags.principal) + with Not_found -> None + in + match get_path ty_expected with + None -> + begin match opt_exp with + None -> newvar (), None + | Some exp -> + match get_path exp.exp_type with + None -> newvar (), None + | Some (_, p', _) as op -> + let decl = Env.find_type p' env in + begin_def (); + let ty = + newconstr p' (instance_list env decl.type_params) in + end_def (); + generalize_structure ty; + ty, op + end + | op -> ty_expected, op + in + let closed = (opt_sexp = None) in + let lbl_exp_list = + wrap_disambiguate "This record expression is expected to have" ty_record + (type_label_a_list loc closed env + (fun e k -> k (type_label_exp true env loc ty_record e)) + opath lid_sexp_list) + (fun x -> x) + in + unify_exp_types loc env ty_record (instance env ty_expected); + + (* type_label_a_list returns a list of labels sorted by lbl_pos *) + (* note: check_duplicates would better be implemented in + type_label_a_list directly *) + let rec check_duplicates = function + | (_, lbl1, _) :: (_, lbl2, _) :: _ when lbl1.lbl_pos = lbl2.lbl_pos -> + raise(Error(loc, env, Label_multiply_defined lbl1.lbl_name)) + | _ :: rem -> + check_duplicates rem + | [] -> () + in + check_duplicates lbl_exp_list; + let opt_exp, label_definitions = + let (_lid, lbl, _lbl_exp) = List.hd lbl_exp_list in + let matching_label lbl = + List.find + (fun (_, lbl',_) -> lbl'.lbl_pos = lbl.lbl_pos) + lbl_exp_list + in + match opt_exp with + None -> + let label_definitions = + Array.map (fun lbl -> + match matching_label lbl with + | (lid, _lbl, lbl_exp) -> + Overridden (lid, lbl_exp) + | exception Not_found -> + let present_indices = + List.map (fun (_, lbl, _) -> lbl.lbl_pos) lbl_exp_list + in + let label_names = extract_label_names env ty_expected in + let rec missing_labels n = function + [] -> [] + | lbl :: rem -> + if List.mem n present_indices + then missing_labels (n + 1) rem + else lbl :: missing_labels (n + 1) rem + in + let missing = missing_labels 0 label_names in + raise(Error(loc, env, Label_missing missing))) + lbl.lbl_all + in + None, label_definitions + | Some exp -> + let ty_exp = instance env exp.exp_type in + let unify_kept lbl = + let _, ty_arg1, ty_res1 = instance_label false lbl in + unify_exp_types exp.exp_loc env ty_exp ty_res1; + match matching_label lbl with + | lid, _lbl, lbl_exp -> + (* do not connect result types for overridden labels *) + Overridden (lid, lbl_exp) + | exception Not_found -> begin + let _, ty_arg2, ty_res2 = instance_label 
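Illustrative aside (not part of the patch): the `Overridden`/`Kept` bookkeeping above corresponds to the two kinds of fields in a record expression with a base record, and `Label_missing` is what a closed literal with omitted fields produces. A small sketch with a hypothetical type:

```ocaml
type point = { x : int; y : int; z : int }

let origin = { x = 0; y = 0; z = 0 }   (* closed literal: every label required *)
let shifted = { origin with x = 1 }    (* x is Overridden; y and z are Kept *)
```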
false lbl in + unify env ty_arg1 ty_arg2; + unify env (instance env ty_expected) ty_res2; + Kept ty_arg1 + end + in + let label_definitions = Array.map unify_kept lbl.lbl_all in + Some {exp with exp_type = ty_exp}, label_definitions + in + let num_fields = + match lbl_exp_list with [] -> assert false + | (_, lbl,_)::_ -> Array.length lbl.lbl_all in + let opt_exp = + if opt_sexp <> None && List.length lid_sexp_list = num_fields then + (Location.prerr_warning loc Warnings.Useless_record_with; None) + else opt_exp + in + let label_descriptions, representation = + let (_, { lbl_all; lbl_repres }, _) = List.hd lbl_exp_list in + lbl_all, lbl_repres + in + let fields = + Array.map2 (fun descr def -> descr, def) + label_descriptions label_definitions + in + re { + exp_desc = Texp_record { + fields; representation; + extended_expression = opt_exp + }; + exp_loc = loc; exp_extra = []; + exp_type = instance env ty_expected; + exp_attributes = sexp.pexp_attributes; + exp_env = env } + | Pexp_field(srecord, lid) -> + let (record, label, _) = type_label_access env srecord lid in + let (_, ty_arg, ty_res) = instance_label false label in + unify_exp env record ty_res; + rue { + exp_desc = Texp_field(record, lid, label); + exp_loc = loc; exp_extra = []; + exp_type = ty_arg; + exp_attributes = sexp.pexp_attributes; + exp_env = env } + | Pexp_setfield(srecord, lid, snewval) -> + let (record, label, opath) = type_label_access env srecord lid in + let ty_record = if opath = None then newvar () else record.exp_type in + let (label_loc, label, newval) = + type_label_exp false env loc ty_record (lid, label, snewval) in + unify_exp env record ty_record; + if label.lbl_mut = Immutable then + raise(Error(loc, env, Label_not_mutable lid.txt)); + Builtin_attributes.check_deprecated_mutable lid.loc label.lbl_attributes + (Longident.last lid.txt); + rue { + exp_desc = Texp_setfield(record, label_loc, label, newval); + exp_loc = loc; exp_extra = []; + exp_type = instance_def Predef.type_unit; + exp_attributes = sexp.pexp_attributes; + exp_env = env } + | Pexp_array(sargl) -> + let ty = newgenvar() in + let to_unify = Predef.type_array ty in + unify_exp_types loc env to_unify ty_expected; + let argl = List.map (fun sarg -> type_expect env sarg ty) sargl in + re { + exp_desc = Texp_array argl; + exp_loc = loc; exp_extra = []; + exp_type = instance env ty_expected; + exp_attributes = sexp.pexp_attributes; + exp_env = env } + | Pexp_ifthenelse(scond, sifso, sifnot) -> + let cond = type_expect env scond Predef.type_bool in + begin match sifnot with + None -> + let ifso = type_expect env sifso Predef.type_unit in + rue { + exp_desc = Texp_ifthenelse(cond, ifso, None); + exp_loc = loc; exp_extra = []; + exp_type = ifso.exp_type; + exp_attributes = sexp.pexp_attributes; + exp_env = env } + | Some sifnot -> + let ifso = type_expect env sifso ty_expected in + let ifnot = type_expect env sifnot ty_expected in + (* Keep sharing *) + unify_exp env ifnot ifso.exp_type; + re { + exp_desc = Texp_ifthenelse(cond, ifso, Some ifnot); + exp_loc = loc; exp_extra = []; + exp_type = ifso.exp_type; + exp_attributes = sexp.pexp_attributes; + exp_env = env } + end + | Pexp_sequence(sexp1, sexp2) -> + let exp1 = type_statement env sexp1 in + let exp2 = type_expect env sexp2 ty_expected in + re { + exp_desc = Texp_sequence(exp1, exp2); + exp_loc = loc; exp_extra = []; + exp_type = exp2.exp_type; + exp_attributes = sexp.pexp_attributes; + exp_env = env } + | Pexp_while(scond, sbody) -> + let cond = type_expect env scond Predef.type_bool in + let 
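Illustrative aside (not part of the patch): the `Pexp_setfield` case above only accepts labels declared `mutable` (otherwise `Label_not_mutable`) and gives the whole assignment type `unit`. For example, with a hypothetical type:

```ocaml
type counter = { mutable hits : int; name : string }

let c = { hits = 0; name = "requests" }
let bump () = c.hits <- c.hits + 1   (* ok: hits is mutable, result type unit *)
(* c.name <- "other"   would be rejected with Label_not_mutable *)
```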
body = type_statement env sbody in + rue { + exp_desc = Texp_while(cond, body); + exp_loc = loc; exp_extra = []; + exp_type = instance_def Predef.type_unit; + exp_attributes = sexp.pexp_attributes; + exp_env = env } + | Pexp_for(param, slow, shigh, dir, sbody) -> + let low = type_expect env slow Predef.type_int in + let high = type_expect env shigh Predef.type_int in + let id, new_env = + match param.ppat_desc with + | Ppat_any -> Ident.create "_for", env + | Ppat_var {txt} -> + Env.enter_value txt {val_type = instance_def Predef.type_int; + val_attributes = []; + val_kind = Val_reg; Types.val_loc = loc; } env + ~check:(fun s -> Warnings.Unused_for_index s) + | _ -> + raise (Error (param.ppat_loc, env, Invalid_for_loop_index)) + in + let body = type_statement new_env sbody in + rue { + exp_desc = Texp_for(id, param, low, high, dir, body); + exp_loc = loc; exp_extra = []; + exp_type = instance_def Predef.type_unit; + exp_attributes = sexp.pexp_attributes; + exp_env = env } + | Pexp_constraint (sarg, sty) -> + let separate = true in (* always separate, 1% slowdown for lablgtk *) + if separate then begin_def (); + let cty = Typetexp.transl_simple_type env false sty in + let ty = cty.ctyp_type in + let (arg, ty') = + if separate then begin + end_def (); + generalize_structure ty; + (type_argument env sarg ty (instance env ty), instance env ty) + end else + (type_argument env sarg ty ty, ty) + in + rue { + exp_desc = arg.exp_desc; + exp_loc = arg.exp_loc; + exp_type = ty'; + exp_attributes = arg.exp_attributes; + exp_env = env; + exp_extra = + (Texp_constraint cty, loc, sexp.pexp_attributes) :: arg.exp_extra; + } + | Pexp_coerce(sarg, sty, sty') -> + let separate = true in (* always separate, 1% slowdown for lablgtk *) + (* Also see PR#7199 for a problem with the following: + let separate = !Clflags.principal || Env.has_local_constraints env in*) + let (arg, ty',cty,cty') = + match sty with + | None -> + let (cty', force) = + Typetexp.transl_simple_type_delayed env sty' + in + let ty' = cty'.ctyp_type in + if separate then begin_def (); + let arg = type_exp env sarg in + let gen = + if separate then begin + end_def (); + let tv = newvar () in + let gen = generalizable tv.level arg.exp_type in + (try unify_var env tv arg.exp_type with Unify trace -> + raise(Error(arg.exp_loc, env, Expr_type_clash trace))); + gen + end else true + in + begin match arg.exp_desc, !self_coercion, (repr ty').desc with + Texp_ident(_, _, {val_kind=Val_self _}), (path,r) :: _, + Tconstr(path',_,_) when Path.same path path' -> + (* prerr_endline "self coercion"; *) + r := loc :: !r; + force () + | _ when free_variables ~env arg.exp_type = [] + && free_variables ~env ty' = [] -> + if not gen && (* first try a single coercion *) + let snap = snapshot () in + let ty, _b = enlarge_type env ty' in + try + force (); Ctype.unify env arg.exp_type ty; true + with Unify _ -> + backtrack snap; false + then () + else begin try + let force' = subtype env arg.exp_type ty' in + force (); force' (); + if not gen && !Clflags.principal then + Location.prerr_warning loc + (Warnings.Not_principal "this ground coercion"); + with Subtype (tr1, tr2) -> + (* prerr_endline "coercion failed"; *) + raise(Error(loc, env, Not_subtype(tr1, tr2))) + end; + | _ -> + let ty, b = enlarge_type env ty' in + force (); + begin try Ctype.unify env arg.exp_type ty with Unify trace -> + raise(Error(sarg.pexp_loc, env, + Coercion_failure(ty', full_expand env ty', trace, b))) + end + end; + (arg, ty', None, cty') + | Some sty -> + if separate then begin_def (); 
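Illustrative aside (not part of the patch): the `Pexp_coerce` branch above implements `(e :> t)`, first attempting a ground coercion and otherwise going through `subtype`. A typical use, with hypothetical type names:

```ocaml
type shape = [ `Circle of float | `Square of float ]

let circles : [ `Circle of float ] list = [ `Circle 1.0 ]
let shapes : shape list = (circles :> shape list)   (* widening coercion *)
```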
+ let (cty, force) = + Typetexp.transl_simple_type_delayed env sty + and (cty', force') = + Typetexp.transl_simple_type_delayed env sty' + in + let ty = cty.ctyp_type in + let ty' = cty'.ctyp_type in + begin try + let force'' = subtype env ty ty' in + force (); force' (); force'' () + with Subtype (tr1, tr2) -> + raise(Error(loc, env, Not_subtype(tr1, tr2))) + end; + if separate then begin + end_def (); + generalize_structure ty; + generalize_structure ty'; + (type_argument env sarg ty (instance env ty), + instance env ty', Some cty, cty') + end else + (type_argument env sarg ty ty, ty', Some cty, cty') + in + rue { + exp_desc = arg.exp_desc; + exp_loc = arg.exp_loc; + exp_type = ty'; + exp_attributes = arg.exp_attributes; + exp_env = env; + exp_extra = (Texp_coerce (cty, cty'), loc, sexp.pexp_attributes) :: + arg.exp_extra; + } + | Pexp_send (e, {txt=met}) -> + if !Clflags.principal then begin_def (); + let obj = type_exp env e in + let obj_meths = ref None in + begin try + let (meth, exp, typ) = + match obj.exp_desc with + Texp_ident(_path, _, {val_kind = Val_self (meths, _, _, privty)}) -> + obj_meths := Some meths; + let (id, typ) = + filter_self_method env met Private meths privty + in + if is_Tvar (repr typ) then + Location.prerr_warning loc + (Warnings.Undeclared_virtual_method met); + (Tmeth_val id, None, typ) + | Texp_ident(_path, lid, {val_kind = Val_anc (methods, cl_num)}) -> + let method_id = + begin try List.assoc met methods with Not_found -> + let valid_methods = List.map fst methods in + raise(Error(e.pexp_loc, env, + Undefined_inherited_method (met, valid_methods))) + end + in + begin match + Env.lookup_value (Longident.Lident ("selfpat-" ^ cl_num)) env, + Env.lookup_value (Longident.Lident ("self-" ^cl_num)) env + with + (_, ({val_kind = Val_self (meths, _, _, privty)} as desc)), + (path, _) -> + obj_meths := Some meths; + let (_, typ) = + filter_self_method env met Private meths privty + in + let method_type = newvar () in + let (obj_ty, res_ty) = filter_arrow env method_type Nolabel in + unify env obj_ty desc.val_type; + unify env res_ty (instance env typ); + let exp = + Texp_apply({exp_desc = + Texp_ident(Path.Pident method_id, lid, + {val_type = method_type; + val_kind = Val_reg; + val_attributes = []; + Types.val_loc = Location.none}); + exp_loc = loc; exp_extra = []; + exp_type = method_type; + exp_attributes = []; (* check *) + exp_env = env}, + [ Nolabel, + Some {exp_desc = Texp_ident(path, lid, desc); + exp_loc = obj.exp_loc; exp_extra = []; + exp_type = desc.val_type; + exp_attributes = []; (* check *) + exp_env = env} + ]) + in + (Tmeth_name met, Some (re {exp_desc = exp; + exp_loc = loc; exp_extra = []; + exp_type = typ; + exp_attributes = []; (* check *) + exp_env = env}), typ) + | _ -> + assert false + end + | _ -> + (Tmeth_name met, None, + filter_method env met Public obj.exp_type) + in + if !Clflags.principal then begin + end_def (); + generalize_structure typ; + end; + let typ = + match repr typ with + {desc = Tpoly (ty, [])} -> + instance env ty + | {desc = Tpoly (ty, tl); level = l} -> + if !Clflags.principal && l <> generic_level then + Location.prerr_warning loc + (Warnings.Not_principal "this use of a polymorphic method"); + snd (instance_poly false tl ty) + | {desc = Tvar _} as ty -> + let ty' = newvar () in + unify env (instance_def ty) (newty(Tpoly(ty',[]))); + (* if not !Clflags.nolabels then + Location.prerr_warning loc (Warnings.Unknown_method met); *) + ty' + | _ -> + assert false + in + rue { + exp_desc = Texp_send(obj, meth, exp); + exp_loc 
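Illustrative aside (not part of the patch): for a plain `obj#meth` send on an ordinary value, the fall-through case above simply calls `filter_method`, which is what yields the familiar open object types:

```ocaml
(* inferred as  < height : int; width : int; .. > -> int *)
let area box = box#width * box#height
```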
= loc; exp_extra = []; + exp_type = typ; + exp_attributes = sexp.pexp_attributes; + exp_env = env } + with Unify _ -> + let valid_methods = + match !obj_meths with + | Some meths -> + Some (Meths.fold (fun meth _meth_ty li -> meth::li) !meths []) + | None -> + match (expand_head env obj.exp_type).desc with + | Tobject (fields, _) -> + let (fields, _) = Ctype.flatten_fields fields in + let collect_fields li (meth, meth_kind, _meth_ty) = + if meth_kind = Fpresent then meth::li else li in + Some (List.fold_left collect_fields [] fields) + | _ -> None + in + raise(Error(e.pexp_loc, env, + Undefined_method (obj.exp_type, met, valid_methods))) + end + | Pexp_new cl -> + let (cl_path, cl_decl) = Typetexp.find_class env cl.loc cl.txt in + begin match cl_decl.cty_new with + None -> + raise(Error(loc, env, Virtual_class cl.txt)) + | Some ty -> + rue { + exp_desc = Texp_new (cl_path, cl, cl_decl); + exp_loc = loc; exp_extra = []; + exp_type = instance_def ty; + exp_attributes = sexp.pexp_attributes; + exp_env = env } + end + | Pexp_setinstvar (lab, snewval) -> + begin try + let (path, desc) = Env.lookup_value (Longident.Lident lab.txt) env in + match desc.val_kind with + Val_ivar (Mutable, cl_num) -> + let newval = + type_expect env snewval (instance env desc.val_type) in + let (path_self, _) = + Env.lookup_value (Longident.Lident ("self-" ^ cl_num)) env + in + rue { + exp_desc = Texp_setinstvar(path_self, path, lab, newval); + exp_loc = loc; exp_extra = []; + exp_type = instance_def Predef.type_unit; + exp_attributes = sexp.pexp_attributes; + exp_env = env } + | Val_ivar _ -> + raise(Error(loc, env, Instance_variable_not_mutable(true,lab.txt))) + | _ -> + raise(Error(loc, env, Instance_variable_not_mutable(false,lab.txt))) + with + Not_found -> + let collect_vars name _path val_desc li = + match val_desc.val_kind with + | Val_ivar (Mutable, _) -> name::li + | _ -> li in + let valid_vars = Env.fold_values collect_vars None env [] in + raise(Error(loc, env, + Unbound_instance_variable (lab.txt, valid_vars))) + end + | Pexp_override lst -> + let _ = + List.fold_right + (fun (lab, _) l -> + if List.exists (fun l -> l.txt = lab.txt) l then + raise(Error(loc, env, + Value_multiply_overridden lab.txt)); + lab::l) + lst + [] in + begin match + try + Env.lookup_value (Longident.Lident "selfpat-*") env, + Env.lookup_value (Longident.Lident "self-*") env + with Not_found -> + raise(Error(loc, env, Outside_class)) + with + (_, {val_type = self_ty; val_kind = Val_self (_, vars, _, _)}), + (path_self, _) -> + let type_override (lab, snewval) = + begin try + let (id, _, _, ty) = Vars.find lab.txt !vars in + (Path.Pident id, lab, type_expect env snewval (instance env ty)) + with + Not_found -> + let vars = Vars.fold (fun var _ li -> var::li) !vars [] in + raise(Error(loc, env, + Unbound_instance_variable (lab.txt, vars))) + end + in + let modifs = List.map type_override lst in + rue { + exp_desc = Texp_override(path_self, modifs); + exp_loc = loc; exp_extra = []; + exp_type = self_ty; + exp_attributes = sexp.pexp_attributes; + exp_env = env } + | _ -> + assert false + end + | Pexp_letmodule(name, smodl, sbody) -> + let ty = newvar() in + (* remember original level *) + begin_def (); + Ident.set_current_time ty.level; + let context = Typetexp.narrow () in + let modl = !type_module env smodl in + let (id, new_env) = Env.enter_module name.txt modl.mod_type env in + Ctype.init_def(Ident.current_time()); + Typetexp.widen context; + let body = type_expect new_env sbody ty_expected in + (* go back to original level *) + 
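Illustrative aside (not part of the patch): the scoping check for `let module` (unifying the body type with a fresh variable, explained in the comment just below) is what allows the common local-functor idiom as long as the locally generated types do not leak into the result. A sketch with a hypothetical function:

```ocaml
let distinct_count words =
  let module S = Set.Make (String) in
  S.cardinal (S.of_list words)   (* result type int does not mention S.t *)
```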
end_def (); + (* Unification of body.exp_type with the fresh variable ty + fails if and only if the prefix condition is violated, + i.e. if generative types rooted at id show up in the + type body.exp_type. Thus, this unification enforces the + scoping condition on "let module". *) + (* Note that this code will only be reached if ty_expected + is a generic type variable, otherwise the error will occur + above in type_expect *) + begin try + Ctype.unify_var new_env ty body.exp_type + with Unify _ -> + raise(Error(loc, env, Scoping_let_module(name.txt, body.exp_type))) + end; + re { + exp_desc = Texp_letmodule(id, name, modl, body); + exp_loc = loc; exp_extra = []; + exp_type = ty; + exp_attributes = sexp.pexp_attributes; + exp_env = env } + | Pexp_letexception(cd, sbody) -> + let (cd, newenv) = Typedecl.transl_exception env cd in + let body = type_expect newenv sbody ty_expected in + re { + exp_desc = Texp_letexception(cd, body); + exp_loc = loc; exp_extra = []; + exp_type = body.exp_type; + exp_attributes = sexp.pexp_attributes; + exp_env = env } + + | Pexp_assert (e) -> + let cond = type_expect env e Predef.type_bool in + let exp_type = + match cond.exp_desc with + | Texp_construct(_, {cstr_name="false"}, _) -> + instance env ty_expected + | _ -> + instance_def Predef.type_unit + in + rue { + exp_desc = Texp_assert cond; + exp_loc = loc; exp_extra = []; + exp_type; + exp_attributes = sexp.pexp_attributes; + exp_env = env; + } + | Pexp_lazy e -> + let ty = newgenvar () in + let to_unify = Predef.type_lazy_t ty in + unify_exp_types loc env to_unify ty_expected; + let arg = type_expect env e ty in + re { + exp_desc = Texp_lazy arg; + exp_loc = loc; exp_extra = []; + exp_type = instance env ty_expected; + exp_attributes = sexp.pexp_attributes; + exp_env = env; + } + | Pexp_object s -> + let desc, sign, meths = !type_object env loc s in + rue { + exp_desc = Texp_object (desc, (*sign,*) meths); + exp_loc = loc; exp_extra = []; + exp_type = sign.csig_self; + exp_attributes = sexp.pexp_attributes; + exp_env = env; + } + | Pexp_poly(sbody, sty) -> + if !Clflags.principal then begin_def (); + let ty, cty = + match sty with None -> repr ty_expected, None + | Some sty -> + let sty = Ast_helper.Typ.force_poly sty in + let cty = Typetexp.transl_simple_type env false sty in + repr cty.ctyp_type, Some cty + in + if !Clflags.principal then begin + end_def (); + generalize_structure ty + end; + if sty <> None then + unify_exp_types loc env (instance env ty) (instance env ty_expected); + let exp = + match (expand_head env ty).desc with + Tpoly (ty', []) -> + let exp = type_expect env sbody ty' in + { exp with exp_type = instance env ty } + | Tpoly (ty', tl) -> + (* One more level to generalize locally *) + begin_def (); + if !Clflags.principal then begin_def (); + let vars, ty'' = instance_poly true tl ty' in + if !Clflags.principal then begin + end_def (); + generalize_structure ty'' + end; + let exp = type_expect env sbody ty'' in + end_def (); + check_univars env false "method" exp ty_expected vars; + { exp with exp_type = instance env ty } + | Tvar _ -> + let exp = type_exp env sbody in + let exp = {exp with exp_type = newty (Tpoly (exp.exp_type, []))} in + unify_exp env exp ty; + exp + | _ -> assert false + in + re { exp with exp_extra = + (Texp_poly cty, loc, sexp.pexp_attributes) :: exp.exp_extra } + | Pexp_newtype({txt=name}, sbody) -> + let ty = newvar () in + (* remember original level *) + begin_def (); + (* Create a fake abstract type declaration for name. 
*) + let level = get_current_level () in + let decl = { + type_params = []; + type_arity = 0; + type_kind = Type_abstract; + type_private = Public; + type_manifest = None; + type_variance = []; + type_newtype_level = Some (level, level); + type_loc = loc; + type_attributes = []; + type_immediate = false; + type_unboxed = unboxed_false_default_false; + } + in + Ident.set_current_time ty.level; + let (id, new_env) = Env.enter_type name decl env in + Ctype.init_def(Ident.current_time()); + + let body = type_exp new_env sbody in + (* Replace every instance of this type constructor in the resulting + type. *) + let seen = Hashtbl.create 8 in + let rec replace t = + if Hashtbl.mem seen t.id then () + else begin + Hashtbl.add seen t.id (); + match t.desc with + | Tconstr (Path.Pident id', _, _) when id == id' -> link_type t ty + | _ -> Btype.iter_type_expr replace t + end + in + let ety = Subst.type_expr Subst.identity body.exp_type in + replace ety; + (* back to original level *) + end_def (); + (* lower the levels of the result type *) + (* unify_var env ty ety; *) + + (* non-expansive if the body is non-expansive, so we don't introduce + any new extra node in the typed AST. *) + rue { body with exp_loc = loc; exp_type = ety; + exp_extra = + (Texp_newtype name, loc, sexp.pexp_attributes) :: body.exp_extra } + | Pexp_pack m -> + let (p, nl) = + match Ctype.expand_head env (instance env ty_expected) with + {desc = Tpackage (p, nl, _tl)} -> + if !Clflags.principal && + (Ctype.expand_head env ty_expected).level < Btype.generic_level + then + Location.prerr_warning loc + (Warnings.Not_principal "this module packing"); + (p, nl) + | {desc = Tvar _} -> + raise (Error (loc, env, Cannot_infer_signature)) + | _ -> + raise (Error (loc, env, Not_a_packed_module ty_expected)) + in + let (modl, tl') = !type_package env m p nl in + rue { + exp_desc = Texp_pack modl; + exp_loc = loc; exp_extra = []; + exp_type = newty (Tpackage (p, nl, tl')); + exp_attributes = sexp.pexp_attributes; + exp_env = env } + | Pexp_open (ovf, lid, e) -> + let (path, newenv) = !type_open ovf env sexp.pexp_loc lid in + let exp = type_expect newenv e ty_expected in + { exp with + exp_extra = (Texp_open (ovf, path, lid, newenv), loc, + sexp.pexp_attributes) :: + exp.exp_extra; + } + + | Pexp_extension ({ txt = ("ocaml.extension_constructor" + |"extension_constructor"); _ }, + payload) -> + begin match payload with + | PStr [ { pstr_desc = + Pstr_eval ({ pexp_desc = Pexp_construct (lid, None); _ }, _) + } ] -> + let path = + match (Typetexp.find_constructor env lid.loc lid.txt).cstr_tag with + | Cstr_extension (path, _) -> path + | _ -> raise (Error (lid.loc, env, Not_an_extension_constructor)) + in + rue { + exp_desc = Texp_extension_constructor (lid, path); + exp_loc = loc; exp_extra = []; + exp_type = instance_def Predef.type_extension_constructor; + exp_attributes = sexp.pexp_attributes; + exp_env = env } + | _ -> + raise (Error (loc, env, Invalid_extension_constructor_payload)) + end + | Pexp_extension ext -> + raise (Error_forward (Builtin_attributes.error_of_extension ext)) + + | Pexp_unreachable -> + re { exp_desc = Texp_unreachable; + exp_loc = loc; exp_extra = []; + exp_type = instance env ty_expected; + exp_attributes = sexp.pexp_attributes; + exp_env = env } + +and type_function ?in_function loc attrs env ty_expected l caselist = + let (loc_fun, ty_fun) = + match in_function with Some p -> p + | None -> (loc, instance env ty_expected) + in + let separate = !Clflags.principal || Env.has_local_constraints env in + if separate 
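Illustrative aside (not part of the patch): the `Pexp_newtype` machinery above types the body with a fresh abstract type standing for the annotation and then substitutes it away in the result, which is what makes `(type a)` annotations work:

```ocaml
(* while the body is typed, [a] is a fresh abstract type; in the result it
   is replaced by an ordinary type variable *)
let singleton (type a) (x : a) : a list = [x]
```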
then begin_def (); + let (ty_arg, ty_res) = + try filter_arrow env (instance env ty_expected) l + with Unify _ -> + match expand_head env ty_expected with + {desc = Tarrow _} as ty -> + raise(Error(loc, env, Abstract_wrong_label(l, ty))) + | _ -> + raise(Error(loc_fun, env, + Too_many_arguments (in_function <> None, ty_fun))) + in + let ty_arg = + if is_optional l then + let tv = newvar() in + begin + try unify env ty_arg (type_option tv) + with Unify _ -> assert false + end; + type_option tv + else ty_arg + in + if separate then begin + end_def (); + generalize_structure ty_arg; + generalize_structure ty_res + end; + let cases, partial = + type_cases ~in_function:(loc_fun,ty_fun) env ty_arg ty_res + true loc caselist in + let not_function ty = + let ls, tvar = list_labels env ty in + ls = [] && not tvar + in + if is_optional l && not_function ty_res then + Location.prerr_warning (List.hd cases).c_lhs.pat_loc + Warnings.Unerasable_optional_argument; + let param = name_pattern "param" cases in + re { + exp_desc = Texp_function { arg_label = l; param; cases; partial; }; + exp_loc = loc; exp_extra = []; + exp_type = instance env (newgenty (Tarrow(l, ty_arg, ty_res, Cok))); + exp_attributes = attrs; + exp_env = env } + + +and type_label_access env srecord lid = + if !Clflags.principal then begin_def (); + let record = type_exp ~recarg:Allowed env srecord in + if !Clflags.principal then begin + end_def (); + generalize_structure record.exp_type + end; + let ty_exp = record.exp_type in + let opath = + try + let (p0, p,_) = extract_concrete_record env ty_exp in + Some(p0, p, ty_exp.level = generic_level || not !Clflags.principal) + with Not_found -> None + in + let labels = Typetexp.find_all_labels env lid.loc lid.txt in + let label = + wrap_disambiguate "This expression has" ty_exp + (Label.disambiguate lid env opath) labels in + (record, label, opath) + +(* Typing format strings for printing or reading. + These formats are used by functions in modules Printf, Format, and Scanf. + (Handling of * modifiers contributed by Thorsten Ohl.) *) + +and type_format loc str env = + let loc = {loc with Location.loc_ghost = true} in + try + CamlinternalFormatBasics.(CamlinternalFormat.( + let mk_exp_loc pexp_desc = { + pexp_desc = pexp_desc; + pexp_loc = loc; + pexp_attributes = []; + } and mk_lid_loc lid = { + txt = lid; + loc = loc; + } in + let mk_constr name args = + let lid = Longident.(Ldot(Lident "CamlinternalFormatBasics", name)) in + let arg = match args with + | [] -> None + | [ e ] -> Some e + | _ :: _ :: _ -> Some (mk_exp_loc (Pexp_tuple args)) in + mk_exp_loc (Pexp_construct (mk_lid_loc lid, arg)) in + let mk_cst cst = mk_exp_loc (Pexp_constant cst) in + let mk_int n = mk_cst (Pconst_integer (string_of_int n, None)) + and mk_string str = mk_cst (Pconst_string (str, None)) + and mk_char chr = mk_cst (Pconst_char chr) in + let rec mk_formatting_lit fmting = match fmting with + | Close_box -> + mk_constr "Close_box" [] + | Close_tag -> + mk_constr "Close_tag" [] + | Break (org, ns, ni) -> + mk_constr "Break" [ mk_string org; mk_int ns; mk_int ni ] + | FFlush -> + mk_constr "FFlush" [] + | Force_newline -> + mk_constr "Force_newline" [] + | Flush_newline -> + mk_constr "Flush_newline" [] + | Magic_size (org, sz) -> + mk_constr "Magic_size" [ mk_string org; mk_int sz ] + | Escaped_at -> + mk_constr "Escaped_at" [] + | Escaped_percent -> + mk_constr "Escaped_percent" [] + | Scan_indic c -> + mk_constr "Scan_indic" [ mk_char (Char.code c) ] + and mk_formatting_gen : type a b c d e f . 
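Illustrative aside (not part of the patch): `type_format` here re-types a string literal as a first-class format value (parsing it with `fmt_ebb_of_string` and rebuilding it as a `CamlinternalFormatBasics` term), so the usual `Printf` idiom is really an elaboration performed at this point. A hedged user-level sketch:

```ocaml
let greeting : (string -> string, unit, string) format = "Hello, %s!"
let line = Printf.sprintf greeting "world"
```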
+ (a, b, c, d, e, f) formatting_gen -> Parsetree.expression = + fun fmting -> match fmting with + | Open_tag (Format (fmt', str')) -> + mk_constr "Open_tag" [ mk_format fmt' str' ] + | Open_box (Format (fmt', str')) -> + mk_constr "Open_box" [ mk_format fmt' str' ] + and mk_format : type a b c d e f . + (a, b, c, d, e, f) CamlinternalFormatBasics.fmt -> string -> + Parsetree.expression = fun fmt str -> + mk_constr "Format" [ mk_fmt fmt; mk_string str ] + and mk_side side = match side with + | Left -> mk_constr "Left" [] + | Right -> mk_constr "Right" [] + | Zeros -> mk_constr "Zeros" [] + and mk_iconv iconv = match iconv with + | Int_d -> mk_constr "Int_d" [] | Int_pd -> mk_constr "Int_pd" [] + | Int_sd -> mk_constr "Int_sd" [] | Int_i -> mk_constr "Int_i" [] + | Int_pi -> mk_constr "Int_pi" [] | Int_si -> mk_constr "Int_si" [] + | Int_x -> mk_constr "Int_x" [] | Int_Cx -> mk_constr "Int_Cx" [] + | Int_X -> mk_constr "Int_X" [] | Int_CX -> mk_constr "Int_CX" [] + | Int_o -> mk_constr "Int_o" [] | Int_Co -> mk_constr "Int_Co" [] + | Int_u -> mk_constr "Int_u" [] + and mk_fconv fconv = match fconv with + | Float_f -> mk_constr "Float_f" [] + | Float_e -> mk_constr "Float_e" [] + | Float_E -> mk_constr "Float_E" [] + | Float_g -> mk_constr "Float_g" [] + | Float_G -> mk_constr "Float_G" [] + | Float_h -> mk_constr "Float_h" [] + | Float_H -> mk_constr "Float_H" [] + | Float_F -> mk_constr "Float_F" [] + and mk_counter cnt = match cnt with + | Line_counter -> mk_constr "Line_counter" [] + | Char_counter -> mk_constr "Char_counter" [] + | Token_counter -> mk_constr "Token_counter" [] + and mk_int_opt n_opt = match n_opt with + | None -> + let lid_loc = mk_lid_loc (Longident.Lident "None") in + mk_exp_loc (Pexp_construct (lid_loc, None)) + | Some n -> + let lid_loc = mk_lid_loc (Longident.Lident "Some") in + mk_exp_loc (Pexp_construct (lid_loc, Some (mk_int n))) + and mk_fmtty : type a b c d e f g h i j k l . + (a, b, c, d, e, f, g, h, i, j, k, l) fmtty_rel -> Parsetree.expression + = + fun fmtty -> match fmtty with + | Char_ty rest -> mk_constr "Char_ty" [ mk_fmtty rest ] + | String_ty rest -> mk_constr "String_ty" [ mk_fmtty rest ] + | Int_ty rest -> mk_constr "Int_ty" [ mk_fmtty rest ] + | Int32_ty rest -> mk_constr "Int32_ty" [ mk_fmtty rest ] + | Nativeint_ty rest -> mk_constr "Nativeint_ty" [ mk_fmtty rest ] + | Int64_ty rest -> mk_constr "Int64_ty" [ mk_fmtty rest ] + | Float_ty rest -> mk_constr "Float_ty" [ mk_fmtty rest ] + | Bool_ty rest -> mk_constr "Bool_ty" [ mk_fmtty rest ] + | Alpha_ty rest -> mk_constr "Alpha_ty" [ mk_fmtty rest ] + | Theta_ty rest -> mk_constr "Theta_ty" [ mk_fmtty rest ] + | Any_ty rest -> mk_constr "Any_ty" [ mk_fmtty rest ] + | Reader_ty rest -> mk_constr "Reader_ty" [ mk_fmtty rest ] + | Ignored_reader_ty rest -> + mk_constr "Ignored_reader_ty" [ mk_fmtty rest ] + | Format_arg_ty (sub_fmtty, rest) -> + mk_constr "Format_arg_ty" [ mk_fmtty sub_fmtty; mk_fmtty rest ] + | Format_subst_ty (sub_fmtty1, sub_fmtty2, rest) -> + mk_constr "Format_subst_ty" + [ mk_fmtty sub_fmtty1; mk_fmtty sub_fmtty2; mk_fmtty rest ] + | End_of_fmtty -> mk_constr "End_of_fmtty" [] + and mk_ignored : type a b c d e f . 
+ (a, b, c, d, e, f) ignored -> Parsetree.expression = + fun ign -> match ign with + | Ignored_char -> + mk_constr "Ignored_char" [] + | Ignored_caml_char -> + mk_constr "Ignored_caml_char" [] + | Ignored_string pad_opt -> + mk_constr "Ignored_string" [ mk_int_opt pad_opt ] + | Ignored_caml_string pad_opt -> + mk_constr "Ignored_caml_string" [ mk_int_opt pad_opt ] + | Ignored_int (iconv, pad_opt) -> + mk_constr "Ignored_int" [ mk_iconv iconv; mk_int_opt pad_opt ] + | Ignored_int32 (iconv, pad_opt) -> + mk_constr "Ignored_int32" [ mk_iconv iconv; mk_int_opt pad_opt ] + | Ignored_nativeint (iconv, pad_opt) -> + mk_constr "Ignored_nativeint" [ mk_iconv iconv; mk_int_opt pad_opt ] + | Ignored_int64 (iconv, pad_opt) -> + mk_constr "Ignored_int64" [ mk_iconv iconv; mk_int_opt pad_opt ] + | Ignored_float (pad_opt, prec_opt) -> + mk_constr "Ignored_float" [ mk_int_opt pad_opt; mk_int_opt prec_opt ] + | Ignored_bool pad_opt -> + mk_constr "Ignored_bool" [ mk_int_opt pad_opt ] + | Ignored_format_arg (pad_opt, fmtty) -> + mk_constr "Ignored_format_arg" [ mk_int_opt pad_opt; mk_fmtty fmtty ] + | Ignored_format_subst (pad_opt, fmtty) -> + mk_constr "Ignored_format_subst" [ + mk_int_opt pad_opt; mk_fmtty fmtty ] + | Ignored_reader -> + mk_constr "Ignored_reader" [] + | Ignored_scan_char_set (width_opt, char_set) -> + mk_constr "Ignored_scan_char_set" [ + mk_int_opt width_opt; mk_string char_set ] + | Ignored_scan_get_counter counter -> + mk_constr "Ignored_scan_get_counter" [ + mk_counter counter + ] + | Ignored_scan_next_char -> + mk_constr "Ignored_scan_next_char" [] + and mk_padding : type x y . (x, y) padding -> Parsetree.expression = + fun pad -> match pad with + | No_padding -> mk_constr "No_padding" [] + | Lit_padding (s, w) -> mk_constr "Lit_padding" [ mk_side s; mk_int w ] + | Arg_padding s -> mk_constr "Arg_padding" [ mk_side s ] + and mk_precision : type x y . (x, y) precision -> Parsetree.expression = + fun prec -> match prec with + | No_precision -> mk_constr "No_precision" [] + | Lit_precision w -> mk_constr "Lit_precision" [ mk_int w ] + | Arg_precision -> mk_constr "Arg_precision" [] + and mk_fmt : type a b c d e f . 
+ (a, b, c, d, e, f) fmt -> Parsetree.expression = + fun fmt -> match fmt with + | Char rest -> + mk_constr "Char" [ mk_fmt rest ] + | Caml_char rest -> + mk_constr "Caml_char" [ mk_fmt rest ] + | String (pad, rest) -> + mk_constr "String" [ mk_padding pad; mk_fmt rest ] + | Caml_string (pad, rest) -> + mk_constr "Caml_string" [ mk_padding pad; mk_fmt rest ] + | Int (iconv, pad, prec, rest) -> + mk_constr "Int" [ + mk_iconv iconv; mk_padding pad; mk_precision prec; mk_fmt rest ] + | Int32 (iconv, pad, prec, rest) -> + mk_constr "Int32" [ + mk_iconv iconv; mk_padding pad; mk_precision prec; mk_fmt rest ] + | Nativeint (iconv, pad, prec, rest) -> + mk_constr "Nativeint" [ + mk_iconv iconv; mk_padding pad; mk_precision prec; mk_fmt rest ] + | Int64 (iconv, pad, prec, rest) -> + mk_constr "Int64" [ + mk_iconv iconv; mk_padding pad; mk_precision prec; mk_fmt rest ] + | Float (fconv, pad, prec, rest) -> + mk_constr "Float" [ + mk_fconv (snd fconv); mk_padding pad; mk_precision prec; mk_fmt rest ] + | Bool (pad, rest) -> + mk_constr "Bool" [ mk_padding pad; mk_fmt rest ] + | Flush rest -> + mk_constr "Flush" [ mk_fmt rest ] + | String_literal (s, rest) -> + mk_constr "String_literal" [ mk_string s; mk_fmt rest ] + | Char_literal (c, rest) -> + mk_constr "Char_literal" [ mk_char (Char.code c); mk_fmt rest ] + | Format_arg (pad_opt, fmtty, rest) -> + mk_constr "Format_arg" [ + mk_int_opt pad_opt; mk_fmtty fmtty; mk_fmt rest ] + | Format_subst (pad_opt, fmtty, rest) -> + mk_constr "Format_subst" [ + mk_int_opt pad_opt; mk_fmtty fmtty; mk_fmt rest ] + | Alpha rest -> + mk_constr "Alpha" [ mk_fmt rest ] + | Theta rest -> + mk_constr "Theta" [ mk_fmt rest ] + | Formatting_lit (fmting, rest) -> + mk_constr "Formatting_lit" [ mk_formatting_lit fmting; mk_fmt rest ] + | Formatting_gen (fmting, rest) -> + mk_constr "Formatting_gen" [ mk_formatting_gen fmting; mk_fmt rest ] + | Reader rest -> + mk_constr "Reader" [ mk_fmt rest ] + | Scan_char_set (width_opt, char_set, rest) -> + mk_constr "Scan_char_set" [ + mk_int_opt width_opt; mk_string char_set; mk_fmt rest ] + | Scan_get_counter (cnt, rest) -> + mk_constr "Scan_get_counter" [ mk_counter cnt; mk_fmt rest ] + | Scan_next_char rest -> + mk_constr "Scan_next_char" [ mk_fmt rest ] + | Ignored_param (ign, rest) -> + mk_constr "Ignored_param" [ mk_ignored ign; mk_fmt rest ] + | End_of_format -> + mk_constr "End_of_format" [] + | Custom _ -> + (* Custom formatters have no syntax so they will never appear + in formats parsed from strings. 
*) + assert false + in + let legacy_behavior = not !Clflags.strict_formats in + let Fmt_EBB fmt = fmt_ebb_of_string ~legacy_behavior str in + mk_constr "Format" [ mk_fmt fmt; mk_string str ] + )) + with Failure msg -> + raise (Error (loc, env, Invalid_format msg)) + +and type_label_exp create env loc ty_expected + (lid, label, sarg) = + (* Here also ty_expected may be at generic_level *) + begin_def (); + let separate = !Clflags.principal || Env.has_local_constraints env in + if separate then (begin_def (); begin_def ()); + let (vars, ty_arg, ty_res) = instance_label true label in + if separate then begin + end_def (); + (* Generalize label information *) + generalize_structure ty_arg; + generalize_structure ty_res + end; + begin try + unify env (instance_def ty_res) (instance env ty_expected) + with Unify trace -> + raise (Error(lid.loc, env, Label_mismatch(lid.txt, trace))) + end; + (* Instantiate so that we can generalize internal nodes *) + let ty_arg = instance_def ty_arg in + if separate then begin + end_def (); + (* Generalize information merged from ty_expected *) + generalize_structure ty_arg + end; + if label.lbl_private = Private then + if create then + raise (Error(loc, env, Private_type ty_expected)) + else + raise (Error(lid.loc, env, Private_label(lid.txt, ty_expected))); + let arg = + let snap = if vars = [] then None else Some (Btype.snapshot ()) in + let arg = type_argument env sarg ty_arg (instance env ty_arg) in + end_def (); + try + check_univars env (vars <> []) "field value" arg label.lbl_arg vars; + arg + with exn when not (is_nonexpansive arg) -> try + (* Try to retype without propagating ty_arg, cf PR#4862 *) + may Btype.backtrack snap; + begin_def (); + let arg = type_exp env sarg in + end_def (); + generalize_expansive env arg.exp_type; + unify_exp env arg ty_arg; + check_univars env false "field value" arg label.lbl_arg vars; + arg + with Error (_, _, Less_general _) as e -> raise e + | _ -> raise exn (* In case of failure return the first error *) + in + (lid, label, {arg with exp_type = instance env arg.exp_type}) + +and type_argument ?recarg env sarg ty_expected' ty_expected = + (* ty_expected' may be generic *) + let no_labels ty = + let ls, tvar = list_labels env ty in + not tvar && List.for_all ((=) Nolabel) ls + in + let rec is_inferred sexp = + match sexp.pexp_desc with + Pexp_ident _ | Pexp_apply _ | Pexp_field _ | Pexp_constraint _ + | Pexp_coerce _ | Pexp_send _ | Pexp_new _ -> true + | Pexp_sequence (_, e) | Pexp_open (_, _, e) -> is_inferred e + | Pexp_ifthenelse (_, e1, Some e2) -> is_inferred e1 && is_inferred e2 + | _ -> false + in + match expand_head env ty_expected' with + {desc = Tarrow(Nolabel,ty_arg,ty_res,_); level = lv} + when is_inferred sarg -> + (* apply optional arguments when expected type is "" *) + (* we must be very careful about not breaking the semantics *) + if !Clflags.principal then begin_def (); + let texp = type_exp env sarg in + if !Clflags.principal then begin + end_def (); + generalize_structure texp.exp_type + end; + let rec make_args args ty_fun = + match (expand_head env ty_fun).desc with + | Tarrow (l,ty_arg,ty_fun,_) when is_optional l -> + let ty = option_none (instance env ty_arg) sarg.pexp_loc in + make_args ((l, Some ty) :: args) ty_fun + | Tarrow (l,_,ty_res',_) when l = Nolabel || !Clflags.classic -> + List.rev args, ty_fun, no_labels ty_res' + | Tvar _ -> List.rev args, ty_fun, false + | _ -> [], texp.exp_type, false + in + let args, ty_fun', simple_res = make_args [] texp.exp_type in + let warn = 
!Clflags.principal && + (lv <> generic_level || (repr ty_fun').level <> generic_level) + and texp = {texp with exp_type = instance env texp.exp_type} + and ty_fun = instance env ty_fun' in + if not (simple_res || no_labels ty_res) then begin + unify_exp env texp ty_expected; + texp + end else begin + unify_exp env {texp with exp_type = ty_fun} ty_expected; + if args = [] then texp else + (* eta-expand to avoid side effects *) + let var_pair name ty = + let id = Ident.create name in + {pat_desc = Tpat_var (id, mknoloc name); pat_type = ty;pat_extra=[]; + pat_attributes = []; + pat_loc = Location.none; pat_env = env}, + {exp_type = ty; exp_loc = Location.none; exp_env = env; + exp_extra = []; exp_attributes = []; + exp_desc = + Texp_ident(Path.Pident id, mknoloc (Longident.Lident name), + {val_type = ty; val_kind = Val_reg; + val_attributes = []; + Types.val_loc = Location.none})} + in + let eta_pat, eta_var = var_pair "eta" ty_arg in + let func texp = + let e = + {texp with exp_type = ty_res; exp_desc = + Texp_apply + (texp, + args @ [Nolabel, Some eta_var])} + in + let cases = [case eta_pat e] in + let param = name_pattern "param" cases in + { texp with exp_type = ty_fun; exp_desc = + Texp_function { arg_label = Nolabel; param; cases; + partial = Total; } } + in + Location.prerr_warning texp.exp_loc + (Warnings.Eliminated_optional_arguments + (List.map (fun (l, _) -> Printtyp.string_of_label l) args)); + if warn then Location.prerr_warning texp.exp_loc + (Warnings.Without_principality "eliminated optional argument"); + (* let-expand to have side effects *) + let let_pat, let_var = var_pair "arg" texp.exp_type in + re { texp with exp_type = ty_fun; exp_desc = + Texp_let (Nonrecursive, + [{vb_pat=let_pat; vb_expr=texp; vb_attributes=[]; + vb_loc=Location.none; + }], + func let_var) } + end + | _ -> + let texp = type_expect ?recarg env sarg ty_expected' in + unify_exp env texp ty_expected; + texp + +and type_application env funct sargs = + (* funct.exp_type may be generic *) + let result_type omitted ty_fun = + List.fold_left + (fun ty_fun (l,ty,lv) -> newty2 lv (Tarrow(l,ty,ty_fun,Cok))) + ty_fun omitted + in + let has_label l ty_fun = + let ls, tvar = list_labels env ty_fun in + tvar || List.mem l ls + in + let ignored = ref [] in + let rec type_unknown_args + (args : + (Asttypes.arg_label * (unit -> Typedtree.expression) option) list) + omitted ty_fun = function + [] -> + (List.map + (function l, None -> l, None + | l, Some f -> l, Some (f ())) + (List.rev args), + instance env (result_type omitted ty_fun)) + | (l1, sarg1) :: sargl -> + let (ty1, ty2) = + let ty_fun = expand_head env ty_fun in + match ty_fun.desc with + Tvar _ -> + let t1 = newvar () and t2 = newvar () in + let not_identity = function + Texp_ident(_,_,{val_kind=Val_prim + {Primitive.prim_name="%identity"}}) -> + false + | _ -> true + in + if ty_fun.level >= t1.level && not_identity funct.exp_desc then + Location.prerr_warning sarg1.pexp_loc Warnings.Unused_argument; + unify env ty_fun (newty (Tarrow(l1,t1,t2,Clink(ref Cunknown)))); + (t1, t2) + | Tarrow (l,t1,t2,_) when l = l1 + || !Clflags.classic && l1 = Nolabel && not (is_optional l) -> + (t1, t2) + | td -> + let ty_fun = + match td with Tarrow _ -> newty td | _ -> ty_fun in + let ty_res = result_type (omitted @ !ignored) ty_fun in + match ty_res.desc with + Tarrow _ -> + if (!Clflags.classic || not (has_label l1 ty_fun)) then + raise (Error(sarg1.pexp_loc, env, + Apply_wrong_label(l1, ty_res))) + else + raise (Error(funct.exp_loc, env, Incoherent_label_order)) + | _ -> 
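Illustrative aside (not part of the patch): the eta-expansion above is what lets a function with optional arguments be passed where a plain arrow is expected; the omitted optionals are filled with `None` and `Eliminated_optional_arguments` is reported. A sketch with hypothetical names:

```ocaml
let add ?(step = 1) x = x + step

(* add : ?step:int -> int -> int  is accepted where  int -> int  is expected;
   ?step is eliminated (supplied as None) and a warning is emitted *)
let bump_all = List.map add
```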
+ raise(Error(funct.exp_loc, env, Apply_non_function + (expand_head env funct.exp_type))) + in + let optional = is_optional l1 in + let arg1 () = + let arg1 = type_expect env sarg1 ty1 in + if optional then + unify_exp env arg1 (type_option(newvar())); + arg1 + in + type_unknown_args ((l1, Some arg1) :: args) omitted ty2 sargl + in + let ignore_labels = + !Clflags.classic || + begin + let ls, tvar = list_labels env funct.exp_type in + not tvar && + let labels = List.filter (fun l -> not (is_optional l)) ls in + List.length labels = List.length sargs && + List.for_all (fun (l,_) -> l = Nolabel) sargs && + List.exists (fun l -> l <> Nolabel) labels && + (Location.prerr_warning + funct.exp_loc + (Warnings.Labels_omitted + (List.map Printtyp.string_of_label + (List.filter ((<>) Nolabel) labels))); + true) + end + in + let warned = ref false in + let rec type_args args omitted ty_fun ty_fun0 ty_old sargs more_sargs = + match expand_head env ty_fun, expand_head env ty_fun0 with + {desc=Tarrow (l, ty, ty_fun, com); level=lv} as ty_fun', + {desc=Tarrow (_, ty0, ty_fun0, _)} + when (sargs <> [] || more_sargs <> []) && commu_repr com = Cok -> + let may_warn loc w = + if not !warned && !Clflags.principal && lv <> generic_level + then begin + warned := true; + Location.prerr_warning loc w + end + in + let name = label_name l + and optional = is_optional l in + let sargs, more_sargs, arg = + if ignore_labels && not (is_optional l) then begin + (* In classic mode, omitted = [] *) + match sargs, more_sargs with + (l', sarg0) :: _, _ -> + raise(Error(sarg0.pexp_loc, env, + Apply_wrong_label(l', ty_old))) + | _, (l', sarg0) :: more_sargs -> + if l <> l' && l' <> Nolabel then + raise(Error(sarg0.pexp_loc, env, + Apply_wrong_label(l', ty_fun'))) + else + ([], more_sargs, + Some (fun () -> type_argument env sarg0 ty ty0)) + | _ -> + assert false + end else try + let (l', sarg0, sargs, more_sargs) = + try + let (l', sarg0, sargs1, sargs2) = extract_label name sargs in + if sargs1 <> [] then + may_warn sarg0.pexp_loc + (Warnings.Not_principal "commuting this argument"); + (l', sarg0, sargs1 @ sargs2, more_sargs) + with Not_found -> + let (l', sarg0, sargs1, sargs2) = + extract_label name more_sargs in + if sargs1 <> [] || sargs <> [] then + may_warn sarg0.pexp_loc + (Warnings.Not_principal "commuting this argument"); + (l', sarg0, sargs @ sargs1, sargs2) + in + if not optional && is_optional l' then + Location.prerr_warning sarg0.pexp_loc + (Warnings.Nonoptional_label (Printtyp.string_of_label l)); + sargs, more_sargs, + if not optional || is_optional l' then + Some (fun () -> type_argument env sarg0 ty ty0) + else begin + may_warn sarg0.pexp_loc + (Warnings.Not_principal "using an optional argument here"); + Some (fun () -> option_some (type_argument env sarg0 + (extract_option_type env ty) + (extract_option_type env ty0))) + end + with Not_found -> + sargs, more_sargs, + if optional && + (List.mem_assoc Nolabel sargs + || List.mem_assoc Nolabel more_sargs) + then begin + may_warn funct.exp_loc + (Warnings.Without_principality "eliminated optional argument"); + ignored := (l,ty,lv) :: !ignored; + Some (fun () -> option_none (instance env ty) Location.none) + end else begin + may_warn funct.exp_loc + (Warnings.Without_principality "commuted an argument"); + None + end + in + let omitted = + if arg = None then (l,ty,lv) :: omitted else omitted in + let ty_old = if sargs = [] then ty_fun else ty_old in + type_args ((l,arg)::args) omitted ty_fun ty_fun0 + ty_old sargs more_sargs + | _ -> + match sargs with + (l, 
sarg0) :: _ when ignore_labels -> + raise(Error(sarg0.pexp_loc, env, + Apply_wrong_label(l, ty_old))) + | _ -> + type_unknown_args args omitted ty_fun0 + (sargs @ more_sargs) + in + let is_ignore funct = + match funct.exp_desc with + Texp_ident (_, _, {val_kind=Val_prim{Primitive.prim_name="%ignore"}}) -> + (try ignore (filter_arrow env (instance env funct.exp_type) Nolabel); + true + with Unify _ -> false) + | _ -> false + in + match sargs with + (* Special case for ignore: avoid discarding warning *) + [Nolabel, sarg] when is_ignore funct -> + let ty_arg, ty_res = + filter_arrow env (instance env funct.exp_type) Nolabel + in + let exp = type_expect env sarg ty_arg in + begin match (expand_head env exp.exp_type).desc with + | Tarrow _ -> + Location.prerr_warning exp.exp_loc Warnings.Partial_application + | Tvar _ -> + add_delayed_check (fun () -> check_application_result env false exp) + | _ -> () + end; + ([Nolabel, Some exp], ty_res) + | _ -> + let ty = funct.exp_type in + if ignore_labels then + type_args [] [] ty (instance env ty) ty [] sargs + else + type_args [] [] ty (instance env ty) ty sargs [] + +and type_construct env loc lid sarg ty_expected attrs = + let opath = + try + let (p0, p,_) = extract_concrete_variant env ty_expected in + Some(p0, p, ty_expected.level = generic_level || not !Clflags.principal) + with Not_found -> None + in + let constrs = Typetexp.find_all_constructors env lid.loc lid.txt in + let constr = + wrap_disambiguate "This variant expression is expected to have" ty_expected + (Constructor.disambiguate lid env opath) constrs in + Env.mark_constructor Env.Positive env (Longident.last lid.txt) constr; + Builtin_attributes.check_deprecated loc constr.cstr_attributes + constr.cstr_name; + let sargs = + match sarg with + None -> [] + | Some {pexp_desc = Pexp_tuple sel} when + constr.cstr_arity > 1 || Builtin_attributes.explicit_arity attrs + -> sel + | Some se -> [se] in + if List.length sargs <> constr.cstr_arity then + raise(Error(loc, env, Constructor_arity_mismatch + (lid.txt, constr.cstr_arity, List.length sargs))); + let separate = !Clflags.principal || Env.has_local_constraints env in + if separate then (begin_def (); begin_def ()); + let (ty_args, ty_res) = instance_constructor constr in + let texp = + re { + exp_desc = Texp_construct(lid, constr, []); + exp_loc = loc; exp_extra = []; + exp_type = ty_res; + exp_attributes = attrs; + exp_env = env } in + if separate then begin + end_def (); + generalize_structure ty_res; + unify_exp env {texp with exp_type = instance_def ty_res} + (instance env ty_expected); + end_def (); + List.iter generalize_structure ty_args; + generalize_structure ty_res; + end; + let ty_args0, ty_res = + match instance_list env (ty_res :: ty_args) with + t :: tl -> tl, t + | _ -> assert false + in + let texp = {texp with exp_type = ty_res} in + if not separate then unify_exp env texp (instance env ty_expected); + let recarg = + match constr.cstr_inlined with + | None -> Rejected + | Some _ -> + begin match sargs with + | [{pexp_desc = + Pexp_ident _ | + Pexp_record (_, (Some {pexp_desc = Pexp_ident _}| None))}] -> + Required + | _ -> + raise (Error(loc, env, Inlined_record_expected)) + end + in + let args = + List.map2 (fun e (t,t0) -> type_argument ~recarg env e t t0) sargs + (List.combine ty_args ty_args0) in + if constr.cstr_private = Private then + raise(Error(loc, env, Private_type ty_res)); + (* NOTE: shouldn't we call "re" on this final expression? 
-- AF *) + { texp with + exp_desc = Texp_construct(lid, constr, args) } + +(* Typing of statements (expressions whose values are discarded) *) + +and type_statement env sexp = + let loc = (final_subexpression sexp).pexp_loc in + begin_def(); + let exp = type_exp env sexp in + end_def(); + let ty = expand_head env exp.exp_type and tv = newvar() in + if is_Tvar ty && ty.level > tv.level then + Location.prerr_warning loc Warnings.Nonreturning_statement; + if !Clflags.strict_sequence then + let expected_ty = instance_def Predef.type_unit in + unify_exp env exp expected_ty; + exp + else begin + begin match ty.desc with + | Tarrow _ -> + Location.prerr_warning loc Warnings.Partial_application + | Tconstr (p, _, _) when Path.same p Predef.path_unit -> () + | Tvar _ -> + add_delayed_check (fun () -> check_application_result env true exp) + | _ -> + Location.prerr_warning loc Warnings.Statement_type + end; + unify_var env tv ty; + exp + end + +(* Typing of match cases *) + +and type_cases ?in_function env ty_arg ty_res partial_flag loc caselist = + (* ty_arg is _fully_ generalized *) + let patterns = List.map (fun {pc_lhs=p} -> p) caselist in + let contains_polyvars = List.exists contains_polymorphic_variant patterns in + let erase_either = contains_polyvars && contains_variant_either ty_arg + and has_gadts = List.exists (contains_gadt env) patterns in +(* prerr_endline ( if has_gadts then "contains gadt" else "no gadt"); *) + let ty_arg = + if (has_gadts || erase_either) && not !Clflags.principal + then correct_levels ty_arg else ty_arg + and ty_res, env = + if has_gadts && not !Clflags.principal then + correct_levels ty_res, duplicate_ident_types caselist env + else ty_res, env + in + let rec is_var spat = + match spat.ppat_desc with + Ppat_any | Ppat_var _ -> true + | Ppat_alias (spat, _) -> is_var spat + | _ -> false in + let needs_exhaust_check = + match caselist with + [{pc_rhs = {pexp_desc = Pexp_unreachable}}] -> true + | [{pc_lhs}] when is_var pc_lhs -> false + | _ -> true + in + let init_env () = + (* raise level for existentials *) + begin_def (); + Ident.set_current_time (get_current_level ()); + let lev = Ident.current_time () in + Ctype.init_def (lev+1000); (* up to 1000 existentials *) + (lev, Env.add_gadt_instance_level lev env) + in + let lev, env = + if has_gadts then init_env () else (get_current_level (), env) + in +(* if has_gadts then + Format.printf "lev = %d@.%a@." lev Printtyp.raw_type_expr ty_res; *) + (* Do we need to propagate polymorphism *) + let propagate = + !Clflags.principal || has_gadts || (repr ty_arg).level = generic_level || + match caselist with + [{pc_lhs}] when is_var pc_lhs -> false + | _ -> true in + if propagate then begin_def (); (* propagation of the argument *) + let pattern_force = ref [] in +(* Format.printf "@[%i %i@ %a@]@." 
lev (get_current_level()) + Printtyp.raw_type_expr ty_arg; *) + let pat_env_list = + List.map + (fun {pc_lhs; pc_guard; pc_rhs} -> + let loc = + let open Location in + match pc_guard with + | None -> pc_rhs.pexp_loc + | Some g -> {pc_rhs.pexp_loc with loc_start=g.pexp_loc.loc_start} + in + if !Clflags.principal then begin_def (); (* propagation of pattern *) + let scope = Some (Annot.Idef loc) in + let (pat, ext_env, force, unpacks) = + let partial = + if !Clflags.principal || erase_either + then Some false else None in + let ty_arg = instance ?partial env ty_arg in + type_pattern ~lev env pc_lhs scope ty_arg + in + pattern_force := force @ !pattern_force; + let pat = + if !Clflags.principal then begin + end_def (); + iter_pattern (fun {pat_type=t} -> generalize_structure t) pat; + { pat with pat_type = instance ext_env pat.pat_type } + end else pat + in + (pat, (ext_env, unpacks))) + caselist in + (* Unify all cases (delayed to keep it order-free) *) + let ty_arg' = newvar () in + let unify_pats ty = + List.iter (fun (pat, (ext_env, _)) -> unify_pat ext_env pat ty) + pat_env_list in + unify_pats ty_arg'; + (* Check for polymorphic variants to close *) + let patl = List.map fst pat_env_list in + if List.exists has_variants patl then begin + Parmatch.pressure_variants env patl; + List.iter (iter_pattern finalize_variant) patl + end; + (* `Contaminating' unifications start here *) + List.iter (fun f -> f()) !pattern_force; + (* Post-processing and generalization *) + if propagate || erase_either then unify_pats (instance env ty_arg); + if propagate then begin + List.iter + (iter_pattern (fun {pat_type=t} -> unify_var env t (newvar()))) patl; + end_def (); + List.iter (iter_pattern (fun {pat_type=t} -> generalize t)) patl; + end; + (* type bodies *) + let in_function = if List.length caselist = 1 then in_function else None in + let cases = + List.map2 + (fun (pat, (ext_env, unpacks)) {pc_lhs; pc_guard; pc_rhs} -> + let sexp = wrap_unpacks pc_rhs unpacks in + let ty_res' = + if !Clflags.principal then begin + begin_def (); + let ty = instance ~partial:true env ty_res in + end_def (); + generalize_structure ty; ty + end + else if contains_gadt env pc_lhs then correct_levels ty_res + else ty_res in +(* Format.printf "@[%i %i, ty_res' =@ %a@]@." 
lev (get_current_level()) + Printtyp.raw_type_expr ty_res'; *) + let guard = + match pc_guard with + | None -> None + | Some scond -> + Some + (type_expect ext_env (wrap_unpacks scond unpacks) + Predef.type_bool) + in + let exp = type_expect ?in_function ext_env sexp ty_res' in + { + c_lhs = pat; + c_guard = guard; + c_rhs = {exp with exp_type = instance env ty_res'} + } + ) + pat_env_list caselist + in + if !Clflags.principal || has_gadts then begin + let ty_res' = instance env ty_res in + List.iter (fun c -> unify_exp env c.c_rhs ty_res') cases + end; + let do_init = has_gadts || needs_exhaust_check in + let lev, env = + if do_init && not has_gadts then init_env () else lev, env in + let ty_arg_check = + if do_init then + (* Hack: use for_saving to copy variables too *) + Subst.type_expr (Subst.for_saving Subst.identity) ty_arg + else ty_arg + in + let partial = + if partial_flag then + check_partial ~lev env ty_arg_check loc cases + else + Partial + in + let unused_check () = + List.iter (fun (pat, (env, _)) -> check_absent_variant env pat) + pat_env_list; + check_unused ~lev env (instance env ty_arg_check) cases ; + Parmatch.check_ambiguous_bindings cases + in + if contains_polyvars || do_init then + add_delayed_check unused_check + else + unused_check (); + (* Check for unused cases, do not delay because of gadts *) + if do_init then begin + end_def (); + (* Ensure that existential types do not escape *) + unify_exp_types loc env (instance env ty_res) (newvar ()) ; + end; + cases, partial + +(* Typing of let bindings *) + +and type_let ?(check = fun s -> Warnings.Unused_var s) + ?(check_strict = fun s -> Warnings.Unused_var_strict s) + env rec_flag spat_sexp_list scope allow = + let open Ast_helper in + begin_def(); + if !Clflags.principal then begin_def (); + + let is_fake_let = + match spat_sexp_list with + | [{pvb_expr={pexp_desc=Pexp_match( + {pexp_desc=Pexp_ident({ txt = Longident.Lident "*opt*"})},_)}}] -> + true (* the fake let-declaration introduced by fun ?(x = e) -> ... 
*) + | _ -> + false + in + let check = if is_fake_let then check_strict else check in + + let spatl = + List.map + (fun {pvb_pat=spat; pvb_expr=sexp; pvb_attributes=attrs} -> + attrs, + match spat.ppat_desc, sexp.pexp_desc with + (Ppat_any | Ppat_constraint _), _ -> spat + | _, Pexp_coerce (_, _, sty) + | _, Pexp_constraint (_, sty) when !Clflags.principal -> + (* propagate type annotation to pattern, + to allow it to be generalized in -principal mode *) + Pat.constraint_ + ~loc:{spat.ppat_loc with Location.loc_ghost=true} + spat + sty + | _ -> spat) + spat_sexp_list in + let nvs = List.map (fun _ -> newvar ()) spatl in + let (pat_list, new_env, force, unpacks) = + type_pattern_list env spatl scope nvs allow in + let attrs_list = List.map fst spatl in + let is_recursive = (rec_flag = Recursive) in + (* If recursive, first unify with an approximation of the expression *) + if is_recursive then + List.iter2 + (fun pat binding -> + let pat = + match pat.pat_type.desc with + | Tpoly (ty, tl) -> + {pat with pat_type = + snd (instance_poly ~keep_names:true false tl ty)} + | _ -> pat + in unify_pat env pat (type_approx env binding.pvb_expr)) + pat_list spat_sexp_list; + (* Polymorphic variant processing *) + List.iter + (fun pat -> + if has_variants pat then begin + Parmatch.pressure_variants env [pat]; + iter_pattern finalize_variant pat + end) + pat_list; + (* Generalize the structure *) + let pat_list = + if !Clflags.principal then begin + end_def (); + List.map + (fun pat -> + iter_pattern (fun pat -> generalize_structure pat.pat_type) pat; + {pat with pat_type = instance env pat.pat_type}) + pat_list + end else pat_list in + (* Only bind pattern variables after generalizing *) + List.iter (fun f -> f()) force; + let exp_env = + if is_recursive then new_env else env in + + let current_slot = ref None in + let rec_needed = ref false in + let warn_about_unused_bindings = + List.exists + (fun attrs -> + Builtin_attributes.warning_scope ~ppwarning:false attrs (fun () -> + Warnings.is_active (check "") || Warnings.is_active (check_strict "") || + (is_recursive && (Warnings.is_active Warnings.Unused_rec_flag)))) + attrs_list + in + let pat_slot_list = + (* Algorithm to detect unused declarations in recursive bindings: + - During type checking of the definitions, we capture the 'value_used' + events on the bound identifiers and record them in a slot corresponding + to the current definition (!current_slot). + In effect, this creates a dependency graph between definitions. + + - After type checking the definition (!current_slot = None), + when one of the bound identifier is effectively used, we trigger + again all the events recorded in the corresponding slot. + The effect is to traverse the transitive closure of the graph created + in the first step. + + We also keep track of whether *all* variables in a given pattern + are unused. If this is the case, for local declarations, the issued + warning is 26, not 27. + *) + List.map2 + (fun attrs pat -> + Builtin_attributes.warning_scope ~ppwarning:false attrs (fun () -> + if not warn_about_unused_bindings then pat, None + else + let some_used = ref false in + (* has one of the identifier of this pattern been used? 
*) + let slot = ref [] in + List.iter + (fun id -> + let vd = Env.find_value (Path.Pident id) new_env in + (* note: Env.find_value does not trigger the value_used event *) + let name = Ident.name id in + let used = ref false in + if not (name = "" || name.[0] = '_' || name.[0] = '#') then + add_delayed_check + (fun () -> + if not !used then + Location.prerr_warning vd.Types.val_loc + ((if !some_used then check_strict else check) name) + ); + Env.set_value_used_callback + name vd + (fun () -> + match !current_slot with + | Some slot -> + slot := (name, vd) :: !slot; rec_needed := true + | None -> + List.iter + (fun (name, vd) -> Env.mark_value_used env name vd) + (get_ref slot); + used := true; + some_used := true + ) + ) + (Typedtree.pat_bound_idents pat); + pat, Some slot + )) + attrs_list + pat_list + in + let exp_list = + List.map2 + (fun {pvb_expr=sexp; pvb_attributes; _} (pat, slot) -> + let sexp = + if rec_flag = Recursive then wrap_unpacks sexp unpacks else sexp in + if is_recursive then current_slot := slot; + match pat.pat_type.desc with + | Tpoly (ty, tl) -> + begin_def (); + if !Clflags.principal then begin_def (); + let vars, ty' = instance_poly ~keep_names:true true tl ty in + if !Clflags.principal then begin + end_def (); + generalize_structure ty' + end; + let exp = + Builtin_attributes.warning_scope pvb_attributes + (fun () -> type_expect exp_env sexp ty') + in + end_def (); + check_univars env true "definition" exp pat.pat_type vars; + {exp with exp_type = instance env exp.exp_type} + | _ -> + Builtin_attributes.warning_scope pvb_attributes (fun () -> + type_expect exp_env sexp pat.pat_type)) + spat_sexp_list pat_slot_list in + current_slot := None; + if is_recursive && not !rec_needed + && Warnings.is_active Warnings.Unused_rec_flag then begin + let {pvb_pat; pvb_attributes} = List.hd spat_sexp_list in + (* See PR#6677 *) + Builtin_attributes.warning_scope ~ppwarning:false pvb_attributes + (fun () -> + Location.prerr_warning pvb_pat.ppat_loc Warnings.Unused_rec_flag + ) + end; + List.iter2 + (fun pat (attrs, exp) -> + Builtin_attributes.warning_scope ~ppwarning:false attrs + (fun () -> + ignore(check_partial env pat.pat_type pat.pat_loc + [case pat exp]) + ) + ) + pat_list + (List.map2 (fun (attrs, _) e -> attrs, e) spatl exp_list); + end_def(); + List.iter2 + (fun pat exp -> + if not (is_nonexpansive exp) then + iter_pattern (fun pat -> generalize_expansive env pat.pat_type) pat) + pat_list exp_list; + List.iter + (fun pat -> iter_pattern (fun pat -> generalize pat.pat_type) pat) + pat_list; + let l = List.combine pat_list exp_list in + let l = + List.map2 + (fun (p, e) pvb -> + {vb_pat=p; vb_expr=e; vb_attributes=pvb.pvb_attributes; + vb_loc=pvb.pvb_loc; + }) + l spat_sexp_list + in + if is_recursive then + List.iter + (fun {vb_pat=pat} -> match pat.pat_desc with + Tpat_var _ -> () + | Tpat_alias ({pat_desc=Tpat_any}, _, _) -> () + | _ -> raise(Error(pat.pat_loc, env, Illegal_letrec_pat))) + l; + (l, new_env, unpacks) + +(* Typing of toplevel bindings *) + +let type_binding env rec_flag spat_sexp_list scope = + Typetexp.reset_type_variables(); + let (pat_exp_list, new_env, _unpacks) = + type_let + ~check:(fun s -> Warnings.Unused_value_declaration s) + ~check_strict:(fun s -> Warnings.Unused_value_declaration s) + env rec_flag spat_sexp_list scope false + in + (pat_exp_list, new_env) + +let type_let env rec_flag spat_sexp_list scope = + let (pat_exp_list, new_env, _unpacks) = + type_let env rec_flag spat_sexp_list scope false in + (pat_exp_list, new_env) + +(* Typing of 
toplevel expressions *) + +let type_expression env sexp = + Typetexp.reset_type_variables(); + begin_def(); + let exp = type_exp env sexp in + end_def(); + if not (is_nonexpansive exp) then generalize_expansive env exp.exp_type; + generalize exp.exp_type; + match sexp.pexp_desc with + Pexp_ident lid -> + (* Special case for keeping type variables when looking-up a variable *) + let (_path, desc) = Env.lookup_value lid.txt env in + {exp with exp_type = desc.val_type} + | _ -> exp + +(* Error report *) + +let spellcheck ppf unbound_name valid_names = + Misc.did_you_mean ppf (fun () -> + Misc.spellcheck valid_names unbound_name + ) + +let spellcheck_idents ppf unbound valid_idents = + spellcheck ppf (Ident.name unbound) (List.map Ident.name valid_idents) + +open Format +open Printtyp + +let report_error env ppf = function + | Polymorphic_label lid -> + fprintf ppf "@[The record field %a is polymorphic.@ %s@]" + longident lid "You cannot instantiate it in a pattern." + | Constructor_arity_mismatch(lid, expected, provided) -> + fprintf ppf + "@[The constructor %a@ expects %i argument(s),@ \ + but is applied here to %i argument(s)@]" + longident lid expected provided + | Label_mismatch(lid, trace) -> + report_unification_error ppf env trace + (function ppf -> + fprintf ppf "The record field %a@ belongs to the type" + longident lid) + (function ppf -> + fprintf ppf "but is mixed here with fields of type") + | Pattern_type_clash trace -> + report_unification_error ppf env trace + (function ppf -> + fprintf ppf "This pattern matches values of type") + (function ppf -> + fprintf ppf "but a pattern was expected which matches values of type") + | Or_pattern_type_clash (id, trace) -> + report_unification_error ppf env trace + (function ppf -> + fprintf ppf "The variable %s on the left-hand side of this \ + or-pattern has type" (Ident.name id)) + (function ppf -> + fprintf ppf "but on the right-hand side it has type") + | Multiply_bound_variable name -> + fprintf ppf "Variable %s is bound several times in this matching" name + | Orpat_vars (id, valid_idents) -> + fprintf ppf "Variable %s must occur on both sides of this | pattern" + (Ident.name id); + spellcheck_idents ppf id valid_idents + | Expr_type_clash trace -> + report_unification_error ppf env trace + (function ppf -> + fprintf ppf "This expression has type") + (function ppf -> + fprintf ppf "but an expression was expected of type") + | Apply_non_function typ -> + reset_and_mark_loops typ; + begin match (repr typ).desc with + Tarrow _ -> + fprintf ppf "@[@[<2>This function has type@ %a@]" + type_expr typ; + fprintf ppf "@ @[It is applied to too many arguments;@ %s@]@]" + "maybe you forgot a `;'." + | _ -> + fprintf ppf "@[@[<2>This expression has type@ %a@]@ %s@]" + type_expr typ + "This is not a function; it cannot be applied." 
+ end + | Apply_wrong_label (l, ty) -> + let print_label ppf = function + | Nolabel -> fprintf ppf "without label" + | l -> + fprintf ppf "with label %s" (prefixed_label_name l) + in + reset_and_mark_loops ty; + fprintf ppf + "@[@[<2>The function applied to this argument has type@ %a@]@.\ + This argument cannot be applied %a@]" + type_expr ty print_label l + | Label_multiply_defined s -> + fprintf ppf "The record field label %s is defined several times" s + | Label_missing labels -> + let print_labels ppf = + List.iter (fun lbl -> fprintf ppf "@ %s" (Ident.name lbl)) in + fprintf ppf "@[Some record fields are undefined:%a@]" + print_labels labels + | Label_not_mutable lid -> + fprintf ppf "The record field %a is not mutable" longident lid + | Wrong_name (eorp, ty, kind, p, name, valid_names) -> + reset_and_mark_loops ty; + if Path.is_constructor_typath p then begin + fprintf ppf "@[The field %s is not part of the record \ + argument for the %a constructor@]" + name + path p; + end else begin + fprintf ppf "@[@[<2>%s type@ %a@]@ " + eorp type_expr ty; + fprintf ppf "The %s %s does not belong to type %a@]" + (label_of_kind kind) + name (*kind*) path p; + end; + spellcheck ppf name valid_names; + | Name_type_mismatch (kind, lid, tp, tpl) -> + let name = label_of_kind kind in + report_ambiguous_type_error ppf env tp tpl + (function ppf -> + fprintf ppf "The %s %a@ belongs to the %s type" + name longident lid kind) + (function ppf -> + fprintf ppf "The %s %a@ belongs to one of the following %s types:" + name longident lid kind) + (function ppf -> + fprintf ppf "but a %s was expected belonging to the %s type" + name kind) + | Invalid_format msg -> + fprintf ppf "%s" msg + | Undefined_method (ty, me, valid_methods) -> + reset_and_mark_loops ty; + fprintf ppf + "@[@[This expression has type@;<1 2>%a@]@,\ + It has no method %s@]" type_expr ty me; + begin match valid_methods with + | None -> () + | Some valid_methods -> spellcheck ppf me valid_methods + end + | Undefined_inherited_method (me, valid_methods) -> + fprintf ppf "This expression has no method %s" me; + spellcheck ppf me valid_methods; + | Virtual_class cl -> + fprintf ppf "Cannot instantiate the virtual class %a" + longident cl + | Unbound_instance_variable (var, valid_vars) -> + fprintf ppf "Unbound instance variable %s" var; + spellcheck ppf var valid_vars; + | Instance_variable_not_mutable (b, v) -> + if b then + fprintf ppf "The instance variable %s is not mutable" v + else + fprintf ppf "The value %s is not an instance variable" v + | Not_subtype(tr1, tr2) -> + report_subtyping_error ppf env tr1 "is not a subtype of" tr2 + | Outside_class -> + fprintf ppf "This object duplication occurs outside a method definition" + | Value_multiply_overridden v -> + fprintf ppf "The instance variable %s is overridden several times" v + | Coercion_failure (ty, ty', trace, b) -> + report_unification_error ppf env trace + (function ppf -> + let ty, ty' = prepare_expansion (ty, ty') in + fprintf ppf + "This expression cannot be coerced to type@;<1 2>%a;@ it has type" + (type_expansion ty) ty') + (function ppf -> + fprintf ppf "but is here used with type"); + if b then + fprintf ppf ".@.@[%s@ %s@]" + "This simple coercion was not fully general." + "Consider using a double coercion." 
+ | Too_many_arguments (in_function, ty) -> + reset_and_mark_loops ty; + if in_function then begin + fprintf ppf "This function expects too many arguments,@ "; + fprintf ppf "it should have type@ %a" + type_expr ty + end else begin + fprintf ppf "This expression should not be a function,@ "; + fprintf ppf "the expected type is@ %a" + type_expr ty + end + | Abstract_wrong_label (l, ty) -> + let label_mark = function + | Nolabel -> "but its first argument is not labelled" + | l -> sprintf "but its first argument is labelled %s" + (prefixed_label_name l) in + reset_and_mark_loops ty; + fprintf ppf "@[@[<2>This function should have type@ %a@]@,%s@]" + type_expr ty (label_mark l) + | Scoping_let_module(id, ty) -> + reset_and_mark_loops ty; + fprintf ppf + "This `let module' expression has type@ %a@ " type_expr ty; + fprintf ppf + "In this type, the locally bound module name %s escapes its scope" id + | Masked_instance_variable lid -> + fprintf ppf + "The instance variable %a@ \ + cannot be accessed from the definition of another instance variable" + longident lid + | Private_type ty -> + fprintf ppf "Cannot create values of the private type %a" type_expr ty + | Private_label (lid, ty) -> + fprintf ppf "Cannot assign field %a of the private type %a" + longident lid type_expr ty + | Not_a_variant_type lid -> + fprintf ppf "The type %a@ is not a variant type" longident lid + | Incoherent_label_order -> + fprintf ppf "This function is applied to arguments@ "; + fprintf ppf "in an order different from other calls.@ "; + fprintf ppf "This is only allowed when the real type is known." + | Less_general (kind, trace) -> + report_unification_error ppf env trace + (fun ppf -> fprintf ppf "This %s has type" kind) + (fun ppf -> fprintf ppf "which is less general than") + | Modules_not_allowed -> + fprintf ppf "Modules are not allowed in this pattern." + | Cannot_infer_signature -> + fprintf ppf + "The signature for this packaged module couldn't be inferred." + | Not_a_packed_module ty -> + fprintf ppf + "This expression is packed module, but the expected type is@ %a" + type_expr ty + | Recursive_local_constraint trace -> + report_unification_error ppf env trace + (function ppf -> + fprintf ppf "Recursive local constraint when unifying") + (function ppf -> + fprintf ppf "with") + | Unexpected_existential -> + fprintf ppf + "Unexpected existential" + | Unqualified_gadt_pattern (tpath, name) -> + fprintf ppf "@[The GADT constructor %s of type %a@ %s.@]" + name path tpath + "must be qualified in this pattern" + | Invalid_interval -> + fprintf ppf "@[Only character intervals are supported in patterns.@]" + | Invalid_for_loop_index -> + fprintf ppf + "@[Invalid for-loop index: only variables and _ are allowed.@]" + | No_value_clauses -> + fprintf ppf + "None of the patterns in this 'match' expression match values." + | Exception_pattern_below_toplevel -> + fprintf ppf + "@[Exception patterns must be at the top level of a match case.@]" + | Inlined_record_escape -> + fprintf ppf + "@[This form is not allowed as the type of the inlined record could \ + escape.@]" + | Inlined_record_expected -> + fprintf ppf + "@[This constructor expects an inlined record argument.@]" + | Unrefuted_pattern pat -> + fprintf ppf + "@[%s@ %s@ %a@]" + "This match case could not be refuted." + "Here is an example of a value that would reach it:" + Parmatch.top_pretty pat + | Invalid_extension_constructor_payload -> + fprintf ppf + "Invalid [%%extension_constructor] payload, a constructor is expected." 
+ | Not_an_extension_constructor -> + fprintf ppf + "This constructor is not an extension constructor." + | Literal_overflow ty -> + fprintf ppf "Integer literal exceeds the range of representable \ + integers of type %s" ty + | Unknown_literal (n, m) -> + fprintf ppf "Unknown modifier '%c' for literal %s%c" m n m + | Illegal_letrec_pat -> + fprintf ppf + "Only variables are allowed as left-hand side of `let rec'" + | Illegal_letrec_expr -> + fprintf ppf + "This kind of expression is not allowed as right-hand side of `let rec'" + | Illegal_class_expr -> + fprintf ppf "This kind of recursive class expression is not allowed" + +let report_error env ppf err = + wrap_printing_env env (fun () -> report_error env ppf err) + +let () = + Location.register_error_of_exn + (function + | Error (loc, env, err) -> + Some (Location.error_of_printer loc (report_error env) err) + | Error_forward err -> + Some err + | _ -> + None + ) + +let () = + Env.add_delayed_check_forward := add_delayed_check + +(* drop ?recarg argument from the external API *) +let type_expect ?in_function env e ty = type_expect ?in_function env e ty +let type_exp env e = type_exp env e +let type_argument env e t1 t2 = type_argument env e t1 t2 diff --git a/res_syntax/compiler-libs-406/typecore.mli b/res_syntax/compiler-libs-406/typecore.mli new file mode 100644 index 0000000000..42f125c176 --- /dev/null +++ b/res_syntax/compiler-libs-406/typecore.mli @@ -0,0 +1,160 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +(* Type inference for the core language *) + +open Asttypes +open Types +open Format + +val is_nonexpansive: Typedtree.expression -> bool + +val type_binding: + Env.t -> rec_flag -> + Parsetree.value_binding list -> + Annot.ident option -> + Typedtree.value_binding list * Env.t +val type_let: + Env.t -> rec_flag -> + Parsetree.value_binding list -> + Annot.ident option -> + Typedtree.value_binding list * Env.t +val type_expression: + Env.t -> Parsetree.expression -> Typedtree.expression +val type_class_arg_pattern: + string -> Env.t -> Env.t -> arg_label -> Parsetree.pattern -> + Typedtree.pattern * (Ident.t * string loc * Ident.t * type_expr) list * + Env.t * Env.t +val type_self_pattern: + string -> type_expr -> Env.t -> Env.t -> Env.t -> Parsetree.pattern -> + Typedtree.pattern * + (Ident.t * type_expr) Meths.t ref * + (Ident.t * Asttypes.mutable_flag * Asttypes.virtual_flag * type_expr) + Vars.t ref * + Env.t * Env.t * Env.t +val check_partial: + ?lev:int -> Env.t -> type_expr -> + Location.t -> Typedtree.case list -> Typedtree.partial +val type_expect: + ?in_function:(Location.t * type_expr) -> + Env.t -> Parsetree.expression -> type_expr -> Typedtree.expression +val type_exp: + Env.t -> Parsetree.expression -> Typedtree.expression +val type_approx: + Env.t -> Parsetree.expression -> type_expr +val type_argument: + Env.t -> Parsetree.expression -> + type_expr -> type_expr -> Typedtree.expression + +val option_some: Typedtree.expression -> Typedtree.expression +val option_none: type_expr -> Location.t -> Typedtree.expression +val extract_option_type: Env.t -> type_expr -> type_expr +val iter_pattern: (Typedtree.pattern -> unit) -> Typedtree.pattern -> unit +val generalizable: int -> type_expr -> bool +val reset_delayed_checks: unit -> unit +val force_delayed_checks: unit -> unit + +val name_pattern : string -> Typedtree.case list -> Ident.t + +val self_coercion : (Path.t * Location.t list ref) list ref + +type error = + Polymorphic_label of Longident.t + | Constructor_arity_mismatch of Longident.t * int * int + | Label_mismatch of Longident.t * (type_expr * type_expr) list + | Pattern_type_clash of (type_expr * type_expr) list + | Or_pattern_type_clash of Ident.t * (type_expr * type_expr) list + | Multiply_bound_variable of string + | Orpat_vars of Ident.t * Ident.t list + | Expr_type_clash of (type_expr * type_expr) list + | Apply_non_function of type_expr + | Apply_wrong_label of arg_label * type_expr + | Label_multiply_defined of string + | Label_missing of Ident.t list + | Label_not_mutable of Longident.t + | Wrong_name of string * type_expr * string * Path.t * string * string list + | Name_type_mismatch of + string * Longident.t * (Path.t * Path.t) * (Path.t * Path.t) list + | Invalid_format of string + | Undefined_method of type_expr * string * string list option + | Undefined_inherited_method of string * string list + | Virtual_class of Longident.t + | Private_type of type_expr + | Private_label of Longident.t * type_expr + | Unbound_instance_variable of string * string list + | Instance_variable_not_mutable of bool * string + | Not_subtype of (type_expr * type_expr) list * (type_expr * type_expr) list + | Outside_class + | Value_multiply_overridden of string + | Coercion_failure of + type_expr * type_expr * (type_expr * type_expr) list * bool + | Too_many_arguments of bool * type_expr + | Abstract_wrong_label of arg_label * type_expr + | Scoping_let_module of string * type_expr + | 
Masked_instance_variable of Longident.t + | Not_a_variant_type of Longident.t + | Incoherent_label_order + | Less_general of string * (type_expr * type_expr) list + | Modules_not_allowed + | Cannot_infer_signature + | Not_a_packed_module of type_expr + | Recursive_local_constraint of (type_expr * type_expr) list + | Unexpected_existential + | Unqualified_gadt_pattern of Path.t * string + | Invalid_interval + | Invalid_for_loop_index + | No_value_clauses + | Exception_pattern_below_toplevel + | Inlined_record_escape + | Inlined_record_expected + | Unrefuted_pattern of Typedtree.pattern + | Invalid_extension_constructor_payload + | Not_an_extension_constructor + | Literal_overflow of string + | Unknown_literal of string * char + | Illegal_letrec_pat + | Illegal_letrec_expr + | Illegal_class_expr + +exception Error of Location.t * Env.t * error +exception Error_forward of Location.error + +val report_error: Env.t -> formatter -> error -> unit + (* Deprecated. Use Location.{error_of_exn, report_error}. *) + +(* Forward declaration, to be filled in by Typemod.type_module *) +val type_module: (Env.t -> Parsetree.module_expr -> Typedtree.module_expr) ref +(* Forward declaration, to be filled in by Typemod.type_open *) +val type_open: + (?used_slot:bool ref -> override_flag -> Env.t -> Location.t -> + Longident.t loc -> Path.t * Env.t) + ref +(* Forward declaration, to be filled in by Typeclass.class_structure *) +val type_object: + (Env.t -> Location.t -> Parsetree.class_structure -> + Typedtree.class_structure * Types.class_signature * string list) ref +val type_package: + (Env.t -> Parsetree.module_expr -> Path.t -> Longident.t list -> + Typedtree.module_expr * type_expr list) ref + +val create_package_type : Location.t -> Env.t -> + Longident.t * (Longident.t * Parsetree.core_type) list -> + Path.t * (Longident.t * Typedtree.core_type) list * Types.type_expr + +val constant: Parsetree.constant -> (Asttypes.constant, error) result + +val check_recursive_bindings : Env.t -> Typedtree.value_binding list -> unit +val check_recursive_class_bindings : + Env.t -> Ident.t list -> Typedtree.class_expr list -> unit diff --git a/res_syntax/compiler-libs-406/typedecl.ml b/res_syntax/compiler-libs-406/typedecl.ml new file mode 100644 index 0000000000..2369b84c68 --- /dev/null +++ b/res_syntax/compiler-libs-406/typedecl.ml @@ -0,0 +1,2165 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy and Jerome Vouillon, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +(**** Typing of type definitions ****) + +open Misc +open Asttypes +open Parsetree +open Primitive +open Types +open Typetexp + +type native_repr_kind = Unboxed | Untagged + +type error = + Repeated_parameter + | Duplicate_constructor of string + | Too_many_constructors + | Duplicate_label of string + | Recursive_abbrev of string + | Cycle_in_def of string * type_expr + | Definition_mismatch of type_expr * Includecore.type_mismatch list + | Constraint_failed of type_expr * type_expr + | Inconsistent_constraint of Env.t * (type_expr * type_expr) list + | Type_clash of Env.t * (type_expr * type_expr) list + | Parameters_differ of Path.t * type_expr * type_expr + | Null_arity_external + | Missing_native_external + | Unbound_type_var of type_expr * type_declaration + | Cannot_extend_private_type of Path.t + | Not_extensible_type of Path.t + | Extension_mismatch of Path.t * Includecore.type_mismatch list + | Rebind_wrong_type of Longident.t * Env.t * (type_expr * type_expr) list + | Rebind_mismatch of Longident.t * Path.t * Path.t + | Rebind_private of Longident.t + | Bad_variance of int * (bool * bool * bool) * (bool * bool * bool) + | Unavailable_type_constructor of Path.t + | Bad_fixed_type of string + | Unbound_type_var_ext of type_expr * extension_constructor + | Varying_anonymous + | Val_in_structure + | Multiple_native_repr_attributes + | Cannot_unbox_or_untag_type of native_repr_kind + | Deep_unbox_or_untag_attribute of native_repr_kind + | Bad_immediate_attribute + | Bad_unboxed_attribute of string + | Wrong_unboxed_type_float + | Boxed_and_unboxed + | Nonrec_gadt + +open Typedtree + +exception Error of Location.t * error + +(* Note: do not factor the branches in the following pattern-matching: + the records must be constants for the compiler to do sharing on them. 
+*) +let get_unboxed_from_attributes sdecl = + let unboxed = Builtin_attributes.has_unboxed sdecl.ptype_attributes in + let boxed = Builtin_attributes.has_boxed sdecl.ptype_attributes in + match boxed, unboxed, !Clflags.unboxed_types with + | true, true, _ -> raise (Error(sdecl.ptype_loc, Boxed_and_unboxed)) + | true, false, _ -> unboxed_false_default_false + | false, true, _ -> unboxed_true_default_false + | false, false, false -> unboxed_false_default_true + | false, false, true -> unboxed_true_default_true + +(* Enter all declared types in the environment as abstract types *) + +let enter_type rec_flag env sdecl id = + let needed = + match rec_flag with + | Asttypes.Nonrecursive -> + begin match sdecl.ptype_kind with + | Ptype_variant scds -> + List.iter (fun cd -> + if cd.pcd_res <> None then raise (Error(cd.pcd_loc, Nonrec_gadt))) + scds + | _ -> () + end; + Btype.is_row_name (Ident.name id) + | Asttypes.Recursive -> true + in + if not needed then env else + let decl = + { type_params = + List.map (fun _ -> Btype.newgenvar ()) sdecl.ptype_params; + type_arity = List.length sdecl.ptype_params; + type_kind = Type_abstract; + type_private = sdecl.ptype_private; + type_manifest = + begin match sdecl.ptype_manifest with None -> None + | Some _ -> Some(Ctype.newvar ()) end; + type_variance = List.map (fun _ -> Variance.full) sdecl.ptype_params; + type_newtype_level = None; + type_loc = sdecl.ptype_loc; + type_attributes = sdecl.ptype_attributes; + type_immediate = false; + type_unboxed = unboxed_false_default_false; + } + in + Env.add_type ~check:true id decl env + +let update_type temp_env env id loc = + let path = Path.Pident id in + let decl = Env.find_type path temp_env in + match decl.type_manifest with None -> () + | Some ty -> + let params = List.map (fun _ -> Ctype.newvar ()) decl.type_params in + try Ctype.unify env (Ctype.newconstr path params) ty + with Ctype.Unify trace -> + raise (Error(loc, Type_clash (env, trace))) + +(* We use the Ctype.expand_head_opt version of expand_head to get access + to the manifest type of private abbreviations. *) +let rec get_unboxed_type_representation env ty fuel = + if fuel < 0 then None else + let ty = Ctype.repr (Ctype.expand_head_opt env ty) in + match ty.desc with + | Tconstr (p, args, _) -> + begin match Env.find_type p env with + | exception Not_found -> Some ty + | {type_unboxed = {unboxed = false}} -> Some ty + | {type_params; type_kind = + Type_record ([{ld_type = ty2; _}], _) + | Type_variant [{cd_args = Cstr_tuple [ty2]; _}] + | Type_variant [{cd_args = Cstr_record [{ld_type = ty2; _}]; _}]} + + -> get_unboxed_type_representation env + (Ctype.apply env type_params ty2 args) (fuel - 1) + | {type_kind=Type_abstract} -> None + (* This case can occur when checking a recursive unboxed type + declaration. *) + | _ -> assert false (* only the above can be unboxed *) + end + | _ -> Some ty + +let get_unboxed_type_representation env ty = + (* Do not give too much fuel: PR#7424 *) + get_unboxed_type_representation env ty 100 +;; + +(* Determine if a type's values are represented by floats at run-time. *) +let is_float env ty = + match get_unboxed_type_representation env ty with + Some {desc = Tconstr(p, _, _); _} -> Path.same p Predef.path_float + | _ -> false + +(* Determine if a type definition defines a fixed type. 
(PW) *) +let is_fixed_type sd = + let rec has_row_var sty = + match sty.ptyp_desc with + Ptyp_alias (sty, _) -> has_row_var sty + | Ptyp_class _ + | Ptyp_object (_, Open) + | Ptyp_variant (_, Open, _) + | Ptyp_variant (_, Closed, Some _) -> true + | _ -> false + in + match sd.ptype_manifest with + None -> false + | Some sty -> + sd.ptype_kind = Ptype_abstract && + sd.ptype_private = Private && + has_row_var sty + +(* Set the row variable in a fixed type *) +let set_fixed_row env loc p decl = + let tm = + match decl.type_manifest with + None -> assert false + | Some t -> Ctype.expand_head env t + in + let rv = + match tm.desc with + Tvariant row -> + let row = Btype.row_repr row in + tm.desc <- Tvariant {row with row_fixed = true}; + if Btype.static_row row then Btype.newgenty Tnil + else row.row_more + | Tobject (ty, _) -> + snd (Ctype.flatten_fields ty) + | _ -> + raise (Error (loc, Bad_fixed_type "is not an object or variant")) + in + if not (Btype.is_Tvar rv) then + raise (Error (loc, Bad_fixed_type "has no row variable")); + rv.desc <- Tconstr (p, decl.type_params, ref Mnil) + +(* Translate one type declaration *) + +module StringSet = + Set.Make(struct + type t = string + let compare (x:t) y = compare x y + end) + +let make_params env params = + let make_param (sty, v) = + try + (transl_type_param env sty, v) + with Already_bound -> + raise(Error(sty.ptyp_loc, Repeated_parameter)) + in + List.map make_param params + +let transl_labels env closed lbls = + assert (lbls <> []); + let all_labels = ref StringSet.empty in + List.iter + (fun {pld_name = {txt=name; loc}} -> + if StringSet.mem name !all_labels then + raise(Error(loc, Duplicate_label name)); + all_labels := StringSet.add name !all_labels) + lbls; + let mk {pld_name=name;pld_mutable=mut;pld_type=arg;pld_loc=loc; + pld_attributes=attrs} = + Builtin_attributes.warning_scope attrs + (fun () -> + let arg = Ast_helper.Typ.force_poly arg in + let cty = transl_simple_type env closed arg in + {ld_id = Ident.create name.txt; ld_name = name; ld_mutable = mut; + ld_type = cty; ld_loc = loc; ld_attributes = attrs} + ) + in + let lbls = List.map mk lbls in + let lbls' = + List.map + (fun ld -> + let ty = ld.ld_type.ctyp_type in + let ty = match ty.desc with Tpoly(t,[]) -> t | _ -> ty in + {Types.ld_id = ld.ld_id; + ld_mutable = ld.ld_mutable; + ld_type = ty; + ld_loc = ld.ld_loc; + ld_attributes = ld.ld_attributes + } + ) + lbls in + lbls, lbls' + +let transl_constructor_arguments env closed = function + | Pcstr_tuple l -> + let l = List.map (transl_simple_type env closed) l in + Types.Cstr_tuple (List.map (fun t -> t.ctyp_type) l), + Cstr_tuple l + | Pcstr_record l -> + let lbls, lbls' = transl_labels env closed l in + Types.Cstr_record lbls', + Cstr_record lbls + +let make_constructor env type_path type_params sargs sret_type = + match sret_type with + | None -> + let args, targs = + transl_constructor_arguments env true sargs + in + targs, None, args, None, type_params + | Some sret_type -> + (* if it's a generalized constructor we must first narrow and + then widen so as to not introduce any new constraints *) + let z = narrow () in + reset_type_variables (); + let args, targs = + transl_constructor_arguments env false sargs + in + let tret_type = transl_simple_type env false sret_type in + let ret_type = tret_type.ctyp_type in + let params = + match (Ctype.repr ret_type).desc with + | Tconstr (p', params, _) when Path.same type_path p' -> + params + | _ -> + raise (Error (sret_type.ptyp_loc, Constraint_failed + (ret_type, 
Ctype.newconstr type_path type_params))) + in + widen z; + targs, Some tret_type, args, Some ret_type, params + +(* Check that the variable [id] is present in the [univ] list. *) +let check_type_var loc univ id = + let f t = (Btype.repr t).id = id in + if not (List.exists f univ) then raise (Error (loc, Wrong_unboxed_type_float)) + +(* Check that all the variables found in [ty] are in [univ]. + Because [ty] is the argument to an abstract type, the representation + of that abstract type could be any subexpression of [ty], in particular + any type variable present in [ty]. +*) +let rec check_unboxed_abstract_arg loc univ ty = + match ty.desc with + | Tvar _ -> check_type_var loc univ ty.id + | Tarrow (_, t1, t2, _) + | Tfield (_, _, t1, t2) -> + check_unboxed_abstract_arg loc univ t1; + check_unboxed_abstract_arg loc univ t2 + | Ttuple args + | Tconstr (_, args, _) + | Tpackage (_, _, args) -> + List.iter (check_unboxed_abstract_arg loc univ) args + | Tobject (fields, r) -> + check_unboxed_abstract_arg loc univ fields; + begin match !r with + | None -> () + | Some (_, args) -> List.iter (check_unboxed_abstract_arg loc univ) args + end + | Tnil + | Tunivar _ -> () + | Tlink e -> check_unboxed_abstract_arg loc univ e + | Tsubst _ -> assert false + | Tvariant { row_fields; row_more; row_name } -> + List.iter (check_unboxed_abstract_row_field loc univ) row_fields; + check_unboxed_abstract_arg loc univ row_more; + begin match row_name with + | None -> () + | Some (_, args) -> List.iter (check_unboxed_abstract_arg loc univ) args + end + | Tpoly (t, _) -> check_unboxed_abstract_arg loc univ t + +and check_unboxed_abstract_row_field loc univ (_, field) = + match field with + | Rpresent (Some ty) -> check_unboxed_abstract_arg loc univ ty + | Reither (_, args, _, r) -> + List.iter (check_unboxed_abstract_arg loc univ) args; + begin match !r with + | None -> () + | Some f -> check_unboxed_abstract_row_field loc univ ("", f) + end + | Rabsent + | Rpresent None -> () + +(* Check that the argument to a GADT constructor is compatible with unboxing + the type, given the universal parameters of the type. *) +let rec check_unboxed_gadt_arg loc univ env ty = + match get_unboxed_type_representation env ty with + | Some {desc = Tvar _; id} -> check_type_var loc univ id + | Some {desc = Tarrow _ | Ttuple _ | Tpackage _ | Tobject _ | Tnil + | Tvariant _; _} -> + () + (* A comment in [Translcore.transl_exp0] claims the above cannot be + represented by floats. *) + | Some {desc = Tconstr (p, args, _); _} -> + let tydecl = Env.find_type p env in + assert (not tydecl.type_unboxed.unboxed); + if tydecl.type_kind = Type_abstract then + List.iter (check_unboxed_abstract_arg loc univ) args + | Some {desc = Tfield _ | Tlink _ | Tsubst _; _} -> assert false + | Some {desc = Tunivar _; _} -> () + | Some {desc = Tpoly (t2, _); _} -> check_unboxed_gadt_arg loc univ env t2 + | None -> () + (* This case is tricky: the argument is another (or the same) type + in the same recursive definition. In this case we don't have to + check because we will also check that other type for correctness. 
*) + +let transl_declaration env sdecl id = + (* Bind type parameters *) + reset_type_variables(); + Ctype.begin_def (); + let tparams = make_params env sdecl.ptype_params in + let params = List.map (fun (cty, _) -> cty.ctyp_type) tparams in + let cstrs = List.map + (fun (sty, sty', loc) -> + transl_simple_type env false sty, + transl_simple_type env false sty', loc) + sdecl.ptype_cstrs + in + let raw_status = get_unboxed_from_attributes sdecl in + if raw_status.unboxed && not raw_status.default then begin + match sdecl.ptype_kind with + | Ptype_abstract -> + raise(Error(sdecl.ptype_loc, Bad_unboxed_attribute + "it is abstract")) + | Ptype_variant [{pcd_args = Pcstr_tuple []; _}] -> + raise(Error(sdecl.ptype_loc, Bad_unboxed_attribute + "its constructor has no argument")) + | Ptype_variant [{pcd_args = Pcstr_tuple [_]; _}] -> () + | Ptype_variant [{pcd_args = Pcstr_tuple _; _}] -> + raise(Error(sdecl.ptype_loc, Bad_unboxed_attribute + "its constructor has more than one argument")) + | Ptype_variant [{pcd_args = Pcstr_record + [{pld_mutable=Immutable; _}]; _}] -> () + | Ptype_variant [{pcd_args = Pcstr_record [{pld_mutable=Mutable; _}]; _}] -> + raise(Error(sdecl.ptype_loc, Bad_unboxed_attribute "it is mutable")) + | Ptype_variant [{pcd_args = Pcstr_record _; _}] -> + raise(Error(sdecl.ptype_loc, Bad_unboxed_attribute + "its constructor has more than one argument")) + | Ptype_variant _ -> + raise(Error(sdecl.ptype_loc, Bad_unboxed_attribute + "it has more than one constructor")) + | Ptype_record [{pld_mutable=Immutable; _}] -> () + | Ptype_record [{pld_mutable=Mutable; _}] -> + raise(Error(sdecl.ptype_loc, Bad_unboxed_attribute + "it is mutable")) + | Ptype_record _ -> + raise(Error(sdecl.ptype_loc, Bad_unboxed_attribute + "it has more than one field")) + | Ptype_open -> + raise(Error(sdecl.ptype_loc, Bad_unboxed_attribute + "extensible variant types cannot be unboxed")) + end; + let unboxed_status = + match sdecl.ptype_kind with + | Ptype_variant [{pcd_args = Pcstr_tuple [_]; _}] + | Ptype_variant [{pcd_args = Pcstr_record + [{pld_mutable = Immutable; _}]; _}] + | Ptype_record [{pld_mutable = Immutable; _}] -> + raw_status + | _ -> (* The type is not unboxable, mark it as boxed *) + unboxed_false_default_false + in + let unbox = unboxed_status.unboxed in + let (tkind, kind) = + match sdecl.ptype_kind with + | Ptype_abstract -> Ttype_abstract, Type_abstract + | Ptype_variant scstrs -> + assert (scstrs <> []); + if List.exists (fun cstr -> cstr.pcd_res <> None) scstrs then begin + match cstrs with + [] -> () + | (_,_,loc)::_ -> + Location.prerr_warning loc Warnings.Constraint_on_gadt + end; + let all_constrs = ref StringSet.empty in + List.iter + (fun {pcd_name = {txt = name}} -> + if StringSet.mem name !all_constrs then + raise(Error(sdecl.ptype_loc, Duplicate_constructor name)); + all_constrs := StringSet.add name !all_constrs) + scstrs; + if List.length + (List.filter (fun cd -> cd.pcd_args <> Pcstr_tuple []) scstrs) + > (Config.max_tag + 1) then + raise(Error(sdecl.ptype_loc, Too_many_constructors)); + let make_cstr scstr = + let name = Ident.create scstr.pcd_name.txt in + let targs, tret_type, args, ret_type, cstr_params = + make_constructor env (Path.Pident id) params + scstr.pcd_args scstr.pcd_res + in + if Config.flat_float_array && unbox then begin + (* Cannot unbox a type when the argument can be both float and + non-float because it interferes with the dynamic float array + optimization. 
This can only happen when the type is a GADT + and the argument is an existential type variable or an + unboxed (or abstract) type constructor applied to some + existential type variable. Of course we also have to rule + out any abstract type constructor applied to anything that + might be an existential type variable. + There is a difficulty with existential variables created + out of thin air (rather than bound by the declaration). + See PR#7511 and GPR#1133 for details. *) + match Datarepr.constructor_existentials args ret_type with + | _, [] -> () + | [argty], _ex -> + check_unboxed_gadt_arg sdecl.ptype_loc cstr_params env argty + | _ -> assert false + end; + let tcstr = + { cd_id = name; + cd_name = scstr.pcd_name; + cd_args = targs; + cd_res = tret_type; + cd_loc = scstr.pcd_loc; + cd_attributes = scstr.pcd_attributes } + in + let cstr = + { Types.cd_id = name; + cd_args = args; + cd_res = ret_type; + cd_loc = scstr.pcd_loc; + cd_attributes = scstr.pcd_attributes } + in + tcstr, cstr + in + let make_cstr scstr = + Builtin_attributes.warning_scope scstr.pcd_attributes + (fun () -> make_cstr scstr) + in + let tcstrs, cstrs = List.split (List.map make_cstr scstrs) in + Ttype_variant tcstrs, Type_variant cstrs + | Ptype_record lbls -> + let lbls, lbls' = transl_labels env true lbls in + let rep = + if unbox then Record_unboxed false + else if List.for_all (fun l -> is_float env l.Types.ld_type) lbls' + then Record_float + else Record_regular + in + Ttype_record lbls, Type_record(lbls', rep) + | Ptype_open -> Ttype_open, Type_open + in + let (tman, man) = match sdecl.ptype_manifest with + None -> None, None + | Some sty -> + let no_row = not (is_fixed_type sdecl) in + let cty = transl_simple_type env no_row sty in + Some cty, Some cty.ctyp_type + in + let decl = + { type_params = params; + type_arity = List.length params; + type_kind = kind; + type_private = sdecl.ptype_private; + type_manifest = man; + type_variance = List.map (fun _ -> Variance.full) params; + type_newtype_level = None; + type_loc = sdecl.ptype_loc; + type_attributes = sdecl.ptype_attributes; + type_immediate = false; + type_unboxed = unboxed_status; + } in + + (* Check constraints *) + List.iter + (fun (cty, cty', loc) -> + let ty = cty.ctyp_type in + let ty' = cty'.ctyp_type in + try Ctype.unify env ty ty' with Ctype.Unify tr -> + raise(Error(loc, Inconsistent_constraint (env, tr)))) + cstrs; + Ctype.end_def (); + (* Add abstract row *) + if is_fixed_type sdecl then begin + let p = + try Env.lookup_type (Longident.Lident(Ident.name id ^ "#row")) env + with Not_found -> assert false in + set_fixed_row env sdecl.ptype_loc p decl + end; + (* Check for cyclic abbreviations *) + begin match decl.type_manifest with None -> () + | Some ty -> + if Ctype.cyclic_abbrev env id ty then + raise(Error(sdecl.ptype_loc, Recursive_abbrev sdecl.ptype_name.txt)); + end; + { + typ_id = id; + typ_name = sdecl.ptype_name; + typ_params = tparams; + typ_type = decl; + typ_cstrs = cstrs; + typ_loc = sdecl.ptype_loc; + typ_manifest = tman; + typ_kind = tkind; + typ_private = sdecl.ptype_private; + typ_attributes = sdecl.ptype_attributes; + } + +(* Generalize a type declaration *) + +let generalize_decl decl = + List.iter Ctype.generalize decl.type_params; + Btype.iter_type_expr_kind Ctype.generalize decl.type_kind; + begin match decl.type_manifest with + | None -> () + | Some ty -> Ctype.generalize ty + end + +(* Check that all constraints are enforced *) + +module TypeSet = Btype.TypeSet +module TypeMap = Btype.TypeMap + +let rec 
check_constraints_rec env loc visited ty = + let ty = Ctype.repr ty in + if TypeSet.mem ty !visited then () else begin + visited := TypeSet.add ty !visited; + match ty.desc with + | Tconstr (path, args, _) -> + let args' = List.map (fun _ -> Ctype.newvar ()) args in + let ty' = Ctype.newconstr path args' in + begin try Ctype.enforce_constraints env ty' + with Ctype.Unify _ -> assert false + | Not_found -> raise (Error(loc, Unavailable_type_constructor path)) + end; + if not (Ctype.matches env ty ty') then + raise (Error(loc, Constraint_failed (ty, ty'))); + List.iter (check_constraints_rec env loc visited) args + | Tpoly (ty, tl) -> + let _, ty = Ctype.instance_poly false tl ty in + check_constraints_rec env loc visited ty + | _ -> + Btype.iter_type_expr (check_constraints_rec env loc visited) ty + end + +module SMap = Map.Make(String) + +let check_constraints_labels env visited l pl = + let rec get_loc name = function + [] -> assert false + | pld :: tl -> + if name = pld.pld_name.txt then pld.pld_type.ptyp_loc + else get_loc name tl + in + List.iter + (fun {Types.ld_id=name; ld_type=ty} -> + check_constraints_rec env (get_loc (Ident.name name) pl) visited ty) + l + +let check_constraints env sdecl (_, decl) = + let visited = ref TypeSet.empty in + begin match decl.type_kind with + | Type_abstract -> () + | Type_variant l -> + let find_pl = function + Ptype_variant pl -> pl + | Ptype_record _ | Ptype_abstract | Ptype_open -> assert false + in + let pl = find_pl sdecl.ptype_kind in + let pl_index = + let foldf acc x = + SMap.add x.pcd_name.txt x acc + in + List.fold_left foldf SMap.empty pl + in + List.iter + (fun {Types.cd_id=name; cd_args; cd_res} -> + let {pcd_args; pcd_res; _} = + try SMap.find (Ident.name name) pl_index + with Not_found -> assert false in + begin match cd_args, pcd_args with + | Cstr_tuple tyl, Pcstr_tuple styl -> + List.iter2 + (fun sty ty -> + check_constraints_rec env sty.ptyp_loc visited ty) + styl tyl + | Cstr_record tyl, Pcstr_record styl -> + check_constraints_labels env visited tyl styl + | _ -> assert false + end; + match pcd_res, cd_res with + | Some sr, Some r -> + check_constraints_rec env sr.ptyp_loc visited r + | _ -> + () ) + l + | Type_record (l, _) -> + let find_pl = function + Ptype_record pl -> pl + | Ptype_variant _ | Ptype_abstract | Ptype_open -> assert false + in + let pl = find_pl sdecl.ptype_kind in + check_constraints_labels env visited l pl + | Type_open -> () + end; + begin match decl.type_manifest with + | None -> () + | Some ty -> + let sty = + match sdecl.ptype_manifest with Some sty -> sty | _ -> assert false + in + check_constraints_rec env sty.ptyp_loc visited ty + end + +(* + If both a variant/record definition and a type equation are given, + need to check that the equation refers to a type of the same kind + with the same constructors and labels. 
+*) +let check_coherence env loc id decl = + match decl with + { type_kind = (Type_variant _ | Type_record _| Type_open); + type_manifest = Some ty } -> + begin match (Ctype.repr ty).desc with + Tconstr(path, args, _) -> + begin try + let decl' = Env.find_type path env in + let err = + if List.length args <> List.length decl.type_params + then [Includecore.Arity] + else if not (Ctype.equal env false args decl.type_params) + then [Includecore.Constraint] + else + Includecore.type_declarations ~loc ~equality:true env + (Path.last path) + decl' + id + (Subst.type_declaration + (Subst.add_type id path Subst.identity) decl) + in + if err <> [] then + raise(Error(loc, Definition_mismatch (ty, err))) + with Not_found -> + raise(Error(loc, Unavailable_type_constructor path)) + end + | _ -> raise(Error(loc, Definition_mismatch (ty, []))) + end + | _ -> () + +let check_abbrev env sdecl (id, decl) = + check_coherence env sdecl.ptype_loc id decl + +(* Check that recursion is well-founded *) + +let check_well_founded env loc path to_check ty = + let visited = ref TypeMap.empty in + let rec check ty0 parents ty = + let ty = Btype.repr ty in + if TypeSet.mem ty parents then begin + (*Format.eprintf "@[%a@]@." Printtyp.raw_type_expr ty;*) + if match ty0.desc with + | Tconstr (p, _, _) -> Path.same p path + | _ -> false + then raise (Error (loc, Recursive_abbrev (Path.name path))) + else raise (Error (loc, Cycle_in_def (Path.name path, ty0))) + end; + let (fini, parents) = + try + let prev = TypeMap.find ty !visited in + if TypeSet.subset parents prev then (true, parents) else + (false, TypeSet.union parents prev) + with Not_found -> + (false, parents) + in + if fini then () else + let rec_ok = + match ty.desc with + Tconstr(p,_,_) -> + !Clflags.recursive_types && Ctype.is_contractive env p + | Tobject _ | Tvariant _ -> true + | _ -> !Clflags.recursive_types + in + let visited' = TypeMap.add ty parents !visited in + let arg_exn = + try + visited := visited'; + let parents = + if rec_ok then TypeSet.empty else TypeSet.add ty parents in + Btype.iter_type_expr (check ty0 parents) ty; + None + with e -> + visited := visited'; Some e + in + match ty.desc with + | Tconstr(p, _, _) when arg_exn <> None || to_check p -> + if to_check p then may raise arg_exn + else Btype.iter_type_expr (check ty0 TypeSet.empty) ty; + begin try + let ty' = Ctype.try_expand_once_opt env ty in + let ty0 = if TypeSet.is_empty parents then ty else ty0 in + check ty0 (TypeSet.add ty parents) ty' + with + Ctype.Cannot_expand -> may raise arg_exn + end + | _ -> may raise arg_exn + in + let snap = Btype.snapshot () in + try Ctype.wrap_trace_gadt_instances env (check ty TypeSet.empty) ty + with Ctype.Unify _ -> + (* Will be detected by check_recursion *) + Btype.backtrack snap + +let check_well_founded_manifest env loc path decl = + if decl.type_manifest = None then () else + let args = List.map (fun _ -> Ctype.newvar()) decl.type_params in + check_well_founded env loc path (Path.same path) (Ctype.newconstr path args) + +let check_well_founded_decl env loc path decl to_check = + let open Btype in + let it = + {type_iterators with + it_type_expr = (fun _ -> check_well_founded env loc path to_check)} in + it.it_type_declaration it (Ctype.instance_declaration decl) + +(* Check for ill-defined abbrevs *) + +let check_recursion env loc path decl to_check = + (* to_check is true for potentially mutually recursive paths. + (path, decl) is the type declaration to be checked. 
*) + + if decl.type_params = [] then () else + + let visited = ref [] in + + let rec check_regular cpath args prev_exp ty = + let ty = Ctype.repr ty in + if not (List.memq ty !visited) then begin + visited := ty :: !visited; + match ty.desc with + | Tconstr(path', args', _) -> + if Path.same path path' then begin + if not (Ctype.equal env false args args') then + raise (Error(loc, + Parameters_differ(cpath, ty, Ctype.newconstr path args))) + end + (* Attempt to expand a type abbreviation if: + 1- [to_check path'] holds + (otherwise the expansion cannot involve [path]); + 2- we haven't expanded this type constructor before + (otherwise we could loop if [path'] is itself + a non-regular abbreviation). *) + else if to_check path' && not (List.mem path' prev_exp) then begin + try + (* Attempt expansion *) + let (params0, body0, _) = Env.find_type_expansion path' env in + let (params, body) = + Ctype.instance_parameterized_type params0 body0 in + begin + try List.iter2 (Ctype.unify env) params args' + with Ctype.Unify _ -> + raise (Error(loc, Constraint_failed + (ty, Ctype.newconstr path' params0))); + end; + check_regular path' args (path' :: prev_exp) body + with Not_found -> () + end; + List.iter (check_regular cpath args prev_exp) args' + | Tpoly (ty, tl) -> + let (_, ty) = Ctype.instance_poly ~keep_names:true false tl ty in + check_regular cpath args prev_exp ty + | _ -> + Btype.iter_type_expr (check_regular cpath args prev_exp) ty + end in + + Misc.may + (fun body -> + let (args, body) = + Ctype.instance_parameterized_type + ~keep_names:true decl.type_params body in + check_regular path args [] body) + decl.type_manifest + +let check_abbrev_recursion env id_loc_list to_check tdecl = + let decl = tdecl.typ_type in + let id = tdecl.typ_id in + check_recursion env (List.assoc id id_loc_list) (Path.Pident id) decl to_check + +(* Compute variance *) + +let get_variance ty visited = + try TypeMap.find ty !visited with Not_found -> Variance.null + +let compute_variance env visited vari ty = + let rec compute_variance_rec vari ty = + (* Format.eprintf "%a: %x@." 
Printtyp.type_expr ty (Obj.magic vari); *) + let ty = Ctype.repr ty in + let vari' = get_variance ty visited in + if Variance.subset vari vari' then () else + let vari = Variance.union vari vari' in + visited := TypeMap.add ty vari !visited; + let compute_same = compute_variance_rec vari in + match ty.desc with + Tarrow (_, ty1, ty2, _) -> + let open Variance in + let v = conjugate vari in + let v1 = + if mem May_pos v || mem May_neg v + then set May_weak true v else v + in + compute_variance_rec v1 ty1; + compute_same ty2 + | Ttuple tl -> + List.iter compute_same tl + | Tconstr (path, tl, _) -> + let open Variance in + if tl = [] then () else begin + try + let decl = Env.find_type path env in + let cvari f = mem f vari in + List.iter2 + (fun ty v -> + let cv f = mem f v in + let strict = + cvari Inv && cv Inj || (cvari Pos || cvari Neg) && cv Inv + in + if strict then compute_variance_rec full ty else + let p1 = inter v vari + and n1 = inter v (conjugate vari) in + let v1 = + union (inter covariant (union p1 (conjugate p1))) + (inter (conjugate covariant) (union n1 (conjugate n1))) + and weak = + cvari May_weak && (cv May_pos || cv May_neg) || + (cvari May_pos || cvari May_neg) && cv May_weak + in + let v2 = set May_weak weak v1 in + compute_variance_rec v2 ty) + tl decl.type_variance + with Not_found -> + List.iter (compute_variance_rec may_inv) tl + end + | Tobject (ty, _) -> + compute_same ty + | Tfield (_, _, ty1, ty2) -> + compute_same ty1; + compute_same ty2 + | Tsubst ty -> + compute_same ty + | Tvariant row -> + let row = Btype.row_repr row in + List.iter + (fun (_,f) -> + match Btype.row_field_repr f with + Rpresent (Some ty) -> + compute_same ty + | Reither (_, tyl, _, _) -> + let open Variance in + let upper = + List.fold_left (fun s f -> set f true s) + null [May_pos; May_neg; May_weak] + in + let v = inter vari upper in + (* cf PR#7269: + if List.length tyl > 1 then upper else inter vari upper *) + List.iter (compute_variance_rec v) tyl + | _ -> ()) + row.row_fields; + compute_same row.row_more + | Tpoly (ty, _) -> + compute_same ty + | Tvar _ | Tnil | Tlink _ | Tunivar _ -> () + | Tpackage (_, _, tyl) -> + let v = + Variance.(if mem Pos vari || mem Neg vari then full else may_inv) + in + List.iter (compute_variance_rec v) tyl + in + compute_variance_rec vari ty + +let make p n i = + let open Variance in + set May_pos p (set May_neg n (set May_weak n (set Inj i null))) + +let compute_variance_type env check (required, loc) decl tyl = + (* Requirements *) + let required = + List.map (fun (c,n,i) -> if c || n then (c,n,i) else (true,true,i)) + required + in + (* Prepare *) + let params = List.map Btype.repr decl.type_params in + let tvl = ref TypeMap.empty in + (* Compute occurrences in the body *) + let open Variance in + List.iter + (fun (cn,ty) -> + compute_variance env tvl (if cn then full else covariant) ty) + tyl; + if check then begin + (* Check variance of parameters *) + let pos = ref 0 in + List.iter2 + (fun ty (c, n, i) -> + incr pos; + let var = get_variance ty tvl in + let (co,cn) = get_upper var and ij = mem Inj var in + if Btype.is_Tvar ty && (co && not c || cn && not n || not ij && i) + then raise (Error(loc, Bad_variance (!pos, (co,cn,ij), (c,n,i))))) + params required; + (* Check propagation from constrained parameters *) + let args = Btype.newgenty (Ttuple params) in + let fvl = Ctype.free_variables args in + let fvl = List.filter (fun v -> not (List.memq v params)) fvl in + (* If there are no extra variables there is nothing to do *) + if fvl = [] then () 
else + let tvl2 = ref TypeMap.empty in + List.iter2 + (fun ty (p,n,_) -> + if Btype.is_Tvar ty then () else + let v = + if p then if n then full else covariant else conjugate covariant in + compute_variance env tvl2 v ty) + params required; + let visited = ref TypeSet.empty in + let rec check ty = + let ty = Ctype.repr ty in + if TypeSet.mem ty !visited then () else + let visited' = TypeSet.add ty !visited in + visited := visited'; + let v1 = get_variance ty tvl in + let snap = Btype.snapshot () in + let v2 = + TypeMap.fold + (fun t vt v -> + if Ctype.equal env false [ty] [t] then union vt v else v) + !tvl2 null in + Btype.backtrack snap; + let (c1,n1) = get_upper v1 and (c2,n2,_,i2) = get_lower v2 in + if c1 && not c2 || n1 && not n2 then + if List.memq ty fvl then + let code = if not i2 then -2 else if c2 || n2 then -1 else -3 in + raise (Error (loc, Bad_variance (code, (c1,n1,false), (c2,n2,false)))) + else + Btype.iter_type_expr check ty + in + List.iter (fun (_,ty) -> check ty) tyl; + end; + List.map2 + (fun ty (p, n, i) -> + let v = get_variance ty tvl in + let tr = decl.type_private in + (* Use required variance where relevant *) + let concr = decl.type_kind <> Type_abstract (*|| tr = Type_new*) in + let (p, n) = + if tr = Private || not (Btype.is_Tvar ty) then (p, n) (* set *) + else (false, false) (* only check *) + and i = concr || i && tr = Private in + let v = union v (make p n i) in + let v = + if not concr then v else + if mem Pos v && mem Neg v then full else + if Btype.is_Tvar ty then v else + union v + (if p then if n then full else covariant else conjugate covariant) + in + if decl.type_kind = Type_abstract && tr = Public then v else + set May_weak (mem May_neg v) v) + params required + +let add_false = List.map (fun ty -> false, ty) + +(* A parameter is constrained if it is either instantiated, + or it is a variable appearing in another parameter *) +let constrained vars ty = + match ty.desc with + | Tvar _ -> List.exists (fun tl -> List.memq ty tl) vars + | _ -> true + +let for_constr = function + | Types.Cstr_tuple l -> add_false l + | Types.Cstr_record l -> + List.map + (fun {Types.ld_mutable; ld_type} -> (ld_mutable = Mutable, ld_type)) + l + +let compute_variance_gadt env check (required, loc as rloc) decl + (tl, ret_type_opt) = + match ret_type_opt with + | None -> + compute_variance_type env check rloc {decl with type_private = Private} + (for_constr tl) + | Some ret_type -> + match Ctype.repr ret_type with + | {desc=Tconstr (_, tyl, _)} -> + (* let tyl = List.map (Ctype.expand_head env) tyl in *) + let tyl = List.map Ctype.repr tyl in + let fvl = List.map (Ctype.free_variables ?env:None) tyl in + let _ = + List.fold_left2 + (fun (fv1,fv2) ty (c,n,_) -> + match fv2 with [] -> assert false + | fv :: fv2 -> + (* fv1 @ fv2 = free_variables of other parameters *) + if (c||n) && constrained (fv1 @ fv2) ty then + raise (Error(loc, Varying_anonymous)); + (fv :: fv1, fv2)) + ([], fvl) tyl required + in + compute_variance_type env check rloc + {decl with type_params = tyl; type_private = Private} + (for_constr tl) + | _ -> assert false + +let compute_variance_extension env check decl ext rloc = + compute_variance_gadt env check rloc + {decl with type_params = ext.ext_type_params} + (ext.ext_args, ext.ext_ret_type) + +let compute_variance_decl env check decl (required, _ as rloc) = + if (decl.type_kind = Type_abstract || decl.type_kind = Type_open) + && decl.type_manifest = None then + List.map + (fun (c, n, i) -> + make (not n) (not c) (decl.type_kind <> Type_abstract || 
i)) + required + else + let mn = + match decl.type_manifest with + None -> [] + | Some ty -> [false, ty] + in + match decl.type_kind with + Type_abstract | Type_open -> + compute_variance_type env check rloc decl mn + | Type_variant tll -> + if List.for_all (fun c -> c.Types.cd_res = None) tll then + compute_variance_type env check rloc decl + (mn @ List.flatten (List.map (fun c -> for_constr c.Types.cd_args) + tll)) + else begin + let mn = + List.map (fun (_,ty) -> (Types.Cstr_tuple [ty],None)) mn in + let tll = + mn @ List.map (fun c -> c.Types.cd_args, c.Types.cd_res) tll in + match List.map (compute_variance_gadt env check rloc decl) tll with + | vari :: rem -> + let varl = List.fold_left (List.map2 Variance.union) vari rem in + List.map + Variance.(fun v -> if mem Pos v && mem Neg v then full else v) + varl + | _ -> assert false + end + | Type_record (ftl, _) -> + compute_variance_type env check rloc decl + (mn @ List.map (fun {Types.ld_mutable; ld_type} -> + (ld_mutable = Mutable, ld_type)) ftl) + +let is_hash id = + let s = Ident.name id in + String.length s > 0 && s.[0] = '#' + +let marked_as_immediate decl = + Builtin_attributes.immediate decl.type_attributes + +let compute_immediacy env tdecl = + match (tdecl.type_kind, tdecl.type_manifest) with + | (Type_variant [{cd_args = Cstr_tuple [arg]; _}], _) + | (Type_variant [{cd_args = Cstr_record [{ld_type = arg; _}]; _}], _) + | (Type_record ([{ld_type = arg; _}], _), _) + when tdecl.type_unboxed.unboxed -> + begin match get_unboxed_type_representation env arg with + | Some argrepr -> not (Ctype.maybe_pointer_type env argrepr) + | None -> false + end + | (Type_variant (_ :: _ as cstrs), _) -> + not (List.exists (fun c -> c.Types.cd_args <> Types.Cstr_tuple []) cstrs) + | (Type_abstract, Some(typ)) -> + not (Ctype.maybe_pointer_type env typ) + | (Type_abstract, None) -> marked_as_immediate tdecl + | _ -> false + +(* Computes the fixpoint for the variance and immediacy of type declarations *) + +let rec compute_properties_fixpoint env decls required variances immediacies = + let new_decls = + List.map2 + (fun (id, decl) (variance, immediacy) -> + id, {decl with type_variance = variance; type_immediate = immediacy}) + decls (List.combine variances immediacies) + in + let new_env = + List.fold_right + (fun (id, decl) env -> Env.add_type ~check:true id decl env) + new_decls env + in + let new_variances = + List.map2 + (fun (_id, decl) -> compute_variance_decl new_env false decl) + new_decls required + in + let new_variances = + List.map2 (List.map2 Variance.union) new_variances variances in + let new_immediacies = + List.map + (fun (_id, decl) -> compute_immediacy new_env decl) + new_decls + in + if new_variances <> variances || new_immediacies <> immediacies then + compute_properties_fixpoint env decls required new_variances new_immediacies + else begin + (* List.iter (fun (id, decl) -> + Printf.eprintf "%s:" (Ident.name id); + List.iter (fun (v : Variance.t) -> + Printf.eprintf " %x" (Obj.magic v : int)) + decl.type_variance; + prerr_endline "") + new_decls; *) + List.iter (fun (_, decl) -> + if (marked_as_immediate decl) && (not decl.type_immediate) then + raise (Error (decl.type_loc, Bad_immediate_attribute)) + else ()) + new_decls; + List.iter2 + (fun (id, decl) req -> if not (is_hash id) then + ignore (compute_variance_decl new_env true decl req)) + new_decls required; + new_decls, new_env + end + +let init_variance (_id, decl) = + List.map (fun _ -> Variance.null) decl.type_params + +let add_injectivity = + List.map + (function + | 
Covariant -> (true, false, false) + | Contravariant -> (false, true, false) + | Invariant -> (false, false, false) + ) + +(* for typeclass.ml *) +let compute_variance_decls env cldecls = + let decls, required = + List.fold_right + (fun (obj_id, obj_abbr, _cl_abbr, _clty, _cltydef, ci) (decls, req) -> + let variance = List.map snd ci.ci_params in + (obj_id, obj_abbr) :: decls, + (add_injectivity variance, ci.ci_loc) :: req) + cldecls ([],[]) + in + let (decls, _) = + compute_properties_fixpoint env decls required + (List.map init_variance decls) + (List.map (fun _ -> false) decls) + in + List.map2 + (fun (_,decl) (_, _, cl_abbr, clty, cltydef, _) -> + let variance = decl.type_variance in + (decl, {cl_abbr with type_variance = variance}, + {clty with cty_variance = variance}, + {cltydef with clty_variance = variance})) + decls cldecls + +(* Check multiple declarations of labels/constructors *) + +let check_duplicates sdecl_list = + let labels = Hashtbl.create 7 and constrs = Hashtbl.create 7 in + List.iter + (fun sdecl -> match sdecl.ptype_kind with + Ptype_variant cl -> + List.iter + (fun pcd -> + try + let name' = Hashtbl.find constrs pcd.pcd_name.txt in + Location.prerr_warning pcd.pcd_loc + (Warnings.Duplicate_definitions + ("constructor", pcd.pcd_name.txt, name', + sdecl.ptype_name.txt)) + with Not_found -> + Hashtbl.add constrs pcd.pcd_name.txt sdecl.ptype_name.txt) + cl + | Ptype_record fl -> + List.iter + (fun {pld_name=cname;pld_loc=loc} -> + try + let name' = Hashtbl.find labels cname.txt in + Location.prerr_warning loc + (Warnings.Duplicate_definitions + ("label", cname.txt, name', sdecl.ptype_name.txt)) + with Not_found -> Hashtbl.add labels cname.txt sdecl.ptype_name.txt) + fl + | Ptype_abstract -> () + | Ptype_open -> ()) + sdecl_list + +(* Force recursion to go through id for private types*) +let name_recursion sdecl id decl = + match decl with + | { type_kind = Type_abstract; + type_manifest = Some ty; + type_private = Private; } when is_fixed_type sdecl -> + let ty = Ctype.repr ty in + let ty' = Btype.newty2 ty.level ty.desc in + if Ctype.deep_occur ty ty' then + let td = Tconstr(Path.Pident id, decl.type_params, ref Mnil) in + Btype.link_type ty (Btype.newty2 ty.level td); + {decl with type_manifest = Some ty'} + else decl + | _ -> decl + +(* Translate a set of type declarations, mutually recursive or not *) +let transl_type_decl env rec_flag sdecl_list = + (* Add dummy types for fixed rows *) + let fixed_types = List.filter is_fixed_type sdecl_list in + let sdecl_list = + List.map + (fun sdecl -> + let ptype_name = + mkloc (sdecl.ptype_name.txt ^"#row") sdecl.ptype_name.loc in + {sdecl with + ptype_name; ptype_kind = Ptype_abstract; ptype_manifest = None}) + fixed_types + @ sdecl_list + in + + (* Create identifiers. *) + let id_list = + List.map (fun sdecl -> Ident.create sdecl.ptype_name.txt) sdecl_list + in + (* + Since we've introduced fresh idents, make sure the definition + level is at least the binding time of these events. Otherwise, + passing one of the recursively-defined type constrs as argument + to an abbreviation may fail. + *) + Ctype.init_def(Ident.current_time()); + Ctype.begin_def(); + (* Enter types. *) + let temp_env = + List.fold_left2 (enter_type rec_flag) env sdecl_list id_list in + (* Translate each declaration. 
*) + let current_slot = ref None in + let warn_unused = Warnings.is_active (Warnings.Unused_type_declaration "") in + let id_slots id = + match rec_flag with + | Asttypes.Recursive when warn_unused -> + (* See typecore.ml for a description of the algorithm used + to detect unused declarations in a set of recursive definitions. *) + let slot = ref [] in + let td = Env.find_type (Path.Pident id) temp_env in + let name = Ident.name id in + Env.set_type_used_callback + name td + (fun old_callback -> + match !current_slot with + | Some slot -> slot := (name, td) :: !slot + | None -> + List.iter (fun (name, d) -> Env.mark_type_used env name d) + (get_ref slot); + old_callback () + ); + id, Some slot + | Asttypes.Recursive | Asttypes.Nonrecursive -> + id, None + in + let transl_declaration name_sdecl (id, slot) = + current_slot := slot; + Builtin_attributes.warning_scope + name_sdecl.ptype_attributes + (fun () -> transl_declaration temp_env name_sdecl id) + in + let tdecls = + List.map2 transl_declaration sdecl_list (List.map id_slots id_list) in + let decls = + List.map (fun tdecl -> (tdecl.typ_id, tdecl.typ_type)) tdecls in + current_slot := None; + (* Check for duplicates *) + check_duplicates sdecl_list; + (* Build the final env. *) + let newenv = + List.fold_right + (fun (id, decl) env -> Env.add_type ~check:true id decl env) + decls env + in + (* Update stubs *) + begin match rec_flag with + | Asttypes.Nonrecursive -> () + | Asttypes.Recursive -> + List.iter2 + (fun id sdecl -> update_type temp_env newenv id sdecl.ptype_loc) + id_list sdecl_list + end; + (* Generalize type declarations. *) + Ctype.end_def(); + List.iter (fun (_, decl) -> generalize_decl decl) decls; + (* Check for ill-formed abbrevs *) + let id_loc_list = + List.map2 (fun id sdecl -> (id, sdecl.ptype_loc)) + id_list sdecl_list + in + List.iter (fun (id, decl) -> + check_well_founded_manifest newenv (List.assoc id id_loc_list) + (Path.Pident id) decl) + decls; + let to_check = + function Path.Pident id -> List.mem_assoc id id_loc_list | _ -> false in + List.iter (fun (id, decl) -> + check_well_founded_decl newenv (List.assoc id id_loc_list) (Path.Pident id) + decl to_check) + decls; + List.iter (check_abbrev_recursion newenv id_loc_list to_check) tdecls; + (* Check that all type variables are closed *) + List.iter2 + (fun sdecl tdecl -> + let decl = tdecl.typ_type in + match Ctype.closed_type_decl decl with + Some ty -> raise(Error(sdecl.ptype_loc, Unbound_type_var(ty,decl))) + | None -> ()) + sdecl_list tdecls; + (* Check that constraints are enforced *) + List.iter2 (check_constraints newenv) sdecl_list decls; + (* Name recursion *) + let decls = + List.map2 (fun sdecl (id, decl) -> id, name_recursion sdecl id decl) + sdecl_list decls + in + (* Add variances to the environment *) + let required = + List.map + (fun sdecl -> + add_injectivity (List.map snd sdecl.ptype_params), + sdecl.ptype_loc + ) + sdecl_list + in + let final_decls, final_env = + compute_properties_fixpoint env decls required + (List.map init_variance decls) + (List.map (fun _ -> false) decls) + in + (* Check re-exportation *) + List.iter2 (check_abbrev final_env) sdecl_list final_decls; + (* Keep original declaration *) + let final_decls = + List.map2 + (fun tdecl (_id2, decl) -> + { tdecl with typ_type = decl } + ) tdecls final_decls + in + (* Done *) + (final_decls, final_env) + +(* Translating type extensions *) + +let transl_extension_constructor env type_path type_params + typext_params priv sext = + let id = Ident.create sext.pext_name.txt in + let 
args, ret_type, kind = + match sext.pext_kind with + Pext_decl(sargs, sret_type) -> + let targs, tret_type, args, ret_type, _ = + make_constructor env type_path typext_params + sargs sret_type + in + args, ret_type, Text_decl(targs, tret_type) + | Pext_rebind lid -> + let cdescr = Typetexp.find_constructor env lid.loc lid.txt in + let usage = + if cdescr.cstr_private = Private || priv = Public + then Env.Positive else Env.Privatize + in + Env.mark_constructor usage env (Longident.last lid.txt) cdescr; + let (args, cstr_res) = Ctype.instance_constructor cdescr in + let res, ret_type = + if cdescr.cstr_generalized then + let params = Ctype.instance_list env type_params in + let res = Ctype.newconstr type_path params in + let ret_type = Some (Ctype.newconstr type_path params) in + res, ret_type + else (Ctype.newconstr type_path typext_params), None + in + begin + try + Ctype.unify env cstr_res res + with Ctype.Unify trace -> + raise (Error(lid.loc, + Rebind_wrong_type(lid.txt, env, trace))) + end; + (* Remove "_" names from parameters used in the constructor *) + if not cdescr.cstr_generalized then begin + let vars = + Ctype.free_variables (Btype.newgenty (Ttuple args)) + in + List.iter + (function {desc = Tvar (Some "_")} as ty -> + if List.memq ty vars then ty.desc <- Tvar None + | _ -> ()) + typext_params + end; + (* Ensure that constructor's type matches the type being extended *) + let cstr_type_path, cstr_type_params = + match cdescr.cstr_res.desc with + Tconstr (p, _, _) -> + let decl = Env.find_type p env in + p, decl.type_params + | _ -> assert false + in + let cstr_types = + (Btype.newgenty + (Tconstr(cstr_type_path, cstr_type_params, ref Mnil))) + :: cstr_type_params + in + let ext_types = + (Btype.newgenty + (Tconstr(type_path, type_params, ref Mnil))) + :: type_params + in + if not (Ctype.equal env true cstr_types ext_types) then + raise (Error(lid.loc, + Rebind_mismatch(lid.txt, cstr_type_path, type_path))); + (* Disallow rebinding private constructors to non-private *) + begin + match cdescr.cstr_private, priv with + Private, Public -> + raise (Error(lid.loc, Rebind_private lid.txt)) + | _ -> () + end; + let path = + match cdescr.cstr_tag with + Cstr_extension(path, _) -> path + | _ -> assert false + in + let args = + match cdescr.cstr_inlined with + | None -> + Types.Cstr_tuple args + | Some decl -> + let tl = + match args with + | [ {desc=Tconstr(_, tl, _)} ] -> tl + | _ -> assert false + in + let decl = Ctype.instance_declaration decl in + assert (List.length decl.type_params = List.length tl); + List.iter2 (Ctype.unify env) decl.type_params tl; + let lbls = + match decl.type_kind with + | Type_record (lbls, Record_extension) -> lbls + | _ -> assert false + in + Types.Cstr_record lbls + in + args, ret_type, Text_rebind(path, lid) + in + let ext = + { ext_type_path = type_path; + ext_type_params = typext_params; + ext_args = args; + ext_ret_type = ret_type; + ext_private = priv; + Types.ext_loc = sext.pext_loc; + Types.ext_attributes = sext.pext_attributes; } + in + { ext_id = id; + ext_name = sext.pext_name; + ext_type = ext; + ext_kind = kind; + Typedtree.ext_loc = sext.pext_loc; + Typedtree.ext_attributes = sext.pext_attributes; } + +let transl_extension_constructor env type_path type_params + typext_params priv sext = + Builtin_attributes.warning_scope sext.pext_attributes + (fun () -> transl_extension_constructor env type_path type_params + typext_params priv sext) + +let transl_type_extension extend env loc styext = + reset_type_variables(); + Ctype.begin_def(); + let 
(type_path, type_decl) = + let lid = styext.ptyext_path in + Typetexp.find_type env lid.loc lid.txt + in + begin + match type_decl.type_kind with + | Type_open -> begin + match type_decl.type_private with + | Private when extend -> begin + match + List.find + (function {pext_kind = Pext_decl _} -> true + | {pext_kind = Pext_rebind _} -> false) + styext.ptyext_constructors + with + | {pext_loc} -> + raise (Error(pext_loc, Cannot_extend_private_type type_path)) + | exception Not_found -> () + end + | _ -> () + end + | _ -> + raise (Error(loc, Not_extensible_type type_path)) + end; + let type_variance = + List.map (fun v -> + let (co, cn) = Variance.get_upper v in + (not cn, not co, false)) + type_decl.type_variance + in + let err = + if type_decl.type_arity <> List.length styext.ptyext_params then + [Includecore.Arity] + else + if List.for_all2 + (fun (c1, n1, _) (c2, n2, _) -> (not c2 || c1) && (not n2 || n1)) + type_variance + (add_injectivity (List.map snd styext.ptyext_params)) + then [] else [Includecore.Variance] + in + if err <> [] then + raise (Error(loc, Extension_mismatch (type_path, err))); + let ttype_params = make_params env styext.ptyext_params in + let type_params = List.map (fun (cty, _) -> cty.ctyp_type) ttype_params in + List.iter2 (Ctype.unify_var env) + (Ctype.instance_list env type_decl.type_params) + type_params; + let constructors = + List.map (transl_extension_constructor env type_path + type_decl.type_params type_params styext.ptyext_private) + styext.ptyext_constructors + in + Ctype.end_def(); + (* Generalize types *) + List.iter Ctype.generalize type_params; + List.iter + (fun ext -> + Btype.iter_type_expr_cstr_args Ctype.generalize ext.ext_type.ext_args; + may Ctype.generalize ext.ext_type.ext_ret_type) + constructors; + (* Check that all type variables are closed *) + List.iter + (fun ext -> + match Ctype.closed_extension_constructor ext.ext_type with + Some ty -> + raise(Error(ext.ext_loc, Unbound_type_var_ext(ty, ext.ext_type))) + | None -> ()) + constructors; + (* Check variances are correct *) + List.iter + (fun ext-> + ignore (compute_variance_extension env true type_decl + ext.ext_type (type_variance, loc))) + constructors; + (* Add extension constructors to the environment *) + let newenv = + List.fold_left + (fun env ext -> + Env.add_extension ~check:true ext.ext_id ext.ext_type env) + env constructors + in + let tyext = + { tyext_path = type_path; + tyext_txt = styext.ptyext_path; + tyext_params = ttype_params; + tyext_constructors = constructors; + tyext_private = styext.ptyext_private; + tyext_attributes = styext.ptyext_attributes; } + in + (tyext, newenv) + +let transl_type_extension extend env loc styext = + Builtin_attributes.warning_scope styext.ptyext_attributes + (fun () -> transl_type_extension extend env loc styext) + +let transl_exception env sext = + reset_type_variables(); + Ctype.begin_def(); + let ext = + transl_extension_constructor env + Predef.path_exn [] [] Asttypes.Public sext + in + Ctype.end_def(); + (* Generalize types *) + Btype.iter_type_expr_cstr_args Ctype.generalize ext.ext_type.ext_args; + may Ctype.generalize ext.ext_type.ext_ret_type; + (* Check that all type variables are closed *) + begin match Ctype.closed_extension_constructor ext.ext_type with + Some ty -> + raise (Error(ext.ext_loc, Unbound_type_var_ext(ty, ext.ext_type))) + | None -> () + end; + let newenv = Env.add_extension ~check:true ext.ext_id ext.ext_type env in + ext, newenv + +type native_repr_attribute = + | Native_repr_attr_absent + | 
Native_repr_attr_present of native_repr_kind + +let get_native_repr_attribute attrs ~global_repr = + match + Attr_helper.get_no_payload_attribute ["unboxed"; "ocaml.unboxed"] attrs, + Attr_helper.get_no_payload_attribute ["untagged"; "ocaml.untagged"] attrs, + global_repr + with + | None, None, None -> Native_repr_attr_absent + | None, None, Some repr -> Native_repr_attr_present repr + | Some _, None, None -> Native_repr_attr_present Unboxed + | None, Some _, None -> Native_repr_attr_present Untagged + | Some { Location.loc }, _, _ + | _, Some { Location.loc }, _ -> + raise (Error (loc, Multiple_native_repr_attributes)) + +let native_repr_of_type env kind ty = + match kind, (Ctype.expand_head_opt env ty).desc with + | Untagged, Tconstr (path, _, _) when Path.same path Predef.path_int -> + Some Untagged_int + | Unboxed, Tconstr (path, _, _) when Path.same path Predef.path_float -> + Some Unboxed_float + | Unboxed, Tconstr (path, _, _) when Path.same path Predef.path_int32 -> + Some (Unboxed_integer Pint32) + | Unboxed, Tconstr (path, _, _) when Path.same path Predef.path_int64 -> + Some (Unboxed_integer Pint64) + | Unboxed, Tconstr (path, _, _) when Path.same path Predef.path_nativeint -> + Some (Unboxed_integer Pnativeint) + | _ -> + None + +(* Raises an error when [core_type] contains an [@unboxed] or [@untagged] + attribute in a strict sub-term. *) +let error_if_has_deep_native_repr_attributes core_type = + let open Ast_iterator in + let this_iterator = + { default_iterator with typ = fun iterator core_type -> + begin + match + get_native_repr_attribute core_type.ptyp_attributes ~global_repr:None + with + | Native_repr_attr_present kind -> + raise (Error (core_type.ptyp_loc, + Deep_unbox_or_untag_attribute kind)) + | Native_repr_attr_absent -> () + end; + default_iterator.typ iterator core_type } + in + default_iterator.typ this_iterator core_type + +let make_native_repr env core_type ty ~global_repr = + error_if_has_deep_native_repr_attributes core_type; + match get_native_repr_attribute core_type.ptyp_attributes ~global_repr with + | Native_repr_attr_absent -> + Same_as_ocaml_repr + | Native_repr_attr_present kind -> + begin match native_repr_of_type env kind ty with + | None -> + raise (Error (core_type.ptyp_loc, Cannot_unbox_or_untag_type kind)) + | Some repr -> repr + end + +let rec parse_native_repr_attributes env core_type ty ~global_repr = + match core_type.ptyp_desc, (Ctype.repr ty).desc, + get_native_repr_attribute core_type.ptyp_attributes ~global_repr:None + with + | Ptyp_arrow _, Tarrow _, Native_repr_attr_present kind -> + raise (Error (core_type.ptyp_loc, Cannot_unbox_or_untag_type kind)) + | Ptyp_arrow (_, ct1, ct2), Tarrow (_, t1, t2, _), _ -> + let repr_arg = make_native_repr env ct1 t1 ~global_repr in + let repr_args, repr_res = + parse_native_repr_attributes env ct2 t2 ~global_repr + in + (repr_arg :: repr_args, repr_res) + | Ptyp_arrow _, _, _ | _, Tarrow _, _ -> assert false + | _ -> ([], make_native_repr env core_type ty ~global_repr) + + +let check_unboxable env loc ty = + let ty = Ctype.repr (Ctype.expand_head_opt env ty) in + try match ty.desc with + | Tconstr (p, _, _) -> + let tydecl = Env.find_type p env in + if tydecl.type_unboxed.unboxed then + Location.prerr_warning loc + (Warnings.Unboxable_type_in_prim_decl (Path.name p)) + | _ -> () + with Not_found -> () + +(* Translate a value declaration *) +let transl_value_decl env loc valdecl = + let cty = Typetexp.transl_type_scheme env valdecl.pval_type in + let ty = cty.ctyp_type in + let v = + match 
valdecl.pval_prim with + [] when Env.is_in_signature env -> + { val_type = ty; val_kind = Val_reg; Types.val_loc = loc; + val_attributes = valdecl.pval_attributes } + | [] -> + raise (Error(valdecl.pval_loc, Val_in_structure)) + | _ -> + let global_repr = + match + get_native_repr_attribute valdecl.pval_attributes ~global_repr:None + with + | Native_repr_attr_present repr -> Some repr + | Native_repr_attr_absent -> None + in + let native_repr_args, native_repr_res = + parse_native_repr_attributes env valdecl.pval_type ty ~global_repr + in + let prim = + Primitive.parse_declaration valdecl + ~native_repr_args + ~native_repr_res + in + if prim.prim_arity = 0 && + (prim.prim_name = "" || prim.prim_name.[0] <> '%') then + raise(Error(valdecl.pval_type.ptyp_loc, Null_arity_external)); + if !Clflags.native_code + && prim.prim_arity > 5 + && prim.prim_native_name = "" + then raise(Error(valdecl.pval_type.ptyp_loc, Missing_native_external)); + Btype.iter_type_expr (check_unboxable env loc) ty; + { val_type = ty; val_kind = Val_prim prim; Types.val_loc = loc; + val_attributes = valdecl.pval_attributes } + in + let (id, newenv) = + Env.enter_value valdecl.pval_name.txt v env + ~check:(fun s -> Warnings.Unused_value_declaration s) + in + let desc = + { + val_id = id; + val_name = valdecl.pval_name; + val_desc = cty; val_val = v; + val_prim = valdecl.pval_prim; + val_loc = valdecl.pval_loc; + val_attributes = valdecl.pval_attributes; + } + in + desc, newenv + +let transl_value_decl env loc valdecl = + Builtin_attributes.warning_scope valdecl.pval_attributes + (fun () -> transl_value_decl env loc valdecl) + +(* Translate a "with" constraint -- much simplified version of + transl_type_decl. *) +let transl_with_constraint env id row_path orig_decl sdecl = + Env.mark_type_used env (Ident.name id) orig_decl; + reset_type_variables(); + Ctype.begin_def(); + let tparams = make_params env sdecl.ptype_params in + let params = List.map (fun (cty, _) -> cty.ctyp_type) tparams in + let orig_decl = Ctype.instance_declaration orig_decl in + let arity_ok = List.length params = orig_decl.type_arity in + if arity_ok then + List.iter2 (Ctype.unify_var env) params orig_decl.type_params; + let constraints = List.map + (function (ty, ty', loc) -> + try + let cty = transl_simple_type env false ty in + let cty' = transl_simple_type env false ty' in + let ty = cty.ctyp_type in + let ty' = cty'.ctyp_type in + Ctype.unify env ty ty'; + (cty, cty', loc) + with Ctype.Unify tr -> + raise(Error(loc, Inconsistent_constraint (env, tr)))) + sdecl.ptype_cstrs + in + let no_row = not (is_fixed_type sdecl) in + let (tman, man) = match sdecl.ptype_manifest with + None -> None, None + | Some sty -> + let cty = transl_simple_type env no_row sty in + Some cty, Some cty.ctyp_type + in + let priv = + if sdecl.ptype_private = Private then Private else + if arity_ok && orig_decl.type_kind <> Type_abstract + then orig_decl.type_private else sdecl.ptype_private + in + if arity_ok && orig_decl.type_kind <> Type_abstract + && sdecl.ptype_private = Private then + Location.deprecated sdecl.ptype_loc "spurious use of private"; + let type_kind, type_unboxed = + if arity_ok && man <> None then + orig_decl.type_kind, orig_decl.type_unboxed + else + Type_abstract, unboxed_false_default_false + in + let decl = + { type_params = params; + type_arity = List.length params; + type_kind; + type_private = priv; + type_manifest = man; + type_variance = []; + type_newtype_level = None; + type_loc = sdecl.ptype_loc; + type_attributes = sdecl.ptype_attributes; + 
type_immediate = false; + type_unboxed; + } + in + begin match row_path with None -> () + | Some p -> set_fixed_row env sdecl.ptype_loc p decl + end; + begin match Ctype.closed_type_decl decl with None -> () + | Some ty -> raise(Error(sdecl.ptype_loc, Unbound_type_var(ty,decl))) + end; + let decl = name_recursion sdecl id decl in + let type_variance = + compute_variance_decl env true decl + (add_injectivity (List.map snd sdecl.ptype_params), sdecl.ptype_loc) + in + let type_immediate = compute_immediacy env decl in + let decl = {decl with type_variance; type_immediate} in + Ctype.end_def(); + generalize_decl decl; + { + typ_id = id; + typ_name = sdecl.ptype_name; + typ_params = tparams; + typ_type = decl; + typ_cstrs = constraints; + typ_loc = sdecl.ptype_loc; + typ_manifest = tman; + typ_kind = Ttype_abstract; + typ_private = sdecl.ptype_private; + typ_attributes = sdecl.ptype_attributes; + } + +(* Approximate a type declaration: just make all types abstract *) + +let abstract_type_decl arity = + let rec make_params n = + if n <= 0 then [] else Ctype.newvar() :: make_params (n-1) in + Ctype.begin_def(); + let decl = + { type_params = make_params arity; + type_arity = arity; + type_kind = Type_abstract; + type_private = Public; + type_manifest = None; + type_variance = replicate_list Variance.full arity; + type_newtype_level = None; + type_loc = Location.none; + type_attributes = []; + type_immediate = false; + type_unboxed = unboxed_false_default_false; + } in + Ctype.end_def(); + generalize_decl decl; + decl + +let approx_type_decl sdecl_list = + List.map + (fun sdecl -> + (Ident.create sdecl.ptype_name.txt, + abstract_type_decl (List.length sdecl.ptype_params))) + sdecl_list + +(* Variant of check_abbrev_recursion to check the well-formedness + conditions on type abbreviations defined within recursive modules. *) + +let check_recmod_typedecl env loc recmod_ids path decl = + (* recmod_ids is the list of recursively-defined module idents. + (path, decl) is the type declaration to be checked. 
*) + let to_check path = + List.exists (fun id -> Path.isfree id path) recmod_ids in + check_well_founded_decl env loc path decl to_check; + check_recursion env loc path decl to_check + + +(**** Error report ****) + +open Format + +let explain_unbound_gen ppf tv tl typ kwd pr = + try + let ti = List.find (fun ti -> Ctype.deep_occur tv (typ ti)) tl in + let ty0 = (* Hack to force aliasing when needed *) + Btype.newgenty (Tobject(tv, ref None)) in + Printtyp.reset_and_mark_loops_list [typ ti; ty0]; + fprintf ppf + ".@.@[In %s@ %a@;<1 -2>the variable %a is unbound@]" + kwd pr ti Printtyp.type_expr tv + with Not_found -> () + +let explain_unbound ppf tv tl typ kwd lab = + explain_unbound_gen ppf tv tl typ kwd + (fun ppf ti -> fprintf ppf "%s%a" (lab ti) Printtyp.type_expr (typ ti)) + +let explain_unbound_single ppf tv ty = + let trivial ty = + explain_unbound ppf tv [ty] (fun t -> t) "type" (fun _ -> "") in + match (Ctype.repr ty).desc with + Tobject(fi,_) -> + let (tl, rv) = Ctype.flatten_fields fi in + if rv == tv then trivial ty else + explain_unbound ppf tv tl (fun (_,_,t) -> t) + "method" (fun (lab,_,_) -> lab ^ ": ") + | Tvariant row -> + let row = Btype.row_repr row in + if row.row_more == tv then trivial ty else + explain_unbound ppf tv row.row_fields + (fun (_l,f) -> match Btype.row_field_repr f with + Rpresent (Some t) -> t + | Reither (_,[t],_,_) -> t + | Reither (_,tl,_,_) -> Btype.newgenty (Ttuple tl) + | _ -> Btype.newgenty (Ttuple[])) + "case" (fun (lab,_) -> "`" ^ lab ^ " of ") + | _ -> trivial ty + + +let tys_of_constr_args = function + | Types.Cstr_tuple tl -> tl + | Types.Cstr_record lbls -> List.map (fun l -> l.Types.ld_type) lbls + +let report_error ppf = function + | Repeated_parameter -> + fprintf ppf "A type parameter occurs several times" + | Duplicate_constructor s -> + fprintf ppf "Two constructors are named %s" s + | Too_many_constructors -> + fprintf ppf + "@[Too many non-constant constructors@ -- maximum is %i %s@]" + (Config.max_tag + 1) "non-constant constructors" + | Duplicate_label s -> + fprintf ppf "Two labels are named %s" s + | Recursive_abbrev s -> + fprintf ppf "The type abbreviation %s is cyclic" s + | Cycle_in_def (s, ty) -> + Printtyp.reset_and_mark_loops ty; + fprintf ppf "@[The definition of %s contains a cycle:@ %a@]" + s Printtyp.type_expr ty + | Definition_mismatch (ty, errs) -> + Printtyp.reset_and_mark_loops ty; + fprintf ppf "@[@[%s@ %s@;<1 2>%a@]%a@]" + "This variant or record definition" "does not match that of type" + Printtyp.type_expr ty + (Includecore.report_type_mismatch "the original" "this" "definition") + errs + | Constraint_failed (ty, ty') -> + Printtyp.reset_and_mark_loops ty; + Printtyp.mark_loops ty'; + fprintf ppf "@[%s@ @[Type@ %a@ should be an instance of@ %a@]@]" + "Constraints are not satisfied in this type." 
+ Printtyp.type_expr ty Printtyp.type_expr ty' + | Parameters_differ (path, ty, ty') -> + Printtyp.reset_and_mark_loops ty; + Printtyp.mark_loops ty'; + fprintf ppf + "@[In the definition of %s, type@ %a@ should be@ %a@]" + (Path.name path) Printtyp.type_expr ty Printtyp.type_expr ty' + | Inconsistent_constraint (env, trace) -> + fprintf ppf "The type constraints are not consistent.@."; + Printtyp.report_unification_error ppf env trace + (fun ppf -> fprintf ppf "Type") + (fun ppf -> fprintf ppf "is not compatible with type") + | Type_clash (env, trace) -> + Printtyp.report_unification_error ppf env trace + (function ppf -> + fprintf ppf "This type constructor expands to type") + (function ppf -> + fprintf ppf "but is used here with type") + | Null_arity_external -> + fprintf ppf "External identifiers must be functions" + | Missing_native_external -> + fprintf ppf "@[An external function with more than 5 arguments \ + requires a second stub function@ \ + for native-code compilation@]" + | Unbound_type_var (ty, decl) -> + fprintf ppf "A type variable is unbound in this type declaration"; + let ty = Ctype.repr ty in + begin match decl.type_kind, decl.type_manifest with + | Type_variant tl, _ -> + explain_unbound_gen ppf ty tl (fun c -> + let tl = tys_of_constr_args c.Types.cd_args in + Btype.newgenty (Ttuple tl) + ) + "case" (fun ppf c -> + fprintf ppf + "%s of %a" (Ident.name c.Types.cd_id) + Printtyp.constructor_arguments c.Types.cd_args) + | Type_record (tl, _), _ -> + explain_unbound ppf ty tl (fun l -> l.Types.ld_type) + "field" (fun l -> Ident.name l.Types.ld_id ^ ": ") + | Type_abstract, Some ty' -> + explain_unbound_single ppf ty ty' + | _ -> () + end + | Unbound_type_var_ext (ty, ext) -> + fprintf ppf "A type variable is unbound in this extension constructor"; + let args = tys_of_constr_args ext.ext_args in + explain_unbound ppf ty args (fun c -> c) "type" (fun _ -> "") + | Cannot_extend_private_type path -> + fprintf ppf "@[%s@ %a@]" + "Cannot extend private type definition" + Printtyp.path path + | Not_extensible_type path -> + fprintf ppf "@[%s@ %a@ %s@]" + "Type definition" + Printtyp.path path + "is not extensible" + | Extension_mismatch (path, errs) -> + fprintf ppf "@[@[%s@ %s@;<1 2>%s@]%a@]" + "This extension" "does not match the definition of type" + (Path.name path) + (Includecore.report_type_mismatch + "the type" "this extension" "definition") + errs + | Rebind_wrong_type (lid, env, trace) -> + Printtyp.report_unification_error ppf env trace + (function ppf -> + fprintf ppf "The constructor %a@ has type" + Printtyp.longident lid) + (function ppf -> + fprintf ppf "but was expected to be of type") + | Rebind_mismatch (lid, p, p') -> + fprintf ppf + "@[%s@ %a@ %s@ %s@ %s@ %s@ %s@]" + "The constructor" Printtyp.longident lid + "extends type" (Path.name p) + "whose declaration does not match" + "the declaration of type" (Path.name p') + | Rebind_private lid -> + fprintf ppf "@[%s@ %a@ %s@]" + "The constructor" + Printtyp.longident lid + "is private" + | Bad_variance (n, v1, v2) -> + let variance (p,n,i) = + let inj = if i then "injective " else "" in + match p, n with + true, true -> inj ^ "invariant" + | true, false -> inj ^ "covariant" + | false, true -> inj ^ "contravariant" + | false, false -> if inj = "" then "unrestricted" else inj + in + let suffix n = + let teen = (n mod 100)/10 = 1 in + match n mod 10 with + | 1 when not teen -> "st" + | 2 when not teen -> "nd" + | 3 when not teen -> "rd" + | _ -> "th" + in + if n = -1 then + fprintf ppf "@[%s@ %s@ It" + "In this 
definition, a type variable has a variance that" + "is not reflected by its occurrence in type parameters." + else if n = -2 then + fprintf ppf "@[%s@ %s@]" + "In this definition, a type variable cannot be deduced" + "from the type parameters." + else if n = -3 then + fprintf ppf "@[%s@ %s@ It" + "In this definition, a type variable has a variance that" + "cannot be deduced from the type parameters." + else + fprintf ppf "@[%s@ %s@ The %d%s type parameter" + "In this definition, expected parameter" + "variances are not satisfied." + n (suffix n); + if n <> -2 then + fprintf ppf " was expected to be %s,@ but it is %s.@]" + (variance v2) (variance v1) + | Unavailable_type_constructor p -> + fprintf ppf "The definition of type %a@ is unavailable" Printtyp.path p + | Bad_fixed_type r -> + fprintf ppf "This fixed type %s" r + | Varying_anonymous -> + fprintf ppf "@[%s@ %s@ %s@]" + "In this GADT definition," "the variance of some parameter" + "cannot be checked" + | Val_in_structure -> + fprintf ppf "Value declarations are only allowed in signatures" + | Multiple_native_repr_attributes -> + fprintf ppf "Too many [@@unboxed]/[@@untagged] attributes" + | Cannot_unbox_or_untag_type Unboxed -> + fprintf ppf "Don't know how to unbox this type. Only float, int32, \ + int64 and nativeint can be unboxed" + | Cannot_unbox_or_untag_type Untagged -> + fprintf ppf "Don't know how to untag this type. Only int \ + can be untagged" + | Deep_unbox_or_untag_attribute kind -> + fprintf ppf + "The attribute '%s' should be attached to a direct argument or \ + result of the primitive, it should not occur deeply into its type" + (match kind with Unboxed -> "@unboxed" | Untagged -> "@untagged") + | Bad_immediate_attribute -> + fprintf ppf "@[%s@ %s@]" + "Types marked with the immediate attribute must be" + "non-pointer types like int or bool" + | Bad_unboxed_attribute msg -> + fprintf ppf "@[This type cannot be unboxed because@ %s.@]" msg + | Wrong_unboxed_type_float -> + fprintf ppf "@[This type cannot be unboxed because@ \ + it might contain both float and non-float values.@ \ + You should annotate it with [%@%@ocaml.boxed].@]" + | Boxed_and_unboxed -> + fprintf ppf "@[A type cannot be boxed and unboxed at the same time.@]" + | Nonrec_gadt -> + fprintf ppf + "@[GADT case syntax cannot be used in a 'nonrec' block.@]" + +let () = + Location.register_error_of_exn + (function + | Error (loc, err) -> + Some (Location.error_of_printer loc report_error err) + | _ -> + None + ) diff --git a/res_syntax/compiler-libs-406/typedecl.mli b/res_syntax/compiler-libs-406/typedecl.mli new file mode 100644 index 0000000000..1c687cd04f --- /dev/null +++ b/res_syntax/compiler-libs-406/typedecl.mli @@ -0,0 +1,106 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +(* Typing of type definitions and primitive definitions *) + +open Types +open Format + +val transl_type_decl: + Env.t -> Asttypes.rec_flag -> Parsetree.type_declaration list -> + Typedtree.type_declaration list * Env.t + +val transl_exception: + Env.t -> + Parsetree.extension_constructor -> Typedtree.extension_constructor * Env.t + +val transl_type_extension: + bool -> Env.t -> Location.t -> Parsetree.type_extension -> + Typedtree.type_extension * Env.t + +val transl_value_decl: + Env.t -> Location.t -> + Parsetree.value_description -> Typedtree.value_description * Env.t + +val transl_with_constraint: + Env.t -> Ident.t -> Path.t option -> Types.type_declaration -> + Parsetree.type_declaration -> Typedtree.type_declaration + +val abstract_type_decl: int -> type_declaration +val approx_type_decl: + Parsetree.type_declaration list -> + (Ident.t * type_declaration) list +val check_recmod_typedecl: + Env.t -> Location.t -> Ident.t list -> Path.t -> type_declaration -> unit +val check_coherence: + Env.t -> Location.t -> Ident.t -> type_declaration -> unit + +(* for fixed types *) +val is_fixed_type : Parsetree.type_declaration -> bool + +(* for typeclass.ml *) +val compute_variance_decls: + Env.t -> + (Ident.t * Types.type_declaration * Types.type_declaration * + Types.class_declaration * Types.class_type_declaration * + 'a Typedtree.class_infos) list -> + (Types.type_declaration * Types.type_declaration * + Types.class_declaration * Types.class_type_declaration) list + +(* for typeopt.ml *) +val get_unboxed_type_representation: Env.t -> type_expr -> type_expr option + + +type native_repr_kind = Unboxed | Untagged + +type error = + Repeated_parameter + | Duplicate_constructor of string + | Too_many_constructors + | Duplicate_label of string + | Recursive_abbrev of string + | Cycle_in_def of string * type_expr + | Definition_mismatch of type_expr * Includecore.type_mismatch list + | Constraint_failed of type_expr * type_expr + | Inconsistent_constraint of Env.t * (type_expr * type_expr) list + | Type_clash of Env.t * (type_expr * type_expr) list + | Parameters_differ of Path.t * type_expr * type_expr + | Null_arity_external + | Missing_native_external + | Unbound_type_var of type_expr * type_declaration + | Cannot_extend_private_type of Path.t + | Not_extensible_type of Path.t + | Extension_mismatch of Path.t * Includecore.type_mismatch list + | Rebind_wrong_type of Longident.t * Env.t * (type_expr * type_expr) list + | Rebind_mismatch of Longident.t * Path.t * Path.t + | Rebind_private of Longident.t + | Bad_variance of int * (bool*bool*bool) * (bool*bool*bool) + | Unavailable_type_constructor of Path.t + | Bad_fixed_type of string + | Unbound_type_var_ext of type_expr * extension_constructor + | Varying_anonymous + | Val_in_structure + | Multiple_native_repr_attributes + | Cannot_unbox_or_untag_type of native_repr_kind + | Deep_unbox_or_untag_attribute of native_repr_kind + | Bad_immediate_attribute + | Bad_unboxed_attribute of string + | Wrong_unboxed_type_float + | Boxed_and_unboxed + | Nonrec_gadt + +exception Error of Location.t * error + +val report_error: formatter -> error -> unit diff --git a/res_syntax/compiler-libs-406/typedtree.ml b/res_syntax/compiler-libs-406/typedtree.ml new file mode 100644 index 0000000000..4cc9964324 --- /dev/null +++ b/res_syntax/compiler-libs-406/typedtree.ml @@ -0,0 +1,619 @@ +(**************************************************************************) +(* *) +(* 
OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Abstract syntax tree after typing *) + +open Misc +open Asttypes +open Types + +(* Value expressions for the core language *) + +type partial = Partial | Total + +type attribute = Parsetree.attribute +type attributes = attribute list + +type pattern = + { pat_desc: pattern_desc; + pat_loc: Location.t; + pat_extra : (pat_extra * Location.t * attribute list) list; + pat_type: type_expr; + mutable pat_env: Env.t; + pat_attributes: attribute list; + } + +and pat_extra = + | Tpat_constraint of core_type + | Tpat_type of Path.t * Longident.t loc + | Tpat_open of Path.t * Longident.t loc * Env.t + | Tpat_unpack + +and pattern_desc = + Tpat_any + | Tpat_var of Ident.t * string loc + | Tpat_alias of pattern * Ident.t * string loc + | Tpat_constant of constant + | Tpat_tuple of pattern list + | Tpat_construct of + Longident.t loc * constructor_description * pattern list + | Tpat_variant of label * pattern option * row_desc ref + | Tpat_record of + (Longident.t loc * label_description * pattern) list * + closed_flag + | Tpat_array of pattern list + | Tpat_or of pattern * pattern * row_desc option + | Tpat_lazy of pattern + +and expression = + { exp_desc: expression_desc; + exp_loc: Location.t; + exp_extra: (exp_extra * Location.t * attribute list) list; + exp_type: type_expr; + exp_env: Env.t; + exp_attributes: attribute list; + } + +and exp_extra = + | Texp_constraint of core_type + | Texp_coerce of core_type option * core_type + | Texp_open of override_flag * Path.t * Longident.t loc * Env.t + | Texp_poly of core_type option + | Texp_newtype of string + +and expression_desc = + Texp_ident of Path.t * Longident.t loc * Types.value_description + | Texp_constant of constant + | Texp_let of rec_flag * value_binding list * expression + | Texp_function of { arg_label : arg_label; param : Ident.t; + cases : case list; partial : partial; } + | Texp_apply of expression * (arg_label * expression option) list + | Texp_match of expression * case list * case list * partial + | Texp_try of expression * case list + | Texp_tuple of expression list + | Texp_construct of + Longident.t loc * constructor_description * expression list + | Texp_variant of label * expression option + | Texp_record of { + fields : ( Types.label_description * record_label_definition ) array; + representation : Types.record_representation; + extended_expression : expression option; + } + | Texp_field of expression * Longident.t loc * label_description + | Texp_setfield of + expression * Longident.t loc * label_description * expression + | Texp_array of expression list + | Texp_ifthenelse of expression * expression * expression option + | Texp_sequence of expression * expression + | Texp_while of expression * expression + | Texp_for of + Ident.t * Parsetree.pattern * expression * expression * direction_flag * + expression + | Texp_send of expression * meth * expression option + | Texp_new of Path.t * Longident.t loc * Types.class_declaration + | Texp_instvar of Path.t * Path.t * string loc + | Texp_setinstvar of Path.t * Path.t * string loc * expression + | Texp_override 
of Path.t * (Path.t * string loc * expression) list + | Texp_letmodule of Ident.t * string loc * module_expr * expression + | Texp_letexception of extension_constructor * expression + | Texp_assert of expression + | Texp_lazy of expression + | Texp_object of class_structure * string list + | Texp_pack of module_expr + | Texp_unreachable + | Texp_extension_constructor of Longident.t loc * Path.t + +and meth = + Tmeth_name of string + | Tmeth_val of Ident.t + +and case = + { + c_lhs: pattern; + c_guard: expression option; + c_rhs: expression; + } + +and record_label_definition = + | Kept of Types.type_expr + | Overridden of Longident.t loc * expression + +(* Value expressions for the class language *) + +and class_expr = + { + cl_desc: class_expr_desc; + cl_loc: Location.t; + cl_type: Types.class_type; + cl_env: Env.t; + cl_attributes: attribute list; + } + +and class_expr_desc = + Tcl_ident of Path.t * Longident.t loc * core_type list + | Tcl_structure of class_structure + | Tcl_fun of + arg_label * pattern * (Ident.t * string loc * expression) list + * class_expr * partial + | Tcl_apply of class_expr * (arg_label * expression option) list + | Tcl_let of rec_flag * value_binding list * + (Ident.t * string loc * expression) list * class_expr + | Tcl_constraint of + class_expr * class_type option * string list * string list * Concr.t + (* Visible instance variables, methods and concrete methods *) + | Tcl_open of override_flag * Path.t * Longident.t loc * Env.t * class_expr + +and class_structure = + { + cstr_self: pattern; + cstr_fields: class_field list; + cstr_type: Types.class_signature; + cstr_meths: Ident.t Meths.t; + } + +and class_field = + { + cf_desc: class_field_desc; + cf_loc: Location.t; + cf_attributes: attribute list; + } + +and class_field_kind = + | Tcfk_virtual of core_type + | Tcfk_concrete of override_flag * expression + +and class_field_desc = + Tcf_inherit of + override_flag * class_expr * string option * (string * Ident.t) list * + (string * Ident.t) list + (* Inherited instance variables and concrete methods *) + | Tcf_val of string loc * mutable_flag * Ident.t * class_field_kind * bool + | Tcf_method of string loc * private_flag * class_field_kind + | Tcf_constraint of core_type * core_type + | Tcf_initializer of expression + | Tcf_attribute of attribute + +(* Value expressions for the module language *) + +and module_expr = + { mod_desc: module_expr_desc; + mod_loc: Location.t; + mod_type: Types.module_type; + mod_env: Env.t; + mod_attributes: attribute list; + } + +and module_type_constraint = + Tmodtype_implicit +| Tmodtype_explicit of module_type + +and module_expr_desc = + Tmod_ident of Path.t * Longident.t loc + | Tmod_structure of structure + | Tmod_functor of Ident.t * string loc * module_type option * module_expr + | Tmod_apply of module_expr * module_expr * module_coercion + | Tmod_constraint of + module_expr * Types.module_type * module_type_constraint * module_coercion + | Tmod_unpack of expression * Types.module_type + +and structure = { + str_items : structure_item list; + str_type : Types.signature; + str_final_env : Env.t; +} + +and structure_item = + { str_desc : structure_item_desc; + str_loc : Location.t; + str_env : Env.t + } + +and structure_item_desc = + Tstr_eval of expression * attributes + | Tstr_value of rec_flag * value_binding list + | Tstr_primitive of value_description + | Tstr_type of rec_flag * type_declaration list + | Tstr_typext of type_extension + | Tstr_exception of extension_constructor + | Tstr_module of module_binding + | 
Tstr_recmodule of module_binding list + | Tstr_modtype of module_type_declaration + | Tstr_open of open_description + | Tstr_class of (class_declaration * string list) list + | Tstr_class_type of (Ident.t * string loc * class_type_declaration) list + | Tstr_include of include_declaration + | Tstr_attribute of attribute + +and module_binding = + { + mb_id: Ident.t; + mb_name: string loc; + mb_expr: module_expr; + mb_attributes: attribute list; + mb_loc: Location.t; + } + +and value_binding = + { + vb_pat: pattern; + vb_expr: expression; + vb_attributes: attributes; + vb_loc: Location.t; + } + +and module_coercion = + Tcoerce_none + | Tcoerce_structure of (int * module_coercion) list * + (Ident.t * int * module_coercion) list + | Tcoerce_functor of module_coercion * module_coercion + | Tcoerce_primitive of primitive_coercion + | Tcoerce_alias of Path.t * module_coercion + +and module_type = + { mty_desc: module_type_desc; + mty_type : Types.module_type; + mty_env : Env.t; + mty_loc: Location.t; + mty_attributes: attribute list; + } + +and module_type_desc = + Tmty_ident of Path.t * Longident.t loc + | Tmty_signature of signature + | Tmty_functor of Ident.t * string loc * module_type option * module_type + | Tmty_with of module_type * (Path.t * Longident.t loc * with_constraint) list + | Tmty_typeof of module_expr + | Tmty_alias of Path.t * Longident.t loc + +(* Keep primitive type information for type-based lambda-code specialization *) +and primitive_coercion = + { + pc_desc: Primitive.description; + pc_type: type_expr; + pc_env: Env.t; + pc_loc : Location.t; + } + +and signature = { + sig_items : signature_item list; + sig_type : Types.signature; + sig_final_env : Env.t; +} + +and signature_item = + { sig_desc: signature_item_desc; + sig_env : Env.t; (* BINANNOT ADDED *) + sig_loc: Location.t } + +and signature_item_desc = + Tsig_value of value_description + | Tsig_type of rec_flag * type_declaration list + | Tsig_typext of type_extension + | Tsig_exception of extension_constructor + | Tsig_module of module_declaration + | Tsig_recmodule of module_declaration list + | Tsig_modtype of module_type_declaration + | Tsig_open of open_description + | Tsig_include of include_description + | Tsig_class of class_description list + | Tsig_class_type of class_type_declaration list + | Tsig_attribute of attribute + +and module_declaration = + { + md_id: Ident.t; + md_name: string loc; + md_type: module_type; + md_attributes: attribute list; + md_loc: Location.t; + } + +and module_type_declaration = + { + mtd_id: Ident.t; + mtd_name: string loc; + mtd_type: module_type option; + mtd_attributes: attribute list; + mtd_loc: Location.t; + } + +and open_description = + { + open_path: Path.t; + open_txt: Longident.t loc; + open_override: override_flag; + open_loc: Location.t; + open_attributes: attribute list; + } + +and 'a include_infos = + { + incl_mod: 'a; + incl_type: Types.signature; + incl_loc: Location.t; + incl_attributes: attribute list; + } + +and include_description = module_type include_infos + +and include_declaration = module_expr include_infos + +and with_constraint = + Twith_type of type_declaration + | Twith_module of Path.t * Longident.t loc + | Twith_typesubst of type_declaration + | Twith_modsubst of Path.t * Longident.t loc + +and core_type = +(* mutable because of [Typeclass.declare_method] *) + { mutable ctyp_desc : core_type_desc; + mutable ctyp_type : type_expr; + ctyp_env : Env.t; (* BINANNOT ADDED *) + ctyp_loc : Location.t; + ctyp_attributes: attribute list; + } + +and 
core_type_desc = + Ttyp_any + | Ttyp_var of string + | Ttyp_arrow of arg_label * core_type * core_type + | Ttyp_tuple of core_type list + | Ttyp_constr of Path.t * Longident.t loc * core_type list + | Ttyp_object of object_field list * closed_flag + | Ttyp_class of Path.t * Longident.t loc * core_type list + | Ttyp_alias of core_type * string + | Ttyp_variant of row_field list * closed_flag * label list option + | Ttyp_poly of string list * core_type + | Ttyp_package of package_type + +and package_type = { + pack_path : Path.t; + pack_fields : (Longident.t loc * core_type) list; + pack_type : Types.module_type; + pack_txt : Longident.t loc; +} + +and row_field = + Ttag of string loc * attributes * bool * core_type list + | Tinherit of core_type + +and object_field = + | OTtag of string loc * attributes * core_type + | OTinherit of core_type + +and value_description = + { val_id: Ident.t; + val_name: string loc; + val_desc: core_type; + val_val: Types.value_description; + val_prim: string list; + val_loc: Location.t; + val_attributes: attribute list; + } + +and type_declaration = + { typ_id: Ident.t; + typ_name: string loc; + typ_params: (core_type * variance) list; + typ_type: Types.type_declaration; + typ_cstrs: (core_type * core_type * Location.t) list; + typ_kind: type_kind; + typ_private: private_flag; + typ_manifest: core_type option; + typ_loc: Location.t; + typ_attributes: attribute list; + } + +and type_kind = + Ttype_abstract + | Ttype_variant of constructor_declaration list + | Ttype_record of label_declaration list + | Ttype_open + +and label_declaration = + { + ld_id: Ident.t; + ld_name: string loc; + ld_mutable: mutable_flag; + ld_type: core_type; + ld_loc: Location.t; + ld_attributes: attribute list; + } + +and constructor_declaration = + { + cd_id: Ident.t; + cd_name: string loc; + cd_args: constructor_arguments; + cd_res: core_type option; + cd_loc: Location.t; + cd_attributes: attribute list; + } + +and constructor_arguments = + | Cstr_tuple of core_type list + | Cstr_record of label_declaration list + +and type_extension = + { + tyext_path: Path.t; + tyext_txt: Longident.t loc; + tyext_params: (core_type * variance) list; + tyext_constructors: extension_constructor list; + tyext_private: private_flag; + tyext_attributes: attribute list; + } + +and extension_constructor = + { + ext_id: Ident.t; + ext_name: string loc; + ext_type: Types.extension_constructor; + ext_kind: extension_constructor_kind; + ext_loc: Location.t; + ext_attributes: attribute list; + } + +and extension_constructor_kind = + Text_decl of constructor_arguments * core_type option + | Text_rebind of Path.t * Longident.t loc + +and class_type = + { + cltyp_desc: class_type_desc; + cltyp_type: Types.class_type; + cltyp_env: Env.t; + cltyp_loc: Location.t; + cltyp_attributes: attribute list; + } + +and class_type_desc = + Tcty_constr of Path.t * Longident.t loc * core_type list + | Tcty_signature of class_signature + | Tcty_arrow of arg_label * core_type * class_type + | Tcty_open of override_flag * Path.t * Longident.t loc * Env.t * class_type + +and class_signature = { + csig_self: core_type; + csig_fields: class_type_field list; + csig_type: Types.class_signature; + } + +and class_type_field = { + ctf_desc: class_type_field_desc; + ctf_loc: Location.t; + ctf_attributes: attribute list; + } + +and class_type_field_desc = + | Tctf_inherit of class_type + | Tctf_val of (string * mutable_flag * virtual_flag * core_type) + | Tctf_method of (string * private_flag * virtual_flag * core_type) + | Tctf_constraint 
of (core_type * core_type) + | Tctf_attribute of attribute + +and class_declaration = + class_expr class_infos + +and class_description = + class_type class_infos + +and class_type_declaration = + class_type class_infos + +and 'a class_infos = + { ci_virt: virtual_flag; + ci_params: (core_type * variance) list; + ci_id_name: string loc; + ci_id_class: Ident.t; + ci_id_class_type: Ident.t; + ci_id_object: Ident.t; + ci_id_typehash: Ident.t; + ci_expr: 'a; + ci_decl: Types.class_declaration; + ci_type_decl: Types.class_type_declaration; + ci_loc: Location.t; + ci_attributes: attribute list; + } + +(* Auxiliary functions over the a.s.t. *) + +let iter_pattern_desc f = function + | Tpat_alias(p, _, _) -> f p + | Tpat_tuple patl -> List.iter f patl + | Tpat_construct(_, _, patl) -> List.iter f patl + | Tpat_variant(_, pat, _) -> may f pat + | Tpat_record (lbl_pat_list, _) -> + List.iter (fun (_, _, pat) -> f pat) lbl_pat_list + | Tpat_array patl -> List.iter f patl + | Tpat_or(p1, p2, _) -> f p1; f p2 + | Tpat_lazy p -> f p + | Tpat_any + | Tpat_var _ + | Tpat_constant _ -> () + +let map_pattern_desc f d = + match d with + | Tpat_alias (p1, id, s) -> + Tpat_alias (f p1, id, s) + | Tpat_tuple pats -> + Tpat_tuple (List.map f pats) + | Tpat_record (lpats, closed) -> + Tpat_record (List.map (fun (lid, l,p) -> lid, l, f p) lpats, closed) + | Tpat_construct (lid, c,pats) -> + Tpat_construct (lid, c, List.map f pats) + | Tpat_array pats -> + Tpat_array (List.map f pats) + | Tpat_lazy p1 -> Tpat_lazy (f p1) + | Tpat_variant (x1, Some p1, x2) -> + Tpat_variant (x1, Some (f p1), x2) + | Tpat_or (p1,p2,path) -> + Tpat_or (f p1, f p2, path) + | Tpat_var _ + | Tpat_constant _ + | Tpat_any + | Tpat_variant (_,None,_) -> d + +(* List the identifiers bound by a pattern or a let *) + +let idents = ref([]: (Ident.t * string loc) list) + +let rec bound_idents pat = + match pat.pat_desc with + | Tpat_var (id,s) -> idents := (id,s) :: !idents + | Tpat_alias(p, id, s ) -> + bound_idents p; idents := (id,s) :: !idents + | Tpat_or(p1, _, _) -> + (* Invariant : both arguments binds the same variables *) + bound_idents p1 + | d -> iter_pattern_desc bound_idents d + +let pat_bound_idents pat = + idents := []; + bound_idents pat; + let res = !idents in + idents := []; + List.map fst res + +let rev_let_bound_idents_with_loc bindings = + idents := []; + List.iter (fun vb -> bound_idents vb.vb_pat) bindings; + let res = !idents in idents := []; res + +let let_bound_idents_with_loc pat_expr_list = + List.rev(rev_let_bound_idents_with_loc pat_expr_list) + +let rev_let_bound_idents pat = List.map fst (rev_let_bound_idents_with_loc pat) +let let_bound_idents pat = List.map fst (let_bound_idents_with_loc pat) + +let alpha_var env id = List.assoc id env + +let rec alpha_pat env p = match p.pat_desc with +| Tpat_var (id, s) -> (* note the ``Not_found'' case *) + {p with pat_desc = + try Tpat_var (alpha_var env id, s) with + | Not_found -> Tpat_any} +| Tpat_alias (p1, id, s) -> + let new_p = alpha_pat env p1 in + begin try + {p with pat_desc = Tpat_alias (new_p, alpha_var env id, s)} + with + | Not_found -> new_p + end +| d -> + {p with pat_desc = map_pattern_desc (alpha_pat env) d} + +let mkloc = Location.mkloc +let mknoloc = Location.mknoloc diff --git a/res_syntax/compiler-libs-406/typedtree.mli b/res_syntax/compiler-libs-406/typedtree.mli new file mode 100644 index 0000000000..2e89ed5233 --- /dev/null +++ b/res_syntax/compiler-libs-406/typedtree.mli @@ -0,0 +1,670 @@ 
+(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Abstract syntax tree after typing *) + + +(** By comparison with {!Parsetree}: + - Every {!Longindent.t} is accompanied by a resolved {!Path.t}. + +*) + +open Asttypes +open Types + +(* Value expressions for the core language *) + +type partial = Partial | Total + +(** {1 Extension points} *) + +type attribute = Parsetree.attribute +type attributes = attribute list + +(** {1 Core language} *) + +type pattern = + { pat_desc: pattern_desc; + pat_loc: Location.t; + pat_extra : (pat_extra * Location.t * attributes) list; + pat_type: type_expr; + mutable pat_env: Env.t; + pat_attributes: attributes; + } + +and pat_extra = + | Tpat_constraint of core_type + (** P : T { pat_desc = P + ; pat_extra = (Tpat_constraint T, _, _) :: ... } + *) + | Tpat_type of Path.t * Longident.t loc + (** #tconst { pat_desc = disjunction + ; pat_extra = (Tpat_type (P, "tconst"), _, _) :: ...} + + where [disjunction] is a [Tpat_or _] representing the + branches of [tconst]. + *) + | Tpat_open of Path.t * Longident.t loc * Env.t + | Tpat_unpack + (** (module P) { pat_desc = Tpat_var "P" + ; pat_extra = (Tpat_unpack, _, _) :: ... } + *) + +and pattern_desc = + Tpat_any + (** _ *) + | Tpat_var of Ident.t * string loc + (** x *) + | Tpat_alias of pattern * Ident.t * string loc + (** P as a *) + | Tpat_constant of constant + (** 1, 'a', "true", 1.0, 1l, 1L, 1n *) + | Tpat_tuple of pattern list + (** (P1, ..., Pn) + + Invariant: n >= 2 + *) + | Tpat_construct of + Longident.t loc * constructor_description * pattern list + (** C [] + C P [P] + C (P1, ..., Pn) [P1; ...; Pn] + *) + | Tpat_variant of label * pattern option * row_desc ref + (** `A (None) + `A P (Some P) + + See {!Types.row_desc} for an explanation of the last parameter. + *) + | Tpat_record of + (Longident.t loc * label_description * pattern) list * + closed_flag + (** { l1=P1; ...; ln=Pn } (flag = Closed) + { l1=P1; ...; ln=Pn; _} (flag = Open) + + Invariant: n > 0 + *) + | Tpat_array of pattern list + (** [| P1; ...; Pn |] *) + | Tpat_or of pattern * pattern * row_desc option + (** P1 | P2 + + [row_desc] = [Some _] when translating [Ppat_type _], + [None] otherwise. + *) + | Tpat_lazy of pattern + (** lazy P *) + +and expression = + { exp_desc: expression_desc; + exp_loc: Location.t; + exp_extra: (exp_extra * Location.t * attributes) list; + exp_type: type_expr; + exp_env: Env.t; + exp_attributes: attributes; + } + +and exp_extra = + | Texp_constraint of core_type + (** E : T *) + | Texp_coerce of core_type option * core_type + (** E :> T [Texp_coerce (None, T)] + E : T0 :> T [Texp_coerce (Some T0, T)] + *) + | Texp_open of override_flag * Path.t * Longident.t loc * Env.t + (** let open[!] M in [Texp_open (!, P, M, env)] + where [env] is the environment after opening [P] + *) + | Texp_poly of core_type option + (** Used for method bodies. 
*) + | Texp_newtype of string + (** fun (type t) -> *) + +and expression_desc = + Texp_ident of Path.t * Longident.t loc * Types.value_description + (** x + M.x + *) + | Texp_constant of constant + (** 1, 'a', "true", 1.0, 1l, 1L, 1n *) + | Texp_let of rec_flag * value_binding list * expression + (** let P1 = E1 and ... and Pn = EN in E (flag = Nonrecursive) + let rec P1 = E1 and ... and Pn = EN in E (flag = Recursive) + *) + | Texp_function of { arg_label : arg_label; param : Ident.t; + cases : case list; partial : partial; } + (** [Pexp_fun] and [Pexp_function] both translate to [Texp_function]. + See {!Parsetree} for more details. + + [param] is the identifier that is to be used to name the + parameter of the function. + + partial = + [Partial] if the pattern match is partial + [Total] otherwise. + *) + | Texp_apply of expression * (arg_label * expression option) list + (** E0 ~l1:E1 ... ~ln:En + + The expression can be None if the expression is abstracted over + this argument. It currently appears when a label is applied. + + For example: + let f x ~y = x + y in + f ~y:3 + + The resulting typedtree for the application is: + Texp_apply (Texp_ident "f/1037", + [(Nolabel, None); + (Labelled "y", Some (Texp_constant Const_int 3)) + ]) + *) + | Texp_match of expression * case list * case list * partial + (** match E0 with + | P1 -> E1 + | P2 -> E2 + | exception P3 -> E3 + + [Texp_match (E0, [(P1, E1); (P2, E2)], [(P3, E3)], _)] + *) + | Texp_try of expression * case list + (** try E with P1 -> E1 | ... | PN -> EN *) + | Texp_tuple of expression list + (** (E1, ..., EN) *) + | Texp_construct of + Longident.t loc * constructor_description * expression list + (** C [] + C E [E] + C (E1, ..., En) [E1;...;En] + *) + | Texp_variant of label * expression option + | Texp_record of { + fields : ( Types.label_description * record_label_definition ) array; + representation : Types.record_representation; + extended_expression : expression option; + } + (** { l1=P1; ...; ln=Pn } (extended_expression = None) + { E0 with l1=P1; ...; ln=Pn } (extended_expression = Some E0) + + Invariant: n > 0 + + If the type is { l1: t1; l2: t2 }, the expression + { E0 with t2=P2 } is represented as + Texp_record + { fields = [| l1, Kept t1; l2 Override P2 |]; representation; + extended_expression = Some E0 } + *) + | Texp_field of expression * Longident.t loc * label_description + | Texp_setfield of + expression * Longident.t loc * label_description * expression + | Texp_array of expression list + | Texp_ifthenelse of expression * expression * expression option + | Texp_sequence of expression * expression + | Texp_while of expression * expression + | Texp_for of + Ident.t * Parsetree.pattern * expression * expression * direction_flag * + expression + | Texp_send of expression * meth * expression option + | Texp_new of Path.t * Longident.t loc * Types.class_declaration + | Texp_instvar of Path.t * Path.t * string loc + | Texp_setinstvar of Path.t * Path.t * string loc * expression + | Texp_override of Path.t * (Path.t * string loc * expression) list + | Texp_letmodule of Ident.t * string loc * module_expr * expression + | Texp_letexception of extension_constructor * expression + | Texp_assert of expression + | Texp_lazy of expression + | Texp_object of class_structure * string list + | Texp_pack of module_expr + | Texp_unreachable + | Texp_extension_constructor of Longident.t loc * Path.t + +and meth = + Tmeth_name of string + | Tmeth_val of Ident.t + +and case = + { + c_lhs: pattern; + c_guard: expression option; + 
c_rhs: expression; + } + +and record_label_definition = + | Kept of Types.type_expr + | Overridden of Longident.t loc * expression + +(* Value expressions for the class language *) + +and class_expr = + { + cl_desc: class_expr_desc; + cl_loc: Location.t; + cl_type: Types.class_type; + cl_env: Env.t; + cl_attributes: attributes; + } + +and class_expr_desc = + Tcl_ident of Path.t * Longident.t loc * core_type list + | Tcl_structure of class_structure + | Tcl_fun of + arg_label * pattern * (Ident.t * string loc * expression) list + * class_expr * partial + | Tcl_apply of class_expr * (arg_label * expression option) list + | Tcl_let of rec_flag * value_binding list * + (Ident.t * string loc * expression) list * class_expr + | Tcl_constraint of + class_expr * class_type option * string list * string list * Concr.t + (* Visible instance variables, methods and concrete methods *) + | Tcl_open of override_flag * Path.t * Longident.t loc * Env.t * class_expr + +and class_structure = + { + cstr_self: pattern; + cstr_fields: class_field list; + cstr_type: Types.class_signature; + cstr_meths: Ident.t Meths.t; + } + +and class_field = + { + cf_desc: class_field_desc; + cf_loc: Location.t; + cf_attributes: attributes; + } + +and class_field_kind = + | Tcfk_virtual of core_type + | Tcfk_concrete of override_flag * expression + +and class_field_desc = + Tcf_inherit of + override_flag * class_expr * string option * (string * Ident.t) list * + (string * Ident.t) list + (* Inherited instance variables and concrete methods *) + | Tcf_val of string loc * mutable_flag * Ident.t * class_field_kind * bool + | Tcf_method of string loc * private_flag * class_field_kind + | Tcf_constraint of core_type * core_type + | Tcf_initializer of expression + | Tcf_attribute of attribute + +(* Value expressions for the module language *) + +and module_expr = + { mod_desc: module_expr_desc; + mod_loc: Location.t; + mod_type: Types.module_type; + mod_env: Env.t; + mod_attributes: attributes; + } + +(** Annotations for [Tmod_constraint]. *) +and module_type_constraint = + | Tmodtype_implicit + (** The module type constraint has been synthesized during typechecking. *) + | Tmodtype_explicit of module_type + (** The module type was in the source file. 
*) + +and module_expr_desc = + Tmod_ident of Path.t * Longident.t loc + | Tmod_structure of structure + | Tmod_functor of Ident.t * string loc * module_type option * module_expr + | Tmod_apply of module_expr * module_expr * module_coercion + | Tmod_constraint of + module_expr * Types.module_type * module_type_constraint * module_coercion + (** ME (constraint = Tmodtype_implicit) + (ME : MT) (constraint = Tmodtype_explicit MT) + *) + | Tmod_unpack of expression * Types.module_type + +and structure = { + str_items : structure_item list; + str_type : Types.signature; + str_final_env : Env.t; +} + +and structure_item = + { str_desc : structure_item_desc; + str_loc : Location.t; + str_env : Env.t + } + +and structure_item_desc = + Tstr_eval of expression * attributes + | Tstr_value of rec_flag * value_binding list + | Tstr_primitive of value_description + | Tstr_type of rec_flag * type_declaration list + | Tstr_typext of type_extension + | Tstr_exception of extension_constructor + | Tstr_module of module_binding + | Tstr_recmodule of module_binding list + | Tstr_modtype of module_type_declaration + | Tstr_open of open_description + | Tstr_class of (class_declaration * string list) list + | Tstr_class_type of (Ident.t * string loc * class_type_declaration) list + | Tstr_include of include_declaration + | Tstr_attribute of attribute + +and module_binding = + { + mb_id: Ident.t; + mb_name: string loc; + mb_expr: module_expr; + mb_attributes: attributes; + mb_loc: Location.t; + } + +and value_binding = + { + vb_pat: pattern; + vb_expr: expression; + vb_attributes: attributes; + vb_loc: Location.t; + } + +and module_coercion = + Tcoerce_none + | Tcoerce_structure of (int * module_coercion) list * + (Ident.t * int * module_coercion) list + | Tcoerce_functor of module_coercion * module_coercion + | Tcoerce_primitive of primitive_coercion + | Tcoerce_alias of Path.t * module_coercion + +and module_type = + { mty_desc: module_type_desc; + mty_type : Types.module_type; + mty_env : Env.t; + mty_loc: Location.t; + mty_attributes: attributes; + } + +and module_type_desc = + Tmty_ident of Path.t * Longident.t loc + | Tmty_signature of signature + | Tmty_functor of Ident.t * string loc * module_type option * module_type + | Tmty_with of module_type * (Path.t * Longident.t loc * with_constraint) list + | Tmty_typeof of module_expr + | Tmty_alias of Path.t * Longident.t loc + +and primitive_coercion = + { + pc_desc: Primitive.description; + pc_type: type_expr; + pc_env: Env.t; + pc_loc : Location.t; + } + +and signature = { + sig_items : signature_item list; + sig_type : Types.signature; + sig_final_env : Env.t; +} + +and signature_item = + { sig_desc: signature_item_desc; + sig_env : Env.t; (* BINANNOT ADDED *) + sig_loc: Location.t } + +and signature_item_desc = + Tsig_value of value_description + | Tsig_type of rec_flag * type_declaration list + | Tsig_typext of type_extension + | Tsig_exception of extension_constructor + | Tsig_module of module_declaration + | Tsig_recmodule of module_declaration list + | Tsig_modtype of module_type_declaration + | Tsig_open of open_description + | Tsig_include of include_description + | Tsig_class of class_description list + | Tsig_class_type of class_type_declaration list + | Tsig_attribute of attribute + +and module_declaration = + { + md_id: Ident.t; + md_name: string loc; + md_type: module_type; + md_attributes: attributes; + md_loc: Location.t; + } + +and module_type_declaration = + { + mtd_id: Ident.t; + mtd_name: string loc; + mtd_type: module_type option; + 
mtd_attributes: attributes; + mtd_loc: Location.t; + } + +and open_description = + { + open_path: Path.t; + open_txt: Longident.t loc; + open_override: override_flag; + open_loc: Location.t; + open_attributes: attribute list; + } + +and 'a include_infos = + { + incl_mod: 'a; + incl_type: Types.signature; + incl_loc: Location.t; + incl_attributes: attribute list; + } + +and include_description = module_type include_infos + +and include_declaration = module_expr include_infos + +and with_constraint = + Twith_type of type_declaration + | Twith_module of Path.t * Longident.t loc + | Twith_typesubst of type_declaration + | Twith_modsubst of Path.t * Longident.t loc + +and core_type = + { mutable ctyp_desc : core_type_desc; + (** mutable because of [Typeclass.declare_method] *) + mutable ctyp_type : type_expr; + (** mutable because of [Typeclass.declare_method] *) + ctyp_env : Env.t; (* BINANNOT ADDED *) + ctyp_loc : Location.t; + ctyp_attributes: attributes; + } + +and core_type_desc = + Ttyp_any + | Ttyp_var of string + | Ttyp_arrow of arg_label * core_type * core_type + | Ttyp_tuple of core_type list + | Ttyp_constr of Path.t * Longident.t loc * core_type list + | Ttyp_object of object_field list * closed_flag + | Ttyp_class of Path.t * Longident.t loc * core_type list + | Ttyp_alias of core_type * string + | Ttyp_variant of row_field list * closed_flag * label list option + | Ttyp_poly of string list * core_type + | Ttyp_package of package_type + +and package_type = { + pack_path : Path.t; + pack_fields : (Longident.t loc * core_type) list; + pack_type : Types.module_type; + pack_txt : Longident.t loc; +} + +and row_field = + Ttag of string loc * attributes * bool * core_type list + | Tinherit of core_type + +and object_field = + | OTtag of string loc * attributes * core_type + | OTinherit of core_type + +and value_description = + { val_id: Ident.t; + val_name: string loc; + val_desc: core_type; + val_val: Types.value_description; + val_prim: string list; + val_loc: Location.t; + val_attributes: attributes; + } + +and type_declaration = + { + typ_id: Ident.t; + typ_name: string loc; + typ_params: (core_type * variance) list; + typ_type: Types.type_declaration; + typ_cstrs: (core_type * core_type * Location.t) list; + typ_kind: type_kind; + typ_private: private_flag; + typ_manifest: core_type option; + typ_loc: Location.t; + typ_attributes: attributes; + } + +and type_kind = + Ttype_abstract + | Ttype_variant of constructor_declaration list + | Ttype_record of label_declaration list + | Ttype_open + +and label_declaration = + { + ld_id: Ident.t; + ld_name: string loc; + ld_mutable: mutable_flag; + ld_type: core_type; + ld_loc: Location.t; + ld_attributes: attributes; + } + +and constructor_declaration = + { + cd_id: Ident.t; + cd_name: string loc; + cd_args: constructor_arguments; + cd_res: core_type option; + cd_loc: Location.t; + cd_attributes: attributes; + } + +and constructor_arguments = + | Cstr_tuple of core_type list + | Cstr_record of label_declaration list + +and type_extension = + { + tyext_path: Path.t; + tyext_txt: Longident.t loc; + tyext_params: (core_type * variance) list; + tyext_constructors: extension_constructor list; + tyext_private: private_flag; + tyext_attributes: attributes; + } + +and extension_constructor = + { + ext_id: Ident.t; + ext_name: string loc; + ext_type : Types.extension_constructor; + ext_kind : extension_constructor_kind; + ext_loc : Location.t; + ext_attributes: attributes; + } + +and extension_constructor_kind = + Text_decl of constructor_arguments * 
core_type option + | Text_rebind of Path.t * Longident.t loc + +and class_type = + { + cltyp_desc: class_type_desc; + cltyp_type: Types.class_type; + cltyp_env: Env.t; + cltyp_loc: Location.t; + cltyp_attributes: attributes; + } + +and class_type_desc = + Tcty_constr of Path.t * Longident.t loc * core_type list + | Tcty_signature of class_signature + | Tcty_arrow of arg_label * core_type * class_type + | Tcty_open of override_flag * Path.t * Longident.t loc * Env.t * class_type + +and class_signature = { + csig_self : core_type; + csig_fields : class_type_field list; + csig_type : Types.class_signature; + } + +and class_type_field = { + ctf_desc: class_type_field_desc; + ctf_loc: Location.t; + ctf_attributes: attributes; + } + +and class_type_field_desc = + | Tctf_inherit of class_type + | Tctf_val of (string * mutable_flag * virtual_flag * core_type) + | Tctf_method of (string * private_flag * virtual_flag * core_type) + | Tctf_constraint of (core_type * core_type) + | Tctf_attribute of attribute + +and class_declaration = + class_expr class_infos + +and class_description = + class_type class_infos + +and class_type_declaration = + class_type class_infos + +and 'a class_infos = + { ci_virt: virtual_flag; + ci_params: (core_type * variance) list; + ci_id_name : string loc; + ci_id_class: Ident.t; + ci_id_class_type : Ident.t; + ci_id_object : Ident.t; + ci_id_typehash : Ident.t; + ci_expr: 'a; + ci_decl: Types.class_declaration; + ci_type_decl : Types.class_type_declaration; + ci_loc: Location.t; + ci_attributes: attributes; + } + +(* Auxiliary functions over the a.s.t. *) + +val iter_pattern_desc: (pattern -> unit) -> pattern_desc -> unit +val map_pattern_desc: (pattern -> pattern) -> pattern_desc -> pattern_desc + +val let_bound_idents: value_binding list -> Ident.t list +val rev_let_bound_idents: value_binding list -> Ident.t list + +val let_bound_idents_with_loc: + value_binding list -> (Ident.t * string loc) list + +(** Alpha conversion of patterns *) +val alpha_pat: (Ident.t * Ident.t) list -> pattern -> pattern + +val mknoloc: 'a -> 'a Asttypes.loc +val mkloc: 'a -> Location.t -> 'a Asttypes.loc + +val pat_bound_idents: pattern -> Ident.t list diff --git a/res_syntax/compiler-libs-406/typedtreeIter.ml b/res_syntax/compiler-libs-406/typedtreeIter.ml new file mode 100644 index 0000000000..a3be8d3be5 --- /dev/null +++ b/res_syntax/compiler-libs-406/typedtreeIter.ml @@ -0,0 +1,688 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Thomas Gazagnaire (OCamlPro), Fabrice Le Fessant (INRIA Saclay) *) +(* *) +(* Copyright 2007 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* +TODO: + - 2012/05/10: Follow camlp4 way of building map and iter using classes + and inheritance ? 
+*) + +open Asttypes +open Typedtree + +module type IteratorArgument = sig + + val enter_structure : structure -> unit + val enter_value_description : value_description -> unit + val enter_type_extension : type_extension -> unit + val enter_extension_constructor : extension_constructor -> unit + val enter_pattern : pattern -> unit + val enter_expression : expression -> unit + val enter_package_type : package_type -> unit + val enter_signature : signature -> unit + val enter_signature_item : signature_item -> unit + val enter_module_type_declaration : module_type_declaration -> unit + val enter_module_type : module_type -> unit + val enter_module_expr : module_expr -> unit + val enter_with_constraint : with_constraint -> unit + val enter_class_expr : class_expr -> unit + val enter_class_signature : class_signature -> unit + val enter_class_declaration : class_declaration -> unit + val enter_class_description : class_description -> unit + val enter_class_type_declaration : class_type_declaration -> unit + val enter_class_type : class_type -> unit + val enter_class_type_field : class_type_field -> unit + val enter_core_type : core_type -> unit + val enter_class_structure : class_structure -> unit + val enter_class_field : class_field -> unit + val enter_structure_item : structure_item -> unit + + + val leave_structure : structure -> unit + val leave_value_description : value_description -> unit + val leave_type_extension : type_extension -> unit + val leave_extension_constructor : extension_constructor -> unit + val leave_pattern : pattern -> unit + val leave_expression : expression -> unit + val leave_package_type : package_type -> unit + val leave_signature : signature -> unit + val leave_signature_item : signature_item -> unit + val leave_module_type_declaration : module_type_declaration -> unit + val leave_module_type : module_type -> unit + val leave_module_expr : module_expr -> unit + val leave_with_constraint : with_constraint -> unit + val leave_class_expr : class_expr -> unit + val leave_class_signature : class_signature -> unit + val leave_class_declaration : class_declaration -> unit + val leave_class_description : class_description -> unit + val leave_class_type_declaration : class_type_declaration -> unit + val leave_class_type : class_type -> unit + val leave_class_type_field : class_type_field -> unit + val leave_core_type : core_type -> unit + val leave_class_structure : class_structure -> unit + val leave_class_field : class_field -> unit + val leave_structure_item : structure_item -> unit + + val enter_bindings : rec_flag -> unit + val enter_binding : value_binding -> unit + val leave_binding : value_binding -> unit + val leave_bindings : rec_flag -> unit + + val enter_type_declarations : rec_flag -> unit + val enter_type_declaration : type_declaration -> unit + val leave_type_declaration : type_declaration -> unit + val leave_type_declarations : rec_flag -> unit + + end + +module MakeIterator(Iter : IteratorArgument) : sig + + val iter_structure : structure -> unit + val iter_signature : signature -> unit + val iter_structure_item : structure_item -> unit + val iter_signature_item : signature_item -> unit + val iter_expression : expression -> unit + val iter_module_type : module_type -> unit + val iter_pattern : pattern -> unit + val iter_class_expr : class_expr -> unit + + end = struct + + let may_iter f v = + match v with + None -> () + | Some x -> f x + + + let rec iter_structure str = + Iter.enter_structure str; + List.iter iter_structure_item str.str_items; + 
Iter.leave_structure str + + + and iter_binding vb = + Iter.enter_binding vb; + iter_pattern vb.vb_pat; + iter_expression vb.vb_expr; + Iter.leave_binding vb + + and iter_bindings rec_flag list = + Iter.enter_bindings rec_flag; + List.iter iter_binding list; + Iter.leave_bindings rec_flag + + and iter_case {c_lhs; c_guard; c_rhs} = + iter_pattern c_lhs; + may_iter iter_expression c_guard; + iter_expression c_rhs + + and iter_cases cases = + List.iter iter_case cases + + and iter_structure_item item = + Iter.enter_structure_item item; + begin + match item.str_desc with + Tstr_eval (exp, _attrs) -> iter_expression exp + | Tstr_value (rec_flag, list) -> + iter_bindings rec_flag list + | Tstr_primitive vd -> iter_value_description vd + | Tstr_type (rf, list) -> iter_type_declarations rf list + | Tstr_typext tyext -> iter_type_extension tyext + | Tstr_exception ext -> iter_extension_constructor ext + | Tstr_module x -> iter_module_binding x + | Tstr_recmodule list -> List.iter iter_module_binding list + | Tstr_modtype mtd -> iter_module_type_declaration mtd + | Tstr_open _ -> () + | Tstr_class list -> + List.iter (fun (ci, _) -> iter_class_declaration ci) list + | Tstr_class_type list -> + List.iter + (fun (_, _, ct) -> iter_class_type_declaration ct) + list + | Tstr_include incl -> iter_module_expr incl.incl_mod + | Tstr_attribute _ -> + () + end; + Iter.leave_structure_item item + + and iter_module_binding x = + iter_module_expr x.mb_expr + + and iter_value_description v = + Iter.enter_value_description v; + iter_core_type v.val_desc; + Iter.leave_value_description v + + and iter_constructor_arguments = function + | Cstr_tuple l -> List.iter iter_core_type l + | Cstr_record l -> List.iter (fun ld -> iter_core_type ld.ld_type) l + + and iter_constructor_declaration cd = + iter_constructor_arguments cd.cd_args; + option iter_core_type cd.cd_res; + + and iter_type_parameter (ct, _v) = + iter_core_type ct + + and iter_type_declaration decl = + Iter.enter_type_declaration decl; + List.iter iter_type_parameter decl.typ_params; + List.iter (fun (ct1, ct2, _loc) -> + iter_core_type ct1; + iter_core_type ct2 + ) decl.typ_cstrs; + begin match decl.typ_kind with + Ttype_abstract -> () + | Ttype_variant list -> + List.iter iter_constructor_declaration list + | Ttype_record list -> + List.iter + (fun ld -> + iter_core_type ld.ld_type + ) list + | Ttype_open -> () + end; + option iter_core_type decl.typ_manifest; + Iter.leave_type_declaration decl + + and iter_type_declarations rec_flag decls = + Iter.enter_type_declarations rec_flag; + List.iter iter_type_declaration decls; + Iter.leave_type_declarations rec_flag + + and iter_extension_constructor ext = + Iter.enter_extension_constructor ext; + begin match ext.ext_kind with + Text_decl(args, ret) -> + iter_constructor_arguments args; + option iter_core_type ret + | Text_rebind _ -> () + end; + Iter.leave_extension_constructor ext; + + and iter_type_extension tyext = + Iter.enter_type_extension tyext; + List.iter iter_type_parameter tyext.tyext_params; + List.iter iter_extension_constructor tyext.tyext_constructors; + Iter.leave_type_extension tyext + + and iter_pattern pat = + Iter.enter_pattern pat; + List.iter (fun (cstr, _, _attrs) -> match cstr with + | Tpat_type _ -> () + | Tpat_unpack -> () + | Tpat_open _ -> () + | Tpat_constraint ct -> iter_core_type ct) pat.pat_extra; + begin + match pat.pat_desc with + Tpat_any -> () + | Tpat_var _ -> () + | Tpat_alias (pat1, _, _) -> iter_pattern pat1 + | Tpat_constant _ -> () + | Tpat_tuple list -> + List.iter 
iter_pattern list + | Tpat_construct (_, _, args) -> + List.iter iter_pattern args + | Tpat_variant (_, pato, _) -> + begin match pato with + None -> () + | Some pat -> iter_pattern pat + end + | Tpat_record (list, _closed) -> + List.iter (fun (_, _, pat) -> iter_pattern pat) list + | Tpat_array list -> List.iter iter_pattern list + | Tpat_or (p1, p2, _) -> iter_pattern p1; iter_pattern p2 + | Tpat_lazy p -> iter_pattern p + end; + Iter.leave_pattern pat + + and option f x = match x with None -> () | Some e -> f e + + and iter_expression exp = + Iter.enter_expression exp; + List.iter (function (cstr, _, _attrs) -> + match cstr with + Texp_constraint ct -> + iter_core_type ct + | Texp_coerce (cty1, cty2) -> + option iter_core_type cty1; iter_core_type cty2 + | Texp_open _ -> () + | Texp_poly cto -> option iter_core_type cto + | Texp_newtype _ -> ()) + exp.exp_extra; + begin + match exp.exp_desc with + Texp_ident _ -> () + | Texp_constant _ -> () + | Texp_let (rec_flag, list, exp) -> + iter_bindings rec_flag list; + iter_expression exp + | Texp_function { cases; _ } -> + iter_cases cases + | Texp_apply (exp, list) -> + iter_expression exp; + List.iter (fun (_label, expo) -> + match expo with + None -> () + | Some exp -> iter_expression exp + ) list + | Texp_match (exp, list1, list2, _) -> + iter_expression exp; + iter_cases list1; + iter_cases list2; + | Texp_try (exp, list) -> + iter_expression exp; + iter_cases list + | Texp_tuple list -> + List.iter iter_expression list + | Texp_construct (_, _, args) -> + List.iter iter_expression args + | Texp_variant (_label, expo) -> + begin match expo with + None -> () + | Some exp -> iter_expression exp + end + | Texp_record { fields; extended_expression; _ } -> + Array.iter (function + | _, Kept _ -> () + | _, Overridden (_, exp) -> iter_expression exp) + fields; + begin match extended_expression with + None -> () + | Some exp -> iter_expression exp + end + | Texp_field (exp, _, _label) -> + iter_expression exp + | Texp_setfield (exp1, _, _label, exp2) -> + iter_expression exp1; + iter_expression exp2 + | Texp_array list -> + List.iter iter_expression list + | Texp_ifthenelse (exp1, exp2, expo) -> + iter_expression exp1; + iter_expression exp2; + begin match expo with + None -> () + | Some exp -> iter_expression exp + end + | Texp_sequence (exp1, exp2) -> + iter_expression exp1; + iter_expression exp2 + | Texp_while (exp1, exp2) -> + iter_expression exp1; + iter_expression exp2 + | Texp_for (_id, _, exp1, exp2, _dir, exp3) -> + iter_expression exp1; + iter_expression exp2; + iter_expression exp3 + | Texp_send (exp, _meth, expo) -> + iter_expression exp; + begin + match expo with + None -> () + | Some exp -> iter_expression exp + end + | Texp_new _ -> () + | Texp_instvar _ -> () + | Texp_setinstvar (_, _, _, exp) -> + iter_expression exp + | Texp_override (_, list) -> + List.iter (fun (_path, _, exp) -> + iter_expression exp + ) list + | Texp_letmodule (_id, _, mexpr, exp) -> + iter_module_expr mexpr; + iter_expression exp + | Texp_letexception (cd, exp) -> + iter_extension_constructor cd; + iter_expression exp + | Texp_assert exp -> iter_expression exp + | Texp_lazy exp -> iter_expression exp + | Texp_object (cl, _) -> + iter_class_structure cl + | Texp_pack (mexpr) -> + iter_module_expr mexpr + | Texp_unreachable -> + () + | Texp_extension_constructor _ -> + () + end; + Iter.leave_expression exp; + + and iter_package_type pack = + Iter.enter_package_type pack; + List.iter (fun (_s, ct) -> iter_core_type ct) pack.pack_fields; + 
Iter.leave_package_type pack; + + and iter_signature sg = + Iter.enter_signature sg; + List.iter iter_signature_item sg.sig_items; + Iter.leave_signature sg; + + and iter_signature_item item = + Iter.enter_signature_item item; + begin + match item.sig_desc with + Tsig_value vd -> + iter_value_description vd + | Tsig_type (rf, list) -> + iter_type_declarations rf list + | Tsig_exception ext -> + iter_extension_constructor ext + | Tsig_typext tyext -> + iter_type_extension tyext + | Tsig_module md -> + iter_module_type md.md_type + | Tsig_recmodule list -> + List.iter (fun md -> iter_module_type md.md_type) list + | Tsig_modtype mtd -> + iter_module_type_declaration mtd + | Tsig_open _ -> () + | Tsig_include incl -> iter_module_type incl.incl_mod + | Tsig_class list -> + List.iter iter_class_description list + | Tsig_class_type list -> + List.iter iter_class_type_declaration list + | Tsig_attribute _ -> () + end; + Iter.leave_signature_item item; + + and iter_module_type_declaration mtd = + Iter.enter_module_type_declaration mtd; + begin + match mtd.mtd_type with + | None -> () + | Some mtype -> iter_module_type mtype + end; + Iter.leave_module_type_declaration mtd + + and iter_class_declaration cd = + Iter.enter_class_declaration cd; + List.iter iter_type_parameter cd.ci_params; + iter_class_expr cd.ci_expr; + Iter.leave_class_declaration cd; + + and iter_class_description cd = + Iter.enter_class_description cd; + List.iter iter_type_parameter cd.ci_params; + iter_class_type cd.ci_expr; + Iter.leave_class_description cd; + + and iter_class_type_declaration cd = + Iter.enter_class_type_declaration cd; + List.iter iter_type_parameter cd.ci_params; + iter_class_type cd.ci_expr; + Iter.leave_class_type_declaration cd; + + and iter_module_type mty = + Iter.enter_module_type mty; + begin + match mty.mty_desc with + Tmty_ident _ -> () + | Tmty_alias _ -> () + | Tmty_signature sg -> iter_signature sg + | Tmty_functor (_, _, mtype1, mtype2) -> + Misc.may iter_module_type mtype1; iter_module_type mtype2 + | Tmty_with (mtype, list) -> + iter_module_type mtype; + List.iter (fun (_path, _, withc) -> + iter_with_constraint withc + ) list + | Tmty_typeof mexpr -> + iter_module_expr mexpr + end; + Iter.leave_module_type mty; + + and iter_with_constraint cstr = + Iter.enter_with_constraint cstr; + begin + match cstr with + Twith_type decl -> iter_type_declaration decl + | Twith_module _ -> () + | Twith_typesubst decl -> iter_type_declaration decl + | Twith_modsubst _ -> () + end; + Iter.leave_with_constraint cstr; + + and iter_module_expr mexpr = + Iter.enter_module_expr mexpr; + begin + match mexpr.mod_desc with + Tmod_ident _ -> () + | Tmod_structure st -> iter_structure st + | Tmod_functor (_, _, mtype, mexpr) -> + Misc.may iter_module_type mtype; + iter_module_expr mexpr + | Tmod_apply (mexp1, mexp2, _) -> + iter_module_expr mexp1; + iter_module_expr mexp2 + | Tmod_constraint (mexpr, _, Tmodtype_implicit, _ ) -> + iter_module_expr mexpr + | Tmod_constraint (mexpr, _, Tmodtype_explicit mtype, _) -> + iter_module_expr mexpr; + iter_module_type mtype + | Tmod_unpack (exp, _mty) -> + iter_expression exp +(* iter_module_type mty *) + end; + Iter.leave_module_expr mexpr; + + and iter_class_expr cexpr = + Iter.enter_class_expr cexpr; + begin + match cexpr.cl_desc with + | Tcl_constraint (cl, None, _, _, _ ) -> + iter_class_expr cl; + | Tcl_structure clstr -> iter_class_structure clstr + | Tcl_fun (_label, pat, priv, cl, _partial) -> + iter_pattern pat; + List.iter (fun (_id, _, exp) -> iter_expression exp) 
priv; + iter_class_expr cl + + | Tcl_apply (cl, args) -> + iter_class_expr cl; + List.iter (fun (_label, expo) -> + match expo with + None -> () + | Some exp -> iter_expression exp + ) args + + | Tcl_let (rec_flat, bindings, ivars, cl) -> + iter_bindings rec_flat bindings; + List.iter (fun (_id, _, exp) -> iter_expression exp) ivars; + iter_class_expr cl + + | Tcl_constraint (cl, Some clty, _vals, _meths, _concrs) -> + iter_class_expr cl; + iter_class_type clty + + | Tcl_ident (_, _, tyl) -> + List.iter iter_core_type tyl + + | Tcl_open (_, _, _, _, e) -> + iter_class_expr e + end; + Iter.leave_class_expr cexpr; + + and iter_class_type ct = + Iter.enter_class_type ct; + begin + match ct.cltyp_desc with + Tcty_signature csg -> iter_class_signature csg + | Tcty_constr (_path, _, list) -> + List.iter iter_core_type list + | Tcty_arrow (_label, ct, cl) -> + iter_core_type ct; + iter_class_type cl + | Tcty_open (_, _, _, _, e) -> + iter_class_type e + end; + Iter.leave_class_type ct; + + and iter_class_signature cs = + Iter.enter_class_signature cs; + iter_core_type cs.csig_self; + List.iter iter_class_type_field cs.csig_fields; + Iter.leave_class_signature cs + + + and iter_class_type_field ctf = + Iter.enter_class_type_field ctf; + begin + match ctf.ctf_desc with + Tctf_inherit ct -> iter_class_type ct + | Tctf_val (_s, _mut, _virt, ct) -> + iter_core_type ct + | Tctf_method (_s, _priv, _virt, ct) -> + iter_core_type ct + | Tctf_constraint (ct1, ct2) -> + iter_core_type ct1; + iter_core_type ct2 + | Tctf_attribute _ -> () + end; + Iter.leave_class_type_field ctf + + and iter_core_type ct = + Iter.enter_core_type ct; + begin + match ct.ctyp_desc with + Ttyp_any -> () + | Ttyp_var _ -> () + | Ttyp_arrow (_label, ct1, ct2) -> + iter_core_type ct1; + iter_core_type ct2 + | Ttyp_tuple list -> List.iter iter_core_type list + | Ttyp_constr (_path, _, list) -> + List.iter iter_core_type list + | Ttyp_object (list, _o) -> + List.iter iter_object_field list + | Ttyp_class (_path, _, list) -> + List.iter iter_core_type list + | Ttyp_alias (ct, _s) -> + iter_core_type ct + | Ttyp_variant (list, _bool, _labels) -> + List.iter iter_row_field list + | Ttyp_poly (_list, ct) -> iter_core_type ct + | Ttyp_package pack -> iter_package_type pack + end; + Iter.leave_core_type ct + + and iter_class_structure cs = + Iter.enter_class_structure cs; + iter_pattern cs.cstr_self; + List.iter iter_class_field cs.cstr_fields; + Iter.leave_class_structure cs; + + + and iter_row_field rf = + match rf with + Ttag (_label, _attrs, _bool, list) -> + List.iter iter_core_type list + | Tinherit ct -> iter_core_type ct + + and iter_object_field ofield = + match ofield with + OTtag (_, _, ct) | OTinherit ct -> iter_core_type ct + + and iter_class_field cf = + Iter.enter_class_field cf; + begin + match cf.cf_desc with + Tcf_inherit (_ovf, cl, _super, _vals, _meths) -> + iter_class_expr cl + | Tcf_constraint (cty, cty') -> + iter_core_type cty; + iter_core_type cty' + | Tcf_val (_lab, _, _, Tcfk_virtual cty, _) -> + iter_core_type cty + | Tcf_val (_lab, _, _, Tcfk_concrete (_, exp), _) -> + iter_expression exp + | Tcf_method (_lab, _, Tcfk_virtual cty) -> + iter_core_type cty + | Tcf_method (_lab, _, Tcfk_concrete (_, exp)) -> + iter_expression exp + | Tcf_initializer exp -> + iter_expression exp + | Tcf_attribute _ -> () + end; + Iter.leave_class_field cf; + end + +module DefaultIteratorArgument = struct + + let enter_structure _ = () + let enter_value_description _ = () + let enter_type_extension _ = () + let 
enter_extension_constructor _ = () + let enter_pattern _ = () + let enter_expression _ = () + let enter_package_type _ = () + let enter_signature _ = () + let enter_signature_item _ = () + let enter_module_type_declaration _ = () + let enter_module_type _ = () + let enter_module_expr _ = () + let enter_with_constraint _ = () + let enter_class_expr _ = () + let enter_class_signature _ = () + let enter_class_declaration _ = () + let enter_class_description _ = () + let enter_class_type_declaration _ = () + let enter_class_type _ = () + let enter_class_type_field _ = () + let enter_core_type _ = () + let enter_class_structure _ = () + let enter_class_field _ = () + let enter_structure_item _ = () + + + let leave_structure _ = () + let leave_value_description _ = () + let leave_type_extension _ = () + let leave_extension_constructor _ = () + let leave_pattern _ = () + let leave_expression _ = () + let leave_package_type _ = () + let leave_signature _ = () + let leave_signature_item _ = () + let leave_module_type_declaration _ = () + let leave_module_type _ = () + let leave_module_expr _ = () + let leave_with_constraint _ = () + let leave_class_expr _ = () + let leave_class_signature _ = () + let leave_class_declaration _ = () + let leave_class_description _ = () + let leave_class_type_declaration _ = () + let leave_class_type _ = () + let leave_class_type_field _ = () + let leave_core_type _ = () + let leave_class_structure _ = () + let leave_class_field _ = () + let leave_structure_item _ = () + + let enter_binding _ = () + let leave_binding _ = () + + let enter_bindings _ = () + let leave_bindings _ = () + + let enter_type_declaration _ = () + let leave_type_declaration _ = () + + let enter_type_declarations _ = () + let leave_type_declarations _ = () +end diff --git a/res_syntax/compiler-libs-406/typedtreeIter.mli b/res_syntax/compiler-libs-406/typedtreeIter.mli new file mode 100644 index 0000000000..53aa54c120 --- /dev/null +++ b/res_syntax/compiler-libs-406/typedtreeIter.mli @@ -0,0 +1,97 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Thomas Gazagnaire (OCamlPro), Fabrice Le Fessant (INRIA Saclay) *) +(* *) +(* Copyright 2007 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +open Asttypes +open Typedtree + + +module type IteratorArgument = sig + val enter_structure : structure -> unit + val enter_value_description : value_description -> unit + val enter_type_extension : type_extension -> unit + val enter_extension_constructor : extension_constructor -> unit + val enter_pattern : pattern -> unit + val enter_expression : expression -> unit + val enter_package_type : package_type -> unit + val enter_signature : signature -> unit + val enter_signature_item : signature_item -> unit + val enter_module_type_declaration : module_type_declaration -> unit + val enter_module_type : module_type -> unit + val enter_module_expr : module_expr -> unit + val enter_with_constraint : with_constraint -> unit + val enter_class_expr : class_expr -> unit + val enter_class_signature : class_signature -> unit + val enter_class_declaration : class_declaration -> unit + val enter_class_description : class_description -> unit + val enter_class_type_declaration : class_type_declaration -> unit + val enter_class_type : class_type -> unit + val enter_class_type_field : class_type_field -> unit + val enter_core_type : core_type -> unit + val enter_class_structure : class_structure -> unit + val enter_class_field : class_field -> unit + val enter_structure_item : structure_item -> unit + + + val leave_structure : structure -> unit + val leave_value_description : value_description -> unit + val leave_type_extension : type_extension -> unit + val leave_extension_constructor : extension_constructor -> unit + val leave_pattern : pattern -> unit + val leave_expression : expression -> unit + val leave_package_type : package_type -> unit + val leave_signature : signature -> unit + val leave_signature_item : signature_item -> unit + val leave_module_type_declaration : module_type_declaration -> unit + val leave_module_type : module_type -> unit + val leave_module_expr : module_expr -> unit + val leave_with_constraint : with_constraint -> unit + val leave_class_expr : class_expr -> unit + val leave_class_signature : class_signature -> unit + val leave_class_declaration : class_declaration -> unit + val leave_class_description : class_description -> unit + val leave_class_type_declaration : class_type_declaration -> unit + val leave_class_type : class_type -> unit + val leave_class_type_field : class_type_field -> unit + val leave_core_type : core_type -> unit + val leave_class_structure : class_structure -> unit + val leave_class_field : class_field -> unit + val leave_structure_item : structure_item -> unit + + val enter_bindings : rec_flag -> unit + val enter_binding : value_binding -> unit + val leave_binding : value_binding -> unit + val leave_bindings : rec_flag -> unit + + val enter_type_declarations : rec_flag -> unit + val enter_type_declaration : type_declaration -> unit + val leave_type_declaration : type_declaration -> unit + val leave_type_declarations : rec_flag -> unit + +end + +module MakeIterator : + functor (Iter : IteratorArgument) -> + sig + val iter_structure : structure -> unit + val iter_signature : signature -> unit + val iter_structure_item : structure_item -> unit + val iter_signature_item : signature_item -> unit + val iter_expression : expression -> unit + val iter_module_type : module_type -> unit + val iter_pattern : pattern -> unit + val iter_class_expr : class_expr -> unit + end + +module DefaultIteratorArgument : IteratorArgument diff --git 
a/res_syntax/compiler-libs-406/typemod.ml b/res_syntax/compiler-libs-406/typemod.ml new file mode 100644 index 0000000000..84fc649017 --- /dev/null +++ b/res_syntax/compiler-libs-406/typemod.ml @@ -0,0 +1,2036 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +open Misc +open Longident +open Path +open Asttypes +open Parsetree +open Types +open Format + +type error = + Cannot_apply of module_type + | Not_included of Includemod.error list + | Cannot_eliminate_dependency of module_type + | Signature_expected + | Structure_expected of module_type + | With_no_component of Longident.t + | With_mismatch of Longident.t * Includemod.error list + | With_makes_applicative_functor_ill_typed of + Longident.t * Path.t * Includemod.error list + | With_changes_module_alias of Longident.t * Ident.t * Path.t + | With_cannot_remove_constrained_type + | Repeated_name of string * string + | Non_generalizable of type_expr + | Non_generalizable_class of Ident.t * class_declaration + | Non_generalizable_module of module_type + | Implementation_is_required of string + | Interface_not_compiled of string + | Not_allowed_in_functor_body + | Not_a_packed_module of type_expr + | Incomplete_packed_module of type_expr + | Scoping_pack of Longident.t * type_expr + | Recursive_module_require_explicit_type + | Apply_generative + | Cannot_scrape_alias of Path.t + +exception Error of Location.t * Env.t * error +exception Error_forward of Location.error + +module ImplementationHooks = Misc.MakeHooks(struct + type t = Typedtree.structure * Typedtree.module_coercion + end) +module InterfaceHooks = Misc.MakeHooks(struct + type t = Typedtree.signature + end) + +open Typedtree + +let fst3 (x,_,_) = x + +let rec path_concat head p = + match p with + Pident tail -> Pdot (Pident head, Ident.name tail, 0) + | Pdot (pre, s, pos) -> Pdot (path_concat head pre, s, pos) + | Papply _ -> assert false + +(* Extract a signature from a module type *) + +let extract_sig env loc mty = + match Env.scrape_alias env mty with + Mty_signature sg -> sg + | Mty_alias(_, path) -> + raise(Error(loc, env, Cannot_scrape_alias path)) + | _ -> raise(Error(loc, env, Signature_expected)) + +let extract_sig_open env loc mty = + match Env.scrape_alias env mty with + Mty_signature sg -> sg + | Mty_alias(_, path) -> + raise(Error(loc, env, Cannot_scrape_alias path)) + | mty -> raise(Error(loc, env, Structure_expected mty)) + +(* Compute the environment after opening a module *) + +let type_open_ ?used_slot ?toplevel ovf env loc lid = + let path = Typetexp.lookup_module ~load:true env lid.loc lid.txt in + match Env.open_signature ~loc ?used_slot ?toplevel ovf path env with + | Some env -> path, env + | None -> + let md = Env.find_module path env in + ignore (extract_sig_open env lid.loc md.md_type); + assert false + +let type_open ?toplevel env sod = + let (path, newenv) = + Builtin_attributes.warning_scope sod.popen_attributes + (fun () -> + type_open_ ?toplevel sod.popen_override env sod.popen_loc + sod.popen_lid + ) + in + let od = + { + open_override = 
sod.popen_override; + open_path = path; + open_txt = sod.popen_lid; + open_attributes = sod.popen_attributes; + open_loc = sod.popen_loc; + } + in + (path, newenv, od) + +(* Record a module type *) +let rm node = + Stypes.record (Stypes.Ti_mod node); + node + +(* Forward declaration, to be filled in by type_module_type_of *) +let type_module_type_of_fwd : + (Env.t -> Parsetree.module_expr -> + Typedtree.module_expr * Types.module_type) ref + = ref (fun _env _m -> assert false) + +(* Merge one "with" constraint in a signature *) + +let rec add_rec_types env = function + Sig_type(id, decl, Trec_next) :: rem -> + add_rec_types (Env.add_type ~check:true id decl env) rem + | _ -> env + +let check_type_decl env loc id row_id newdecl decl rs rem = + let env = Env.add_type ~check:true id newdecl env in + let env = + match row_id with + | None -> env + | Some id -> Env.add_type ~check:false id newdecl env + in + let env = if rs = Trec_not then env else add_rec_types env rem in + Includemod.type_declarations ~loc env id newdecl decl; + Typedecl.check_coherence env loc id newdecl + +let update_rec_next rs rem = + match rs with + Trec_next -> rem + | Trec_first | Trec_not -> + match rem with + Sig_type (id, decl, Trec_next) :: rem -> + Sig_type (id, decl, rs) :: rem + | Sig_module (id, mty, Trec_next) :: rem -> + Sig_module (id, mty, rs) :: rem + | _ -> rem + +let make p n i = + let open Variance in + set May_pos p (set May_neg n (set May_weak n (set Inj i null))) + +let rec iter_path_apply p ~f = + match p with + | Pident _ -> () + | Pdot (p, _, _) -> iter_path_apply p ~f + | Papply (p1, p2) -> + iter_path_apply p1 ~f; + iter_path_apply p2 ~f; + f p1 p2 (* after recursing, so we know both paths are well typed *) + +let path_is_strict_prefix = + let rec list_is_strict_prefix l ~prefix = + match l, prefix with + | [], [] -> false + | _ :: _, [] -> true + | [], _ :: _ -> false + | s1 :: t1, s2 :: t2 -> + String.equal s1 s2 && list_is_strict_prefix t1 ~prefix:t2 + in + fun path ~prefix -> + match Path.flatten path, Path.flatten prefix with + | `Contains_apply, _ | _, `Contains_apply -> false + | `Ok (ident1, l1), `Ok (ident2, l2) -> + Ident.same ident1 ident2 + && list_is_strict_prefix l1 ~prefix:l2 + +let iterator_with_env env = + let env = ref env in + let super = Btype.type_iterators in + env, { super with + Btype.it_signature = (fun self sg -> + (* add all items to the env before recursing down, to handle recursive + definitions *) + let env_before = !env in + List.iter (fun i -> env := Env.add_item i !env) sg; + super.Btype.it_signature self sg; + env := env_before + ); + Btype.it_module_type = (fun self -> function + | Mty_functor (param, mty_arg, mty_body) -> + may (self.Btype.it_module_type self) mty_arg; + let env_before = !env in + env := Env.add_module ~arg:true param (Btype.default_mty mty_arg) !env; + self.Btype.it_module_type self mty_body; + env := env_before; + | mty -> + super.Btype.it_module_type self mty + ) + } + +let retype_applicative_functor_type ~loc env funct arg = + let mty_functor = (Env.find_module funct env).md_type in + let mty_arg = (Env.find_module arg env).md_type in + let mty_param = + match Env.scrape_alias env mty_functor with + | Mty_functor (_, Some mty_param, _) -> mty_param + | _ -> assert false (* could trigger due to MPR#7611 *) + in + let aliasable = not (Env.is_functor_arg arg env) in + ignore(Includemod.modtypes ~loc env + (Mtype.strengthen ~aliasable env mty_arg arg) mty_param) + +(* When doing a deep destructive substitution with type M.N.t := .., we change 
M + and M.N and so we have to check that uses of the modules other than just + extracting components from them still make sense. There are only two such + kinds of uses: + - applicative functor types: F(M).t might not be well typed anymore + - aliases: module A = M still makes sense but it doesn't mean the same thing + anymore, so it's forbidden until it's clear what we should do with it. + This function would be called with M.N.t and N.t to check for these uses. *) +let check_usage_of_path_of_substituted_item paths env signature ~loc ~lid = + let iterator = + let env, super = iterator_with_env env in + { super with + Btype.it_signature_item = (fun self -> function + | Sig_module (id, { md_type = Mty_alias (_, aliased_path); _ }, _) + when List.exists + (fun path -> path_is_strict_prefix path ~prefix:aliased_path) + paths + -> + let e = With_changes_module_alias (lid.txt, id, aliased_path) in + raise(Error(loc, !env, e)) + | sig_item -> + super.Btype.it_signature_item self sig_item + ); + Btype.it_path = (fun referenced_path -> + iter_path_apply referenced_path ~f:(fun funct arg -> + if List.exists + (fun path -> path_is_strict_prefix path ~prefix:arg) + paths + then + let env = !env in + try retype_applicative_functor_type ~loc env funct arg + with Includemod.Error explanation -> + raise(Error(loc, env, + With_makes_applicative_functor_ill_typed + (lid.txt, referenced_path, explanation))) + ) + ); + } + in + iterator.Btype.it_signature iterator signature; + Btype.unmark_iterators.Btype.it_signature Btype.unmark_iterators signature + +let type_decl_is_alias sdecl = (* assuming no explicit constraint *) + match sdecl.ptype_manifest with + | Some {ptyp_desc = Ptyp_constr (lid, stl)} + when List.length stl = List.length sdecl.ptype_params -> + begin + match + List.iter2 (fun x (y, _) -> + match x, y with + {ptyp_desc=Ptyp_var sx}, {ptyp_desc=Ptyp_var sy} + when sx = sy -> () + | _, _ -> raise Exit) + stl sdecl.ptype_params; + with + | exception Exit -> None + | () -> Some lid + end + | _ -> None +;; + +let params_are_constrained = + let rec loop = function + | [] -> false + | hd :: tl -> + match (Btype.repr hd).desc with + | Tvar _ -> List.memq hd tl || loop tl + | _ -> true + in + loop +;; + +let merge_constraint initial_env loc sg constr = + let lid = + match constr with + | Pwith_type (lid, _) | Pwith_module (lid, _) + | Pwith_typesubst (lid, _) | Pwith_modsubst (lid, _) -> lid + in + let destructive_substitution = + match constr with + | Pwith_type _ | Pwith_module _ -> false + | Pwith_typesubst _ | Pwith_modsubst _ -> true + in + let real_ids = ref [] in + let rec merge env sg namelist row_id = + match (sg, namelist, constr) with + ([], _, _) -> + raise(Error(loc, env, With_no_component lid.txt)) + | (Sig_type(id, decl, rs) :: rem, [s], + Pwith_type (_, ({ptype_kind = Ptype_abstract} as sdecl))) + when Ident.name id = s && Typedecl.is_fixed_type sdecl -> + let decl_row = + { type_params = + List.map (fun _ -> Btype.newgenvar()) sdecl.ptype_params; + type_arity = List.length sdecl.ptype_params; + type_kind = Type_abstract; + type_private = Private; + type_manifest = None; + type_variance = + List.map + (fun (_, v) -> + let (c, n) = + match v with + | Covariant -> true, false + | Contravariant -> false, true + | Invariant -> false, false + in + make (not n) (not c) false + ) + sdecl.ptype_params; + type_loc = sdecl.ptype_loc; + type_newtype_level = None; + type_attributes = []; + type_immediate = false; + type_unboxed = unboxed_false_default_false; + } + and id_row = Ident.create 
(s^"#row") in + let initial_env = + Env.add_type ~check:false id_row decl_row initial_env + in + let tdecl = Typedecl.transl_with_constraint + initial_env id (Some(Pident id_row)) decl sdecl in + let newdecl = tdecl.typ_type in + check_type_decl env sdecl.ptype_loc id row_id newdecl decl rs rem; + let decl_row = {decl_row with type_params = newdecl.type_params} in + let rs' = if rs = Trec_first then Trec_not else rs in + (Pident id, lid, Twith_type tdecl), + Sig_type(id_row, decl_row, rs') :: Sig_type(id, newdecl, rs) :: rem + | (Sig_type(id, decl, rs) :: rem , [s], Pwith_type (_, sdecl)) + when Ident.name id = s -> + let tdecl = + Typedecl.transl_with_constraint initial_env id None decl sdecl in + let newdecl = tdecl.typ_type in + check_type_decl env sdecl.ptype_loc id row_id newdecl decl rs rem; + (Pident id, lid, Twith_type tdecl), Sig_type(id, newdecl, rs) :: rem + | (Sig_type(id, _, _) :: rem, [s], (Pwith_type _ | Pwith_typesubst _)) + when Ident.name id = s ^ "#row" -> + merge env rem namelist (Some id) + | (Sig_type(id, decl, rs) :: rem, [s], Pwith_typesubst (_, sdecl)) + when Ident.name id = s -> + (* Check as for a normal with constraint, but discard definition *) + let tdecl = + Typedecl.transl_with_constraint initial_env id None decl sdecl in + let newdecl = tdecl.typ_type in + check_type_decl env sdecl.ptype_loc id row_id newdecl decl rs rem; + real_ids := [Pident id]; + (Pident id, lid, Twith_typesubst tdecl), + update_rec_next rs rem + | (Sig_module(id, md, rs) :: rem, [s], Pwith_module (_, lid')) + when Ident.name id = s -> + let path, md' = Typetexp.find_module initial_env loc lid'.txt in + let md'' = {md' with md_type = Mtype.remove_aliases env md'.md_type} in + let newmd = Mtype.strengthen_decl ~aliasable:false env md'' path in + ignore(Includemod.modtypes ~loc env newmd.md_type md.md_type); + (Pident id, lid, Twith_module (path, lid')), + Sig_module(id, newmd, rs) :: rem + | (Sig_module(id, md, rs) :: rem, [s], Pwith_modsubst (_, lid')) + when Ident.name id = s -> + let path, md' = Typetexp.find_module initial_env loc lid'.txt in + let newmd = Mtype.strengthen_decl ~aliasable:false env md' path in + ignore(Includemod.modtypes ~loc env newmd.md_type md.md_type); + real_ids := [Pident id]; + (Pident id, lid, Twith_modsubst (path, lid')), + update_rec_next rs rem + | (Sig_module(id, md, rs) :: rem, s :: namelist, _) + when Ident.name id = s -> + let ((path, _path_loc, tcstr), newsg) = + merge env (extract_sig env loc md.md_type) namelist None in + let path = path_concat id path in + real_ids := path :: !real_ids; + let item = Sig_module(id, {md with md_type=Mty_signature newsg}, rs) in + (path, lid, tcstr), + item :: rem + | (item :: rem, _, _) -> + let (cstr, items) = merge (Env.add_item item env) rem namelist row_id + in + cstr, item :: items + in + try + let names = Longident.flatten lid.txt in + let (tcstr, sg) = merge initial_env sg names None in + if destructive_substitution then ( + match List.rev !real_ids with + | [] -> assert false + | last :: rest -> + (* The last item is the one that's removed. We don't need to check how + it's used since it's replaced by a more specific type/module. 
*) + assert (match last with Pident _ -> true | _ -> false); + match rest with + | [] -> () + | _ :: _ -> + check_usage_of_path_of_substituted_item + rest initial_env sg ~loc ~lid; + ); + let sg = + match tcstr with + | (_, _, Twith_typesubst tdecl) -> + let how_to_extend_subst = + let sdecl = + match constr with + | Pwith_typesubst (_, sdecl) -> sdecl + | _ -> assert false + in + match type_decl_is_alias sdecl with + | Some lid -> + let replacement = + try Env.lookup_type lid.txt initial_env + with Not_found -> assert false + in + fun s path -> Subst.add_type_path path replacement s + | None -> + let body = + match tdecl.typ_type.type_manifest with + | None -> assert false + | Some x -> x + in + let params = tdecl.typ_type.type_params in + if params_are_constrained params + then raise(Error(loc, initial_env, With_cannot_remove_constrained_type)); + fun s path -> Subst.add_type_function path ~params ~body s + in + let sub = List.fold_left how_to_extend_subst Subst.identity !real_ids in + Subst.signature sub sg + | (_, _, Twith_modsubst (real_path, _)) -> + let sub = + List.fold_left + (fun s path -> Subst.add_module_path path real_path s) + Subst.identity + !real_ids + in + Subst.signature sub sg + | _ -> + sg + in + (tcstr, sg) + with Includemod.Error explanation -> + raise(Error(loc, initial_env, With_mismatch(lid.txt, explanation))) + +(* Add recursion flags on declarations arising from a mutually recursive + block. *) + +let map_rec fn decls rem = + match decls with + | [] -> rem + | d1 :: dl -> fn Trec_first d1 :: map_end (fn Trec_next) dl rem + +let map_rec_type ~rec_flag fn decls rem = + match decls with + | [] -> rem + | d1 :: dl -> + let first = + match rec_flag with + | Recursive -> Trec_first + | Nonrecursive -> Trec_not + in + fn first d1 :: map_end (fn Trec_next) dl rem + +let rec map_rec_type_with_row_types ~rec_flag fn decls rem = + match decls with + | [] -> rem + | d1 :: dl -> + if Btype.is_row_name (Ident.name d1.typ_id) then + fn Trec_not d1 :: map_rec_type_with_row_types ~rec_flag fn dl rem + else + map_rec_type ~rec_flag fn decls rem + +(* Add type extension flags to extension constructors *) +let map_ext fn exts rem = + match exts with + | [] -> rem + | d1 :: dl -> fn Text_first d1 :: map_end (fn Text_next) dl rem + +(* Auxiliary for translating recursively-defined module types. + Return a module type that approximates the shape of the given module + type AST. Retain only module, type, and module type + components of signatures. For types, retain only their arity, + making them abstract otherwise. 
*) + +let rec approx_modtype env smty = + match smty.pmty_desc with + Pmty_ident lid -> + let (path, _info) = Typetexp.find_modtype env smty.pmty_loc lid.txt in + Mty_ident path + | Pmty_alias lid -> + let path = Typetexp.lookup_module env smty.pmty_loc lid.txt in + Mty_alias(Mta_absent, path) + | Pmty_signature ssg -> + Mty_signature(approx_sig env ssg) + | Pmty_functor(param, sarg, sres) -> + let arg = may_map (approx_modtype env) sarg in + let (id, newenv) = + Env.enter_module ~arg:true param.txt (Btype.default_mty arg) env in + let res = approx_modtype newenv sres in + Mty_functor(id, arg, res) + | Pmty_with(sbody, _constraints) -> + approx_modtype env sbody + | Pmty_typeof smod -> + let (_, mty) = !type_module_type_of_fwd env smod in + mty + | Pmty_extension ext -> + raise (Error_forward (Builtin_attributes.error_of_extension ext)) + +and approx_module_declaration env pmd = + { + Types.md_type = approx_modtype env pmd.pmd_type; + md_attributes = pmd.pmd_attributes; + md_loc = pmd.pmd_loc; + } + +and approx_sig env ssg = + match ssg with + [] -> [] + | item :: srem -> + match item.psig_desc with + | Psig_type (rec_flag, sdecls) -> + let decls = Typedecl.approx_type_decl sdecls in + let rem = approx_sig env srem in + map_rec_type ~rec_flag + (fun rs (id, info) -> Sig_type(id, info, rs)) decls rem + | Psig_module pmd -> + let id = Ident.create pmd.pmd_name.txt in + let md = approx_module_declaration env pmd in + let newenv = Env.enter_module_declaration id md env in + Sig_module(id, md, Trec_not) :: approx_sig newenv srem + | Psig_recmodule sdecls -> + let decls = + List.map + (fun pmd -> + (Ident.create pmd.pmd_name.txt, + approx_module_declaration env pmd) + ) + sdecls + in + let newenv = + List.fold_left + (fun env (id, md) -> Env.add_module_declaration ~check:false + id md env) + env decls in + map_rec (fun rs (id, md) -> Sig_module(id, md, rs)) decls + (approx_sig newenv srem) + | Psig_modtype d -> + let info = approx_modtype_info env d in + let (id, newenv) = Env.enter_modtype d.pmtd_name.txt info env in + Sig_modtype(id, info) :: approx_sig newenv srem + | Psig_open sod -> + let (_path, mty, _od) = type_open env sod in + approx_sig mty srem + | Psig_include sincl -> + let smty = sincl.pincl_mod in + let mty = approx_modtype env smty in + let sg = Subst.signature Subst.identity + (extract_sig env smty.pmty_loc mty) in + let newenv = Env.add_signature sg env in + sg @ approx_sig newenv srem + | Psig_class sdecls | Psig_class_type sdecls -> + let decls = Typeclass.approx_class_declarations env sdecls in + let rem = approx_sig env srem in + List.flatten + (map_rec + (fun rs decl -> + let open Typeclass in + [Sig_class_type(decl.clsty_ty_id, decl.clsty_ty_decl, rs); + Sig_type(decl.clsty_obj_id, decl.clsty_obj_abbr, rs); + Sig_type(decl.clsty_typesharp_id, decl.clsty_abbr, rs)]) + decls [rem]) + | _ -> + approx_sig env srem + +and approx_modtype_info env sinfo = + { + mtd_type = may_map (approx_modtype env) sinfo.pmtd_type; + mtd_attributes = sinfo.pmtd_attributes; + mtd_loc = sinfo.pmtd_loc; + } + +let approx_modtype env smty = + Warnings.without_warnings + (fun () -> approx_modtype env smty) + +(* Additional validity checks on type definitions arising from + recursive modules *) + +let check_recmod_typedecls env sdecls decls = + let recmod_ids = List.map fst3 decls in + List.iter2 + (fun pmd (id, _, mty) -> + let mty = mty.mty_type in + List.iter + (fun path -> + Typedecl.check_recmod_typedecl env pmd.pmd_type.pmty_loc recmod_ids + path (Env.find_type path env)) + (Mtype.type_paths 
env (Pident id) mty)) + sdecls decls + +(* Auxiliaries for checking uniqueness of names in signatures and structures *) + +module StringSet = + Set.Make(struct type t = string let compare (x:t) y = String.compare x y end) + +let check cl loc set_ref name = + if StringSet.mem name !set_ref + then raise(Error(loc, Env.empty, Repeated_name(cl, name))) + else set_ref := StringSet.add name !set_ref + +type names = + { + types: StringSet.t ref; + modules: StringSet.t ref; + modtypes: StringSet.t ref; + typexts: StringSet.t ref; + } + +let new_names () = + { + types = ref StringSet.empty; + modules = ref StringSet.empty; + modtypes = ref StringSet.empty; + typexts = ref StringSet.empty; + } + + +let check_name check names name = check names name.loc name.txt +let check_type names loc s = check "type" loc names.types s +let check_module names loc s = check "module" loc names.modules s +let check_modtype names loc s = check "module type" loc names.modtypes s +let check_typext names loc s = check "extension constructor" loc names.typexts s + + +let check_sig_item names loc = function + | Sig_type(id, _, _) -> check_type names loc (Ident.name id) + | Sig_module(id, _, _) -> check_module names loc (Ident.name id) + | Sig_modtype(id, _) -> check_modtype names loc (Ident.name id) + | Sig_typext(id, _, _) -> check_typext names loc (Ident.name id) + | _ -> () + +(* Simplify multiple specifications of a value or an extension in a signature. + (Other signature components, e.g. types, modules, etc, are checked for + name uniqueness.) If multiple specifications with the same name, + keep only the last (rightmost) one. *) + +let simplify_signature sg = + let rec aux = function + | [] -> [], StringSet.empty + | (Sig_value(id, _descr) as component) :: sg -> + let (sg, val_names) as k = aux sg in + let name = Ident.name id in + if StringSet.mem name val_names then k + else (component :: sg, StringSet.add name val_names) + | component :: sg -> + let (sg, val_names) = aux sg in + (component :: sg, val_names) + in + let (sg, _) = aux sg in + sg + +(* Check and translate a module type expression *) + +let transl_modtype_longident loc env lid = + let (path, _info) = Typetexp.find_modtype env loc lid in + path + +let transl_module_alias loc env lid = + Typetexp.lookup_module env loc lid + +let mkmty desc typ env loc attrs = + let mty = { + mty_desc = desc; + mty_type = typ; + mty_loc = loc; + mty_env = env; + mty_attributes = attrs; + } in + Cmt_format.add_saved_type (Cmt_format.Partial_module_type mty); + mty + +let mksig desc env loc = + let sg = { sig_desc = desc; sig_loc = loc; sig_env = env } in + Cmt_format.add_saved_type (Cmt_format.Partial_signature_item sg); + sg + +(* let signature sg = List.map (fun item -> item.sig_type) sg *) + +let rec transl_modtype env smty = + Builtin_attributes.warning_scope smty.pmty_attributes + (fun () -> transl_modtype_aux env smty) + +and transl_modtype_aux env smty = + let loc = smty.pmty_loc in + match smty.pmty_desc with + Pmty_ident lid -> + let path = transl_modtype_longident loc env lid.txt in + mkmty (Tmty_ident (path, lid)) (Mty_ident path) env loc + smty.pmty_attributes + | Pmty_alias lid -> + let path = transl_module_alias loc env lid.txt in + mkmty (Tmty_alias (path, lid)) (Mty_alias(Mta_absent, path)) env loc + smty.pmty_attributes + | Pmty_signature ssg -> + let sg = transl_signature env ssg in + mkmty (Tmty_signature sg) (Mty_signature sg.sig_type) env loc + smty.pmty_attributes + | Pmty_functor(param, sarg, sres) -> + let arg = Misc.may_map (transl_modtype env) 
sarg in + let ty_arg = Misc.may_map (fun m -> m.mty_type) arg in + let (id, newenv) = + Env.enter_module ~arg:true param.txt (Btype.default_mty ty_arg) env in + Ctype.init_def(Ident.current_time()); (* PR#6513 *) + let res = transl_modtype newenv sres in + mkmty (Tmty_functor (id, param, arg, res)) + (Mty_functor(id, ty_arg, res.mty_type)) env loc + smty.pmty_attributes + | Pmty_with(sbody, constraints) -> + let body = transl_modtype env sbody in + let init_sg = extract_sig env sbody.pmty_loc body.mty_type in + let (rev_tcstrs, final_sg) = + List.fold_left + (fun (rev_tcstrs,sg) sdecl -> + let (tcstr, sg) = merge_constraint env smty.pmty_loc sg sdecl + in + (tcstr :: rev_tcstrs, sg) + ) + ([],init_sg) constraints in + mkmty (Tmty_with ( body, List.rev rev_tcstrs)) + (Mtype.freshen (Mty_signature final_sg)) env loc + smty.pmty_attributes + | Pmty_typeof smod -> + let env = Env.in_signature false env in + let tmty, mty = !type_module_type_of_fwd env smod in + mkmty (Tmty_typeof tmty) mty env loc smty.pmty_attributes + | Pmty_extension ext -> + raise (Error_forward (Builtin_attributes.error_of_extension ext)) + +and transl_signature env sg = + let names = new_names () in + let rec transl_sig env sg = + Ctype.init_def(Ident.current_time()); + match sg with + [] -> [], [], env + | item :: srem -> + let loc = item.psig_loc in + match item.psig_desc with + | Psig_value sdesc -> + let (tdesc, newenv) = + Typedecl.transl_value_decl env item.psig_loc sdesc + in + let (trem,rem, final_env) = transl_sig newenv srem in + mksig (Tsig_value tdesc) env loc :: trem, + Sig_value(tdesc.val_id, tdesc.val_val) :: rem, + final_env + | Psig_type (rec_flag, sdecls) -> + List.iter + (fun decl -> check_name check_type names decl.ptype_name) + sdecls; + let (decls, newenv) = + Typedecl.transl_type_decl env rec_flag sdecls + in + let (trem, rem, final_env) = transl_sig newenv srem in + mksig (Tsig_type (rec_flag, decls)) env loc :: trem, + map_rec_type_with_row_types ~rec_flag + (fun rs td -> Sig_type(td.typ_id, td.typ_type, rs)) decls rem, + final_env + | Psig_typext styext -> + List.iter + (fun pext -> check_name check_typext names pext.pext_name) + styext.ptyext_constructors; + let (tyext, newenv) = + Typedecl.transl_type_extension false env item.psig_loc styext + in + let (trem, rem, final_env) = transl_sig newenv srem in + let constructors = tyext.tyext_constructors in + mksig (Tsig_typext tyext) env loc :: trem, + map_ext (fun es ext -> + Sig_typext(ext.ext_id, ext.ext_type, es)) constructors rem, + final_env + | Psig_exception sext -> + check_name check_typext names sext.pext_name; + let (ext, newenv) = Typedecl.transl_exception env sext in + let (trem, rem, final_env) = transl_sig newenv srem in + mksig (Tsig_exception ext) env loc :: trem, + Sig_typext(ext.ext_id, ext.ext_type, Text_exception) :: rem, + final_env + | Psig_module pmd -> + check_name check_module names pmd.pmd_name; + let id = Ident.create pmd.pmd_name.txt in + let tmty = + Builtin_attributes.warning_scope pmd.pmd_attributes + (fun () -> transl_modtype env pmd.pmd_type) + in + let md = { + md_type=tmty.mty_type; + md_attributes=pmd.pmd_attributes; + md_loc=pmd.pmd_loc; + } + in + let newenv = Env.enter_module_declaration id md env in + let (trem, rem, final_env) = transl_sig newenv srem in + mksig (Tsig_module {md_id=id; md_name=pmd.pmd_name; md_type=tmty; + md_loc=pmd.pmd_loc; + md_attributes=pmd.pmd_attributes}) + env loc :: trem, + Sig_module(id, md, Trec_not) :: rem, + final_env + | Psig_recmodule sdecls -> + List.iter + (fun pmd -> 
check_name check_module names pmd.pmd_name) + sdecls; + let (decls, newenv) = + transl_recmodule_modtypes env sdecls in + let (trem, rem, final_env) = transl_sig newenv srem in + mksig (Tsig_recmodule decls) env loc :: trem, + map_rec (fun rs md -> + let d = {Types.md_type = md.md_type.mty_type; + md_attributes = md.md_attributes; + md_loc = md.md_loc; + } in + Sig_module(md.md_id, d, rs)) + decls rem, + final_env + | Psig_modtype pmtd -> + let newenv, mtd, sg = + transl_modtype_decl names env pmtd + in + let (trem, rem, final_env) = transl_sig newenv srem in + mksig (Tsig_modtype mtd) env loc :: trem, + sg :: rem, + final_env + | Psig_open sod -> + let (_path, newenv, od) = type_open env sod in + let (trem, rem, final_env) = transl_sig newenv srem in + mksig (Tsig_open od) env loc :: trem, + rem, final_env + | Psig_include sincl -> + let smty = sincl.pincl_mod in + let tmty = + Builtin_attributes.warning_scope sincl.pincl_attributes + (fun () -> transl_modtype env smty) + in + let mty = tmty.mty_type in + let sg = Subst.signature Subst.identity + (extract_sig env smty.pmty_loc mty) in + List.iter (check_sig_item names item.psig_loc) sg; + let newenv = Env.add_signature sg env in + let incl = + { incl_mod = tmty; + incl_type = sg; + incl_attributes = sincl.pincl_attributes; + incl_loc = sincl.pincl_loc; + } + in + let (trem, rem, final_env) = transl_sig newenv srem in + mksig (Tsig_include incl) env loc :: trem, + sg @ rem, + final_env + | Psig_class cl -> + List.iter + (fun {pci_name} -> check_name check_type names pci_name) + cl; + let (classes, newenv) = Typeclass.class_descriptions env cl in + let (trem, rem, final_env) = transl_sig newenv srem in + mksig (Tsig_class + (List.map (fun decr -> + decr.Typeclass.cls_info) classes)) env loc + :: trem, + List.flatten + (map_rec + (fun rs cls -> + let open Typeclass in + [Sig_class(cls.cls_id, cls.cls_decl, rs); + Sig_class_type(cls.cls_ty_id, cls.cls_ty_decl, rs); + Sig_type(cls.cls_obj_id, cls.cls_obj_abbr, rs); + Sig_type(cls.cls_typesharp_id, cls.cls_abbr, rs)]) + classes [rem]), + final_env + | Psig_class_type cl -> + List.iter + (fun {pci_name} -> check_name check_type names pci_name) + cl; + let (classes, newenv) = Typeclass.class_type_declarations env cl in + let (trem,rem, final_env) = transl_sig newenv srem in + mksig (Tsig_class_type + (List.map (fun decl -> decl.Typeclass.clsty_info) classes)) + env loc :: trem, + List.flatten + (map_rec + (fun rs decl -> + let open Typeclass in + [Sig_class_type(decl.clsty_ty_id, decl.clsty_ty_decl, rs); + Sig_type(decl.clsty_obj_id, decl.clsty_obj_abbr, rs); + Sig_type(decl.clsty_typesharp_id, decl.clsty_abbr, rs)]) + classes [rem]), + final_env + | Psig_attribute x -> + Builtin_attributes.warning_attribute x; + let (trem,rem, final_env) = transl_sig env srem in + mksig (Tsig_attribute x) env loc :: trem, rem, final_env + | Psig_extension (ext, _attrs) -> + raise (Error_forward (Builtin_attributes.error_of_extension ext)) + in + let previous_saved_types = Cmt_format.get_saved_types () in + Builtin_attributes.warning_scope [] + (fun () -> + let (trem, rem, final_env) = transl_sig (Env.in_signature true env) sg in + let rem = simplify_signature rem in + let sg = { sig_items = trem; sig_type = rem; sig_final_env = final_env } in + Cmt_format.set_saved_types + ((Cmt_format.Partial_signature sg) :: previous_saved_types); + sg + ) + +and transl_modtype_decl names env pmtd = + Builtin_attributes.warning_scope pmtd.pmtd_attributes + (fun () -> transl_modtype_decl_aux names env pmtd) + +and 
transl_modtype_decl_aux names env + {pmtd_name; pmtd_type; pmtd_attributes; pmtd_loc} = + check_name check_modtype names pmtd_name; + let tmty = Misc.may_map (transl_modtype env) pmtd_type in + let decl = + { + Types.mtd_type=may_map (fun t -> t.mty_type) tmty; + mtd_attributes=pmtd_attributes; + mtd_loc=pmtd_loc; + } + in + let (id, newenv) = Env.enter_modtype pmtd_name.txt decl env in + let mtd = + { + mtd_id=id; + mtd_name=pmtd_name; + mtd_type=tmty; + mtd_attributes=pmtd_attributes; + mtd_loc=pmtd_loc; + } + in + newenv, mtd, Sig_modtype(id, decl) + +and transl_recmodule_modtypes env sdecls = + let make_env curr = + List.fold_left + (fun env (id, _, mty) -> Env.add_module ~arg:true id mty env) + env curr in + let make_env2 curr = + List.fold_left + (fun env (id, _, mty) -> Env.add_module ~arg:true id mty.mty_type env) + env curr in + let transition env_c curr = + List.map2 + (fun pmd (id, id_loc, _mty) -> + let tmty = + Builtin_attributes.warning_scope pmd.pmd_attributes + (fun () -> transl_modtype env_c pmd.pmd_type) + in + (id, id_loc, tmty)) + sdecls curr in + let ids = List.map (fun x -> Ident.create x.pmd_name.txt) sdecls in + let approx_env = + (* + cf #5965 + We use a dummy module type in order to detect a reference to one + of the module being defined during the call to approx_modtype. + It will be detected in Env.lookup_module. + *) + List.fold_left + (fun env id -> + let dummy = Mty_ident (Path.Pident (Ident.create "#recmod#")) in + Env.add_module ~arg:true id dummy env + ) + env ids + in + Ctype.init_def(Ident.current_time()); (* PR#7082 *) + let init = + List.map2 + (fun id pmd -> + (id, pmd.pmd_name, approx_modtype approx_env pmd.pmd_type)) + ids sdecls + in + let env0 = make_env init in + let dcl1 = + Warnings.without_warnings + (fun () -> transition env0 init) + in + let env1 = make_env2 dcl1 in + check_recmod_typedecls env1 sdecls dcl1; + let dcl2 = transition env1 dcl1 in +(* + List.iter + (fun (id, mty) -> + Format.printf "%a: %a@." Printtyp.ident id Printtyp.modtype mty) + dcl2; +*) + let env2 = make_env2 dcl2 in + check_recmod_typedecls env2 sdecls dcl2; + let dcl2 = + List.map2 + (fun pmd (id, id_loc, mty) -> + {md_id=id; md_name=id_loc; md_type=mty; + md_loc=pmd.pmd_loc; + md_attributes=pmd.pmd_attributes}) + sdecls dcl2 + in + (dcl2, env2) + +(* Try to convert a module expression to a module path. 
*) + +exception Not_a_path + +let rec path_of_module mexp = + match mexp.mod_desc with + Tmod_ident (p,_) -> p + | Tmod_apply(funct, arg, _coercion) when !Clflags.applicative_functors -> + Papply(path_of_module funct, path_of_module arg) + | Tmod_constraint (mexp, _, _, _) -> + path_of_module mexp + | _ -> raise Not_a_path + +let path_of_module mexp = + try Some (path_of_module mexp) with Not_a_path -> None + +(* Check that all core type schemes in a structure are closed *) + +let rec closed_modtype env = function + Mty_ident _ -> true + | Mty_alias _ -> true + | Mty_signature sg -> + let env = Env.add_signature sg env in + List.for_all (closed_signature_item env) sg + | Mty_functor(id, param, body) -> + let env = Env.add_module ~arg:true id (Btype.default_mty param) env in + closed_modtype env body + +and closed_signature_item env = function + Sig_value(_id, desc) -> Ctype.closed_schema env desc.val_type + | Sig_module(_id, md, _) -> closed_modtype env md.md_type + | _ -> true + +let check_nongen_scheme env sig_item = + match sig_item with + Sig_value(_id, vd) -> + if not (Ctype.closed_schema env vd.val_type) then + raise (Error (vd.val_loc, env, Non_generalizable vd.val_type)) + | Sig_module (_id, md, _) -> + if not (closed_modtype env md.md_type) then + raise(Error(md.md_loc, env, Non_generalizable_module md.md_type)) + | _ -> () + +let check_nongen_schemes env sg = + List.iter (check_nongen_scheme env) sg + +(* Helpers for typing recursive modules *) + +let anchor_submodule name anchor = + match anchor with None -> None | Some p -> Some(Pdot(p, name, nopos)) +let anchor_recmodule id = + Some (Pident id) + +let enrich_type_decls anchor decls oldenv newenv = + match anchor with + None -> newenv + | Some p -> + List.fold_left + (fun e info -> + let id = info.typ_id in + let info' = + Mtype.enrich_typedecl oldenv (Pdot(p, Ident.name id, nopos)) + info.typ_type + in + Env.add_type ~check:true id info' e) + oldenv decls + +let enrich_module_type anchor name mty env = + match anchor with + None -> mty + | Some p -> Mtype.enrich_modtype env (Pdot(p, name, nopos)) mty + +let check_recmodule_inclusion env bindings = + (* PR#4450, PR#4470: consider + module rec X : DECL = MOD where MOD has inferred type ACTUAL + The "natural" typing condition + E, X: ACTUAL |- ACTUAL <: DECL + leads to circularities through manifest types. + Instead, we "unroll away" the potential circularities a finite number + of times. The (weaker) condition we implement is: + E, X: DECL, + X1: ACTUAL, + X2: ACTUAL{X <- X1}/X1 + ... + Xn: ACTUAL{X <- X(n-1)}/X(n-1) + |- ACTUAL{X <- Xn}/Xn <: DECL{X <- Xn} + so that manifest types rooted at X(n+1) are expanded in terms of X(n), + avoiding circularities. The strengthenings ensure that + Xn.t = X(n-1).t = ... = X2.t = X1.t. + N can be chosen arbitrarily; larger values of N result in more + recursive definitions being accepted. A good choice appears to be + the number of mutually recursive declarations. *) + + let subst_and_strengthen env s id mty = + Mtype.strengthen ~aliasable:false env (Subst.modtype s mty) + (Subst.module_path s (Pident id)) in + + let rec check_incl first_time n env s = + if n > 0 then begin + (* Generate fresh names Y_i for the rec. 
bound module idents X_i *) + let bindings1 = + List.map + (fun (id, _, _mty_decl, _modl, mty_actual, _attrs, _loc) -> + (id, Ident.rename id, mty_actual)) + bindings in + (* Enter the Y_i in the environment with their actual types substituted + by the input substitution s *) + let env' = + List.fold_left + (fun env (id, id', mty_actual) -> + let mty_actual' = + if first_time + then mty_actual + else subst_and_strengthen env s id mty_actual in + Env.add_module ~arg:false id' mty_actual' env) + env bindings1 in + (* Build the output substitution Y_i <- X_i *) + let s' = + List.fold_left + (fun s (id, id', _mty_actual) -> + Subst.add_module id (Pident id') s) + Subst.identity bindings1 in + (* Recurse with env' and s' *) + check_incl false (n-1) env' s' + end else begin + (* Base case: check inclusion of s(mty_actual) in s(mty_decl) + and insert coercion if needed *) + let check_inclusion (id, id_loc, mty_decl, modl, mty_actual, attrs, loc) = + let mty_decl' = Subst.modtype s mty_decl.mty_type + and mty_actual' = subst_and_strengthen env s id mty_actual in + let coercion = + try + Includemod.modtypes ~loc:modl.mod_loc env mty_actual' mty_decl' + with Includemod.Error msg -> + raise(Error(modl.mod_loc, env, Not_included msg)) in + let modl' = + { mod_desc = Tmod_constraint(modl, mty_decl.mty_type, + Tmodtype_explicit mty_decl, coercion); + mod_type = mty_decl.mty_type; + mod_env = env; + mod_loc = modl.mod_loc; + mod_attributes = []; + } in + { + mb_id = id; + mb_name = id_loc; + mb_expr = modl'; + mb_attributes = attrs; + mb_loc = loc; + } + in + List.map check_inclusion bindings + end + in check_incl true (List.length bindings) env Subst.identity + +(* Helper for unpack *) + +let rec package_constraints env loc mty constrs = + if constrs = [] then mty + else let sg = extract_sig env loc mty in + let sg' = + List.map + (function + | Sig_type (id, ({type_params=[]} as td), rs) + when List.mem_assoc [Ident.name id] constrs -> + let ty = List.assoc [Ident.name id] constrs in + Sig_type (id, {td with type_manifest = Some ty}, rs) + | Sig_module (id, md, rs) -> + let rec aux = function + | (m :: ((_ :: _) as l), t) :: rest when m = Ident.name id -> + (l, t) :: aux rest + | _ :: rest -> aux rest + | [] -> [] + in + let md = + {md with + md_type = package_constraints env loc md.md_type (aux constrs) + } + in + Sig_module (id, md, rs) + | item -> item + ) + sg + in + Mty_signature sg' + +let modtype_of_package env loc p nl tl = + try match (Env.find_modtype p env).mtd_type with + | Some mty when nl <> [] -> + package_constraints env loc mty + (List.combine (List.map Longident.flatten nl) tl) + | _ -> + if nl = [] then Mty_ident p + else raise(Error(loc, env, Signature_expected)) + with Not_found -> + let error = Typetexp.Unbound_modtype (Ctype.lid_of_path p) in + raise(Typetexp.Error(loc, env, error)) + +let package_subtype env p1 nl1 tl1 p2 nl2 tl2 = + let mkmty p nl tl = + let ntl = + List.filter (fun (_n,t) -> Ctype.free_variables t = []) + (List.combine nl tl) in + let (nl, tl) = List.split ntl in + modtype_of_package env Location.none p nl tl + in + let mty1 = mkmty p1 nl1 tl1 and mty2 = mkmty p2 nl2 tl2 in + try Includemod.modtypes ~loc:Location.none env mty1 mty2 = Tcoerce_none + with Includemod.Error _msg -> false + (* raise(Error(Location.none, env, Not_included msg)) *) + +let () = Ctype.package_subtype := package_subtype + +let wrap_constraint env arg mty explicit = + let coercion = + try + Includemod.modtypes ~loc:arg.mod_loc env arg.mod_type mty + with Includemod.Error msg -> + 
raise(Error(arg.mod_loc, env, Not_included msg)) in + { mod_desc = Tmod_constraint(arg, mty, explicit, coercion); + mod_type = mty; + mod_env = env; + mod_attributes = []; + mod_loc = arg.mod_loc } + +(* Type a module value expression *) + +let rec type_module ?(alias=false) sttn funct_body anchor env smod = + Builtin_attributes.warning_scope smod.pmod_attributes + (fun () -> type_module_aux ~alias sttn funct_body anchor env smod) + +and type_module_aux ~alias sttn funct_body anchor env smod = + match smod.pmod_desc with + Pmod_ident lid -> + let path = + Typetexp.lookup_module ~load:(not alias) env smod.pmod_loc lid.txt in + let md = { mod_desc = Tmod_ident (path, lid); + mod_type = Mty_alias(Mta_absent, path); + mod_env = env; + mod_attributes = smod.pmod_attributes; + mod_loc = smod.pmod_loc } in + let aliasable = not (Env.is_functor_arg path env) in + let md = + if alias && aliasable then + (Env.add_required_global (Path.head path); md) + else match (Env.find_module path env).md_type with + Mty_alias(_, p1) when not alias -> + let p1 = Env.normalize_path (Some smod.pmod_loc) env p1 in + let mty = Includemod.expand_module_alias env [] p1 in + { md with + mod_desc = Tmod_constraint (md, mty, Tmodtype_implicit, + Tcoerce_alias (p1, Tcoerce_none)); + mod_type = + if sttn then Mtype.strengthen ~aliasable:true env mty p1 + else mty } + | mty -> + let mty = + if sttn then Mtype.strengthen ~aliasable env mty path + else mty + in + { md with mod_type = mty } + in rm md + | Pmod_structure sstr -> + let (str, sg, _finalenv) = + type_structure funct_body anchor env sstr smod.pmod_loc in + let md = + rm { mod_desc = Tmod_structure str; + mod_type = Mty_signature sg; + mod_env = env; + mod_attributes = smod.pmod_attributes; + mod_loc = smod.pmod_loc } + in + let sg' = simplify_signature sg in + if List.length sg' = List.length sg then md else + wrap_constraint (Env.implicit_coercion env) md (Mty_signature sg') + Tmodtype_implicit + | Pmod_functor(name, smty, sbody) -> + let mty = may_map (transl_modtype env) smty in + let ty_arg = may_map (fun m -> m.mty_type) mty in + let (id, newenv), funct_body = + match ty_arg with None -> (Ident.create "*", env), false + | Some mty -> Env.enter_module ~arg:true name.txt mty env, true in + Ctype.init_def(Ident.current_time()); (* PR#6981 *) + let body = type_module sttn funct_body None newenv sbody in + rm { mod_desc = Tmod_functor(id, name, mty, body); + mod_type = Mty_functor(id, ty_arg, body.mod_type); + mod_env = env; + mod_attributes = smod.pmod_attributes; + mod_loc = smod.pmod_loc } + | Pmod_apply(sfunct, sarg) -> + let arg = type_module true funct_body None env sarg in + let path = path_of_module arg in + let funct = + type_module (sttn && path <> None) funct_body None env sfunct in + begin match Env.scrape_alias env funct.mod_type with + Mty_functor(param, mty_param, mty_res) as mty_functor -> + let generative, mty_param = + (mty_param = None, Btype.default_mty mty_param) in + if generative then begin + if sarg.pmod_desc <> Pmod_structure [] then + raise (Error (sfunct.pmod_loc, env, Apply_generative)); + if funct_body && Mtype.contains_type env funct.mod_type then + raise (Error (smod.pmod_loc, env, Not_allowed_in_functor_body)); + end; + let coercion = + try + Includemod.modtypes ~loc:sarg.pmod_loc env arg.mod_type mty_param + with Includemod.Error msg -> + raise(Error(sarg.pmod_loc, env, Not_included msg)) in + let mty_appl = + match path with + Some path -> + Subst.modtype (Subst.add_module param path Subst.identity) + mty_res + | None -> + if 
generative then mty_res else + try + Mtype.nondep_supertype + (Env.add_module ~arg:true param arg.mod_type env) + param mty_res + with Not_found -> + raise(Error(smod.pmod_loc, env, + Cannot_eliminate_dependency mty_functor)) + in + rm { mod_desc = Tmod_apply(funct, arg, coercion); + mod_type = mty_appl; + mod_env = env; + mod_attributes = smod.pmod_attributes; + mod_loc = smod.pmod_loc } + | Mty_alias(_, path) -> + raise(Error(sfunct.pmod_loc, env, Cannot_scrape_alias path)) + | _ -> + raise(Error(sfunct.pmod_loc, env, Cannot_apply funct.mod_type)) + end + | Pmod_constraint(sarg, smty) -> + let arg = type_module ~alias true funct_body anchor env sarg in + let mty = transl_modtype env smty in + rm {(wrap_constraint env arg mty.mty_type (Tmodtype_explicit mty)) with + mod_loc = smod.pmod_loc; + mod_attributes = smod.pmod_attributes; + } + + | Pmod_unpack sexp -> + if !Clflags.principal then Ctype.begin_def (); + let exp = Typecore.type_exp env sexp in + if !Clflags.principal then begin + Ctype.end_def (); + Ctype.generalize_structure exp.exp_type + end; + let mty = + match Ctype.expand_head env exp.exp_type with + {desc = Tpackage (p, nl, tl)} -> + if List.exists (fun t -> Ctype.free_variables t <> []) tl then + raise (Error (smod.pmod_loc, env, + Incomplete_packed_module exp.exp_type)); + if !Clflags.principal && + not (Typecore.generalizable (Btype.generic_level-1) exp.exp_type) + then + Location.prerr_warning smod.pmod_loc + (Warnings.Not_principal "this module unpacking"); + modtype_of_package env smod.pmod_loc p nl tl + | {desc = Tvar _} -> + raise (Typecore.Error + (smod.pmod_loc, env, Typecore.Cannot_infer_signature)) + | _ -> + raise (Error(smod.pmod_loc, env, Not_a_packed_module exp.exp_type)) + in + if funct_body && Mtype.contains_type env mty then + raise (Error (smod.pmod_loc, env, Not_allowed_in_functor_body)); + rm { mod_desc = Tmod_unpack(exp, mty); + mod_type = mty; + mod_env = env; + mod_attributes = smod.pmod_attributes; + mod_loc = smod.pmod_loc } + | Pmod_extension ext -> + raise (Error_forward (Builtin_attributes.error_of_extension ext)) + +and type_structure ?(toplevel = false) funct_body anchor env sstr scope = + let names = new_names () in + + let type_str_item env srem {pstr_loc = loc; pstr_desc = desc} = + match desc with + | Pstr_eval (sexpr, attrs) -> + let expr = + Builtin_attributes.warning_scope attrs + (fun () -> Typecore.type_expression env sexpr) + in + Tstr_eval (expr, attrs), [], env + | Pstr_value(rec_flag, sdefs) -> + let scope = + match rec_flag with + | Recursive -> + Some (Annot.Idef {scope with + Location.loc_start = loc.Location.loc_start}) + | Nonrecursive -> + let start = + match srem with + | [] -> loc.Location.loc_end + | {pstr_loc = loc2} :: _ -> loc2.Location.loc_start + in + Some (Annot.Idef {scope with Location.loc_start = start}) + in + let (defs, newenv) = + Typecore.type_binding env rec_flag sdefs scope in + let () = if rec_flag = Recursive then + Typecore.check_recursive_bindings env defs + in + (* Note: Env.find_value does not trigger the value_used event. Values + will be marked as being used during the signature inclusion test. 
*) + Tstr_value(rec_flag, defs), + List.map (fun id -> Sig_value(id, Env.find_value (Pident id) newenv)) + (let_bound_idents defs), + newenv + | Pstr_primitive sdesc -> + let (desc, newenv) = Typedecl.transl_value_decl env loc sdesc in + Tstr_primitive desc, [Sig_value(desc.val_id, desc.val_val)], newenv + | Pstr_type (rec_flag, sdecls) -> + List.iter + (fun decl -> check_name check_type names decl.ptype_name) + sdecls; + let (decls, newenv) = Typedecl.transl_type_decl env rec_flag sdecls in + Tstr_type (rec_flag, decls), + map_rec_type_with_row_types ~rec_flag + (fun rs info -> Sig_type(info.typ_id, info.typ_type, rs)) + decls [], + enrich_type_decls anchor decls env newenv + | Pstr_typext styext -> + List.iter + (fun pext -> check_name check_typext names pext.pext_name) + styext.ptyext_constructors; + let (tyext, newenv) = + Typedecl.transl_type_extension true env loc styext + in + (Tstr_typext tyext, + map_ext + (fun es ext -> Sig_typext(ext.ext_id, ext.ext_type, es)) + tyext.tyext_constructors [], + newenv) + | Pstr_exception sext -> + check_name check_typext names sext.pext_name; + let (ext, newenv) = Typedecl.transl_exception env sext in + Tstr_exception ext, + [Sig_typext(ext.ext_id, ext.ext_type, Text_exception)], + newenv + | Pstr_module {pmb_name = name; pmb_expr = smodl; pmb_attributes = attrs; + pmb_loc; + } -> + check_name check_module names name; + let id = Ident.create name.txt in (* create early for PR#6752 *) + let modl = + Builtin_attributes.warning_scope attrs + (fun () -> + type_module ~alias:true true funct_body + (anchor_submodule name.txt anchor) env smodl + ) + in + let md = + { md_type = enrich_module_type anchor name.txt modl.mod_type env; + md_attributes = attrs; + md_loc = pmb_loc; + } + in + (*prerr_endline (Ident.unique_toplevel_name id);*) + Mtype.lower_nongen (Ident.binding_time id - 1) md.md_type; + let newenv = Env.enter_module_declaration id md env in + Tstr_module {mb_id=id; mb_name=name; mb_expr=modl; + mb_attributes=attrs; mb_loc=pmb_loc; + }, + [Sig_module(id, + {md_type = modl.mod_type; + md_attributes = attrs; + md_loc = pmb_loc; + }, Trec_not)], + newenv + | Pstr_recmodule sbind -> + let sbind = + List.map + (function + | {pmb_name = name; + pmb_expr = {pmod_desc=Pmod_constraint(expr, typ)}; + pmb_attributes = attrs; + pmb_loc = loc; + } -> + name, typ, expr, attrs, loc + | mb -> + raise (Error (mb.pmb_expr.pmod_loc, env, + Recursive_module_require_explicit_type)) + ) + sbind + in + List.iter + (fun (name, _, _, _, _) -> check_name check_module names name) + sbind; + let (decls, newenv) = + transl_recmodule_modtypes env + (List.map (fun (name, smty, _smodl, attrs, loc) -> + {pmd_name=name; pmd_type=smty; + pmd_attributes=attrs; pmd_loc=loc}) sbind + ) in + let bindings1 = + List.map2 + (fun {md_id=id; md_type=mty} (name, _, smodl, attrs, loc) -> + let modl = + Builtin_attributes.warning_scope attrs + (fun () -> + type_module true funct_body (anchor_recmodule id) + newenv smodl + ) + in + let mty' = + enrich_module_type anchor (Ident.name id) modl.mod_type newenv + in + (id, name, mty, modl, mty', attrs, loc)) + decls sbind in + let newenv = (* allow aliasing recursive modules from outside *) + List.fold_left + (fun env md -> + let mdecl = + { + md_type = md.md_type.mty_type; + md_attributes = md.md_attributes; + md_loc = md.md_loc; + } + in + Env.add_module_declaration ~check:true md.md_id mdecl env + ) + env decls + in + let bindings2 = + check_recmodule_inclusion newenv bindings1 in + Tstr_recmodule bindings2, + map_rec (fun rs mb -> + 
Sig_module(mb.mb_id, { + md_type=mb.mb_expr.mod_type; + md_attributes=mb.mb_attributes; + md_loc=mb.mb_loc; + }, rs)) + bindings2 [], + newenv + | Pstr_modtype pmtd -> + (* check that it is non-abstract *) + let newenv, mtd, sg = + transl_modtype_decl names env pmtd + in + Tstr_modtype mtd, [sg], newenv + | Pstr_open sod -> + let (_path, newenv, od) = type_open ~toplevel env sod in + Tstr_open od, [], newenv + | Pstr_class cl -> + List.iter + (fun {pci_name} -> check_name check_type names pci_name) + cl; + let (classes, new_env) = Typeclass.class_declarations env cl in + Tstr_class + (List.map (fun cls -> + (cls.Typeclass.cls_info, + cls.Typeclass.cls_pub_methods)) classes), +(* TODO: check with Jacques why this is here + Tstr_class_type + (List.map (fun (_,_, i, d, _,_,_,_,_,_,c) -> (i, c)) classes) :: + Tstr_type + (List.map (fun (_,_,_,_, i, d, _,_,_,_,_) -> (i, d)) classes) :: + Tstr_type + (List.map (fun (_,_,_,_,_,_, i, d, _,_,_) -> (i, d)) classes) :: +*) + List.flatten + (map_rec + (fun rs cls -> + let open Typeclass in + [Sig_class(cls.cls_id, cls.cls_decl, rs); + Sig_class_type(cls.cls_ty_id, cls.cls_ty_decl, rs); + Sig_type(cls.cls_obj_id, cls.cls_obj_abbr, rs); + Sig_type(cls.cls_typesharp_id, cls.cls_abbr, rs)]) + classes []), + new_env + | Pstr_class_type cl -> + List.iter + (fun {pci_name} -> check_name check_type names pci_name) + cl; + let (classes, new_env) = Typeclass.class_type_declarations env cl in + Tstr_class_type + (List.map (fun cl -> + (cl.Typeclass.clsty_ty_id, + cl.Typeclass.clsty_id_loc, + cl.Typeclass.clsty_info)) classes), +(* TODO: check with Jacques why this is here + Tstr_type + (List.map (fun (_, _, i, d, _, _) -> (i, d)) classes) :: + Tstr_type + (List.map (fun (_, _, _, _, i, d) -> (i, d)) classes) :: *) + List.flatten + (map_rec + (fun rs decl -> + let open Typeclass in + [Sig_class_type(decl.clsty_ty_id, decl.clsty_ty_decl, rs); + Sig_type(decl.clsty_obj_id, decl.clsty_obj_abbr, rs); + Sig_type(decl.clsty_typesharp_id, decl.clsty_abbr, rs)]) + classes []), + new_env + | Pstr_include sincl -> + let smodl = sincl.pincl_mod in + let modl = + Builtin_attributes.warning_scope sincl.pincl_attributes + (fun () -> type_module true funct_body None env smodl) + in + (* Rename all identifiers bound by this signature to avoid clashes *) + let sg = Subst.signature Subst.identity + (extract_sig_open env smodl.pmod_loc modl.mod_type) in + List.iter (check_sig_item names loc) sg; + let new_env = Env.add_signature sg env in + let incl = + { incl_mod = modl; + incl_type = sg; + incl_attributes = sincl.pincl_attributes; + incl_loc = sincl.pincl_loc; + } + in + Tstr_include incl, sg, new_env + | Pstr_extension (ext, _attrs) -> + raise (Error_forward (Builtin_attributes.error_of_extension ext)) + | Pstr_attribute x -> + Builtin_attributes.warning_attribute x; + Tstr_attribute x, [], env + in + let rec type_struct env sstr = + Ctype.init_def(Ident.current_time()); + match sstr with + | [] -> ([], [], env) + | pstr :: srem -> + let previous_saved_types = Cmt_format.get_saved_types () in + let desc, sg, new_env = type_str_item env srem pstr in + let str = { str_desc = desc; str_loc = pstr.pstr_loc; str_env = env } in + Cmt_format.set_saved_types (Cmt_format.Partial_structure_item str + :: previous_saved_types); + let (str_rem, sig_rem, final_env) = type_struct new_env srem in + (str :: str_rem, sg @ sig_rem, final_env) + in + if !Clflags.annotations then + (* moved to genannot *) + List.iter (function {pstr_loc = l} -> Stypes.record_phrase l) sstr; + let 
previous_saved_types = Cmt_format.get_saved_types () in + let run () = + let (items, sg, final_env) = type_struct env sstr in + let str = { str_items = items; str_type = sg; str_final_env = final_env } in + Cmt_format.set_saved_types + (Cmt_format.Partial_structure str :: previous_saved_types); + str, sg, final_env + in + if toplevel then run () + else Builtin_attributes.warning_scope [] run + +let type_toplevel_phrase env s = + Env.reset_required_globals (); + let (str, sg, env) = + type_structure ~toplevel:true false None env s Location.none in + let (str, _coerce) = ImplementationHooks.apply_hooks + { Misc.sourcefile = "//toplevel//" } (str, Tcoerce_none) + in + (str, sg, env) + +let type_module_alias = type_module ~alias:true true false None +let type_module = type_module true false None +let type_structure = type_structure false None + +(* Normalize types in a signature *) + +let rec normalize_modtype env = function + Mty_ident _ + | Mty_alias _ -> () + | Mty_signature sg -> normalize_signature env sg + | Mty_functor(_id, _param, body) -> normalize_modtype env body + +and normalize_signature env = List.iter (normalize_signature_item env) + +and normalize_signature_item env = function + Sig_value(_id, desc) -> Ctype.normalize_type env desc.val_type + | Sig_module(_id, md, _) -> normalize_modtype env md.md_type + | _ -> () + +(* Extract the module type of a module expression *) + +let type_module_type_of env smod = + let tmty = + match smod.pmod_desc with + | Pmod_ident lid -> (* turn off strengthening in this case *) + let path, md = Typetexp.find_module env smod.pmod_loc lid.txt in + rm { mod_desc = Tmod_ident (path, lid); + mod_type = md.md_type; + mod_env = env; + mod_attributes = smod.pmod_attributes; + mod_loc = smod.pmod_loc } + | _ -> type_module env smod in + let mty = tmty.mod_type in + (* PR#6307: expand aliases at root and submodules *) + let mty = Mtype.remove_aliases env mty in + (* PR#5036: must not contain non-generalized type variables *) + if not (closed_modtype env mty) then + raise(Error(smod.pmod_loc, env, Non_generalizable_module mty)); + tmty, mty + +(* For Typecore *) + +let type_package env m p nl = + (* Same as Pexp_letmodule *) + (* remember original level *) + let lv = Ctype.get_current_level () in + Ctype.begin_def (); + Ident.set_current_time lv; + let context = Typetexp.narrow () in + let modl = type_module env m in + Ctype.init_def(Ident.current_time()); + Typetexp.widen context; + let (mp, env) = + match modl.mod_desc with + Tmod_ident (mp,_) -> (mp, env) + | Tmod_constraint ({mod_desc=Tmod_ident (mp,_)}, _, Tmodtype_implicit, _) + -> (mp, env) (* PR#6982 *) + | _ -> + let (id, new_env) = Env.enter_module ~arg:true "%M" modl.mod_type env in + (Pident id, new_env) + in + let rec mkpath mp = function + | Lident name -> Pdot(mp, name, nopos) + | Ldot (m, name) -> Pdot(mkpath mp m, name, nopos) + | _ -> assert false + in + let tl' = + List.map + (fun name -> Btype.newgenty (Tconstr (mkpath mp name,[],ref Mnil))) + (* beware of interactions with Printtyp and short-path: + mp.name may have an arity > 0, cf. 
PR#7534 *) + nl in + (* go back to original level *) + Ctype.end_def (); + if nl = [] then + (wrap_constraint env modl (Mty_ident p) Tmodtype_implicit, []) + else let mty = modtype_of_package env modl.mod_loc p nl tl' in + List.iter2 + (fun n ty -> + try Ctype.unify env ty (Ctype.newvar ()) + with Ctype.Unify _ -> + raise (Error(m.pmod_loc, env, Scoping_pack (n,ty)))) + nl tl'; + (wrap_constraint env modl mty Tmodtype_implicit, tl') + +(* Fill in the forward declarations *) +let () = + Typecore.type_module := type_module_alias; + Typetexp.transl_modtype_longident := transl_modtype_longident; + Typetexp.transl_modtype := transl_modtype; + Typecore.type_open := type_open_ ?toplevel:None; + Typecore.type_package := type_package; + type_module_type_of_fwd := type_module_type_of + + +(* Typecheck an implementation file *) + +let type_implementation sourcefile outputprefix modulename initial_env ast = + Cmt_format.clear (); + try + Typecore.reset_delayed_checks (); + Env.reset_required_globals (); + if !Clflags.print_types then (* #7656 *) + Warnings.parse_options false "-32-34-37-38-60"; + let (str, sg, finalenv) = + type_structure initial_env ast (Location.in_file sourcefile) in + let simple_sg = simplify_signature sg in + if !Clflags.print_types then begin + Typecore.force_delayed_checks (); + Printtyp.wrap_printing_env initial_env + (fun () -> fprintf std_formatter "%a@." Printtyp.signature simple_sg); + (str, Tcoerce_none) (* result is ignored by Compile.implementation *) + end else begin + let sourceintf = + Filename.remove_extension sourcefile ^ !Config.interface_suffix in + if Sys.file_exists sourceintf then begin + let intf_file = + try + find_in_path_uncap !Config.load_path (modulename ^ ".cmi") + with Not_found -> + raise(Error(Location.in_file sourcefile, Env.empty, + Interface_not_compiled sourceintf)) in + let dclsig = Env.read_signature modulename intf_file in + let coercion = + Includemod.compunit initial_env sourcefile sg intf_file dclsig in + Typecore.force_delayed_checks (); + (* It is important to run these checks after the inclusion test above, + so that value declarations which are not used internally but exported + are not reported as being unused. *) + Cmt_format.save_cmt (outputprefix ^ ".cmt") modulename + (Cmt_format.Implementation str) (Some sourcefile) initial_env None; + (str, coercion) + end else begin + let coercion = + Includemod.compunit initial_env sourcefile sg + "(inferred signature)" simple_sg in + check_nongen_schemes finalenv simple_sg; + normalize_signature finalenv simple_sg; + Typecore.force_delayed_checks (); + (* See comment above. Here the target signature contains all + the value being exported. We can still capture unused + declarations like "let x = true;; let x = 1;;", because in this + case, the inferred signature contains only the last declaration. 
*) + if not !Clflags.dont_write_files then begin + let deprecated = Builtin_attributes.deprecated_of_str ast in + let cmi = + Env.save_signature ~deprecated + simple_sg modulename (outputprefix ^ ".cmi") + in + Cmt_format.save_cmt (outputprefix ^ ".cmt") modulename + (Cmt_format.Implementation str) + (Some sourcefile) initial_env (Some cmi); + end; + (str, coercion) + end + end + with e -> + Cmt_format.save_cmt (outputprefix ^ ".cmt") modulename + (Cmt_format.Partial_implementation + (Array.of_list (Cmt_format.get_saved_types ()))) + (Some sourcefile) initial_env None; + raise e + +let type_implementation sourcefile outputprefix modulename initial_env ast = + ImplementationHooks.apply_hooks { Misc.sourcefile } + (type_implementation sourcefile outputprefix modulename initial_env ast) + +let save_signature modname tsg outputprefix source_file initial_env cmi = + Cmt_format.save_cmt (outputprefix ^ ".cmti") modname + (Cmt_format.Interface tsg) (Some source_file) initial_env (Some cmi) + +let type_interface sourcefile env ast = + InterfaceHooks.apply_hooks { Misc.sourcefile } (transl_signature env ast) + +(* "Packaging" of several compilation units into one unit + having them as sub-modules. *) + +let rec package_signatures subst = function + [] -> [] + | (name, sg) :: rem -> + let sg' = Subst.signature subst sg in + let oldid = Ident.create_persistent name + and newid = Ident.create name in + Sig_module(newid, {md_type=Mty_signature sg'; + md_attributes=[]; + md_loc=Location.none; + }, + Trec_not) :: + package_signatures (Subst.add_module oldid (Pident newid) subst) rem + +let package_units initial_env objfiles cmifile modulename = + (* Read the signatures of the units *) + let units = + List.map + (fun f -> + let pref = chop_extensions f in + let modname = String.capitalize_ascii(Filename.basename pref) in + let sg = Env.read_signature modname (pref ^ ".cmi") in + if Filename.check_suffix f ".cmi" && + not(Mtype.no_code_needed_sig Env.initial_safe_string sg) + then raise(Error(Location.none, Env.empty, + Implementation_is_required f)); + (modname, Env.read_signature modname (pref ^ ".cmi"))) + objfiles in + (* Compute signature of packaged unit *) + Ident.reinit(); + let sg = package_signatures Subst.identity units in + (* See if explicit interface is provided *) + let prefix = Filename.remove_extension cmifile in + let mlifile = prefix ^ !Config.interface_suffix in + if Sys.file_exists mlifile then begin + if not (Sys.file_exists cmifile) then begin + raise(Error(Location.in_file mlifile, Env.empty, + Interface_not_compiled mlifile)) + end; + let dclsig = Env.read_signature modulename cmifile in + Cmt_format.save_cmt (prefix ^ ".cmt") modulename + (Cmt_format.Packed (sg, objfiles)) None initial_env None ; + Includemod.compunit initial_env "(obtained by packing)" sg mlifile dclsig + end else begin + (* Determine imports *) + let unit_names = List.map fst units in + let imports = + List.filter + (fun (name, _crc) -> not (List.mem name unit_names)) + (Env.imports()) in + (* Write packaged signature *) + if not !Clflags.dont_write_files then begin + let cmi = + Env.save_signature_with_imports ~deprecated:None + sg modulename + (prefix ^ ".cmi") imports + in + Cmt_format.save_cmt (prefix ^ ".cmt") modulename + (Cmt_format.Packed (cmi.Cmi_format.cmi_sign, objfiles)) None initial_env (Some cmi) + end; + Tcoerce_none + end + +(* Error report *) + +open Printtyp + +let report_error ppf = function + Cannot_apply mty -> + fprintf ppf + "@[This module is not a functor; it has type@ %a@]" modtype mty + 
| Not_included errs -> + fprintf ppf + "@[Signature mismatch:@ %a@]" Includemod.report_error errs + | Cannot_eliminate_dependency mty -> + fprintf ppf + "@[This functor has type@ %a@ \ + The parameter cannot be eliminated in the result type.@ \ + Please bind the argument to a module identifier.@]" modtype mty + | Signature_expected -> fprintf ppf "This module type is not a signature" + | Structure_expected mty -> + fprintf ppf + "@[This module is not a structure; it has type@ %a" modtype mty + | With_no_component lid -> + fprintf ppf + "@[The signature constrained by `with' has no component named %a@]" + longident lid + | With_mismatch(lid, explanation) -> + fprintf ppf + "@[\ + @[In this `with' constraint, the new definition of %a@ \ + does not match its original definition@ \ + in the constrained signature:@]@ \ + %a@]" + longident lid Includemod.report_error explanation + | With_makes_applicative_functor_ill_typed(lid, path, explanation) -> + fprintf ppf + "@[\ + @[This `with' constraint on %a makes the applicative functor @ \ + type %s ill-typed in the constrained signature:@]@ \ + %a@]" + longident lid (Path.name path) Includemod.report_error explanation + | With_changes_module_alias(lid, id, path) -> + fprintf ppf + "@[\ + @[This `with' constraint on %a changes %s, which is aliased @ \ + in the constrained signature (as %s)@].@]" + longident lid (Path.name path) (Ident.name id) + | With_cannot_remove_constrained_type -> + fprintf ppf + "@[Destructive substitutions are not supported for constrained @ \ + types (other than when replacing a type constructor with @ \ + a type constructor with the same arguments).@]" + | Repeated_name(kind, name) -> + fprintf ppf + "@[Multiple definition of the %s name %s.@ \ + Names must be unique in a given structure or signature.@]" kind name + | Non_generalizable typ -> + fprintf ppf + "@[The type of this expression,@ %a,@ \ + contains type variables that cannot be generalized@]" type_scheme typ + | Non_generalizable_class (id, desc) -> + fprintf ppf + "@[The type of this class,@ %a,@ \ + contains type variables that cannot be generalized@]" + (class_declaration id) desc + | Non_generalizable_module mty -> + fprintf ppf + "@[The type of this module,@ %a,@ \ + contains type variables that cannot be generalized@]" modtype mty + | Implementation_is_required intf_name -> + fprintf ppf + "@[The interface %a@ declares values, not just types.@ \ + An implementation must be provided.@]" + Location.print_filename intf_name + | Interface_not_compiled intf_name -> + fprintf ppf + "@[Could not find the .cmi file for interface@ %a.@]" + Location.print_filename intf_name + | Not_allowed_in_functor_body -> + fprintf ppf + "@[This expression creates fresh types.@ %s@]" + "It is not allowed inside applicative functors." + | Not_a_packed_module ty -> + fprintf ppf + "This expression is not a packed module. It has type@ %a" + type_expr ty + | Incomplete_packed_module ty -> + fprintf ppf + "The type of this packed module contains variables:@ %a" + type_expr ty + | Scoping_pack (lid, ty) -> + fprintf ppf + "The type %a in this module cannot be exported.@ " longident lid; + fprintf ppf + "Its type contains local dependencies:@ %a" type_expr ty + | Recursive_module_require_explicit_type -> + fprintf ppf "Recursive modules require an explicit module type." + | Apply_generative -> + fprintf ppf "This is a generative functor. 
It can only be applied to ()" + | Cannot_scrape_alias p -> + fprintf ppf + "This is an alias for module %a, which is missing" + path p + +let report_error env ppf err = + Printtyp.wrap_printing_env env (fun () -> report_error ppf err) + +let () = + Location.register_error_of_exn + (function + | Error (loc, env, err) -> + Some (Location.error_of_printer loc (report_error env) err) + | Error_forward err -> + Some err + | _ -> + None + ) diff --git a/res_syntax/compiler-libs-406/typemod.mli b/res_syntax/compiler-libs-406/typemod.mli new file mode 100644 index 0000000000..fb767db2e3 --- /dev/null +++ b/res_syntax/compiler-libs-406/typemod.mli @@ -0,0 +1,90 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Type-checking of the module language and typed ast plugin hooks *) + +open Types +open Format + +val type_module: + Env.t -> Parsetree.module_expr -> Typedtree.module_expr +val type_structure: + Env.t -> Parsetree.structure -> Location.t -> + Typedtree.structure * Types.signature * Env.t +val type_toplevel_phrase: + Env.t -> Parsetree.structure -> + Typedtree.structure * Types.signature * Env.t +val type_implementation: + string -> string -> string -> Env.t -> Parsetree.structure -> + Typedtree.structure * Typedtree.module_coercion +val type_interface: + string -> Env.t -> Parsetree.signature -> Typedtree.signature +val transl_signature: + Env.t -> Parsetree.signature -> Typedtree.signature +val check_nongen_schemes: + Env.t -> Types.signature -> unit +val type_open_: + ?used_slot:bool ref -> ?toplevel:bool -> Asttypes.override_flag -> + Env.t -> Location.t -> Longident.t Asttypes.loc -> Path.t * Env.t +val modtype_of_package: + Env.t -> Location.t -> + Path.t -> Longident.t list -> type_expr list -> module_type +val simplify_signature: signature -> signature + +val path_of_module : Typedtree.module_expr -> Path.t option + +val save_signature: + string -> Typedtree.signature -> string -> string -> + Env.t -> Cmi_format.cmi_infos -> unit + +val package_units: + Env.t -> string list -> string -> string -> Typedtree.module_coercion + +type error = + Cannot_apply of module_type + | Not_included of Includemod.error list + | Cannot_eliminate_dependency of module_type + | Signature_expected + | Structure_expected of module_type + | With_no_component of Longident.t + | With_mismatch of Longident.t * Includemod.error list + | With_makes_applicative_functor_ill_typed of + Longident.t * Path.t * Includemod.error list + | With_changes_module_alias of Longident.t * Ident.t * Path.t + | With_cannot_remove_constrained_type + | Repeated_name of string * string + | Non_generalizable of type_expr + | Non_generalizable_class of Ident.t * class_declaration + | Non_generalizable_module of module_type + | Implementation_is_required of string + | Interface_not_compiled of string + | Not_allowed_in_functor_body + | Not_a_packed_module of type_expr + | Incomplete_packed_module of type_expr + | Scoping_pack of Longident.t * type_expr + | Recursive_module_require_explicit_type + | Apply_generative + | 
Cannot_scrape_alias of Path.t + +exception Error of Location.t * Env.t * error +exception Error_forward of Location.error + +val report_error: Env.t -> formatter -> error -> unit + + +module ImplementationHooks : Misc.HookSig + with type t = Typedtree.structure * Typedtree.module_coercion +module InterfaceHooks : Misc.HookSig + with type t = Typedtree.signature diff --git a/res_syntax/compiler-libs-406/typeopt.ml b/res_syntax/compiler-libs-406/typeopt.ml new file mode 100644 index 0000000000..1d3101e32e --- /dev/null +++ b/res_syntax/compiler-libs-406/typeopt.ml @@ -0,0 +1,204 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1998 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Auxiliaries for type-based optimizations, e.g. array kinds *) + +open Path +open Types +open Asttypes +open Typedtree +open Lambda + +let scrape_ty env ty = + let ty = Ctype.expand_head_opt env (Ctype.correct_levels ty) in + match ty.desc with + | Tconstr (p, _, _) -> + begin match Env.find_type p env with + | {type_unboxed = {unboxed = true; _}; _} -> + begin match Typedecl.get_unboxed_type_representation env ty with + | None -> ty + | Some ty2 -> ty2 + end + | _ -> ty + | exception Not_found -> ty + end + | _ -> ty + +let scrape env ty = + (scrape_ty env ty).desc + +let is_function_type env ty = + match scrape env ty with + | Tarrow (_, lhs, rhs, _) -> Some (lhs, rhs) + | _ -> None + +let is_base_type env ty base_ty_path = + match scrape env ty with + | Tconstr(p, _, _) -> Path.same p base_ty_path + | _ -> false + +let maybe_pointer_type env ty = + if Ctype.maybe_pointer_type env ty then + Pointer + else + Immediate + +let maybe_pointer exp = maybe_pointer_type exp.exp_env exp.exp_type + +type classification = + | Int + | Float + | Lazy + | Addr (* anything except a float or a lazy *) + | Any + +let classify env ty = + let ty = scrape_ty env ty in + if maybe_pointer_type env ty = Immediate then Int + else match ty.desc with + | Tvar _ | Tunivar _ -> + Any + | Tconstr (p, _args, _abbrev) -> + if Path.same p Predef.path_float then Float + else if Path.same p Predef.path_lazy_t then Lazy + else if Path.same p Predef.path_string + || Path.same p Predef.path_bytes + || Path.same p Predef.path_array + || Path.same p Predef.path_nativeint + || Path.same p Predef.path_int32 + || Path.same p Predef.path_int64 then Addr + else begin + try + match (Env.find_type p env).type_kind with + | Type_abstract -> + Any + | Type_record _ | Type_variant _ | Type_open -> + Addr + with Not_found -> + (* This can happen due to e.g. missing -I options, + causing some .cmi files to be unavailable. + Maybe we should emit a warning. 
*) + Any + end + | Tarrow _ | Ttuple _ | Tpackage _ | Tobject _ | Tnil | Tvariant _ -> + Addr + | Tlink _ | Tsubst _ | Tpoly _ | Tfield _ -> + assert false + +let array_type_kind env ty = + match scrape env ty with + | Tconstr(p, [elt_ty], _) | Tpoly({desc = Tconstr(p, [elt_ty], _)}, _) + when Path.same p Predef.path_array -> + begin match classify env elt_ty with + | Any -> if Config.flat_float_array then Pgenarray else Paddrarray + | Float -> if Config.flat_float_array then Pfloatarray else Paddrarray + | Addr | Lazy -> Paddrarray + | Int -> Pintarray + end + | Tconstr(p, [], _) | Tpoly({desc = Tconstr(p, [], _)}, _) + when Path.same p Predef.path_floatarray -> + Pfloatarray + | _ -> + (* This can happen with e.g. Obj.field *) + Pgenarray + +let array_kind exp = array_type_kind exp.exp_env exp.exp_type + +let array_pattern_kind pat = array_type_kind pat.pat_env pat.pat_type + +let bigarray_decode_type env ty tbl dfl = + match scrape env ty with + | Tconstr(Pdot(Pident mod_id, type_name, _), [], _) + when Ident.name mod_id = "CamlinternalBigarray" -> + begin try List.assoc type_name tbl with Not_found -> dfl end + | _ -> + dfl + +let kind_table = + ["float32_elt", Pbigarray_float32; + "float64_elt", Pbigarray_float64; + "int8_signed_elt", Pbigarray_sint8; + "int8_unsigned_elt", Pbigarray_uint8; + "int16_signed_elt", Pbigarray_sint16; + "int16_unsigned_elt", Pbigarray_uint16; + "int32_elt", Pbigarray_int32; + "int64_elt", Pbigarray_int64; + "int_elt", Pbigarray_caml_int; + "nativeint_elt", Pbigarray_native_int; + "complex32_elt", Pbigarray_complex32; + "complex64_elt", Pbigarray_complex64] + +let layout_table = + ["c_layout", Pbigarray_c_layout; + "fortran_layout", Pbigarray_fortran_layout] + +let bigarray_type_kind_and_layout env typ = + match scrape env typ with + | Tconstr(_p, [_caml_type; elt_type; layout_type], _abbrev) -> + (bigarray_decode_type env elt_type kind_table Pbigarray_unknown, + bigarray_decode_type env layout_type layout_table + Pbigarray_unknown_layout) + | _ -> + (Pbigarray_unknown, Pbigarray_unknown_layout) + +let value_kind env ty = + match scrape env ty with + | Tconstr(p, _, _) when Path.same p Predef.path_int -> + Pintval + | Tconstr(p, _, _) when Path.same p Predef.path_char -> + Pintval + | Tconstr(p, _, _) when Path.same p Predef.path_float -> + Pfloatval + | Tconstr(p, _, _) when Path.same p Predef.path_int32 -> + Pboxedintval Pint32 + | Tconstr(p, _, _) when Path.same p Predef.path_int64 -> + Pboxedintval Pint64 + | Tconstr(p, _, _) when Path.same p Predef.path_nativeint -> + Pboxedintval Pnativeint + | _ -> + Pgenval + + +(** Whether a forward block is needed for a lazy thunk on a value, i.e. + if the value can be represented as a float/forward/lazy *) +let lazy_val_requires_forward env ty = + match classify env ty with + | Any | Lazy -> true + | Float -> Config.flat_float_array + | Addr | Int -> false + +(** The compilation of the expression [lazy e] depends on the form of e: + constants, floats and identifiers are optimized. The optimization must be + taken into account when determining whether a recursive binding is safe. 
*) +let classify_lazy_argument : Typedtree.expression -> + [`Constant_or_function + |`Float + |`Identifier of [`Forward_value|`Other] + |`Other] = + fun e -> match e.exp_desc with + | Texp_constant + ( Const_int _ | Const_char _ | Const_string _ + | Const_int32 _ | Const_int64 _ | Const_nativeint _ ) + | Texp_function _ + | Texp_construct (_, {cstr_arity = 0}, _) -> + `Constant_or_function + | Texp_constant(Const_float _) -> + `Float + | Texp_ident _ when lazy_val_requires_forward e.exp_env e.exp_type -> + `Identifier `Forward_value + | Texp_ident _ -> + `Identifier `Other + | _ -> + `Other diff --git a/res_syntax/compiler-libs-406/typeopt.mli b/res_syntax/compiler-libs-406/typeopt.mli new file mode 100644 index 0000000000..299e822209 --- /dev/null +++ b/res_syntax/compiler-libs-406/typeopt.mli @@ -0,0 +1,37 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1998 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Auxiliaries for type-based optimizations, e.g. array kinds *) + +val is_function_type : + Env.t -> Types.type_expr -> (Types.type_expr * Types.type_expr) option +val is_base_type : Env.t -> Types.type_expr -> Path.t -> bool + +val maybe_pointer_type : Env.t -> Types.type_expr + -> Lambda.immediate_or_pointer +val maybe_pointer : Typedtree.expression -> Lambda.immediate_or_pointer + +val array_type_kind : Env.t -> Types.type_expr -> Lambda.array_kind +val array_kind : Typedtree.expression -> Lambda.array_kind +val array_pattern_kind : Typedtree.pattern -> Lambda.array_kind +val bigarray_type_kind_and_layout : + Env.t -> Types.type_expr -> Lambda.bigarray_kind * Lambda.bigarray_layout +val value_kind : Env.t -> Types.type_expr -> Lambda.value_kind + +val classify_lazy_argument : Typedtree.expression -> + [ `Constant_or_function + | `Float + | `Identifier of [`Forward_value | `Other] + | `Other] diff --git a/res_syntax/compiler-libs-406/types.ml b/res_syntax/compiler-libs-406/types.ml new file mode 100644 index 0000000000..7ef6eaff67 --- /dev/null +++ b/res_syntax/compiler-libs-406/types.ml @@ -0,0 +1,349 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +(* Representation of types and declarations *) + +open Asttypes + +(* Type expressions for the core language *) + +type type_expr = + { mutable desc: type_desc; + mutable level: int; + id: int } + +and type_desc = + Tvar of string option + | Tarrow of arg_label * type_expr * type_expr * commutable + | Ttuple of type_expr list + | Tconstr of Path.t * type_expr list * abbrev_memo ref + | Tobject of type_expr * (Path.t * type_expr list) option ref + | Tfield of string * field_kind * type_expr * type_expr + | Tnil + | Tlink of type_expr + | Tsubst of type_expr (* for copying *) + | Tvariant of row_desc + | Tunivar of string option + | Tpoly of type_expr * type_expr list + | Tpackage of Path.t * Longident.t list * type_expr list + +and row_desc = + { row_fields: (label * row_field) list; + row_more: type_expr; + row_bound: unit; + row_closed: bool; + row_fixed: bool; + row_name: (Path.t * type_expr list) option } + +and row_field = + Rpresent of type_expr option + | Reither of bool * type_expr list * bool * row_field option ref + (* 1st true denotes a constant constructor *) + (* 2nd true denotes a tag in a pattern matching, and + is erased later *) + | Rabsent + +and abbrev_memo = + Mnil + | Mcons of private_flag * Path.t * type_expr * type_expr * abbrev_memo + | Mlink of abbrev_memo ref + +and field_kind = + Fvar of field_kind option ref + | Fpresent + | Fabsent + +and commutable = + Cok + | Cunknown + | Clink of commutable ref + +module TypeOps = struct + type t = type_expr + let compare t1 t2 = t1.id - t2.id + let hash t = t.id + let equal t1 t2 = t1 == t2 +end + +(* Maps of methods and instance variables *) + +module OrderedString = + struct type t = string let compare (x:t) y = compare x y end +module Meths = Map.Make(OrderedString) +module Vars = Meths + +(* Value descriptions *) + +type value_description = + { val_type: type_expr; (* Type of the value *) + val_kind: value_kind; + val_loc: Location.t; + val_attributes: Parsetree.attributes; + } + +and value_kind = + Val_reg (* Regular value *) + | Val_prim of Primitive.description (* Primitive *) + | Val_ivar of mutable_flag * string (* Instance variable (mutable ?) 
*) + | Val_self of (Ident.t * type_expr) Meths.t ref * + (Ident.t * Asttypes.mutable_flag * + Asttypes.virtual_flag * type_expr) Vars.t ref * + string * type_expr + (* Self *) + | Val_anc of (string * Ident.t) list * string + (* Ancestor *) + | Val_unbound (* Unbound variable *) + +(* Variance *) + +module Variance = struct + type t = int + type f = May_pos | May_neg | May_weak | Inj | Pos | Neg | Inv + let single = function + | May_pos -> 1 + | May_neg -> 2 + | May_weak -> 4 + | Inj -> 8 + | Pos -> 16 + | Neg -> 32 + | Inv -> 64 + let union v1 v2 = v1 lor v2 + let inter v1 v2 = v1 land v2 + let subset v1 v2 = (v1 land v2 = v1) + let set x b v = + if b then v lor single x else v land (lnot (single x)) + let mem x = subset (single x) + let null = 0 + let may_inv = 7 + let full = 127 + let covariant = single May_pos lor single Pos lor single Inj + let swap f1 f2 v = + let v' = set f1 (mem f2 v) v in set f2 (mem f1 v) v' + let conjugate v = swap May_pos May_neg (swap Pos Neg v) + let get_upper v = (mem May_pos v, mem May_neg v) + let get_lower v = (mem Pos v, mem Neg v, mem Inv v, mem Inj v) +end + +(* Type definitions *) + +type type_declaration = + { type_params: type_expr list; + type_arity: int; + type_kind: type_kind; + type_private: private_flag; + type_manifest: type_expr option; + type_variance: Variance.t list; + type_newtype_level: (int * int) option; + type_loc: Location.t; + type_attributes: Parsetree.attributes; + type_immediate: bool; + type_unboxed: unboxed_status; + } + +and type_kind = + Type_abstract + | Type_record of label_declaration list * record_representation + | Type_variant of constructor_declaration list + | Type_open + +and record_representation = + Record_regular (* All fields are boxed / tagged *) + | Record_float (* All fields are floats *) + | Record_unboxed of bool (* Unboxed single-field record, inlined or not *) + | Record_inlined of int (* Inlined record *) + | Record_extension (* Inlined record under extension *) + +and label_declaration = + { + ld_id: Ident.t; + ld_mutable: mutable_flag; + ld_type: type_expr; + ld_loc: Location.t; + ld_attributes: Parsetree.attributes; + } + +and constructor_declaration = + { + cd_id: Ident.t; + cd_args: constructor_arguments; + cd_res: type_expr option; + cd_loc: Location.t; + cd_attributes: Parsetree.attributes; + } + +and constructor_arguments = + | Cstr_tuple of type_expr list + | Cstr_record of label_declaration list + +and unboxed_status = + { + unboxed: bool; + default: bool; (* False if the unboxed field was set from an attribute. 
*) + } + +let unboxed_false_default_false = {unboxed = false; default = false} +let unboxed_false_default_true = {unboxed = false; default = true} +let unboxed_true_default_false = {unboxed = true; default = false} +let unboxed_true_default_true = {unboxed = true; default = true} + +type extension_constructor = + { ext_type_path: Path.t; + ext_type_params: type_expr list; + ext_args: constructor_arguments; + ext_ret_type: type_expr option; + ext_private: private_flag; + ext_loc: Location.t; + ext_attributes: Parsetree.attributes; } + +and type_transparence = + Type_public (* unrestricted expansion *) + | Type_new (* "new" type *) + | Type_private (* private type *) + +(* Type expressions for the class language *) + +module Concr = Set.Make(OrderedString) + +type class_type = + Cty_constr of Path.t * type_expr list * class_type + | Cty_signature of class_signature + | Cty_arrow of arg_label * type_expr * class_type + +and class_signature = + { csig_self: type_expr; + csig_vars: + (Asttypes.mutable_flag * Asttypes.virtual_flag * type_expr) Vars.t; + csig_concr: Concr.t; + csig_inher: (Path.t * type_expr list) list } + +type class_declaration = + { cty_params: type_expr list; + mutable cty_type: class_type; + cty_path: Path.t; + cty_new: type_expr option; + cty_variance: Variance.t list; + cty_loc: Location.t; + cty_attributes: Parsetree.attributes; + } + +type class_type_declaration = + { clty_params: type_expr list; + clty_type: class_type; + clty_path: Path.t; + clty_variance: Variance.t list; + clty_loc: Location.t; + clty_attributes: Parsetree.attributes; + } + +(* Type expressions for the module language *) + +type module_type = + Mty_ident of Path.t + | Mty_signature of signature + | Mty_functor of Ident.t * module_type option * module_type + | Mty_alias of alias_presence * Path.t + +and alias_presence = + | Mta_present + | Mta_absent + +and signature = signature_item list + +and signature_item = + Sig_value of Ident.t * value_description + | Sig_type of Ident.t * type_declaration * rec_status + | Sig_typext of Ident.t * extension_constructor * ext_status + | Sig_module of Ident.t * module_declaration * rec_status + | Sig_modtype of Ident.t * modtype_declaration + | Sig_class of Ident.t * class_declaration * rec_status + | Sig_class_type of Ident.t * class_type_declaration * rec_status + +and module_declaration = + { + md_type: module_type; + md_attributes: Parsetree.attributes; + md_loc: Location.t; + } + +and modtype_declaration = + { + mtd_type: module_type option; (* Note: abstract *) + mtd_attributes: Parsetree.attributes; + mtd_loc: Location.t; + } + +and rec_status = + Trec_not (* first in a nonrecursive group *) + | Trec_first (* first in a recursive group *) + | Trec_next (* not first in a recursive/nonrecursive group *) + +and ext_status = + Text_first (* first constructor of an extension *) + | Text_next (* not first constructor of an extension *) + | Text_exception (* an exception *) + + +(* Constructor and record label descriptions inserted held in typing + environments *) + +type constructor_description = + { cstr_name: string; (* Constructor name *) + cstr_res: type_expr; (* Type of the result *) + cstr_existentials: type_expr list; (* list of existentials *) + cstr_args: type_expr list; (* Type of the arguments *) + cstr_arity: int; (* Number of arguments *) + cstr_tag: constructor_tag; (* Tag for heap blocks *) + cstr_consts: int; (* Number of constant constructors *) + cstr_nonconsts: int; (* Number of non-const constructors *) + cstr_normal: int; (* Number of non 
generalized constrs *) + cstr_generalized: bool; (* Constrained return type? *) + cstr_private: private_flag; (* Read-only constructor? *) + cstr_loc: Location.t; + cstr_attributes: Parsetree.attributes; + cstr_inlined: type_declaration option; + } + +and constructor_tag = + Cstr_constant of int (* Constant constructor (an int) *) + | Cstr_block of int (* Regular constructor (a block) *) + | Cstr_unboxed (* Constructor of an unboxed type *) + | Cstr_extension of Path.t * bool (* Extension constructor + true if a constant false if a block*) + +let equal_tag t1 t2 = + match (t1, t2) with + | Cstr_constant i1, Cstr_constant i2 -> i2 = i1 + | Cstr_block i1, Cstr_block i2 -> i2 = i1 + | Cstr_unboxed, Cstr_unboxed -> true + | Cstr_extension (path1, b1), Cstr_extension (path2, b2) -> + Path.same path1 path2 && b1 = b2 + | (Cstr_constant _|Cstr_block _|Cstr_unboxed|Cstr_extension _), _ -> false + +let may_equal_constr c1 c2 = match c1.cstr_tag,c2.cstr_tag with +| Cstr_extension _,Cstr_extension _ -> c1.cstr_arity = c2.cstr_arity +| tag1,tag2 -> equal_tag tag1 tag2 + +type label_description = + { lbl_name: string; (* Short name *) + lbl_res: type_expr; (* Type of the result *) + lbl_arg: type_expr; (* Type of the argument *) + lbl_mut: mutable_flag; (* Is this a mutable field? *) + lbl_pos: int; (* Position in block *) + lbl_all: label_description array; (* All the labels in this type *) + lbl_repres: record_representation; (* Representation for this record *) + lbl_private: private_flag; (* Read-only field? *) + lbl_loc: Location.t; + lbl_attributes: Parsetree.attributes; + } diff --git a/res_syntax/compiler-libs-406/types.mli b/res_syntax/compiler-libs-406/types.mli new file mode 100644 index 0000000000..28317b5237 --- /dev/null +++ b/res_syntax/compiler-libs-406/types.mli @@ -0,0 +1,494 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** {0 Representation of types and declarations} *) + +(** [Types] defines the representation of types and declarations (that is, the + content of module signatures). + + CMI files are made of marshalled types. +*) + +(** Asttypes exposes basic definitions shared both by Parsetree and Types. *) +open Asttypes + +(** Type expressions for the core language. + + The [type_desc] variant defines all the possible type expressions one can + find in OCaml. [type_expr] wraps this with some annotations. + + The [level] field tracks the level of polymorphism associated to a type, + guiding the generalization algorithm. + Put shortly, when referring to a type in a given environment, both the type + and the environment have a level. If the type has an higher level, then it + can be considered fully polymorphic (type variables will be printed as + ['a]), otherwise it'll be weakly polymorphic, or non generalized (type + variables printed as ['_a]). + See [http://okmij.org/ftp/ML/generalization.html] for more information. + + Note about [type_declaration]: one should not make the confusion between + [type_expr] and [type_declaration]. 
+ + [type_declaration] refers specifically to the [type] construct in OCaml + language, where you create and name a new type or type alias. + + [type_expr] is used when you refer to existing types, e.g. when annotating + the expected type of a value. + + Also, as the type system of OCaml is generative, a [type_declaration] can + have the side-effect of introducing a new type constructor, different from + all other known types. + Whereas [type_expr] is a pure construct which allows referring to existing + types. + + Note on mutability: TBD. + *) +type type_expr = + { mutable desc: type_desc; + mutable level: int; + id: int } + +and type_desc = + | Tvar of string option + (** [Tvar (Some "a")] ==> ['a] or ['_a] + [Tvar None] ==> [_] *) + + | Tarrow of arg_label * type_expr * type_expr * commutable + (** [Tarrow (Nolabel, e1, e2, c)] ==> [e1 -> e2] + [Tarrow (Labelled "l", e1, e2, c)] ==> [l:e1 -> e2] + [Tarrow (Optional "l", e1, e2, c)] ==> [?l:e1 -> e2] + + See [commutable] for the last argument. *) + + | Ttuple of type_expr list + (** [Ttuple [t1;...;tn]] ==> [(t1 * ... * tn)] *) + + | Tconstr of Path.t * type_expr list * abbrev_memo ref + (** [Tconstr (`A.B.t', [t1;...;tn], _)] ==> [(t1,...,tn) A.B.t] + The last parameter keep tracks of known expansions, see [abbrev_memo]. *) + + | Tobject of type_expr * (Path.t * type_expr list) option ref + (** [Tobject (`f1:t1;...;fn: tn', `None')] ==> [< f1: t1; ...; fn: tn >] + f1, fn are represented as a linked list of types using Tfield and Tnil + constructors. + + [Tobject (_, `Some (`A.ct', [t1;...;tn]')] ==> [(t1, ..., tn) A.ct]. + where A.ct is the type of some class. + + There are also special cases for so-called "class-types", cf. [Typeclass] + and [Ctype.set_object_name]: + + [Tobject (Tfield(_,_,...(Tfield(_,_,rv)...), + Some(`A.#ct`, [rv;t1;...;tn])] + ==> [(t1, ..., tn) #A.ct] + [Tobject (_, Some(`A.#ct`, [Tnil;t1;...;tn])] ==> [(t1, ..., tn) A.ct] + + where [rv] is the hidden row variable. + *) + + | Tfield of string * field_kind * type_expr * type_expr + (** [Tfield ("foo", Fpresent, t, ts)] ==> [<...; foo : t; ts>] *) + + | Tnil + (** [Tnil] ==> [<...; >] *) + + | Tlink of type_expr + (** Indirection used by unification engine. *) + + | Tsubst of type_expr (* for copying *) + (** [Tsubst] is used temporarily to store information in low-level + functions manipulating representation of types, such as + instantiation or copy. + This constructor should not appear outside of these cases. *) + + | Tvariant of row_desc + (** Representation of polymorphic variants, see [row_desc]. *) + + | Tunivar of string option + (** Occurrence of a type variable introduced by a + forall quantifier / [Tpoly]. *) + + | Tpoly of type_expr * type_expr list + (** [Tpoly (ty,tyl)] ==> ['a1... 'an. ty], + where 'a1 ... 'an are names given to types in tyl + and occurrences of those types in ty. *) + + | Tpackage of Path.t * Longident.t list * type_expr list + (** Type of a first-class module (a.k.a package). 
*) + +(** [ `X | `Y ] (row_closed = true) + [< `X | `Y ] (row_closed = true) + [> `X | `Y ] (row_closed = false) + [< `X | `Y > `X ] (row_closed = true) + + type t = [> `X ] as 'a (row_more = Tvar a) + type t = private [> `X ] (row_more = Tconstr (t#row, [], ref Mnil) + + And for: + + let f = function `X -> `X -> | `Y -> `X + + the type of "f" will be a [Tarrow] whose lhs will (basically) be: + + Tvariant { row_fields = [("X", _)]; + row_more = + Tvariant { row_fields = [("Y", _)]; + row_more = + Tvariant { row_fields = []; + row_more = _; + _ }; + _ }; + _ + } + +*) +and row_desc = + { row_fields: (label * row_field) list; + row_more: type_expr; + row_bound: unit; (* kept for compatibility *) + row_closed: bool; + row_fixed: bool; + row_name: (Path.t * type_expr list) option } + +and row_field = + Rpresent of type_expr option + | Reither of bool * type_expr list * bool * row_field option ref + (* 1st true denotes a constant constructor *) + (* 2nd true denotes a tag in a pattern matching, and + is erased later *) + | Rabsent + +(** [abbrev_memo] allows one to keep track of different expansions of a type + alias. This is done for performance purposes. + + For instance, when defining [type 'a pair = 'a * 'a], when one refers to an + ['a pair], it is just a shortcut for the ['a * 'a] type. + This expansion will be stored in the [abbrev_memo] of the corresponding + [Tconstr] node. + + In practice, [abbrev_memo] behaves like list of expansions with a mutable + tail. + + Note on marshalling: [abbrev_memo] must not appear in saved types. + [Btype], with [cleanup_abbrev] and [memo], takes care of tracking and + removing abbreviations. +*) +and abbrev_memo = + | Mnil (** No known abbreviation *) + + | Mcons of private_flag * Path.t * type_expr * type_expr * abbrev_memo + (** Found one abbreviation. + A valid abbreviation should be at least as visible and reachable by the + same path. + The first expression is the abbreviation and the second the expansion. *) + + | Mlink of abbrev_memo ref + (** Abbreviations can be found after this indirection *) + +and field_kind = + Fvar of field_kind option ref + | Fpresent + | Fabsent + +(** [commutable] is a flag appended to every arrow type. + + When typing an application, if the type of the functional is + known, its type is instantiated with [Cok] arrows, otherwise as + [Clink (ref Cunknown)]. + + When the type is not known, the application will be used to infer + the actual type. This is fragile in presence of labels where + there is no principal type. + + Two incompatible applications relying on [Cunknown] arrows will + trigger an error. + + let f g = + g ~a:() ~b:(); + g ~b:() ~a:(); + + Error: This function is applied to arguments + in an order different from other calls. + This is only allowed when the real type is known. +*) +and commutable = + Cok + | Cunknown + | Clink of commutable ref + +module TypeOps : sig + type t = type_expr + val compare : t -> t -> int + val equal : t -> t -> bool + val hash : t -> int +end + +(* Maps of methods and instance variables *) + +module Meths : Map.S with type key = string +module Vars : Map.S with type key = string + +(* Value descriptions *) + +type value_description = + { val_type: type_expr; (* Type of the value *) + val_kind: value_kind; + val_loc: Location.t; + val_attributes: Parsetree.attributes; + } + +and value_kind = + Val_reg (* Regular value *) + | Val_prim of Primitive.description (* Primitive *) + | Val_ivar of mutable_flag * string (* Instance variable (mutable ?) 
*) + | Val_self of (Ident.t * type_expr) Meths.t ref * + (Ident.t * mutable_flag * virtual_flag * type_expr) Vars.t ref * + string * type_expr + (* Self *) + | Val_anc of (string * Ident.t) list * string + (* Ancestor *) + | Val_unbound (* Unbound variable *) + +(* Variance *) + +module Variance : sig + type t + type f = May_pos | May_neg | May_weak | Inj | Pos | Neg | Inv + val null : t (* no occurrence *) + val full : t (* strictly invariant *) + val covariant : t (* strictly covariant *) + val may_inv : t (* maybe invariant *) + val union : t -> t -> t + val inter : t -> t -> t + val subset : t -> t -> bool + val set : f -> bool -> t -> t + val mem : f -> t -> bool + val conjugate : t -> t (* exchange positive and negative *) + val get_upper : t -> bool * bool (* may_pos, may_neg *) + val get_lower : t -> bool * bool * bool * bool (* pos, neg, inv, inj *) +end + +(* Type definitions *) + +type type_declaration = + { type_params: type_expr list; + type_arity: int; + type_kind: type_kind; + type_private: private_flag; + type_manifest: type_expr option; + type_variance: Variance.t list; + (* covariant, contravariant, weakly contravariant, injective *) + type_newtype_level: (int * int) option; + (* definition level * expansion level *) + type_loc: Location.t; + type_attributes: Parsetree.attributes; + type_immediate: bool; (* true iff type should not be a pointer *) + type_unboxed: unboxed_status; + } + +and type_kind = + Type_abstract + | Type_record of label_declaration list * record_representation + | Type_variant of constructor_declaration list + | Type_open + +and record_representation = + Record_regular (* All fields are boxed / tagged *) + | Record_float (* All fields are floats *) + | Record_unboxed of bool (* Unboxed single-field record, inlined or not *) + | Record_inlined of int (* Inlined record *) + | Record_extension (* Inlined record under extension *) + +and label_declaration = + { + ld_id: Ident.t; + ld_mutable: mutable_flag; + ld_type: type_expr; + ld_loc: Location.t; + ld_attributes: Parsetree.attributes; + } + +and constructor_declaration = + { + cd_id: Ident.t; + cd_args: constructor_arguments; + cd_res: type_expr option; + cd_loc: Location.t; + cd_attributes: Parsetree.attributes; + } + +and constructor_arguments = + | Cstr_tuple of type_expr list + | Cstr_record of label_declaration list + +and unboxed_status = private + (* This type must be private in order to ensure perfect sharing of the + four possible values. Otherwise, ocamlc.byte and ocamlc.opt produce + different executables. *) + { + unboxed: bool; + default: bool; (* True for unannotated unboxable types. 
*) + } + +val unboxed_false_default_false : unboxed_status +val unboxed_false_default_true : unboxed_status +val unboxed_true_default_false : unboxed_status +val unboxed_true_default_true : unboxed_status + +type extension_constructor = + { + ext_type_path: Path.t; + ext_type_params: type_expr list; + ext_args: constructor_arguments; + ext_ret_type: type_expr option; + ext_private: private_flag; + ext_loc: Location.t; + ext_attributes: Parsetree.attributes; + } + +and type_transparence = + Type_public (* unrestricted expansion *) + | Type_new (* "new" type *) + | Type_private (* private type *) + +(* Type expressions for the class language *) + +module Concr : Set.S with type elt = string + +type class_type = + Cty_constr of Path.t * type_expr list * class_type + | Cty_signature of class_signature + | Cty_arrow of arg_label * type_expr * class_type + +and class_signature = + { csig_self: type_expr; + csig_vars: + (Asttypes.mutable_flag * Asttypes.virtual_flag * type_expr) Vars.t; + csig_concr: Concr.t; + csig_inher: (Path.t * type_expr list) list } + +type class_declaration = + { cty_params: type_expr list; + mutable cty_type: class_type; + cty_path: Path.t; + cty_new: type_expr option; + cty_variance: Variance.t list; + cty_loc: Location.t; + cty_attributes: Parsetree.attributes; + } + +type class_type_declaration = + { clty_params: type_expr list; + clty_type: class_type; + clty_path: Path.t; + clty_variance: Variance.t list; + clty_loc: Location.t; + clty_attributes: Parsetree.attributes; + } + +(* Type expressions for the module language *) + +type module_type = + Mty_ident of Path.t + | Mty_signature of signature + | Mty_functor of Ident.t * module_type option * module_type + | Mty_alias of alias_presence * Path.t + +and alias_presence = + | Mta_present + | Mta_absent + +and signature = signature_item list + +and signature_item = + Sig_value of Ident.t * value_description + | Sig_type of Ident.t * type_declaration * rec_status + | Sig_typext of Ident.t * extension_constructor * ext_status + | Sig_module of Ident.t * module_declaration * rec_status + | Sig_modtype of Ident.t * modtype_declaration + | Sig_class of Ident.t * class_declaration * rec_status + | Sig_class_type of Ident.t * class_type_declaration * rec_status + +and module_declaration = + { + md_type: module_type; + md_attributes: Parsetree.attributes; + md_loc: Location.t; + } + +and modtype_declaration = + { + mtd_type: module_type option; (* None: abstract *) + mtd_attributes: Parsetree.attributes; + mtd_loc: Location.t; + } + +and rec_status = + Trec_not (* first in a nonrecursive group *) + | Trec_first (* first in a recursive group *) + | Trec_next (* not first in a recursive/nonrecursive group *) + +and ext_status = + Text_first (* first constructor in an extension *) + | Text_next (* not first constructor in an extension *) + | Text_exception + + +(* Constructor and record label descriptions inserted held in typing + environments *) + +type constructor_description = + { cstr_name: string; (* Constructor name *) + cstr_res: type_expr; (* Type of the result *) + cstr_existentials: type_expr list; (* list of existentials *) + cstr_args: type_expr list; (* Type of the arguments *) + cstr_arity: int; (* Number of arguments *) + cstr_tag: constructor_tag; (* Tag for heap blocks *) + cstr_consts: int; (* Number of constant constructors *) + cstr_nonconsts: int; (* Number of non-const constructors *) + cstr_normal: int; (* Number of non generalized constrs *) + cstr_generalized: bool; (* Constrained return type? 
*) + cstr_private: private_flag; (* Read-only constructor? *) + cstr_loc: Location.t; + cstr_attributes: Parsetree.attributes; + cstr_inlined: type_declaration option; + } + +and constructor_tag = + Cstr_constant of int (* Constant constructor (an int) *) + | Cstr_block of int (* Regular constructor (a block) *) + | Cstr_unboxed (* Constructor of an unboxed type *) + | Cstr_extension of Path.t * bool (* Extension constructor + true if a constant false if a block*) + +(* Constructors are the same *) +val equal_tag : constructor_tag -> constructor_tag -> bool + +(* Constructors may be the same, given potential rebinding *) +val may_equal_constr : + constructor_description -> constructor_description -> bool + +type label_description = + { lbl_name: string; (* Short name *) + lbl_res: type_expr; (* Type of the result *) + lbl_arg: type_expr; (* Type of the argument *) + lbl_mut: mutable_flag; (* Is this a mutable field? *) + lbl_pos: int; (* Position in block *) + lbl_all: label_description array; (* All the labels in this type *) + lbl_repres: record_representation; (* Representation for this record *) + lbl_private: private_flag; (* Read-only field? *) + lbl_loc: Location.t; + lbl_attributes: Parsetree.attributes; + } diff --git a/res_syntax/compiler-libs-406/typetexp.ml b/res_syntax/compiler-libs-406/typetexp.ml new file mode 100644 index 0000000000..5347c42da2 --- /dev/null +++ b/res_syntax/compiler-libs-406/typetexp.ml @@ -0,0 +1,988 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +(* typetexp.ml,v 1.34.4.9 2002/01/07 08:39:16 garrigue Exp *) + +(* Typechecking of type expressions for the core language *) + +open Asttypes +open Misc +open Parsetree +open Typedtree +open Types +open Ctype + +exception Already_bound + +type error = + Unbound_type_variable of string + | Unbound_type_constructor of Longident.t + | Unbound_type_constructor_2 of Path.t + | Type_arity_mismatch of Longident.t * int * int + | Bound_type_variable of string + | Recursive_type + | Unbound_row_variable of Longident.t + | Type_mismatch of (type_expr * type_expr) list + | Alias_type_mismatch of (type_expr * type_expr) list + | Present_has_conjunction of string + | Present_has_no_type of string + | Constructor_mismatch of type_expr * type_expr + | Not_a_variant of type_expr + | Variant_tags of string * string + | Invalid_variable_name of string + | Cannot_quantify of string * type_expr + | Multiple_constraints_on_type of Longident.t + | Method_mismatch of string * type_expr * type_expr + | Unbound_value of Longident.t + | Unbound_constructor of Longident.t + | Unbound_label of Longident.t + | Unbound_module of Longident.t + | Unbound_class of Longident.t + | Unbound_modtype of Longident.t + | Unbound_cltype of Longident.t + | Ill_typed_functor_application of Longident.t + | Illegal_reference_to_recursive_module + | Access_functor_as_structure of Longident.t + | Apply_structure_as_functor of Longident.t + | Cannot_scrape_alias of Longident.t * Path.t + | Opened_object of Path.t option + | Not_an_object of type_expr + +exception Error of Location.t * Env.t * error +exception Error_forward of Location.error + + +type variable_context = int * (string, type_expr) Tbl.t + +(* Local definitions *) + +let instance_list = Ctype.instance_list Env.empty + +(* Narrowing unbound identifier errors. *) + +let rec narrow_unbound_lid_error : 'a. 
_ -> _ -> _ -> _ -> 'a = + fun env loc lid make_error -> + let check_module mlid = + try ignore (Env.lookup_module ~load:true mlid env) with + | Not_found -> + narrow_unbound_lid_error env loc mlid (fun lid -> Unbound_module lid) + | Env.Recmodule -> + raise (Error (loc, env, Illegal_reference_to_recursive_module)) + in + begin match lid with + | Longident.Lident _ -> () + | Longident.Ldot (mlid, _) -> + check_module mlid; + let md = Env.find_module (Env.lookup_module ~load:true mlid env) env in + begin match Env.scrape_alias env md.md_type with + | Mty_functor _ -> + raise (Error (loc, env, Access_functor_as_structure mlid)) + | Mty_alias(_, p) -> + raise (Error (loc, env, Cannot_scrape_alias(mlid, p))) + | _ -> () + end + | Longident.Lapply (flid, mlid) -> + check_module flid; + let fmd = Env.find_module (Env.lookup_module ~load:true flid env) env in + begin match Env.scrape_alias env fmd.md_type with + | Mty_signature _ -> + raise (Error (loc, env, Apply_structure_as_functor flid)) + | Mty_alias(_, p) -> + raise (Error (loc, env, Cannot_scrape_alias(flid, p))) + | _ -> () + end; + check_module mlid; + let mmd = Env.find_module (Env.lookup_module ~load:true mlid env) env in + begin match Env.scrape_alias env mmd.md_type with + | Mty_alias(_, p) -> + raise (Error (loc, env, Cannot_scrape_alias(mlid, p))) + | _ -> + raise (Error (loc, env, Ill_typed_functor_application lid)) + end + end; + raise (Error (loc, env, make_error lid)) + +let find_component (lookup : ?loc:_ -> _) make_error env loc lid = + try + match lid with + | Longident.Ldot (Longident.Lident "*predef*", s) -> + lookup ~loc (Longident.Lident s) Env.initial_safe_string + | _ -> + lookup ~loc lid env + with Not_found -> + narrow_unbound_lid_error env loc lid make_error + | Env.Recmodule -> + raise (Error (loc, env, Illegal_reference_to_recursive_module)) + +let find_type env loc lid = + let path = + find_component Env.lookup_type (fun lid -> Unbound_type_constructor lid) + env loc lid + in + let decl = Env.find_type path env in + Builtin_attributes.check_deprecated loc decl.type_attributes (Path.name path); + (path, decl) + +let find_constructor = + find_component Env.lookup_constructor (fun lid -> Unbound_constructor lid) +let find_all_constructors = + find_component Env.lookup_all_constructors + (fun lid -> Unbound_constructor lid) +let find_label = + find_component Env.lookup_label (fun lid -> Unbound_label lid) +let find_all_labels = + find_component Env.lookup_all_labels (fun lid -> Unbound_label lid) + +let find_class env loc lid = + let (path, decl) as r = + find_component Env.lookup_class (fun lid -> Unbound_class lid) env loc lid + in + Builtin_attributes.check_deprecated loc decl.cty_attributes (Path.name path); + r + +let find_value env loc lid = + Env.check_value_name (Longident.last lid) loc; + let (path, decl) as r = + find_component Env.lookup_value (fun lid -> Unbound_value lid) env loc lid + in + Builtin_attributes.check_deprecated loc decl.val_attributes (Path.name path); + r + +let lookup_module ?(load=false) env loc lid = + find_component (fun ?loc lid env -> (Env.lookup_module ~load ?loc lid env)) + (fun lid -> Unbound_module lid) env loc lid + +let find_module env loc lid = + let path = lookup_module ~load:true env loc lid in + let decl = Env.find_module path env in + (* No need to check for deprecated here, this is done in Env. 
*) + (path, decl) + +let find_modtype env loc lid = + let (path, decl) as r = + find_component Env.lookup_modtype (fun lid -> Unbound_modtype lid) + env loc lid + in + Builtin_attributes.check_deprecated loc decl.mtd_attributes (Path.name path); + r + +let find_class_type env loc lid = + let (path, decl) as r = + find_component Env.lookup_cltype (fun lid -> Unbound_cltype lid) + env loc lid + in + Builtin_attributes.check_deprecated loc decl.clty_attributes (Path.name path); + r + +let unbound_constructor_error env lid = + narrow_unbound_lid_error env lid.loc lid.txt + (fun lid -> Unbound_constructor lid) + +let unbound_label_error env lid = + narrow_unbound_lid_error env lid.loc lid.txt + (fun lid -> Unbound_label lid) + +(* Support for first-class modules. *) + +let transl_modtype_longident = ref (fun _ -> assert false) +let transl_modtype = ref (fun _ -> assert false) + +let create_package_mty fake loc env (p, l) = + let l = + List.sort + (fun (s1, _t1) (s2, _t2) -> + if s1.txt = s2.txt then + raise (Error (loc, env, Multiple_constraints_on_type s1.txt)); + compare s1.txt s2.txt) + l + in + l, + List.fold_left + (fun mty (s, t) -> + let d = {ptype_name = mkloc (Longident.last s.txt) s.loc; + ptype_params = []; + ptype_cstrs = []; + ptype_kind = Ptype_abstract; + ptype_private = Asttypes.Public; + ptype_manifest = if fake then None else Some t; + ptype_attributes = []; + ptype_loc = loc} in + Ast_helper.Mty.mk ~loc + (Pmty_with (mty, [ Pwith_type ({ txt = s.txt; loc }, d) ])) + ) + (Ast_helper.Mty.mk ~loc (Pmty_ident p)) + l + +(* Translation of type expressions *) + +let type_variables = ref (Tbl.empty : (string, type_expr) Tbl.t) +let univars = ref ([] : (string * type_expr) list) +let pre_univars = ref ([] : type_expr list) +let used_variables = ref (Tbl.empty : (string, type_expr * Location.t) Tbl.t) + +let reset_type_variables () = + reset_global_level (); + Ctype.reset_reified_var_counter (); + type_variables := Tbl.empty + +let narrow () = + (increase_global_level (), !type_variables) + +let widen (gl, tv) = + restore_global_level gl; + type_variables := tv + +let strict_ident c = (c = '_' || c >= 'a' && c <= 'z' || c >= 'A' && c <= 'Z') + +let validate_name = function + None -> None + | Some name as s -> + if name <> "" && strict_ident name.[0] then s else None + +let new_global_var ?name () = + new_global_var ?name:(validate_name name) () +let newvar ?name () = + newvar ?name:(validate_name name) () + +let type_variable loc name = + try + Tbl.find name !type_variables + with Not_found -> + raise(Error(loc, Env.empty, Unbound_type_variable ("'" ^ name))) + +let transl_type_param env styp = + let loc = styp.ptyp_loc in + match styp.ptyp_desc with + Ptyp_any -> + let ty = new_global_var ~name:"_" () in + { ctyp_desc = Ttyp_any; ctyp_type = ty; ctyp_env = env; + ctyp_loc = loc; ctyp_attributes = styp.ptyp_attributes; } + | Ptyp_var name -> + let ty = + try + if name <> "" && name.[0] = '_' then + raise (Error (loc, Env.empty, Invalid_variable_name ("'" ^ name))); + ignore (Tbl.find name !type_variables); + raise Already_bound + with Not_found -> + let v = new_global_var ~name () in + type_variables := Tbl.add name v !type_variables; + v + in + { ctyp_desc = Ttyp_var name; ctyp_type = ty; ctyp_env = env; + ctyp_loc = loc; ctyp_attributes = styp.ptyp_attributes; } + | _ -> assert false + +let transl_type_param env styp = + (* Currently useless, since type parameters cannot hold attributes + (but this could easily be lifted in the future). 
*) + Builtin_attributes.warning_scope styp.ptyp_attributes + (fun () -> transl_type_param env styp) + + +let new_pre_univar ?name () = + let v = newvar ?name () in pre_univars := v :: !pre_univars; v + +let rec swap_list = function + x :: y :: l -> y :: x :: swap_list l + | l -> l + +type policy = Fixed | Extensible | Univars + +let rec transl_type env policy styp = + Builtin_attributes.warning_scope styp.ptyp_attributes + (fun () -> transl_type_aux env policy styp) + +and transl_type_aux env policy styp = + let loc = styp.ptyp_loc in + let ctyp ctyp_desc ctyp_type = + { ctyp_desc; ctyp_type; ctyp_env = env; + ctyp_loc = loc; ctyp_attributes = styp.ptyp_attributes } + in + match styp.ptyp_desc with + Ptyp_any -> + let ty = + if policy = Univars then new_pre_univar () else + if policy = Fixed then + raise (Error (styp.ptyp_loc, env, Unbound_type_variable "_")) + else newvar () + in + ctyp Ttyp_any ty + | Ptyp_var name -> + let ty = + if name <> "" && name.[0] = '_' then + raise (Error (styp.ptyp_loc, env, Invalid_variable_name ("'" ^ name))); + begin try + instance env (List.assoc name !univars) + with Not_found -> try + instance env (fst(Tbl.find name !used_variables)) + with Not_found -> + let v = + if policy = Univars then new_pre_univar ~name () else newvar ~name () + in + used_variables := Tbl.add name (v, styp.ptyp_loc) !used_variables; + v + end + in + ctyp (Ttyp_var name) ty + | Ptyp_arrow(l, st1, st2) -> + let cty1 = transl_type env policy st1 in + let cty2 = transl_type env policy st2 in + let ty1 = cty1.ctyp_type in + let ty1 = + if Btype.is_optional l + then newty (Tconstr(Predef.path_option,[ty1], ref Mnil)) + else ty1 in + let ty = newty (Tarrow(l, ty1, cty2.ctyp_type, Cok)) in + ctyp (Ttyp_arrow (l, cty1, cty2)) ty + | Ptyp_tuple stl -> + assert (List.length stl >= 2); + let ctys = List.map (transl_type env policy) stl in + let ty = newty (Ttuple (List.map (fun ctyp -> ctyp.ctyp_type) ctys)) in + ctyp (Ttyp_tuple ctys) ty + | Ptyp_constr(lid, stl) -> + let (path, decl) = find_type env lid.loc lid.txt in + let stl = + match stl with + | [ {ptyp_desc=Ptyp_any} as t ] when decl.type_arity > 1 -> + List.map (fun _ -> t) decl.type_params + | _ -> stl + in + if List.length stl <> decl.type_arity then + raise(Error(styp.ptyp_loc, env, + Type_arity_mismatch(lid.txt, decl.type_arity, + List.length stl))); + let args = List.map (transl_type env policy) stl in + let params = instance_list decl.type_params in + let unify_param = + match decl.type_manifest with + None -> unify_var + | Some ty -> + if (repr ty).level = Btype.generic_level then unify_var else unify + in + List.iter2 + (fun (sty, cty) ty' -> + try unify_param env ty' cty.ctyp_type with Unify trace -> + raise (Error(sty.ptyp_loc, env, Type_mismatch (swap_list trace)))) + (List.combine stl args) params; + let constr = + newconstr path (List.map (fun ctyp -> ctyp.ctyp_type) args) in + begin try + Ctype.enforce_constraints env constr + with Unify trace -> + raise (Error(styp.ptyp_loc, env, Type_mismatch trace)) + end; + ctyp (Ttyp_constr (path, lid, args)) constr + | Ptyp_object (fields, o) -> + let ty, fields = transl_fields env policy o fields in + ctyp (Ttyp_object (fields, o)) (newobj ty) + | Ptyp_class(lid, stl) -> + let (path, decl, _is_variant) = + try + let path = Env.lookup_type lid.txt env in + let decl = Env.find_type path env in + let rec check decl = + match decl.type_manifest with + None -> raise Not_found + | Some ty -> + match (repr ty).desc with + Tvariant row when Btype.static_row row -> () + | Tconstr (path, 
_, _) -> + check (Env.find_type path env) + | _ -> raise Not_found + in check decl; + Location.deprecated styp.ptyp_loc + "old syntax for polymorphic variant type"; + (path, decl,true) + with Not_found -> try + let lid2 = + match lid.txt with + Longident.Lident s -> Longident.Lident ("#" ^ s) + | Longident.Ldot(r, s) -> Longident.Ldot (r, "#" ^ s) + | Longident.Lapply(_, _) -> fatal_error "Typetexp.transl_type" + in + let path = Env.lookup_type lid2 env in + let decl = Env.find_type path env in + (path, decl, false) + with Not_found -> + ignore (find_class env lid.loc lid.txt); assert false + in + if List.length stl <> decl.type_arity then + raise(Error(styp.ptyp_loc, env, + Type_arity_mismatch(lid.txt, decl.type_arity, + List.length stl))); + let args = List.map (transl_type env policy) stl in + let params = instance_list decl.type_params in + List.iter2 + (fun (sty, cty) ty' -> + try unify_var env ty' cty.ctyp_type with Unify trace -> + raise (Error(sty.ptyp_loc, env, Type_mismatch (swap_list trace)))) + (List.combine stl args) params; + let ty_args = List.map (fun ctyp -> ctyp.ctyp_type) args in + let ty = + try Ctype.expand_head env (newconstr path ty_args) + with Unify trace -> + raise (Error(styp.ptyp_loc, env, Type_mismatch trace)) + in + let ty = match ty.desc with + Tvariant row -> + let row = Btype.row_repr row in + let fields = + List.map + (fun (l,f) -> l, + match Btype.row_field_repr f with + | Rpresent (Some ty) -> + Reither(false, [ty], false, ref None) + | Rpresent None -> + Reither (true, [], false, ref None) + | _ -> f) + row.row_fields + in + let row = { row_closed = true; row_fields = fields; + row_bound = (); row_name = Some (path, ty_args); + row_fixed = false; row_more = newvar () } in + let static = Btype.static_row row in + let row = + if static then { row with row_more = newty Tnil } + else if policy <> Univars then row + else { row with row_more = new_pre_univar () } + in + newty (Tvariant row) + | Tobject (fi, _) -> + let _, tv = flatten_fields fi in + if policy = Univars then pre_univars := tv :: !pre_univars; + ty + | _ -> + assert false + in + ctyp (Ttyp_class (path, lid, args)) ty + | Ptyp_alias(st, alias) -> + let cty = + try + let t = + try List.assoc alias !univars + with Not_found -> + instance env (fst(Tbl.find alias !used_variables)) + in + let ty = transl_type env policy st in + begin try unify_var env t ty.ctyp_type with Unify trace -> + let trace = swap_list trace in + raise(Error(styp.ptyp_loc, env, Alias_type_mismatch trace)) + end; + ty + with Not_found -> + if !Clflags.principal then begin_def (); + let t = newvar () in + used_variables := Tbl.add alias (t, styp.ptyp_loc) !used_variables; + let ty = transl_type env policy st in + begin try unify_var env t ty.ctyp_type with Unify trace -> + let trace = swap_list trace in + raise(Error(styp.ptyp_loc, env, Alias_type_mismatch trace)) + end; + if !Clflags.principal then begin + end_def (); + generalize_structure t; + end; + let t = instance env t in + let px = Btype.proxy t in + begin match px.desc with + | Tvar None -> Btype.log_type px; px.desc <- Tvar (Some alias) + | Tunivar None -> Btype.log_type px; px.desc <- Tunivar (Some alias) + | _ -> () + end; + { ty with ctyp_type = t } + in + ctyp (Ttyp_alias (cty, alias)) cty.ctyp_type + | Ptyp_variant(fields, closed, present) -> + let name = ref None in + let mkfield l f = + newty (Tvariant {row_fields=[l,f]; row_more=newvar(); + row_bound=(); row_closed=true; + row_fixed=false; row_name=None}) in + let hfields = Hashtbl.create 17 in + let 
add_typed_field loc l f = + let h = Btype.hash_variant l in + try + let (l',f') = Hashtbl.find hfields h in + (* Check for tag conflicts *) + if l <> l' then raise(Error(styp.ptyp_loc, env, Variant_tags(l, l'))); + let ty = mkfield l f and ty' = mkfield l f' in + if equal env false [ty] [ty'] then () else + try unify env ty ty' + with Unify _trace -> + raise(Error(loc, env, Constructor_mismatch (ty,ty'))) + with Not_found -> + Hashtbl.add hfields h (l,f) + in + let add_field = function + Rtag (l, attrs, c, stl) -> + name := None; + let tl = + Builtin_attributes.warning_scope attrs + (fun () -> List.map (transl_type env policy) stl) + in + let f = match present with + Some present when not (List.mem l.txt present) -> + let ty_tl = List.map (fun cty -> cty.ctyp_type) tl in + Reither(c, ty_tl, false, ref None) + | _ -> + if List.length stl > 1 || c && stl <> [] then + raise(Error(styp.ptyp_loc, env, + Present_has_conjunction l.txt)); + match tl with [] -> Rpresent None + | st :: _ -> + Rpresent (Some st.ctyp_type) + in + add_typed_field styp.ptyp_loc l.txt f; + Ttag (l,attrs,c,tl) + | Rinherit sty -> + let cty = transl_type env policy sty in + let ty = cty.ctyp_type in + let nm = + match repr cty.ctyp_type with + {desc=Tconstr(p, tl, _)} -> Some(p, tl) + | _ -> None + in + begin try + (* Set name if there are no fields yet *) + Hashtbl.iter (fun _ _ -> raise Exit) hfields; + name := nm + with Exit -> + (* Unset it otherwise *) + name := None + end; + let fl = match expand_head env cty.ctyp_type, nm with + {desc=Tvariant row}, _ when Btype.static_row row -> + let row = Btype.row_repr row in + row.row_fields + | {desc=Tvar _}, Some(p, _) -> + raise(Error(sty.ptyp_loc, env, Unbound_type_constructor_2 p)) + | _ -> + raise(Error(sty.ptyp_loc, env, Not_a_variant ty)) + in + List.iter + (fun (l, f) -> + let f = match present with + Some present when not (List.mem l present) -> + begin match f with + Rpresent(Some ty) -> + Reither(false, [ty], false, ref None) + | Rpresent None -> + Reither(true, [], false, ref None) + | _ -> + assert false + end + | _ -> f + in + add_typed_field sty.ptyp_loc l f) + fl; + Tinherit cty + in + let tfields = List.map add_field fields in + let fields = Hashtbl.fold (fun _ p l -> p :: l) hfields [] in + begin match present with None -> () + | Some present -> + List.iter + (fun l -> if not (List.mem_assoc l fields) then + raise(Error(styp.ptyp_loc, env, Present_has_no_type l))) + present + end; + let row = + { row_fields = List.rev fields; row_more = newvar (); + row_bound = (); row_closed = (closed = Closed); + row_fixed = false; row_name = !name } in + let static = Btype.static_row row in + let row = + if static then { row with row_more = newty Tnil } + else if policy <> Univars then row + else { row with row_more = new_pre_univar () } + in + let ty = newty (Tvariant row) in + ctyp (Ttyp_variant (tfields, closed, present)) ty + | Ptyp_poly(vars, st) -> + let vars = List.map (fun v -> v.txt) vars in + begin_def(); + let new_univars = List.map (fun name -> name, newvar ~name ()) vars in + let old_univars = !univars in + univars := new_univars @ !univars; + let cty = transl_type env policy st in + let ty = cty.ctyp_type in + univars := old_univars; + end_def(); + generalize ty; + let ty_list = + List.fold_left + (fun tyl (name, ty1) -> + let v = Btype.proxy ty1 in + if deep_occur v ty then begin + match v.desc with + Tvar name when v.level = Btype.generic_level -> + v.desc <- Tunivar name; + v :: tyl + | _ -> + raise (Error (styp.ptyp_loc, env, Cannot_quantify (name, v))) + 
end else tyl) + [] new_univars + in + let ty' = Btype.newgenty (Tpoly(ty, List.rev ty_list)) in + unify_var env (newvar()) ty'; + ctyp (Ttyp_poly (vars, cty)) ty' + | Ptyp_package (p, l) -> + let l, mty = create_package_mty true styp.ptyp_loc env (p, l) in + let z = narrow () in + let mty = !transl_modtype env mty in + widen z; + let ptys = List.map (fun (s, pty) -> + s, transl_type env policy pty + ) l in + let path = !transl_modtype_longident styp.ptyp_loc env p.txt in + let ty = newty (Tpackage (path, + List.map (fun (s, _pty) -> s.txt) l, + List.map (fun (_,cty) -> cty.ctyp_type) ptys)) + in + ctyp (Ttyp_package { + pack_path = path; + pack_type = mty.mty_type; + pack_fields = ptys; + pack_txt = p; + }) ty + | Ptyp_extension ext -> + raise (Error_forward (Builtin_attributes.error_of_extension ext)) + +and transl_poly_type env policy t = + transl_type env policy (Ast_helper.Typ.force_poly t) + +and transl_fields env policy o fields = + let hfields = Hashtbl.create 17 in + let add_typed_field loc l ty = + try + let ty' = Hashtbl.find hfields l in + if equal env false [ty] [ty'] then () else + try unify env ty ty' + with Unify _trace -> + raise(Error(loc, env, Method_mismatch (l, ty, ty'))) + with Not_found -> + Hashtbl.add hfields l ty in + let add_field = function + | Otag (s, a, ty1) -> begin + let ty1 = + Builtin_attributes.warning_scope a + (fun () -> transl_poly_type env policy ty1) + in + let field = OTtag (s, a, ty1) in + add_typed_field ty1.ctyp_loc s.txt ty1.ctyp_type; + field + end + | Oinherit sty -> begin + let cty = transl_type env policy sty in + let nm = + match repr cty.ctyp_type with + {desc=Tconstr(p, _, _)} -> Some p + | _ -> None in + let t = expand_head env cty.ctyp_type in + match t, nm with + {desc=Tobject ({desc=(Tfield _ | Tnil) as tf}, _)}, _ -> begin + if opened_object t then + raise (Error (sty.ptyp_loc, env, Opened_object nm)); + let rec iter_add = function + | Tfield (s, _k, ty1, ty2) -> begin + add_typed_field sty.ptyp_loc s ty1; + iter_add ty2.desc + end + | Tnil -> () + | _ -> assert false in + iter_add tf; + OTinherit cty + end + | {desc=Tvar _}, Some p -> + raise (Error (sty.ptyp_loc, env, Unbound_type_constructor_2 p)) + | _ -> raise (Error (sty.ptyp_loc, env, Not_an_object t)) + end in + let object_fields = List.map add_field fields in + let fields = Hashtbl.fold (fun s ty l -> (s, ty) :: l) hfields [] in + let ty_init = + match o, policy with + | Closed, _ -> newty Tnil + | Open, Univars -> new_pre_univar () + | Open, _ -> newvar () in + let ty = List.fold_left (fun ty (s, ty') -> + newty (Tfield (s, Fpresent, ty', ty))) ty_init fields in + ty, object_fields + + +(* Make the rows "fixed" in this type, to make universal check easier *) +let rec make_fixed_univars ty = + let ty = repr ty in + if ty.level >= Btype.lowest_level then begin + Btype.mark_type_node ty; + match ty.desc with + | Tvariant row -> + let row = Btype.row_repr row in + if Btype.is_Tunivar (Btype.row_more row) then + ty.desc <- Tvariant + {row with row_fixed=true; + row_fields = List.map + (fun (s,f as p) -> match Btype.row_field_repr f with + Reither (c, tl, _m, r) -> s, Reither (c, tl, true, r) + | _ -> p) + row.row_fields}; + Btype.iter_row make_fixed_univars row + | _ -> + Btype.iter_type_expr make_fixed_univars ty + end + +let make_fixed_univars ty = + make_fixed_univars ty; + Btype.unmark_type ty + +let create_package_mty = create_package_mty false + +let globalize_used_variables env fixed = + let r = ref [] in + Tbl.iter + (fun name (ty, loc) -> + let v = new_global_var () in 
+ let snap = Btype.snapshot () in + if try unify env v ty; true with _ -> Btype.backtrack snap; false + then try + r := (loc, v, Tbl.find name !type_variables) :: !r + with Not_found -> + if fixed && Btype.is_Tvar (repr ty) then + raise(Error(loc, env, Unbound_type_variable ("'"^name))); + let v2 = new_global_var () in + r := (loc, v, v2) :: !r; + type_variables := Tbl.add name v2 !type_variables) + !used_variables; + used_variables := Tbl.empty; + fun () -> + List.iter + (function (loc, t1, t2) -> + try unify env t1 t2 with Unify trace -> + raise (Error(loc, env, Type_mismatch trace))) + !r + +let transl_simple_type env fixed styp = + univars := []; used_variables := Tbl.empty; + let typ = transl_type env (if fixed then Fixed else Extensible) styp in + globalize_used_variables env fixed (); + make_fixed_univars typ.ctyp_type; + typ + +let transl_simple_type_univars env styp = + univars := []; used_variables := Tbl.empty; pre_univars := []; + begin_def (); + let typ = transl_type env Univars styp in + (* Only keep already global variables in used_variables *) + let new_variables = !used_variables in + used_variables := Tbl.empty; + Tbl.iter + (fun name p -> + if Tbl.mem name !type_variables then + used_variables := Tbl.add name p !used_variables) + new_variables; + globalize_used_variables env false (); + end_def (); + generalize typ.ctyp_type; + let univs = + List.fold_left + (fun acc v -> + let v = repr v in + match v.desc with + Tvar name when v.level = Btype.generic_level -> + v.desc <- Tunivar name; v :: acc + | _ -> acc) + [] !pre_univars + in + make_fixed_univars typ.ctyp_type; + { typ with ctyp_type = + instance env (Btype.newgenty (Tpoly (typ.ctyp_type, univs))) } + +let transl_simple_type_delayed env styp = + univars := []; used_variables := Tbl.empty; + let typ = transl_type env Extensible styp in + make_fixed_univars typ.ctyp_type; + (typ, globalize_used_variables env false) + +let transl_type_scheme env styp = + reset_type_variables(); + begin_def(); + let typ = transl_simple_type env false styp in + end_def(); + generalize typ.ctyp_type; + typ + + +(* Error report *) + +open Format +open Printtyp + +let spellcheck ppf fold env lid = + let choices ~path name = + let env = fold (fun x xs -> x::xs) path env [] in + Misc.spellcheck env name in + match lid with + | Longident.Lapply _ -> () + | Longident.Lident s -> + Misc.did_you_mean ppf (fun () -> choices ~path:None s) + | Longident.Ldot (r, s) -> + Misc.did_you_mean ppf (fun () -> choices ~path:(Some r) s) + +let fold_descr fold get_name f = fold (fun descr acc -> f (get_name descr) acc) +let fold_simple fold4 f = fold4 (fun name _path _descr acc -> f name acc) + +let fold_values = fold_simple Env.fold_values +let fold_types = fold_simple Env.fold_types +let fold_modules = fold_simple Env.fold_modules +let fold_constructors = fold_descr Env.fold_constructors (fun d -> d.cstr_name) +let fold_labels = fold_descr Env.fold_labels (fun d -> d.lbl_name) +let fold_classs = fold_simple Env.fold_classs +let fold_modtypes = fold_simple Env.fold_modtypes +let fold_cltypes = fold_simple Env.fold_cltypes + +let report_error env ppf = function + | Unbound_type_variable name -> + (* we don't use "spellcheck" here: the function that raises this + error seems not to be called anywhere, so it's unclear how it + should be handled *) + fprintf ppf "Unbound type parameter %s@." 
name + | Unbound_type_constructor lid -> + fprintf ppf "Unbound type constructor %a" longident lid; + spellcheck ppf fold_types env lid; + | Unbound_type_constructor_2 p -> + fprintf ppf "The type constructor@ %a@ is not yet completely defined" + path p + | Type_arity_mismatch(lid, expected, provided) -> + fprintf ppf + "@[The type constructor %a@ expects %i argument(s),@ \ + but is here applied to %i argument(s)@]" + longident lid expected provided + | Bound_type_variable name -> + fprintf ppf "Already bound type parameter '%s" name + | Recursive_type -> + fprintf ppf "This type is recursive" + | Unbound_row_variable lid -> + (* we don't use "spellcheck" here: this error is not raised + anywhere so it's unclear how it should be handled *) + fprintf ppf "Unbound row variable in #%a" longident lid + | Type_mismatch trace -> + Printtyp.report_unification_error ppf Env.empty trace + (function ppf -> + fprintf ppf "This type") + (function ppf -> + fprintf ppf "should be an instance of type") + | Alias_type_mismatch trace -> + Printtyp.report_unification_error ppf Env.empty trace + (function ppf -> + fprintf ppf "This alias is bound to type") + (function ppf -> + fprintf ppf "but is used as an instance of type") + | Present_has_conjunction l -> + fprintf ppf "The present constructor %s has a conjunctive type" l + | Present_has_no_type l -> + fprintf ppf "The present constructor %s has no type" l + | Constructor_mismatch (ty, ty') -> + wrap_printing_env env (fun () -> + Printtyp.reset_and_mark_loops_list [ty; ty']; + fprintf ppf "@[%s %a@ %s@ %a@]" + "This variant type contains a constructor" + Printtyp.type_expr ty + "which should be" + Printtyp.type_expr ty') + | Not_a_variant ty -> + Printtyp.reset_and_mark_loops ty; + fprintf ppf + "@[The type %a@ does not expand to a polymorphic variant type@]" + Printtyp.type_expr ty; + begin match ty.desc with + | Tvar (Some s) -> + (* PR#7012: help the user that wrote 'Foo instead of `Foo *) + Misc.did_you_mean ppf (fun () -> ["`" ^ s]) + | _ -> () + end + | Variant_tags (lab1, lab2) -> + fprintf ppf + "@[Variant tags `%s@ and `%s have the same hash value.@ %s@]" + lab1 lab2 "Change one of them." 
+ | Invalid_variable_name name -> + fprintf ppf "The type variable name %s is not allowed in programs" name + | Cannot_quantify (name, v) -> + fprintf ppf + "@[The universal type variable '%s cannot be generalized:@ %s.@]" + name + (if Btype.is_Tvar v then "it escapes its scope" else + if Btype.is_Tunivar v then "it is already bound to another variable" + else "it is not a variable") + | Multiple_constraints_on_type s -> + fprintf ppf "Multiple constraints for type %a" longident s + | Method_mismatch (l, ty, ty') -> + wrap_printing_env env (fun () -> + Printtyp.reset_and_mark_loops_list [ty; ty']; + fprintf ppf "@[Method '%s' has type %a,@ which should be %a@]" + l Printtyp.type_expr ty Printtyp.type_expr ty') + | Unbound_value lid -> + fprintf ppf "Unbound value %a" longident lid; + spellcheck ppf fold_values env lid; + | Unbound_module lid -> + fprintf ppf "Unbound module %a" longident lid; + spellcheck ppf fold_modules env lid; + | Unbound_constructor lid -> + fprintf ppf "Unbound constructor %a" longident lid; + spellcheck ppf fold_constructors env lid; + | Unbound_label lid -> + fprintf ppf "Unbound record field %a" longident lid; + spellcheck ppf fold_labels env lid; + | Unbound_class lid -> + fprintf ppf "Unbound class %a" longident lid; + spellcheck ppf fold_classs env lid; + | Unbound_modtype lid -> + fprintf ppf "Unbound module type %a" longident lid; + spellcheck ppf fold_modtypes env lid; + | Unbound_cltype lid -> + fprintf ppf "Unbound class type %a" longident lid; + spellcheck ppf fold_cltypes env lid; + | Ill_typed_functor_application lid -> + fprintf ppf "Ill-typed functor application %a" longident lid + | Illegal_reference_to_recursive_module -> + fprintf ppf "Illegal recursive module reference" + | Access_functor_as_structure lid -> + fprintf ppf "The module %a is a functor, not a structure" longident lid + | Apply_structure_as_functor lid -> + fprintf ppf "The module %a is a structure, not a functor" longident lid + | Cannot_scrape_alias(lid, p) -> + fprintf ppf + "The module %a is an alias for module %a, which is missing" + longident lid path p + | Opened_object nm -> + fprintf ppf + "Illegal open object type%a" + (fun ppf -> function + Some p -> fprintf ppf "@ %a" path p + | None -> fprintf ppf "") nm + | Not_an_object ty -> + Printtyp.reset_and_mark_loops ty; + fprintf ppf "@[The type %a@ is not an object type@]" + Printtyp.type_expr ty + +let () = + Location.register_error_of_exn + (function + | Error (loc, env, err) -> + Some (Location.error_of_printer loc (report_error env) err) + | Error_forward err -> + Some err + | _ -> + None + ) diff --git a/res_syntax/compiler-libs-406/typetexp.mli b/res_syntax/compiler-libs-406/typetexp.mli new file mode 100644 index 0000000000..c6bc5e4302 --- /dev/null +++ b/res_syntax/compiler-libs-406/typetexp.mli @@ -0,0 +1,115 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +(* Typechecking of type expressions for the core language *) + +open Types + +val transl_simple_type: + Env.t -> bool -> Parsetree.core_type -> Typedtree.core_type +val transl_simple_type_univars: + Env.t -> Parsetree.core_type -> Typedtree.core_type +val transl_simple_type_delayed: + Env.t -> Parsetree.core_type -> Typedtree.core_type * (unit -> unit) + (* Translate a type, but leave type variables unbound. Returns + the type and a function that binds the type variable. *) +val transl_type_scheme: + Env.t -> Parsetree.core_type -> Typedtree.core_type +val reset_type_variables: unit -> unit +val type_variable: Location.t -> string -> type_expr +val transl_type_param: + Env.t -> Parsetree.core_type -> Typedtree.core_type + +type variable_context +val narrow: unit -> variable_context +val widen: variable_context -> unit + +exception Already_bound + +type error = + Unbound_type_variable of string + | Unbound_type_constructor of Longident.t + | Unbound_type_constructor_2 of Path.t + | Type_arity_mismatch of Longident.t * int * int + | Bound_type_variable of string + | Recursive_type + | Unbound_row_variable of Longident.t + | Type_mismatch of (type_expr * type_expr) list + | Alias_type_mismatch of (type_expr * type_expr) list + | Present_has_conjunction of string + | Present_has_no_type of string + | Constructor_mismatch of type_expr * type_expr + | Not_a_variant of type_expr + | Variant_tags of string * string + | Invalid_variable_name of string + | Cannot_quantify of string * type_expr + | Multiple_constraints_on_type of Longident.t + | Method_mismatch of string * type_expr * type_expr + | Unbound_value of Longident.t + | Unbound_constructor of Longident.t + | Unbound_label of Longident.t + | Unbound_module of Longident.t + | Unbound_class of Longident.t + | Unbound_modtype of Longident.t + | Unbound_cltype of Longident.t + | Ill_typed_functor_application of Longident.t + | Illegal_reference_to_recursive_module + | Access_functor_as_structure of Longident.t + | Apply_structure_as_functor of Longident.t + | Cannot_scrape_alias of Longident.t * Path.t + | Opened_object of Path.t option + | Not_an_object of type_expr + +exception Error of Location.t * Env.t * error + +val report_error: Env.t -> Format.formatter -> error -> unit + +(* Support for first-class modules. 
*) +val transl_modtype_longident: (* from Typemod *) + (Location.t -> Env.t -> Longident.t -> Path.t) ref +val transl_modtype: (* from Typemod *) + (Env.t -> Parsetree.module_type -> Typedtree.module_type) ref +val create_package_mty: + Location.t -> Env.t -> Parsetree.package_type -> + (Longident.t Asttypes.loc * Parsetree.core_type) list * + Parsetree.module_type + +val find_type: + Env.t -> Location.t -> Longident.t -> Path.t * type_declaration +val find_constructor: + Env.t -> Location.t -> Longident.t -> constructor_description +val find_all_constructors: + Env.t -> Location.t -> Longident.t -> + (constructor_description * (unit -> unit)) list +val find_label: + Env.t -> Location.t -> Longident.t -> label_description +val find_all_labels: + Env.t -> Location.t -> Longident.t -> + (label_description * (unit -> unit)) list +val find_value: + Env.t -> Location.t -> Longident.t -> Path.t * value_description +val find_class: + Env.t -> Location.t -> Longident.t -> Path.t * class_declaration +val find_module: + Env.t -> Location.t -> Longident.t -> Path.t * module_declaration +val lookup_module: + ?load:bool -> Env.t -> Location.t -> Longident.t -> Path.t +val find_modtype: + Env.t -> Location.t -> Longident.t -> Path.t * modtype_declaration +val find_class_type: + Env.t -> Location.t -> Longident.t -> Path.t * class_type_declaration + +val unbound_constructor_error: Env.t -> Longident.t Location.loc -> 'a +val unbound_label_error: Env.t -> Longident.t Location.loc -> 'a diff --git a/res_syntax/compiler-libs-406/untypeast.ml b/res_syntax/compiler-libs-406/untypeast.ml new file mode 100644 index 0000000000..e4ec51ce45 --- /dev/null +++ b/res_syntax/compiler-libs-406/untypeast.ml @@ -0,0 +1,824 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Thomas Gazagnaire (OCamlPro), Fabrice Le Fessant (INRIA Saclay) *) +(* *) +(* Copyright 2007 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +open Longident +open Asttypes +open Parsetree +open Ast_helper + +module T = Typedtree + +type mapper = { + attribute: mapper -> T.attribute -> attribute; + attributes: mapper -> T.attribute list -> attribute list; + case: mapper -> T.case -> case; + cases: mapper -> T.case list -> case list; + class_declaration: mapper -> T.class_declaration -> class_declaration; + class_description: mapper -> T.class_description -> class_description; + class_expr: mapper -> T.class_expr -> class_expr; + class_field: mapper -> T.class_field -> class_field; + class_signature: mapper -> T.class_signature -> class_signature; + class_structure: mapper -> T.class_structure -> class_structure; + class_type: mapper -> T.class_type -> class_type; + class_type_declaration: mapper -> T.class_type_declaration + -> class_type_declaration; + class_type_field: mapper -> T.class_type_field -> class_type_field; + constructor_declaration: mapper -> T.constructor_declaration + -> constructor_declaration; + expr: mapper -> T.expression -> expression; + extension_constructor: mapper -> T.extension_constructor + -> extension_constructor; + include_declaration: mapper -> T.include_declaration -> include_declaration; + include_description: mapper -> T.include_description -> include_description; + label_declaration: mapper -> T.label_declaration -> label_declaration; + location: mapper -> Location.t -> Location.t; + module_binding: mapper -> T.module_binding -> module_binding; + module_declaration: mapper -> T.module_declaration -> module_declaration; + module_expr: mapper -> T.module_expr -> module_expr; + module_type: mapper -> T.module_type -> module_type; + module_type_declaration: + mapper -> T.module_type_declaration -> module_type_declaration; + package_type: mapper -> T.package_type -> package_type; + open_description: mapper -> T.open_description -> open_description; + pat: mapper -> T.pattern -> pattern; + row_field: mapper -> T.row_field -> row_field; + object_field: mapper -> T.object_field -> object_field; + signature: mapper -> T.signature -> signature; + signature_item: mapper -> T.signature_item -> signature_item; + structure: mapper -> T.structure -> structure; + structure_item: mapper -> T.structure_item -> structure_item; + typ: mapper -> T.core_type -> core_type; + type_declaration: mapper -> T.type_declaration -> type_declaration; + type_extension: mapper -> T.type_extension -> type_extension; + type_kind: mapper -> T.type_kind -> type_kind; + value_binding: mapper -> T.value_binding -> value_binding; + value_description: mapper -> T.value_description -> value_description; + with_constraint: + mapper -> (Path.t * Longident.t Location.loc * T.with_constraint) + -> with_constraint; +} + +open T + +(* +Some notes: + + * For Pexp_function, we cannot go back to the exact original version + when there is a default argument, because the default argument is + translated in the typer. The code, if printed, will not be parsable because + new generated identifiers are not correct. + + * For Pexp_apply, it is unclear whether arguments are reordered, especially + when there are optional arguments. + +*) + + +(** Utility functions. 
*) + +let string_is_prefix sub str = + let sublen = String.length sub in + String.length str >= sublen && String.sub str 0 sublen = sub + +let map_opt f = function None -> None | Some e -> Some (f e) + +let rec lident_of_path = function + | Path.Pident id -> Longident.Lident (Ident.name id) + | Path.Pdot (p, s, _) -> Longident.Ldot (lident_of_path p, s) + | Path.Papply (p1, p2) -> + Longident.Lapply (lident_of_path p1, lident_of_path p2) + +let map_loc sub {loc; txt} = {loc = sub.location sub loc; txt} + +(** Try a name [$name$0], check if it's free, if not, increment and repeat. *) +let fresh_name s env = + let rec aux i = + let name = s ^ string_of_int i in + try + let _ = Env.lookup_value (Lident name) env in + name + with + | Not_found -> aux (i+1) + in + aux 0 + +(** Mapping functions. *) + +let constant = function + | Const_char c -> Pconst_char c + | Const_string (s,d) -> Pconst_string (s,d) + | Const_int i -> Pconst_integer (string_of_int i, None) + | Const_int32 i -> Pconst_integer (Int32.to_string i, Some 'l') + | Const_int64 i -> Pconst_integer (Int64.to_string i, Some 'L') + | Const_nativeint i -> Pconst_integer (Nativeint.to_string i, Some 'n') + | Const_float f -> Pconst_float (f,None) + +let attribute sub (s, p) = (map_loc sub s, p) +let attributes sub l = List.map (sub.attribute sub) l + +let structure sub str = + List.map (sub.structure_item sub) str.str_items + +let open_description sub od = + let loc = sub.location sub od.open_loc in + let attrs = sub.attributes sub od.open_attributes in + Opn.mk ~loc ~attrs + ~override:od.open_override + (map_loc sub od.open_txt) + +let structure_item sub item = + let loc = sub.location sub item.str_loc in + let desc = + match item.str_desc with + Tstr_eval (exp, attrs) -> Pstr_eval (sub.expr sub exp, attrs) + | Tstr_value (rec_flag, list) -> + Pstr_value (rec_flag, List.map (sub.value_binding sub) list) + | Tstr_primitive vd -> + Pstr_primitive (sub.value_description sub vd) + | Tstr_type (rec_flag, list) -> + Pstr_type (rec_flag, List.map (sub.type_declaration sub) list) + | Tstr_typext tyext -> + Pstr_typext (sub.type_extension sub tyext) + | Tstr_exception ext -> + Pstr_exception (sub.extension_constructor sub ext) + | Tstr_module mb -> + Pstr_module (sub.module_binding sub mb) + | Tstr_recmodule list -> + Pstr_recmodule (List.map (sub.module_binding sub) list) + | Tstr_modtype mtd -> + Pstr_modtype (sub.module_type_declaration sub mtd) + | Tstr_open od -> + Pstr_open (sub.open_description sub od) + | Tstr_class list -> + Pstr_class + (List.map + (fun (ci, _) -> sub.class_declaration sub ci) + list) + | Tstr_class_type list -> + Pstr_class_type + (List.map + (fun (_id, _name, ct) -> sub.class_type_declaration sub ct) + list) + | Tstr_include incl -> + Pstr_include (sub.include_declaration sub incl) + | Tstr_attribute x -> + Pstr_attribute x + in + Str.mk ~loc desc + +let value_description sub v = + let loc = sub.location sub v.val_loc in + let attrs = sub.attributes sub v.val_attributes in + Val.mk ~loc ~attrs + ~prim:v.val_prim + (map_loc sub v.val_name) + (sub.typ sub v.val_desc) + +let module_binding sub mb = + let loc = sub.location sub mb.mb_loc in + let attrs = sub.attributes sub mb.mb_attributes in + Mb.mk ~loc ~attrs + (map_loc sub mb.mb_name) + (sub.module_expr sub mb.mb_expr) + +let type_parameter sub (ct, v) = (sub.typ sub ct, v) + +let type_declaration sub decl = + let loc = sub.location sub decl.typ_loc in + let attrs = sub.attributes sub decl.typ_attributes in + Type.mk ~loc ~attrs + ~params:(List.map (type_parameter 
sub) decl.typ_params) + ~cstrs:( + List.map + (fun (ct1, ct2, loc) -> + (sub.typ sub ct1, sub.typ sub ct2, sub.location sub loc)) + decl.typ_cstrs) + ~kind:(sub.type_kind sub decl.typ_kind) + ~priv:decl.typ_private + ?manifest:(map_opt (sub.typ sub) decl.typ_manifest) + (map_loc sub decl.typ_name) + +let type_kind sub tk = match tk with + | Ttype_abstract -> Ptype_abstract + | Ttype_variant list -> + Ptype_variant (List.map (sub.constructor_declaration sub) list) + | Ttype_record list -> + Ptype_record (List.map (sub.label_declaration sub) list) + | Ttype_open -> Ptype_open + +let constructor_arguments sub = function + | Cstr_tuple l -> Pcstr_tuple (List.map (sub.typ sub) l) + | Cstr_record l -> Pcstr_record (List.map (sub.label_declaration sub) l) + +let constructor_declaration sub cd = + let loc = sub.location sub cd.cd_loc in + let attrs = sub.attributes sub cd.cd_attributes in + Type.constructor ~loc ~attrs + ~args:(constructor_arguments sub cd.cd_args) + ?res:(map_opt (sub.typ sub) cd.cd_res) + (map_loc sub cd.cd_name) + +let label_declaration sub ld = + let loc = sub.location sub ld.ld_loc in + let attrs = sub.attributes sub ld.ld_attributes in + Type.field ~loc ~attrs + ~mut:ld.ld_mutable + (map_loc sub ld.ld_name) + (sub.typ sub ld.ld_type) + +let type_extension sub tyext = + let attrs = sub.attributes sub tyext.tyext_attributes in + Te.mk ~attrs + ~params:(List.map (type_parameter sub) tyext.tyext_params) + ~priv:tyext.tyext_private + (map_loc sub tyext.tyext_txt) + (List.map (sub.extension_constructor sub) tyext.tyext_constructors) + +let extension_constructor sub ext = + let loc = sub.location sub ext.ext_loc in + let attrs = sub.attributes sub ext.ext_attributes in + Te.constructor ~loc ~attrs + (map_loc sub ext.ext_name) + (match ext.ext_kind with + | Text_decl (args, ret) -> + Pext_decl (constructor_arguments sub args, + map_opt (sub.typ sub) ret) + | Text_rebind (_p, lid) -> Pext_rebind (map_loc sub lid) + ) + +let pattern sub pat = + let loc = sub.location sub pat.pat_loc in + (* todo: fix attributes on extras *) + let attrs = sub.attributes sub pat.pat_attributes in + let desc = + match pat with + { pat_extra=[Tpat_unpack, _, _attrs]; pat_desc = Tpat_var (_,name); _ } -> + Ppat_unpack name + | { pat_extra=[Tpat_type (_path, lid), _, _attrs]; _ } -> + Ppat_type (map_loc sub lid) + | { pat_extra= (Tpat_constraint ct, _, _attrs) :: rem; _ } -> + Ppat_constraint (sub.pat sub { pat with pat_extra=rem }, + sub.typ sub ct) + | _ -> + match pat.pat_desc with + Tpat_any -> Ppat_any + | Tpat_var (id, name) -> + begin + match (Ident.name id).[0] with + 'A'..'Z' -> + Ppat_unpack name + | _ -> + Ppat_var name + end + + (* We transform (_ as x) in x if _ and x have the same location. + The compiler transforms (x:t) into (_ as x : t). + This avoids transforming a warning 27 into a 26. 
+ *) + | Tpat_alias ({pat_desc = Tpat_any; pat_loc}, _id, name) + when pat_loc = pat.pat_loc -> + Ppat_var name + + | Tpat_alias (pat, _id, name) -> + Ppat_alias (sub.pat sub pat, name) + | Tpat_constant cst -> Ppat_constant (constant cst) + | Tpat_tuple list -> + Ppat_tuple (List.map (sub.pat sub) list) + | Tpat_construct (lid, _, args) -> + Ppat_construct (map_loc sub lid, + (match args with + [] -> None + | [arg] -> Some (sub.pat sub arg) + | args -> + Some + (Pat.tuple ~loc + (List.map (sub.pat sub) args) + ) + )) + | Tpat_variant (label, pato, _) -> + Ppat_variant (label, map_opt (sub.pat sub) pato) + | Tpat_record (list, closed) -> + Ppat_record (List.map (fun (lid, _, pat) -> + map_loc sub lid, sub.pat sub pat) list, closed) + | Tpat_array list -> Ppat_array (List.map (sub.pat sub) list) + | Tpat_or (p1, p2, _) -> Ppat_or (sub.pat sub p1, sub.pat sub p2) + | Tpat_lazy p -> Ppat_lazy (sub.pat sub p) + in + Pat.mk ~loc ~attrs desc + +let exp_extra sub (extra, loc, attrs) sexp = + let loc = sub.location sub loc in + let attrs = sub.attributes sub attrs in + let desc = + match extra with + Texp_coerce (cty1, cty2) -> + Pexp_coerce (sexp, + map_opt (sub.typ sub) cty1, + sub.typ sub cty2) + | Texp_constraint cty -> + Pexp_constraint (sexp, sub.typ sub cty) + | Texp_open (ovf, _path, lid, _) -> + Pexp_open (ovf, map_loc sub lid, sexp) + | Texp_poly cto -> Pexp_poly (sexp, map_opt (sub.typ sub) cto) + | Texp_newtype s -> Pexp_newtype (mkloc s loc, sexp) + in + Exp.mk ~loc ~attrs desc + +let cases sub l = List.map (sub.case sub) l + +let case sub {c_lhs; c_guard; c_rhs} = + { + pc_lhs = sub.pat sub c_lhs; + pc_guard = map_opt (sub.expr sub) c_guard; + pc_rhs = sub.expr sub c_rhs; + } + +let value_binding sub vb = + let loc = sub.location sub vb.vb_loc in + let attrs = sub.attributes sub vb.vb_attributes in + Vb.mk ~loc ~attrs + (sub.pat sub vb.vb_pat) + (sub.expr sub vb.vb_expr) + +let expression sub exp = + let loc = sub.location sub exp.exp_loc in + let attrs = sub.attributes sub exp.exp_attributes in + let desc = + match exp.exp_desc with + Texp_ident (_path, lid, _) -> Pexp_ident (map_loc sub lid) + | Texp_constant cst -> Pexp_constant (constant cst) + | Texp_let (rec_flag, list, exp) -> + Pexp_let (rec_flag, + List.map (sub.value_binding sub) list, + sub.expr sub exp) + + (* Pexp_function can't have a label, so we split in 3 cases. *) + (* One case, no guard: It's a fun. *) + | Texp_function { arg_label; cases = [{c_lhs=p; c_guard=None; c_rhs=e}]; + _ } -> + Pexp_fun (arg_label, None, sub.pat sub p, sub.expr sub e) + (* No label: it's a function. 
*) + | Texp_function { arg_label = Nolabel; cases; _; } -> + Pexp_function (sub.cases sub cases) + (* Mix of both, we generate `fun ~label:$name$ -> match $name$ with ...` *) + | Texp_function { arg_label = Labelled s | Optional s as label; cases; + _ } -> + let name = fresh_name s exp.exp_env in + Pexp_fun (label, None, Pat.var ~loc {loc;txt = name }, + Exp.match_ ~loc (Exp.ident ~loc {loc;txt= Lident name}) + (sub.cases sub cases)) + | Texp_apply (exp, list) -> + Pexp_apply (sub.expr sub exp, + List.fold_right (fun (label, expo) list -> + match expo with + None -> list + | Some exp -> (label, sub.expr sub exp) :: list + ) list []) + | Texp_match (exp, cases, exn_cases, _) -> + let merged_cases = sub.cases sub cases + @ List.map + (fun c -> + let uc = sub.case sub c in + let pat = { uc.pc_lhs + with ppat_desc = Ppat_exception uc.pc_lhs } + in + { uc with pc_lhs = pat }) + exn_cases + in + Pexp_match (sub.expr sub exp, merged_cases) + | Texp_try (exp, cases) -> + Pexp_try (sub.expr sub exp, sub.cases sub cases) + | Texp_tuple list -> + Pexp_tuple (List.map (sub.expr sub) list) + | Texp_construct (lid, _, args) -> + Pexp_construct (map_loc sub lid, + (match args with + [] -> None + | [ arg ] -> Some (sub.expr sub arg) + | args -> + Some + (Exp.tuple ~loc (List.map (sub.expr sub) args)) + )) + | Texp_variant (label, expo) -> + Pexp_variant (label, map_opt (sub.expr sub) expo) + | Texp_record { fields; extended_expression; _ } -> + let list = Array.fold_left (fun l -> function + | _, Kept _ -> l + | _, Overridden (lid, exp) -> (lid, sub.expr sub exp) :: l) + [] fields + in + Pexp_record (list, map_opt (sub.expr sub) extended_expression) + | Texp_field (exp, lid, _label) -> + Pexp_field (sub.expr sub exp, map_loc sub lid) + | Texp_setfield (exp1, lid, _label, exp2) -> + Pexp_setfield (sub.expr sub exp1, map_loc sub lid, + sub.expr sub exp2) + | Texp_array list -> + Pexp_array (List.map (sub.expr sub) list) + | Texp_ifthenelse (exp1, exp2, expo) -> + Pexp_ifthenelse (sub.expr sub exp1, + sub.expr sub exp2, + map_opt (sub.expr sub) expo) + | Texp_sequence (exp1, exp2) -> + Pexp_sequence (sub.expr sub exp1, sub.expr sub exp2) + | Texp_while (exp1, exp2) -> + Pexp_while (sub.expr sub exp1, sub.expr sub exp2) + | Texp_for (_id, name, exp1, exp2, dir, exp3) -> + Pexp_for (name, + sub.expr sub exp1, sub.expr sub exp2, + dir, sub.expr sub exp3) + | Texp_send (exp, meth, _) -> + Pexp_send (sub.expr sub exp, match meth with + Tmeth_name name -> mkloc name loc + | Tmeth_val id -> mkloc (Ident.name id) loc) + | Texp_new (_path, lid, _) -> Pexp_new (map_loc sub lid) + | Texp_instvar (_, path, name) -> + Pexp_ident ({loc = sub.location sub name.loc ; txt = lident_of_path path}) + | Texp_setinstvar (_, _path, lid, exp) -> + Pexp_setinstvar (map_loc sub lid, sub.expr sub exp) + | Texp_override (_, list) -> + Pexp_override (List.map (fun (_path, lid, exp) -> + (map_loc sub lid, sub.expr sub exp) + ) list) + | Texp_letmodule (_id, name, mexpr, exp) -> + Pexp_letmodule (name, sub.module_expr sub mexpr, + sub.expr sub exp) + | Texp_letexception (ext, exp) -> + Pexp_letexception (sub.extension_constructor sub ext, + sub.expr sub exp) + | Texp_assert exp -> Pexp_assert (sub.expr sub exp) + | Texp_lazy exp -> Pexp_lazy (sub.expr sub exp) + | Texp_object (cl, _) -> + Pexp_object (sub.class_structure sub cl) + | Texp_pack (mexpr) -> + Pexp_pack (sub.module_expr sub mexpr) + | Texp_unreachable -> + Pexp_unreachable + | Texp_extension_constructor (lid, _) -> + Pexp_extension ({ txt = "ocaml.extension_constructor"; loc 
}, + PStr [ Str.eval ~loc + (Exp.construct ~loc (map_loc sub lid) None) + ]) + in + List.fold_right (exp_extra sub) exp.exp_extra + (Exp.mk ~loc ~attrs desc) + +let package_type sub pack = + (map_loc sub pack.pack_txt, + List.map (fun (s, ct) -> + (s, sub.typ sub ct)) pack.pack_fields) + +let module_type_declaration sub mtd = + let loc = sub.location sub mtd.mtd_loc in + let attrs = sub.attributes sub mtd.mtd_attributes in + Mtd.mk ~loc ~attrs + ?typ:(map_opt (sub.module_type sub) mtd.mtd_type) + (map_loc sub mtd.mtd_name) + +let signature sub sg = + List.map (sub.signature_item sub) sg.sig_items + +let signature_item sub item = + let loc = sub.location sub item.sig_loc in + let desc = + match item.sig_desc with + Tsig_value v -> + Psig_value (sub.value_description sub v) + | Tsig_type (rec_flag, list) -> + Psig_type (rec_flag, List.map (sub.type_declaration sub) list) + | Tsig_typext tyext -> + Psig_typext (sub.type_extension sub tyext) + | Tsig_exception ext -> + Psig_exception (sub.extension_constructor sub ext) + | Tsig_module md -> + Psig_module (sub.module_declaration sub md) + | Tsig_recmodule list -> + Psig_recmodule (List.map (sub.module_declaration sub) list) + | Tsig_modtype mtd -> + Psig_modtype (sub.module_type_declaration sub mtd) + | Tsig_open od -> + Psig_open (sub.open_description sub od) + | Tsig_include incl -> + Psig_include (sub.include_description sub incl) + | Tsig_class list -> + Psig_class (List.map (sub.class_description sub) list) + | Tsig_class_type list -> + Psig_class_type (List.map (sub.class_type_declaration sub) list) + | Tsig_attribute x -> + Psig_attribute x + in + Sig.mk ~loc desc + +let module_declaration sub md = + let loc = sub.location sub md.md_loc in + let attrs = sub.attributes sub md.md_attributes in + Md.mk ~loc ~attrs + (map_loc sub md.md_name) + (sub.module_type sub md.md_type) + +let include_infos f sub incl = + let loc = sub.location sub incl.incl_loc in + let attrs = sub.attributes sub incl.incl_attributes in + Incl.mk ~loc ~attrs + (f sub incl.incl_mod) + +let include_declaration sub = include_infos sub.module_expr sub +let include_description sub = include_infos sub.module_type sub + +let class_infos f sub ci = + let loc = sub.location sub ci.ci_loc in + let attrs = sub.attributes sub ci.ci_attributes in + Ci.mk ~loc ~attrs + ~virt:ci.ci_virt + ~params:(List.map (type_parameter sub) ci.ci_params) + (map_loc sub ci.ci_id_name) + (f sub ci.ci_expr) + +let class_declaration sub = class_infos sub.class_expr sub +let class_description sub = class_infos sub.class_type sub +let class_type_declaration sub = class_infos sub.class_type sub + +let module_type sub mty = + let loc = sub.location sub mty.mty_loc in + let attrs = sub.attributes sub mty.mty_attributes in + let desc = match mty.mty_desc with + Tmty_ident (_path, lid) -> Pmty_ident (map_loc sub lid) + | Tmty_alias (_path, lid) -> Pmty_alias (map_loc sub lid) + | Tmty_signature sg -> Pmty_signature (sub.signature sub sg) + | Tmty_functor (_id, name, mtype1, mtype2) -> + Pmty_functor (name, map_opt (sub.module_type sub) mtype1, + sub.module_type sub mtype2) + | Tmty_with (mtype, list) -> + Pmty_with (sub.module_type sub mtype, + List.map (sub.with_constraint sub) list) + | Tmty_typeof mexpr -> + Pmty_typeof (sub.module_expr sub mexpr) + in + Mty.mk ~loc ~attrs desc + +let with_constraint sub (_path, lid, cstr) = + match cstr with + | Twith_type decl -> + Pwith_type (map_loc sub lid, sub.type_declaration sub decl) + | Twith_module (_path, lid2) -> + Pwith_module (map_loc sub lid, map_loc sub 
lid2) + | Twith_typesubst decl -> + Pwith_typesubst (map_loc sub lid, sub.type_declaration sub decl) + | Twith_modsubst (_path, lid2) -> + Pwith_modsubst (map_loc sub lid, map_loc sub lid2) + +let module_expr sub mexpr = + let loc = sub.location sub mexpr.mod_loc in + let attrs = sub.attributes sub mexpr.mod_attributes in + match mexpr.mod_desc with + Tmod_constraint (m, _, Tmodtype_implicit, _ ) -> + sub.module_expr sub m + | _ -> + let desc = match mexpr.mod_desc with + Tmod_ident (_p, lid) -> Pmod_ident (map_loc sub lid) + | Tmod_structure st -> Pmod_structure (sub.structure sub st) + | Tmod_functor (_id, name, mtype, mexpr) -> + Pmod_functor (name, Misc.may_map (sub.module_type sub) mtype, + sub.module_expr sub mexpr) + | Tmod_apply (mexp1, mexp2, _) -> + Pmod_apply (sub.module_expr sub mexp1, sub.module_expr sub mexp2) + | Tmod_constraint (mexpr, _, Tmodtype_explicit mtype, _) -> + Pmod_constraint (sub.module_expr sub mexpr, + sub.module_type sub mtype) + | Tmod_constraint (_mexpr, _, Tmodtype_implicit, _) -> + assert false + | Tmod_unpack (exp, _pack) -> + Pmod_unpack (sub.expr sub exp) + (* TODO , sub.package_type sub pack) *) + in + Mod.mk ~loc ~attrs desc + +let class_expr sub cexpr = + let loc = sub.location sub cexpr.cl_loc in + let attrs = sub.attributes sub cexpr.cl_attributes in + let desc = match cexpr.cl_desc with + | Tcl_constraint ( { cl_desc = Tcl_ident (_path, lid, tyl); _ }, + None, _, _, _ ) -> + Pcl_constr (map_loc sub lid, + List.map (sub.typ sub) tyl) + | Tcl_structure clstr -> Pcl_structure (sub.class_structure sub clstr) + + | Tcl_fun (label, pat, _pv, cl, _partial) -> + Pcl_fun (label, None, sub.pat sub pat, sub.class_expr sub cl) + + | Tcl_apply (cl, args) -> + Pcl_apply (sub.class_expr sub cl, + List.fold_right (fun (label, expo) list -> + match expo with + None -> list + | Some exp -> (label, sub.expr sub exp) :: list + ) args []) + + | Tcl_let (rec_flat, bindings, _ivars, cl) -> + Pcl_let (rec_flat, + List.map (sub.value_binding sub) bindings, + sub.class_expr sub cl) + + | Tcl_constraint (cl, Some clty, _vals, _meths, _concrs) -> + Pcl_constraint (sub.class_expr sub cl, sub.class_type sub clty) + + | Tcl_open (ovf, _p, lid, _env, e) -> + Pcl_open (ovf, lid, sub.class_expr sub e) + + | Tcl_ident _ -> assert false + | Tcl_constraint (_, None, _, _, _) -> assert false + in + Cl.mk ~loc ~attrs desc + +let class_type sub ct = + let loc = sub.location sub ct.cltyp_loc in + let attrs = sub.attributes sub ct.cltyp_attributes in + let desc = match ct.cltyp_desc with + Tcty_signature csg -> Pcty_signature (sub.class_signature sub csg) + | Tcty_constr (_path, lid, list) -> + Pcty_constr (map_loc sub lid, List.map (sub.typ sub) list) + | Tcty_arrow (label, ct, cl) -> + Pcty_arrow (label, sub.typ sub ct, sub.class_type sub cl) + | Tcty_open (ovf, _p, lid, _env, e) -> + Pcty_open (ovf, lid, sub.class_type sub e) + in + Cty.mk ~loc ~attrs desc + +let class_signature sub cs = + { + pcsig_self = sub.typ sub cs.csig_self; + pcsig_fields = List.map (sub.class_type_field sub) cs.csig_fields; + } + +let class_type_field sub ctf = + let loc = sub.location sub ctf.ctf_loc in + let attrs = sub.attributes sub ctf.ctf_attributes in + let desc = match ctf.ctf_desc with + Tctf_inherit ct -> Pctf_inherit (sub.class_type sub ct) + | Tctf_val (s, mut, virt, ct) -> + Pctf_val (mkloc s loc, mut, virt, sub.typ sub ct) + | Tctf_method (s, priv, virt, ct) -> + Pctf_method (mkloc s loc, priv, virt, sub.typ sub ct) + | Tctf_constraint (ct1, ct2) -> + Pctf_constraint (sub.typ sub ct1, sub.typ 
sub ct2) + | Tctf_attribute x -> Pctf_attribute x + in + Ctf.mk ~loc ~attrs desc + +let core_type sub ct = + let loc = sub.location sub ct.ctyp_loc in + let attrs = sub.attributes sub ct.ctyp_attributes in + let desc = match ct.ctyp_desc with + Ttyp_any -> Ptyp_any + | Ttyp_var s -> Ptyp_var s + | Ttyp_arrow (label, ct1, ct2) -> + Ptyp_arrow (label, sub.typ sub ct1, sub.typ sub ct2) + | Ttyp_tuple list -> Ptyp_tuple (List.map (sub.typ sub) list) + | Ttyp_constr (_path, lid, list) -> + Ptyp_constr (map_loc sub lid, + List.map (sub.typ sub) list) + | Ttyp_object (list, o) -> + Ptyp_object + (List.map (sub.object_field sub) list, o) + | Ttyp_class (_path, lid, list) -> + Ptyp_class (map_loc sub lid, List.map (sub.typ sub) list) + | Ttyp_alias (ct, s) -> + Ptyp_alias (sub.typ sub ct, s) + | Ttyp_variant (list, bool, labels) -> + Ptyp_variant (List.map (sub.row_field sub) list, bool, labels) + | Ttyp_poly (list, ct) -> + let list = List.map (fun v -> mkloc v loc) list in + Ptyp_poly (list, sub.typ sub ct) + | Ttyp_package pack -> Ptyp_package (sub.package_type sub pack) + in + Typ.mk ~loc ~attrs desc + +let class_structure sub cs = + let rec remove_self = function + | { pat_desc = Tpat_alias (p, id, _s) } + when string_is_prefix "selfpat-" id.Ident.name -> + remove_self p + | p -> p + in + { pcstr_self = sub.pat sub (remove_self cs.cstr_self); + pcstr_fields = List.map (sub.class_field sub) cs.cstr_fields; + } + +let row_field sub rf = + match rf with + Ttag (label, attrs, bool, list) -> + Rtag (label, sub.attributes sub attrs, bool, List.map (sub.typ sub) list) + | Tinherit ct -> Rinherit (sub.typ sub ct) + +let object_field sub ofield = + match ofield with + OTtag (label, attrs, ct) -> + Otag (label, sub.attributes sub attrs, sub.typ sub ct) + | OTinherit ct -> Oinherit (sub.typ sub ct) + +and is_self_pat = function + | { pat_desc = Tpat_alias(_pat, id, _) } -> + string_is_prefix "self-" (Ident.name id) + | _ -> false + +let class_field sub cf = + let loc = sub.location sub cf.cf_loc in + let attrs = sub.attributes sub cf.cf_attributes in + let desc = match cf.cf_desc with + Tcf_inherit (ovf, cl, super, _vals, _meths) -> + Pcf_inherit (ovf, sub.class_expr sub cl, + map_opt (fun v -> mkloc v loc) super) + | Tcf_constraint (cty, cty') -> + Pcf_constraint (sub.typ sub cty, sub.typ sub cty') + | Tcf_val (lab, mut, _, Tcfk_virtual cty, _) -> + Pcf_val (lab, mut, Cfk_virtual (sub.typ sub cty)) + | Tcf_val (lab, mut, _, Tcfk_concrete (o, exp), _) -> + Pcf_val (lab, mut, Cfk_concrete (o, sub.expr sub exp)) + | Tcf_method (lab, priv, Tcfk_virtual cty) -> + Pcf_method (lab, priv, Cfk_virtual (sub.typ sub cty)) + | Tcf_method (lab, priv, Tcfk_concrete (o, exp)) -> + let remove_fun_self = function + | { exp_desc = + Texp_function { arg_label = Nolabel; cases = [case]; _ } } + when is_self_pat case.c_lhs && case.c_guard = None -> case.c_rhs + | e -> e + in + let exp = remove_fun_self exp in + Pcf_method (lab, priv, Cfk_concrete (o, sub.expr sub exp)) + | Tcf_initializer exp -> + let remove_fun_self = function + | { exp_desc = + Texp_function { arg_label = Nolabel; cases = [case]; _ } } + when is_self_pat case.c_lhs && case.c_guard = None -> case.c_rhs + | e -> e + in + let exp = remove_fun_self exp in + Pcf_initializer (sub.expr sub exp) + | Tcf_attribute x -> Pcf_attribute x + in + Cf.mk ~loc ~attrs desc + +let location _sub l = l + +let default_mapper = + { + attribute = attribute ; + attributes = attributes ; + structure = structure; + structure_item = structure_item; + module_expr = module_expr; + 
signature = signature; + signature_item = signature_item; + module_type = module_type; + with_constraint = with_constraint; + class_declaration = class_declaration; + class_expr = class_expr; + class_field = class_field; + class_structure = class_structure; + class_type = class_type; + class_type_field = class_type_field; + class_signature = class_signature; + class_type_declaration = class_type_declaration; + class_description = class_description; + type_declaration = type_declaration; + type_kind = type_kind; + typ = core_type; + type_extension = type_extension; + extension_constructor = extension_constructor; + value_description = value_description; + pat = pattern; + expr = expression; + module_declaration = module_declaration; + module_type_declaration = module_type_declaration; + module_binding = module_binding; + package_type = package_type ; + open_description = open_description; + include_description = include_description; + include_declaration = include_declaration; + value_binding = value_binding; + constructor_declaration = constructor_declaration; + label_declaration = label_declaration; + cases = cases; + case = case; + location = location; + row_field = row_field ; + object_field = object_field ; + } + +let untype_structure ?(mapper=default_mapper) structure = + mapper.structure mapper structure + +let untype_signature ?(mapper=default_mapper) signature = + mapper.signature mapper signature diff --git a/res_syntax/compiler-libs-406/untypeast.mli b/res_syntax/compiler-libs-406/untypeast.mli new file mode 100644 index 0000000000..20a6668c92 --- /dev/null +++ b/res_syntax/compiler-libs-406/untypeast.mli @@ -0,0 +1,79 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Thomas Gazagnaire (OCamlPro), Fabrice Le Fessant (INRIA Saclay) *) +(* *) +(* Copyright 2007 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +open Parsetree + +val lident_of_path : Path.t -> Longident.t + +type mapper = { + attribute: mapper -> Typedtree.attribute -> attribute; + attributes: mapper -> Typedtree.attribute list -> attribute list; + case: mapper -> Typedtree.case -> case; + cases: mapper -> Typedtree.case list -> case list; + class_declaration: mapper -> Typedtree.class_declaration -> class_declaration; + class_description: mapper -> Typedtree.class_description -> class_description; + class_expr: mapper -> Typedtree.class_expr -> class_expr; + class_field: mapper -> Typedtree.class_field -> class_field; + class_signature: mapper -> Typedtree.class_signature -> class_signature; + class_structure: mapper -> Typedtree.class_structure -> class_structure; + class_type: mapper -> Typedtree.class_type -> class_type; + class_type_declaration: mapper -> Typedtree.class_type_declaration + -> class_type_declaration; + class_type_field: mapper -> Typedtree.class_type_field -> class_type_field; + constructor_declaration: mapper -> Typedtree.constructor_declaration + -> constructor_declaration; + expr: mapper -> Typedtree.expression -> expression; + extension_constructor: mapper -> Typedtree.extension_constructor + -> extension_constructor; + include_declaration: + mapper -> Typedtree.include_declaration -> include_declaration; + include_description: + mapper -> Typedtree.include_description -> include_description; + label_declaration: + mapper -> Typedtree.label_declaration -> label_declaration; + location: mapper -> Location.t -> Location.t; + module_binding: mapper -> Typedtree.module_binding -> module_binding; + module_declaration: + mapper -> Typedtree.module_declaration -> module_declaration; + module_expr: mapper -> Typedtree.module_expr -> module_expr; + module_type: mapper -> Typedtree.module_type -> module_type; + module_type_declaration: + mapper -> Typedtree.module_type_declaration -> module_type_declaration; + package_type: mapper -> Typedtree.package_type -> package_type; + open_description: mapper -> Typedtree.open_description -> open_description; + pat: mapper -> Typedtree.pattern -> pattern; + row_field: mapper -> Typedtree.row_field -> row_field; + object_field: mapper -> Typedtree.object_field -> object_field; + signature: mapper -> Typedtree.signature -> signature; + signature_item: mapper -> Typedtree.signature_item -> signature_item; + structure: mapper -> Typedtree.structure -> structure; + structure_item: mapper -> Typedtree.structure_item -> structure_item; + typ: mapper -> Typedtree.core_type -> core_type; + type_declaration: mapper -> Typedtree.type_declaration -> type_declaration; + type_extension: mapper -> Typedtree.type_extension -> type_extension; + type_kind: mapper -> Typedtree.type_kind -> type_kind; + value_binding: mapper -> Typedtree.value_binding -> value_binding; + value_description: mapper -> Typedtree.value_description -> value_description; + with_constraint: + mapper -> (Path.t * Longident.t Location.loc * Typedtree.with_constraint) + -> with_constraint; +} + +val default_mapper : mapper + +val untype_structure : ?mapper:mapper -> Typedtree.structure -> structure +val untype_signature : ?mapper:mapper -> Typedtree.signature -> signature + +val constant : Asttypes.constant -> Parsetree.constant diff --git a/res_syntax/compiler-libs-406/warnings.ml b/res_syntax/compiler-libs-406/warnings.ml new file mode 100644 index 0000000000..23f3f1d5a6 --- /dev/null +++ b/res_syntax/compiler-libs-406/warnings.ml 
@@ -0,0 +1,650 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Pierre Weis && Damien Doligez, INRIA Rocquencourt *) +(* *) +(* Copyright 1998 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* When you change this, you need to update the documentation: + - man/ocamlc.m + - man/ocamlopt.m + - manual/manual/cmds/comp.etex + - manual/manual/cmds/native.etex +*) + +type loc = { + loc_start: Lexing.position; + loc_end: Lexing.position; + loc_ghost: bool; +} + +type t = + | Comment_start (* 1 *) + | Comment_not_end (* 2 *) + | Deprecated of string * loc * loc (* 3 *) + | Fragile_match of string (* 4 *) + | Partial_application (* 5 *) + | Labels_omitted of string list (* 6 *) + | Method_override of string list (* 7 *) + | Partial_match of string (* 8 *) + | Non_closed_record_pattern of string (* 9 *) + | Statement_type (* 10 *) + | Unused_match (* 11 *) + | Unused_pat (* 12 *) + | Instance_variable_override of string list (* 13 *) + | Illegal_backslash (* 14 *) + | Implicit_public_methods of string list (* 15 *) + | Unerasable_optional_argument (* 16 *) + | Undeclared_virtual_method of string (* 17 *) + | Not_principal of string (* 18 *) + | Without_principality of string (* 19 *) + | Unused_argument (* 20 *) + | Nonreturning_statement (* 21 *) + | Preprocessor of string (* 22 *) + | Useless_record_with (* 23 *) + | Bad_module_name of string (* 24 *) + | All_clauses_guarded (* 8, used to be 25 *) + | Unused_var of string (* 26 *) + | Unused_var_strict of string (* 27 *) + | Wildcard_arg_to_constant_constr (* 28 *) + | Eol_in_string (* 29 *) + | Duplicate_definitions of string * string * string * string (*30 *) + | Multiple_definition of string * string * string (* 31 *) + | Unused_value_declaration of string (* 32 *) + | Unused_open of string (* 33 *) + | Unused_type_declaration of string (* 34 *) + | Unused_for_index of string (* 35 *) + | Unused_ancestor of string (* 36 *) + | Unused_constructor of string * bool * bool (* 37 *) + | Unused_extension of string * bool * bool * bool (* 38 *) + | Unused_rec_flag (* 39 *) + | Name_out_of_scope of string * string list * bool (* 40 *) + | Ambiguous_name of string list * string list * bool (* 41 *) + | Disambiguated_name of string (* 42 *) + | Nonoptional_label of string (* 43 *) + | Open_shadow_identifier of string * string (* 44 *) + | Open_shadow_label_constructor of string * string (* 45 *) + | Bad_env_variable of string * string (* 46 *) + | Attribute_payload of string * string (* 47 *) + | Eliminated_optional_arguments of string list (* 48 *) + | No_cmi_file of string * string option (* 49 *) + | Bad_docstring of bool (* 50 *) + | Expect_tailcall (* 51 *) + | Fragile_literal_pattern (* 52 *) + | Misplaced_attribute of string (* 53 *) + | Duplicated_attribute of string (* 54 *) + | Inlining_impossible of string (* 55 *) + | Unreachable_case (* 56 *) + | Ambiguous_pattern of string list (* 57 *) + | No_cmx_file of string (* 58 *) + | Assignment_to_non_mutable_value (* 59 *) + | Unused_module of string (* 60 *) + | Unboxable_type_in_prim_decl of string (* 61 *) + | Constraint_on_gadt (* 62 *) +;; + +(* If you remove a warning, leave a hole in the numbering. 
NEVER change + the numbers of existing warnings. + If you add a new warning, add it at the end with a new number; + do NOT reuse one of the holes. +*) + +let number = function + | Comment_start -> 1 + | Comment_not_end -> 2 + | Deprecated _ -> 3 + | Fragile_match _ -> 4 + | Partial_application -> 5 + | Labels_omitted _ -> 6 + | Method_override _ -> 7 + | Partial_match _ -> 8 + | Non_closed_record_pattern _ -> 9 + | Statement_type -> 10 + | Unused_match -> 11 + | Unused_pat -> 12 + | Instance_variable_override _ -> 13 + | Illegal_backslash -> 14 + | Implicit_public_methods _ -> 15 + | Unerasable_optional_argument -> 16 + | Undeclared_virtual_method _ -> 17 + | Not_principal _ -> 18 + | Without_principality _ -> 19 + | Unused_argument -> 20 + | Nonreturning_statement -> 21 + | Preprocessor _ -> 22 + | Useless_record_with -> 23 + | Bad_module_name _ -> 24 + | All_clauses_guarded -> 8 (* used to be 25 *) + | Unused_var _ -> 26 + | Unused_var_strict _ -> 27 + | Wildcard_arg_to_constant_constr -> 28 + | Eol_in_string -> 29 + | Duplicate_definitions _ -> 30 + | Multiple_definition _ -> 31 + | Unused_value_declaration _ -> 32 + | Unused_open _ -> 33 + | Unused_type_declaration _ -> 34 + | Unused_for_index _ -> 35 + | Unused_ancestor _ -> 36 + | Unused_constructor _ -> 37 + | Unused_extension _ -> 38 + | Unused_rec_flag -> 39 + | Name_out_of_scope _ -> 40 + | Ambiguous_name _ -> 41 + | Disambiguated_name _ -> 42 + | Nonoptional_label _ -> 43 + | Open_shadow_identifier _ -> 44 + | Open_shadow_label_constructor _ -> 45 + | Bad_env_variable _ -> 46 + | Attribute_payload _ -> 47 + | Eliminated_optional_arguments _ -> 48 + | No_cmi_file _ -> 49 + | Bad_docstring _ -> 50 + | Expect_tailcall -> 51 + | Fragile_literal_pattern -> 52 + | Misplaced_attribute _ -> 53 + | Duplicated_attribute _ -> 54 + | Inlining_impossible _ -> 55 + | Unreachable_case -> 56 + | Ambiguous_pattern _ -> 57 + | No_cmx_file _ -> 58 + | Assignment_to_non_mutable_value -> 59 + | Unused_module _ -> 60 + | Unboxable_type_in_prim_decl _ -> 61 + | Constraint_on_gadt -> 62 +;; + +let last_warning_number = 62 +;; + +(* Must be the max number returned by the [number] function. 
*) + +let letter = function + | 'a' -> + let rec loop i = if i = 0 then [] else i :: loop (i - 1) in + loop last_warning_number + | 'b' -> [] + | 'c' -> [1; 2] + | 'd' -> [3] + | 'e' -> [4] + | 'f' -> [5] + | 'g' -> [] + | 'h' -> [] + | 'i' -> [] + | 'j' -> [] + | 'k' -> [32; 33; 34; 35; 36; 37; 38; 39] + | 'l' -> [6] + | 'm' -> [7] + | 'n' -> [] + | 'o' -> [] + | 'p' -> [8] + | 'q' -> [] + | 'r' -> [9] + | 's' -> [10] + | 't' -> [] + | 'u' -> [11; 12] + | 'v' -> [13] + | 'w' -> [] + | 'x' -> [14; 15; 16; 17; 18; 19; 20; 21; 22; 23; 24; 30] + | 'y' -> [26] + | 'z' -> [27] + | _ -> assert false +;; + +type state = + { + active: bool array; + error: bool array; + } + +let current = + ref + { + active = Array.make (last_warning_number + 1) true; + error = Array.make (last_warning_number + 1) false; + } + +let disabled = ref false + +let without_warnings f = + Misc.protect_refs [Misc.R(disabled, true)] f + +let backup () = !current + +let restore x = current := x + +let is_active x = not !disabled && (!current).active.(number x);; +let is_error x = not !disabled && (!current).error.(number x);; + +let mk_lazy f = + let state = backup () in + lazy + ( + let prev = backup () in + restore state; + try + let r = f () in + restore prev; + r + with exn -> + restore prev; + raise exn + ) + +let parse_opt error active flags s = + let set i = flags.(i) <- true in + let clear i = flags.(i) <- false in + let set_all i = active.(i) <- true; error.(i) <- true in + let error () = raise (Arg.Bad "Ill-formed list of warnings") in + let rec get_num n i = + if i >= String.length s then i, n + else match s.[i] with + | '0'..'9' -> get_num (10 * n + Char.code s.[i] - Char.code '0') (i + 1) + | _ -> i, n + in + let get_range i = + let i, n1 = get_num 0 i in + if i + 2 < String.length s && s.[i] = '.' && s.[i + 1] = '.' then + let i, n2 = get_num 0 (i + 2) in + if n2 < n1 then error (); + i, n1, n2 + else + i, n1, n1 + in + let rec loop i = + if i >= String.length s then () else + match s.[i] with + | 'A' .. 'Z' -> + List.iter set (letter (Char.lowercase_ascii s.[i])); + loop (i+1) + | 'a' .. 'z' -> + List.iter clear (letter s.[i]); + loop (i+1) + | '+' -> loop_letter_num set (i+1) + | '-' -> loop_letter_num clear (i+1) + | '@' -> loop_letter_num set_all (i+1) + | _ -> error () + and loop_letter_num myset i = + if i >= String.length s then error () else + match s.[i] with + | '0' .. '9' -> + let i, n1, n2 = get_range i in + for n = n1 to min n2 last_warning_number do myset n done; + loop i + | 'A' .. 'Z' -> + List.iter myset (letter (Char.lowercase_ascii s.[i])); + loop (i+1) + | 'a' .. 'z' -> + List.iter myset (letter s.[i]); + loop (i+1) + | _ -> error () + in + loop 0 +;; + +let parse_options errflag s = + let error = Array.copy (!current).error in + let active = Array.copy (!current).active in + parse_opt error active (if errflag then error else active) s; + current := {error; active} + +(* If you change these, don't forget to change them in man/ocamlc.m *) +let defaults_w = "+a-4-6-7-9-27-29-32..42-44-45-48-50-60";; +let defaults_warn_error = "-a+31";; + +let () = parse_options false defaults_w;; +let () = parse_options true defaults_warn_error;; + +let message = function + | Comment_start -> "this is the start of a comment." + | Comment_not_end -> "this is not the end of a comment." 
+ | Deprecated (s, _, _) -> + (* Reduce \r\n to \n: + - Prevents any \r characters being printed on Unix when processing + Windows sources + - Prevents \r\r\n being generated on Windows, which affects the + testsuite + *) + "deprecated: " ^ Misc.normalise_eol s + | Fragile_match "" -> + "this pattern-matching is fragile." + | Fragile_match s -> + "this pattern-matching is fragile.\n\ + It will remain exhaustive when constructors are added to type " ^ s ^ "." + | Partial_application -> + "this function application is partial,\n\ + maybe some arguments are missing." + | Labels_omitted [] -> assert false + | Labels_omitted [l] -> + "label " ^ l ^ " was omitted in the application of this function." + | Labels_omitted ls -> + "labels " ^ String.concat ", " ls ^ + " were omitted in the application of this function." + | Method_override [lab] -> + "the method " ^ lab ^ " is overridden." + | Method_override (cname :: slist) -> + String.concat " " + ("the following methods are overridden by the class" + :: cname :: ":\n " :: slist) + | Method_override [] -> assert false + | Partial_match "" -> "this pattern-matching is not exhaustive." + | Partial_match s -> + "this pattern-matching is not exhaustive.\n\ + Here is an example of a case that is not matched:\n" ^ s + | Non_closed_record_pattern s -> + "the following labels are not bound in this record pattern:\n" ^ s ^ + "\nEither bind these labels explicitly or add '; _' to the pattern." + | Statement_type -> + "this expression should have type unit." + | Unused_match -> "this match case is unused." + | Unused_pat -> "this sub-pattern is unused." + | Instance_variable_override [lab] -> + "the instance variable " ^ lab ^ " is overridden.\n" ^ + "The behaviour changed in ocaml 3.10 (previous behaviour was hiding.)" + | Instance_variable_override (cname :: slist) -> + String.concat " " + ("the following instance variables are overridden by the class" + :: cname :: ":\n " :: slist) ^ + "\nThe behaviour changed in ocaml 3.10 (previous behaviour was hiding.)" + | Instance_variable_override [] -> assert false + | Illegal_backslash -> "illegal backslash escape in string." + | Implicit_public_methods l -> + "the following private methods were made public implicitly:\n " + ^ String.concat " " l ^ "." + | Unerasable_optional_argument -> "this optional argument cannot be erased." + | Undeclared_virtual_method m -> "the virtual method "^m^" is not declared." + | Not_principal s -> s^" is not principal." + | Without_principality s -> s^" without principality." + | Unused_argument -> "this argument will not be used by the function." + | Nonreturning_statement -> + "this statement never returns (or has an unsound type.)" + | Preprocessor s -> s + | Useless_record_with -> + "all the fields are explicitly listed in this record:\n\ + the 'with' clause is useless." + | Bad_module_name (modname) -> + "bad source file name: \"" ^ modname ^ "\" is not a valid module name." + | All_clauses_guarded -> + "this pattern-matching is not exhaustive.\n\ + All clauses in this pattern-matching are guarded." + | Unused_var v | Unused_var_strict v -> "unused variable " ^ v ^ "." + | Wildcard_arg_to_constant_constr -> + "wildcard pattern given as argument to a constant constructor" + | Eol_in_string -> + "unescaped end-of-line in a string constant (non-portable code)" + | Duplicate_definitions (kind, cname, tc1, tc2) -> + Printf.sprintf "the %s %s is defined in both types %s and %s." 
+ kind cname tc1 tc2 + | Multiple_definition(modname, file1, file2) -> + Printf.sprintf + "files %s and %s both define a module named %s" + file1 file2 modname + | Unused_value_declaration v -> "unused value " ^ v ^ "." + | Unused_open s -> "unused open " ^ s ^ "." + | Unused_type_declaration s -> "unused type " ^ s ^ "." + | Unused_for_index s -> "unused for-loop index " ^ s ^ "." + | Unused_ancestor s -> "unused ancestor variable " ^ s ^ "." + | Unused_constructor (s, false, false) -> "unused constructor " ^ s ^ "." + | Unused_constructor (s, true, _) -> + "constructor " ^ s ^ + " is never used to build values.\n\ + (However, this constructor appears in patterns.)" + | Unused_constructor (s, false, true) -> + "constructor " ^ s ^ + " is never used to build values.\n\ + Its type is exported as a private type." + | Unused_extension (s, is_exception, cu_pattern, cu_privatize) -> + let kind = + if is_exception then "exception" else "extension constructor" in + let name = kind ^ " " ^ s in + begin match cu_pattern, cu_privatize with + | false, false -> "unused " ^ name + | true, _ -> + name ^ + " is never used to build values.\n\ + (However, this constructor appears in patterns.)" + | false, true -> + name ^ + " is never used to build values.\n\ + It is exported or rebound as a private extension." + end + | Unused_rec_flag -> + "unused rec flag." + | Name_out_of_scope (ty, [nm], false) -> + nm ^ " was selected from type " ^ ty ^ + ".\nIt is not visible in the current scope, and will not \n\ + be selected if the type becomes unknown." + | Name_out_of_scope (_, _, false) -> assert false + | Name_out_of_scope (ty, slist, true) -> + "this record of type "^ ty ^" contains fields that are \n\ + not visible in the current scope: " + ^ String.concat " " slist ^ ".\n\ + They will not be selected if the type becomes unknown." + | Ambiguous_name ([s], tl, false) -> + s ^ " belongs to several types: " ^ String.concat " " tl ^ + "\nThe first one was selected. Please disambiguate if this is wrong." + | Ambiguous_name (_, _, false) -> assert false + | Ambiguous_name (_slist, tl, true) -> + "these field labels belong to several types: " ^ + String.concat " " tl ^ + "\nThe first one was selected. Please disambiguate if this is wrong." + | Disambiguated_name s -> + "this use of " ^ s ^ " relies on type-directed disambiguation,\n\ + it will not compile with OCaml 4.00 or earlier." + | Nonoptional_label s -> + "the label " ^ s ^ " is not optional." + | Open_shadow_identifier (kind, s) -> + Printf.sprintf + "this open statement shadows the %s identifier %s (which is later used)" + kind s + | Open_shadow_label_constructor (kind, s) -> + Printf.sprintf + "this open statement shadows the %s %s (which is later used)" + kind s + | Bad_env_variable (var, s) -> + Printf.sprintf "illegal environment variable %s : %s" var s + | Attribute_payload (a, s) -> + Printf.sprintf "illegal payload for attribute '%s'.\n%s" a s + | Eliminated_optional_arguments sl -> + Printf.sprintf "implicit elimination of optional argument%s %s" + (if List.length sl = 1 then "" else "s") + (String.concat ", " sl) + | No_cmi_file(name, None) -> + "no cmi file was found in path for module " ^ name + | No_cmi_file(name, Some msg) -> + Printf.sprintf + "no valid cmi file was found in path for module %s. 
%s" + name msg + | Bad_docstring unattached -> + if unattached then "unattached documentation comment (ignored)" + else "ambiguous documentation comment" + | Expect_tailcall -> + Printf.sprintf "expected tailcall" + | Fragile_literal_pattern -> + Printf.sprintf + "Code should not depend on the actual values of\n\ + this constructor's arguments. They are only for information\n\ + and may change in future versions. (See manual section 8.5)" + | Unreachable_case -> + "this match case is unreachable.\n\ + Consider replacing it with a refutation case ' -> .'" + | Misplaced_attribute attr_name -> + Printf.sprintf "the %S attribute cannot appear in this context" attr_name + | Duplicated_attribute attr_name -> + Printf.sprintf "the %S attribute is used more than once on this \ + expression" + attr_name + | Inlining_impossible reason -> + Printf.sprintf "Cannot inline: %s" reason + | Ambiguous_pattern vars -> + let msg = + let vars = List.sort String.compare vars in + match vars with + | [] -> assert false + | [x] -> "variable " ^ x + | _::_ -> + "variables " ^ String.concat "," vars in + Printf.sprintf + "Ambiguous or-pattern variables under guard;\n\ + %s may match different arguments. (See manual section 8.5)" + msg + | No_cmx_file name -> + Printf.sprintf + "no cmx file was found in path for module %s, \ + and its interface was not compiled with -opaque" name + | Assignment_to_non_mutable_value -> + "A potential assignment to a non-mutable value was detected \n\ + in this source file. Such assignments may generate incorrect code \n\ + when using Flambda." + | Unused_module s -> "unused module " ^ s ^ "." + | Unboxable_type_in_prim_decl t -> + Printf.sprintf + "This primitive declaration uses type %s, which is unannotated and\n\ + unboxable. The representation of such types may change in future\n\ + versions. You should annotate the declaration of %s with [@@boxed]\n\ + or [@@unboxed]." t t + | Constraint_on_gadt -> + "Type constraints do not apply to GADT cases of variant types." 
+;; + +let sub_locs = function + | Deprecated (_, def, use) -> + [ + def, "Definition"; + use, "Expected signature"; + ] + | _ -> [] + +let nerrors = ref 0;; + +type reporting_information = + { number : int + ; message : string + ; is_error : bool + ; sub_locs : (loc * string) list; + } + +let report w = + match is_active w with + | false -> `Inactive + | true -> + if is_error w then incr nerrors; + `Active { number = number w; message = message w; is_error = is_error w; + sub_locs = sub_locs w; + } +;; + +exception Errors;; + +let reset_fatal () = + nerrors := 0 + +let check_fatal () = + if !nerrors > 0 then begin + nerrors := 0; + raise Errors; + end; +;; + +let descriptions = + [ + 1, "Suspicious-looking start-of-comment mark."; + 2, "Suspicious-looking end-of-comment mark."; + 3, "Deprecated feature."; + 4, "Fragile pattern matching: matching that will remain complete even\n\ + \ if additional constructors are added to one of the variant types\n\ + \ matched."; + 5, "Partially applied function: expression whose result has function\n\ + \ type and is ignored."; + 6, "Label omitted in function application."; + 7, "Method overridden."; + 8, "Partial match: missing cases in pattern-matching."; + 9, "Missing fields in a record pattern."; + 10, "Expression on the left-hand side of a sequence that doesn't have \ + type\n\ + \ \"unit\" (and that is not a function, see warning number 5)."; + 11, "Redundant case in a pattern matching (unused match case)."; + 12, "Redundant sub-pattern in a pattern-matching."; + 13, "Instance variable overridden."; + 14, "Illegal backslash escape in a string constant."; + 15, "Private method made public implicitly."; + 16, "Unerasable optional argument."; + 17, "Undeclared virtual method."; + 18, "Non-principal type."; + 19, "Type without principality."; + 20, "Unused function argument."; + 21, "Non-returning statement."; + 22, "Preprocessor warning."; + 23, "Useless record \"with\" clause."; + 24, "Bad module name: the source file name is not a valid OCaml module \ + name."; + 25, "Deprecated: now part of warning 8."; + 26, "Suspicious unused variable: unused variable that is bound\n\ + \ with \"let\" or \"as\", and doesn't start with an underscore (\"_\")\n\ + \ character."; + 27, "Innocuous unused variable: unused variable that is not bound with\n\ + \ \"let\" nor \"as\", and doesn't start with an underscore (\"_\")\n\ + \ character."; + 28, "Wildcard pattern given as argument to a constant constructor."; + 29, "Unescaped end-of-line in a string constant (non-portable code)."; + 30, "Two labels or constructors of the same name are defined in two\n\ + \ mutually recursive types."; + 31, "A module is linked twice in the same executable."; + 32, "Unused value declaration."; + 33, "Unused open statement."; + 34, "Unused type declaration."; + 35, "Unused for-loop index."; + 36, "Unused ancestor variable."; + 37, "Unused constructor."; + 38, "Unused extension constructor."; + 39, "Unused rec flag."; + 40, "Constructor or label name used out of scope."; + 41, "Ambiguous constructor or label name."; + 42, "Disambiguated constructor or label name (compatibility warning)."; + 43, "Nonoptional label applied as optional."; + 44, "Open statement shadows an already defined identifier."; + 45, "Open statement shadows an already defined label or constructor."; + 46, "Error in environment variable."; + 47, "Illegal attribute payload."; + 48, "Implicit elimination of optional arguments."; + 49, "Absent cmi file when looking up module alias."; + 50, "Unexpected documentation 
comment."; + 51, "Warning on non-tail calls if @tailcall present."; + 52, "Fragile constant pattern."; + 53, "Attribute cannot appear in this context"; + 54, "Attribute used more than once on an expression"; + 55, "Inlining impossible"; + 56, "Unreachable case in a pattern-matching (based on type information)."; + 57, "Ambiguous or-pattern variables under guard"; + 58, "Missing cmx file"; + 59, "Assignment to non-mutable value"; + 60, "Unused module declaration"; + 61, "Unboxable type in primitive declaration"; + 62, "Type constraint on GADT type declaration" + ] +;; + +let help_warnings () = + List.iter (fun (i, s) -> Printf.printf "%3i %s\n" i s) descriptions; + print_endline " A all warnings"; + for i = Char.code 'b' to Char.code 'z' do + let c = Char.chr i in + match letter c with + | [] -> () + | [n] -> + Printf.printf " %c Alias for warning %i.\n" (Char.uppercase_ascii c) n + | l -> + Printf.printf " %c warnings %s.\n" + (Char.uppercase_ascii c) + (String.concat ", " (List.map string_of_int l)) + done; + exit 0 +;; diff --git a/res_syntax/compiler-libs-406/warnings.mli b/res_syntax/compiler-libs-406/warnings.mli new file mode 100644 index 0000000000..1171f8b3f3 --- /dev/null +++ b/res_syntax/compiler-libs-406/warnings.mli @@ -0,0 +1,118 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Pierre Weis && Damien Doligez, INRIA Rocquencourt *) +(* *) +(* Copyright 1998 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +type loc = { + loc_start: Lexing.position; + loc_end: Lexing.position; + loc_ghost: bool; +} + +type t = + | Comment_start (* 1 *) + | Comment_not_end (* 2 *) + | Deprecated of string * loc * loc (* 3 *) + | Fragile_match of string (* 4 *) + | Partial_application (* 5 *) + | Labels_omitted of string list (* 6 *) + | Method_override of string list (* 7 *) + | Partial_match of string (* 8 *) + | Non_closed_record_pattern of string (* 9 *) + | Statement_type (* 10 *) + | Unused_match (* 11 *) + | Unused_pat (* 12 *) + | Instance_variable_override of string list (* 13 *) + | Illegal_backslash (* 14 *) + | Implicit_public_methods of string list (* 15 *) + | Unerasable_optional_argument (* 16 *) + | Undeclared_virtual_method of string (* 17 *) + | Not_principal of string (* 18 *) + | Without_principality of string (* 19 *) + | Unused_argument (* 20 *) + | Nonreturning_statement (* 21 *) + | Preprocessor of string (* 22 *) + | Useless_record_with (* 23 *) + | Bad_module_name of string (* 24 *) + | All_clauses_guarded (* 8, used to be 25 *) + | Unused_var of string (* 26 *) + | Unused_var_strict of string (* 27 *) + | Wildcard_arg_to_constant_constr (* 28 *) + | Eol_in_string (* 29 *) + | Duplicate_definitions of string * string * string * string (* 30 *) + | Multiple_definition of string * string * string (* 31 *) + | Unused_value_declaration of string (* 32 *) + | Unused_open of string (* 33 *) + | Unused_type_declaration of string (* 34 *) + | Unused_for_index of string (* 35 *) + | Unused_ancestor of string (* 36 *) + | Unused_constructor of string * bool * bool (* 37 *) + | Unused_extension of string * bool * bool * bool (* 38 *) + | Unused_rec_flag (* 39 *) + | Name_out_of_scope of string * 
string list * bool (* 40 *) + | Ambiguous_name of string list * string list * bool (* 41 *) + | Disambiguated_name of string (* 42 *) + | Nonoptional_label of string (* 43 *) + | Open_shadow_identifier of string * string (* 44 *) + | Open_shadow_label_constructor of string * string (* 45 *) + | Bad_env_variable of string * string (* 46 *) + | Attribute_payload of string * string (* 47 *) + | Eliminated_optional_arguments of string list (* 48 *) + | No_cmi_file of string * string option (* 49 *) + | Bad_docstring of bool (* 50 *) + | Expect_tailcall (* 51 *) + | Fragile_literal_pattern (* 52 *) + | Misplaced_attribute of string (* 53 *) + | Duplicated_attribute of string (* 54 *) + | Inlining_impossible of string (* 55 *) + | Unreachable_case (* 56 *) + | Ambiguous_pattern of string list (* 57 *) + | No_cmx_file of string (* 58 *) + | Assignment_to_non_mutable_value (* 59 *) + | Unused_module of string (* 60 *) + | Unboxable_type_in_prim_decl of string (* 61 *) + | Constraint_on_gadt (* 62 *) +;; + +val parse_options : bool -> string -> unit;; + +val without_warnings : (unit -> 'a) -> 'a + +val is_active : t -> bool;; +val is_error : t -> bool;; + +val defaults_w : string;; +val defaults_warn_error : string;; + +type reporting_information = + { number : int + ; message : string + ; is_error : bool + ; sub_locs : (loc * string) list; + } + +val report : t -> [ `Active of reporting_information | `Inactive ] + +exception Errors;; + +val check_fatal : unit -> unit;; +val reset_fatal: unit -> unit + +val help_warnings: unit -> unit + +type state +val backup: unit -> state +val restore: state -> unit +val mk_lazy: (unit -> 'a) -> 'a Lazy.t + (** Like [Lazy.of_fun], but the function is applied with + the warning settings at the time [mk_lazy] is called. *) diff --git a/res_syntax/compiler-libs-406/weak.ml b/res_syntax/compiler-libs-406/weak.ml new file mode 100644 index 0000000000..4ade095acb --- /dev/null +++ b/res_syntax/compiler-libs-406/weak.ml @@ -0,0 +1,336 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Damien Doligez, projet Para, INRIA Rocquencourt *) +(* *) +(* Copyright 1997 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
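(* Illustrative sketch, not part of the vendored file: the two layers this
   file implements, assuming it is linked as module [Weak].

     (* Low-level weak arrays: pointers the GC may erase at any time. *)
     let arr : string Weak.t = Weak.create 10 in
     Weak.set arr 0 (Some "hello");
     (match Weak.get arr 0 with
      | Some s -> print_endline s       (* still alive *)
      | None -> print_endline "erased");

     (* Weak hash sets via [Weak.Make]: share structurally equal values. *)
     let module StringPool =
       Weak.Make (struct
         type t = string
         let equal = String.equal
         let hash = Hashtbl.hash
       end)
     in
     let pool = StringPool.create 16 in
     ignore (StringPool.merge pool "shared")
*)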
*) +(* *) +(**************************************************************************) + +(** Weak array operations *) + +type 'a t + +external create : int -> 'a t = "caml_weak_create" + +(** number of additional values in a weak pointer *) +let additional_values = 2 + +let length x = Obj.size(Obj.repr x) - additional_values + +external set : 'a t -> int -> 'a option -> unit = "caml_weak_set" +external get : 'a t -> int -> 'a option = "caml_weak_get" +external get_copy : 'a t -> int -> 'a option = "caml_weak_get_copy" +external check : 'a t -> int -> bool = "caml_weak_check" +external blit : 'a t -> int -> 'a t -> int -> int -> unit = "caml_weak_blit" +(* blit: src srcoff dst dstoff len *) + +let fill ar ofs len x = + if ofs < 0 || len < 0 || ofs + len > length ar + then raise (Invalid_argument "Weak.fill") + else begin + for i = ofs to (ofs + len - 1) do + set ar i x + done + end + + +(** Weak hash tables *) + +module type S = sig + type data + type t + val create : int -> t + val clear : t -> unit + val merge : t -> data -> data + val add : t -> data -> unit + val remove : t -> data -> unit + val find : t -> data -> data + val find_opt : t -> data -> data option + val find_all : t -> data -> data list + val mem : t -> data -> bool + val iter : (data -> unit) -> t -> unit + val fold : (data -> 'a -> 'a) -> t -> 'a -> 'a + val count : t -> int + val stats : t -> int * int * int * int * int * int +end + +module Make (H : Hashtbl.HashedType) : (S with type data = H.t) = struct + + type 'a weak_t = 'a t + let weak_create = create + let emptybucket = weak_create 0 + + type data = H.t + + type t = { + mutable table : data weak_t array; + mutable hashes : int array array; + mutable limit : int; (* bucket size limit *) + mutable oversize : int; (* number of oversize buckets *) + mutable rover : int; (* for internal bookkeeping *) + } + + let get_index t h = (h land max_int) mod (Array.length t.table) + + let limit = 7 + let over_limit = 2 + + let create sz = + let sz = if sz < 7 then 7 else sz in + let sz = if sz > Sys.max_array_length then Sys.max_array_length else sz in + { + table = Array.make sz emptybucket; + hashes = Array.make sz [| |]; + limit = limit; + oversize = 0; + rover = 0; + } + + let clear t = + for i = 0 to Array.length t.table - 1 do + t.table.(i) <- emptybucket; + t.hashes.(i) <- [| |]; + done; + t.limit <- limit; + t.oversize <- 0 + + + let fold f t init = + let rec fold_bucket i b accu = + if i >= length b then accu else + match get b i with + | Some v -> fold_bucket (i+1) b (f v accu) + | None -> fold_bucket (i+1) b accu + in + Array.fold_right (fold_bucket 0) t.table init + + + let iter f t = + let rec iter_bucket i b = + if i >= length b then () else + match get b i with + | Some v -> f v; iter_bucket (i+1) b + | None -> iter_bucket (i+1) b + in + Array.iter (iter_bucket 0) t.table + + + let iter_weak f t = + let rec iter_bucket i j b = + if i >= length b then () else + match check b i with + | true -> f b t.hashes.(j) i; iter_bucket (i+1) j b + | false -> iter_bucket (i+1) j b + in + Array.iteri (iter_bucket 0) t.table + + + let rec count_bucket i b accu = + if i >= length b then accu else + count_bucket (i+1) b (accu + (if check b i then 1 else 0)) + + + let count t = + Array.fold_right (count_bucket 0) t.table 0 + + + let next_sz n = min (3 * n / 2 + 3) Sys.max_array_length + let prev_sz n = ((n - 3) * 2 + 2) / 3 + + let test_shrink_bucket t = + let bucket = t.table.(t.rover) in + let hbucket = t.hashes.(t.rover) in + let len = length bucket in + let prev_len = 
prev_sz len in + let live = count_bucket 0 bucket 0 in + if live <= prev_len then begin + let rec loop i j = + if j >= prev_len then begin + if check bucket i then loop (i + 1) j + else if check bucket j then begin + blit bucket j bucket i 1; + hbucket.(i) <- hbucket.(j); + loop (i + 1) (j - 1); + end else loop i (j - 1); + end; + in + loop 0 (length bucket - 1); + if prev_len = 0 then begin + t.table.(t.rover) <- emptybucket; + t.hashes.(t.rover) <- [| |]; + end else begin + Obj.truncate (Obj.repr bucket) (prev_len + additional_values); + Obj.truncate (Obj.repr hbucket) prev_len; + end; + if len > t.limit && prev_len <= t.limit then t.oversize <- t.oversize - 1; + end; + t.rover <- (t.rover + 1) mod (Array.length t.table) + + + let rec resize t = + let oldlen = Array.length t.table in + let newlen = next_sz oldlen in + if newlen > oldlen then begin + let newt = create newlen in + let add_weak ob oh oi = + let setter nb ni _ = blit ob oi nb ni 1 in + let h = oh.(oi) in + add_aux newt setter None h (get_index newt h); + in + iter_weak add_weak t; + t.table <- newt.table; + t.hashes <- newt.hashes; + t.limit <- newt.limit; + t.oversize <- newt.oversize; + t.rover <- t.rover mod Array.length newt.table; + end else begin + t.limit <- max_int; (* maximum size already reached *) + t.oversize <- 0; + end + + and add_aux t setter d h index = + let bucket = t.table.(index) in + let hashes = t.hashes.(index) in + let sz = length bucket in + let rec loop i = + if i >= sz then begin + let newsz = + min (3 * sz / 2 + 3) (Sys.max_array_length - additional_values) + in + if newsz <= sz then failwith "Weak.Make: hash bucket cannot grow more"; + let newbucket = weak_create newsz in + let newhashes = Array.make newsz 0 in + blit bucket 0 newbucket 0 sz; + Array.blit hashes 0 newhashes 0 sz; + setter newbucket sz d; + newhashes.(sz) <- h; + t.table.(index) <- newbucket; + t.hashes.(index) <- newhashes; + if sz <= t.limit && newsz > t.limit then begin + t.oversize <- t.oversize + 1; + for _i = 0 to over_limit do test_shrink_bucket t done; + end; + if t.oversize > Array.length t.table / over_limit then resize t; + end else if check bucket i then begin + loop (i + 1) + end else begin + setter bucket i d; + hashes.(i) <- h; + end; + in + loop 0 + + + let add t d = + let h = H.hash d in + add_aux t set (Some d) h (get_index t h) + + + let find_or t d ifnotfound = + let h = H.hash d in + let index = get_index t h in + let bucket = t.table.(index) in + let hashes = t.hashes.(index) in + let sz = length bucket in + let rec loop i = + if i >= sz then ifnotfound h index + else if h = hashes.(i) then begin + match get_copy bucket i with + | Some v when H.equal v d + -> begin match get bucket i with + | Some v -> v + | None -> loop (i + 1) + end + | _ -> loop (i + 1) + end else loop (i + 1) + in + loop 0 + + + let merge t d = + find_or t d (fun h index -> add_aux t set (Some d) h index; d) + + + let find t d = find_or t d (fun _h _index -> raise Not_found) + + let find_opt t d = + let h = H.hash d in + let index = get_index t h in + let bucket = t.table.(index) in + let hashes = t.hashes.(index) in + let sz = length bucket in + let rec loop i = + if i >= sz then None + else if h = hashes.(i) then begin + match get_copy bucket i with + | Some v when H.equal v d + -> begin match get bucket i with + | Some _ as v -> v + | None -> loop (i + 1) + end + | _ -> loop (i + 1) + end else loop (i + 1) + in + loop 0 + + + let find_shadow t d iffound ifnotfound = + let h = H.hash d in + let index = get_index t h in + let bucket = 
t.table.(index) in + let hashes = t.hashes.(index) in + let sz = length bucket in + let rec loop i = + if i >= sz then ifnotfound + else if h = hashes.(i) then begin + match get_copy bucket i with + | Some v when H.equal v d -> iffound bucket i + | _ -> loop (i + 1) + end else loop (i + 1) + in + loop 0 + + + let remove t d = find_shadow t d (fun w i -> set w i None) () + + + let mem t d = find_shadow t d (fun _w _i -> true) false + + + let find_all t d = + let h = H.hash d in + let index = get_index t h in + let bucket = t.table.(index) in + let hashes = t.hashes.(index) in + let sz = length bucket in + let rec loop i accu = + if i >= sz then accu + else if h = hashes.(i) then begin + match get_copy bucket i with + | Some v when H.equal v d + -> begin match get bucket i with + | Some v -> loop (i + 1) (v :: accu) + | None -> loop (i + 1) accu + end + | _ -> loop (i + 1) accu + end else loop (i + 1) accu + in + loop 0 [] + + + let stats t = + let len = Array.length t.table in + let lens = Array.map length t.table in + Array.sort compare lens; + let totlen = Array.fold_left ( + ) 0 lens in + (len, count t, totlen, lens.(0), lens.(len/2), lens.(len-1)) + + +end diff --git a/res_syntax/compiler-libs-406/weak.mli b/res_syntax/compiler-libs-406/weak.mli new file mode 100644 index 0000000000..842520aa10 --- /dev/null +++ b/res_syntax/compiler-libs-406/weak.mli @@ -0,0 +1,185 @@ +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Damien Doligez, projet Para, INRIA Rocquencourt *) +(* *) +(* Copyright 1997 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Arrays of weak pointers and hash sets of weak pointers. *) + + +(** {1 Low-level functions} *) + +type 'a t +(** The type of arrays of weak pointers (weak arrays). A weak + pointer is a value that the garbage collector may erase whenever + the value is not used any more (through normal pointers) by the + program. Note that finalisation functions are run after the + weak pointers are erased. + + A weak pointer is said to be full if it points to a value, + empty if the value was erased by the GC. + + Notes: + - Integers are not allocated and cannot be stored in weak arrays. + - Weak arrays cannot be marshaled using {!Pervasives.output_value} + nor the functions of the {!Marshal} module. +*) + + +val create : int -> 'a t +(** [Weak.create n] returns a new weak array of length [n]. + All the pointers are initially empty. Raise [Invalid_argument] + if [n] is negative or greater than {!Sys.max_array_length}[-1].*) + +val length : 'a t -> int +(** [Weak.length ar] returns the length (number of elements) of + [ar].*) + +val set : 'a t -> int -> 'a option -> unit +(** [Weak.set ar n (Some el)] sets the [n]th cell of [ar] to be a + (full) pointer to [el]; [Weak.set ar n None] sets the [n]th + cell of [ar] to empty. + Raise [Invalid_argument "Weak.set"] if [n] is not in the range + 0 to {!Weak.length}[ a - 1].*) + +val get : 'a t -> int -> 'a option +(** [Weak.get ar n] returns None if the [n]th cell of [ar] is + empty, [Some x] (where [x] is the value) if it is full. 
+ Raise [Invalid_argument "Weak.get"] if [n] is not in the range + 0 to {!Weak.length}[ a - 1].*) + +val get_copy : 'a t -> int -> 'a option +(** [Weak.get_copy ar n] returns None if the [n]th cell of [ar] is + empty, [Some x] (where [x] is a (shallow) copy of the value) if + it is full. + In addition to pitfalls with mutable values, the interesting + difference with [get] is that [get_copy] does not prevent + the incremental GC from erasing the value in its current cycle + ([get] may delay the erasure to the next GC cycle). + Raise [Invalid_argument "Weak.get"] if [n] is not in the range + 0 to {!Weak.length}[ a - 1]. + + If the element is a custom block it is not copied. + +*) + + +val check : 'a t -> int -> bool +(** [Weak.check ar n] returns [true] if the [n]th cell of [ar] is + full, [false] if it is empty. Note that even if [Weak.check ar n] + returns [true], a subsequent {!Weak.get}[ ar n] can return [None].*) + +val fill : 'a t -> int -> int -> 'a option -> unit +(** [Weak.fill ar ofs len el] sets to [el] all pointers of [ar] from + [ofs] to [ofs + len - 1]. Raise [Invalid_argument "Weak.fill"] + if [ofs] and [len] do not designate a valid subarray of [a].*) + +val blit : 'a t -> int -> 'a t -> int -> int -> unit +(** [Weak.blit ar1 off1 ar2 off2 len] copies [len] weak pointers + from [ar1] (starting at [off1]) to [ar2] (starting at [off2]). + It works correctly even if [ar1] and [ar2] are the same. + Raise [Invalid_argument "Weak.blit"] if [off1] and [len] do + not designate a valid subarray of [ar1], or if [off2] and [len] + do not designate a valid subarray of [ar2].*) + + +(** {1 Weak hash sets} *) + +(** A weak hash set is a hashed set of values. Each value may + magically disappear from the set when it is not used by the + rest of the program any more. This is normally used to share + data structures without inducing memory leaks. + Weak hash sets are defined on values from a {!Hashtbl.HashedType} + module; the [equal] relation and [hash] function are taken from that + module. We will say that [v] is an instance of [x] if [equal x v] + is [true]. + + The [equal] relation must be able to work on a shallow copy of + the values and give the same result as with the values themselves. + *) + +module type S = sig + type data + (** The type of the elements stored in the table. *) + + type t + (** The type of tables that contain elements of type [data]. + Note that weak hash sets cannot be marshaled using + {!Pervasives.output_value} or the functions of the {!Marshal} + module. *) + + val create : int -> t + (** [create n] creates a new empty weak hash set, of initial + size [n]. The table will grow as needed. *) + + val clear : t -> unit + (** Remove all elements from the table. *) + + val merge : t -> data -> data + (** [merge t x] returns an instance of [x] found in [t] if any, + or else adds [x] to [t] and return [x]. *) + + val add : t -> data -> unit + (** [add t x] adds [x] to [t]. If there is already an instance + of [x] in [t], it is unspecified which one will be + returned by subsequent calls to [find] and [merge]. *) + + val remove : t -> data -> unit + (** [remove t x] removes from [t] one instance of [x]. Does + nothing if there is no instance of [x] in [t]. *) + + val find : t -> data -> data + (** [find t x] returns an instance of [x] found in [t]. + Raise [Not_found] if there is no such element. *) + + val find_opt: t -> data -> data option + (** [find_opt t x] returns an instance of [x] found in [t] + or [None] if there is no such element. 
+ @since 4.05 + *) + + val find_all : t -> data -> data list + (** [find_all t x] returns a list of all the instances of [x] + found in [t]. *) + + val mem : t -> data -> bool + (** [mem t x] returns [true] if there is at least one instance + of [x] in [t], false otherwise. *) + + val iter : (data -> unit) -> t -> unit + (** [iter f t] calls [f] on each element of [t], in some unspecified + order. It is not specified what happens if [f] tries to change + [t] itself. *) + + val fold : (data -> 'a -> 'a) -> t -> 'a -> 'a + (** [fold f t init] computes [(f d1 (... (f dN init)))] where + [d1 ... dN] are the elements of [t] in some unspecified order. + It is not specified what happens if [f] tries to change [t] + itself. *) + + val count : t -> int + (** Count the number of elements in the table. [count t] gives the + same result as [fold (fun _ n -> n+1) t 0] but does not delay the + deallocation of the dead elements. *) + + val stats : t -> int * int * int * int * int * int + (** Return statistics on the table. The numbers are, in order: + table length, number of entries, sum of bucket lengths, + smallest bucket length, median bucket length, biggest bucket length. *) +end +(** The output signature of the functor {!Weak.Make}. *) + +module Make (H : Hashtbl.HashedType) : S with type data = H.t +(** Functor building an implementation of the weak hash set structure. + [H.equal] can't be the physical equality, since only shallow + copies of the elements in the set are given to it. + *) diff --git a/res_syntax/dune b/res_syntax/dune new file mode 100644 index 0000000000..71b7d10cee --- /dev/null +++ b/res_syntax/dune @@ -0,0 +1,9 @@ +(dirs compiler-libs-406 src cli benchmarks testrunner) + +(env + (dev + (flags + (:standard -w +a-4-42-40-9-48))) + (release + (flags + (:standard -w +a-4-42-40-9-48)))) diff --git a/res_syntax/dune-project b/res_syntax/dune-project new file mode 100644 index 0000000000..d30f242f24 --- /dev/null +++ b/res_syntax/dune-project @@ -0,0 +1,27 @@ +(lang dune 2.3) + +(name rescript-syntax) + +(generate_opam_files true) + +(license MIT) + +(authors "Maxim Valcke ") + +(maintainers "Maxim Valcke ") + +(homepage "https://github.com/rescript-lang/syntax") + +(bug_reports "https://github.com/rescript-lang/syntax/issues") + +(package + (name rescript-syntax) + (synopsis "ReScript parser/printer") + (depends + (ocaml + (>= 4.10)) + (ocamlformat + (= 0.22.4)) + (reanalyze + (= 2.23.0)) + dune)) diff --git a/res_syntax/rescript-syntax.opam b/res_syntax/rescript-syntax.opam new file mode 100644 index 0000000000..1113a8fffb --- /dev/null +++ b/res_syntax/rescript-syntax.opam @@ -0,0 +1,28 @@ +# This file is generated by dune, edit dune-project instead +opam-version: "2.0" +synopsis: "ReScript parser/printer" +maintainer: ["Maxim Valcke "] +authors: ["Maxim Valcke "] +license: "MIT" +homepage: "https://github.com/rescript-lang/syntax" +bug-reports: "https://github.com/rescript-lang/syntax/issues" +depends: [ + "ocaml" {>= "4.10"} + "ocamlformat" {= "0.22.4"} + "reanalyze" {= "2.23.0"} + "dune" +] +build: [ + ["dune" "subst"] {pinned} + [ + "dune" + "build" + "-p" + name + "-j" + jobs + "@install" + "@runtest" {with-test} + "@doc" {with-doc} + ] +] diff --git a/res_syntax/scripts/bootstrap.sh b/res_syntax/scripts/bootstrap.sh new file mode 100755 index 0000000000..180428a07d --- /dev/null +++ b/res_syntax/scripts/bootstrap.sh @@ -0,0 +1,9 @@ +#!/bin/zsh + +# pack and parse the whole codebase using the compiler itself. 
Kind of a test +rm -rf ./bootstrap +mkdir ./bootstrap + +ocaml unix.cma ./scripts/bspack.ml -bs-main Res_cli -I cli -I src -o ./bootstrap/rescript.ml +rescript ./bootstrap/rescript.ml > ./bootstrap/rescript.res +ocamlopt.opt -w a -pp "rescript -print binary" -O2 -o rescript -I +compiler-libs ocamlcommon.cmxa -I lib -impl ./bootstrap/rescript.res diff --git a/res_syntax/scripts/bspack.ml b/res_syntax/scripts/bspack.ml new file mode 100644 index 0000000000..d0a635d8e1 --- /dev/null +++ b/res_syntax/scripts/bspack.ml @@ -0,0 +1,32248 @@ +module Arg_helper : sig +#1 "arg_helper.mli" +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Pierre Chambart, OCamlPro *) +(* Mark Shinwell and Leo White, Jane Street Europe *) +(* *) +(* Copyright 2015--2016 OCamlPro SAS *) +(* Copyright 2015--2016 Jane Street Group LLC *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Decipher command line arguments of the form + | =[,...] + (as used for example for the specification of inlining parameters + varying by simplification round). +*) + +module Make (S : sig + module Key : sig + type t + + (** The textual representation of a key must not contain '=' or ','. *) + val of_string : string -> t + + module Map : Map.S with type key = t + end + + module Value : sig + type t + + (** The textual representation of a value must not contain ','. *) + val of_string : string -> t + end +end) : sig + type parsed + + val default : S.Value.t -> parsed + + val set_base_default : S.Value.t -> parsed -> parsed + + val add_base_override : S.Key.t -> S.Value.t -> parsed -> parsed + + val reset_base_overrides : parsed -> parsed + + val set_user_default : S.Value.t -> parsed -> parsed + + val add_user_override : S.Key.t -> S.Value.t -> parsed -> parsed + + val parse : string -> string -> parsed ref -> unit + + type parse_result = + | Ok + | Parse_failed of exn + + val parse_no_error : string -> parsed ref -> parse_result + + val get : key:S.Key.t -> parsed -> S.Value.t +end + +end = struct +#1 "arg_helper.ml" +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Pierre Chambart, OCamlPro *) +(* Mark Shinwell and Leo White, Jane Street Europe *) +(* *) +(* Copyright 2015--2016 OCamlPro SAS *) +(* Copyright 2015--2016 Jane Street Group LLC *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
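(* Illustrative sketch, not part of the vendored file: instantiating the
   [Make] functor specified above (here assumed to be reachable as
   [Arg_helper.Make]) with integer keys and values, in the spirit of the
   per-round inlining parameters mentioned in the interface.

     module Int_arg = Arg_helper.Make (struct
       module Key = struct
         type t = int
         let of_string = int_of_string
         module Map = Map.Make (struct type t = int let compare = compare end)
       end
       module Value = struct
         type t = int
         let of_string = int_of_string
       end
     end)

     let setting = ref (Int_arg.default 10)
     (* "50,2=100": the default becomes 50, key 2 is overridden to 100 *)
     let () = Int_arg.parse "50,2=100" "Syntax: <n> | <key>=<n>,..." setting
     let at_key_2 = Int_arg.get ~key:2 !setting (* 100 *)
     let elsewhere = Int_arg.get ~key:0 !setting (* 50 *)
*)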
*) +(* *) +(**************************************************************************) + +let fatal err = + prerr_endline err; + exit 2 + +module Make (S : sig + module Key : sig + type t + val of_string : string -> t + module Map : Map.S with type key = t + end + + module Value : sig + type t + val of_string : string -> t + end +end) = struct + type parsed = { + base_default : S.Value.t; + base_override : S.Value.t S.Key.Map.t; + user_default : S.Value.t option; + user_override : S.Value.t S.Key.Map.t; + } + + let default v = + { base_default = v; + base_override = S.Key.Map.empty; + user_default = None; + user_override = S.Key.Map.empty; } + + let set_base_default value t = + { t with base_default = value } + + let add_base_override key value t = + { t with base_override = S.Key.Map.add key value t.base_override } + + let reset_base_overrides t = + { t with base_override = S.Key.Map.empty } + + let set_user_default value t = + { t with user_default = Some value } + + let add_user_override key value t = + { t with user_override = S.Key.Map.add key value t.user_override } + + exception Parse_failure of exn + + let parse_exn str ~update = + (* Is the removal of empty chunks really relevant here? *) + (* (It has been added to mimic the old Misc.String.split.) *) + let values = String.split_on_char ',' str |> List.filter ((<>) "") in + let parsed = + List.fold_left (fun acc value -> + match String.index value '=' with + | exception Not_found -> + begin match S.Value.of_string value with + | value -> set_user_default value acc + | exception exn -> raise (Parse_failure exn) + end + | equals -> + let key_value_pair = value in + let length = String.length key_value_pair in + assert (equals >= 0 && equals < length); + if equals = 0 then begin + raise (Parse_failure ( + Failure "Missing key in argument specification")) + end; + let key = + let key = String.sub key_value_pair 0 equals in + try S.Key.of_string key + with exn -> raise (Parse_failure exn) + in + let value = + let value = + String.sub key_value_pair (equals + 1) (length - equals - 1) + in + try S.Value.of_string value + with exn -> raise (Parse_failure exn) + in + add_user_override key value acc) + !update + values + in + update := parsed + + let parse str help_text update = + match parse_exn str ~update with + | () -> () + | exception (Parse_failure exn) -> + fatal (Printf.sprintf "%s: %s" (Printexc.to_string exn) help_text) + + type parse_result = + | Ok + | Parse_failed of exn + + let parse_no_error str update = + match parse_exn str ~update with + | () -> Ok + | exception (Parse_failure exn) -> Parse_failed exn + + let get ~key parsed = + match S.Key.Map.find key parsed.user_override with + | value -> value + | exception Not_found -> + match parsed.user_default with + | Some value -> value + | None -> + match S.Key.Map.find key parsed.base_override with + | value -> value + | exception Not_found -> parsed.base_default + +end + +end +module Config_whole_compiler : sig +#1 "config_whole_compiler.mli" +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +(* System configuration *) + +val version: string + (* The current version number of the system *) + +val standard_library: string + (* The directory containing the standard libraries *) + +val bs_only : bool ref + +val standard_runtime: string + (* The full path to the standard bytecode interpreter ocamlrun *) +val ccomp_type: string + (* The "kind" of the C compiler, assembler and linker used: one of + "cc" (for Unix-style C compilers) + "msvc" (for Microsoft Visual C++ and MASM) *) +val c_compiler: string + (* The compiler to use for compiling C files *) +val c_output_obj: string + (* Name of the option of the C compiler for specifying the output file *) +val ocamlc_cflags : string + (* The flags ocamlc should pass to the C compiler *) +val ocamlc_cppflags : string + (* The flags ocamlc should pass to the C preprocessor *) +val ocamlopt_cflags : string + (* The flags ocamlopt should pass to the C compiler *) +val ocamlopt_cppflags : string + (* The flags ocamlopt should pass to the C preprocessor *) +val bytecomp_c_libraries: string + (* The C libraries to link with custom runtimes *) +val native_c_libraries: string + (* The C libraries to link with native-code programs *) +val native_pack_linker: string + (* The linker to use for packaging (ocamlopt -pack) and for partial + links (ocamlopt -output-obj). *) +val mkdll: string + (* The linker command line to build dynamic libraries. *) +val mkexe: string + (* The linker command line to build executables. *) +val mkmaindll: string + (* The linker command line to build main programs as dlls. *) +val ranlib: string + (* Command to randomize a library, or "" if not needed *) +val ar: string + (* Name of the ar command, or "" if not needed (MSVC) *) +val cc_profile : string + (* The command line option to the C compiler to enable profiling. *) + +val load_path: string list ref + (* Directories in the search path for .cmi and .cmo files *) + +val interface_suffix: string ref + (* Suffix for interface file names *) + +val exec_magic_number: string + (* Magic number for bytecode executable files *) +val cmi_magic_number: string + (* Magic number for compiled interface files *) +val cmo_magic_number: string + (* Magic number for object bytecode files *) +val cma_magic_number: string + (* Magic number for archive files *) +val cmx_magic_number: string + (* Magic number for compilation unit descriptions *) +val cmxa_magic_number: string + (* Magic number for libraries of compilation unit descriptions *) +val ast_intf_magic_number: string + (* Magic number for file holding an interface syntax tree *) +val ast_impl_magic_number: string + (* Magic number for file holding an implementation syntax tree *) +val cmxs_magic_number: string + (* Magic number for dynamically-loadable plugins *) +val cmt_magic_number: string + (* Magic number for compiled interface files *) + +val max_tag: int + (* Biggest tag that can be stored in the header of a regular block. *) +val lazy_tag : int + (* Normally the same as Obj.lazy_tag. Separate definition because + of technical reasons for bootstrapping. *) +val max_young_wosize: int + (* Maximal size of arrays that are directly allocated in the + minor heap *) +val stack_threshold: int + (* Size in words of safe area at bottom of VM stack, + see byterun/config.h *) +val stack_safety_margin: int + (* Size in words of the safety margin between the bottom of + the stack and the stack pointer. 
This margin can be used by + intermediate computations of some instructions, or the event + handler. *) + +val architecture: string + (* Name of processor type for the native-code compiler *) +val model: string + (* Name of processor submodel for the native-code compiler *) +val system: string + (* Name of operating system for the native-code compiler *) + +val asm: string + (* The assembler (and flags) to use for assembling + ocamlopt-generated code. *) + +val asm_cfi_supported: bool + (* Whether assembler understands CFI directives *) +val with_frame_pointers : bool + (* Whether assembler should maintain frame pointers *) + +val ext_obj: string + (* Extension for object files, e.g. [.o] under Unix. *) +val ext_asm: string + (* Extension for assembler files, e.g. [.s] under Unix. *) +val ext_lib: string + (* Extension for library files, e.g. [.a] under Unix. *) +val ext_dll: string + (* Extension for dynamically-loaded libraries, e.g. [.so] under Unix.*) + +val default_executable_name: string + (* Name of executable produced by linking if none is given with -o, + e.g. [a.out] under Unix. *) + +val systhread_supported : bool + (* Whether the system thread library is implemented *) + +val flexdll_dirs : string list + (* Directories needed for the FlexDLL objects *) + +val host : string + (* Whether the compiler is a cross-compiler *) + +val target : string + (* Whether the compiler is a cross-compiler *) + +val print_config : out_channel -> unit;; + +val profiling : bool + (* Whether profiling with gprof is supported on this platform *) + +val flambda : bool + (* Whether the compiler was configured for flambda *) + +val spacetime : bool + (* Whether the compiler was configured for Spacetime profiling *) +val enable_call_counts : bool + (* Whether call counts are to be available when Spacetime profiling *) +val profinfo : bool + (* Whether the compiler was configured for profiling *) +val profinfo_width : int + (* How many bits are to be used in values' headers for profiling + information *) +val libunwind_available : bool + (* Whether the libunwind library is available on the target *) +val libunwind_link_flags : string + (* Linker flags to use libunwind *) + +val safe_string: bool + (* Whether the compiler was configured with -force-safe-string; + in that case, the -unsafe-string compile-time option is unavailable + + @since 4.05.0 *) +val default_safe_string: bool + (* Whether the compiler was configured to use the -safe-string + or -unsafe-string compile-time option by default. + + @since 4.06.0 *) +val flat_float_array : bool + (* Whether the compiler and runtime automagically flatten float + arrays *) +val windows_unicode: bool + (* Whether Windows Unicode runtime is enabled *) +val afl_instrument : bool + (* Whether afl-fuzz instrumentation is generated by default *) + + +end = struct +#1 "config_whole_compiler.ml" +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +(* The main OCaml version string has moved to ../VERSION *) +let version = "4.06.1+BS" +let standard_library = + let (//) = Filename.concat in + Filename.dirname Sys.executable_name // Filename.parent_dir_name // "lib" // "ocaml" +let standard_library_default = standard_library +let bs_only = ref true +let standard_runtime = "ocamlrun" (*dont care:path to ocamlrun*) +let ccomp_type = "cc" +let c_compiler = "gcc" +let c_output_obj = "-o " +let ocamlc_cflags = "-O2 -fno-strict-aliasing -fwrapv " +let ocamlc_cppflags = "-D_FILE_OFFSET_BITS=64 -D_REENTRANT" +let ocamlopt_cflags = "-O2 -fno-strict-aliasing -fwrapv" +let ocamlopt_cppflags = "-D_FILE_OFFSET_BITS=64 -D_REENTRANT" +let bytecomp_c_libraries = "-lcurses -lpthread " +(* bytecomp_c_compiler and native_c_compiler have been supported for a + long time and are retained for backwards compatibility. + For programs that don't need compatibility with older OCaml releases + the recommended approach is to use the constituent variables + c_compiler, ocamlc_cflags, ocamlc_cppflags etc., directly. +*) +let bytecomp_c_compiler = + "" +let native_c_compiler = + "" +let native_c_libraries = "" +let native_pack_linker = "ld -r -arch x86_64 -o\ " +let ranlib = "ranlib" +let ar = "ar" +let cc_profile = "-pg" +let mkdll = "" +let mkexe = "" +let mkmaindll = "" + +let profiling = true +let flambda = false +let safe_string = false +let default_safe_string = true +let windows_unicode = 0 != 0 + +let flat_float_array = true + +let afl_instrument = false + +let exec_magic_number = "Caml1999X011" +and cmi_magic_number = "Caml1999I022" +and cmo_magic_number = "Caml1999O022" +and cma_magic_number = "Caml1999A022" +and cmx_magic_number = + (* if flambda then + "Caml1999y022" + else *) + "Caml1999Y022" +and cmxa_magic_number = + (* if flambda then + "Caml1999z022" + else *) + "Caml1999Z022" +and ast_impl_magic_number = "Caml1999M022" +and ast_intf_magic_number = "Caml1999N022" +and cmxs_magic_number = "Caml1999D022" + (* cmxs_magic_number is duplicated in otherlibs/dynlink/natdynlink.ml *) +and cmt_magic_number = "Caml1999T022" + +let load_path = ref ([] : string list) + +let interface_suffix = ref ".mli" + +let max_tag = 245 +(* This is normally the same as in obj.ml, but we have to define it + separately because it can differ when we're in the middle of a + bootstrapping phase. 
*) +let lazy_tag = 246 + +let max_young_wosize = 256 +let stack_threshold = 256 (* see byterun/config.h *) +let stack_safety_margin = 60 + +let architecture = "amd64" +let model = "default" +let system = "macosx" + +let asm = "clang -arch x86_64 -Wno-trigraphs -c" +let asm_cfi_supported = true +let with_frame_pointers = false +let spacetime = false +let enable_call_counts = true +let libunwind_available = false +let libunwind_link_flags = "" +let profinfo = false +let profinfo_width = 0 + +let ext_exe = "" +let ext_obj = ".o" +let ext_asm = ".s" +let ext_lib = ".a" +let ext_dll = ".so" + +let host = "x86_64-apple-darwin17.7.0" +let target = "x86_64-apple-darwin17.7.0" + +let default_executable_name = + "" + +let systhread_supported = false;; + +let flexdll_dirs = [];; + +let print_config oc = + let p name valu = Printf.fprintf oc "%s: %s\n" name valu in + let p_int name valu = Printf.fprintf oc "%s: %d\n" name valu in + let p_bool name valu = Printf.fprintf oc "%s: %B\n" name valu in + p "version" version; + p "standard_library_default" standard_library_default; + p "standard_library" standard_library; + p "standard_runtime" standard_runtime; + p "ccomp_type" ccomp_type; + p "c_compiler" c_compiler; + p "ocamlc_cflags" ocamlc_cflags; + p "ocamlc_cppflags" ocamlc_cppflags; + p "ocamlopt_cflags" ocamlopt_cflags; + p "ocamlopt_cppflags" ocamlopt_cppflags; + p "bytecomp_c_compiler" bytecomp_c_compiler; + p "native_c_compiler" native_c_compiler; + p "bytecomp_c_libraries" bytecomp_c_libraries; + p "native_c_libraries" native_c_libraries; + p "native_pack_linker" native_pack_linker; + p "ranlib" ranlib; + p "cc_profile" cc_profile; + p "architecture" architecture; + p "model" model; + p_int "int_size" Sys.int_size; + p_int "word_size" Sys.word_size; + p "system" system; + p "asm" asm; + p_bool "asm_cfi_supported" asm_cfi_supported; + p_bool "with_frame_pointers" with_frame_pointers; + p "ext_exe" ext_exe; + p "ext_obj" ext_obj; + p "ext_asm" ext_asm; + p "ext_lib" ext_lib; + p "ext_dll" ext_dll; + p "os_type" Sys.os_type; + p "default_executable_name" default_executable_name; + p_bool "systhread_supported" systhread_supported; + p "host" host; + p "target" target; + p_bool "profiling" profiling; + p_bool "flambda" flambda; + p_bool "spacetime" spacetime; + p_bool "safe_string" safe_string; + p_bool "default_safe_string" default_safe_string; + p_bool "flat_float_array" flat_float_array; + p_bool "afl_instrument" afl_instrument; + p_bool "windows_unicode" windows_unicode; + + (* print the magic number *) + p "exec_magic_number" exec_magic_number; + p "cmi_magic_number" cmi_magic_number; + p "cmo_magic_number" cmo_magic_number; + p "cma_magic_number" cma_magic_number; + p "cmx_magic_number" cmx_magic_number; + p "cmxa_magic_number" cmxa_magic_number; + p "ast_impl_magic_number" ast_impl_magic_number; + p "ast_intf_magic_number" ast_intf_magic_number; + p "cmxs_magic_number" cmxs_magic_number; + p "cmt_magic_number" cmt_magic_number; + + flush oc; +;; + +end +module Config = Config_whole_compiler +module Misc : sig +#1 "misc.mli" +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
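(* Illustrative sketch, not part of the vendored file: using [protect_refs],
   declared below, to set a reference for the duration of a callback and
   restore it afterwards even if the callback raises. This is the same
   pattern as [without_warnings] in warnings.ml above. The [verbose] ref is
   made up for the example.

     let verbose = ref false

     let quietly f = Misc.protect_refs [Misc.R (verbose, false)] f
     let loudly f = Misc.protect_refs [Misc.R (verbose, true)] f
*)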
*) +(* *) +(**************************************************************************) + +(* Miscellaneous useful types and functions *) + +val array_of_list_rev : 'a list -> 'a array + + +val fatal_error: string -> 'a +val fatal_errorf: ('a, Format.formatter, unit, 'b) format4 -> 'a +exception Fatal_error + +val try_finally : (unit -> 'a) -> (unit -> unit) -> 'a;; + +val map_end: ('a -> 'b) -> 'a list -> 'b list -> 'b list + (* [map_end f l t] is [map f l @ t], just more efficient. *) +val map_left_right: ('a -> 'b) -> 'a list -> 'b list + (* Like [List.map], with guaranteed left-to-right evaluation order *) +val for_all2: ('a -> 'b -> bool) -> 'a list -> 'b list -> bool + (* Same as [List.for_all] but for a binary predicate. + In addition, this [for_all2] never fails: given two lists + with different lengths, it returns false. *) +val replicate_list: 'a -> int -> 'a list + (* [replicate_list elem n] is the list with [n] elements + all identical to [elem]. *) +val list_remove: 'a -> 'a list -> 'a list + (* [list_remove x l] returns a copy of [l] with the first + element equal to [x] removed. *) +val split_last: 'a list -> 'a list * 'a + (* Return the last element and the other elements of the given list. *) +val may: ('a -> unit) -> 'a option -> unit +val may_map: ('a -> 'b) -> 'a option -> 'b option + +type ref_and_value = R : 'a ref * 'a -> ref_and_value + +val protect_refs : ref_and_value list -> (unit -> 'a) -> 'a +(** [protect_refs l f] temporarily sets [r] to [v] for each [R (r, v)] in [l] + while executing [f]. The previous contents of the references is restored + even if [f] raises an exception. *) + +module Stdlib : sig + module List : sig + type 'a t = 'a list + + val compare : ('a -> 'a -> int) -> 'a t -> 'a t -> int + (** The lexicographic order supported by the provided order. + There is no constraint on the relative lengths of the lists. *) + + val equal : ('a -> 'a -> bool) -> 'a t -> 'a t -> bool + (** Returns [true] iff the given lists have the same length and content + with respect to the given equality function. *) + + val filter_map : ('a -> 'b option) -> 'a t -> 'b t + (** [filter_map f l] applies [f] to every element of [l], filters + out the [None] elements and returns the list of the arguments of + the [Some] elements. *) + + val some_if_all_elements_are_some : 'a option t -> 'a t option + (** If all elements of the given list are [Some _] then [Some xs] + is returned with the [xs] being the contents of those [Some]s, with + order preserved. Otherwise return [None]. *) + + val map2_prefix : ('a -> 'b -> 'c) -> 'a t -> 'b t -> ('c t * 'b t) + (** [let r1, r2 = map2_prefix f l1 l2] + If [l1] is of length n and [l2 = h2 @ t2] with h2 of length n, + r1 is [List.map2 f l1 h1] and r2 is t2. *) + + val split_at : int -> 'a t -> 'a t * 'a t + (** [split_at n l] returns the pair [before, after] where [before] is + the [n] first elements of [l] and [after] the remaining ones. + If [l] has less than [n] elements, raises Invalid_argument. *) + end + + module Option : sig + type 'a t = 'a option + + val equal : ('a -> 'a -> bool) -> 'a t -> 'a t -> bool + + val iter : ('a -> unit) -> 'a t -> unit + val map : ('a -> 'b) -> 'a t -> 'b t + val fold : ('a -> 'b -> 'b) -> 'a t -> 'b -> 'b + val value_default : ('a -> 'b) -> default:'b -> 'a t -> 'b + end + + module Array : sig + val exists2 : ('a -> 'b -> bool) -> 'a array -> 'b array -> bool + (* Same as [Array.exists], but for a two-argument predicate. 
Raise + Invalid_argument if the two arrays are determined to have + different lengths. *) + end +end + +val find_in_path: string list -> string -> string + (* Search a file in a list of directories. *) +val find_in_path_rel: string list -> string -> string + (* Search a relative file in a list of directories. *) +val find_in_path_uncap: string list -> string -> string + (* Same, but search also for uncapitalized name, i.e. + if name is Foo.ml, allow /path/Foo.ml and /path/foo.ml + to match. *) +val remove_file: string -> unit + (* Delete the given file if it exists. Never raise an error. *) +val expand_directory: string -> string -> string + (* [expand_directory alt file] eventually expands a [+] at the + beginning of file into [alt] (an alternate root directory) *) + +val create_hashtable: int -> ('a * 'b) list -> ('a, 'b) Hashtbl.t + (* Create a hashtable of the given size and fills it with the + given bindings. *) + +val copy_file: in_channel -> out_channel -> unit + (* [copy_file ic oc] reads the contents of file [ic] and copies + them to [oc]. It stops when encountering EOF on [ic]. *) +val copy_file_chunk: in_channel -> out_channel -> int -> unit + (* [copy_file_chunk ic oc n] reads [n] bytes from [ic] and copies + them to [oc]. It raises [End_of_file] when encountering + EOF on [ic]. *) +val string_of_file: in_channel -> string + (* [string_of_file ic] reads the contents of file [ic] and copies + them to a string. It stops when encountering EOF on [ic]. *) + +val output_to_bin_file_directly: string -> (string -> out_channel -> 'a) -> 'a + +val output_to_file_via_temporary: + ?mode:open_flag list -> string -> (string -> out_channel -> 'a) -> 'a + (* Produce output in temporary file, then rename it + (as atomically as possible) to the desired output file name. + [output_to_file_via_temporary filename fn] opens a temporary file + which is passed to [fn] (name + output channel). When [fn] returns, + the channel is closed and the temporary file is renamed to + [filename]. *) + +val log2: int -> int + (* [log2 n] returns [s] such that [n = 1 lsl s] + if [n] is a power of 2*) +val align: int -> int -> int + (* [align n a] rounds [n] upwards to a multiple of [a] + (a power of 2). *) +val no_overflow_add: int -> int -> bool + (* [no_overflow_add n1 n2] returns [true] if the computation of + [n1 + n2] does not overflow. *) +val no_overflow_sub: int -> int -> bool + (* [no_overflow_sub n1 n2] returns [true] if the computation of + [n1 - n2] does not overflow. *) +val no_overflow_mul: int -> int -> bool + (* [no_overflow_mul n1 n2] returns [true] if the computation of + [n1 * n2] does not overflow. *) +val no_overflow_lsl: int -> int -> bool + (* [no_overflow_lsl n k] returns [true] if the computation of + [n lsl k] does not overflow. *) + +module Int_literal_converter : sig + val int : string -> int + val int32 : string -> int32 + val int64 : string -> int64 + val nativeint : string -> nativeint +end + +val chop_extensions: string -> string + (* Return the given file name without its extensions. The extensions + is the longest suffix starting with a period and not including + a directory separator, [.xyz.uvw] for instance. + + Return the given name if it does not contain an extension. *) + +val search_substring: string -> string -> int -> int + (* [search_substring pat str start] returns the position of the first + occurrence of string [pat] in string [str]. Search starts + at offset [start] in [str]. Raise [Not_found] if [pat] + does not occur. 
*) + +val replace_substring: before:string -> after:string -> string -> string + (* [replace_substring ~before ~after str] replaces all + occurrences of [before] with [after] in [str] and returns + the resulting string. *) + +val rev_split_words: string -> string list + (* [rev_split_words s] splits [s] in blank-separated words, and returns + the list of words in reverse order. *) + +val get_ref: 'a list ref -> 'a list + (* [get_ref lr] returns the content of the list reference [lr] and reset + its content to the empty list. *) + + +val fst3: 'a * 'b * 'c -> 'a +val snd3: 'a * 'b * 'c -> 'b +val thd3: 'a * 'b * 'c -> 'c + +val fst4: 'a * 'b * 'c * 'd -> 'a +val snd4: 'a * 'b * 'c * 'd -> 'b +val thd4: 'a * 'b * 'c * 'd -> 'c +val for4: 'a * 'b * 'c * 'd -> 'd + +module LongString : + sig + type t = bytes array + val create : int -> t + val length : t -> int + val get : t -> int -> char + val set : t -> int -> char -> unit + val blit : t -> int -> t -> int -> int -> unit + val output : out_channel -> t -> int -> int -> unit + val unsafe_blit_to_bytes : t -> int -> bytes -> int -> int -> unit + val input_bytes : in_channel -> int -> t + end + +val edit_distance : string -> string -> int -> int option +(** [edit_distance a b cutoff] computes the edit distance between + strings [a] and [b]. To help efficiency, it uses a cutoff: if the + distance [d] is smaller than [cutoff], it returns [Some d], else + [None]. + + The distance algorithm currently used is Damerau-Levenshtein: it + computes the number of insertion, deletion, substitution of + letters, or swapping of adjacent letters to go from one word to the + other. The particular algorithm may change in the future. +*) + +val spellcheck : string list -> string -> string list +(** [spellcheck env name] takes a list of names [env] that exist in + the current environment and an erroneous [name], and returns a + list of suggestions taken from [env], that are close enough to + [name] that it may be a typo for one of them. *) + +val did_you_mean : Format.formatter -> (unit -> string list) -> unit +(** [did_you_mean ppf get_choices] hints that the user may have meant + one of the option returned by calling [get_choices]. It does nothing + if the returned list is empty. + + The [unit -> ...] thunking is meant to delay any potentially-slow + computation (typically computing edit-distance with many things + from the current environment) to when the hint message is to be + printed. You should print an understandable error message before + calling [did_you_mean], so that users get a clear notification of + the failure even if producing the hint is slow. +*) + +val cut_at : string -> char -> string * string +(** [String.cut_at s c] returns a pair containing the sub-string before + the first occurrence of [c] in [s], and the sub-string after the + first occurrence of [c] in [s]. + [let (before, after) = String.cut_at s c in + before ^ String.make 1 c ^ after] is the identity if [s] contains [c]. + + Raise [Not_found] if the character does not appear in the string + @since 4.01 +*) + + +module StringSet: Set.S with type elt = string +module StringMap: Map.S with type key = string +(* TODO: replace all custom instantiations of StringSet/StringMap in various + compiler modules with this one. 
*) + +(* Color handling *) +module Color : sig + type color = + | Black + | Red + | Green + | Yellow + | Blue + | Magenta + | Cyan + | White + ;; + + type style = + | FG of color (* foreground *) + | BG of color (* background *) + | Bold + | Reset + + | Dim + + + val ansi_of_style_l : style list -> string + (* ANSI escape sequence for the given style *) + + type styles = { + error: style list; + warning: style list; + loc: style list; + } + + val default_styles: styles + val get_styles: unit -> styles + val set_styles: styles -> unit + + type setting = Auto | Always | Never + + val setup : setting option -> unit + (* [setup opt] will enable or disable color handling on standard formatters + according to the value of color setting [opt]. + Only the first call to this function has an effect. *) + + val set_color_tag_handling : Format.formatter -> unit + (* adds functions to support color tags to the given formatter. *) +end + +val normalise_eol : string -> string +(** [normalise_eol s] returns a fresh copy of [s] with any '\r' characters + removed. Intended for pre-processing text which will subsequently be printed + on a channel which performs EOL transformations (i.e. Windows) *) + +val delete_eol_spaces : string -> string +(** [delete_eol_spaces s] returns a fresh copy of [s] with any end of + line spaces removed. Intended to normalize the output of the + toplevel for tests. *) + + + +(** {1 Hook machinery} + + Hooks machinery: + [add_hook name f] will register a function that will be called on the + argument of a later call to [apply_hooks]. Hooks are applied in the + lexicographical order of their names. +*) + +type hook_info = { + sourcefile : string; +} + +exception HookExnWrapper of + { + error: exn; + hook_name: string; + hook_info: hook_info; + } + (** An exception raised by a hook will be wrapped into a + [HookExnWrapper] constructor by the hook machinery. *) + + +val raise_direct_hook_exn: exn -> 'a + (** A hook can use [raise_unwrapped_hook_exn] to raise an exception that will + not be wrapped into a {!HookExnWrapper}. *) + +module type HookSig = sig + type t + val add_hook : string -> (hook_info -> t -> t) -> unit + val apply_hooks : hook_info -> t -> t +end + +module MakeHooks : functor (M : sig type t end) -> HookSig with type t = M.t + +end = struct +#1 "misc.ml" +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +(* Errors *) + +exception Fatal_error + + +let array_of_list_rev = function + [] -> [||] + | hd::tl -> + let len = List.length tl in + let a = Array.make (len + 1) hd in + let rec fill i = function + [] -> a + | hd::tl -> Array.unsafe_set a i hd; fill (i-1) tl in + fill (len - 1) tl + + +let fatal_error msg = + prerr_string ">> Fatal error: "; prerr_endline msg; raise Fatal_error + +let fatal_errorf fmt = Format.kasprintf fatal_error fmt + +(* Exceptions *) + +let try_finally work cleanup = + let result = (try work () with e -> cleanup (); raise e) in + cleanup (); + result +;; + +type ref_and_value = R : 'a ref * 'a -> ref_and_value + +let protect_refs = + let set_refs l = List.iter (fun (R (r, v)) -> r := v) l in + fun refs f -> + let backup = List.map (fun (R (r, _)) -> R (r, !r)) refs in + set_refs refs; + match f () with + | x -> set_refs backup; x + | exception e -> set_refs backup; raise e + +(* List functions *) + +let rec map_end f l1 l2 = + match l1 with + [] -> l2 + | hd::tl -> f hd :: map_end f tl l2 + +let rec map_left_right f = function + [] -> [] + | hd::tl -> let res = f hd in res :: map_left_right f tl + +let rec for_all2 pred l1 l2 = + match (l1, l2) with + ([], []) -> true + | (hd1::tl1, hd2::tl2) -> pred hd1 hd2 && for_all2 pred tl1 tl2 + | (_, _) -> false + +let rec replicate_list elem n = + if n <= 0 then [] else elem :: replicate_list elem (n-1) + +let rec list_remove x = function + [] -> [] + | hd :: tl -> + if hd = x then tl else hd :: list_remove x tl + +let rec split_last = function + [] -> assert false + | [x] -> ([], x) + | hd :: tl -> + let (lst, last) = split_last tl in + (hd :: lst, last) + +module Stdlib = struct + module List = struct + type 'a t = 'a list + + let rec compare cmp l1 l2 = + match l1, l2 with + | [], [] -> 0 + | [], _::_ -> -1 + | _::_, [] -> 1 + | h1::t1, h2::t2 -> + let c = cmp h1 h2 in + if c <> 0 then c + else compare cmp t1 t2 + + let rec equal eq l1 l2 = + match l1, l2 with + | ([], []) -> true + | (hd1 :: tl1, hd2 :: tl2) -> eq hd1 hd2 && equal eq tl1 tl2 + | (_, _) -> false + + let filter_map f l = + let rec aux acc l = + match l with + | [] -> List.rev acc + | h :: t -> + match f h with + | None -> aux acc t + | Some v -> aux (v :: acc) t + in + aux [] l + + let map2_prefix f l1 l2 = + let rec aux acc l1 l2 = + match l1, l2 with + | [], _ -> (List.rev acc, l2) + | _ :: _, [] -> raise (Invalid_argument "map2_prefix") + | h1::t1, h2::t2 -> + let h = f h1 h2 in + aux (h :: acc) t1 t2 + in + aux [] l1 l2 + + let some_if_all_elements_are_some l = + let rec aux acc l = + match l with + | [] -> Some (List.rev acc) + | None :: _ -> None + | Some h :: t -> aux (h :: acc) t + in + aux [] l + + let split_at n l = + let rec aux n acc l = + if n = 0 + then List.rev acc, l + else + match l with + | [] -> raise (Invalid_argument "split_at") + | t::q -> aux (n-1) (t::acc) q + in + aux n [] l + end + + module Option = struct + type 'a t = 'a option + + let equal eq o1 o2 = + match o1, o2 with + | None, None -> true + | Some e1, Some e2 -> eq e1 e2 + | _, _ -> false + + let iter f = function + | Some x -> f x + | None -> () + + let map f = function + | Some x -> Some (f x) + | None -> None + + let fold f a b = + match a with + | None -> b + | Some a -> f a b + + let value_default f ~default a = + match a with + | None -> default + | Some a -> f a + end + + module Array = struct + let exists2 p a1 a2 = + let n = Array.length a1 in + if Array.length a2 <> n then 
invalid_arg "Misc.Stdlib.Array.exists2"; + let rec loop i = + if i = n then false + else if p (Array.unsafe_get a1 i) (Array.unsafe_get a2 i) then true + else loop (succ i) in + loop 0 + end +end + +let may = Stdlib.Option.iter +let may_map = Stdlib.Option.map + +(* File functions *) + +let find_in_path path name = + if not (Filename.is_implicit name) then + if Sys.file_exists name then name else raise Not_found + else begin + let rec try_dir = function + [] -> raise Not_found + | dir::rem -> + let fullname = Filename.concat dir name in + if Sys.file_exists fullname then fullname else try_dir rem + in try_dir path + end + +let find_in_path_rel path name = + let rec simplify s = + let open Filename in + let base = basename s in + let dir = dirname s in + if dir = s then dir + else if base = current_dir_name then simplify dir + else concat (simplify dir) base + in + let rec try_dir = function + [] -> raise Not_found + | dir::rem -> + let fullname = simplify (Filename.concat dir name) in + if Sys.file_exists fullname then fullname else try_dir rem + in try_dir path + +let find_in_path_uncap path name = + let uname = String.uncapitalize_ascii name in + let rec try_dir = function + [] -> raise Not_found + | dir::rem -> + let fullname = Filename.concat dir name + and ufullname = Filename.concat dir uname in + if Sys.file_exists ufullname then ufullname + else if Sys.file_exists fullname then fullname + else try_dir rem + in try_dir path + +let remove_file filename = + try + if Sys.file_exists filename + then Sys.remove filename + with Sys_error _msg -> + () + +(* Expand a -I option: if it starts with +, make it relative to the standard + library directory *) + +let expand_directory alt s = + if String.length s > 0 && s.[0] = '+' + then Filename.concat alt + (String.sub s 1 (String.length s - 1)) + else s + +(* Hashtable functions *) + +let create_hashtable size init = + let tbl = Hashtbl.create size in + List.iter (fun (key, data) -> Hashtbl.add tbl key data) init; + tbl + +(* File copy *) + +let copy_file ic oc = + let buff = Bytes.create 0x1000 in + let rec copy () = + let n = input ic buff 0 0x1000 in + if n = 0 then () else (output oc buff 0 n; copy()) + in copy() + +let copy_file_chunk ic oc len = + let buff = Bytes.create 0x1000 in + let rec copy n = + if n <= 0 then () else begin + let r = input ic buff 0 (min n 0x1000) in + if r = 0 then raise End_of_file else (output oc buff 0 r; copy(n-r)) + end + in copy len + +let string_of_file ic = + let b = Buffer.create 0x10000 in + let buff = Bytes.create 0x1000 in + let rec copy () = + let n = input ic buff 0 0x1000 in + if n = 0 then Buffer.contents b else + (Buffer.add_subbytes b buff 0 n; copy()) + in copy() + +let output_to_bin_file_directly filename fn = + let oc = Pervasives.open_out_bin filename in + match fn filename oc with + | v -> close_out oc ; v + | exception e -> close_out oc ; raise e + +let output_to_file_via_temporary ?(mode = [Open_text]) filename fn = + let (temp_filename, oc) = + Filename.open_temp_file + ~mode ~perms:0o666 ~temp_dir:(Filename.dirname filename) + (Filename.basename filename) ".tmp" in + (* The 0o666 permissions will be modified by the umask. It's just + like what [open_out] and [open_out_bin] do. + With temp_dir = dirname filename, we ensure that the returned + temp file is in the same directory as filename itself, making + it safe to rename temp_filename to filename later. + With prefix = basename filename, we are almost certain that + the first generated name will be unique. 
A fixed prefix + would work too but might generate more collisions if many + files are being produced simultaneously in the same directory. *) + match fn temp_filename oc with + | res -> + close_out oc; + begin try + Sys.rename temp_filename filename; res + with exn -> + remove_file temp_filename; raise exn + end + | exception exn -> + close_out oc; remove_file temp_filename; raise exn + +(* Integer operations *) + +let rec log2 n = + if n <= 1 then 0 else 1 + log2(n asr 1) + +let align n a = + if n >= 0 then (n + a - 1) land (-a) else n land (-a) + +let no_overflow_add a b = (a lxor b) lor (a lxor (lnot (a+b))) < 0 + +let no_overflow_sub a b = (a lxor (lnot b)) lor (b lxor (a-b)) < 0 + +let no_overflow_mul a b = b <> 0 && (a * b) / b = a + +let no_overflow_lsl a k = + 0 <= k && k < Sys.word_size && min_int asr k <= a && a <= max_int asr k + +module Int_literal_converter = struct + (* To convert integer literals, allowing max_int + 1 (PR#4210) *) + let cvt_int_aux str neg of_string = + if String.length str = 0 || str.[0]= '-' + then of_string str + else neg (of_string ("-" ^ str)) + let int s = cvt_int_aux s (~-) int_of_string + let int32 s = cvt_int_aux s Int32.neg Int32.of_string + let int64 s = cvt_int_aux s Int64.neg Int64.of_string + let nativeint s = cvt_int_aux s Nativeint.neg Nativeint.of_string +end + +(* String operations *) + +let chop_extensions file = + let dirname = Filename.dirname file and basename = Filename.basename file in + try + let pos = String.index basename '.' in + let basename = String.sub basename 0 pos in + if Filename.is_implicit file && dirname = Filename.current_dir_name then + basename + else + Filename.concat dirname basename + with Not_found -> file + +let search_substring pat str start = + let rec search i j = + if j >= String.length pat then i + else if i + j >= String.length str then raise Not_found + else if str.[i + j] = pat.[j] then search i (j+1) + else search (i+1) 0 + in search start 0 + +let replace_substring ~before ~after str = + let rec search acc curr = + match search_substring before str curr with + | next -> + let prefix = String.sub str curr (next - curr) in + search (prefix :: acc) (next + String.length before) + | exception Not_found -> + let suffix = String.sub str curr (String.length str - curr) in + List.rev (suffix :: acc) + in String.concat after (search [] 0) + +let rev_split_words s = + let rec split1 res i = + if i >= String.length s then res else begin + match s.[i] with + ' ' | '\t' | '\r' | '\n' -> split1 res (i+1) + | _ -> split2 res i (i+1) + end + and split2 res i j = + if j >= String.length s then String.sub s i (j-i) :: res else begin + match s.[j] with + ' ' | '\t' | '\r' | '\n' -> split1 (String.sub s i (j-i) :: res) (j+1) + | _ -> split2 res i (j+1) + end + in split1 [] 0 + +let get_ref r = + let v = !r in + r := []; v + +let fst3 (x, _, _) = x +let snd3 (_,x,_) = x +let thd3 (_,_,x) = x + +let fst4 (x, _, _, _) = x +let snd4 (_,x,_, _) = x +let thd4 (_,_,x,_) = x +let for4 (_,_,_,x) = x + + +module LongString = struct + type t = bytes array + + let create str_size = + let tbl_size = str_size / Sys.max_string_length + 1 in + let tbl = Array.make tbl_size Bytes.empty in + for i = 0 to tbl_size - 2 do + tbl.(i) <- Bytes.create Sys.max_string_length; + done; + tbl.(tbl_size - 1) <- Bytes.create (str_size mod Sys.max_string_length); + tbl + + let length tbl = + let tbl_size = Array.length tbl in + Sys.max_string_length * (tbl_size - 1) + Bytes.length tbl.(tbl_size - 1) + + let get tbl ind = + Bytes.get tbl.(ind / 
Sys.max_string_length) (ind mod Sys.max_string_length) + + let set tbl ind c = + Bytes.set tbl.(ind / Sys.max_string_length) (ind mod Sys.max_string_length) + c + + let blit src srcoff dst dstoff len = + for i = 0 to len - 1 do + set dst (dstoff + i) (get src (srcoff + i)) + done + + let output oc tbl pos len = + for i = pos to pos + len - 1 do + output_char oc (get tbl i) + done + + let unsafe_blit_to_bytes src srcoff dst dstoff len = + for i = 0 to len - 1 do + Bytes.unsafe_set dst (dstoff + i) (get src (srcoff + i)) + done + + let input_bytes ic len = + let tbl = create len in + Array.iter (fun str -> really_input ic str 0 (Bytes.length str)) tbl; + tbl +end + + +let edit_distance a b cutoff = + let la, lb = String.length a, String.length b in + let cutoff = + (* using max_int for cutoff would cause overflows in (i + cutoff + 1); + we bring it back to the (max la lb) worstcase *) + min (max la lb) cutoff in + if abs (la - lb) > cutoff then None + else begin + (* initialize with 'cutoff + 1' so that not-yet-written-to cases have + the worst possible cost; this is useful when computing the cost of + a case just at the boundary of the cutoff diagonal. *) + let m = Array.make_matrix (la + 1) (lb + 1) (cutoff + 1) in + m.(0).(0) <- 0; + for i = 1 to la do + m.(i).(0) <- i; + done; + for j = 1 to lb do + m.(0).(j) <- j; + done; + for i = 1 to la do + for j = max 1 (i - cutoff - 1) to min lb (i + cutoff + 1) do + let cost = if a.[i-1] = b.[j-1] then 0 else 1 in + let best = + (* insert, delete or substitute *) + min (1 + min m.(i-1).(j) m.(i).(j-1)) (m.(i-1).(j-1) + cost) + in + let best = + (* swap two adjacent letters; we use "cost" again in case of + a swap between two identical letters; this is slightly + redundant as this is a double-substitution case, but it + was done this way in most online implementations and + imitation has its virtues *) + if not (i > 1 && j > 1 && a.[i-1] = b.[j-2] && a.[i-2] = b.[j-1]) + then best + else min best (m.(i-2).(j-2) + cost) + in + m.(i).(j) <- best + done; + done; + let result = m.(la).(lb) in + if result > cutoff + then None + else Some result + end + +let spellcheck env name = + let cutoff = + match String.length name with + | 1 | 2 -> 0 + | 3 | 4 -> 1 + | 5 | 6 -> 2 + | _ -> 3 + in + let compare target acc head = + match edit_distance target head cutoff with + | None -> acc + | Some dist -> + let (best_choice, best_dist) = acc in + if dist < best_dist then ([head], dist) + else if dist = best_dist then (head :: best_choice, dist) + else acc + in + fst (List.fold_left (compare name) ([], max_int) env) + +let did_you_mean ppf get_choices = + (* flush now to get the error report early, in the (unheard of) case + where the search in the get_choices function would take a bit of + time; in the worst case, the user has seen the error, she can + interrupt the process before the spell-checking terminates. *) + Format.fprintf ppf "@?"; + match get_choices () with + | [] -> () + | choices -> + let rest, last = split_last choices in + Format.fprintf ppf "@\nHint: Did you mean %s%s%s?@?" 
+ (String.concat ", " rest) + (if rest = [] then "" else " or ") + last + +let cut_at s c = + let pos = String.index s c in + String.sub s 0 pos, String.sub s (pos+1) (String.length s - pos - 1) + + +module StringSet = Set.Make(struct type t = string let compare = compare end) +module StringMap = Map.Make(struct type t = string let compare = compare end) + +(* Color handling *) +module Color = struct + (* use ANSI color codes, see https://en.wikipedia.org/wiki/ANSI_escape_code *) + type color = + | Black + | Red + | Green + | Yellow + | Blue + | Magenta + | Cyan + | White + ;; + + type style = + | FG of color (* foreground *) + | BG of color (* background *) + | Bold + | Reset + + | Dim + + + let ansi_of_color = function + | Black -> "0" + | Red -> "1" + | Green -> "2" + | Yellow -> "3" + | Blue -> "4" + | Magenta -> "5" + | Cyan -> "6" + | White -> "7" + + let code_of_style = function + | FG c -> "3" ^ ansi_of_color c + | BG c -> "4" ^ ansi_of_color c + | Bold -> "1" + | Reset -> "0" + + | Dim -> "2" + + + let ansi_of_style_l l = + let s = match l with + | [] -> code_of_style Reset + | [s] -> code_of_style s + | _ -> String.concat ";" (List.map code_of_style l) + in + "\x1b[" ^ s ^ "m" + + type styles = { + error: style list; + warning: style list; + loc: style list; + } + + let default_styles = { + warning = [Bold; FG Magenta]; + error = [Bold; FG Red]; + loc = [Bold]; + } + + let cur_styles = ref default_styles + let get_styles () = !cur_styles + let set_styles s = cur_styles := s + + (* map a tag to a style, if the tag is known. + @raise Not_found otherwise *) + let style_of_tag s = match s with + | "error" -> (!cur_styles).error + | "warning" -> (!cur_styles).warning + | "loc" -> (!cur_styles).loc + + | "info" -> [Bold; FG Yellow] + | "dim" -> [Dim] + | "filename" -> [FG Cyan] + + | _ -> raise Not_found + + let color_enabled = ref true + + (* either prints the tag of [s] or delegates to [or_else] *) + let mark_open_tag ~or_else s = + try + let style = style_of_tag s in + if !color_enabled then ansi_of_style_l style else "" + with Not_found -> or_else s + + let mark_close_tag ~or_else s = + try + let _ = style_of_tag s in + if !color_enabled then ansi_of_style_l [Reset] else "" + with Not_found -> or_else s + + (* add color handling to formatter [ppf] *) + let set_color_tag_handling ppf = + let open Format in + let functions = pp_get_formatter_tag_functions ppf () in + let functions' = {functions with + mark_open_tag=(mark_open_tag ~or_else:functions.mark_open_tag); + mark_close_tag=(mark_close_tag ~or_else:functions.mark_close_tag); + } in + pp_set_mark_tags ppf true; (* enable tags *) + pp_set_formatter_tag_functions ppf functions'; + (* also setup margins *) + pp_set_margin ppf (pp_get_margin std_formatter()); + () + + external isatty : out_channel -> bool = "caml_sys_isatty" + + (* reasonable heuristic on whether colors should be enabled *) + let should_enable_color () = + let term = try Sys.getenv "TERM" with Not_found -> "" in + term <> "dumb" + && term <> "" + && isatty stderr + + type setting = Auto | Always | Never + + let setup = + let first = ref true in (* initialize only once *) + let formatter_l = + [Format.std_formatter; Format.err_formatter; Format.str_formatter] + in + fun o -> + if !first then ( + first := false; + Format.set_mark_tags true; + List.iter set_color_tag_handling formatter_l; + color_enabled := (match o with + | Some Always -> true + | Some Auto -> should_enable_color () + | Some Never -> false + | None -> should_enable_color ()) + ); + () +end + +let 
normalise_eol s = + let b = Buffer.create 80 in + for i = 0 to String.length s - 1 do + if s.[i] <> '\r' then Buffer.add_char b s.[i] + done; + Buffer.contents b + +let delete_eol_spaces src = + let len_src = String.length src in + let dst = Bytes.create len_src in + let rec loop i_src i_dst = + if i_src = len_src then + i_dst + else + match src.[i_src] with + | ' ' | '\t' -> + loop_spaces 1 (i_src + 1) i_dst + | c -> + Bytes.set dst i_dst c; + loop (i_src + 1) (i_dst + 1) + and loop_spaces spaces i_src i_dst = + if i_src = len_src then + i_dst + else + match src.[i_src] with + | ' ' | '\t' -> + loop_spaces (spaces + 1) (i_src + 1) i_dst + | '\n' -> + Bytes.set dst i_dst '\n'; + loop (i_src + 1) (i_dst + 1) + | _ -> + for n = 0 to spaces do + Bytes.set dst (i_dst + n) src.[i_src - spaces + n] + done; + loop (i_src + 1) (i_dst + spaces + 1) + in + let stop = loop 0 0 in + Bytes.sub_string dst 0 stop + +type hook_info = { + sourcefile : string; +} + +exception HookExnWrapper of + { + error: exn; + hook_name: string; + hook_info: hook_info; + } + +exception HookExn of exn + +let raise_direct_hook_exn e = raise (HookExn e) + +let fold_hooks list hook_info ast = + List.fold_left (fun ast (hook_name,f) -> + try + f hook_info ast + with + | HookExn e -> raise e + | error -> raise (HookExnWrapper {error; hook_name; hook_info}) + (* when explicit reraise with backtrace will be available, + it should be used here *) + + ) ast (List.sort compare list) + +module type HookSig = sig + type t + + val add_hook : string -> (hook_info -> t -> t) -> unit + val apply_hooks : hook_info -> t -> t +end + +module MakeHooks(M: sig + type t + end) : HookSig with type t = M.t += struct + + type t = M.t + + let hooks = ref [] + let add_hook name f = hooks := (name, f) :: !hooks + let apply_hooks sourcefile intf = + fold_hooks !hooks sourcefile intf +end + +end +module Identifiable : sig +#1 "identifiable.mli" +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Pierre Chambart, OCamlPro *) +(* Mark Shinwell and Leo White, Jane Street Europe *) +(* *) +(* Copyright 2013--2016 OCamlPro SAS *) +(* Copyright 2014--2016 Jane Street Group LLC *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Uniform interface for common data structures over various things. *) + +module type Thing = sig + type t + + include Hashtbl.HashedType with type t := t + include Map.OrderedType with type t := t + + val output : out_channel -> t -> unit + val print : Format.formatter -> t -> unit +end + +module Pair : functor (A : Thing) (B : Thing) -> Thing with type t = A.t * B.t + +module type Set = sig + module T : Set.OrderedType + include Set.S + with type elt = T.t + and type t = Set.Make (T).t + + val output : out_channel -> t -> unit + val print : Format.formatter -> t -> unit + val to_string : t -> string + val of_list : elt list -> t + val map : (elt -> elt) -> t -> t +end + +module type Map = sig + module T : Map.OrderedType + include Map.S + with type key = T.t + and type 'a t = 'a Map.Make (T).t + + val filter_map : 'a t -> f:(key -> 'a -> 'b option) -> 'b t + val of_list : (key * 'a) list -> 'a t + + (** [disjoint_union m1 m2] contains all bindings from [m1] and + [m2]. 
If some binding is present in both and the associated + value is not equal, a Fatal_error is raised *) + val disjoint_union : ?eq:('a -> 'a -> bool) -> ?print:(Format.formatter -> 'a -> unit) -> 'a t -> 'a t -> 'a t + + (** [union_right m1 m2] contains all bindings from [m1] and [m2]. If + some binding is present in both, the one from [m2] is taken *) + val union_right : 'a t -> 'a t -> 'a t + + (** [union_left m1 m2 = union_right m2 m1] *) + val union_left : 'a t -> 'a t -> 'a t + + val union_merge : ('a -> 'a -> 'a) -> 'a t -> 'a t -> 'a t + val rename : key t -> key -> key + val map_keys : (key -> key) -> 'a t -> 'a t + val keys : 'a t -> Set.Make(T).t + val data : 'a t -> 'a list + val of_set : (key -> 'a) -> Set.Make(T).t -> 'a t + val transpose_keys_and_data : key t -> key t + val transpose_keys_and_data_set : key t -> Set.Make(T).t t + val print : + (Format.formatter -> 'a -> unit) -> Format.formatter -> 'a t -> unit +end + +module type Tbl = sig + module T : sig + type t + include Map.OrderedType with type t := t + include Hashtbl.HashedType with type t := t + end + include Hashtbl.S + with type key = T.t + and type 'a t = 'a Hashtbl.Make (T).t + + val to_list : 'a t -> (T.t * 'a) list + val of_list : (T.t * 'a) list -> 'a t + + val to_map : 'a t -> 'a Map.Make(T).t + val of_map : 'a Map.Make(T).t -> 'a t + val memoize : 'a t -> (key -> 'a) -> key -> 'a + val map : 'a t -> ('a -> 'b) -> 'b t +end + +module type S = sig + type t + + module T : Thing with type t = t + include Thing with type t := T.t + + module Set : Set with module T := T + module Map : Map with module T := T + module Tbl : Tbl with module T := T +end + +module Make (T : Thing) : S with type t := T.t + +end = struct +#1 "identifiable.ml" +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Pierre Chambart, OCamlPro *) +(* Mark Shinwell and Leo White, Jane Street Europe *) +(* *) +(* Copyright 2013--2016 OCamlPro SAS *) +(* Copyright 2014--2016 Jane Street Group LLC *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +module type Thing = sig + type t + + include Hashtbl.HashedType with type t := t + include Map.OrderedType with type t := t + + val output : out_channel -> t -> unit + val print : Format.formatter -> t -> unit +end + +module type Set = sig + module T : Set.OrderedType + include Set.S + with type elt = T.t + and type t = Set.Make (T).t + + val output : out_channel -> t -> unit + val print : Format.formatter -> t -> unit + val to_string : t -> string + val of_list : elt list -> t + val map : (elt -> elt) -> t -> t +end + +module type Map = sig + module T : Map.OrderedType + include Map.S + with type key = T.t + and type 'a t = 'a Map.Make (T).t + + val filter_map : 'a t -> f:(key -> 'a -> 'b option) -> 'b t + val of_list : (key * 'a) list -> 'a t + + val disjoint_union : ?eq:('a -> 'a -> bool) -> ?print:(Format.formatter -> 'a -> unit) -> 'a t -> 'a t -> 'a t + + val union_right : 'a t -> 'a t -> 'a t + + val union_left : 'a t -> 'a t -> 'a t + + val union_merge : ('a -> 'a -> 'a) -> 'a t -> 'a t -> 'a t + val rename : key t -> key -> key + val map_keys : (key -> key) -> 'a t -> 'a t + val keys : 'a t -> Set.Make(T).t + val data : 'a t -> 'a list + val of_set : (key -> 'a) -> Set.Make(T).t -> 'a t + val transpose_keys_and_data : key t -> key t + val transpose_keys_and_data_set : key t -> Set.Make(T).t t + val print : + (Format.formatter -> 'a -> unit) -> Format.formatter -> 'a t -> unit +end + +module type Tbl = sig + module T : sig + type t + include Map.OrderedType with type t := t + include Hashtbl.HashedType with type t := t + end + include Hashtbl.S + with type key = T.t + and type 'a t = 'a Hashtbl.Make (T).t + + val to_list : 'a t -> (T.t * 'a) list + val of_list : (T.t * 'a) list -> 'a t + + val to_map : 'a t -> 'a Map.Make(T).t + val of_map : 'a Map.Make(T).t -> 'a t + val memoize : 'a t -> (key -> 'a) -> key -> 'a + val map : 'a t -> ('a -> 'b) -> 'b t +end + +module Pair (A : Thing) (B : Thing) : Thing with type t = A.t * B.t = struct + type t = A.t * B.t + + let compare (a1, b1) (a2, b2) = + let c = A.compare a1 a2 in + if c <> 0 then c + else B.compare b1 b2 + + let output oc (a, b) = Printf.fprintf oc " (%a, %a)" A.output a B.output b + let hash (a, b) = Hashtbl.hash (A.hash a, B.hash b) + let equal (a1, b1) (a2, b2) = A.equal a1 a2 && B.equal b1 b2 + let print ppf (a, b) = Format.fprintf ppf " (%a, @ %a)" A.print a B.print b +end + +module Make_map (T : Thing) = struct + include Map.Make (T) + + let filter_map t ~f = + fold (fun id v map -> + match f id v with + | None -> map + | Some r -> add id r map) t empty + + let of_list l = + List.fold_left (fun map (id, v) -> add id v map) empty l + + let disjoint_union ?eq ?print m1 m2 = + union (fun id v1 v2 -> + let ok = match eq with + | None -> false + | Some eq -> eq v1 v2 + in + if not ok then + let err = + match print with + | None -> + Format.asprintf "Map.disjoint_union %a" T.print id + | Some print -> + Format.asprintf "Map.disjoint_union %a => %a <> %a" + T.print id print v1 print v2 + in + Misc.fatal_error err + else Some v1) + m1 m2 + + let union_right m1 m2 = + merge (fun _id x y -> match x, y with + | None, None -> None + | None, Some v + | Some v, None + | Some _, Some v -> Some v) + m1 m2 + + let union_left m1 m2 = union_right m2 m1 + + let union_merge f m1 m2 = + let aux _ m1 m2 = + match m1, m2 with + | None, m | m, None -> m + | Some m1, Some m2 -> Some (f m1 m2) + in + merge aux m1 m2 + + let rename m v = + try find v m + 
with Not_found -> v + + let map_keys f m = + of_list (List.map (fun (k, v) -> f k, v) (bindings m)) + + let print f ppf s = + let elts ppf s = iter (fun id v -> + Format.fprintf ppf "@ (@[%a@ %a@])" T.print id f v) s in + Format.fprintf ppf "@[<1>{@[%a@ @]}@]" elts s + + module T_set = Set.Make (T) + + let keys map = fold (fun k _ set -> T_set.add k set) map T_set.empty + + let data t = List.map snd (bindings t) + + let of_set f set = T_set.fold (fun e map -> add e (f e) map) set empty + + let transpose_keys_and_data map = fold (fun k v m -> add v k m) map empty + let transpose_keys_and_data_set map = + fold (fun k v m -> + let set = + match find v m with + | exception Not_found -> + T_set.singleton k + | set -> + T_set.add k set + in + add v set m) + map empty +end + +module Make_set (T : Thing) = struct + include Set.Make (T) + + let output oc s = + Printf.fprintf oc " ( "; + iter (fun v -> Printf.fprintf oc "%a " T.output v) s; + Printf.fprintf oc ")" + + let print ppf s = + let elts ppf s = iter (fun e -> Format.fprintf ppf "@ %a" T.print e) s in + Format.fprintf ppf "@[<1>{@[%a@ @]}@]" elts s + + let to_string s = Format.asprintf "%a" print s + + let of_list l = match l with + | [] -> empty + | [t] -> singleton t + | t :: q -> List.fold_left (fun acc e -> add e acc) (singleton t) q + + let map f s = of_list (List.map f (elements s)) +end + +module Make_tbl (T : Thing) = struct + include Hashtbl.Make (T) + + module T_map = Make_map (T) + + let to_list t = + fold (fun key datum elts -> (key, datum)::elts) t [] + + let of_list elts = + let t = create 42 in + List.iter (fun (key, datum) -> add t key datum) elts; + t + + let to_map v = fold T_map.add v T_map.empty + + let of_map m = + let t = create (T_map.cardinal m) in + T_map.iter (fun k v -> add t k v) m; + t + + let memoize t f = fun key -> + try find t key with + | Not_found -> + let r = f key in + add t key r; + r + + let map t f = + of_map (T_map.map f (to_map t)) +end + +module type S = sig + type t + + module T : Thing with type t = t + include Thing with type t := T.t + + module Set : Set with module T := T + module Map : Map with module T := T + module Tbl : Tbl with module T := T +end + +module Make (T : Thing) = struct + module T = T + include T + + module Set = Make_set (T) + module Map = Make_map (T) + module Tbl = Make_tbl (T) +end + +end +module Numbers : sig +#1 "numbers.mli" +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Pierre Chambart, OCamlPro *) +(* Mark Shinwell and Leo White, Jane Street Europe *) +(* *) +(* Copyright 2013--2016 OCamlPro SAS *) +(* Copyright 2014--2016 Jane Street Group LLC *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Modules about numbers, some of which satisfy {!Identifiable.S}. *) + +module Int : sig + include Identifiable.S with type t = int + + (** [zero_to_n n] is the set of numbers \{0, ..., n\} (inclusive). 
*) + val zero_to_n : int -> Set.t +end + +module Int8 : sig + type t + + val zero : t + val one : t + + val of_int_exn : int -> t + val to_int : t -> int +end + +module Int16 : sig + type t + + val of_int_exn : int -> t + val of_int64_exn : Int64.t -> t + + val to_int : t -> int +end + +module Float : Identifiable.S with type t = float + +end = struct +#1 "numbers.ml" +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Pierre Chambart, OCamlPro *) +(* Mark Shinwell and Leo White, Jane Street Europe *) +(* *) +(* Copyright 2013--2016 OCamlPro SAS *) +(* Copyright 2014--2016 Jane Street Group LLC *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +module Int_base = Identifiable.Make (struct + type t = int + + let compare x y = x - y + let output oc x = Printf.fprintf oc "%i" x + let hash i = i + let equal (i : int) j = i = j + let print = Format.pp_print_int +end) + +module Int = struct + type t = int + + include Int_base + + let rec zero_to_n n = + if n < 0 then Set.empty else Set.add n (zero_to_n (n-1)) +end + +module Int8 = struct + type t = int + + let zero = 0 + let one = 1 + + let of_int_exn i = + if i < -(1 lsl 7) || i > ((1 lsl 7) - 1) then + Misc.fatal_errorf "Int8.of_int_exn: %d is out of range" i + else + i + + let to_int i = i +end + +module Int16 = struct + type t = int + + let of_int_exn i = + if i < -(1 lsl 15) || i > ((1 lsl 15) - 1) then + Misc.fatal_errorf "Int16.of_int_exn: %d is out of range" i + else + i + + let lower_int64 = Int64.neg (Int64.shift_left Int64.one 15) + let upper_int64 = Int64.sub (Int64.shift_left Int64.one 15) Int64.one + + let of_int64_exn i = + if Int64.compare i lower_int64 < 0 + || Int64.compare i upper_int64 > 0 + then + Misc.fatal_errorf "Int16.of_int64_exn: %Ld is out of range" i + else + Int64.to_int i + + let to_int t = t +end + +module Float = struct + type t = float + + include Identifiable.Make (struct + type t = float + + let compare x y = Stdlib.compare x y + let output oc x = Printf.fprintf oc "%f" x + let hash f = Hashtbl.hash f + let equal (i : float) j = i = j + let print = Format.pp_print_float + end) +end + +end +module Profile : sig +#1 "profile.mli" +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Pierre Chambart, OCamlPro *) +(* *) +(* Copyright 2015 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Compiler performance recording *) + +type file = string + +val reset : unit -> unit +(** erase all recorded profile information *) + +val record_call : ?accumulate:bool -> string -> (unit -> 'a) -> 'a +(** [record_call pass f] calls [f] and records its profile information. 
*) + +val record : ?accumulate:bool -> string -> ('a -> 'b) -> 'a -> 'b +(** [record pass f arg] records the profile information of [f arg] *) + +type column = [ `Time | `Alloc | `Top_heap | `Abs_top_heap ] + +val print : Format.formatter -> column list -> unit +(** Prints the selected recorded profiling information to the formatter. *) + +(** Command line flags *) + +val options_doc : string +val all_columns : column list + +(** A few pass names that are needed in several places, and shared to + avoid typos. *) + +val generate : string +val transl : string +val typing : string + +end = struct +#1 "profile.ml" +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Pierre Chambart, OCamlPro *) +(* *) +(* Copyright 2015 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +[@@@ocaml.warning "+a-18-40-42-48"] + +type file = string + +external time_include_children: bool -> float = "caml_sys_time_include_children" +let cpu_time () = time_include_children true + +module Measure = struct + type t = { + time : float; + allocated_words : float; + top_heap_words : int; + } + let create () = + let stat = Gc.quick_stat () in + { + time = cpu_time (); + allocated_words = stat.minor_words +. stat.major_words; + top_heap_words = stat.top_heap_words; + } + let zero = { time = 0.; allocated_words = 0.; top_heap_words = 0 } +end + +module Measure_diff = struct + let timestamp = let r = ref (-1) in fun () -> incr r; !r + type t = { + timestamp : int; + duration : float; + allocated_words : float; + top_heap_words_increase : int; + } + let zero () = { + timestamp = timestamp (); + duration = 0.; + allocated_words = 0.; + top_heap_words_increase = 0; + } + let accumulate t (m1 : Measure.t) (m2 : Measure.t) = { + timestamp = t.timestamp; + duration = t.duration +. (m2.time -. m1.time); + allocated_words = + t.allocated_words +. (m2.allocated_words -. m1.allocated_words); + top_heap_words_increase = + t.top_heap_words_increase + (m2.top_heap_words - m1.top_heap_words); + } + let of_diff m1 m2 = + accumulate (zero ()) m1 m2 +end + +type hierarchy = + | E of (string, Measure_diff.t * hierarchy) Hashtbl.t +[@@unboxed] + +let create () = E (Hashtbl.create 2) +let hierarchy = ref (create ()) +let initial_measure = ref None +let reset () = hierarchy := create (); initial_measure := None + +let record_call ?(accumulate = false) name f = + let E prev_hierarchy = !hierarchy in + let start_measure = Measure.create () in + if !initial_measure = None then initial_measure := Some start_measure; + let this_measure_diff, this_table = + (* We allow the recording of multiple categories by the same name, for tools + like ocamldoc that use the compiler libs but don't care about profile + information, and so may record, say, "parsing" multiple times. 
*) + if accumulate + then + match Hashtbl.find prev_hierarchy name with + | exception Not_found -> Measure_diff.zero (), Hashtbl.create 2 + | measure_diff, E table -> + Hashtbl.remove prev_hierarchy name; + measure_diff, table + else Measure_diff.zero (), Hashtbl.create 2 + in + hierarchy := E this_table; + Misc.try_finally f + (fun () -> + hierarchy := E prev_hierarchy; + let end_measure = Measure.create () in + let measure_diff = + Measure_diff.accumulate this_measure_diff start_measure end_measure in + Hashtbl.add prev_hierarchy name (measure_diff, E this_table)) + +let record ?accumulate pass f x = record_call ?accumulate pass (fun () -> f x) + +type display = { + to_string : max:float -> width:int -> string; + worth_displaying : max:float -> bool; +} + +let time_display v : display = + (* Because indentation is meaningful, and because the durations are + the first element of each row, we can't pad them with spaces. *) + let to_string_without_unit v ~width = Printf.sprintf "%0*.03f" width v in + let to_string ~max:_ ~width = + to_string_without_unit v ~width:(width - 1) ^ "s" in + let worth_displaying ~max:_ = + float_of_string (to_string_without_unit v ~width:0) <> 0. in + { to_string; worth_displaying } + +let memory_word_display = + (* To make memory numbers easily comparable across rows, we choose a single + scale for an entire column. To keep the display compact and not overly + precise (no one cares about the exact number of bytes), we pick the largest + scale we can and we only show 3 digits. Avoiding showing tiny numbers also + allows us to avoid displaying passes that barely allocate compared to the + rest of the compiler. *) + let bytes_of_words words = words *. float_of_int (Sys.word_size / 8) in + let to_string_without_unit v ~width scale = + let precision = 3 and precision_power = 1e3 in + let v_rescaled = bytes_of_words v /. scale in + let v_rounded = + floor (v_rescaled *. precision_power +. 0.5) /. precision_power in + let v_str = Printf.sprintf "%.*f" precision v_rounded in + let index_of_dot = String.index v_str '.' in + let v_str_truncated = + String.sub v_str 0 + (if index_of_dot >= precision + then index_of_dot + else precision + 1) + in + Printf.sprintf "%*s" width v_str_truncated + in + let choose_memory_scale = + let units = [|"B"; "kB"; "MB"; "GB"|] in + fun words -> + let bytes = bytes_of_words words in + let scale = ref (Array.length units - 1) in + while !scale > 0 && bytes < 1024. ** float_of_int !scale do + decr scale + done; + 1024. ** float_of_int !scale, units.(!scale) + in + fun ?previous v : display -> + let to_string ~max ~width = + let scale, scale_str = choose_memory_scale max in + let width = width - String.length scale_str in + to_string_without_unit v ~width scale ^ scale_str + in + let worth_displaying ~max = + let scale, _ = choose_memory_scale max in + float_of_string (to_string_without_unit v ~width:0 scale) <> 0. + && match previous with + | None -> true + | Some p -> + (* This branch is for numbers that represent absolute quantity, rather + than differences. It allows us to skip displaying the same absolute + quantity many times in a row. 
*) + to_string_without_unit p ~width:0 scale + <> to_string_without_unit v ~width:0 scale + in + { to_string; worth_displaying } + +let profile_list (E table) = + let l = Hashtbl.fold (fun k d l -> (k, d) :: l) table [] in + List.sort (fun (_, (p1, _)) (_, (p2, _)) -> + compare p1.Measure_diff.timestamp p2.Measure_diff.timestamp) l + +let compute_other_category (E table : hierarchy) (total : Measure_diff.t) = + let r = ref total in + Hashtbl.iter (fun _pass ((p2 : Measure_diff.t), _) -> + let p1 = !r in + r := { + timestamp = p1.timestamp; + duration = p1.duration -. p2.duration; + allocated_words = p1.allocated_words -. p2.allocated_words; + top_heap_words_increase = + p1.top_heap_words_increase - p2.top_heap_words_increase; + } + ) table; + !r + +type row = R of string * (float * display) list * row list +type column = [ `Time | `Alloc | `Top_heap | `Abs_top_heap ] + +let rec rows_of_hierarchy ~nesting make_row name measure_diff hierarchy env = + let rows = + rows_of_hierarchy_list + ~nesting:(nesting + 1) make_row hierarchy measure_diff env in + let values, env = + make_row env measure_diff ~toplevel_other:(nesting = 0 && name = "other") in + R (name, values, rows), env + +and rows_of_hierarchy_list ~nesting make_row hierarchy total env = + let list = profile_list hierarchy in + let list = + if list <> [] || nesting = 0 + then list @ [ "other", (compute_other_category hierarchy total, create ()) ] + else [] + in + let env = ref env in + List.map (fun (name, (measure_diff, hierarchy)) -> + let a, env' = + rows_of_hierarchy ~nesting make_row name measure_diff hierarchy !env in + env := env'; + a + ) list + +let rows_of_hierarchy hierarchy measure_diff initial_measure columns = + (* Computing top heap size is a bit complicated: if the compiler applies a + list of passes n times (rather than applying pass1 n times, then pass2 n + times etc), we only show one row for that pass but what does "top heap + size at the end of that pass" even mean? + It seems the only sensible answer is to pretend the compiler applied pass1 + n times, pass2 n times by accumulating all the heap size increases that + happened during each pass, and then compute what the heap size would have + been. So that's what we do. + There's a bit of extra complication, which is that the heap can increase in + between measurements. So the heap sizes can be a bit off until the "other" + rows account for what's missing. We special case the toplevel "other" row + so that any increases that happened before the start of the compilation is + correctly reported, as a lot of code may run before the start of the + compilation (eg functor applications). *) + let make_row prev_top_heap_words (p : Measure_diff.t) ~toplevel_other = + let top_heap_words = + prev_top_heap_words + + p.top_heap_words_increase + - if toplevel_other + then initial_measure.Measure.top_heap_words + else 0 + in + let make value ~f = value, f value in + List.map (function + | `Time -> + make p.duration ~f:time_display + | `Alloc -> + make p.allocated_words ~f:memory_word_display + | `Top_heap -> + make (float_of_int p.top_heap_words_increase) ~f:memory_word_display + | `Abs_top_heap -> + make (float_of_int top_heap_words) + ~f:(memory_word_display ~previous:(float_of_int prev_top_heap_words)) + ) columns, + top_heap_words + in + rows_of_hierarchy_list ~nesting:0 make_row hierarchy measure_diff + initial_measure.top_heap_words + +let max_by_column ~n_columns rows = + let a = Array.make n_columns 0. 
in + let rec loop (R (_, values, rows)) = + List.iteri (fun i (v, _) -> a.(i) <- max a.(i) v) values; + List.iter loop rows + in + List.iter loop rows; + a + +let width_by_column ~n_columns ~display_cell rows = + let a = Array.make n_columns 1 in + let rec loop (R (_, values, rows)) = + List.iteri (fun i cell -> + let _, str = display_cell i cell ~width:0 in + a.(i) <- max a.(i) (String.length str) + ) values; + List.iter loop rows; + in + List.iter loop rows; + a + +let display_rows ppf rows = + let n_columns = + match rows with + | [] -> 0 + | R (_, values, _) :: _ -> List.length values + in + let maxs = max_by_column ~n_columns rows in + let display_cell i (_, c) ~width = + let display_cell = c.worth_displaying ~max:maxs.(i) in + display_cell, if display_cell + then c.to_string ~max:maxs.(i) ~width + else String.make width '-' + in + let widths = width_by_column ~n_columns ~display_cell rows in + let rec loop (R (name, values, rows)) ~indentation = + let worth_displaying, cell_strings = + values + |> List.mapi (fun i cell -> display_cell i cell ~width:widths.(i)) + |> List.split + in + if List.exists (fun b -> b) worth_displaying then + Format.fprintf ppf "%s%s %s@\n" + indentation (String.concat " " cell_strings) name; + List.iter (loop ~indentation:(" " ^ indentation)) rows; + in + List.iter (loop ~indentation:"") rows + +let print ppf columns = + match columns with + | [] -> () + | _ :: _ -> + let initial_measure = + match !initial_measure with + | Some v -> v + | None -> Measure.zero + in + let total = Measure_diff.of_diff Measure.zero (Measure.create ()) in + display_rows ppf (rows_of_hierarchy !hierarchy total initial_measure columns) + +let column_mapping = [ + "time", `Time; + "alloc", `Alloc; + "top-heap", `Top_heap; + "absolute-top-heap", `Abs_top_heap; +] + +let column_names = List.map fst column_mapping + +let options_doc = + Printf.sprintf + " Print performance information for each pass\ + \n The columns are: %s." + (String.concat " " column_names) + +let all_columns = List.map snd column_mapping + +let generate = "generate" +let transl = "transl" +let typing = "typing" + +end +module Clflags : sig +#1 "clflags.mli" +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 2005 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Command line flags *) + +(** Optimization parameters represented as ints indexed by round number. *) +module Int_arg_helper : sig + type parsed + + val parse : string -> string -> parsed ref -> unit + + type parse_result = + | Ok + | Parse_failed of exn + val parse_no_error : string -> parsed ref -> parse_result + + val get : key:int -> parsed -> int +end + +(** Optimization parameters represented as floats indexed by round number. 
*) +module Float_arg_helper : sig + type parsed + + val parse : string -> string -> parsed ref -> unit + + type parse_result = + | Ok + | Parse_failed of exn + val parse_no_error : string -> parsed ref -> parse_result + + val get : key:int -> parsed -> float +end + +type inlining_arguments = { + inline_call_cost : int option; + inline_alloc_cost : int option; + inline_prim_cost : int option; + inline_branch_cost : int option; + inline_indirect_cost : int option; + inline_lifting_benefit : int option; + inline_branch_factor : float option; + inline_max_depth : int option; + inline_max_unroll : int option; + inline_threshold : float option; + inline_toplevel_threshold : int option; +} + +val classic_arguments : inlining_arguments +val o1_arguments : inlining_arguments +val o2_arguments : inlining_arguments +val o3_arguments : inlining_arguments + +(** Set all the inlining arguments for a round. + The default is set if no round is provided. *) +val use_inlining_arguments_set : ?round:int -> inlining_arguments -> unit + +val objfiles : string list ref +val ccobjs : string list ref +val dllibs : string list ref +val compile_only : bool ref +val output_name : string option ref +val include_dirs : string list ref +val no_std_include : bool ref +val print_types : bool ref +val make_archive : bool ref +val debug : bool ref +val fast : bool ref +val use_linscan : bool ref +val link_everything : bool ref +val custom_runtime : bool ref +val no_check_prims : bool ref +val bytecode_compatible_32 : bool ref +val output_c_object : bool ref +val output_complete_object : bool ref +val all_ccopts : string list ref +val classic : bool ref +val nopervasives : bool ref +val open_modules : string list ref +val preprocessor : string option ref +val all_ppx : string list ref +val annotations : bool ref +val binary_annotations : bool ref +val use_threads : bool ref +val use_vmthreads : bool ref +val noassert : bool ref +val verbose : bool ref +val noprompt : bool ref +val nopromptcont : bool ref +val init_file : string option ref +val noinit : bool ref +val noversion : bool ref +val use_prims : string ref +val use_runtime : string ref +val principal : bool ref +val real_paths : bool ref +val recursive_types : bool ref +val strict_sequence : bool ref +val strict_formats : bool ref +val applicative_functors : bool ref +val make_runtime : bool ref +val gprofile : bool ref +val c_compiler : string option ref +val no_auto_link : bool ref +val dllpaths : string list ref +val make_package : bool ref +val for_package : string option ref +val error_size : int ref +val float_const_prop : bool ref +val transparent_modules : bool ref +val dump_source : bool ref +val dump_parsetree : bool ref +val dump_typedtree : bool ref +val dump_rawlambda : bool ref +val dump_lambda : bool ref +val dump_rawclambda : bool ref +val dump_clambda : bool ref +val dump_rawflambda : bool ref +val dump_flambda : bool ref +val dump_flambda_let : int option ref +val dump_instr : bool ref +val keep_asm_file : bool ref +val optimize_for_speed : bool ref +val dump_cmm : bool ref +val dump_selection : bool ref +val dump_cse : bool ref +val dump_live : bool ref +val dump_avail : bool ref +val debug_runavail : bool ref +val dump_spill : bool ref +val dump_split : bool ref +val dump_interf : bool ref +val dump_prefer : bool ref +val dump_regalloc : bool ref +val dump_reload : bool ref +val dump_scheduling : bool ref +val dump_linear : bool ref +val dump_interval : bool ref +val keep_startup_file : bool ref +val dump_combine : bool ref +val native_code : 
bool ref +val default_inline_threshold : float +val inline_threshold : Float_arg_helper.parsed ref +val inlining_report : bool ref +val simplify_rounds : int option ref +val default_simplify_rounds : int ref +val rounds : unit -> int +val default_inline_max_unroll : int +val inline_max_unroll : Int_arg_helper.parsed ref +val default_inline_toplevel_threshold : int +val inline_toplevel_threshold : Int_arg_helper.parsed ref +val default_inline_call_cost : int +val default_inline_alloc_cost : int +val default_inline_prim_cost : int +val default_inline_branch_cost : int +val default_inline_indirect_cost : int +val default_inline_lifting_benefit : int +val inline_call_cost : Int_arg_helper.parsed ref +val inline_alloc_cost : Int_arg_helper.parsed ref +val inline_prim_cost : Int_arg_helper.parsed ref +val inline_branch_cost : Int_arg_helper.parsed ref +val inline_indirect_cost : Int_arg_helper.parsed ref +val inline_lifting_benefit : Int_arg_helper.parsed ref +val default_inline_branch_factor : float +val inline_branch_factor : Float_arg_helper.parsed ref +val dont_write_files : bool ref +val std_include_flag : string -> string +val std_include_dir : unit -> string list +val shared : bool ref +val dlcode : bool ref +val pic_code : bool ref +val runtime_variant : string ref +val force_slash : bool ref +val keep_docs : bool ref +val keep_locs : bool ref +val unsafe_string : bool ref +val opaque : bool ref +val profile_columns : Profile.column list ref +val flambda_invariant_checks : bool ref +val unbox_closures : bool ref +val unbox_closures_factor : int ref +val default_unbox_closures_factor : int +val unbox_free_vars_of_closures : bool ref +val unbox_specialised_args : bool ref +val clambda_checks : bool ref +val default_inline_max_depth : int +val inline_max_depth : Int_arg_helper.parsed ref +val remove_unused_arguments : bool ref +val dump_flambda_verbose : bool ref +val classic_inlining : bool ref +val afl_instrument : bool ref +val afl_inst_ratio : int ref + +val all_passes : string list ref +val dumped_pass : string -> bool +val set_dumped_pass : string -> bool -> unit + +val parse_color_setting : string -> Misc.Color.setting option +val color : Misc.Color.setting option ref + +val unboxed_types : bool ref + +val arg_spec : (string * Arg.spec * string) list ref + +(* [add_arguments __LOC__ args] will add the arguments from [args] at + the end of [arg_spec], checking that they have not already been + added by [add_arguments] before. A warning is printed showing the + locations of the function from which the argument was previously + added. *) +val add_arguments : string -> (string * Arg.spec * string) list -> unit + +(* [parse_arguments anon_arg usage] will parse the arguments, using + the arguments provided in [Clflags.arg_spec]. It allows plugins to + provide their own arguments. 
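+
+   Illustrative sketch only (the flag name and usage string below are
+   hypothetical, not part of this interface): a plugin first registers its
+   flags via [add_arguments], and the driver's later call to
+   [parse_arguments] then accepts them alongside the built-in ones.
+
+     let my_plugin_flag = ref false                     (* plugin side *)
+     let () =
+       Clflags.add_arguments __LOC__
+         ["-my-plugin-flag", Arg.Set my_plugin_flag, " Enable the hypothetical plugin"]
+
+     let () =                                           (* driver side *)
+       Clflags.parse_arguments (fun _anon_file -> ()) "usage: driver <options> <files>"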
+*) +val parse_arguments : Arg.anon_fun -> string -> unit + +(* [print_arguments usage] print the standard usage message *) +val print_arguments : string -> unit + +(* [reset_arguments ()] clear all declared arguments *) +val reset_arguments : unit -> unit + + +type mli_status = Mli_na | Mli_exists | Mli_non_exists +val no_implicit_current_dir : bool ref +val assume_no_mli : mli_status ref +val record_event_when_debug : bool ref +val bs_vscode : bool +val dont_record_crc_unit : string option ref +val bs_gentype : string option ref +val no_assert_false : bool ref +val dump_location : bool ref + + +end = struct +#1 "clflags.ml" +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Command-line parameters *) + +module Int_arg_helper = Arg_helper.Make (struct + module Key = struct + include Numbers.Int + let of_string = int_of_string + end + + module Value = struct + include Numbers.Int + let of_string = int_of_string + end +end) +module Float_arg_helper = Arg_helper.Make (struct + module Key = struct + include Numbers.Int + let of_string = int_of_string + end + + module Value = struct + include Numbers.Float + let of_string = float_of_string + end +end) + +let objfiles = ref ([] : string list) (* .cmo and .cma files *) +and ccobjs = ref ([] : string list) (* .o, .a, .so and -cclib -lxxx *) +and dllibs = ref ([] : string list) (* .so and -dllib -lxxx *) + +let compile_only = ref false (* -c *) +and output_name = ref (None : string option) (* -o *) +and include_dirs = ref ([] : string list)(* -I *) +and no_std_include = ref false (* -nostdlib *) +and print_types = ref false (* -i *) +and make_archive = ref false (* -a *) +and debug = ref false (* -g *) +and fast = ref false (* -unsafe *) +and use_linscan = ref false (* -linscan *) +and link_everything = ref false (* -linkall *) +and custom_runtime = ref false (* -custom *) +and no_check_prims = ref false (* -no-check-prims *) +and bytecode_compatible_32 = ref false (* -compat-32 *) +and output_c_object = ref false (* -output-obj *) +and output_complete_object = ref false (* -output-complete-obj *) +and all_ccopts = ref ([] : string list) (* -ccopt *) +and classic = ref false (* -nolabels *) +and nopervasives = ref false (* -nopervasives *) +and preprocessor = ref(None : string option) (* -pp *) +and all_ppx = ref ([] : string list) (* -ppx *) +let annotations = ref false (* -annot *) +let binary_annotations = ref false (* -annot *) +and use_threads = ref false (* -thread *) +and use_vmthreads = ref false (* -vmthread *) +and noassert = ref false (* -noassert *) +and verbose = ref false (* -verbose *) +and noversion = ref false (* -no-version *) +and noprompt = ref false (* -noprompt *) +and nopromptcont = ref false (* -nopromptcont *) +and init_file = ref (None : string option) (* -init *) +and noinit = ref false (* -noinit *) +and open_modules = ref [] (* -open *) +and use_prims = ref "" (* -use-prims ... *) +and use_runtime = ref "" (* -use-runtime ... 
*) +and principal = ref false (* -principal *) +and real_paths = ref true (* -short-paths *) +and recursive_types = ref false (* -rectypes *) +and strict_sequence = ref false (* -strict-sequence *) +and strict_formats = ref false (* -strict-formats *) +and applicative_functors = ref true (* -no-app-funct *) +and make_runtime = ref false (* -make-runtime *) +and gprofile = ref false (* -p *) +and c_compiler = ref (None: string option) (* -cc *) +and no_auto_link = ref false (* -noautolink *) +and dllpaths = ref ([] : string list) (* -dllpath *) +and make_package = ref false (* -pack *) +and for_package = ref (None: string option) (* -for-pack *) +and error_size = ref 500 (* -error-size *) +and float_const_prop = ref true (* -no-float-const-prop *) +and transparent_modules = ref false (* -trans-mod *) +let dump_source = ref false (* -dsource *) +let dump_parsetree = ref false (* -dparsetree *) +and dump_typedtree = ref false (* -dtypedtree *) +and dump_rawlambda = ref false (* -drawlambda *) +and dump_lambda = ref false (* -dlambda *) +and dump_rawclambda = ref false (* -drawclambda *) +and dump_clambda = ref false (* -dclambda *) +and dump_rawflambda = ref false (* -drawflambda *) +and dump_flambda = ref false (* -dflambda *) +and dump_flambda_let = ref (None : int option) (* -dflambda-let=... *) +and dump_flambda_verbose = ref false (* -dflambda-verbose *) +and dump_instr = ref false (* -dinstr *) + +let keep_asm_file = ref false (* -S *) +let optimize_for_speed = ref true (* -compact *) +and opaque = ref false (* -opaque *) + +and dump_cmm = ref false (* -dcmm *) +let dump_selection = ref false (* -dsel *) +let dump_cse = ref false (* -dcse *) +let dump_live = ref false (* -dlive *) +let dump_avail = ref false (* -davail *) +let dump_spill = ref false (* -dspill *) +let dump_split = ref false (* -dsplit *) +let dump_interf = ref false (* -dinterf *) +let dump_prefer = ref false (* -dprefer *) +let dump_regalloc = ref false (* -dalloc *) +let dump_reload = ref false (* -dreload *) +let dump_scheduling = ref false (* -dscheduling *) +let dump_linear = ref false (* -dlinear *) +let dump_interval = ref false (* -dinterval *) +let keep_startup_file = ref false (* -dstartup *) +let dump_combine = ref false (* -dcombine *) +let profile_columns : Profile.column list ref = ref [] (* -dprofile/-dtimings *) + +let debug_runavail = ref false (* -drunavail *) + +let native_code = ref false (* set to true under ocamlopt *) + +let force_slash = ref false (* for ocamldep *) +let clambda_checks = ref false (* -clambda-checks *) + +let flambda_invariant_checks = ref true (* -flambda-invariants *) + +let dont_write_files = ref false (* set to true under ocamldoc *) + +let std_include_flag prefix = + if !no_std_include then "" + else (prefix ^ (Filename.quote Config.standard_library)) +;; + +let std_include_dir () = + if !no_std_include then [] else [Config.standard_library] +;; + +let shared = ref false (* -shared *) +let dlcode = ref true (* not -nodynlink *) + +let pic_code = ref (match Config.architecture with (* -fPIC *) + | "amd64" -> true + | _ -> false) + +let runtime_variant = ref "";; (* -runtime-variant *) + +let keep_docs = ref false (* -keep-docs *) +let keep_locs = ref true (* -keep-locs *) +let unsafe_string = + if Config.safe_string then ref false + else ref (not Config.default_safe_string) + (* -safe-string / -unsafe-string *) + +let classic_inlining = ref false (* -Oclassic *) +let inlining_report = ref false (* -inlining-report *) + +let afl_instrument = ref Config.afl_instrument (* 
-afl-instrument *) +let afl_inst_ratio = ref 100 (* -afl-inst-ratio *) + +let simplify_rounds = ref None (* -rounds *) +let default_simplify_rounds = ref 1 (* -rounds *) +let rounds () = + match !simplify_rounds with + | None -> !default_simplify_rounds + | Some r -> r + +let default_inline_threshold = if Config.flambda then 10. else 10. /. 8. +let inline_toplevel_multiplier = 16 +let default_inline_toplevel_threshold = + int_of_float ((float inline_toplevel_multiplier) *. default_inline_threshold) +let default_inline_call_cost = 5 +let default_inline_alloc_cost = 7 +let default_inline_prim_cost = 3 +let default_inline_branch_cost = 5 +let default_inline_indirect_cost = 4 +let default_inline_branch_factor = 0.1 +let default_inline_lifting_benefit = 1300 +let default_inline_max_unroll = 0 +let default_inline_max_depth = 1 + +let inline_threshold = ref (Float_arg_helper.default default_inline_threshold) +let inline_toplevel_threshold = + ref (Int_arg_helper.default default_inline_toplevel_threshold) +let inline_call_cost = ref (Int_arg_helper.default default_inline_call_cost) +let inline_alloc_cost = ref (Int_arg_helper.default default_inline_alloc_cost) +let inline_prim_cost = ref (Int_arg_helper.default default_inline_prim_cost) +let inline_branch_cost = + ref (Int_arg_helper.default default_inline_branch_cost) +let inline_indirect_cost = + ref (Int_arg_helper.default default_inline_indirect_cost) +let inline_branch_factor = + ref (Float_arg_helper.default default_inline_branch_factor) +let inline_lifting_benefit = + ref (Int_arg_helper.default default_inline_lifting_benefit) +let inline_max_unroll = + ref (Int_arg_helper.default default_inline_max_unroll) +let inline_max_depth = + ref (Int_arg_helper.default default_inline_max_depth) + + +let unbox_specialised_args = ref true (* -no-unbox-specialised-args *) +let unbox_free_vars_of_closures = ref true +let unbox_closures = ref false (* -unbox-closures *) +let default_unbox_closures_factor = 10 +let unbox_closures_factor = + ref default_unbox_closures_factor (* -unbox-closures-factor *) +let remove_unused_arguments = ref false (* -remove-unused-arguments *) + +type inlining_arguments = { + inline_call_cost : int option; + inline_alloc_cost : int option; + inline_prim_cost : int option; + inline_branch_cost : int option; + inline_indirect_cost : int option; + inline_lifting_benefit : int option; + inline_branch_factor : float option; + inline_max_depth : int option; + inline_max_unroll : int option; + inline_threshold : float option; + inline_toplevel_threshold : int option; +} + +let set_int_arg round (arg:Int_arg_helper.parsed ref) default value = + let value : int = + match value with + | None -> default + | Some value -> value + in + match round with + | None -> + arg := Int_arg_helper.set_base_default value + (Int_arg_helper.reset_base_overrides !arg) + | Some round -> + arg := Int_arg_helper.add_base_override round value !arg + +let set_float_arg round (arg:Float_arg_helper.parsed ref) default value = + let value = + match value with + | None -> default + | Some value -> value + in + match round with + | None -> + arg := Float_arg_helper.set_base_default value + (Float_arg_helper.reset_base_overrides !arg) + | Some round -> + arg := Float_arg_helper.add_base_override round value !arg + +let use_inlining_arguments_set ?round (arg:inlining_arguments) = + let set_int = set_int_arg round in + let set_float = set_float_arg round in + set_int inline_call_cost default_inline_call_cost arg.inline_call_cost; + set_int inline_alloc_cost 
default_inline_alloc_cost arg.inline_alloc_cost; + set_int inline_prim_cost default_inline_prim_cost arg.inline_prim_cost; + set_int inline_branch_cost + default_inline_branch_cost arg.inline_branch_cost; + set_int inline_indirect_cost + default_inline_indirect_cost arg.inline_indirect_cost; + set_int inline_lifting_benefit + default_inline_lifting_benefit arg.inline_lifting_benefit; + set_float inline_branch_factor + default_inline_branch_factor arg.inline_branch_factor; + set_int inline_max_depth + default_inline_max_depth arg.inline_max_depth; + set_int inline_max_unroll + default_inline_max_unroll arg.inline_max_unroll; + set_float inline_threshold + default_inline_threshold arg.inline_threshold; + set_int inline_toplevel_threshold + default_inline_toplevel_threshold arg.inline_toplevel_threshold + +(* o1 is the default *) +let o1_arguments = { + inline_call_cost = None; + inline_alloc_cost = None; + inline_prim_cost = None; + inline_branch_cost = None; + inline_indirect_cost = None; + inline_lifting_benefit = None; + inline_branch_factor = None; + inline_max_depth = None; + inline_max_unroll = None; + inline_threshold = None; + inline_toplevel_threshold = None; +} + +let classic_arguments = { + inline_call_cost = None; + inline_alloc_cost = None; + inline_prim_cost = None; + inline_branch_cost = None; + inline_indirect_cost = None; + inline_lifting_benefit = None; + inline_branch_factor = None; + inline_max_depth = None; + inline_max_unroll = None; + (* [inline_threshold] matches the current compiler's default. + Note that this particular fraction can be expressed exactly in + floating point. *) + inline_threshold = Some (10. /. 8.); + (* [inline_toplevel_threshold] is not used in classic mode. *) + inline_toplevel_threshold = Some 1; +} + +let o2_arguments = { + inline_call_cost = Some (2 * default_inline_call_cost); + inline_alloc_cost = Some (2 * default_inline_alloc_cost); + inline_prim_cost = Some (2 * default_inline_prim_cost); + inline_branch_cost = Some (2 * default_inline_branch_cost); + inline_indirect_cost = Some (2 * default_inline_indirect_cost); + inline_lifting_benefit = None; + inline_branch_factor = None; + inline_max_depth = Some 2; + inline_max_unroll = None; + inline_threshold = Some 25.; + inline_toplevel_threshold = Some (25 * inline_toplevel_multiplier); +} + +let o3_arguments = { + inline_call_cost = Some (3 * default_inline_call_cost); + inline_alloc_cost = Some (3 * default_inline_alloc_cost); + inline_prim_cost = Some (3 * default_inline_prim_cost); + inline_branch_cost = Some (3 * default_inline_branch_cost); + inline_indirect_cost = Some (3 * default_inline_indirect_cost); + inline_lifting_benefit = None; + inline_branch_factor = Some 0.; + inline_max_depth = Some 3; + inline_max_unroll = Some 1; + inline_threshold = Some 50.; + inline_toplevel_threshold = Some (50 * inline_toplevel_multiplier); +} + +let all_passes = ref [] +let dumped_passes_list = ref [] +let dumped_pass s = + assert(List.mem s !all_passes); + List.mem s !dumped_passes_list + +let set_dumped_pass s enabled = + if (List.mem s !all_passes) then begin + let passes_without_s = List.filter ((<>) s) !dumped_passes_list in + let dumped_passes = + if enabled then + s :: passes_without_s + else + passes_without_s + in + dumped_passes_list := dumped_passes + end + +let parse_color_setting = function + | "auto" -> Some Misc.Color.Auto + | "always" -> Some Misc.Color.Always + | "never" -> Some Misc.Color.Never + | _ -> None +let color = ref None ;; (* -color *) + +let unboxed_types = ref false + 
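+
+(* Editorial sketch, not part of the vendored compiler sources: the presets
+   above are installed through [use_inlining_arguments_set]. Record fields
+   left as [None] fall back to the corresponding [default_inline_*] value,
+   and passing [~round] overrides a single simplification round rather than
+   the base default. A minimal, never-invoked example of how a driver might
+   combine them: *)
+let _example_apply_inlining_presets () =
+  (* install the [o2_arguments] preset as the base default for every round ... *)
+  use_inlining_arguments_set o2_arguments;
+  (* ... then make round 0 use the more aggressive [o3_arguments] preset *)
+  use_inlining_arguments_set ~round:0 o3_arguments
+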
+let arg_spec = ref [] +let arg_names = ref Misc.StringMap.empty + +let reset_arguments () = + arg_spec := []; + arg_names := Misc.StringMap.empty + +let add_arguments loc args = + List.iter (function (arg_name, _, _) as arg -> + try + let loc2 = Misc.StringMap.find arg_name !arg_names in + Printf.eprintf + "Warning: plugin argument %s is already defined:\n" arg_name; + Printf.eprintf " First definition: %s\n" loc2; + Printf.eprintf " New definition: %s\n" loc; + with Not_found -> + arg_spec := !arg_spec @ [ arg ]; + arg_names := Misc.StringMap.add arg_name loc !arg_names + ) args + +let print_arguments usage = + Arg.usage !arg_spec usage + +(* This function is almost the same as [Arg.parse_expand], except + that [Arg.parse_expand] could not be used because it does not take a + reference for [arg_spec].*) +let parse_arguments f msg = + try + let argv = ref Sys.argv in + let current = ref (!Arg.current) in + Arg.parse_and_expand_argv_dynamic current argv arg_spec f msg + with + | Arg.Bad msg -> Printf.eprintf "%s" msg; exit 2 + | Arg.Help msg -> Printf.printf "%s" msg; exit 0 + + +type mli_status = Mli_na | Mli_exists | Mli_non_exists +let no_implicit_current_dir = ref false +let assume_no_mli = ref Mli_na +let record_event_when_debug = ref true (* turned off in BuckleScript*) +let bs_vscode = + try ignore @@ Sys.getenv "BS_VSCODE" ; true with _ -> false + (* We get it from environment variable mostly due to + we don't want to rebuild when flip on or off + *) +let dont_record_crc_unit : string option ref = ref None +let bs_gentype = ref None +let no_assert_false = ref false +let dump_location = ref true + + +end +module Terminfo : sig +#1 "terminfo.mli" +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Basic interface to the terminfo database *) + +type status = + | Uninitialised + | Bad_term + | Good_term of int (* number of lines of the terminal *) +;; +external setup : out_channel -> status = "caml_terminfo_setup";; +external backup : int -> unit = "caml_terminfo_backup";; +external standout : bool -> unit = "caml_terminfo_standout";; +external resume : int -> unit = "caml_terminfo_resume";; + +end = struct +#1 "terminfo.ml" +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +(* Basic interface to the terminfo database *) + +type status = + | Uninitialised + | Bad_term + | Good_term of int +;; +external setup : out_channel -> status = "caml_terminfo_setup";; +external backup : int -> unit = "caml_terminfo_backup";; +external standout : bool -> unit = "caml_terminfo_standout";; +external resume : int -> unit = "caml_terminfo_resume";; + +end +module Warnings : sig +#1 "warnings.mli" +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Pierre Weis && Damien Doligez, INRIA Rocquencourt *) +(* *) +(* Copyright 1998 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +type loc = { + loc_start: Lexing.position; + loc_end: Lexing.position; + loc_ghost: bool; +} + +type t = + | Comment_start (* 1 *) + | Comment_not_end (* 2 *) + | Deprecated of string * loc * loc (* 3 *) + | Fragile_match of string (* 4 *) + | Partial_application (* 5 *) + | Labels_omitted of string list (* 6 *) + | Method_override of string list (* 7 *) + | Partial_match of string (* 8 *) + | Non_closed_record_pattern of string (* 9 *) + | Statement_type (* 10 *) + | Unused_match (* 11 *) + | Unused_pat (* 12 *) + | Instance_variable_override of string list (* 13 *) + | Illegal_backslash (* 14 *) + | Implicit_public_methods of string list (* 15 *) + | Unerasable_optional_argument (* 16 *) + | Undeclared_virtual_method of string (* 17 *) + | Not_principal of string (* 18 *) + | Without_principality of string (* 19 *) + | Unused_argument (* 20 *) + | Nonreturning_statement (* 21 *) + | Preprocessor of string (* 22 *) + | Useless_record_with (* 23 *) + | Bad_module_name of string (* 24 *) + | All_clauses_guarded (* 8, used to be 25 *) + | Unused_var of string (* 26 *) + | Unused_var_strict of string (* 27 *) + | Wildcard_arg_to_constant_constr (* 28 *) + | Eol_in_string (* 29 *) + | Duplicate_definitions of string * string * string * string (* 30 *) + | Multiple_definition of string * string * string (* 31 *) + | Unused_value_declaration of string (* 32 *) + | Unused_open of string (* 33 *) + | Unused_type_declaration of string (* 34 *) + | Unused_for_index of string (* 35 *) + | Unused_ancestor of string (* 36 *) + | Unused_constructor of string * bool * bool (* 37 *) + | Unused_extension of string * bool * bool * bool (* 38 *) + | Unused_rec_flag (* 39 *) + | Name_out_of_scope of string * string list * bool (* 40 *) + | Ambiguous_name of string list * string list * bool (* 41 *) + | Disambiguated_name of string (* 42 *) + | Nonoptional_label of string (* 43 *) + | Open_shadow_identifier of string * string (* 44 *) + | Open_shadow_label_constructor of string * string (* 45 *) + | Bad_env_variable of string * string (* 46 *) + | Attribute_payload of string * string (* 47 *) + | Eliminated_optional_arguments of string list (* 48 *) + | No_cmi_file of string * string option (* 49 *) + | Bad_docstring of bool (* 50 *) + | Expect_tailcall (* 51 *) + | Fragile_literal_pattern (* 52 *) + | Misplaced_attribute of string (* 53 *) + | Duplicated_attribute of string (* 54 *) + | Inlining_impossible of string (* 55 *) + | Unreachable_case (* 56 *) + | 
Ambiguous_pattern of string list (* 57 *) + | No_cmx_file of string (* 58 *) + | Assignment_to_non_mutable_value (* 59 *) + | Unused_module of string (* 60 *) + | Unboxable_type_in_prim_decl of string (* 61 *) + | Constraint_on_gadt (* 62 *) + + | Bs_unused_attribute of string (* 101 *) + | Bs_polymorphic_comparison (* 102 *) + | Bs_ffi_warning of string (* 103 *) + | Bs_derive_warning of string (* 104 *) + | Bs_fragile_external of string (* 105 *) + | Bs_unimplemented_primitive of string (* 106 *) + | Bs_integer_literal_overflow (* 107 *) + | Bs_uninterpreted_delimiters of string (* 108 *) + +;; + +val parse_options : bool -> string -> unit;; + +val without_warnings : (unit -> 'a) -> 'a + +val is_active : t -> bool;; +val is_error : t -> bool;; + +val defaults_w : string;; +val defaults_warn_error : string;; + +type reporting_information = + { number : int + ; message : string + ; is_error : bool + ; sub_locs : (loc * string) list; + } + +val report : t -> [ `Active of reporting_information | `Inactive ] + +exception Errors;; + +val check_fatal : unit -> unit;; +val reset_fatal: unit -> unit + +val help_warnings: unit -> unit + +type state +val backup: unit -> state +val restore: state -> unit +val mk_lazy: (unit -> 'a) -> 'a Lazy.t + (** Like [Lazy.of_fun], but the function is applied with + the warning settings at the time [mk_lazy] is called. *) + + +val message : t -> string +val number: t -> int +val super_report : + (t -> string) -> + t -> [ `Active of reporting_information | `Inactive ] + + +end = struct +#1 "warnings.ml" +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Pierre Weis && Damien Doligez, INRIA Rocquencourt *) +(* *) +(* Copyright 1998 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +(* When you change this, you need to update the documentation: + - man/ocamlc.m + - man/ocamlopt.m + - manual/manual/cmds/comp.etex + - manual/manual/cmds/native.etex +*) + +type loc = { + loc_start: Lexing.position; + loc_end: Lexing.position; + loc_ghost: bool; +} + +type t = + | Comment_start (* 1 *) + | Comment_not_end (* 2 *) + | Deprecated of string * loc * loc (* 3 *) + | Fragile_match of string (* 4 *) + | Partial_application (* 5 *) + | Labels_omitted of string list (* 6 *) + | Method_override of string list (* 7 *) + | Partial_match of string (* 8 *) + | Non_closed_record_pattern of string (* 9 *) + | Statement_type (* 10 *) + | Unused_match (* 11 *) + | Unused_pat (* 12 *) + | Instance_variable_override of string list (* 13 *) + | Illegal_backslash (* 14 *) + | Implicit_public_methods of string list (* 15 *) + | Unerasable_optional_argument (* 16 *) + | Undeclared_virtual_method of string (* 17 *) + | Not_principal of string (* 18 *) + | Without_principality of string (* 19 *) + | Unused_argument (* 20 *) + | Nonreturning_statement (* 21 *) + | Preprocessor of string (* 22 *) + | Useless_record_with (* 23 *) + | Bad_module_name of string (* 24 *) + | All_clauses_guarded (* 8, used to be 25 *) + | Unused_var of string (* 26 *) + | Unused_var_strict of string (* 27 *) + | Wildcard_arg_to_constant_constr (* 28 *) + | Eol_in_string (* 29 *) + | Duplicate_definitions of string * string * string * string (*30 *) + | Multiple_definition of string * string * string (* 31 *) + | Unused_value_declaration of string (* 32 *) + | Unused_open of string (* 33 *) + | Unused_type_declaration of string (* 34 *) + | Unused_for_index of string (* 35 *) + | Unused_ancestor of string (* 36 *) + | Unused_constructor of string * bool * bool (* 37 *) + | Unused_extension of string * bool * bool * bool (* 38 *) + | Unused_rec_flag (* 39 *) + | Name_out_of_scope of string * string list * bool (* 40 *) + | Ambiguous_name of string list * string list * bool (* 41 *) + | Disambiguated_name of string (* 42 *) + | Nonoptional_label of string (* 43 *) + | Open_shadow_identifier of string * string (* 44 *) + | Open_shadow_label_constructor of string * string (* 45 *) + | Bad_env_variable of string * string (* 46 *) + | Attribute_payload of string * string (* 47 *) + | Eliminated_optional_arguments of string list (* 48 *) + | No_cmi_file of string * string option (* 49 *) + | Bad_docstring of bool (* 50 *) + | Expect_tailcall (* 51 *) + | Fragile_literal_pattern (* 52 *) + | Misplaced_attribute of string (* 53 *) + | Duplicated_attribute of string (* 54 *) + | Inlining_impossible of string (* 55 *) + | Unreachable_case (* 56 *) + | Ambiguous_pattern of string list (* 57 *) + | No_cmx_file of string (* 58 *) + | Assignment_to_non_mutable_value (* 59 *) + | Unused_module of string (* 60 *) + | Unboxable_type_in_prim_decl of string (* 61 *) + | Constraint_on_gadt (* 62 *) + + + | Bs_unused_attribute of string (* 101 *) + | Bs_polymorphic_comparison (* 102 *) + | Bs_ffi_warning of string (* 103 *) + | Bs_derive_warning of string (* 104 *) + | Bs_fragile_external of string (* 105 *) + | Bs_unimplemented_primitive of string (* 106 *) + | Bs_integer_literal_overflow (* 107 *) + | Bs_uninterpreted_delimiters of string (* 108 *) + +;; + +(* If you remove a warning, leave a hole in the numbering. NEVER change + the numbers of existing warnings. 
+ If you add a new warning, add it at the end with a new number; + do NOT reuse one of the holes. +*) + +let number = function + | Comment_start -> 1 + | Comment_not_end -> 2 + | Deprecated _ -> 3 + | Fragile_match _ -> 4 + | Partial_application -> 5 + | Labels_omitted _ -> 6 + | Method_override _ -> 7 + | Partial_match _ -> 8 + | Non_closed_record_pattern _ -> 9 + | Statement_type -> 10 + | Unused_match -> 11 + | Unused_pat -> 12 + | Instance_variable_override _ -> 13 + | Illegal_backslash -> 14 + | Implicit_public_methods _ -> 15 + | Unerasable_optional_argument -> 16 + | Undeclared_virtual_method _ -> 17 + | Not_principal _ -> 18 + | Without_principality _ -> 19 + | Unused_argument -> 20 + | Nonreturning_statement -> 21 + | Preprocessor _ -> 22 + | Useless_record_with -> 23 + | Bad_module_name _ -> 24 + | All_clauses_guarded -> 8 (* used to be 25 *) + | Unused_var _ -> 26 + | Unused_var_strict _ -> 27 + | Wildcard_arg_to_constant_constr -> 28 + | Eol_in_string -> 29 + | Duplicate_definitions _ -> 30 + | Multiple_definition _ -> 31 + | Unused_value_declaration _ -> 32 + | Unused_open _ -> 33 + | Unused_type_declaration _ -> 34 + | Unused_for_index _ -> 35 + | Unused_ancestor _ -> 36 + | Unused_constructor _ -> 37 + | Unused_extension _ -> 38 + | Unused_rec_flag -> 39 + | Name_out_of_scope _ -> 40 + | Ambiguous_name _ -> 41 + | Disambiguated_name _ -> 42 + | Nonoptional_label _ -> 43 + | Open_shadow_identifier _ -> 44 + | Open_shadow_label_constructor _ -> 45 + | Bad_env_variable _ -> 46 + | Attribute_payload _ -> 47 + | Eliminated_optional_arguments _ -> 48 + | No_cmi_file _ -> 49 + | Bad_docstring _ -> 50 + | Expect_tailcall -> 51 + | Fragile_literal_pattern -> 52 + | Misplaced_attribute _ -> 53 + | Duplicated_attribute _ -> 54 + | Inlining_impossible _ -> 55 + | Unreachable_case -> 56 + | Ambiguous_pattern _ -> 57 + | No_cmx_file _ -> 58 + | Assignment_to_non_mutable_value -> 59 + | Unused_module _ -> 60 + | Unboxable_type_in_prim_decl _ -> 61 + | Constraint_on_gadt -> 62 + + + | Bs_unused_attribute _ -> 101 + | Bs_polymorphic_comparison -> 102 + | Bs_ffi_warning _ -> 103 + | Bs_derive_warning _ -> 104 + | Bs_fragile_external _ -> 105 + | Bs_unimplemented_primitive _ -> 106 + | Bs_integer_literal_overflow -> 107 + | Bs_uninterpreted_delimiters _ -> 108 + +;; + +let last_warning_number = 108 +let letter_all = + let rec loop i = if i = 0 then [] else i :: loop (i - 1) in + loop last_warning_number + +(* Must be the max number returned by the [number] function. 
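+
+   Editorial note: the warning-specification strings interpreted further below
+   by [parse_opt] (e.g. [defaults_w]) combine these numbers and letters:
+   '+' enables the listed warnings, '-' disables them, and '@' both enables
+   them and turns them into errors; a letter expands through the [letter]
+   table ('a' meaning all warnings) and [n1..n2] denotes an inclusive range.
+   For example, "+a-40..42" enables every warning except 40 through 42.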
*) + +let letter = function + | 'a' -> letter_all + | 'b' -> [] + | 'c' -> [1; 2] + | 'd' -> [3] + | 'e' -> [4] + | 'f' -> [5] + | 'g' -> [] + | 'h' -> [] + | 'i' -> [] + | 'j' -> [] + | 'k' -> [32; 33; 34; 35; 36; 37; 38; 39] + | 'l' -> [6] + | 'm' -> [7] + | 'n' -> [] + | 'o' -> [] + | 'p' -> [8] + | 'q' -> [] + | 'r' -> [9] + | 's' -> [10] + | 't' -> [] + | 'u' -> [11; 12] + | 'v' -> [13] + | 'w' -> [] + | 'x' -> [14; 15; 16; 17; 18; 19; 20; 21; 22; 23; 24; 30] + | 'y' -> [26] + | 'z' -> [27] + | _ -> assert false +;; + +type state = + { + active: bool array; + error: bool array; + } + +let current = + ref + { + active = Array.make (last_warning_number + 1) true; + error = Array.make (last_warning_number + 1) false; + } + +let disabled = ref false + +let without_warnings f = + Misc.protect_refs [Misc.R(disabled, true)] f + +let backup () = !current + +let restore x = current := x + +let is_active x = not !disabled && (!current).active.(number x);; +let is_error x = not !disabled && (!current).error.(number x);; + +let mk_lazy f = + let state = backup () in + lazy + ( + let prev = backup () in + restore state; + try + let r = f () in + restore prev; + r + with exn -> + restore prev; + raise exn + ) + +let parse_opt error active flags s = + let set i = flags.(i) <- true in + let clear i = flags.(i) <- false in + let set_all i = active.(i) <- true; error.(i) <- true in + let error () = raise (Arg.Bad "Ill-formed list of warnings") in + let rec get_num n i = + if i >= String.length s then i, n + else match s.[i] with + | '0'..'9' -> get_num (10 * n + Char.code s.[i] - Char.code '0') (i + 1) + | _ -> i, n + in + let get_range i = + let i, n1 = get_num 0 i in + if i + 2 < String.length s && s.[i] = '.' && s.[i + 1] = '.' then + let i, n2 = get_num 0 (i + 2) in + if n2 < n1 then error (); + i, n1, n2 + else + i, n1, n1 + in + let rec loop i = + if i >= String.length s then () else + match s.[i] with + | 'A' .. 'Z' -> + List.iter set (letter (Char.lowercase_ascii s.[i])); + loop (i+1) + | 'a' .. 'z' -> + List.iter clear (letter s.[i]); + loop (i+1) + | '+' -> loop_letter_num set (i+1) + | '-' -> loop_letter_num clear (i+1) + | '@' -> loop_letter_num set_all (i+1) + | _ -> error () + and loop_letter_num myset i = + if i >= String.length s then error () else + match s.[i] with + | '0' .. '9' -> + let i, n1, n2 = get_range i in + for n = n1 to min n2 last_warning_number do myset n done; + loop i + | 'A' .. 'Z' -> + List.iter myset (letter (Char.lowercase_ascii s.[i])); + loop (i+1) + | 'a' .. 'z' -> + List.iter myset (letter s.[i]); + loop (i+1) + | _ -> error () + in + loop 0 +;; + +let parse_options errflag s = + let error = Array.copy (!current).error in + let active = Array.copy (!current).active in + parse_opt error active (if errflag then error else active) s; + current := {error; active} + +(* If you change these, don't forget to change them in man/ocamlc.m *) +let defaults_w = "+a-4-6-7-9-27-29-32..42-44-45-48-50-60-102";; +let defaults_warn_error = "-a+31";; + +let () = + if not !Config.bs_only then ( + parse_options false defaults_w; + parse_options true defaults_warn_error; + ) + +let message = function + | Comment_start -> "this is the start of a comment." + | Comment_not_end -> "this is not the end of a comment." 
+ | Deprecated (s, _, _) -> + (* Reduce \r\n to \n: + - Prevents any \r characters being printed on Unix when processing + Windows sources + - Prevents \r\r\n being generated on Windows, which affects the + testsuite + *) + "deprecated: " ^ Misc.normalise_eol s + | Fragile_match "" -> + "this pattern-matching is fragile." + | Fragile_match s -> + "this pattern-matching is fragile.\n\ + It will remain exhaustive when constructors are added to type " ^ s ^ "." + | Partial_application -> + "this function application is partial,\n\ + maybe some arguments are missing." + | Labels_omitted [] -> assert false + | Labels_omitted [l] -> + "label " ^ l ^ " was omitted in the application of this function." + | Labels_omitted ls -> + "labels " ^ String.concat ", " ls ^ + " were omitted in the application of this function." + | Method_override [lab] -> + "the method " ^ lab ^ " is overridden." + | Method_override (cname :: slist) -> + String.concat " " + ("the following methods are overridden by the class" + :: cname :: ":\n " :: slist) + | Method_override [] -> assert false + | Partial_match "" -> "this pattern-matching is not exhaustive." + | Partial_match s -> + "this pattern-matching is not exhaustive.\n\ + Here is an example of a case that is not matched:\n" ^ s + | Non_closed_record_pattern s -> + "the following labels are not bound in this record pattern:\n" ^ s ^ + "\nEither bind these labels explicitly or add '; _' to the pattern." + | Statement_type -> + "this expression should have type unit." + | Unused_match -> "this match case is unused." + | Unused_pat -> "this sub-pattern is unused." + | Instance_variable_override [lab] -> + "the instance variable " ^ lab ^ " is overridden.\n" ^ + "The behaviour changed in ocaml 3.10 (previous behaviour was hiding.)" + | Instance_variable_override (cname :: slist) -> + String.concat " " + ("the following instance variables are overridden by the class" + :: cname :: ":\n " :: slist) ^ + "\nThe behaviour changed in ocaml 3.10 (previous behaviour was hiding.)" + | Instance_variable_override [] -> assert false + | Illegal_backslash -> "illegal backslash escape in string." + | Implicit_public_methods l -> + "the following private methods were made public implicitly:\n " + ^ String.concat " " l ^ "." + | Unerasable_optional_argument -> "this optional argument cannot be erased." + | Undeclared_virtual_method m -> "the virtual method "^m^" is not declared." + | Not_principal s -> s^" is not principal." + | Without_principality s -> s^" without principality." + | Unused_argument -> "this argument will not be used by the function." + | Nonreturning_statement -> + "this statement never returns (or has an unsound type.)" + | Preprocessor s -> s + | Useless_record_with -> + "all the fields are explicitly listed in this record:\n\ + the 'with' clause is useless." + | Bad_module_name (modname) -> + "bad source file name: \"" ^ modname ^ "\" is not a valid module name." + | All_clauses_guarded -> + "this pattern-matching is not exhaustive.\n\ + All clauses in this pattern-matching are guarded." + | Unused_var v | Unused_var_strict v -> "unused variable " ^ v ^ "." + | Wildcard_arg_to_constant_constr -> + "wildcard pattern given as argument to a constant constructor" + | Eol_in_string -> + "unescaped end-of-line in a string constant (non-portable code)" + | Duplicate_definitions (kind, cname, tc1, tc2) -> + Printf.sprintf "the %s %s is defined in both types %s and %s." 
+ kind cname tc1 tc2 + | Multiple_definition(modname, file1, file2) -> + Printf.sprintf + "files %s and %s both define a module named %s" + file1 file2 modname + | Unused_value_declaration v -> "unused value " ^ v ^ "." + | Unused_open s -> "unused open " ^ s ^ "." + | Unused_type_declaration s -> "unused type " ^ s ^ "." + | Unused_for_index s -> "unused for-loop index " ^ s ^ "." + | Unused_ancestor s -> "unused ancestor variable " ^ s ^ "." + | Unused_constructor (s, false, false) -> "unused constructor " ^ s ^ "." + | Unused_constructor (s, true, _) -> + "constructor " ^ s ^ + " is never used to build values.\n\ + (However, this constructor appears in patterns.)" + | Unused_constructor (s, false, true) -> + "constructor " ^ s ^ + " is never used to build values.\n\ + Its type is exported as a private type." + | Unused_extension (s, is_exception, cu_pattern, cu_privatize) -> + let kind = + if is_exception then "exception" else "extension constructor" in + let name = kind ^ " " ^ s in + begin match cu_pattern, cu_privatize with + | false, false -> "unused " ^ name + | true, _ -> + name ^ + " is never used to build values.\n\ + (However, this constructor appears in patterns.)" + | false, true -> + name ^ + " is never used to build values.\n\ + It is exported or rebound as a private extension." + end + | Unused_rec_flag -> + "unused rec flag." + | Name_out_of_scope (ty, [nm], false) -> + nm ^ " was selected from type " ^ ty ^ + ".\nIt is not visible in the current scope, and will not \n\ + be selected if the type becomes unknown." + | Name_out_of_scope (_, _, false) -> assert false + | Name_out_of_scope (ty, slist, true) -> + "this record of type "^ ty ^" contains fields that are \n\ + not visible in the current scope: " + ^ String.concat " " slist ^ ".\n\ + They will not be selected if the type becomes unknown." + | Ambiguous_name ([s], tl, false) -> + s ^ " belongs to several types: " ^ String.concat " " tl ^ + "\nThe first one was selected. Please disambiguate if this is wrong." + | Ambiguous_name (_, _, false) -> assert false + | Ambiguous_name (_slist, tl, true) -> + "these field labels belong to several types: " ^ + String.concat " " tl ^ + "\nThe first one was selected. Please disambiguate if this is wrong." + | Disambiguated_name s -> + "this use of " ^ s ^ " relies on type-directed disambiguation,\n\ + it will not compile with OCaml 4.00 or earlier." + | Nonoptional_label s -> + "the label " ^ s ^ " is not optional." + | Open_shadow_identifier (kind, s) -> + Printf.sprintf + "this open statement shadows the %s identifier %s (which is later used)" + kind s + | Open_shadow_label_constructor (kind, s) -> + Printf.sprintf + "this open statement shadows the %s %s (which is later used)" + kind s + | Bad_env_variable (var, s) -> + Printf.sprintf "illegal environment variable %s : %s" var s + | Attribute_payload (a, s) -> + Printf.sprintf "illegal payload for attribute '%s'.\n%s" a s + | Eliminated_optional_arguments sl -> + Printf.sprintf "implicit elimination of optional argument%s %s" + (if List.length sl = 1 then "" else "s") + (String.concat ", " sl) + | No_cmi_file(name, None) -> + "no cmi file was found in path for module " ^ name + | No_cmi_file(name, Some msg) -> + Printf.sprintf + "no valid cmi file was found in path for module %s. 
%s" + name msg + | Bad_docstring unattached -> + if unattached then "unattached documentation comment (ignored)" + else "ambiguous documentation comment" + | Expect_tailcall -> + Printf.sprintf "expected tailcall" + | Fragile_literal_pattern -> + Printf.sprintf + "Code should not depend on the actual values of\n\ + this constructor's arguments. They are only for information\n\ + and may change in future versions. (See manual section 8.5)" + | Unreachable_case -> + "this match case is unreachable.\n\ + Consider replacing it with a refutation case ' -> .'" + | Misplaced_attribute attr_name -> + Printf.sprintf "the %S attribute cannot appear in this context" attr_name + | Duplicated_attribute attr_name -> + Printf.sprintf "the %S attribute is used more than once on this \ + expression" + attr_name + | Inlining_impossible reason -> + Printf.sprintf "Cannot inline: %s" reason + | Ambiguous_pattern vars -> + let msg = + let vars = List.sort String.compare vars in + match vars with + | [] -> assert false + | [x] -> "variable " ^ x + | _::_ -> + "variables " ^ String.concat "," vars in + Printf.sprintf + "Ambiguous or-pattern variables under guard;\n\ + %s may match different arguments. (See manual section 8.5)" + msg + | No_cmx_file name -> + Printf.sprintf + "no cmx file was found in path for module %s, \ + and its interface was not compiled with -opaque" name + | Assignment_to_non_mutable_value -> + "A potential assignment to a non-mutable value was detected \n\ + in this source file. Such assignments may generate incorrect code \n\ + when using Flambda." + | Unused_module s -> "unused module " ^ s ^ "." + | Unboxable_type_in_prim_decl t -> + Printf.sprintf + "This primitive declaration uses type %s, which is unannotated and\n\ + unboxable. The representation of such types may change in future\n\ + versions. You should annotate the declaration of %s with [@@boxed]\n\ + or [@@unboxed]." t t + | Constraint_on_gadt -> + "Type constraints do not apply to GADT cases of variant types." + + + | Bs_unused_attribute s -> + "Unused BuckleScript attribute: " ^ s ^ "\n\ + This means such annotation is not annotated properly. 
\n\ + for example, some annotations is only meaningful in externals \n" + | Bs_polymorphic_comparison -> + "polymorphic comparison introduced (maybe unsafe)" + | Bs_ffi_warning s -> + "BuckleScript FFI warning: " ^ s + | Bs_derive_warning s -> + "BuckleScript bs.deriving warning: " ^ s + | Bs_fragile_external s -> + "BuckleScript warning: " ^ s ^" : the external name is inferred from val name is unsafe from refactoring when changing value name" + | Bs_unimplemented_primitive s -> + "BuckleScript warning: Unimplemented primitive used:" ^ s + | Bs_integer_literal_overflow -> + "BuckleScript warning: Integer literal exceeds the range of representable integers of type int" + | Bs_uninterpreted_delimiters s -> + "BuckleScript warning: Uninterpreted delimiters" ^ s + +;; + +let sub_locs = function + | Deprecated (_, def, use) -> + [ + def, "Definition"; + use, "Expected signature"; + ] + | _ -> [] + +let nerrors = ref 0;; + +type reporting_information = + { number : int + ; message : string + ; is_error : bool + ; sub_locs : (loc * string) list; + } + +let report w = + match w with + | Name_out_of_scope _ (* 40 *) + | Disambiguated_name _ (* 42 *) + | Unboxable_type_in_prim_decl _ (* 61 *) -> `Inactive + (* TODO: we could simplify the code even more *) + | _ -> + match is_active w with + | false -> `Inactive + | true -> + if is_error w then incr nerrors; + `Active { number = number w; message = message w; is_error = is_error w; + sub_locs = sub_locs w; + } +;; + + +let super_report message w = + match is_active w with + | false -> `Inactive + | true -> + if is_error w then incr nerrors; + `Active { number = number w; message = message w; is_error = is_error w; + sub_locs = sub_locs w; + } +;; + +exception Errors;; + +let reset_fatal () = + nerrors := 0 + +let check_fatal () = + if !nerrors > 0 then begin + nerrors := 0; + raise Errors; + end; +;; + +let descriptions = + [ + 1, "Suspicious-looking start-of-comment mark."; + 2, "Suspicious-looking end-of-comment mark."; + 3, "Deprecated feature."; + 4, "Fragile pattern matching: matching that will remain complete even\n\ + \ if additional constructors are added to one of the variant types\n\ + \ matched."; + 5, "Partially applied function: expression whose result has function\n\ + \ type and is ignored."; + 6, "Label omitted in function application."; + 7, "Method overridden."; + 8, "Partial match: missing cases in pattern-matching."; + 9, "Missing fields in a record pattern."; + 10, "Expression on the left-hand side of a sequence that doesn't have \ + type\n\ + \ \"unit\" (and that is not a function, see warning number 5)."; + 11, "Redundant case in a pattern matching (unused match case)."; + 12, "Redundant sub-pattern in a pattern-matching."; + 13, "Instance variable overridden."; + 14, "Illegal backslash escape in a string constant."; + 15, "Private method made public implicitly."; + 16, "Unerasable optional argument."; + 17, "Undeclared virtual method."; + 18, "Non-principal type."; + 19, "Type without principality."; + 20, "Unused function argument."; + 21, "Non-returning statement."; + 22, "Preprocessor warning."; + 23, "Useless record \"with\" clause."; + 24, "Bad module name: the source file name is not a valid OCaml module \ + name."; + 25, "Deprecated: now part of warning 8."; + 26, "Suspicious unused variable: unused variable that is bound\n\ + \ with \"let\" or \"as\", and doesn't start with an underscore (\"_\")\n\ + \ character."; + 27, "Innocuous unused variable: unused variable that is not bound with\n\ + \ \"let\" nor \"as\", and 
doesn't start with an underscore (\"_\")\n\ + \ character."; + 28, "Wildcard pattern given as argument to a constant constructor."; + 29, "Unescaped end-of-line in a string constant (non-portable code)."; + 30, "Two labels or constructors of the same name are defined in two\n\ + \ mutually recursive types."; + 31, "A module is linked twice in the same executable."; + 32, "Unused value declaration."; + 33, "Unused open statement."; + 34, "Unused type declaration."; + 35, "Unused for-loop index."; + 36, "Unused ancestor variable."; + 37, "Unused constructor."; + 38, "Unused extension constructor."; + 39, "Unused rec flag."; + 40, "Constructor or label name used out of scope."; + 41, "Ambiguous constructor or label name."; + 42, "Disambiguated constructor or label name (compatibility warning)."; + 43, "Nonoptional label applied as optional."; + 44, "Open statement shadows an already defined identifier."; + 45, "Open statement shadows an already defined label or constructor."; + 46, "Error in environment variable."; + 47, "Illegal attribute payload."; + 48, "Implicit elimination of optional arguments."; + 49, "Absent cmi file when looking up module alias."; + 50, "Unexpected documentation comment."; + 51, "Warning on non-tail calls if @tailcall present."; + 52, "Fragile constant pattern."; + 53, "Attribute cannot appear in this context"; + 54, "Attribute used more than once on an expression"; + 55, "Inlining impossible"; + 56, "Unreachable case in a pattern-matching (based on type information)."; + 57, "Ambiguous or-pattern variables under guard"; + 58, "Missing cmx file"; + 59, "Assignment to non-mutable value"; + 60, "Unused module declaration"; + 61, "Unboxable type in primitive declaration"; + 62, "Type constraint on GADT type declaration"; + + + 101, "BuckleScript warning: Unused bs attributes"; + 102, "BuckleScript warning: polymorphic comparison introduced (maybe unsafe)"; + 103, "BuckleScript warning: about fragile FFI definitions" ; + 104, "BuckleScript warning: bs.deriving warning with customized message "; + 105, "BuckleScript warning: the external name is inferred from val name is unsafe from refactoring when changing value name"; + 106, "BuckleScript warning: Unimplemented primitive used:"; + 107, "BuckleScript warning: Integer literal exceeds the range of representable integers of type int"; + 108, "BuckleScript warning: Uninterpreted delimiters (for unicode)" + + ] +;; + +let help_warnings () = + List.iter (fun (i, s) -> Printf.printf "%3i %s\n" i s) descriptions; + print_endline " A all warnings"; + for i = Char.code 'b' to Char.code 'z' do + let c = Char.chr i in + match letter c with + | [] -> () + | [n] -> + Printf.printf " %c Alias for warning %i.\n" (Char.uppercase_ascii c) n + | l -> + Printf.printf " %c warnings %s.\n" + (Char.uppercase_ascii c) + (String.concat ", " (List.map string_of_int l)) + done; + exit 0 +;; + +end +module Location : sig +#1 "location.mli" +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +(** Source code locations (ranges of positions), used in parsetree. *) + +open Format + +type t = Warnings.loc = { + loc_start: Lexing.position; + loc_end: Lexing.position; + loc_ghost: bool; +} + +(** Note on the use of Lexing.position in this module. + If [pos_fname = ""], then use [!input_name] instead. + If [pos_lnum = -1], then [pos_bol = 0]. Use [pos_cnum] and + re-parse the file to get the line and character numbers. + Else all fields are correct. +*) + +val none : t +(** An arbitrary value of type [t]; describes an empty ghost range. *) + +val in_file : string -> t +(** Return an empty ghost range located in a given file. *) + +val init : Lexing.lexbuf -> string -> unit +(** Set the file name and line number of the [lexbuf] to be the start + of the named file. *) + +val curr : Lexing.lexbuf -> t +(** Get the location of the current token from the [lexbuf]. *) + +val symbol_rloc: unit -> t +val symbol_gloc: unit -> t + +(** [rhs_loc n] returns the location of the symbol at position [n], starting + at 1, in the current parser rule. *) +val rhs_loc: int -> t + +val input_name: string ref +val set_input_name: string -> unit +val input_lexbuf: Lexing.lexbuf option ref + +val get_pos_info: Lexing.position -> string * int * int (* file, line, char *) +val print_loc: formatter -> t -> unit +val print_error: formatter -> t -> unit +val print_error_cur_file: formatter -> unit -> unit +val print_warning: t -> formatter -> Warnings.t -> unit +val formatter_for_warnings : formatter ref +val prerr_warning: t -> Warnings.t -> unit +val echo_eof: unit -> unit +val reset: unit -> unit + +val default_printer : formatter -> t -> unit +val printer : (formatter -> t -> unit) ref + +val warning_printer : (t -> formatter -> Warnings.t -> unit) ref +(** Hook for intercepting warnings. *) + +val default_warning_printer : t -> formatter -> Warnings.t -> unit +(** Original warning printer for use in hooks. *) + +val highlight_locations: formatter -> t list -> bool + +type 'a loc = { + txt : 'a; + loc : t; +} + +val mknoloc : 'a -> 'a loc +val mkloc : 'a -> t -> 'a loc + +val print: formatter -> t -> unit +val print_compact: formatter -> t -> unit +val print_filename: formatter -> string -> unit + +val absolute_path: string -> string + +val show_filename: string -> string + (** In -absname mode, return the absolute path for this filename. + Otherwise, returns the filename unchanged. 
*) + + +val absname: bool ref + +(** Support for located errors *) + +type error = + { + loc: t; + msg: string; + sub: error list; + if_highlight: string; (* alternative message if locations are highlighted *) + } + +exception Already_displayed_error +exception Error of error + +val error: ?loc:t -> ?sub:error list -> ?if_highlight:string -> string -> error + + +val print_error_prefix : Format.formatter -> unit +val pp_ksprintf : ?before:(formatter -> unit) -> (string -> 'a) -> ('b, formatter, unit, 'a) format4 -> 'b + + +val errorf: ?loc:t -> ?sub:error list -> ?if_highlight:string + -> ('a, Format.formatter, unit, error) format4 -> 'a + +val raise_errorf: ?loc:t -> ?sub:error list -> ?if_highlight:string + -> ('a, Format.formatter, unit, 'b) format4 -> 'a + +val error_of_printer: t -> (formatter -> 'a -> unit) -> 'a -> error + +val error_of_printer_file: (formatter -> 'a -> unit) -> 'a -> error + +val error_of_exn: exn -> [ `Ok of error | `Already_displayed ] option + +val register_error_of_exn: (exn -> error option) -> unit +(** Each compiler module which defines a custom type of exception + which can surface as a user-visible error should register + a "printer" for this exception using [register_error_of_exn]. + The result of the printer is an [error] value containing + a location, a message, and optionally sub-messages (each of them + being located as well). *) + +val report_error: formatter -> error -> unit + +val error_reporter : (formatter -> error -> unit) ref +(** Hook for intercepting error reports. *) + +val default_error_reporter : formatter -> error -> unit +(** Original error reporter for use in hooks. *) + +val report_exception: formatter -> exn -> unit +(** Reraise the exception if it is unknown. *) + +val deprecated: ?def:t -> ?use:t -> t -> string -> unit + +end = struct +#1 "location.ml" +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +open Lexing + +let absname = ref false + (* This reference should be in Clflags, but it would create an additional + dependency and make bootstrapping Camlp4 more difficult. 
*) + +type t = Warnings.loc = { loc_start: position; loc_end: position; loc_ghost: bool };; + +let in_file name = + let loc = { + pos_fname = name; + pos_lnum = 1; + pos_bol = 0; + pos_cnum = -1; + } in + { loc_start = loc; loc_end = loc; loc_ghost = true } +;; + +let none = in_file "_none_";; + +let curr lexbuf = { + loc_start = lexbuf.lex_start_p; + loc_end = lexbuf.lex_curr_p; + loc_ghost = false +};; + +let init lexbuf fname = + lexbuf.lex_curr_p <- { + pos_fname = fname; + pos_lnum = 1; + pos_bol = 0; + pos_cnum = 0; + } +;; + +let symbol_rloc () = { + loc_start = Parsing.symbol_start_pos (); + loc_end = Parsing.symbol_end_pos (); + loc_ghost = false; +};; + +let symbol_gloc () = { + loc_start = Parsing.symbol_start_pos (); + loc_end = Parsing.symbol_end_pos (); + loc_ghost = true; +};; + +let rhs_loc n = { + loc_start = Parsing.rhs_start_pos n; + loc_end = Parsing.rhs_end_pos n; + loc_ghost = false; +};; + +let input_name = ref "_none_" +let input_lexbuf = ref (None : lexbuf option) +let set_input_name name = + if name <> "" then input_name := name +(* Terminal info *) + +let status = ref Terminfo.Uninitialised + +let num_loc_lines = ref 0 (* number of lines already printed after input *) + +let print_updating_num_loc_lines ppf f arg = + let open Format in + let out_functions = pp_get_formatter_out_functions ppf () in + let out_string str start len = + let rec count i c = + if i = start + len then c + else if String.get str i = '\n' then count (succ i) (succ c) + else count (succ i) c in + num_loc_lines := !num_loc_lines + count start 0 ; + out_functions.out_string str start len in + pp_set_formatter_out_functions ppf + { out_functions with out_string } ; + f ppf arg ; + pp_print_flush ppf (); + pp_set_formatter_out_functions ppf out_functions + +(* Highlight the locations using standout mode. *) + +let highlight_terminfo ppf num_lines lb locs = + Format.pp_print_flush ppf (); (* avoid mixing Format and normal output *) + (* Char 0 is at offset -lb.lex_abs_pos in lb.lex_buffer. *) + let pos0 = -lb.lex_abs_pos in + (* Do nothing if the buffer does not contain the whole phrase. *) + if pos0 < 0 then raise Exit; + (* Count number of lines in phrase *) + let lines = ref !num_loc_lines in + for i = pos0 to lb.lex_buffer_len - 1 do + if Bytes.get lb.lex_buffer i = '\n' then incr lines + done; + (* If too many lines, give up *) + if !lines >= num_lines - 2 then raise Exit; + (* Move cursor up that number of lines *) + flush stdout; Terminfo.backup !lines; + (* Print the input, switching to standout for the location *) + let bol = ref false in + print_string "# "; + for pos = 0 to lb.lex_buffer_len - pos0 - 1 do + if !bol then (print_string " "; bol := false); + if List.exists (fun loc -> pos = loc.loc_start.pos_cnum) locs then + Terminfo.standout true; + if List.exists (fun loc -> pos = loc.loc_end.pos_cnum) locs then + Terminfo.standout false; + let c = Bytes.get lb.lex_buffer (pos + pos0) in + print_char c; + bol := (c = '\n') + done; + (* Make sure standout mode is over *) + Terminfo.standout false; + (* Position cursor back to original location *) + Terminfo.resume !num_loc_lines; + flush stdout + +(* Highlight the location by printing it again. *) + +let highlight_dumb ppf lb loc = + (* Char 0 is at offset -lb.lex_abs_pos in lb.lex_buffer. *) + let pos0 = -lb.lex_abs_pos in + (* Do nothing if the buffer does not contain the whole phrase. 
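+     In that case [lb.lex_abs_pos > 0], so [pos0] below is negative and
+     [Exit] is raised; the caller then falls back to not highlighting.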
*) + if pos0 < 0 then raise Exit; + let end_pos = lb.lex_buffer_len - pos0 - 1 in + (* Determine line numbers for the start and end points *) + let line_start = ref 0 and line_end = ref 0 in + for pos = 0 to end_pos do + if Bytes.get lb.lex_buffer (pos + pos0) = '\n' then begin + if loc.loc_start.pos_cnum > pos then incr line_start; + if loc.loc_end.pos_cnum > pos then incr line_end; + end + done; + (* Print character location (useful for Emacs) *) + Format.fprintf ppf "@[Characters %i-%i:@," + loc.loc_start.pos_cnum loc.loc_end.pos_cnum; + (* Print the input, underlining the location *) + Format.pp_print_string ppf " "; + let line = ref 0 in + let pos_at_bol = ref 0 in + for pos = 0 to end_pos do + match Bytes.get lb.lex_buffer (pos + pos0) with + | '\n' -> + if !line = !line_start && !line = !line_end then begin + (* loc is on one line: underline location *) + Format.fprintf ppf "@, "; + for _i = !pos_at_bol to loc.loc_start.pos_cnum - 1 do + Format.pp_print_char ppf ' ' + done; + for _i = loc.loc_start.pos_cnum to loc.loc_end.pos_cnum - 1 do + Format.pp_print_char ppf '^' + done + end; + if !line >= !line_start && !line <= !line_end then begin + Format.fprintf ppf "@,"; + if pos < loc.loc_end.pos_cnum then Format.pp_print_string ppf " " + end; + incr line; + pos_at_bol := pos + 1 + | '\r' -> () (* discard *) + | c -> + if !line = !line_start && !line = !line_end then + (* loc is on one line: print whole line *) + Format.pp_print_char ppf c + else if !line = !line_start then + (* first line of multiline loc: + print a dot for each char before loc_start *) + if pos < loc.loc_start.pos_cnum then + Format.pp_print_char ppf '.' + else + Format.pp_print_char ppf c + else if !line = !line_end then + (* last line of multiline loc: print a dot for each char + after loc_end, even whitespaces *) + if pos < loc.loc_end.pos_cnum then + Format.pp_print_char ppf c + else + Format.pp_print_char ppf '.' + else if !line > !line_start && !line < !line_end then + (* intermediate line of multiline loc: print whole line *) + Format.pp_print_char ppf c + done; + Format.fprintf ppf "@]" + +(* Highlight the location using one of the supported modes. *) + +let rec highlight_locations ppf locs = + match !status with + Terminfo.Uninitialised -> + status := Terminfo.setup stdout; highlight_locations ppf locs + | Terminfo.Bad_term -> + begin match !input_lexbuf with + None -> false + | Some lb -> + let norepeat = + try Sys.getenv "TERM" = "norepeat" with Not_found -> false in + if norepeat then false else + let loc1 = List.hd locs in + try highlight_dumb ppf lb loc1; true + with Exit -> false + end + | Terminfo.Good_term num_lines -> + begin match !input_lexbuf with + None -> false + | Some lb -> + try highlight_terminfo ppf num_lines lb locs; true + with Exit -> false + end + +(* Print the location in some way or another *) + +open Format + +let absolute_path s = (* This function could go into Filename *) + let open Filename in + let s = if is_relative s then concat (Sys.getcwd ()) s else s in + (* Now simplify . and .. 
components *) + let rec aux s = + let base = basename s in + let dir = dirname s in + if dir = s then dir + else if base = current_dir_name then aux dir + else if base = parent_dir_name then dirname (aux dir) + else concat (aux dir) base + in + aux s + +let show_filename file = + if !absname then absolute_path file else file + +let print_filename ppf file = + Format.fprintf ppf "%s" (show_filename file) + +let reset () = + num_loc_lines := 0 + +let (msg_file, msg_line, msg_chars, msg_to, msg_colon) = + ("File \"", "\", line ", ", characters ", "-", ":") + +(* return file, line, char from the given position *) +let get_pos_info pos = + (pos.pos_fname, pos.pos_lnum, pos.pos_cnum - pos.pos_bol) +;; + +let setup_colors () = + Misc.Color.setup !Clflags.color + +let print_loc ppf loc = + setup_colors (); + let (file, line, startchar) = get_pos_info loc.loc_start in + + let startchar = + if Clflags.bs_vscode then startchar + 1 else startchar in + + let endchar = loc.loc_end.pos_cnum - loc.loc_start.pos_cnum + startchar in + if file = "//toplevel//" then begin + if highlight_locations ppf [loc] then () else + fprintf ppf "Characters %i-%i" + loc.loc_start.pos_cnum loc.loc_end.pos_cnum + end else begin + fprintf ppf "%s@{%a%s%i" msg_file print_filename file msg_line line; + if startchar >= 0 then + fprintf ppf "%s%i%s%i" msg_chars startchar msg_to endchar; + fprintf ppf "@}" + end +;; + +let default_printer ppf loc = + setup_colors (); + if loc.loc_start.pos_fname = "//toplevel//" + && highlight_locations ppf [loc] then () + else fprintf ppf "@{%a@}%s@," print_loc loc msg_colon +;; + +let printer = ref default_printer +let print ppf loc = !printer ppf loc + +let error_prefix = "Error" +let warning_prefix = "Warning" + +let print_error_prefix ppf = + setup_colors (); + fprintf ppf "@{%s@}" error_prefix; +;; + +let print_compact ppf loc = + if loc.loc_start.pos_fname = "//toplevel//" + && highlight_locations ppf [loc] then () + else begin + let (file, line, startchar) = get_pos_info loc.loc_start in + let endchar = loc.loc_end.pos_cnum - loc.loc_start.pos_cnum + startchar in + fprintf ppf "%a:%i" print_filename file line; + if startchar >= 0 then fprintf ppf ",%i--%i" startchar endchar + end +;; + +let print_error ppf loc = + fprintf ppf "%a%t:" print loc print_error_prefix; +;; + +let print_error_cur_file ppf () = print_error ppf (in_file !input_name);; + +let default_warning_printer loc ppf w = + match Warnings.report w with + | `Inactive -> () + | `Active { Warnings. 
number; message; is_error; sub_locs } -> + setup_colors (); + fprintf ppf "@["; + print ppf loc; + if is_error + then + fprintf ppf "%t (%s %d): %s@," print_error_prefix + (String.uncapitalize_ascii warning_prefix) number message + else fprintf ppf "@{%s@} %d: %s@," warning_prefix number message; + List.iter + (fun (loc, msg) -> + if loc <> none then fprintf ppf " %a %s@," print loc msg + ) + sub_locs; + fprintf ppf "@]" +;; + +let warning_printer = ref default_warning_printer ;; + +let print_warning loc ppf w = + print_updating_num_loc_lines ppf (!warning_printer loc) w +;; + +let formatter_for_warnings = ref err_formatter;; +let prerr_warning loc w = + print_warning loc !formatter_for_warnings w;; + +let echo_eof () = + print_newline (); + incr num_loc_lines + +type 'a loc = { + txt : 'a; + loc : t; +} + +let mkloc txt loc = { txt ; loc } +let mknoloc txt = mkloc txt none + + +type error = + { + loc: t; + msg: string; + sub: error list; + if_highlight: string; (* alternative message if locations are highlighted *) + } + +let pp_ksprintf ?before k fmt = + let buf = Buffer.create 64 in + let ppf = Format.formatter_of_buffer buf in + Misc.Color.set_color_tag_handling ppf; + begin match before with + | None -> () + | Some f -> f ppf + end; + kfprintf + (fun _ -> + pp_print_flush ppf (); + let msg = Buffer.contents buf in + k msg) + ppf fmt + +(* Shift the formatter's offset by the length of the error prefix, which + is always added by the compiler after the message has been formatted *) +let print_phanton_error_prefix ppf = + Format.pp_print_as ppf (String.length error_prefix + 2 (* ": " *)) "" + +let errorf ?(loc = none) ?(sub = []) ?(if_highlight = "") fmt = + pp_ksprintf + ~before:print_phanton_error_prefix + (fun msg -> {loc; msg; sub; if_highlight}) + fmt + +let error ?(loc = none) ?(sub = []) ?(if_highlight = "") msg = + {loc; msg; sub; if_highlight} + +let error_of_exn : (exn -> error option) list ref = ref [] + +let register_error_of_exn f = error_of_exn := f :: !error_of_exn + +exception Already_displayed_error = Warnings.Errors + +let error_of_exn exn = + match exn with + | Already_displayed_error -> Some `Already_displayed + | _ -> + let rec loop = function + | [] -> None + | f :: rest -> + match f exn with + | Some error -> Some (`Ok error) + | None -> loop rest + in + loop !error_of_exn + +let rec default_error_reporter ppf ({loc; msg; sub; if_highlight} as err) = + let highlighted = + if if_highlight <> "" && loc.loc_start.pos_fname = "//toplevel//" then + let rec collect_locs locs {loc; sub; _} = + List.fold_left collect_locs (loc :: locs) sub + in + let locs = collect_locs [] err in + highlight_locations ppf locs + else + false + in + if highlighted then + Format.pp_print_string ppf if_highlight + else begin + fprintf ppf "@[%a %s" print_error loc msg; + List.iter (Format.fprintf ppf "@,@[<2>%a@]" default_error_reporter) sub; + fprintf ppf "@]" + end + +let error_reporter = ref default_error_reporter + +let report_error ppf err = + print_updating_num_loc_lines ppf !error_reporter err +;; + +let error_of_printer loc print x = + errorf ~loc "%a@?" 
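+  (* "%a" applies [print] to [x], and "@?" flushes the pretty-printer so
+     the full output of [print] ends up in the error's [msg] *)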
print x + +let error_of_printer_file print x = + error_of_printer (in_file !input_name) print x + +let () = + register_error_of_exn + (function + | Sys_error msg -> + Some (errorf ~loc:(in_file !input_name) + "I/O error: %s" msg) + + | Misc.HookExnWrapper {error = e; hook_name; + hook_info={Misc.sourcefile}} -> + let sub = match error_of_exn e with + | None | Some `Already_displayed -> error (Printexc.to_string e) + | Some (`Ok err) -> err + in + Some + (errorf ~loc:(in_file sourcefile) + "In hook %S:" hook_name + ~sub:[sub]) + | _ -> None + ) + +external reraise : exn -> 'a = "%reraise" + +let rec report_exception_rec n ppf exn = + try + match error_of_exn exn with + | None -> reraise exn + | Some `Already_displayed -> () + | Some (`Ok err) -> fprintf ppf "@[%a@]@." report_error err + with exn when n > 0 -> report_exception_rec (n-1) ppf exn + +let report_exception ppf exn = report_exception_rec 5 ppf exn + + +exception Error of error + +let () = + register_error_of_exn + (function + | Error e -> Some e + | _ -> None + ) + +let raise_errorf ?(loc = none) ?(sub = []) ?(if_highlight = "") = + pp_ksprintf + ~before:print_phanton_error_prefix + (fun msg -> raise (Error ({loc; msg; sub; if_highlight}))) + +let deprecated ?(def = none) ?(use = none) loc msg = + prerr_warning loc (Warnings.Deprecated (msg, def, use)) + +end +module Bs_exception : sig +#1 "bs_exception.mli" +(* Copyright (C) 2015-2016 Bloomberg Finance L.P. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * In addition to the permissions granted to you by the LGPL, you may combine + * or link a "work that uses the Library" with a publicly distributed version + * of this file to produce a combined library or application, then distribute + * that combined work under the terms of your choosing, with no requirement + * to comply with the obligations normally placed on you by section 4 of the + * LGPL version 3 (or the corresponding section of a later version of the LGPL + * should you choose to use a later version). + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) + +type error = + | Cmj_not_found of string + | Js_not_found of string + | Bs_cyclic_depends of string list + | Bs_duplicated_module of string * string + | Bs_duplicate_exports of string (* gpr_974 *) + | Bs_package_not_found of string + | Bs_main_not_exist of string + | Bs_invalid_path of string + | Missing_ml_dependency of string + | Dependency_script_module_dependent_not of string +(* +TODO: In the futrue, we should refine dependency [bsb] +should not rely on such exception, it should have its own exception handling +*) + +(* exception Error of error *) + +(* val report_error : Format.formatter -> error -> unit *) + +val error : error -> 'a + +end = struct +#1 "bs_exception.ml" +(* Copyright (C) 2015-2016 Bloomberg Finance L.P. 
+ * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * In addition to the permissions granted to you by the LGPL, you may combine + * or link a "work that uses the Library" with a publicly distributed version + * of this file to produce a combined library or application, then distribute + * that combined work under the terms of your choosing, with no requirement + * to comply with the obligations normally placed on you by section 4 of the + * LGPL version 3 (or the corresponding section of a later version of the LGPL + * should you choose to use a later version). + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) + + +type error = + | Cmj_not_found of string + | Js_not_found of string + | Bs_cyclic_depends of string list + | Bs_duplicated_module of string * string + | Bs_duplicate_exports of string (* gpr_974 *) + | Bs_package_not_found of string + | Bs_main_not_exist of string + | Bs_invalid_path of string + | Missing_ml_dependency of string + | Dependency_script_module_dependent_not of string + (** TODO: we need add location handling *) +exception Error of error + +let error err = raise (Error err) + +let report_error ppf = function + | Dependency_script_module_dependent_not s + -> + Format.fprintf ppf + "%s is compiled in script mode while its dependent is not" + s + | Missing_ml_dependency s -> + Format.fprintf ppf "Missing dependency %s in search path" s + | Cmj_not_found s -> + Format.fprintf ppf "%s not found, it means either the module does not exist or it is a namespace" s + | Js_not_found s -> + Format.fprintf ppf "%s not found, needed in script mode " s + | Bs_cyclic_depends str + -> + Format.fprintf ppf "Cyclic depends : @[%a@]" + (Format.pp_print_list ~pp_sep:Format.pp_print_space + Format.pp_print_string) + str + | Bs_duplicate_exports str -> + Format.fprintf ppf "%s are exported as twice" str + | Bs_duplicated_module (a,b) + -> + Format.fprintf ppf "The build system does not support two files with same names yet %s, %s" a b + | Bs_main_not_exist main + -> + Format.fprintf ppf "File %s not found " main + + | Bs_package_not_found package + -> + Format.fprintf ppf "Package %s not found or %s/lib/ocaml does not exist or please set npm_config_prefix correctly" + package package + | Bs_invalid_path path + -> Format.pp_print_string ppf ("Invalid path: " ^ path ) + + +let () = + Location.register_error_of_exn + (function + | Error err + -> Some (Location.error_of_printer_file report_error err) + | _ -> None + ) + +end +(** Interface as module *) +module Asttypes += struct +#1 "asttypes.mli" +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. 
This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Auxiliary AST types used by parsetree and typedtree. *) + +type constant = + Const_int of int + | Const_char of char + | Const_string of string * string option + | Const_float of string + | Const_int32 of int32 + | Const_int64 of int64 + | Const_nativeint of nativeint + +type rec_flag = Nonrecursive | Recursive + +type direction_flag = Upto | Downto + +(* Order matters, used in polymorphic comparison *) +type private_flag = Private | Public + +type mutable_flag = Immutable | Mutable + +type virtual_flag = Virtual | Concrete + +type override_flag = Override | Fresh + +type closed_flag = Closed | Open + +type label = string + +type arg_label = + Nolabel + | Labelled of string (* label:T -> ... *) + | Optional of string (* ?label:T -> ... *) + +type 'a loc = 'a Location.loc = { + txt : 'a; + loc : Location.t; +} + + +type variance = + | Covariant + | Contravariant + | Invariant + +end +module Longident : sig +#1 "longident.mli" +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Long identifiers, used in parsetree. *) + +type t = + Lident of string + | Ldot of t * string + | Lapply of t * t + +val flatten: t -> string list +val unflatten: string list -> t option +val last: t -> string +val parse: string -> t + +end = struct +#1 "longident.ml" +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +type t = + Lident of string + | Ldot of t * string + | Lapply of t * t + +let rec flat accu = function + Lident s -> s :: accu + | Ldot(lid, s) -> flat (s :: accu) lid + | Lapply(_, _) -> Misc.fatal_error "Longident.flat" + +let flatten lid = flat [] lid + +let last = function + Lident s -> s + | Ldot(_, s) -> s + | Lapply(_, _) -> Misc.fatal_error "Longident.last" + +let rec split_at_dots s pos = + try + let dot = String.index_from s pos '.' 
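+      (* [dot] is the index of the first '.' from position [pos] onwards;
+         [Not_found] means the rest of [s] is the last component *)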
in + String.sub s pos (dot - pos) :: split_at_dots s (dot + 1) + with Not_found -> + [String.sub s pos (String.length s - pos)] + +let unflatten l = + match l with + | [] -> None + | hd :: tl -> Some (List.fold_left (fun p s -> Ldot(p, s)) (Lident hd) tl) + +let parse s = + match unflatten (split_at_dots s 0) with + | None -> Lident "" (* should not happen, but don't put assert false + so as not to crash the toplevel (see Genprintval) *) + | Some v -> v + +end +(** Interface as module *) +module Parsetree += struct +#1 "parsetree.mli" +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Abstract syntax tree produced by parsing *) + +open Asttypes + +type constant = + Pconst_integer of string * char option + (* 3 3l 3L 3n + + Suffixes [g-z][G-Z] are accepted by the parser. + Suffixes except 'l', 'L' and 'n' are rejected by the typechecker + *) + | Pconst_char of char + (* 'c' *) + | Pconst_string of string * string option + (* "constant" + {delim|other constant|delim} + *) + | Pconst_float of string * char option + (* 3.4 2e5 1.4e-4 + + Suffixes [g-z][G-Z] are accepted by the parser. + Suffixes are rejected by the typechecker. + *) + +(** {1 Extension points} *) + +type attribute = string loc * payload + (* [@id ARG] + [@@id ARG] + + Metadata containers passed around within the AST. + The compiler ignores unknown attributes. + *) + +and extension = string loc * payload + (* [%id ARG] + [%%id ARG] + + Sub-language placeholder -- rejected by the typechecker. + *) + +and attributes = attribute list + +and payload = + | PStr of structure + | PSig of signature (* : SIG *) + | PTyp of core_type (* : T *) + | PPat of pattern * expression option (* ? P or ? P when E *) + +(** {1 Core language} *) + +(* Type expressions *) + +and core_type = + { + ptyp_desc: core_type_desc; + ptyp_loc: Location.t; + ptyp_attributes: attributes; (* ... [@id1] [@id2] *) + } + +and core_type_desc = + | Ptyp_any + (* _ *) + | Ptyp_var of string + (* 'a *) + | Ptyp_arrow of arg_label * core_type * core_type + (* T1 -> T2 Simple + ~l:T1 -> T2 Labelled + ?l:T1 -> T2 Optional + *) + | Ptyp_tuple of core_type list + (* T1 * ... * Tn + + Invariant: n >= 2 + *) + | Ptyp_constr of Longident.t loc * core_type list + (* tconstr + T tconstr + (T1, ..., Tn) tconstr + *) + | Ptyp_object of object_field list * closed_flag + (* < l1:T1; ...; ln:Tn > (flag = Closed) + < l1:T1; ...; ln:Tn; .. > (flag = Open) + *) + | Ptyp_class of Longident.t loc * core_type list + (* #tconstr + T #tconstr + (T1, ..., Tn) #tconstr + *) + | Ptyp_alias of core_type * string + (* T as 'a *) + | Ptyp_variant of row_field list * closed_flag * label list option + (* [ `A|`B ] (flag = Closed; labels = None) + [> `A|`B ] (flag = Open; labels = None) + [< `A|`B ] (flag = Closed; labels = Some []) + [< `A|`B > `X `Y ](flag = Closed; labels = Some ["X";"Y"]) + *) + | Ptyp_poly of string loc list * core_type + (* 'a1 ... 'an. 
T + + Can only appear in the following context: + + - As the core_type of a Ppat_constraint node corresponding + to a constraint on a let-binding: let x : 'a1 ... 'an. T + = e ... + + - Under Cfk_virtual for methods (not values). + + - As the core_type of a Pctf_method node. + + - As the core_type of a Pexp_poly node. + + - As the pld_type field of a label_declaration. + + - As a core_type of a Ptyp_object node. + *) + + | Ptyp_package of package_type + (* (module S) *) + | Ptyp_extension of extension + (* [%id] *) + +and package_type = Longident.t loc * (Longident.t loc * core_type) list + (* + (module S) + (module S with type t1 = T1 and ... and tn = Tn) + *) + +and row_field = + | Rtag of label loc * attributes * bool * core_type list + (* [`A] ( true, [] ) + [`A of T] ( false, [T] ) + [`A of T1 & .. & Tn] ( false, [T1;...Tn] ) + [`A of & T1 & .. & Tn] ( true, [T1;...Tn] ) + + - The 2nd field is true if the tag contains a + constant (empty) constructor. + - '&' occurs when several types are used for the same constructor + (see 4.2 in the manual) + + - TODO: switch to a record representation, and keep location + *) + | Rinherit of core_type + (* [ T ] *) + +and object_field = + | Otag of label loc * attributes * core_type + | Oinherit of core_type + +(* Patterns *) + +and pattern = + { + ppat_desc: pattern_desc; + ppat_loc: Location.t; + ppat_attributes: attributes; (* ... [@id1] [@id2] *) + } + +and pattern_desc = + | Ppat_any + (* _ *) + | Ppat_var of string loc + (* x *) + | Ppat_alias of pattern * string loc + (* P as 'a *) + | Ppat_constant of constant + (* 1, 'a', "true", 1.0, 1l, 1L, 1n *) + | Ppat_interval of constant * constant + (* 'a'..'z' + + Other forms of interval are recognized by the parser + but rejected by the type-checker. *) + | Ppat_tuple of pattern list + (* (P1, ..., Pn) + + Invariant: n >= 2 + *) + | Ppat_construct of Longident.t loc * pattern option + (* C None + C P Some P + C (P1, ..., Pn) Some (Ppat_tuple [P1; ...; Pn]) + *) + | Ppat_variant of label * pattern option + (* `A (None) + `A P (Some P) + *) + | Ppat_record of (Longident.t loc * pattern) list * closed_flag + (* { l1=P1; ...; ln=Pn } (flag = Closed) + { l1=P1; ...; ln=Pn; _} (flag = Open) + + Invariant: n > 0 + *) + | Ppat_array of pattern list + (* [| P1; ...; Pn |] *) + | Ppat_or of pattern * pattern + (* P1 | P2 *) + | Ppat_constraint of pattern * core_type + (* (P : T) *) + | Ppat_type of Longident.t loc + (* #tconst *) + | Ppat_lazy of pattern + (* lazy P *) + | Ppat_unpack of string loc + (* (module P) + Note: (module P : S) is represented as + Ppat_constraint(Ppat_unpack, Ptyp_package) + *) + | Ppat_exception of pattern + (* exception P *) + | Ppat_extension of extension + (* [%id] *) + | Ppat_open of Longident.t loc * pattern + (* M.(P) *) + +(* Value expressions *) + +and expression = + { + pexp_desc: expression_desc; + pexp_loc: Location.t; + pexp_attributes: attributes; (* ... [@id1] [@id2] *) + } + +and expression_desc = + | Pexp_ident of Longident.t loc + (* x + M.x + *) + | Pexp_constant of constant + (* 1, 'a', "true", 1.0, 1l, 1L, 1n *) + | Pexp_let of rec_flag * value_binding list * expression + (* let P1 = E1 and ... and Pn = EN in E (flag = Nonrecursive) + let rec P1 = E1 and ... and Pn = EN in E (flag = Recursive) + *) + | Pexp_function of case list + (* function P1 -> E1 | ... 
| Pn -> En *) + | Pexp_fun of arg_label * expression option * pattern * expression + (* fun P -> E1 (Simple, None) + fun ~l:P -> E1 (Labelled l, None) + fun ?l:P -> E1 (Optional l, None) + fun ?l:(P = E0) -> E1 (Optional l, Some E0) + + Notes: + - If E0 is provided, only Optional is allowed. + - "fun P1 P2 .. Pn -> E1" is represented as nested Pexp_fun. + - "let f P = E" is represented using Pexp_fun. + *) + | Pexp_apply of expression * (arg_label * expression) list + (* E0 ~l1:E1 ... ~ln:En + li can be empty (non labeled argument) or start with '?' + (optional argument). + + Invariant: n > 0 + *) + | Pexp_match of expression * case list + (* match E0 with P1 -> E1 | ... | Pn -> En *) + | Pexp_try of expression * case list + (* try E0 with P1 -> E1 | ... | Pn -> En *) + | Pexp_tuple of expression list + (* (E1, ..., En) + + Invariant: n >= 2 + *) + | Pexp_construct of Longident.t loc * expression option + (* C None + C E Some E + C (E1, ..., En) Some (Pexp_tuple[E1;...;En]) + *) + | Pexp_variant of label * expression option + (* `A (None) + `A E (Some E) + *) + | Pexp_record of (Longident.t loc * expression) list * expression option + (* { l1=P1; ...; ln=Pn } (None) + { E0 with l1=P1; ...; ln=Pn } (Some E0) + + Invariant: n > 0 + *) + | Pexp_field of expression * Longident.t loc + (* E.l *) + | Pexp_setfield of expression * Longident.t loc * expression + (* E1.l <- E2 *) + | Pexp_array of expression list + (* [| E1; ...; En |] *) + | Pexp_ifthenelse of expression * expression * expression option + (* if E1 then E2 else E3 *) + | Pexp_sequence of expression * expression + (* E1; E2 *) + | Pexp_while of expression * expression + (* while E1 do E2 done *) + | Pexp_for of + pattern * expression * expression * direction_flag * expression + (* for i = E1 to E2 do E3 done (flag = Upto) + for i = E1 downto E2 do E3 done (flag = Downto) + *) + | Pexp_constraint of expression * core_type + (* (E : T) *) + | Pexp_coerce of expression * core_type option * core_type + (* (E :> T) (None, T) + (E : T0 :> T) (Some T0, T) + *) + | Pexp_send of expression * label loc + (* E # m *) + | Pexp_new of Longident.t loc + (* new M.c *) + | Pexp_setinstvar of label loc * expression + (* x <- 2 *) + | Pexp_override of (label loc * expression) list + (* {< x1 = E1; ...; Xn = En >} *) + | Pexp_letmodule of string loc * module_expr * expression + (* let module M = ME in E *) + | Pexp_letexception of extension_constructor * expression + (* let exception C in E *) + | Pexp_assert of expression + (* assert E + Note: "assert false" is treated in a special way by the + type-checker. *) + | Pexp_lazy of expression + (* lazy E *) + | Pexp_poly of expression * core_type option + (* Used for method bodies. + + Can only be used as the expression under Cfk_concrete + for methods (not values). *) + | Pexp_object of class_structure + (* object ... end *) + | Pexp_newtype of string loc * expression + (* fun (type t) -> E *) + | Pexp_pack of module_expr + (* (module ME) + + (module ME : S) is represented as + Pexp_constraint(Pexp_pack, Ptyp_package S) *) + | Pexp_open of override_flag * Longident.t loc * expression + (* M.(E) + let open M in E + let! open M in E *) + | Pexp_extension of extension + (* [%id] *) + | Pexp_unreachable + (* . 
*) + +and case = (* (P -> E) or (P when E0 -> E) *) + { + pc_lhs: pattern; + pc_guard: expression option; + pc_rhs: expression; + } + +(* Value descriptions *) + +and value_description = + { + pval_name: string loc; + pval_type: core_type; + pval_prim: string list; + pval_attributes: attributes; (* ... [@@id1] [@@id2] *) + pval_loc: Location.t; + } + +(* + val x: T (prim = []) + external x: T = "s1" ... "sn" (prim = ["s1";..."sn"]) +*) + +(* Type declarations *) + +and type_declaration = + { + ptype_name: string loc; + ptype_params: (core_type * variance) list; + (* ('a1,...'an) t; None represents _*) + ptype_cstrs: (core_type * core_type * Location.t) list; + (* ... constraint T1=T1' ... constraint Tn=Tn' *) + ptype_kind: type_kind; + ptype_private: private_flag; (* = private ... *) + ptype_manifest: core_type option; (* = T *) + ptype_attributes: attributes; (* ... [@@id1] [@@id2] *) + ptype_loc: Location.t; + } + +(* + type t (abstract, no manifest) + type t = T0 (abstract, manifest=T0) + type t = C of T | ... (variant, no manifest) + type t = T0 = C of T | ... (variant, manifest=T0) + type t = {l: T; ...} (record, no manifest) + type t = T0 = {l : T; ...} (record, manifest=T0) + type t = .. (open, no manifest) +*) + +and type_kind = + | Ptype_abstract + | Ptype_variant of constructor_declaration list + (* Invariant: non-empty list *) + | Ptype_record of label_declaration list + (* Invariant: non-empty list *) + | Ptype_open + +and label_declaration = + { + pld_name: string loc; + pld_mutable: mutable_flag; + pld_type: core_type; + pld_loc: Location.t; + pld_attributes: attributes; (* l : T [@id1] [@id2] *) + } + +(* { ...; l: T; ... } (mutable=Immutable) + { ...; mutable l: T; ... } (mutable=Mutable) + + Note: T can be a Ptyp_poly. +*) + +and constructor_declaration = + { + pcd_name: string loc; + pcd_args: constructor_arguments; + pcd_res: core_type option; + pcd_loc: Location.t; + pcd_attributes: attributes; (* C of ... [@id1] [@id2] *) + } + +and constructor_arguments = + | Pcstr_tuple of core_type list + | Pcstr_record of label_declaration list + +(* + | C of T1 * ... * Tn (res = None, args = Pcstr_tuple []) + | C: T0 (res = Some T0, args = []) + | C: T1 * ... * Tn -> T0 (res = Some T0, args = Pcstr_tuple) + | C of {...} (res = None, args = Pcstr_record) + | C: {...} -> T0 (res = Some T0, args = Pcstr_record) + | C of {...} as t (res = None, args = Pcstr_record) +*) + +and type_extension = + { + ptyext_path: Longident.t loc; + ptyext_params: (core_type * variance) list; + ptyext_constructors: extension_constructor list; + ptyext_private: private_flag; + ptyext_attributes: attributes; (* ... [@@id1] [@@id2] *) + } +(* + type t += ... +*) + +and extension_constructor = + { + pext_name: string loc; + pext_kind : extension_constructor_kind; + pext_loc : Location.t; + pext_attributes: attributes; (* C of ... [@id1] [@id2] *) + } + +and extension_constructor_kind = + Pext_decl of constructor_arguments * core_type option + (* + | C of T1 * ... * Tn ([T1; ...; Tn], None) + | C: T0 ([], Some T0) + | C: T1 * ... * Tn -> T0 ([T1; ...; Tn], Some T0) + *) + | Pext_rebind of Longident.t loc + (* + | C = D + *) + +(** {1 Class language} *) + +(* Type expressions for the class language *) + +and class_type = + { + pcty_desc: class_type_desc; + pcty_loc: Location.t; + pcty_attributes: attributes; (* ... [@id1] [@id2] *) + } + +and class_type_desc = + | Pcty_constr of Longident.t loc * core_type list + (* c + ['a1, ..., 'an] c *) + | Pcty_signature of class_signature + (* object ... 
end *) + | Pcty_arrow of arg_label * core_type * class_type + (* T -> CT Simple + ~l:T -> CT Labelled l + ?l:T -> CT Optional l + *) + | Pcty_extension of extension + (* [%id] *) + | Pcty_open of override_flag * Longident.t loc * class_type + (* let open M in CT *) + +and class_signature = + { + pcsig_self: core_type; + pcsig_fields: class_type_field list; + } +(* object('selfpat) ... end + object ... end (self = Ptyp_any) + *) + +and class_type_field = + { + pctf_desc: class_type_field_desc; + pctf_loc: Location.t; + pctf_attributes: attributes; (* ... [@@id1] [@@id2] *) + } + +and class_type_field_desc = + | Pctf_inherit of class_type + (* inherit CT *) + | Pctf_val of (label loc * mutable_flag * virtual_flag * core_type) + (* val x: T *) + | Pctf_method of (label loc * private_flag * virtual_flag * core_type) + (* method x: T + + Note: T can be a Ptyp_poly. + *) + | Pctf_constraint of (core_type * core_type) + (* constraint T1 = T2 *) + | Pctf_attribute of attribute + (* [@@@id] *) + | Pctf_extension of extension + (* [%%id] *) + +and 'a class_infos = + { + pci_virt: virtual_flag; + pci_params: (core_type * variance) list; + pci_name: string loc; + pci_expr: 'a; + pci_loc: Location.t; + pci_attributes: attributes; (* ... [@@id1] [@@id2] *) + } +(* class c = ... + class ['a1,...,'an] c = ... + class virtual c = ... + + Also used for "class type" declaration. +*) + +and class_description = class_type class_infos + +and class_type_declaration = class_type class_infos + +(* Value expressions for the class language *) + +and class_expr = + { + pcl_desc: class_expr_desc; + pcl_loc: Location.t; + pcl_attributes: attributes; (* ... [@id1] [@id2] *) + } + +and class_expr_desc = + | Pcl_constr of Longident.t loc * core_type list + (* c + ['a1, ..., 'an] c *) + | Pcl_structure of class_structure + (* object ... end *) + | Pcl_fun of arg_label * expression option * pattern * class_expr + (* fun P -> CE (Simple, None) + fun ~l:P -> CE (Labelled l, None) + fun ?l:P -> CE (Optional l, None) + fun ?l:(P = E0) -> CE (Optional l, Some E0) + *) + | Pcl_apply of class_expr * (arg_label * expression) list + (* CE ~l1:E1 ... ~ln:En + li can be empty (non labeled argument) or start with '?' + (optional argument). + + Invariant: n > 0 + *) + | Pcl_let of rec_flag * value_binding list * class_expr + (* let P1 = E1 and ... and Pn = EN in CE (flag = Nonrecursive) + let rec P1 = E1 and ... and Pn = EN in CE (flag = Recursive) + *) + | Pcl_constraint of class_expr * class_type + (* (CE : CT) *) + | Pcl_extension of extension + (* [%id] *) + | Pcl_open of override_flag * Longident.t loc * class_expr + (* let open M in CE *) + + +and class_structure = + { + pcstr_self: pattern; + pcstr_fields: class_field list; + } +(* object(selfpat) ... end + object ... end (self = Ppat_any) + *) + +and class_field = + { + pcf_desc: class_field_desc; + pcf_loc: Location.t; + pcf_attributes: attributes; (* ... [@@id1] [@@id2] *) + } + +and class_field_desc = + | Pcf_inherit of override_flag * class_expr * string loc option + (* inherit CE + inherit CE as x + inherit! CE + inherit! 
CE as x + *) + | Pcf_val of (label loc * mutable_flag * class_field_kind) + (* val x = E + val virtual x: T + *) + | Pcf_method of (label loc * private_flag * class_field_kind) + (* method x = E (E can be a Pexp_poly) + method virtual x: T (T can be a Ptyp_poly) + *) + | Pcf_constraint of (core_type * core_type) + (* constraint T1 = T2 *) + | Pcf_initializer of expression + (* initializer E *) + | Pcf_attribute of attribute + (* [@@@id] *) + | Pcf_extension of extension + (* [%%id] *) + +and class_field_kind = + | Cfk_virtual of core_type + | Cfk_concrete of override_flag * expression + +and class_declaration = class_expr class_infos + +(** {1 Module language} *) + +(* Type expressions for the module language *) + +and module_type = + { + pmty_desc: module_type_desc; + pmty_loc: Location.t; + pmty_attributes: attributes; (* ... [@id1] [@id2] *) + } + +and module_type_desc = + | Pmty_ident of Longident.t loc + (* S *) + | Pmty_signature of signature + (* sig ... end *) + | Pmty_functor of string loc * module_type option * module_type + (* functor(X : MT1) -> MT2 *) + | Pmty_with of module_type * with_constraint list + (* MT with ... *) + | Pmty_typeof of module_expr + (* module type of ME *) + | Pmty_extension of extension + (* [%id] *) + | Pmty_alias of Longident.t loc + (* (module M) *) + +and signature = signature_item list + +and signature_item = + { + psig_desc: signature_item_desc; + psig_loc: Location.t; + } + +and signature_item_desc = + | Psig_value of value_description + (* + val x: T + external x: T = "s1" ... "sn" + *) + | Psig_type of rec_flag * type_declaration list + (* type t1 = ... and ... and tn = ... *) + | Psig_typext of type_extension + (* type t1 += ... *) + | Psig_exception of extension_constructor + (* exception C of T *) + | Psig_module of module_declaration + (* module X : MT *) + | Psig_recmodule of module_declaration list + (* module rec X1 : MT1 and ... and Xn : MTn *) + | Psig_modtype of module_type_declaration + (* module type S = MT + module type S *) + | Psig_open of open_description + (* open X *) + | Psig_include of include_description + (* include MT *) + | Psig_class of class_description list + (* class c1 : ... and ... and cn : ... *) + | Psig_class_type of class_type_declaration list + (* class type ct1 = ... and ... and ctn = ... *) + | Psig_attribute of attribute + (* [@@@id] *) + | Psig_extension of extension * attributes + (* [%%id] *) + +and module_declaration = + { + pmd_name: string loc; + pmd_type: module_type; + pmd_attributes: attributes; (* ... [@@id1] [@@id2] *) + pmd_loc: Location.t; + } +(* S : MT *) + +and module_type_declaration = + { + pmtd_name: string loc; + pmtd_type: module_type option; + pmtd_attributes: attributes; (* ... [@@id1] [@@id2] *) + pmtd_loc: Location.t; + } +(* S = MT + S (abstract module type declaration, pmtd_type = None) +*) + +and open_description = + { + popen_lid: Longident.t loc; + popen_override: override_flag; + popen_loc: Location.t; + popen_attributes: attributes; + } +(* open! X - popen_override = Override (silences the 'used identifier + shadowing' warning) + open X - popen_override = Fresh + *) + +and 'a include_infos = + { + pincl_mod: 'a; + pincl_loc: Location.t; + pincl_attributes: attributes; + } + +and include_description = module_type include_infos +(* include MT *) + +and include_declaration = module_expr include_infos +(* include ME *) + +and with_constraint = + | Pwith_type of Longident.t loc * type_declaration + (* with type X.t = ... 
+ + Note: the last component of the longident must match + the name of the type_declaration. *) + | Pwith_module of Longident.t loc * Longident.t loc + (* with module X.Y = Z *) + | Pwith_typesubst of Longident.t loc * type_declaration + (* with type X.t := ..., same format as [Pwith_type] *) + | Pwith_modsubst of Longident.t loc * Longident.t loc + (* with module X.Y := Z *) + +(* Value expressions for the module language *) + +and module_expr = + { + pmod_desc: module_expr_desc; + pmod_loc: Location.t; + pmod_attributes: attributes; (* ... [@id1] [@id2] *) + } + +and module_expr_desc = + | Pmod_ident of Longident.t loc + (* X *) + | Pmod_structure of structure + (* struct ... end *) + | Pmod_functor of string loc * module_type option * module_expr + (* functor(X : MT1) -> ME *) + | Pmod_apply of module_expr * module_expr + (* ME1(ME2) *) + | Pmod_constraint of module_expr * module_type + (* (ME : MT) *) + | Pmod_unpack of expression + (* (val E) *) + | Pmod_extension of extension + (* [%id] *) + +and structure = structure_item list + +and structure_item = + { + pstr_desc: structure_item_desc; + pstr_loc: Location.t; + } + +and structure_item_desc = + | Pstr_eval of expression * attributes + (* E *) + | Pstr_value of rec_flag * value_binding list + (* let P1 = E1 and ... and Pn = EN (flag = Nonrecursive) + let rec P1 = E1 and ... and Pn = EN (flag = Recursive) + *) + | Pstr_primitive of value_description + (* val x: T + external x: T = "s1" ... "sn" *) + | Pstr_type of rec_flag * type_declaration list + (* type t1 = ... and ... and tn = ... *) + | Pstr_typext of type_extension + (* type t1 += ... *) + | Pstr_exception of extension_constructor + (* exception C of T + exception C = M.X *) + | Pstr_module of module_binding + (* module X = ME *) + | Pstr_recmodule of module_binding list + (* module rec X1 = ME1 and ... and Xn = MEn *) + | Pstr_modtype of module_type_declaration + (* module type S = MT *) + | Pstr_open of open_description + (* open X *) + | Pstr_class of class_declaration list + (* class c1 = ... and ... and cn = ... *) + | Pstr_class_type of class_type_declaration list + (* class type ct1 = ... and ... and ctn = ... *) + | Pstr_include of include_declaration + (* include ME *) + | Pstr_attribute of attribute + (* [@@@id] *) + | Pstr_extension of extension * attributes + (* [%%id] *) + +and value_binding = + { + pvb_pat: pattern; + pvb_expr: expression; + pvb_attributes: attributes; + pvb_loc: Location.t; + } + +and module_binding = + { + pmb_name: string loc; + pmb_expr: module_expr; + pmb_attributes: attributes; + pmb_loc: Location.t; + } +(* X = ME *) + +(** {1 Toplevel} *) + +(* Toplevel phrases *) + +type toplevel_phrase = + | Ptop_def of structure + | Ptop_dir of string * directive_argument + (* #use, #load ... *) + +and directive_argument = + | Pdir_none + | Pdir_string of string + | Pdir_int of string * char option + | Pdir_ident of Longident.t + | Pdir_bool of bool + +end +module Builtin_attributes : sig +#1 "builtin_attributes.mli" +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Alain Frisch, LexiFi *) +(* *) +(* Copyright 2012 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +(* Support for some of the builtin attributes: + + ocaml.deprecated + ocaml.error + ocaml.ppwarning + ocaml.warning + ocaml.warnerror + ocaml.explicit_arity (for camlp4/camlp5) + ocaml.warn_on_literal_pattern + ocaml.deprecated_mutable + ocaml.immediate + ocaml.boxed / ocaml.unboxed +*) + + +val check_deprecated: Location.t -> Parsetree.attributes -> string -> unit +val check_deprecated_inclusion: + def:Location.t -> use:Location.t -> Location.t -> Parsetree.attributes -> + Parsetree.attributes -> string -> unit +val deprecated_of_attrs: Parsetree.attributes -> string option +val deprecated_of_sig: Parsetree.signature -> string option +val deprecated_of_str: Parsetree.structure -> string option + +val check_deprecated_mutable: + Location.t -> Parsetree.attributes -> string -> unit +val check_deprecated_mutable_inclusion: + def:Location.t -> use:Location.t -> Location.t -> Parsetree.attributes -> + Parsetree.attributes -> string -> unit + +val check_bs_attributes_inclusion: + (Parsetree.attributes -> + Parsetree.attributes -> string -> (string*string) option ) ref + +val check_duplicated_labels: + (Parsetree.label_declaration list -> + string Asttypes.loc option + ) ref +val error_of_extension: Parsetree.extension -> Location.error + +val warning_attribute: ?ppwarning:bool -> Parsetree.attribute -> unit + (** Apply warning settings from the specified attribute. + "ocaml.warning"/"ocaml.warnerror" (and variants without the prefix) + are processed and other attributes are ignored. + + Also implement ocaml.ppwarning (unless ~ppwarning:false is + passed). + *) + +val warning_scope: + ?ppwarning:bool -> + Parsetree.attributes -> (unit -> 'a) -> 'a + (** Execute a function in a new scope for warning settings. This + means that the effect of any call to [warning_attribute] during + the execution of this function will be discarded after + execution. + + The function also takes a list of attributes which are processed + with [warning_attribute] in the fresh scope before the function + is executed. + *) + +val warn_on_literal_pattern: Parsetree.attributes -> bool +val explicit_arity: Parsetree.attributes -> bool + + +val immediate: Parsetree.attributes -> bool + +val has_unboxed: Parsetree.attributes -> bool +val has_boxed: Parsetree.attributes -> bool + +end = struct +#1 "builtin_attributes.ml" +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Alain Frisch, LexiFi *) +(* *) +(* Copyright 2012 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +open Asttypes +open Parsetree + +let string_of_cst = function + | Pconst_string(s, _) -> Some s + | _ -> None + +let string_of_payload = function + | PStr[{pstr_desc=Pstr_eval({pexp_desc=Pexp_constant c},_)}] -> + string_of_cst c + | _ -> None + +let string_of_opt_payload p = + match string_of_payload p with + | Some s -> s + | None -> "" + +let rec error_of_extension ext = + match ext with + | ({txt = ("ocaml.error"|"error") as txt; loc}, p) -> + let rec sub_from inner = + match inner with + | {pstr_desc=Pstr_extension (ext, _)} :: rest -> + error_of_extension ext :: sub_from rest + | _ :: rest -> + (Location.errorf ~loc + "Invalid syntax for sub-error of extension '%s'." txt) :: + sub_from rest + | [] -> [] + in + begin match p with + | PStr [] -> raise Location.Already_displayed_error + | PStr({pstr_desc=Pstr_eval + ({pexp_desc=Pexp_constant(Pconst_string(msg,_))}, _)}:: + {pstr_desc=Pstr_eval + ({pexp_desc=Pexp_constant(Pconst_string(if_highlight,_))}, _)}:: + inner) -> + Location.error ~loc ~if_highlight ~sub:(sub_from inner) msg + | PStr({pstr_desc=Pstr_eval + ({pexp_desc=Pexp_constant(Pconst_string(msg,_))}, _)}::inner) -> + Location.error ~loc ~sub:(sub_from inner) msg + | _ -> Location.errorf ~loc "Invalid syntax for extension '%s'." txt + end + | ({txt; loc}, _) -> + Location.errorf ~loc "Uninterpreted extension '%s'." txt + +let cat s1 s2 = + if s2 = "" then s1 else + + if Clflags.bs_vscode then s1 ^ " " ^ s2 + else s1 ^ "\n" ^ s2 + + +let rec deprecated_of_attrs = function + | [] -> None + | ({txt = "ocaml.deprecated"|"deprecated"; _}, p) :: _ -> + Some (string_of_opt_payload p) + | _ :: tl -> deprecated_of_attrs tl + +let check_deprecated loc attrs s = + match deprecated_of_attrs attrs with + | None -> () + | Some txt -> Location.deprecated loc (cat s txt) + +let check_deprecated_inclusion ~def ~use loc attrs1 attrs2 s = + match deprecated_of_attrs attrs1, deprecated_of_attrs attrs2 with + | None, _ | Some _, Some _ -> () + | Some txt, None -> Location.deprecated ~def ~use loc (cat s txt) + +let rec deprecated_mutable_of_attrs = function + | [] -> None + | ({txt = "ocaml.deprecated_mutable"|"deprecated_mutable"; _}, p) :: _ -> + Some (string_of_opt_payload p) + | _ :: tl -> deprecated_mutable_of_attrs tl + +let check_deprecated_mutable loc attrs s = + match deprecated_mutable_of_attrs attrs with + | None -> () + | Some txt -> + Location.deprecated loc (Printf.sprintf "mutating field %s" (cat s txt)) + +let check_deprecated_mutable_inclusion ~def ~use loc attrs1 attrs2 s = + match deprecated_mutable_of_attrs attrs1, + deprecated_mutable_of_attrs attrs2 + with + | None, _ | Some _, Some _ -> () + | Some txt, None -> + Location.deprecated ~def ~use loc + (Printf.sprintf "mutating field %s" (cat s txt)) + +let check_bs_attributes_inclusion = + ref (fun _attrs1 _attrs2 _s -> + None + ) + +let check_duplicated_labels : (_ -> _ option ) ref = ref (fun _lbls -> + None +) + +let rec deprecated_of_sig = function + | {psig_desc = Psig_attribute a} :: tl -> + begin match deprecated_of_attrs [a] with + | None -> deprecated_of_sig tl + | Some _ as r -> r + end + | _ -> None + + +let rec deprecated_of_str = function + | {pstr_desc = Pstr_attribute a} :: tl -> + begin match deprecated_of_attrs [a] with + | None -> deprecated_of_str tl + | Some _ as r -> r + end + | _ -> None + + +let warning_attribute ?(ppwarning = true) = + let process loc txt errflag payload = + match string_of_payload payload with + 
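+    (* the payload must be a single string literal such as
+       [@@@warning "+a-4-9"]; the string is handed to
+       [Warnings.parse_options] *)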
| Some s -> + begin try Warnings.parse_options errflag s + with Arg.Bad _ -> + Location.prerr_warning loc + (Warnings.Attribute_payload + (txt, "Ill-formed list of warnings")) + end + | None -> + Location.prerr_warning loc + (Warnings.Attribute_payload + (txt, "A single string literal is expected")) + in + function + | ({txt = ("ocaml.warning"|"warning") as txt; loc}, payload) -> + process loc txt false payload + | ({txt = ("ocaml.warnerror"|"warnerror") as txt; loc}, payload) -> + process loc txt true payload + | {txt="ocaml.ppwarning"|"ppwarning"}, + PStr[{pstr_desc=Pstr_eval({pexp_desc=Pexp_constant + (Pconst_string (s, _))},_); + pstr_loc}] when ppwarning -> + Location.prerr_warning pstr_loc (Warnings.Preprocessor s) + | _ -> + () + +let warning_scope ?ppwarning attrs f = + let prev = Warnings.backup () in + try + List.iter (warning_attribute ?ppwarning) (List.rev attrs); + let ret = f () in + Warnings.restore prev; + ret + with exn -> + Warnings.restore prev; + raise exn + + +let warn_on_literal_pattern = + List.exists + (function + | ({txt="ocaml.warn_on_literal_pattern"|"warn_on_literal_pattern"; _}, _) + -> true + | _ -> false + ) + +let explicit_arity = + List.exists + (function + | ({txt="ocaml.explicit_arity"|"explicit_arity"; _}, _) -> true + | _ -> false + ) + +let immediate = + List.exists + (function + | ({txt="ocaml.immediate"|"immediate"; _}, _) -> true + | _ -> false + ) + +(* The "ocaml.boxed (default)" and "ocaml.unboxed (default)" + attributes cannot be input by the user, they are added by the + compiler when applying the default setting. This is done to record + in the .cmi the default used by the compiler when compiling the + source file because the default can change between compiler + invocations. *) + +let check l (x, _) = List.mem x.txt l + +let has_unboxed attr = + List.exists (check ["ocaml.unboxed"; "unboxed"]) + attr + +let has_boxed attr = + List.exists (check ["ocaml.boxed"; "boxed"]) attr + +end +module Depend : sig +#1 "depend.mli" +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1999 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Module dependencies. 
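+
+    For example, scanning the implementation [let x = List.map Foo.f l]
+    with [add_implementation] adds [List] and [Foo] to
+    [free_structure_names].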
*) + +module StringSet : Set.S with type elt = string +module StringMap : Map.S with type key = string + +type map_tree = Node of StringSet.t * bound_map +and bound_map = map_tree StringMap.t +val make_leaf : string -> map_tree +val make_node : bound_map -> map_tree +val weaken_map : StringSet.t -> map_tree -> map_tree + +val free_structure_names : StringSet.t ref + +(* dependencies found by preprocessing tools (plugins) *) +val pp_deps : string list ref + +val open_module : bound_map -> Longident.t -> bound_map + +val add_use_file : bound_map -> Parsetree.toplevel_phrase list -> unit + +val add_signature : bound_map -> Parsetree.signature -> unit + +val add_implementation : bound_map -> Parsetree.structure -> unit + +val add_implementation_binding : bound_map -> Parsetree.structure -> bound_map +val add_signature_binding : bound_map -> Parsetree.signature -> bound_map + +end = struct +#1 "depend.ml" +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1999 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +open Asttypes +open Location +open Longident +open Parsetree + +let pp_deps = ref [] + +module StringSet = Set.Make(struct type t = string let compare = compare end) +module StringMap = Map.Make(String) + +(* Module resolution map *) +(* Node (set of imports for this path, map for submodules) *) +type map_tree = Node of StringSet.t * bound_map +and bound_map = map_tree StringMap.t +let bound = Node (StringSet.empty, StringMap.empty) + +(*let get_free (Node (s, _m)) = s*) +let get_map (Node (_s, m)) = m +let make_leaf s = Node (StringSet.singleton s, StringMap.empty) +let make_node m = Node (StringSet.empty, m) +let rec weaken_map s (Node(s0,m0)) = + Node (StringSet.union s s0, StringMap.map (weaken_map s) m0) +let rec collect_free (Node (s, m)) = + StringMap.fold (fun _ n -> StringSet.union (collect_free n)) m s + +(* Returns the imports required to access the structure at path p *) +(* Only raises Not_found if the head of p is not in the toplevel map *) +let rec lookup_free p m = + match p with + [] -> raise Not_found + | s::p -> + let Node (f, m') = StringMap.find s m in + try lookup_free p m' with Not_found -> f + +(* Returns the node corresponding to the structure at path p *) +let rec lookup_map lid m = + match lid with + Lident s -> StringMap.find s m + | Ldot (l, s) -> StringMap.find s (get_map (lookup_map l m)) + | Lapply _ -> raise Not_found + +(* Collect free module identifiers in the a.s.t. 
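+   That is, module names that are used but not bound in the [bound_map];
+   a name bound in [bv] contributes the imports recorded for it instead.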
*) + +let free_structure_names = ref StringSet.empty + +let add_names s = + free_structure_names := StringSet.union s !free_structure_names + +let rec add_path bv ?(p=[]) = function + | Lident s -> + let free = + try lookup_free (s::p) bv with Not_found -> StringSet.singleton s + in + (*StringSet.iter (fun s -> Printf.eprintf "%s " s) free; + prerr_endline "";*) + add_names free + | Ldot(l, s) -> add_path bv ~p:(s::p) l + | Lapply(l1, l2) -> add_path bv l1; add_path bv l2 + +let open_module bv lid = + match lookup_map lid bv with + | Node (s, m) -> + add_names s; + StringMap.fold StringMap.add m bv + | exception Not_found -> + add_path bv lid; bv + +let add_parent bv lid = + match lid.txt with + Ldot(l, _s) -> add_path bv l + | _ -> () + +let add = add_parent + +let addmodule bv lid = add_path bv lid.txt + +let handle_extension ext = + match (fst ext).txt with + | "error" | "ocaml.error" -> + raise (Location.Error + (Builtin_attributes.error_of_extension ext)) + | _ -> + () + +let rec add_type bv ty = + match ty.ptyp_desc with + Ptyp_any -> () + | Ptyp_var _ -> () + | Ptyp_arrow(_, t1, t2) -> add_type bv t1; add_type bv t2 + | Ptyp_tuple tl -> List.iter (add_type bv) tl + | Ptyp_constr(c, tl) -> add bv c; List.iter (add_type bv) tl + | Ptyp_object (fl, _) -> + List.iter + (function Otag (_, _, t) -> add_type bv t + | Oinherit t -> add_type bv t) fl + | Ptyp_class(c, tl) -> add bv c; List.iter (add_type bv) tl + | Ptyp_alias(t, _) -> add_type bv t + | Ptyp_variant(fl, _, _) -> + List.iter + (function Rtag(_,_,_,stl) -> List.iter (add_type bv) stl + | Rinherit sty -> add_type bv sty) + fl + | Ptyp_poly(_, t) -> add_type bv t + | Ptyp_package pt -> add_package_type bv pt + | Ptyp_extension e -> handle_extension e + +and add_package_type bv (lid, l) = + add bv lid; + List.iter (add_type bv) (List.map (fun (_, e) -> e) l) + +let add_opt add_fn bv = function + None -> () + | Some x -> add_fn bv x + +let add_constructor_arguments bv = function + | Pcstr_tuple l -> List.iter (add_type bv) l + | Pcstr_record l -> List.iter (fun l -> add_type bv l.pld_type) l + +let add_constructor_decl bv pcd = + add_constructor_arguments bv pcd.pcd_args; + Misc.may (add_type bv) pcd.pcd_res + +let add_type_declaration bv td = + List.iter + (fun (ty1, ty2, _) -> add_type bv ty1; add_type bv ty2) + td.ptype_cstrs; + add_opt add_type bv td.ptype_manifest; + let add_tkind = function + Ptype_abstract -> () + | Ptype_variant cstrs -> + List.iter (add_constructor_decl bv) cstrs + | Ptype_record lbls -> + List.iter (fun pld -> add_type bv pld.pld_type) lbls + | Ptype_open -> () in + add_tkind td.ptype_kind + +let add_extension_constructor bv ext = + match ext.pext_kind with + Pext_decl(args, rty) -> + add_constructor_arguments bv args; + Misc.may (add_type bv) rty + | Pext_rebind lid -> add bv lid + +let add_type_extension bv te = + add bv te.ptyext_path; + List.iter (add_extension_constructor bv) te.ptyext_constructors + +let rec add_class_type bv cty = + match cty.pcty_desc with + Pcty_constr(l, tyl) -> + add bv l; List.iter (add_type bv) tyl + | Pcty_signature { pcsig_self = ty; pcsig_fields = fieldl } -> + add_type bv ty; + List.iter (add_class_type_field bv) fieldl + | Pcty_arrow(_, ty1, cty2) -> + add_type bv ty1; add_class_type bv cty2 + | Pcty_extension e -> handle_extension e + | Pcty_open (_ovf, m, e) -> + let bv = open_module bv m.txt in add_class_type bv e + +and add_class_type_field bv pctf = + match pctf.pctf_desc with + Pctf_inherit cty -> add_class_type bv cty + | Pctf_val(_, _, _, ty) -> add_type bv ty + 
| Pctf_method(_, _, _, ty) -> add_type bv ty + | Pctf_constraint(ty1, ty2) -> add_type bv ty1; add_type bv ty2 + | Pctf_attribute _ -> () + | Pctf_extension e -> handle_extension e + +let add_class_description bv infos = + add_class_type bv infos.pci_expr + +let add_class_type_declaration = add_class_description + +let pattern_bv = ref StringMap.empty + +let rec add_pattern bv pat = + match pat.ppat_desc with + Ppat_any -> () + | Ppat_var _ -> () + | Ppat_alias(p, _) -> add_pattern bv p + | Ppat_interval _ + | Ppat_constant _ -> () + | Ppat_tuple pl -> List.iter (add_pattern bv) pl + | Ppat_construct(c, op) -> add bv c; add_opt add_pattern bv op + | Ppat_record(pl, _) -> + List.iter (fun (lbl, p) -> add bv lbl; add_pattern bv p) pl + | Ppat_array pl -> List.iter (add_pattern bv) pl + | Ppat_or(p1, p2) -> add_pattern bv p1; add_pattern bv p2 + | Ppat_constraint(p, ty) -> add_pattern bv p; add_type bv ty + | Ppat_variant(_, op) -> add_opt add_pattern bv op + | Ppat_type li -> add bv li + | Ppat_lazy p -> add_pattern bv p + | Ppat_unpack id -> pattern_bv := StringMap.add id.txt bound !pattern_bv + | Ppat_open ( m, p) -> let bv = open_module bv m.txt in add_pattern bv p + | Ppat_exception p -> add_pattern bv p + | Ppat_extension e -> handle_extension e + +let add_pattern bv pat = + pattern_bv := bv; + add_pattern bv pat; + !pattern_bv + +let rec add_expr bv exp = + match exp.pexp_desc with + Pexp_ident l -> add bv l + | Pexp_constant _ -> () + | Pexp_let(rf, pel, e) -> + let bv = add_bindings rf bv pel in add_expr bv e + | Pexp_fun (_, opte, p, e) -> + add_opt add_expr bv opte; add_expr (add_pattern bv p) e + | Pexp_function pel -> + add_cases bv pel + | Pexp_apply(e, el) -> + add_expr bv e; List.iter (fun (_,e) -> add_expr bv e) el + | Pexp_match(e, pel) -> add_expr bv e; add_cases bv pel + | Pexp_try(e, pel) -> add_expr bv e; add_cases bv pel + | Pexp_tuple el -> List.iter (add_expr bv) el + | Pexp_construct(c, opte) -> add bv c; add_opt add_expr bv opte + | Pexp_variant(_, opte) -> add_opt add_expr bv opte + | Pexp_record(lblel, opte) -> + List.iter (fun (lbl, e) -> add bv lbl; add_expr bv e) lblel; + add_opt add_expr bv opte + | Pexp_field(e, fld) -> add_expr bv e; add bv fld + | Pexp_setfield(e1, fld, e2) -> add_expr bv e1; add bv fld; add_expr bv e2 + | Pexp_array el -> List.iter (add_expr bv) el + | Pexp_ifthenelse(e1, e2, opte3) -> + add_expr bv e1; add_expr bv e2; add_opt add_expr bv opte3 + | Pexp_sequence(e1, e2) -> add_expr bv e1; add_expr bv e2 + | Pexp_while(e1, e2) -> add_expr bv e1; add_expr bv e2 + | Pexp_for( _, e1, e2, _, e3) -> + add_expr bv e1; add_expr bv e2; add_expr bv e3 + | Pexp_coerce(e1, oty2, ty3) -> + add_expr bv e1; + add_opt add_type bv oty2; + add_type bv ty3 + | Pexp_constraint(e1, ty2) -> + add_expr bv e1; + add_type bv ty2 + | Pexp_send(e, _m) -> add_expr bv e + | Pexp_new li -> add bv li + | Pexp_setinstvar(_v, e) -> add_expr bv e + | Pexp_override sel -> List.iter (fun (_s, e) -> add_expr bv e) sel + | Pexp_letmodule(id, m, e) -> + let b = add_module_binding bv m in + add_expr (StringMap.add id.txt b bv) e + | Pexp_letexception(_, e) -> add_expr bv e + | Pexp_assert (e) -> add_expr bv e + | Pexp_lazy (e) -> add_expr bv e + | Pexp_poly (e, t) -> add_expr bv e; add_opt add_type bv t + | Pexp_object { pcstr_self = pat; pcstr_fields = fieldl } -> + let bv = add_pattern bv pat in List.iter (add_class_field bv) fieldl + | Pexp_newtype (_, e) -> add_expr bv e + | Pexp_pack m -> add_module bv m + | Pexp_open (_ovf, m, e) -> + let bv = open_module bv m.txt in 
add_expr bv e + | Pexp_extension (({ txt = ("ocaml.extension_constructor"| + "extension_constructor"); _ }, + PStr [item]) as e) -> + begin match item.pstr_desc with + | Pstr_eval ({ pexp_desc = Pexp_construct (c, None) }, _) -> add bv c + | _ -> handle_extension e + end + | Pexp_extension e -> handle_extension e + | Pexp_unreachable -> () + +and add_cases bv cases = + List.iter (add_case bv) cases + +and add_case bv {pc_lhs; pc_guard; pc_rhs} = + let bv = add_pattern bv pc_lhs in + add_opt add_expr bv pc_guard; + add_expr bv pc_rhs + +and add_bindings recf bv pel = + let bv' = List.fold_left (fun bv x -> add_pattern bv x.pvb_pat) bv pel in + let bv = if recf = Recursive then bv' else bv in + List.iter (fun x -> add_expr bv x.pvb_expr) pel; + bv' + +and add_modtype bv mty = + match mty.pmty_desc with + Pmty_ident l -> add bv l + | Pmty_alias l -> addmodule bv l + | Pmty_signature s -> add_signature bv s + | Pmty_functor(id, mty1, mty2) -> + Misc.may (add_modtype bv) mty1; + add_modtype (StringMap.add id.txt bound bv) mty2 + | Pmty_with(mty, cstrl) -> + add_modtype bv mty; + List.iter + (function + | Pwith_type (_, td) -> add_type_declaration bv td + | Pwith_module (_, lid) -> addmodule bv lid + | Pwith_typesubst (_, td) -> add_type_declaration bv td + | Pwith_modsubst (_, lid) -> addmodule bv lid + ) + cstrl + | Pmty_typeof m -> add_module bv m + | Pmty_extension e -> handle_extension e + +and add_module_alias bv l = + try + add_parent bv l; + lookup_map l.txt bv + with Not_found -> + match l.txt with + Lident s -> make_leaf s + | _ -> addmodule bv l; bound (* cannot delay *) + +and add_modtype_binding bv mty = + if not !Clflags.transparent_modules then add_modtype bv mty; + match mty.pmty_desc with + Pmty_alias l -> + add_module_alias bv l + | Pmty_signature s -> + make_node (add_signature_binding bv s) + | Pmty_typeof modl -> + add_module_binding bv modl + | _ -> + if !Clflags.transparent_modules then add_modtype bv mty; bound + +and add_signature bv sg = + ignore (add_signature_binding bv sg) + +and add_signature_binding bv sg = + snd (List.fold_left add_sig_item (bv, StringMap.empty) sg) + +and add_sig_item (bv, m) item = + match item.psig_desc with + Psig_value vd -> + add_type bv vd.pval_type; (bv, m) + | Psig_type (_, dcls) -> + List.iter (add_type_declaration bv) dcls; (bv, m) + | Psig_typext te -> + add_type_extension bv te; (bv, m) + | Psig_exception pext -> + add_extension_constructor bv pext; (bv, m) + | Psig_module pmd -> + let m' = add_modtype_binding bv pmd.pmd_type in + let add = StringMap.add pmd.pmd_name.txt m' in + (add bv, add m) + | Psig_recmodule decls -> + let add = + List.fold_right (fun pmd -> StringMap.add pmd.pmd_name.txt bound) + decls + in + let bv' = add bv and m' = add m in + List.iter (fun pmd -> add_modtype bv' pmd.pmd_type) decls; + (bv', m') + | Psig_modtype x -> + begin match x.pmtd_type with + None -> () + | Some mty -> add_modtype bv mty + end; + (bv, m) + | Psig_open od -> + (open_module bv od.popen_lid.txt, m) + | Psig_include incl -> + let Node (s, m') = add_modtype_binding bv incl.pincl_mod in + add_names s; + let add = StringMap.fold StringMap.add m' in + (add bv, add m) + | Psig_class cdl -> + List.iter (add_class_description bv) cdl; (bv, m) + | Psig_class_type cdtl -> + List.iter (add_class_type_declaration bv) cdtl; (bv, m) + | Psig_attribute _ -> (bv, m) + | Psig_extension (e, _) -> + handle_extension e; + (bv, m) + +and add_module_binding bv modl = + if not !Clflags.transparent_modules then add_module bv modl; + match modl.pmod_desc with + 
Pmod_ident l -> + begin try + add_parent bv l; + lookup_map l.txt bv + with Not_found -> + match l.txt with + Lident s -> make_leaf s + | _ -> addmodule bv l; bound + end + | Pmod_structure s -> + make_node (snd (add_structure_binding bv s)) + | _ -> + if !Clflags.transparent_modules then add_module bv modl; bound + +and add_module bv modl = + match modl.pmod_desc with + Pmod_ident l -> addmodule bv l + | Pmod_structure s -> ignore (add_structure bv s) + | Pmod_functor(id, mty, modl) -> + Misc.may (add_modtype bv) mty; + add_module (StringMap.add id.txt bound bv) modl + | Pmod_apply(mod1, mod2) -> + add_module bv mod1; add_module bv mod2 + | Pmod_constraint(modl, mty) -> + add_module bv modl; add_modtype bv mty + | Pmod_unpack(e) -> + add_expr bv e + | Pmod_extension e -> + handle_extension e + +and add_structure bv item_list = + let (bv, m) = add_structure_binding bv item_list in + add_names (collect_free (make_node m)); + bv + +and add_structure_binding bv item_list = + List.fold_left add_struct_item (bv, StringMap.empty) item_list + +and add_struct_item (bv, m) item : _ StringMap.t * _ StringMap.t = + match item.pstr_desc with + Pstr_eval (e, _attrs) -> + add_expr bv e; (bv, m) + | Pstr_value(rf, pel) -> + let bv = add_bindings rf bv pel in (bv, m) + | Pstr_primitive vd -> + add_type bv vd.pval_type; (bv, m) + | Pstr_type (_, dcls) -> + List.iter (add_type_declaration bv) dcls; (bv, m) + | Pstr_typext te -> + add_type_extension bv te; + (bv, m) + | Pstr_exception pext -> + add_extension_constructor bv pext; (bv, m) + | Pstr_module x -> + let b = add_module_binding bv x.pmb_expr in + let add = StringMap.add x.pmb_name.txt b in + (add bv, add m) + | Pstr_recmodule bindings -> + let add = + List.fold_right (fun x -> StringMap.add x.pmb_name.txt bound) bindings + in + let bv' = add bv and m = add m in + List.iter + (fun x -> add_module bv' x.pmb_expr) + bindings; + (bv', m) + | Pstr_modtype x -> + begin match x.pmtd_type with + None -> () + | Some mty -> add_modtype bv mty + end; + (bv, m) + | Pstr_open od -> + (open_module bv od.popen_lid.txt, m) + | Pstr_class cdl -> + List.iter (add_class_declaration bv) cdl; (bv, m) + | Pstr_class_type cdtl -> + List.iter (add_class_type_declaration bv) cdtl; (bv, m) + | Pstr_include incl -> + let Node (s, m') = add_module_binding bv incl.pincl_mod in + add_names s; + let add = StringMap.fold StringMap.add m' in + (add bv, add m) + | Pstr_attribute _ -> (bv, m) + | Pstr_extension (e, _) -> + handle_extension e; + (bv, m) + +and add_use_file bv top_phrs = + ignore (List.fold_left add_top_phrase bv top_phrs) + +and add_implementation bv l = + if !Clflags.transparent_modules then + ignore (add_structure_binding bv l) + else ignore (add_structure bv l) + +and add_implementation_binding bv l = + snd (add_structure_binding bv l) + +and add_top_phrase bv = function + | Ptop_def str -> add_structure bv str + | Ptop_dir (_, _) -> bv + +and add_class_expr bv ce = + match ce.pcl_desc with + Pcl_constr(l, tyl) -> + add bv l; List.iter (add_type bv) tyl + | Pcl_structure { pcstr_self = pat; pcstr_fields = fieldl } -> + let bv = add_pattern bv pat in List.iter (add_class_field bv) fieldl + | Pcl_fun(_, opte, pat, ce) -> + add_opt add_expr bv opte; + let bv = add_pattern bv pat in add_class_expr bv ce + | Pcl_apply(ce, exprl) -> + add_class_expr bv ce; List.iter (fun (_,e) -> add_expr bv e) exprl + | Pcl_let(rf, pel, ce) -> + let bv = add_bindings rf bv pel in add_class_expr bv ce + | Pcl_constraint(ce, ct) -> + add_class_expr bv ce; add_class_type bv ct + | 
Pcl_extension e -> handle_extension e + | Pcl_open (_ovf, m, e) -> + let bv = open_module bv m.txt in add_class_expr bv e + +and add_class_field bv pcf = + match pcf.pcf_desc with + Pcf_inherit(_, ce, _) -> add_class_expr bv ce + | Pcf_val(_, _, Cfk_concrete (_, e)) + | Pcf_method(_, _, Cfk_concrete (_, e)) -> add_expr bv e + | Pcf_val(_, _, Cfk_virtual ty) + | Pcf_method(_, _, Cfk_virtual ty) -> add_type bv ty + | Pcf_constraint(ty1, ty2) -> add_type bv ty1; add_type bv ty2 + | Pcf_initializer e -> add_expr bv e + | Pcf_attribute _ -> () + | Pcf_extension e -> handle_extension e + +and add_class_declaration bv decl = + add_class_expr bv decl.pci_expr + +end +module Ext_array : sig +#1 "ext_array.mli" +(* Copyright (C) 2015-2016 Bloomberg Finance L.P. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * In addition to the permissions granted to you by the LGPL, you may combine + * or link a "work that uses the Library" with a publicly distributed version + * of this file to produce a combined library or application, then distribute + * that combined work under the terms of your choosing, with no requirement + * to comply with the obligations normally placed on you by section 4 of the + * LGPL version 3 (or the corresponding section of a later version of the LGPL + * should you choose to use a later version). + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) + + + + + + +(** Some utilities for {!Array} operations *) +val reverse_range : 'a array -> int -> int -> unit +val reverse_in_place : 'a array -> unit +val reverse : 'a array -> 'a array +val reverse_of_list : 'a list -> 'a array + +val filter : ('a -> bool) -> 'a array -> 'a array + +val filter_map : +'a array -> +('a -> 'b option) -> +'b array + +val range : int -> int -> int array + +val map2i : (int -> 'a -> 'b -> 'c ) -> 'a array -> 'b array -> 'c array + +val to_list_f : + 'a array -> + ('a -> 'b) -> + 'b list + +val to_list_map : +'a array -> ('a -> 'b option) -> 'b list + +val to_list_map_acc : + 'a array -> + 'b list -> + ('a -> 'b option) -> + 'b list + +val of_list_map : + 'a list -> + ('a -> 'b) -> + 'b array + +val rfind_with_index : 'a array -> ('a -> 'b -> bool) -> 'b -> int + + + +type 'a split = No_split | Split of 'a array * 'a array + + +val find_and_split : + 'a array -> + ('a -> 'b -> bool) -> + 'b -> 'a split + +val exists : ('a -> bool) -> 'a array -> bool + +val is_empty : 'a array -> bool + +val for_all2_no_exn : + 'a array -> + 'b array -> + ('a -> 'b -> bool) -> + bool + +val for_alli : + 'a array -> + (int -> 'a -> bool) -> + bool + +val map : + 'a array -> + ('a -> 'b) -> + 'b array + +val iter : + 'a array -> + ('a -> unit) -> + unit + +val fold_left : + 'b array -> + 'a -> + ('a -> 'b -> 'a) -> + 'a + +val get_or : + 'a array -> + int -> + (unit -> 'a) -> + 'a +end = struct +#1 "ext_array.ml" +(* Copyright (C) 2015-2016 Bloomberg Finance L.P. 
+ * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * In addition to the permissions granted to you by the LGPL, you may combine + * or link a "work that uses the Library" with a publicly distributed version + * of this file to produce a combined library or application, then distribute + * that combined work under the terms of your choosing, with no requirement + * to comply with the obligations normally placed on you by section 4 of the + * LGPL version 3 (or the corresponding section of a later version of the LGPL + * should you choose to use a later version). + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) + + + + + +let reverse_range a i len = + if len = 0 then () + else + for k = 0 to (len-1)/2 do + let t = Array.unsafe_get a (i+k) in + Array.unsafe_set a (i+k) ( Array.unsafe_get a (i+len-1-k)); + Array.unsafe_set a (i+len-1-k) t; + done + + +let reverse_in_place a = + reverse_range a 0 (Array.length a) + +let reverse a = + let b_len = Array.length a in + if b_len = 0 then [||] else + let b = Array.copy a in + for i = 0 to b_len - 1 do + Array.unsafe_set b i (Array.unsafe_get a (b_len - 1 -i )) + done; + b + +let reverse_of_list = function + | [] -> [||] + | hd::tl as l -> + let len = List.length l in + let a = Array.make len hd in + let rec fill i = function + | [] -> a + | hd::tl -> Array.unsafe_set a (len - i - 2) hd; fill (i+1) tl in + fill 0 tl + +let filter f a = + let arr_len = Array.length a in + let rec aux acc i = + if i = arr_len + then reverse_of_list acc + else + let v = Array.unsafe_get a i in + if f v then + aux (v::acc) (i+1) + else aux acc (i + 1) + in aux [] 0 + + +let filter_map a (f : _ -> _ option) = + let arr_len = Array.length a in + let rec aux acc i = + if i = arr_len + then reverse_of_list acc + else + let v = Array.unsafe_get a i in + match f v with + | Some v -> + aux (v::acc) (i+1) + | None -> + aux acc (i + 1) + in aux [] 0 + +let range from to_ = + if from > to_ then invalid_arg "Ext_array.range" + else Array.init (to_ - from + 1) (fun i -> i + from) + +let map2i f a b = + let len = Array.length a in + if len <> Array.length b then + invalid_arg "Ext_array.map2i" + else + Array.mapi (fun i a -> f i a ( Array.unsafe_get b i )) a + +let rec tolist_f_aux a f i res = + if i < 0 then res else + let v = Array.unsafe_get a i in + tolist_f_aux a f (i - 1) + (f v :: res) + +let to_list_f a f = tolist_f_aux a f (Array.length a - 1) [] + +let rec tolist_aux a f i res = + if i < 0 then res else + let v = Array.unsafe_get a i in + tolist_aux a f (i - 1) + (match f v with + | Some v -> v :: res + | None -> res) + +let to_list_map a f = + tolist_aux a f (Array.length a - 1) [] + +let to_list_map_acc a acc f = + tolist_aux a f (Array.length a - 1) acc + + +let of_list_map a f = + match a with + | [] -> [||] + | [a0] -> + let b0 = f a0 in + [|b0|] + | [a0;a1] -> + let b0 = f a0 in + let b1 = f a1 in + [|b0;b1|] + | 
[a0;a1;a2] -> + let b0 = f a0 in + let b1 = f a1 in + let b2 = f a2 in + [|b0;b1;b2|] + | [a0;a1;a2;a3] -> + let b0 = f a0 in + let b1 = f a1 in + let b2 = f a2 in + let b3 = f a3 in + [|b0;b1;b2;b3|] + | [a0;a1;a2;a3;a4] -> + let b0 = f a0 in + let b1 = f a1 in + let b2 = f a2 in + let b3 = f a3 in + let b4 = f a4 in + [|b0;b1;b2;b3;b4|] + + | a0::a1::a2::a3::a4::tl -> + let b0 = f a0 in + let b1 = f a1 in + let b2 = f a2 in + let b3 = f a3 in + let b4 = f a4 in + let len = List.length tl + 5 in + let arr = Array.make len b0 in + Array.unsafe_set arr 1 b1 ; + Array.unsafe_set arr 2 b2 ; + Array.unsafe_set arr 3 b3 ; + Array.unsafe_set arr 4 b4 ; + let rec fill i = function + | [] -> arr + | hd :: tl -> + Array.unsafe_set arr i (f hd); + fill (i + 1) tl in + fill 5 tl + +(** + {[ + # rfind_with_index [|1;2;3|] (=) 2;; + - : int = 1 + # rfind_with_index [|1;2;3|] (=) 1;; + - : int = 0 + # rfind_with_index [|1;2;3|] (=) 3;; + - : int = 2 + # rfind_with_index [|1;2;3|] (=) 4;; + - : int = -1 + ]} +*) +let rfind_with_index arr cmp v = + let len = Array.length arr in + let rec aux i = + if i < 0 then i + else if cmp (Array.unsafe_get arr i) v then i + else aux (i - 1) in + aux (len - 1) + +type 'a split = No_split | Split of 'a array * 'a array + + +let find_with_index arr cmp v = + let len = Array.length arr in + let rec aux i len = + if i >= len then -1 + else if cmp (Array.unsafe_get arr i ) v then i + else aux (i + 1) len in + aux 0 len + +let find_and_split arr cmp v : _ split = + let i = find_with_index arr cmp v in + if i < 0 then + No_split + else + Split (Array.sub arr 0 i, Array.sub arr (i + 1 ) (Array.length arr - i - 1)) + +(** TODO: available since 4.03, use {!Array.exists} *) + +let exists p a = + let n = Array.length a in + let rec loop i = + if i = n then false + else if p (Array.unsafe_get a i) then true + else loop (succ i) in + loop 0 + + +let is_empty arr = + Array.length arr = 0 + + +let rec unsafe_loop index len p xs ys = + if index >= len then true + else + p + (Array.unsafe_get xs index) + (Array.unsafe_get ys index) && + unsafe_loop (succ index) len p xs ys + +let for_alli a p = + let n = Array.length a in + let rec loop i = + if i = n then true + else if p i (Array.unsafe_get a i) then loop (succ i) + else false in + loop 0 + +let for_all2_no_exn xs ys p = + let len_xs = Array.length xs in + let len_ys = Array.length ys in + len_xs = len_ys && + unsafe_loop 0 len_xs p xs ys + + +let map a f = + let open Array in + let l = length a in + if l = 0 then [||] else begin + let r = make l (f(unsafe_get a 0)) in + for i = 1 to l - 1 do + unsafe_set r i (f(unsafe_get a i)) + done; + r + end + +let iter a f = + let open Array in + for i = 0 to length a - 1 do f(unsafe_get a i) done + + + let fold_left a x f = + let open Array in + let r = ref x in + for i = 0 to length a - 1 do + r := f !r (unsafe_get a i) + done; + !r + +let get_or arr i cb = + if i >=0 && i < Array.length arr then + Array.unsafe_get arr i + else cb () +end +module Bs_hash_stubs += struct +#1 "bs_hash_stubs.ml" + + + + +let hash_string : string -> int = Hashtbl.hash +let hash_string_int s i = Hashtbl.hash (s,i) +let hash_string_small_int : string -> int -> int = hash_string_int +let hash_stamp_and_name (i:int) (s:string) = Hashtbl.hash(i,s) +let hash_int (i:int) = Hashtbl.hash i +let string_length_based_compare (x : string ) (y : string) = + let len1 = String.length x in + let len2 = String.length y in + if len1 = len2 then String.compare x y + else compare (len1:int) len2 +let int_unsafe_blit: int array -> int 
-> int array -> int -> int -> unit = + Array.blit + + +end +module Ext_bytes : sig +#1 "ext_bytes.mli" +(* Copyright (C) 2015-2016 Bloomberg Finance L.P. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * In addition to the permissions granted to you by the LGPL, you may combine + * or link a "work that uses the Library" with a publicly distributed version + * of this file to produce a combined library or application, then distribute + * that combined work under the terms of your choosing, with no requirement + * to comply with the obligations normally placed on you by section 4 of the + * LGPL version 3 (or the corresponding section of a later version of the LGPL + * should you choose to use a later version). + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) + + + + + +external unsafe_blit_string : string -> int -> bytes -> int -> int -> unit + = "caml_blit_string" +[@@noalloc] + + + + +end = struct +#1 "ext_bytes.ml" +(* Copyright (C) 2015-2016 Bloomberg Finance L.P. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * In addition to the permissions granted to you by the LGPL, you may combine + * or link a "work that uses the Library" with a publicly distributed version + * of this file to produce a combined library or application, then distribute + * that combined work under the terms of your choosing, with no requirement + * to comply with the obligations normally placed on you by section 4 of the + * LGPL version 3 (or the corresponding section of a later version of the LGPL + * should you choose to use a later version). + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) + + + + + + + +external unsafe_blit_string : string -> int -> bytes -> int -> int -> unit + = "caml_blit_string" +[@@noalloc] + + +end +module Ext_string : sig +#1 "ext_string.mli" +(* Copyright (C) 2015-2016 Bloomberg Finance L.P. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. 
+ * + * In addition to the permissions granted to you by the LGPL, you may combine + * or link a "work that uses the Library" with a publicly distributed version + * of this file to produce a combined library or application, then distribute + * that combined work under the terms of your choosing, with no requirement + * to comply with the obligations normally placed on you by section 4 of the + * LGPL version 3 (or the corresponding section of a later version of the LGPL + * should you choose to use a later version). + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) + + + + + + + + +(** Extension to the standard library [String] module, fixed some bugs like + avoiding locale sensitivity *) + +(** default is false *) +val split_by : ?keep_empty:bool -> (char -> bool) -> string -> string list + + +(** remove whitespace letters ('\t', '\n', ' ') on both side*) +val trim : string -> string + + +(** default is false *) +val split : ?keep_empty:bool -> string -> char -> string list + +(** split by space chars for quick scripting *) +val quick_split_by_ws : string -> string list + + + +val starts_with : string -> string -> bool + +(** + return [-1] when not found, the returned index is useful + see [ends_with_then_chop] +*) +val ends_with_index : string -> string -> int + +val ends_with : string -> string -> bool + +(** + [ends_with_then_chop name ext] + @example: + {[ + ends_with_then_chop "a.cmj" ".cmj" + "a" + ]} + This is useful in controlled or file case sensitve system +*) +val ends_with_then_chop : string -> string -> string option + + + + +(** + [for_all_from s start p] + if [start] is negative, it raises, + if [start] is too large, it returns true +*) +val for_all_from: + string -> + int -> + (char -> bool) -> + bool + +val for_all : + string -> + (char -> bool) -> + bool + +val is_empty : string -> bool + +val repeat : int -> string -> string + +val equal : string -> string -> bool + +(** + [extract_until s cursor sep] + When [sep] not found, the cursor is updated to -1, + otherwise cursor is increased to 1 + [sep_position] + User can not determine whether it is found or not by + telling the return string is empty since + "\n\n" would result in an empty string too. 
+*) +(* val extract_until: + string -> + int ref -> (* cursor to be updated *) + char -> + string *) + +val index_count: + string -> + int -> + char -> + int -> + int + +(* val index_next : + string -> + int -> + char -> + int *) + + +(** + [find ~start ~sub s] + returns [-1] if not found +*) +val find : ?start:int -> sub:string -> string -> int + +val contain_substring : string -> string -> bool + +val non_overlap_count : sub:string -> string -> int + +val rfind : sub:string -> string -> int + +(** [tail_from s 1] + return a substring from offset 1 (inclusive) +*) +val tail_from : string -> int -> string + + +(** returns negative number if not found *) +val rindex_neg : string -> char -> int + +val rindex_opt : string -> char -> int option + + +val no_char : string -> char -> int -> int -> bool + + +val no_slash : string -> bool + +(** return negative means no slash, otherwise [i] means the place for first slash *) +val no_slash_idx : string -> int + +val no_slash_idx_from : string -> int -> int + +(** if no conversion happens, reference equality holds *) +val replace_slash_backward : string -> string + +(** if no conversion happens, reference equality holds *) +val replace_backward_slash : string -> string + +val empty : string + + +val compare : string -> string -> int + +val single_space : string + +val concat3 : string -> string -> string -> string +val concat4 : string -> string -> string -> string -> string +val concat5 : string -> string -> string -> string -> string -> string +val inter2 : string -> string -> string +val inter3 : string -> string -> string -> string +val inter4 : string -> string -> string -> string -> string +val concat_array : string -> string array -> string + +val single_colon : string + +val parent_dir_lit : string +val current_dir_lit : string + +val capitalize_ascii : string -> string + +val capitalize_sub: + string -> + int -> + string + +val uncapitalize_ascii : string -> string + +val lowercase_ascii : string -> string + +(** Play parity to {!Ext_buffer.add_int_1} *) +val get_int_1 : string -> int -> int +val get_int_2 : string -> int -> int +val get_int_3 : string -> int -> int +val get_int_4 : string -> int -> int + +val get_1_2_3_4 : + string -> + off:int -> + int -> + int + +val unsafe_sub : + string -> + int -> + int -> + string +end = struct +#1 "ext_string.ml" +(* Copyright (C) 2015-2016 Bloomberg Finance L.P. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * In addition to the permissions granted to you by the LGPL, you may combine + * or link a "work that uses the Library" with a publicly distributed version + * of this file to produce a combined library or application, then distribute + * that combined work under the terms of your choosing, with no requirement + * to comply with the obligations normally placed on you by section 4 of the + * LGPL version 3 (or the corresponding section of a later version of the LGPL + * should you choose to use a later version). + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) + + + + + + + +(* + {[ split " test_unsafe_obj_ffi_ppx.cmi" ~keep_empty:false ' ']} +*) +let split_by ?(keep_empty=false) is_delim str = + let len = String.length str in + let rec loop acc last_pos pos = + if pos = -1 then + if last_pos = 0 && not keep_empty then + + acc + else + String.sub str 0 last_pos :: acc + else + if is_delim str.[pos] then + let new_len = (last_pos - pos - 1) in + if new_len <> 0 || keep_empty then + let v = String.sub str (pos + 1) new_len in + loop ( v :: acc) + pos (pos - 1) + else loop acc pos (pos - 1) + else loop acc last_pos (pos - 1) + in + loop [] len (len - 1) + +let trim s = + let i = ref 0 in + let j = String.length s in + while !i < j && + let u = String.unsafe_get s !i in + u = '\t' || u = '\n' || u = ' ' + do + incr i; + done; + let k = ref (j - 1) in + while !k >= !i && + let u = String.unsafe_get s !k in + u = '\t' || u = '\n' || u = ' ' do + decr k ; + done; + String.sub s !i (!k - !i + 1) + +let split ?keep_empty str on = + if str = "" then [] else + split_by ?keep_empty (fun x -> (x : char) = on) str ;; + +let quick_split_by_ws str : string list = + split_by ~keep_empty:false (fun x -> x = '\t' || x = '\n' || x = ' ') str + +let starts_with s beg = + let beg_len = String.length beg in + let s_len = String.length s in + beg_len <= s_len && + (let i = ref 0 in + while !i < beg_len + && String.unsafe_get s !i = + String.unsafe_get beg !i do + incr i + done; + !i = beg_len + ) + +let rec ends_aux s end_ j k = + if k < 0 then (j + 1) + else if String.unsafe_get s j = String.unsafe_get end_ k then + ends_aux s end_ (j - 1) (k - 1) + else -1 + +(** return an index which is minus when [s] does not + end with [beg] +*) +let ends_with_index s end_ : int = + let s_finish = String.length s - 1 in + let s_beg = String.length end_ - 1 in + if s_beg > s_finish then -1 + else + ends_aux s end_ s_finish s_beg + +let ends_with s end_ = ends_with_index s end_ >= 0 + +let ends_with_then_chop s beg = + let i = ends_with_index s beg in + if i >= 0 then Some (String.sub s 0 i) + else None + +(* let check_suffix_case = ends_with *) +(* let check_suffix_case_then_chop = ends_with_then_chop *) + +(* let check_any_suffix_case s suffixes = + Ext_list.exists suffixes (fun x -> check_suffix_case s x) *) + +(* let check_any_suffix_case_then_chop s suffixes = + let rec aux suffixes = + match suffixes with + | [] -> None + | x::xs -> + let id = ends_with_index s x in + if id >= 0 then Some (String.sub s 0 id) + else aux xs in + aux suffixes *) + + + + +(* it is unsafe to expose such API as unsafe since + user can provide bad input range + +*) +let rec unsafe_for_all_range s ~start ~finish p = + start > finish || + p (String.unsafe_get s start) && + unsafe_for_all_range s ~start:(start + 1) ~finish p + +let for_all_from s start p = + let len = String.length s in + if start < 0 then invalid_arg "Ext_string.for_all_from" + else unsafe_for_all_range s ~start ~finish:(len - 1) p + + +let for_all s (p : char -> bool) = + unsafe_for_all_range s ~start:0 ~finish:(String.length s - 1) p + +let is_empty s = String.length s = 0 + + +let repeat n s = + let len = String.length s in + let res = Bytes.create(n * len) in + for i = 0 to pred n do + String.blit s 0 res (i * len) len + done; + Bytes.to_string res + + + + +let unsafe_is_sub ~sub i s j ~len = + let rec check 
k = + if k = len + then true + else + String.unsafe_get sub (i+k) = + String.unsafe_get s (j+k) && check (k+1) + in + j+len <= String.length s && check 0 + + + +let find ?(start=0) ~sub s = + let exception Local_exit in + let n = String.length sub in + let s_len = String.length s in + let i = ref start in + try + while !i + n <= s_len do + if unsafe_is_sub ~sub 0 s !i ~len:n then + raise_notrace Local_exit; + incr i + done; + -1 + with Local_exit -> + !i + +let contain_substring s sub = + find s ~sub >= 0 + +(** TODO: optimize + avoid nonterminating when string is empty +*) +let non_overlap_count ~sub s = + let sub_len = String.length sub in + let rec aux acc off = + let i = find ~start:off ~sub s in + if i < 0 then acc + else aux (acc + 1) (i + sub_len) in + if String.length sub = 0 then invalid_arg "Ext_string.non_overlap_count" + else aux 0 0 + + +let rfind ~sub s = + let exception Local_exit in + let n = String.length sub in + let i = ref (String.length s - n) in + try + while !i >= 0 do + if unsafe_is_sub ~sub 0 s !i ~len:n then + raise_notrace Local_exit; + decr i + done; + -1 + with Local_exit -> + !i + +let tail_from s x = + let len = String.length s in + if x > len then invalid_arg ("Ext_string.tail_from " ^s ^ " : "^ string_of_int x ) + else String.sub s x (len - x) + +let equal (x : string) y = x = y + +(* let rec index_rec s lim i c = + if i >= lim then -1 else + if String.unsafe_get s i = c then i + else index_rec s lim (i + 1) c *) + + + +let rec index_rec_count s lim i c count = + if i >= lim then -1 else + if String.unsafe_get s i = c then + if count = 1 then i + else index_rec_count s lim (i + 1) c (count - 1) + else index_rec_count s lim (i + 1) c count + +let index_count s i c count = + let lim = String.length s in + if i < 0 || i >= lim || count < 1 then + invalid_arg ("index_count: ( " ^string_of_int i ^ "," ^string_of_int count ^ ")" ); + index_rec_count s lim i c count + +(* let index_next s i c = + index_count s i c 1 *) + +(* let extract_until s cursor c = + let len = String.length s in + let start = !cursor in + if start < 0 || start >= len then ( + cursor := -1; + "" + ) + else + let i = index_rec s len start c in + let finish = + if i < 0 then ( + cursor := -1 ; + len + ) + else ( + cursor := i + 1; + i + ) in + String.sub s start (finish - start) *) + +let rec rindex_rec s i c = + if i < 0 then i else + if String.unsafe_get s i = c then i else rindex_rec s (i - 1) c;; + +let rec rindex_rec_opt s i c = + if i < 0 then None else + if String.unsafe_get s i = c then Some i else rindex_rec_opt s (i - 1) c;; + +let rindex_neg s c = + rindex_rec s (String.length s - 1) c;; + +let rindex_opt s c = + rindex_rec_opt s (String.length s - 1) c;; + + +(** TODO: can be improved to return a positive integer instead *) +let rec unsafe_no_char x ch i last_idx = + i > last_idx || + (String.unsafe_get x i <> ch && unsafe_no_char x ch (i + 1) last_idx) + +let rec unsafe_no_char_idx x ch i last_idx = + if i > last_idx then -1 + else + if String.unsafe_get x i <> ch then + unsafe_no_char_idx x ch (i + 1) last_idx + else i + +let no_char x ch i len : bool = + let str_len = String.length x in + if i < 0 || i >= str_len || len >= str_len then invalid_arg "Ext_string.no_char" + else unsafe_no_char x ch i len + + +let no_slash x = + unsafe_no_char x '/' 0 (String.length x - 1) + +let no_slash_idx x = + unsafe_no_char_idx x '/' 0 (String.length x - 1) + +let no_slash_idx_from x from = + let last_idx = String.length x - 1 in + assert (from >= 0); + unsafe_no_char_idx x '/' from last_idx + 
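(* Illustrative sketch (not part of the vendored patch): expected behaviour of
   the slash helpers defined just above, assuming the surrounding Ext_string
   definitions are in scope. [no_slash] is true when the string contains no '/',
   while [no_slash_idx] returns -1 in that case and otherwise the index of the
   first '/'; [no_slash_idx_from] starts the scan at a given offset. *)
let () =
  assert (no_slash "lib.cmj");
  assert (not (no_slash "lib/js"));
  assert (no_slash_idx "lib.cmj" = -1);
  assert (no_slash_idx "lib/js" = 3);
  assert (no_slash_idx_from "a/b/c" 2 = 3)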
+let replace_slash_backward (x : string ) = + let len = String.length x in + if unsafe_no_char x '/' 0 (len - 1) then x + else + String.map (function + | '/' -> '\\' + | x -> x ) x + +let replace_backward_slash (x : string)= + let len = String.length x in + if unsafe_no_char x '\\' 0 (len -1) then x + else + String.map (function + |'\\'-> '/' + | x -> x) x + +let empty = "" + + +let compare = Bs_hash_stubs.string_length_based_compare + +let single_space = " " +let single_colon = ":" + +let concat_array sep (s : string array) = + let s_len = Array.length s in + match s_len with + | 0 -> empty + | 1 -> Array.unsafe_get s 0 + | _ -> + let sep_len = String.length sep in + let len = ref 0 in + for i = 0 to s_len - 1 do + len := !len + String.length (Array.unsafe_get s i) + done; + let target = + Bytes.create + (!len + (s_len - 1) * sep_len ) in + let hd = (Array.unsafe_get s 0) in + let hd_len = String.length hd in + String.unsafe_blit hd 0 target 0 hd_len; + let current_offset = ref hd_len in + for i = 1 to s_len - 1 do + String.unsafe_blit sep 0 target !current_offset sep_len; + let cur = Array.unsafe_get s i in + let cur_len = String.length cur in + let new_off_set = (!current_offset + sep_len ) in + String.unsafe_blit cur 0 target new_off_set cur_len; + current_offset := + new_off_set + cur_len ; + done; + Bytes.unsafe_to_string target + +let concat3 a b c = + let a_len = String.length a in + let b_len = String.length b in + let c_len = String.length c in + let len = a_len + b_len + c_len in + let target = Bytes.create len in + String.unsafe_blit a 0 target 0 a_len ; + String.unsafe_blit b 0 target a_len b_len; + String.unsafe_blit c 0 target (a_len + b_len) c_len; + Bytes.unsafe_to_string target + +let concat4 a b c d = + let a_len = String.length a in + let b_len = String.length b in + let c_len = String.length c in + let d_len = String.length d in + let len = a_len + b_len + c_len + d_len in + + let target = Bytes.create len in + String.unsafe_blit a 0 target 0 a_len ; + String.unsafe_blit b 0 target a_len b_len; + String.unsafe_blit c 0 target (a_len + b_len) c_len; + String.unsafe_blit d 0 target (a_len + b_len + c_len) d_len; + Bytes.unsafe_to_string target + + +let concat5 a b c d e = + let a_len = String.length a in + let b_len = String.length b in + let c_len = String.length c in + let d_len = String.length d in + let e_len = String.length e in + let len = a_len + b_len + c_len + d_len + e_len in + + let target = Bytes.create len in + String.unsafe_blit a 0 target 0 a_len ; + String.unsafe_blit b 0 target a_len b_len; + String.unsafe_blit c 0 target (a_len + b_len) c_len; + String.unsafe_blit d 0 target (a_len + b_len + c_len) d_len; + String.unsafe_blit e 0 target (a_len + b_len + c_len + d_len) e_len; + Bytes.unsafe_to_string target + + + +let inter2 a b = + concat3 a single_space b + + +let inter3 a b c = + concat5 a single_space b single_space c + + + + + +let inter4 a b c d = + concat_array single_space [| a; b ; c; d|] + + +let parent_dir_lit = ".." +let current_dir_lit = "." 
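(* Illustrative sketch (not part of the vendored patch): the fixed-arity
   [concat3]/[concat4]/[concat5] helpers and [inter2]/[inter3] above build their
   result in a single [Bytes.create], and [concat_array] joins an array with an
   arbitrary separator. A few expected results, assuming the Ext_string
   definitions above are in scope: *)
let () =
  assert (concat3 "a" "b" "c" = "abc");
  assert (inter2 "opam" "exec" = "opam exec");
  assert (inter3 "dune" "build" "@all" = "dune build @all");
  assert (concat_array ";" [| "x"; "y"; "z" |] = "x;y;z");
  assert (concat_array ";" [||] = "")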
+ + +(* reference {!Bytes.unppercase} *) +let capitalize_ascii (s : string) : string = + if String.length s = 0 then s + else + begin + let c = String.unsafe_get s 0 in + if (c >= 'a' && c <= 'z') + || (c >= '\224' && c <= '\246') + || (c >= '\248' && c <= '\254') then + let uc = Char.unsafe_chr (Char.code c - 32) in + let bytes = Bytes.of_string s in + Bytes.unsafe_set bytes 0 uc; + Bytes.unsafe_to_string bytes + else s + end + +let capitalize_sub (s : string) len : string = + let slen = String.length s in + if len < 0 || len > slen then invalid_arg "Ext_string.capitalize_sub" + else + if len = 0 then "" + else + let bytes = Bytes.create len in + let uc = + let c = String.unsafe_get s 0 in + if (c >= 'a' && c <= 'z') + || (c >= '\224' && c <= '\246') + || (c >= '\248' && c <= '\254') then + Char.unsafe_chr (Char.code c - 32) else c in + Bytes.unsafe_set bytes 0 uc; + for i = 1 to len - 1 do + Bytes.unsafe_set bytes i (String.unsafe_get s i) + done ; + Bytes.unsafe_to_string bytes + + + +let uncapitalize_ascii = + String.uncapitalize_ascii + +let lowercase_ascii = String.lowercase_ascii + + + +let get_int_1 (x : string) off : int = + Char.code x.[off] + +let get_int_2 (x : string) off : int = + Char.code x.[off] lor + Char.code x.[off+1] lsl 8 + +let get_int_3 (x : string) off : int = + Char.code x.[off] lor + Char.code x.[off+1] lsl 8 lor + Char.code x.[off+2] lsl 16 + +let get_int_4 (x : string) off : int = + Char.code x.[off] lor + Char.code x.[off+1] lsl 8 lor + Char.code x.[off+2] lsl 16 lor + Char.code x.[off+3] lsl 24 + +let get_1_2_3_4 (x : string) ~off len : int = + if len = 1 then get_int_1 x off + else if len = 2 then get_int_2 x off + else if len = 3 then get_int_3 x off + else if len = 4 then get_int_4 x off + else assert false + +let unsafe_sub x offs len = + let b = Bytes.create len in + Ext_bytes.unsafe_blit_string x offs b 0 len; + (Bytes.unsafe_to_string b); +end +module Ext_filename : sig +#1 "ext_filename.mli" +(* Copyright (C) 2015-2016 Bloomberg Finance L.P. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * In addition to the permissions granted to you by the LGPL, you may combine + * or link a "work that uses the Library" with a publicly distributed version + * of this file to produce a combined library or application, then distribute + * that combined work under the terms of your choosing, with no requirement + * to comply with the obligations normally placed on you by section 4 of the + * LGPL version 3 (or the corresponding section of a later version of the LGPL + * should you choose to use a later version). + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) + + + + + +(* TODO: + Change the module name, this code is not really an extension of the standard + library but rather specific to JS Module name convention. +*) + + + + + +(** An extension module to calculate relative path follow node/npm style. 
+ TODO : this short name will have to change upon renaming the file. +*) + +val is_dir_sep : + char -> bool + +val maybe_quote: + string -> + string + +val chop_extension_maybe: + string -> + string + +(* return an empty string if no extension found *) +val get_extension_maybe: + string -> + string + + +val new_extension: + string -> + string -> + string + +val chop_all_extensions_maybe: + string -> + string + +(* OCaml specific abstraction*) +val module_name: + string -> + string + + + + +type module_info = { + module_name : string ; + case : bool; +} + + + +val as_module: + basename:string -> + module_info option +end = struct +#1 "ext_filename.ml" +(* Copyright (C) 2015-2016 Bloomberg Finance L.P. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * In addition to the permissions granted to you by the LGPL, you may combine + * or link a "work that uses the Library" with a publicly distributed version + * of this file to produce a combined library or application, then distribute + * that combined work under the terms of your choosing, with no requirement + * to comply with the obligations normally placed on you by section 4 of the + * LGPL version 3 (or the corresponding section of a later version of the LGPL + * should you choose to use a later version). + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) + + + + +let is_dir_sep_unix c = c = '/' +let is_dir_sep_win_cygwin c = + c = '/' || c = '\\' || c = ':' + +let is_dir_sep = + if Sys.unix then is_dir_sep_unix else is_dir_sep_win_cygwin + +(* reference ninja.cc IsKnownShellSafeCharacter *) +let maybe_quote ( s : string) = + let noneed_quote = + Ext_string.for_all s (function + | '0' .. '9' + | 'a' .. 'z' + | 'A' .. 'Z' + | '_' | '+' + | '-' | '.' + | '/' + | '@' -> true + | _ -> false + ) in + if noneed_quote then + s + else Filename.quote s + + +let chop_extension_maybe name = + let rec search_dot i = + if i < 0 || is_dir_sep (String.unsafe_get name i) then name + else if String.unsafe_get name i = '.' then String.sub name 0 i + else search_dot (i - 1) in + search_dot (String.length name - 1) + +let get_extension_maybe name = + let name_len = String.length name in + let rec search_dot name i name_len = + if i < 0 || is_dir_sep (String.unsafe_get name i) then "" + else if String.unsafe_get name i = '.' then String.sub name i (name_len - i) + else search_dot name (i - 1) name_len in + search_dot name (name_len - 1) name_len + +let chop_all_extensions_maybe name = + let rec search_dot i last = + if i < 0 || is_dir_sep (String.unsafe_get name i) then + (match last with + | None -> name + | Some i -> String.sub name 0 i) + else if String.unsafe_get name i = '.' 
then + search_dot (i - 1) (Some i) + else search_dot (i - 1) last in + search_dot (String.length name - 1) None + + +let new_extension name (ext : string) = + let rec search_dot name i ext = + if i < 0 || is_dir_sep (String.unsafe_get name i) then + name ^ ext + else if String.unsafe_get name i = '.' then + let ext_len = String.length ext in + let buf = Bytes.create (i + ext_len) in + Bytes.blit_string name 0 buf 0 i; + Bytes.blit_string ext 0 buf i ext_len; + Bytes.unsafe_to_string buf + else search_dot name (i - 1) ext in + search_dot name (String.length name - 1) ext + + + +(** TODO: improve efficiency + given a path, calcuate its module name + Note that `ocamlc.opt -c aa.xx.mli` gives `aa.xx.cmi` + we can not strip all extensions, otherwise + we can not tell the difference between "x.cpp.ml" + and "x.ml" +*) +let module_name name = + let rec search_dot i name = + if i < 0 then + Ext_string.capitalize_ascii name + else + if String.unsafe_get name i = '.' then + Ext_string.capitalize_sub name i + else + search_dot (i - 1) name in + let name = Filename.basename name in + let name_len = String.length name in + search_dot (name_len - 1) name + +type module_info = { + module_name : string ; + case : bool; +} + + + +let rec valid_module_name_aux name off len = + if off >= len then true + else + let c = String.unsafe_get name off in + match c with + | 'A'..'Z' | 'a'..'z' | '0'..'9' | '_' | '\'' -> + valid_module_name_aux name (off + 1) len + | _ -> false + +type state = + | Invalid + | Upper + | Lower + +let valid_module_name name len = + if len = 0 then Invalid + else + let c = String.unsafe_get name 0 in + match c with + | 'A' .. 'Z' + -> + if valid_module_name_aux name 1 len then + Upper + else Invalid + | 'a' .. 'z' + -> + if valid_module_name_aux name 1 len then + Lower + else Invalid + | _ -> Invalid + + +let as_module ~basename = + let rec search_dot i name name_len = + if i < 0 then + (* Input e.g, [a_b] *) + match valid_module_name name name_len with + | Invalid -> None + | Upper -> Some {module_name = name; case = true } + | Lower -> Some {module_name = Ext_string.capitalize_ascii name; case = false} + else + if String.unsafe_get name i = '.' then + (*Input e.g, [A_b] *) + match valid_module_name name i with + | Invalid -> None + | Upper -> + Some {module_name = Ext_string.capitalize_sub name i; case = true} + | Lower -> + Some {module_name = Ext_string.capitalize_sub name i; case = false} + else + search_dot (i - 1) name name_len in + let name_len = String.length basename in + search_dot (name_len - 1) basename name_len + +end +module Ext_format : sig +#1 "ext_format.mli" +(* Copyright (C) 2015-2016 Bloomberg Finance L.P. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * In addition to the permissions granted to you by the LGPL, you may combine + * or link a "work that uses the Library" with a publicly distributed version + * of this file to produce a combined library or application, then distribute + * that combined work under the terms of your choosing, with no requirement + * to comply with the obligations normally placed on you by section 4 of the + * LGPL version 3 (or the corresponding section of a later version of the LGPL + * should you choose to use a later version). 
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) + + + + + + + + +(** Simplified wrapper module for the standard library [Format] module. + *) + +type t = private Format.formatter + +(* val string : t -> string -> unit + +val break : t -> unit + +val break1 : t -> unit + +val space : t -> unit + +val group : t -> int -> (unit -> 'a) -> 'a +(** [group] will record current indentation + and indent futher + *) + +val vgroup : t -> int -> (unit -> 'a) -> 'a + +val paren : t -> (unit -> 'a) -> 'a + +val paren_group : t -> int -> (unit -> 'a) -> 'a + +val brace_group : t -> int -> (unit -> 'a) -> 'a + +val brace_vgroup : t -> int -> (unit -> 'a) -> 'a + +val bracket_group : t -> int -> (unit -> 'a) -> 'a + +val newline : t -> unit + +val to_out_channel : out_channel -> t + +val flush : t -> unit -> unit *) + +val pp_print_queue : + ?pp_sep:(Format.formatter -> unit -> unit) -> + (Format.formatter -> 'a -> unit) -> Format.formatter -> 'a Queue.t -> unit + +end = struct +#1 "ext_format.ml" +(* Copyright (C) 2015-2016 Bloomberg Finance L.P. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * In addition to the permissions granted to you by the LGPL, you may combine + * or link a "work that uses the Library" with a publicly distributed version + * of this file to produce a combined library or application, then distribute + * that combined work under the terms of your choosing, with no requirement + * to comply with the obligations normally placed on you by section 4 of the + * LGPL version 3 (or the corresponding section of a later version of the LGPL + * should you choose to use a later version). + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*) + + + + + + + + +open Format + +type t = formatter + +(* let string = pp_print_string *) + +(* let break = fun fmt -> pp_print_break fmt 0 0 + +let break1 = + fun fmt -> pp_print_break fmt 0 1 + +let space fmt = + pp_print_break fmt 1 0 + *) +(* let vgroup fmt indent u = + pp_open_vbox fmt indent; + let v = u () in + pp_close_box fmt (); + v + +let group fmt indent u = + pp_open_hovbox fmt indent; + let v = u () in + pp_close_box fmt (); + v + +let paren fmt u = + string fmt "("; + let v = u () in + string fmt ")"; + v + +let brace fmt u = + string fmt "{"; + (* break1 fmt ; *) + let v = u () in + string fmt "}"; + v + +let bracket fmt u = + string fmt "["; + let v = u () in + string fmt "]"; + v *) + +(* let paren_group st n action = + group st n (fun _ -> paren st action) + +let brace_group st n action = + group st n (fun _ -> brace st action ) + +let brace_vgroup st n action = + vgroup st n (fun _ -> + string st "{"; + pp_print_break st 0 2; + let v = vgroup st 0 action in + pp_print_break st 0 0; + string st "}"; + v + ) +let bracket_group st n action = + group st n (fun _ -> bracket st action) + +let newline fmt = pp_print_newline fmt () + +let to_out_channel = formatter_of_out_channel + +(* let non_breaking_space fmt = string fmt " " *) +(* let set_needed_space_function _ _ = () *) +let flush = pp_print_flush + *) +(* let list = pp_print_list *) + +let pp_print_queue ?(pp_sep = pp_print_cut) pp_v ppf q = + Queue.iter (fun q -> pp_v ppf q ; pp_sep ppf ()) q + +end +module Ext_list : sig +#1 "ext_list.mli" +(* Copyright (C) 2015-2016 Bloomberg Finance L.P. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * In addition to the permissions granted to you by the LGPL, you may combine + * or link a "work that uses the Library" with a publicly distributed version + * of this file to produce a combined library or application, then distribute + * that combined work under the terms of your choosing, with no requirement + * to comply with the obligations normally placed on you by section 4 of the + * LGPL version 3 (or the corresponding section of a later version of the LGPL + * should you choose to use a later version). + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*) + + +val map : + 'a list -> + ('a -> 'b) -> + 'b list + +val map_combine : + 'a list -> + 'b list -> + ('a -> 'c) -> + ('c * 'b) list + +val combine_array: + 'a array -> + 'b list -> + ('a -> 'c) -> + ('c * 'b) list + +val combine_array_append: + 'a array -> + 'b list -> + ('c * 'b) list -> + ('a -> 'c) -> + ('c * 'b) list + +val has_string : + string list -> + string -> + bool + + +val map_split_opt : + 'a list -> + ('a -> 'b option * 'c option) -> + 'b list * 'c list + +val mapi : + 'a list -> + (int -> 'a -> 'b) -> + 'b list + +val mapi_append : + 'a list -> + (int -> 'a -> 'b) -> + 'b list -> + 'b list + +val map_snd : ('a * 'b) list -> ('b -> 'c) -> ('a * 'c) list + +(** [map_last f xs ] + will pass [true] to [f] for the last element, + [false] otherwise. + For empty list, it returns empty +*) +val map_last : + 'a list -> + (bool -> 'a -> 'b) -> 'b list + +(** [last l] + return the last element + raise if the list is empty +*) +val last : 'a list -> 'a + +val append : + 'a list -> + 'a list -> + 'a list + +val append_one : + 'a list -> + 'a -> + 'a list + +val map_append : + 'b list -> + 'a list -> + ('b -> 'a) -> + 'a list + +val fold_right : + 'a list -> + 'b -> + ('a -> 'b -> 'b) -> + 'b + +val fold_right2 : + 'a list -> + 'b list -> + 'c -> + ('a -> 'b -> 'c -> 'c) -> 'c + +val fold_right3 : + 'a list -> + 'b list -> + 'c list -> + 'd -> + ('a -> 'b -> 'c -> 'd -> 'd) -> + 'd + + +val map2 : + 'a list -> + 'b list -> + ('a -> 'b -> 'c) -> + 'c list + +val fold_left_with_offset : + 'a list -> + 'acc -> + int -> + ('a -> 'acc -> int -> 'acc) -> + 'acc + + +(** @unused *) +val filter_map : + 'a list -> + ('a -> 'b option) -> + 'b list + +(** [exclude p l] is the opposite of [filter p l] *) +val exclude : + 'a list -> + ('a -> bool) -> + 'a list + +(** [excludes p l] + return a tuple [excluded,newl] + where [exluded] is true indicates that at least one + element is removed,[newl] is the new list where all [p x] for [x] is false + +*) +val exclude_with_val : + 'a list -> + ('a -> bool) -> + 'a list option + + +val same_length : 'a list -> 'b list -> bool + +val init : int -> (int -> 'a) -> 'a list + +(** [split_at n l] + will split [l] into two lists [a,b], [a] will be of length [n], + otherwise, it will raise +*) +val split_at : + 'a list -> + int -> + 'a list * 'a list + + +(** [split_at_last l] + It is equivalent to [split_at (List.length l - 1) l ] +*) +val split_at_last : 'a list -> 'a list * 'a + +val filter_mapi : + 'a list -> + ('a -> int -> 'b option) -> + 'b list + +val filter_map2 : + 'a list -> + 'b list -> + ('a -> 'b -> 'c option) -> + 'c list + + +val length_compare : 'a list -> int -> [`Gt | `Eq | `Lt ] + +val length_ge : 'a list -> int -> bool + +(** + + {[length xs = length ys + n ]} + input n should be positive + TODO: input checking +*) + +val length_larger_than_n : + 'a list -> + 'a list -> + int -> + bool + + +(** + [rev_map_append f l1 l2] + [map f l1] and reverse it to append [l2] + This weird semantics is due to it is the most efficient operation + we can do +*) +val rev_map_append : + 'a list -> + 'b list -> + ('a -> 'b) -> + 'b list + + +val flat_map : + 'a list -> + ('a -> 'b list) -> + 'b list + +val flat_map_append : + 'a list -> + 'b list -> + ('a -> 'b list) -> + 'b list + + +(** + [stable_group eq lst] + Example: + Input: + {[ + stable_group (=) [1;2;3;4;3] + ]} + Output: + {[ + [[1];[2];[4];[3;3]] + ]} + TODO: this is O(n^2) behavior + which could be improved later +*) +val stable_group : + 'a list -> + ('a -> 'a -> bool) -> + 'a list list + +(** 
[drop n list] + raise when [n] is negative + raise when list's length is less than [n] +*) +val drop : + 'a list -> + int -> + 'a list + +val find_first : + 'a list -> + ('a -> bool) -> + 'a option + +(** [find_first_not p lst ] + if all elements in [lst] pass, return [None] + otherwise return the first element [e] as [Some e] which + fails the predicate +*) +val find_first_not : + 'a list -> + ('a -> bool) -> + 'a option + +(** [find_opt f l] returns [None] if all return [None], + otherwise returns the first one. +*) + +val find_opt : + 'a list -> + ('a -> 'b option) -> + 'b option + +val find_def : + 'a list -> + ('a -> 'b option) -> + 'b -> + 'b + + +val rev_iter : + 'a list -> + ('a -> unit) -> + unit + +val iter: + 'a list -> + ('a -> unit) -> + unit + +val for_all: + 'a list -> + ('a -> bool) -> + bool +val for_all_snd: + ('a * 'b) list -> + ('b -> bool) -> + bool + +(** [for_all2_no_exn p xs ys] + return [true] if all satisfied, + [false] otherwise or length not equal +*) +val for_all2_no_exn : + 'a list -> + 'b list -> + ('a -> 'b -> bool) -> + bool + + + +(** [f] is applied follow the list order *) +val split_map : + 'a list -> + ('a -> 'b * 'c) -> + 'b list * 'c list + +(** [fn] is applied from left to right *) +val reduce_from_left : + 'a list -> + ('a -> 'a -> 'a) -> + 'a + +val sort_via_array : + 'a list -> + ('a -> 'a -> int) -> + 'a list + + + + +(** [assoc_by_string default key lst] + if [key] is found in the list return that val, + other unbox the [default], + otherwise [assert false ] +*) +val assoc_by_string : + (string * 'a) list -> + string -> + 'a option -> + 'a + +val assoc_by_int : + (int * 'a) list -> + int -> + 'a option -> + 'a + + +val nth_opt : 'a list -> int -> 'a option + +val iter_snd : ('a * 'b) list -> ('b -> unit) -> unit + +val iter_fst : ('a * 'b) list -> ('a -> unit) -> unit + +val exists : 'a list -> ('a -> bool) -> bool + +val exists_fst : + ('a * 'b) list -> + ('a -> bool) -> + bool + +val exists_snd : + ('a * 'b) list -> + ('b -> bool) -> + bool + +val concat_append: + 'a list list -> + 'a list -> + 'a list + +val fold_left2: + 'a list -> + 'b list -> + 'c -> + ('a -> 'b -> 'c -> 'c) + -> 'c + +val fold_left: + 'a list -> + 'b -> + ('b -> 'a -> 'b) -> + 'b + +val singleton_exn: + 'a list -> 'a + +val mem_string : + string list -> + string -> + bool +end = struct +#1 "ext_list.ml" +(* Copyright (C) 2015-2016 Bloomberg Finance L.P. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * In addition to the permissions granted to you by the LGPL, you may combine + * or link a "work that uses the Library" with a publicly distributed version + * of this file to produce a combined library or application, then distribute + * that combined work under the terms of your choosing, with no requirement + * to comply with the obligations normally placed on you by section 4 of the + * LGPL version 3 (or the corresponding section of a later version of the LGPL + * should you choose to use a later version). + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) + + + + +let rec map l f = + match l with + | [] -> + [] + | [x1] -> + let y1 = f x1 in + [y1] + | [x1; x2] -> + let y1 = f x1 in + let y2 = f x2 in + [y1; y2] + | [x1; x2; x3] -> + let y1 = f x1 in + let y2 = f x2 in + let y3 = f x3 in + [y1; y2; y3] + | [x1; x2; x3; x4] -> + let y1 = f x1 in + let y2 = f x2 in + let y3 = f x3 in + let y4 = f x4 in + [y1; y2; y3; y4] + | x1::x2::x3::x4::x5::tail -> + let y1 = f x1 in + let y2 = f x2 in + let y3 = f x3 in + let y4 = f x4 in + let y5 = f x5 in + y1::y2::y3::y4::y5::(map tail f) + +let rec has_string l f = + match l with + | [] -> + false + | [x1] -> + x1 = f + | [x1; x2] -> + x1 = f || x2 = f + | [x1; x2; x3] -> + x1 = f || x2 = f || x3 = f + | x1 :: x2 :: x3 :: x4 -> + x1 = f || x2 = f || x3 = f || has_string x4 f + +let rec map_combine l1 l2 f = + match (l1, l2) with + ([], []) -> [] + | (a1::l1, a2::l2) -> + (f a1, a2) :: map_combine l1 l2 f + | (_, _) -> + invalid_arg "Ext_list.map_combine" + +let rec combine_array_unsafe arr l i j acc f = + if i = j then acc + else + match l with + | [] -> invalid_arg "Ext_list.combine" + | h :: tl -> + (f (Array.unsafe_get arr i) , h) :: + combine_array_unsafe arr tl (i + 1) j acc f + +let combine_array_append arr l acc f = + let len = Array.length arr in + combine_array_unsafe arr l 0 len acc f + +let combine_array arr l f = + let len = Array.length arr in + combine_array_unsafe arr l 0 len [] f + +let rec map_split_opt + (xs : 'a list) (f : 'a -> 'b option * 'c option) + : 'b list * 'c list = + match xs with + | [] -> [], [] + | x::xs -> + let c,d = f x in + let cs,ds = map_split_opt xs f in + (match c with Some c -> c::cs | None -> cs), + (match d with Some d -> d::ds | None -> ds) + +let rec map_snd l f = + match l with + | [] -> + [] + | [ v1,x1 ] -> + let y1 = f x1 in + [v1,y1] + | [v1, x1; v2, x2] -> + let y1 = f x1 in + let y2 = f x2 in + [v1, y1; v2, y2] + | [ v1, x1; v2, x2; v3, x3] -> + let y1 = f x1 in + let y2 = f x2 in + let y3 = f x3 in + [v1, y1; v2, y2; v3, y3] + | [ v1, x1; v2, x2; v3, x3; v4, x4] -> + let y1 = f x1 in + let y2 = f x2 in + let y3 = f x3 in + let y4 = f x4 in + [v1, y1; v2, y2; v3, y3; v4, y4] + | (v1, x1) ::(v2, x2) :: (v3, x3)::(v4, x4) :: (v5, x5) ::tail -> + let y1 = f x1 in + let y2 = f x2 in + let y3 = f x3 in + let y4 = f x4 in + let y5 = f x5 in + (v1, y1)::(v2, y2) :: (v3, y3) :: (v4, y4) :: (v5, y5) :: (map_snd tail f) + + +let rec map_last l f= + match l with + | [] -> + [] + | [x1] -> + let y1 = f true x1 in + [y1] + | [x1; x2] -> + let y1 = f false x1 in + let y2 = f true x2 in + [y1; y2] + | [x1; x2; x3] -> + let y1 = f false x1 in + let y2 = f false x2 in + let y3 = f true x3 in + [y1; y2; y3] + | [x1; x2; x3; x4] -> + let y1 = f false x1 in + let y2 = f false x2 in + let y3 = f false x3 in + let y4 = f true x4 in + [y1; y2; y3; y4] + | x1::x2::x3::x4::tail -> + (* make sure that tail is not empty *) + let y1 = f false x1 in + let y2 = f false x2 in + let y3 = f false x3 in + let y4 = f false x4 in + y1::y2::y3::y4::(map_last tail f) + +let rec mapi_aux lst i f tail = + match lst with + [] -> tail + | a::l -> + let r = f i a in r :: mapi_aux l (i + 1) f tail + +let mapi lst f = mapi_aux lst 0 f [] +let mapi_append lst f tail = mapi_aux lst 0 f tail +let rec last xs = + match xs with + | [x] -> x + | _ :: tl -> last tl + | [] -> 
invalid_arg "Ext_list.last" + + + +let rec append_aux l1 l2 = + match l1 with + | [] -> l2 + | [a0] -> a0::l2 + | [a0;a1] -> a0::a1::l2 + | [a0;a1;a2] -> a0::a1::a2::l2 + | [a0;a1;a2;a3] -> a0::a1::a2::a3::l2 + | [a0;a1;a2;a3;a4] -> a0::a1::a2::a3::a4::l2 + | a0::a1::a2::a3::a4::rest -> a0::a1::a2::a3::a4::append_aux rest l2 + +let append l1 l2 = + match l2 with + | [] -> l1 + | _ -> append_aux l1 l2 + +let append_one l1 x = append_aux l1 [x] + +let rec map_append l1 l2 f = + match l1 with + | [] -> l2 + | [a0] -> f a0::l2 + | [a0;a1] -> + let b0 = f a0 in + let b1 = f a1 in + b0::b1::l2 + | [a0;a1;a2] -> + let b0 = f a0 in + let b1 = f a1 in + let b2 = f a2 in + b0::b1::b2::l2 + | [a0;a1;a2;a3] -> + let b0 = f a0 in + let b1 = f a1 in + let b2 = f a2 in + let b3 = f a3 in + b0::b1::b2::b3::l2 + | [a0;a1;a2;a3;a4] -> + let b0 = f a0 in + let b1 = f a1 in + let b2 = f a2 in + let b3 = f a3 in + let b4 = f a4 in + b0::b1::b2::b3::b4::l2 + + | a0::a1::a2::a3::a4::rest -> + let b0 = f a0 in + let b1 = f a1 in + let b2 = f a2 in + let b3 = f a3 in + let b4 = f a4 in + b0::b1::b2::b3::b4::map_append rest l2 f + + + +let rec fold_right l acc f = + match l with + | [] -> acc + | [a0] -> f a0 acc + | [a0;a1] -> f a0 (f a1 acc) + | [a0;a1;a2] -> f a0 (f a1 (f a2 acc)) + | [a0;a1;a2;a3] -> f a0 (f a1 (f a2 (f a3 acc))) + | [a0;a1;a2;a3;a4] -> + f a0 (f a1 (f a2 (f a3 (f a4 acc)))) + | a0::a1::a2::a3::a4::rest -> + f a0 (f a1 (f a2 (f a3 (f a4 (fold_right rest acc f ))))) + +let rec fold_right2 l r acc f = + match l,r with + | [],[] -> acc + | [a0],[b0] -> f a0 b0 acc + | [a0;a1],[b0;b1] -> f a0 b0 (f a1 b1 acc) + | [a0;a1;a2],[b0;b1;b2] -> f a0 b0 (f a1 b1 (f a2 b2 acc)) + | [a0;a1;a2;a3],[b0;b1;b2;b3] -> + f a0 b0 (f a1 b1 (f a2 b2 (f a3 b3 acc))) + | [a0;a1;a2;a3;a4], [b0;b1;b2;b3;b4] -> + f a0 b0 (f a1 b1 (f a2 b2 (f a3 b3 (f a4 b4 acc)))) + | a0::a1::a2::a3::a4::arest, b0::b1::b2::b3::b4::brest -> + f a0 b0 (f a1 b1 (f a2 b2 (f a3 b3 (f a4 b4 (fold_right2 arest brest acc f ))))) + | _, _ -> invalid_arg "Ext_list.fold_right2" + +let rec fold_right3 l r last acc f = + match l,r,last with + | [],[],[] -> acc + | [a0],[b0],[c0] -> f a0 b0 c0 acc + | [a0;a1],[b0;b1],[c0; c1] -> f a0 b0 c0 (f a1 b1 c1 acc) + | [a0;a1;a2],[b0;b1;b2],[c0;c1;c2] -> f a0 b0 c0 (f a1 b1 c1 (f a2 b2 c2 acc)) + | [a0;a1;a2;a3],[b0;b1;b2;b3],[c0;c1;c2;c3] -> + f a0 b0 c0 (f a1 b1 c1 (f a2 b2 c2 (f a3 b3 c3 acc))) + | [a0;a1;a2;a3;a4], [b0;b1;b2;b3;b4], [c0;c1;c2;c3;c4] -> + f a0 b0 c0 (f a1 b1 c1 (f a2 b2 c2 (f a3 b3 c3 (f a4 b4 c4 acc)))) + | a0::a1::a2::a3::a4::arest, b0::b1::b2::b3::b4::brest, c0::c1::c2::c3::c4::crest -> + f a0 b0 c0 (f a1 b1 c1 (f a2 b2 c2 (f a3 b3 c3 (f a4 b4 c4 (fold_right3 arest brest crest acc f ))))) + | _, _, _ -> invalid_arg "Ext_list.fold_right2" + +let rec map2 l r f = + match l,r with + | [],[] -> [] + | [a0],[b0] -> [f a0 b0] + | [a0;a1],[b0;b1] -> + let c0 = f a0 b0 in + let c1 = f a1 b1 in + [c0; c1] + | [a0;a1;a2],[b0;b1;b2] -> + let c0 = f a0 b0 in + let c1 = f a1 b1 in + let c2 = f a2 b2 in + [c0;c1;c2] + | [a0;a1;a2;a3],[b0;b1;b2;b3] -> + let c0 = f a0 b0 in + let c1 = f a1 b1 in + let c2 = f a2 b2 in + let c3 = f a3 b3 in + [c0;c1;c2;c3] + | [a0;a1;a2;a3;a4], [b0;b1;b2;b3;b4] -> + let c0 = f a0 b0 in + let c1 = f a1 b1 in + let c2 = f a2 b2 in + let c3 = f a3 b3 in + let c4 = f a4 b4 in + [c0;c1;c2;c3;c4] + | a0::a1::a2::a3::a4::arest, b0::b1::b2::b3::b4::brest -> + let c0 = f a0 b0 in + let c1 = f a1 b1 in + let c2 = f a2 b2 in + let c3 = f a3 b3 in + let c4 = f a4 b4 in + 
c0::c1::c2::c3::c4::map2 arest brest f + | _, _ -> invalid_arg "Ext_list.map2" + +let rec fold_left_with_offset l accu i f = + match l with + | [] -> accu + | a::l -> + fold_left_with_offset + l + (f a accu i) + (i + 1) + f + + +let rec filter_map xs (f: 'a -> 'b option)= + match xs with + | [] -> [] + | y :: ys -> + begin match f y with + | None -> filter_map ys f + | Some z -> z :: filter_map ys f + end + +let rec exclude (xs : 'a list) (p : 'a -> bool) : 'a list = + match xs with + | [] -> [] + | x::xs -> + if p x then exclude xs p + else x:: exclude xs p + +let rec exclude_with_val l p = + match l with + | [] -> None + | a0::xs -> + if p a0 then Some (exclude xs p) + else + match xs with + | [] -> None + | a1::rest -> + if p a1 then + Some (a0:: exclude rest p) + else + match exclude_with_val rest p with + | None -> None + | Some rest -> Some (a0::a1::rest) + + + +let rec same_length xs ys = + match xs, ys with + | [], [] -> true + | _::xs, _::ys -> same_length xs ys + | _, _ -> false + + +let init n f = + match n with + | 0 -> [] + | 1 -> + let a0 = f 0 in + [a0] + | 2 -> + let a0 = f 0 in + let a1 = f 1 in + [a0; a1] + | 3 -> + let a0 = f 0 in + let a1 = f 1 in + let a2 = f 2 in + [a0; a1; a2] + | 4 -> + let a0 = f 0 in + let a1 = f 1 in + let a2 = f 2 in + let a3 = f 3 in + [a0; a1; a2; a3] + | 5 -> + let a0 = f 0 in + let a1 = f 1 in + let a2 = f 2 in + let a3 = f 3 in + let a4 = f 4 in + [a0; a1; a2; a3; a4] + | _ -> + Array.to_list (Array.init n f) + +let rec rev_append l1 l2 = + match l1 with + | [] -> l2 + | [a0] -> a0::l2 (* single element is common *) + | [a0 ; a1] -> a1 :: a0 :: l2 + | a0::a1::a2::rest -> rev_append rest (a2::a1::a0::l2) + +let rev l = rev_append l [] + +let rec small_split_at n acc l = + if n <= 0 then rev acc , l + else + match l with + | x::xs -> small_split_at (n - 1) (x ::acc) xs + | _ -> invalid_arg "Ext_list.split_at" + +let split_at l n = + small_split_at n [] l + +let rec split_at_last_aux acc x = + match x with + | [] -> invalid_arg "Ext_list.split_at_last" + | [ x] -> rev acc, x + | y0::ys -> split_at_last_aux (y0::acc) ys + +let split_at_last (x : 'a list) = + match x with + | [] -> invalid_arg "Ext_list.split_at_last" + | [a0] -> + [], a0 + | [a0;a1] -> + [a0], a1 + | [a0;a1;a2] -> + [a0;a1], a2 + | [a0;a1;a2;a3] -> + [a0;a1;a2], a3 + | [a0;a1;a2;a3;a4] -> + [a0;a1;a2;a3], a4 + | a0::a1::a2::a3::a4::rest -> + let rev, last = split_at_last_aux [] rest + in + a0::a1::a2::a3::a4:: rev , last + +(** + can not do loop unroll due to state combination +*) +let filter_mapi xs f = + let rec aux i xs = + match xs with + | [] -> [] + | y :: ys -> + begin match f y i with + | None -> aux (i + 1) ys + | Some z -> z :: aux (i + 1) ys + end in + aux 0 xs + +let rec filter_map2 xs ys (f: 'a -> 'b -> 'c option) = + match xs,ys with + | [],[] -> [] + | u::us, v :: vs -> + begin match f u v with + | None -> filter_map2 us vs f (* idea: rec f us vs instead? 
*) + | Some z -> z :: filter_map2 us vs f + end + | _ -> invalid_arg "Ext_list.filter_map2" + + +let rec rev_map_append l1 l2 f = + match l1 with + | [] -> l2 + | a :: l -> rev_map_append l (f a :: l2) f + + + +(** It is not worth loop unrolling, + it is already tail-call, and we need to be careful + about evaluation order when unroll +*) +let rec flat_map_aux f acc append lx = + match lx with + | [] -> rev_append acc append + | a0::rest -> + let new_acc = + match f a0 with + | [] -> acc + | [a0] -> a0::acc + | [a0;a1] -> a1::a0::acc + | a0::a1::a2::rest -> + rev_append rest (a2::a1::a0::acc) + in + flat_map_aux f new_acc append rest + +let flat_map lx f = + flat_map_aux f [] [] lx + +let flat_map_append lx append f = + flat_map_aux f [] append lx + + +let rec length_compare l n = + if n < 0 then `Gt + else + begin match l with + | _ ::xs -> length_compare xs (n - 1) + | [] -> + if n = 0 then `Eq + else `Lt + end + +let rec length_ge l n = + if n > 0 then + match l with + | _ :: tl -> length_ge tl (n - 1) + | [] -> false + else true + +(** + {[length xs = length ys + n ]} +*) +let rec length_larger_than_n xs ys n = + match xs, ys with + | _, [] -> length_compare xs n = `Eq + | _::xs, _::ys -> + length_larger_than_n xs ys n + | [], _ -> false + + + + +let rec group (eq : 'a -> 'a -> bool) lst = + match lst with + | [] -> [] + | x::xs -> + aux eq x (group eq xs ) + +and aux eq (x : 'a) (xss : 'a list list) : 'a list list = + match xss with + | [] -> [[x]] + | (y0::_ as y)::ys -> (* cannot be empty *) + if eq x y0 then + (x::y) :: ys + else + y :: aux eq x ys + | _ :: _ -> assert false + +let stable_group lst eq = group eq lst |> rev + +let rec drop h n = + if n < 0 then invalid_arg "Ext_list.drop" + else + if n = 0 then h + else + match h with + | [] -> + invalid_arg "Ext_list.drop" + | _ :: tl -> + drop tl (n - 1) + +let rec find_first x p = + match x with + | [] -> None + | x :: l -> + if p x then Some x + else find_first l p + +let rec find_first_not xs p = + match xs with + | [] -> None + | a::l -> + if p a + then find_first_not l p + else Some a + + +let rec rev_iter l f = + match l with + | [] -> () + | [x1] -> + f x1 + | [x1; x2] -> + f x2 ; f x1 + | [x1; x2; x3] -> + f x3 ; f x2 ; f x1 + | [x1; x2; x3; x4] -> + f x4; f x3; f x2; f x1 + | x1::x2::x3::x4::x5::tail -> + rev_iter tail f; + f x5; f x4 ; f x3; f x2 ; f x1 + +let rec iter l f = + match l with + | [] -> () + | [x1] -> + f x1 + | [x1; x2] -> + f x1 ; f x2 + | [x1; x2; x3] -> + f x1 ; f x2 ; f x3 + | [x1; x2; x3; x4] -> + f x1; f x2; f x3; f x4 + | x1::x2::x3::x4::x5::tail -> + f x1; f x2 ; f x3; f x4 ; f x5; + iter tail f + + +let rec for_all lst p = + match lst with + [] -> true + | a::l -> p a && for_all l p + +let rec for_all_snd lst p = + match lst with + [] -> true + | (_,a)::l -> p a && for_all_snd l p + + +let rec for_all2_no_exn l1 l2 p = + match (l1, l2) with + | ([], []) -> true + | (a1::l1, a2::l2) -> p a1 a2 && for_all2_no_exn l1 l2 p + | (_, _) -> false + + +let rec find_opt xs p = + match xs with + | [] -> None + | x :: l -> + match p x with + | Some _ as v -> v + | None -> find_opt l p + +let rec find_def xs p def = + match xs with + | [] -> def + | x::l -> + match p x with + | Some v -> v + | None -> find_def l p def + +let rec split_map l f = + match l with + | [] -> + [],[] + | [x1] -> + let a0,b0 = f x1 in + [a0],[b0] + | [x1; x2] -> + let a1,b1 = f x1 in + let a2,b2 = f x2 in + [a1;a2],[b1;b2] + | [x1; x2; x3] -> + let a1,b1 = f x1 in + let a2,b2 = f x2 in + let a3,b3 = f x3 in + [a1;a2;a3], [b1;b2;b3] + | 
[x1; x2; x3; x4] -> + let a1,b1 = f x1 in + let a2,b2 = f x2 in + let a3,b3 = f x3 in + let a4,b4 = f x4 in + [a1;a2;a3;a4], [b1;b2;b3;b4] + | x1::x2::x3::x4::x5::tail -> + let a1,b1 = f x1 in + let a2,b2 = f x2 in + let a3,b3 = f x3 in + let a4,b4 = f x4 in + let a5,b5 = f x5 in + let ass,bss = split_map tail f in + a1::a2::a3::a4::a5::ass, + b1::b2::b3::b4::b5::bss + + + + +let sort_via_array lst cmp = + let arr = Array.of_list lst in + Array.sort cmp arr; + Array.to_list arr + + + + +let rec assoc_by_string lst (k : string) def = + match lst with + | [] -> + begin match def with + | None -> assert false + | Some x -> x end + | (k1,v1)::rest -> + if k1 = k then v1 else + assoc_by_string rest k def + +let rec assoc_by_int lst (k : int) def = + match lst with + | [] -> + begin match def with + | None -> assert false + | Some x -> x end + | (k1,v1)::rest -> + if k1 = k then v1 else + assoc_by_int rest k def + + +let rec nth_aux l n = + match l with + | [] -> None + | a::l -> if n = 0 then Some a else nth_aux l (n-1) + +let nth_opt l n = + if n < 0 then None + else + nth_aux l n + +let rec iter_snd lst f = + match lst with + | [] -> () + | (_,x)::xs -> + f x ; + iter_snd xs f + +let rec iter_fst lst f = + match lst with + | [] -> () + | (x,_)::xs -> + f x ; + iter_fst xs f + +let rec exists l p = + match l with + [] -> false + | x :: xs -> p x || exists xs p + +let rec exists_fst l p = + match l with + [] -> false + | (a,_)::l -> p a || exists_fst l p + +let rec exists_snd l p = + match l with + [] -> false + | (_, a)::l -> p a || exists_snd l p + +let rec concat_append + (xss : 'a list list) + (xs : 'a list) : 'a list = + match xss with + | [] -> xs + | l::r -> append l (concat_append r xs) + +let rec fold_left l accu f = + match l with + [] -> accu + | a::l -> fold_left l (f accu a) f + +let reduce_from_left lst fn = + match lst with + | first :: rest -> fold_left rest first fn + | _ -> invalid_arg "Ext_list.reduce_from_left" + +let rec fold_left2 l1 l2 accu f = + match (l1, l2) with + ([], []) -> accu + | (a1::l1, a2::l2) -> fold_left2 l1 l2 (f a1 a2 accu) f + | (_, _) -> invalid_arg "Ext_list.fold_left2" + +let singleton_exn xs = match xs with [x] -> x | _ -> assert false + +let rec mem_string (xs : string list) (x : string) = + match xs with + [] -> false + | a::l -> a = x || mem_string l x + +end +module Ext_pervasives : sig +#1 "ext_pervasives.mli" +(* Copyright (C) 2015-2016 Bloomberg Finance L.P. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * In addition to the permissions granted to you by the LGPL, you may combine + * or link a "work that uses the Library" with a publicly distributed version + * of this file to produce a combined library or application, then distribute + * that combined work under the terms of your choosing, with no requirement + * to comply with the obligations normally placed on you by section 4 of the + * LGPL version 3 (or the corresponding section of a later version of the LGPL + * should you choose to use a later version). + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) + + + + + + + + +(** Extension to standard library [Pervavives] module, safe to open + *) + +external reraise: exn -> 'a = "%reraise" + +val finally : + 'a -> + clean:('a -> 'c) -> + ('a -> 'b) -> 'b + +(* val try_it : (unit -> 'a) -> unit *) + +val with_file_as_chan : string -> (out_channel -> 'a) -> 'a + + + + + + + + + + + + + +(* external id : 'a -> 'a = "%identity" *) + +(** Copied from {!Btype.hash_variant}: + need sync up and add test case + *) +(* val hash_variant : string -> int *) + +(* val todo : string -> 'a *) + +val nat_of_string_exn : string -> int + +val parse_nat_of_string: + string -> + int ref -> + int +end = struct +#1 "ext_pervasives.ml" +(* Copyright (C) 2015-2016 Bloomberg Finance L.P. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * In addition to the permissions granted to you by the LGPL, you may combine + * or link a "work that uses the Library" with a publicly distributed version + * of this file to produce a combined library or application, then distribute + * that combined work under the terms of your choosing, with no requirement + * to comply with the obligations normally placed on you by section 4 of the + * LGPL version 3 (or the corresponding section of a later version of the LGPL + * should you choose to use a later version). + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*) + + + + + + +external reraise: exn -> 'a = "%reraise" + +let finally v ~clean:action f = + match f v with + | exception e -> + action v ; + reraise e + | e -> action v ; e + +(* let try_it f = + try ignore (f ()) with _ -> () *) + +let with_file_as_chan filename f = + finally (open_out_bin filename) ~clean:close_out f + + + + + + +(* external id : 'a -> 'a = "%identity" *) + +(* +let hash_variant s = + let accu = ref 0 in + for i = 0 to String.length s - 1 do + accu := 223 * !accu + Char.code s.[i] + done; + (* reduce to 31 bits *) + accu := !accu land (1 lsl 31 - 1); + (* make it signed for 64 bits architectures *) + if !accu > 0x3FFFFFFF then !accu - (1 lsl 31) else !accu *) + +(* let todo loc = + failwith (loc ^ " Not supported yet") + *) + + + +let rec int_of_string_aux s acc off len = + if off >= len then acc + else + let d = (Char.code (String.unsafe_get s off) - 48) in + if d >=0 && d <= 9 then + int_of_string_aux s (10*acc + d) (off + 1) len + else -1 (* error *) + +let nat_of_string_exn (s : string) = + let acc = int_of_string_aux s 0 0 (String.length s) in + if acc < 0 then invalid_arg s + else acc + + +(** return index *) +let parse_nat_of_string (s : string) (cursor : int ref) = + let current = !cursor in + assert (current >= 0); + let acc = ref 0 in + let s_len = String.length s in + let todo = ref true in + let cur = ref current in + while !todo && !cursor < s_len do + let d = Char.code (String.unsafe_get s !cur) - 48 in + if d >=0 && d <= 9 then begin + acc := 10* !acc + d; + incr cur + end else todo := false + done ; + cursor := !cur; + !acc +end +module Ext_fmt += struct +#1 "ext_fmt.ml" + + +let with_file_as_pp filename f = + Ext_pervasives.finally (open_out_bin filename) ~clean:close_out + (fun chan -> + let fmt = Format.formatter_of_out_channel chan in + let v = f fmt in + Format.pp_print_flush fmt (); + v + ) + + + +let failwithf ~loc fmt = Format.ksprintf (fun s -> failwith (loc ^ s)) + fmt + +let invalid_argf fmt = Format.ksprintf invalid_arg fmt + + +end +module Ext_sys : sig +#1 "ext_sys.mli" +(* Copyright (C) 2015-2016 Bloomberg Finance L.P. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * In addition to the permissions granted to you by the LGPL, you may combine + * or link a "work that uses the Library" with a publicly distributed version + * of this file to produce a combined library or application, then distribute + * that combined work under the terms of your choosing, with no requirement + * to comply with the obligations normally placed on you by section 4 of the + * LGPL version 3 (or the corresponding section of a later version of the LGPL + * should you choose to use a later version). + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*) + + +(* Not used yet *) +(* val is_directory_no_exn : string -> bool *) + + +val is_windows_or_cygwin : bool + + +end = struct +#1 "ext_sys.ml" +(* Copyright (C) 2015-2016 Bloomberg Finance L.P. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * In addition to the permissions granted to you by the LGPL, you may combine + * or link a "work that uses the Library" with a publicly distributed version + * of this file to produce a combined library or application, then distribute + * that combined work under the terms of your choosing, with no requirement + * to comply with the obligations normally placed on you by section 4 of the + * LGPL version 3 (or the corresponding section of a later version of the LGPL + * should you choose to use a later version). + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) + +(** TODO: not exported yet, wait for Windows Fix*) +(* let is_directory_no_exn f = + try Sys.is_directory f with _ -> false *) + + +let is_windows_or_cygwin = Sys.win32 || Sys.cygwin + + + +end +module Literals : sig +#1 "literals.mli" +(* Copyright (C) 2015-2016 Bloomberg Finance L.P. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * In addition to the permissions granted to you by the LGPL, you may combine + * or link a "work that uses the Library" with a publicly distributed version + * of this file to produce a combined library or application, then distribute + * that combined work under the terms of your choosing, with no requirement + * to comply with the obligations normally placed on you by section 4 of the + * LGPL version 3 (or the corresponding section of a later version of the LGPL + * should you choose to use a later version). + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*) + + + + + + +val js_array_ctor : string +val js_type_number : string +val js_type_string : string +val js_type_object : string +val js_type_boolean : string +val js_undefined : string +val js_prop_length : string + +val param : string +val partial_arg : string +val prim : string + +(**temporary varaible used in {!Js_ast_util} *) +val tmp : string + +val create : string +val runtime : string +val stdlib : string +val imul : string + +val setter_suffix : string +val setter_suffix_len : int + + +val debugger : string + +val unsafe_downgrade : string +val fn_run : string +val method_run : string +val fn_method : string +val fn_mk : string + +(** callback actually, not exposed to user yet *) +(* val js_fn_runmethod : string *) + +val bs_deriving : string +val bs_deriving_dot : string +val bs_type : string + +(** nodejs *) + +val node_modules : string +val node_modules_length : int +val package_json : string +val bsconfig_json : string +val build_ninja : string + +(* Name of the library file created for each external dependency. *) +val library_file : string + +val suffix_a : string +val suffix_cmj : string +val suffix_cmo : string +val suffix_cma : string +val suffix_cmi : string +val suffix_cmx : string +val suffix_cmxa : string +val suffix_ml : string +val suffix_mlast : string +val suffix_mlast_simple : string +val suffix_mliast : string +val suffix_reast : string +val suffix_reiast : string + +val suffix_mliast_simple : string +val suffix_mlmap : string +val suffix_mll : string +val suffix_re : string +val suffix_rei : string + +val suffix_d : string +val suffix_js : string +val suffix_bs_js : string +(* val suffix_re_js : string *) +val suffix_gen_js : string +val suffix_gen_tsx: string + +val suffix_tsx : string + +val suffix_mli : string +val suffix_cmt : string +val suffix_cmti : string + +val commonjs : string + +val es6 : string +val es6_global : string + +val unused_attribute : string +val dash_nostdlib : string + +val reactjs_jsx_ppx_2_exe : string +val reactjs_jsx_ppx_3_exe : string + +val native : string +val bytecode : string +val js : string + +val node_sep : string +val node_parent : string +val node_current : string +val gentype_import : string + +val bsbuild_cache : string + +val sourcedirs_meta : string + +val ns_sep_char : char +val ns_sep : string + +val exception_id : string + +val polyvar_hash : string +val polyvar_value : string + +val cons : string +val hd : string +val tl : string +val lazy_done : string +val lazy_val : string +end = struct +#1 "literals.ml" +(* Copyright (C) 2015-2016 Bloomberg Finance L.P. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * In addition to the permissions granted to you by the LGPL, you may combine + * or link a "work that uses the Library" with a publicly distributed version + * of this file to produce a combined library or application, then distribute + * that combined work under the terms of your choosing, with no requirement + * to comply with the obligations normally placed on you by section 4 of the + * LGPL version 3 (or the corresponding section of a later version of the LGPL + * should you choose to use a later version). 
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) + + + + + + + +let js_array_ctor = "Array" +let js_type_number = "number" +let js_type_string = "string" +let js_type_object = "object" +let js_type_boolean = "boolean" +let js_undefined = "undefined" +let js_prop_length = "length" + +let prim = "prim" +let param = "param" +let partial_arg = "partial_arg" +let tmp = "tmp" + +let create = "create" (* {!Caml_exceptions.create}*) + +let runtime = "runtime" (* runtime directory *) + +let stdlib = "stdlib" + +let imul = "imul" (* signed int32 mul *) + +let setter_suffix = "#=" +let setter_suffix_len = String.length setter_suffix + +let debugger = "debugger" +let unsafe_downgrade = "unsafe_downgrade" +let fn_run = "fn_run" +let method_run = "method_run" + +let fn_method = "fn_method" +let fn_mk = "fn_mk" +(*let js_fn_runmethod = "js_fn_runmethod"*) + +let bs_deriving = "bs.deriving" +let bs_deriving_dot = "bs.deriving." +let bs_type = "bs.type" + + +(** nodejs *) +let node_modules = "node_modules" +let node_modules_length = String.length "node_modules" +let package_json = "package.json" +let bsconfig_json = "bsconfig.json" +let build_ninja = "build.ninja" + +(* Name of the library file created for each external dependency. *) +let library_file = "lib" + +let suffix_a = ".a" +let suffix_cmj = ".cmj" +let suffix_cmo = ".cmo" +let suffix_cma = ".cma" +let suffix_cmi = ".cmi" +let suffix_cmx = ".cmx" +let suffix_cmxa = ".cmxa" +let suffix_mll = ".mll" +let suffix_ml = ".ml" +let suffix_mli = ".mli" +let suffix_re = ".re" +let suffix_rei = ".rei" +let suffix_mlmap = ".mlmap" + +let suffix_cmt = ".cmt" +let suffix_cmti = ".cmti" +let suffix_mlast = ".mlast" +let suffix_mlast_simple = ".mlast_simple" +let suffix_mliast = ".mliast" +let suffix_reast = ".reast" +let suffix_reiast = ".reiast" +let suffix_mliast_simple = ".mliast_simple" +let suffix_d = ".d" +let suffix_js = ".js" +let suffix_bs_js = ".bs.js" +(* let suffix_re_js = ".re.js" *) +let suffix_gen_js = ".gen.js" +let suffix_gen_tsx = ".gen.tsx" +let suffix_tsx = ".tsx" + +let commonjs = "commonjs" + +let es6 = "es6" +let es6_global = "es6-global" + +let unused_attribute = "Unused attribute " +let dash_nostdlib = "-nostdlib" + +let reactjs_jsx_ppx_2_exe = "reactjs_jsx_ppx_2.exe" +let reactjs_jsx_ppx_3_exe = "reactjs_jsx_ppx_3.exe" + +let native = "native" +let bytecode = "bytecode" +let js = "js" + + + +(** Used when produce node compatible paths *) +let node_sep = "/" +let node_parent = ".." +let node_current = "." 
+ +let gentype_import = "genType.import" + +let bsbuild_cache = ".bsbuild" + +let sourcedirs_meta = ".sourcedirs.json" + +(* Note the build system should check the validity of filenames + espeically, it should not contain '-' +*) +let ns_sep_char = '-' +let ns_sep = "-" +let exception_id = "RE_EXN_ID" + +let polyvar_hash = "HASH" +let polyvar_value = "VAL" + +let cons = "::" +let hd = "hd" +let tl = "tl" + +let lazy_done = "LAZY_DONE" +let lazy_val = "VAL" +end +module Ext_path : sig +#1 "ext_path.mli" +(* Copyright (C) 2017 Authors of BuckleScript + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * In addition to the permissions granted to you by the LGPL, you may combine + * or link a "work that uses the Library" with a publicly distributed version + * of this file to produce a combined library or application, then distribute + * that combined work under the terms of your choosing, with no requirement + * to comply with the obligations normally placed on you by section 4 of the + * LGPL version 3 (or the corresponding section of a later version of the LGPL + * should you choose to use a later version). + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) + +type t + + +(** Js_output is node style, which means + separator is only '/' + + if the path contains 'node_modules', + [node_relative_path] will discard its prefix and + just treat it as a library instead +*) +val simple_convert_node_path_to_os_path : string -> string + + + +(** + [combine path1 path2] + 1. add some simplifications when concatenating + 2. 
when [path2] is absolute, return [path2] +*) +val combine : + string -> + string -> + string + + + +(** + {[ + get_extension "a.txt" = ".txt" + get_extension "a" = "" + ]} +*) + + + + + +val node_rebase_file : + from:string -> + to_:string -> + string -> + string + +(** + TODO: could be highly optimized + if [from] and [to] resolve to the same path, a zero-length string is returned + Given that two paths are directory + + A typical use case is + {[ + Filename.concat + (rel_normalized_absolute_path cwd (Filename.dirname a)) + (Filename.basename a) + ]} +*) +val rel_normalized_absolute_path : from:string -> string -> string + + +val normalize_absolute_path : string -> string + + +val absolute_cwd_path : string -> string + +(** [concat dirname filename] + The same as {!Filename.concat} except a tiny optimization + for current directory simplification +*) +val concat : string -> string -> string + +val check_suffix_case : + string -> string -> bool + + + +(* It is lazy so that it will not hit errors when in script mode *) +val package_dir : string Lazy.t + +end = struct +#1 "ext_path.ml" +(* Copyright (C) 2017 Authors of BuckleScript + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * In addition to the permissions granted to you by the LGPL, you may combine + * or link a "work that uses the Library" with a publicly distributed version + * of this file to produce a combined library or application, then distribute + * that combined work under the terms of your choosing, with no requirement + * to comply with the obligations normally placed on you by section 4 of the + * LGPL version 3 (or the corresponding section of a later version of the LGPL + * should you choose to use a later version). + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*) + +(* [@@@warning "-37"] *) +type t = + (* | File of string *) + | Dir of string +[@@unboxed] + +let simple_convert_node_path_to_os_path = + if Sys.unix then fun x -> x + else if Sys.win32 || Sys.cygwin then + Ext_string.replace_slash_backward + else failwith ("Unknown OS : " ^ Sys.os_type) + + +let cwd = lazy (Sys.getcwd()) + +let split_by_sep_per_os : string -> string list = + if Ext_sys.is_windows_or_cygwin then + fun x -> + (* on Windows, we can still accept -bs-package-output lib/js *) + Ext_string.split_by + (fun x -> match x with |'/' |'\\' -> true | _ -> false) x + else + fun x -> Ext_string.split x '/' + +(** example + {[ + "/bb/mbigc/mbig2899/bgit/bucklescript/jscomp/stdlib/external/pervasives.cmj" + "/bb/mbigc/mbig2899/bgit/bucklescript/jscomp/stdlib/ocaml_array.ml" + ]} + + The other way + {[ + + "/bb/mbigc/mbig2899/bgit/bucklescript/jscomp/stdlib/ocaml_array.ml" + "/bb/mbigc/mbig2899/bgit/bucklescript/jscomp/stdlib/external/pervasives.cmj" + ]} + {[ + "/bb/mbigc/mbig2899/bgit/bucklescript/jscomp/stdlib//ocaml_array.ml" + ]} + {[ + /a/b + /c/d + ]} +*) +let node_relative_path + ~from:(file_or_dir_2 : t ) + (file_or_dir_1 : t) + = + let relevant_dir1 = + match file_or_dir_1 with + | Dir x -> x + (* | File file1 -> Filename.dirname file1 *) in + let relevant_dir2 = + match file_or_dir_2 with + | Dir x -> x + (* | File file2 -> Filename.dirname file2 *) in + let dir1 = split_by_sep_per_os relevant_dir1 in + let dir2 = split_by_sep_per_os relevant_dir2 in + let rec go (dir1 : string list) (dir2 : string list) = + match dir1, dir2 with + | "." :: xs, ys -> go xs ys + | xs , "." :: ys -> go xs ys + | x::xs , y :: ys when x = y + -> go xs ys + | _, _ -> + Ext_list.map_append dir2 dir1 (fun _ -> Literals.node_parent) + in + match go dir1 dir2 with + | (x :: _ ) as ys when x = Literals.node_parent -> + String.concat Literals.node_sep ys + | ys -> + String.concat Literals.node_sep + @@ Literals.node_current :: ys + + +let node_concat ~dir base = + dir ^ Literals.node_sep ^ base + +let node_rebase_file ~from ~to_ file = + + node_concat + ~dir:( + if from = to_ then Literals.node_current + else node_relative_path ~from:(Dir from) (Dir to_)) + file + + +(*** + {[ + Filename.concat "." "";; + "./" + ]} +*) +let combine path1 path2 = + if Filename.is_relative path2 then + if Ext_string.is_empty path2 then + path1 + else + if path1 = Filename.current_dir_name then + path2 + else + if path2 = Filename.current_dir_name + then path1 + else + Filename.concat path1 path2 + else + path2 + + + + + + + + +let (//) x y = + if x = Filename.current_dir_name then y + else if y = Filename.current_dir_name then x + else Filename.concat x y + +(** + {[ + split_aux "//ghosg//ghsogh/";; + - : string * string list = ("/", ["ghosg"; "ghsogh"]) + ]} + Note that + {[ + Filename.dirname "/a/" = "/" + Filename.dirname "/a/b/" = Filename.dirname "/a/b" = "/a" + ]} + Special case: + {[ + basename "//" = "/" + basename "///" = "/" + ]} + {[ + basename "" = "." + basename "" = "." + dirname "" = "." + dirname "" = "." 
+ ]} +*) +let split_aux p = + let rec go p acc = + let dir = Filename.dirname p in + if dir = p then dir, acc + else + let new_path = Filename.basename p in + if Ext_string.equal new_path Filename.dir_sep then + go dir acc + (* We could do more path simplification here + leave to [rel_normalized_absolute_path] + *) + else + go dir (new_path :: acc) + + in go p [] + + + + + +(** + TODO: optimization + if [from] and [to] resolve to the same path, a zero-length string is returned + + This function is useed in [es6-global] and + [amdjs-global] format and tailored for `rollup` +*) +let rel_normalized_absolute_path ~from to_ = + let root1, paths1 = split_aux from in + let root2, paths2 = split_aux to_ in + if root1 <> root2 then root2 + else + let rec go xss yss = + match xss, yss with + | x::xs, y::ys -> + if Ext_string.equal x y then go xs ys + else if x = Filename.current_dir_name then go xs yss + else if y = Filename.current_dir_name then go xss ys + else + let start = + Ext_list.fold_left xs Ext_string.parent_dir_lit (fun acc _ -> acc // Ext_string.parent_dir_lit ) + in + Ext_list.fold_left yss start (fun acc v -> acc // v) + | [], [] -> Ext_string.empty + | [], y::ys -> Ext_list.fold_left ys y (fun acc x -> acc // x) + | _::xs, [] -> + Ext_list.fold_left xs Ext_string.parent_dir_lit (fun acc _ -> acc // Ext_string.parent_dir_lit ) + in + let v = go paths1 paths2 in + + if Ext_string.is_empty v then Literals.node_current + else + if + v = "." + || v = ".." + || Ext_string.starts_with v "./" + || Ext_string.starts_with v "../" + then v + else "./" ^ v + +(*TODO: could be hgighly optimized later + {[ + normalize_absolute_path "/gsho/./..";; + + normalize_absolute_path "/a/b/../c../d/e/f";; + + normalize_absolute_path "/gsho/./..";; + + normalize_absolute_path "/gsho/./../..";; + + normalize_absolute_path "/a/b/c/d";; + + normalize_absolute_path "/a/b/c/d/";; + + normalize_absolute_path "/a/";; + + normalize_absolute_path "/a";; + ]} +*) +(** See tests in {!Ounit_path_tests} *) +let normalize_absolute_path x = + let drop_if_exist xs = + match xs with + | [] -> [] + | _ :: xs -> xs in + let rec normalize_list acc paths = + match paths with + | [] -> acc + | x :: xs -> + if Ext_string.equal x Ext_string.current_dir_lit then + normalize_list acc xs + else if Ext_string.equal x Ext_string.parent_dir_lit then + normalize_list (drop_if_exist acc ) xs + else + normalize_list (x::acc) xs + in + let root, paths = split_aux x in + let rev_paths = normalize_list [] paths in + let rec go acc rev_paths = + match rev_paths with + | [] -> Filename.concat root acc + | last::rest -> go (Filename.concat last acc ) rest in + match rev_paths with + | [] -> root + | last :: rest -> go last rest + + + + +let absolute_path cwd s = + let process s = + let s = + if Filename.is_relative s then + Lazy.force cwd // s + else s in + (* Now simplify . and .. 
components *) + let rec aux s = + let base,dir = Filename.basename s, Filename.dirname s in + if dir = s then dir + else if base = Filename.current_dir_name then aux dir + else if base = Filename.parent_dir_name then Filename.dirname (aux dir) + else aux dir // base + in aux s in + process s + +let absolute_cwd_path s = + absolute_path cwd s + +(* let absolute cwd s = + match s with + | File x -> File (absolute_path cwd x ) + | Dir x -> Dir (absolute_path cwd x) *) + +let concat dirname filename = + if filename = Filename.current_dir_name then dirname + else if dirname = Filename.current_dir_name then filename + else Filename.concat dirname filename + + +let check_suffix_case = + Ext_string.ends_with + +(* Input must be absolute directory *) +let rec find_root_filename ~cwd filename = + if Sys.file_exists ( Filename.concat cwd filename) then cwd + else + let cwd' = Filename.dirname cwd in + if String.length cwd' < String.length cwd then + find_root_filename ~cwd:cwd' filename + else + Ext_fmt.failwithf + ~loc:__LOC__ + "%s not found from %s" filename cwd + + +let find_package_json_dir cwd = + find_root_filename ~cwd Literals.bsconfig_json + +let package_dir = lazy (find_package_json_dir (Lazy.force cwd)) + +end +module Ext_ref : sig +#1 "ext_ref.mli" +(* Copyright (C) 2015-2016 Bloomberg Finance L.P. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * In addition to the permissions granted to you by the LGPL, you may combine + * or link a "work that uses the Library" with a publicly distributed version + * of this file to produce a combined library or application, then distribute + * that combined work under the terms of your choosing, with no requirement + * to comply with the obligations normally placed on you by section 4 of the + * LGPL version 3 (or the corresponding section of a later version of the LGPL + * should you choose to use a later version). + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) + +(** [non_exn_protect ref value f] assusme [f()] + would not raise +*) + +val non_exn_protect : 'a ref -> 'a -> (unit -> 'b) -> 'b +val protect : 'a ref -> 'a -> (unit -> 'b) -> 'b + +val protect2 : 'a ref -> 'b ref -> 'a -> 'b -> (unit -> 'c) -> 'c + +(** [non_exn_protect2 refa refb va vb f ] + assume [f ()] would not raise +*) +val non_exn_protect2 : 'a ref -> 'b ref -> 'a -> 'b -> (unit -> 'c) -> 'c + +val protect_list : ('a ref * 'a) list -> (unit -> 'b) -> 'b + +end = struct +#1 "ext_ref.ml" +(* Copyright (C) 2015-2016 Bloomberg Finance L.P. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. 
+ * + * In addition to the permissions granted to you by the LGPL, you may combine + * or link a "work that uses the Library" with a publicly distributed version + * of this file to produce a combined library or application, then distribute + * that combined work under the terms of your choosing, with no requirement + * to comply with the obligations normally placed on you by section 4 of the + * LGPL version 3 (or the corresponding section of a later version of the LGPL + * should you choose to use a later version). + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) + +let non_exn_protect r v body = + let old = !r in + r := v; + let res = body() in + r := old; + res + +let protect r v body = + let old = !r in + try + r := v; + let res = body() in + r := old; + res + with x -> + r := old; + raise x + +let non_exn_protect2 r1 r2 v1 v2 body = + let old1 = !r1 in + let old2 = !r2 in + r1 := v1; + r2 := v2; + let res = body() in + r1 := old1; + r2 := old2; + res + +let protect2 r1 r2 v1 v2 body = + let old1 = !r1 in + let old2 = !r2 in + try + r1 := v1; + r2 := v2; + let res = body() in + r1 := old1; + r2 := old2; + res + with x -> + r1 := old1; + r2 := old2; + raise x + +let protect_list rvs body = + let olds = Ext_list.map rvs (fun (x,_) -> !x) in + let () = List.iter (fun (x,y) -> x:=y) rvs in + try + let res = body () in + List.iter2 (fun (x,_) old -> x := old) rvs olds; + res + with e -> + List.iter2 (fun (x,_) old -> x := old) rvs olds; + raise e + +end +module Ext_util : sig +#1 "ext_util.mli" +(* Copyright (C) 2015-2016 Bloomberg Finance L.P. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * In addition to the permissions granted to you by the LGPL, you may combine + * or link a "work that uses the Library" with a publicly distributed version + * of this file to produce a combined library or application, then distribute + * that combined work under the terms of your choosing, with no requirement + * to comply with the obligations normally placed on you by section 4 of the + * LGPL version 3 (or the corresponding section of a later version of the LGPL + * should you choose to use a later version). + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) + + + +val power_2_above : int -> int -> int + + +val stats_to_string : Hashtbl.statistics -> string +end = struct +#1 "ext_util.ml" +(* Copyright (C) 2015-2016 Bloomberg Finance L.P. 
+ * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * In addition to the permissions granted to you by the LGPL, you may combine + * or link a "work that uses the Library" with a publicly distributed version + * of this file to produce a combined library or application, then distribute + * that combined work under the terms of your choosing, with no requirement + * to comply with the obligations normally placed on you by section 4 of the + * LGPL version 3 (or the corresponding section of a later version of the LGPL + * should you choose to use a later version). + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) + +(** + {[ + (power_2_above 16 63 = 64) + (power_2_above 16 76 = 128) + ]} +*) +let rec power_2_above x n = + if x >= n then x + else if x * 2 > Sys.max_array_length then x + else power_2_above (x * 2) n + + +let stats_to_string ({num_bindings; num_buckets; max_bucket_length; bucket_histogram} : Hashtbl.statistics) = + Printf.sprintf + "bindings: %d,buckets: %d, longest: %d, hist:[%s]" + num_bindings + num_buckets + max_bucket_length + (String.concat "," (Array.to_list (Array.map string_of_int bucket_histogram))) +end +module Hash_gen += struct +#1 "hash_gen.ml" +(***********************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. All rights reserved. This file is distributed *) +(* under the terms of the GNU Library General Public License, with *) +(* the special exception on linking described in file ../LICENSE. *) +(* *) +(***********************************************************************) + +(* Hash tables *) + + + + +(* We do dynamic hashing, and resize the table and rehash the elements + when buckets become too long. 
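(* Illustration of [Ext_util.power_2_above] above: it rounds [n] up to the
   next power of two, starting from [x] (itself assumed to be a power of
   two). [Hash_gen.create] below uses it to pick the initial bucket count. *)
let () =
  assert (Ext_util.power_2_above 16 63 = 64);
  assert (Ext_util.power_2_above 16 76 = 128);
  assert (Ext_util.power_2_above 16 10 = 16)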
*) + +type ('a, 'b) bucket = + | Empty + | Cons of { + mutable key : 'a ; + mutable data : 'b ; + mutable next : ('a, 'b) bucket + } + +type ('a, 'b) t = + { mutable size: int; (* number of entries *) + mutable data: ('a, 'b) bucket array; (* the buckets *) + initial_size: int; (* initial array size *) + } + + + +let create initial_size = + let s = Ext_util.power_2_above 16 initial_size in + { initial_size = s; size = 0; data = Array.make s Empty } + +let clear h = + h.size <- 0; + let len = Array.length h.data in + for i = 0 to len - 1 do + Array.unsafe_set h.data i Empty + done + +let reset h = + h.size <- 0; + h.data <- Array.make h.initial_size Empty + + +let length h = h.size + +let resize indexfun h = + let odata = h.data in + let osize = Array.length odata in + let nsize = osize * 2 in + if nsize < Sys.max_array_length then begin + let ndata = Array.make nsize Empty in + let ndata_tail = Array.make nsize Empty in + h.data <- ndata; (* so that indexfun sees the new bucket count *) + let rec insert_bucket = function + Empty -> () + | Cons {key; next} as cell -> + let nidx = indexfun h key in + begin match Array.unsafe_get ndata_tail nidx with + | Empty -> + Array.unsafe_set ndata nidx cell + | Cons tail -> + tail.next <- cell + end; + Array.unsafe_set ndata_tail nidx cell; + insert_bucket next + in + for i = 0 to osize - 1 do + insert_bucket (Array.unsafe_get odata i) + done; + for i = 0 to nsize - 1 do + match Array.unsafe_get ndata_tail i with + | Empty -> () + | Cons tail -> tail.next <- Empty + done + end + + + +let iter h f = + let rec do_bucket = function + | Empty -> + () + | Cons l -> + f l.key l.data; do_bucket l.next in + let d = h.data in + for i = 0 to Array.length d - 1 do + do_bucket (Array.unsafe_get d i) + done + +let fold h init f = + let rec do_bucket b accu = + match b with + Empty -> + accu + | Cons l -> + do_bucket l.next (f l.key l.data accu) in + let d = h.data in + let accu = ref init in + for i = 0 to Array.length d - 1 do + accu := do_bucket (Array.unsafe_get d i) !accu + done; + !accu + +let to_list h f = + fold h [] (fun k data acc -> f k data :: acc) + + + + +let rec small_bucket_mem (lst : _ bucket) eq key = + match lst with + | Empty -> false + | Cons lst -> + eq key lst.key || + match lst.next with + | Empty -> false + | Cons lst -> + eq key lst.key || + match lst.next with + | Empty -> false + | Cons lst -> + eq key lst.key || + small_bucket_mem lst.next eq key + + +let rec small_bucket_opt eq key (lst : _ bucket) : _ option = + match lst with + | Empty -> None + | Cons lst -> + if eq key lst.key then Some lst.data else + match lst.next with + | Empty -> None + | Cons lst -> + if eq key lst.key then Some lst.data else + match lst.next with + | Empty -> None + | Cons lst -> + if eq key lst.key then Some lst.data else + small_bucket_opt eq key lst.next + + +let rec small_bucket_key_opt eq key (lst : _ bucket) : _ option = + match lst with + | Empty -> None + | Cons {key=k; next} -> + if eq key k then Some k else + match next with + | Empty -> None + | Cons {key=k; next} -> + if eq key k then Some k else + match next with + | Empty -> None + | Cons {key=k; next} -> + if eq key k then Some k else + small_bucket_key_opt eq key next + + +let rec small_bucket_default eq key default (lst : _ bucket) = + match lst with + | Empty -> default + | Cons lst -> + if eq key lst.key then lst.data else + match lst.next with + | Empty -> default + | Cons lst -> + if eq key lst.key then lst.data else + match lst.next with + | Empty -> default + | Cons lst -> + if eq key 
lst.key then lst.data else + small_bucket_default eq key default lst.next + +let rec remove_bucket + h (i : int) + key + ~(prec : _ bucket) + (buck : _ bucket) + eq_key = + match buck with + | Empty -> + () + | Cons {key=k; next } -> + if eq_key k key + then begin + h.size <- h.size - 1; + match prec with + | Empty -> Array.unsafe_set h.data i next + | Cons c -> c.next <- next + end + else remove_bucket h i key ~prec:buck next eq_key + +let rec replace_bucket key data (buck : _ bucket) eq_key = + match buck with + | Empty -> + true + | Cons slot -> + if eq_key slot.key key + then (slot.key <- key; slot.data <- data; false) + else replace_bucket key data slot.next eq_key + +module type S = sig + type key + type 'a t + val create: int -> 'a t + val clear: 'a t -> unit + val reset: 'a t -> unit + + val add: 'a t -> key -> 'a -> unit + val add_or_update: + 'a t -> + key -> + update:('a -> 'a) -> + 'a -> unit + val remove: 'a t -> key -> unit + val find_exn: 'a t -> key -> 'a + val find_all: 'a t -> key -> 'a list + val find_opt: 'a t -> key -> 'a option + + (** return the key found in the hashtbl. + Use case: when you find the key existed in hashtbl, + you want to use the one stored in the hashtbl. + (they are semantically equivlanent, but may have other information different) + *) + val find_key_opt: 'a t -> key -> key option + + val find_default: 'a t -> key -> 'a -> 'a + + val replace: 'a t -> key -> 'a -> unit + val mem: 'a t -> key -> bool + val iter: 'a t -> (key -> 'a -> unit) -> unit + val fold: + 'a t -> 'b -> + (key -> 'a -> 'b -> 'b) -> 'b + val length: 'a t -> int + (* val stats: 'a t -> Hashtbl.statistics *) + val to_list : 'a t -> (key -> 'a -> 'c) -> 'c list + val of_list2: key list -> 'a list -> 'a t +end + + + + + +end +module Hash_string : sig +#1 "hash_string.mli" +(* Copyright (C) 2015-2016 Bloomberg Finance L.P. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * In addition to the permissions granted to you by the LGPL, you may combine + * or link a "work that uses the Library" with a publicly distributed version + * of this file to produce a combined library or application, then distribute + * that combined work under the terms of your choosing, with no requirement + * to comply with the obligations normally placed on you by section 4 of the + * LGPL version 3 (or the corresponding section of a later version of the LGPL + * should you choose to use a later version). + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
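(* Usage sketch of the hash-table interface [Hash_gen.S] above, via
   [Hash_string] (defined just below), which instantiates it with string
   keys. Note that [add] always prepends a new binding, while [replace]
   overwrites an existing one. *)
let demo_hash () =
  let tbl : int Hash_string.t = Hash_string.create 16 in
  Hash_string.replace tbl "x" 1;
  Hash_string.replace tbl "x" 2;
  Hash_string.add_or_update tbl "hits" ~update:(fun n -> n + 1) 1;
  assert (Hash_string.find_opt tbl "x" = Some 2);
  assert (Hash_string.find_default tbl "missing" 0 = 0);
  assert (Hash_string.mem tbl "hits")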
*) + + +include Hash_gen.S with type key = string + + + + +end = struct +#1 "hash_string.ml" +# 9 "ext/hash.cppo.ml" +type key = string +type 'a t = (key, 'a) Hash_gen.t +let key_index (h : _ t ) (key : key) = + (Bs_hash_stubs.hash_string key ) land (Array.length h.data - 1) +let eq_key = Ext_string.equal + +# 33 "ext/hash.cppo.ml" +type ('a, 'b) bucket = ('a,'b) Hash_gen.bucket +let create = Hash_gen.create +let clear = Hash_gen.clear +let reset = Hash_gen.reset +let iter = Hash_gen.iter +let to_list = Hash_gen.to_list +let fold = Hash_gen.fold +let length = Hash_gen.length +(* let stats = Hash_gen.stats *) + + + +let add (h : _ t) key data = + let i = key_index h key in + let h_data = h.data in + Array.unsafe_set h_data i (Cons{key; data; next=Array.unsafe_get h_data i}); + h.size <- h.size + 1; + if h.size > Array.length h_data lsl 1 then Hash_gen.resize key_index h + +(* after upgrade to 4.04 we should provide an efficient [replace_or_init] *) +let add_or_update + (h : 'a t) + (key : key) + ~update:(modf : 'a -> 'a) + (default : 'a) : unit = + let rec find_bucket (bucketlist : _ bucket) : bool = + match bucketlist with + | Cons rhs -> + if eq_key rhs.key key then begin rhs.data <- modf rhs.data; false end + else find_bucket rhs.next + | Empty -> true in + let i = key_index h key in + let h_data = h.data in + if find_bucket (Array.unsafe_get h_data i) then + begin + Array.unsafe_set h_data i (Cons{key; data=default; next = Array.unsafe_get h_data i}); + h.size <- h.size + 1 ; + if h.size > Array.length h_data lsl 1 then Hash_gen.resize key_index h + end + +let remove (h : _ t ) key = + let i = key_index h key in + let h_data = h.data in + Hash_gen.remove_bucket h i key ~prec:Empty (Array.unsafe_get h_data i) eq_key + +(* for short bucket list, [find_rec is not called ] *) +let rec find_rec key (bucketlist : _ bucket) = match bucketlist with + | Empty -> + raise Not_found + | Cons rhs -> + if eq_key key rhs.key then rhs.data else find_rec key rhs.next + +let find_exn (h : _ t) key = + match Array.unsafe_get h.data (key_index h key) with + | Empty -> raise Not_found + | Cons rhs -> + if eq_key key rhs.key then rhs.data else + match rhs.next with + | Empty -> raise Not_found + | Cons rhs -> + if eq_key key rhs.key then rhs.data else + match rhs.next with + | Empty -> raise Not_found + | Cons rhs -> + if eq_key key rhs.key then rhs.data else find_rec key rhs.next + +let find_opt (h : _ t) key = + Hash_gen.small_bucket_opt eq_key key (Array.unsafe_get h.data (key_index h key)) + +let find_key_opt (h : _ t) key = + Hash_gen.small_bucket_key_opt eq_key key (Array.unsafe_get h.data (key_index h key)) + +let find_default (h : _ t) key default = + Hash_gen.small_bucket_default eq_key key default (Array.unsafe_get h.data (key_index h key)) + +let find_all (h : _ t) key = + let rec find_in_bucket (bucketlist : _ bucket) = match bucketlist with + | Empty -> + [] + | Cons rhs -> + if eq_key key rhs.key + then rhs.data :: find_in_bucket rhs.next + else find_in_bucket rhs.next in + find_in_bucket (Array.unsafe_get h.data (key_index h key)) + + +let replace h key data = + let i = key_index h key in + let h_data = h.data in + let l = Array.unsafe_get h_data i in + if Hash_gen.replace_bucket key data l eq_key then + begin + Array.unsafe_set h_data i (Cons{key; data; next=l}); + h.size <- h.size + 1; + if h.size > Array.length h_data lsl 1 then Hash_gen.resize key_index h; + end + +let mem (h : _ t) key = + Hash_gen.small_bucket_mem + (Array.unsafe_get h.data (key_index h key)) + eq_key key + + +let 
of_list2 ks vs = + let len = List.length ks in + let map = create len in + List.iter2 (fun k v -> add map k v) ks vs ; + map + + +end +module Js_config : sig +#1 "js_config.mli" +(* Copyright (C) 2015-2016 Bloomberg Finance L.P. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * In addition to the permissions granted to you by the LGPL, you may combine + * or link a "work that uses the Library" with a publicly distributed version + * of this file to produce a combined library or application, then distribute + * that combined work under the terms of your choosing, with no requirement + * to comply with the obligations normally placed on you by section 4 of the + * LGPL version 3 (or the corresponding section of a later version of the LGPL + * should you choose to use a later version). + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) + + + + + +(* val get_packages_info : + unit -> Js_packages_info.t *) + + +(** set/get header *) +val no_version_header : bool ref + + +(** return [package_name] and [path] + when in script mode: +*) + +(* val get_current_package_name_and_path : + Js_packages_info.module_system -> + Js_packages_info.info_query *) + + +(* val set_package_name : string -> unit +val get_package_name : unit -> string option *) + +(** cross module inline option *) +val cross_module_inline : bool ref + +(** diagnose option *) +val diagnose : bool ref +val get_diagnose : unit -> bool +(* val set_diagnose : bool -> unit *) + + +(** options for builtin ppx *) +val no_builtin_ppx : bool ref + + + + + + +(** check-div-by-zero option *) +val check_div_by_zero : bool ref +val get_check_div_by_zero : unit -> bool + + + +val tool_name : string + + +val sort_imports : bool ref + +val syntax_only : bool ref +val binary_ast : bool ref +val simple_binary_ast : bool ref + + +val bs_suffix : bool ref +val debug : bool ref + +val cmi_only : bool ref +val cmj_only : bool ref +(* stopped after generating cmj *) +val force_cmi : bool ref +val force_cmj : bool ref + +val jsx_version : int ref +val refmt : string option ref +val is_reason : bool ref + +val js_stdout : bool ref + +val all_module_aliases : bool ref + +val no_stdlib: bool ref +val no_export: bool ref +val record_as_js_object : bool ref +val as_ppx : bool ref + +val mono_empty_array : bool ref +end = struct +#1 "js_config.ml" +(* Copyright (C) 2015-2016 Bloomberg Finance L.P. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. 
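(* Sketch: [Js_config] is a bag of mutable compiler flags consulted
   elsewhere in this bundle; for example, the dependency sort in
   [Ast_extract] below prints its module order when [diagnose] is set. *)
let () =
  Js_config.diagnose := true;
  assert (Js_config.get_diagnose ());
  Js_config.diagnose := false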
+ * + * In addition to the permissions granted to you by the LGPL, you may combine + * or link a "work that uses the Library" with a publicly distributed version + * of this file to produce a combined library or application, then distribute + * that combined work under the terms of your choosing, with no requirement + * to comply with the obligations normally placed on you by section 4 of the + * LGPL version 3 (or the corresponding section of a later version of the LGPL + * should you choose to use a later version). + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) + + + + + + +(* let add_npm_package_path s = + match !packages_info with + | Empty -> + Ext_arg.bad_argf "please set package name first using -bs-package-name "; + | NonBrowser(name, envs) -> + let env, path = + match Ext_string.split ~keep_empty:false s ':' with + | [ package_name; path] -> + (match Js_packages_info.module_system_of_string package_name with + | Some x -> x + | None -> + Ext_arg.bad_argf "invalid module system %s" package_name), path + | [path] -> + NodeJS, path + | _ -> + Ext_arg.bad_argf "invalid npm package path: %s" s + in + packages_info := NonBrowser (name, ((env,path) :: envs)) *) +(** Browser is not set via command line only for internal use *) + + +let no_version_header = ref false + +let cross_module_inline = ref false + + + +let diagnose = ref false +let get_diagnose () = !diagnose +(* let set_diagnose b = diagnose := b *) + +(* let (//) = Filename.concat *) + +(* let get_packages_info () = !packages_info *) + +let no_builtin_ppx = ref false + + + + +let tool_name = "BuckleScript" + +let check_div_by_zero = ref true +let get_check_div_by_zero () = !check_div_by_zero + + + + +let sort_imports = ref true + +let syntax_only = ref false +let binary_ast = ref false +let simple_binary_ast = ref false + +let bs_suffix = ref false + +let debug = ref false + +let cmi_only = ref false +let cmj_only = ref false + +let force_cmi = ref false +let force_cmj = ref false + +let jsx_version = ref (-1) + +let refmt = ref None + +let is_reason = ref false + +let js_stdout = ref true + +let all_module_aliases = ref false + +let no_stdlib = ref false + +let no_export = ref false + +let record_as_js_object = ref false (* otherwise has an attribute *) + +let as_ppx = ref false + +let mono_empty_array = ref true +end +module Map_gen += struct +#1 "map_gen.ml" +(***********************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. All rights reserved. This file is distributed *) +(* under the terms of the GNU Library General Public License, with *) +(* the special exception on linking described in file ../LICENSE. 
*) +(* *) +(***********************************************************************) +(** adapted from stdlib *) + +type ('key,'a) t = + | Empty + | Node of ('key,'a) t * 'key * 'a * ('key,'a) t * int + +type ('key,'a) enumeration = + | End + | More of 'key * 'a * ('key,'a) t * ('key, 'a) enumeration + +let rec cardinal_aux acc = function + | Empty -> acc + | Node (l,_,_,r, _) -> + cardinal_aux (cardinal_aux (acc + 1) r ) l + +let cardinal s = cardinal_aux 0 s + +let rec bindings_aux accu = function + | Empty -> accu + | Node(l, v, d, r, _) -> bindings_aux ((v, d) :: bindings_aux accu r) l + +let bindings s = + bindings_aux [] s + +let rec fill_array_with_f (s : _ t) i arr f : int = + match s with + | Empty -> i + | Node ( l ,k,v,r,_) -> + let inext = fill_array_with_f l i arr f in + Array.unsafe_set arr inext (f k v); + fill_array_with_f r (inext + 1) arr f + +let rec fill_array_aux (s : _ t) i arr : int = + match s with + | Empty -> i + | Node (l,k,v,r,_) -> + let inext = fill_array_aux l i arr in + Array.unsafe_set arr inext (k,v); + fill_array_aux r (inext + 1) arr + + +let to_sorted_array (s : ('key,'a) t) : ('key * 'a ) array = + match s with + | Empty -> [||] + | Node(l,k,v,r,_) -> + let len = + cardinal_aux (cardinal_aux 1 r) l in + let arr = + Array.make len (k,v) in + ignore (fill_array_aux s 0 arr : int); + arr + +let to_sorted_array_with_f (type key a b ) (s : (key,a) t) (f : key -> a -> b): b array = + match s with + | Empty -> [||] + | Node(l,k,v,r,_) -> + let len = + cardinal_aux (cardinal_aux 1 r) l in + let arr = + Array.make len (f k v) in + ignore (fill_array_with_f s 0 arr f: int); + arr + +let rec keys_aux accu = function + Empty -> accu + | Node(l, v, _, r, _) -> keys_aux (v :: keys_aux accu r) l + +let keys s = keys_aux [] s + + + +let rec cons_enum m e = + match m with + Empty -> e + | Node(l, v, d, r, _) -> cons_enum l (More(v, d, r, e)) + + +let height = function + | Empty -> 0 + | Node(_,_,_,_,h) -> h + +let create l x d r = + let hl = height l and hr = height r in + Node(l, x, d, r, (if hl >= hr then hl + 1 else hr + 1)) + +let singleton x d = Node(Empty, x, d, Empty, 1) + +let bal l x d r = + let hl = match l with Empty -> 0 | Node(_,_,_,_,h) -> h in + let hr = match r with Empty -> 0 | Node(_,_,_,_,h) -> h in + if hl > hr + 2 then begin + match l with + Empty -> invalid_arg "Map.bal" + | Node(ll, lv, ld, lr, _) -> + if height ll >= height lr then + create ll lv ld (create lr x d r) + else begin + match lr with + Empty -> invalid_arg "Map.bal" + | Node(lrl, lrv, lrd, lrr, _)-> + create (create ll lv ld lrl) lrv lrd (create lrr x d r) + end + end else if hr > hl + 2 then begin + match r with + Empty -> invalid_arg "Map.bal" + | Node(rl, rv, rd, rr, _) -> + if height rr >= height rl then + create (create l x d rl) rv rd rr + else begin + match rl with + Empty -> invalid_arg "Map.bal" + | Node(rll, rlv, rld, rlr, _) -> + create (create l x d rll) rlv rld (create rlr rv rd rr) + end + end else + Node(l, x, d, r, (if hl >= hr then hl + 1 else hr + 1)) + +let empty = Empty + +let is_empty = function Empty -> true | _ -> false + +let rec min_binding_exn = function + Empty -> raise Not_found + | Node(Empty, x, d, _, _) -> (x, d) + | Node(l, _, _, _, _) -> min_binding_exn l + +let choose = min_binding_exn + +let rec max_binding_exn = function + Empty -> raise Not_found + | Node(_, x, d, Empty, _) -> (x, d) + | Node(_, _, _, r, _) -> max_binding_exn r + +let rec remove_min_binding = function + Empty -> invalid_arg "Map.remove_min_elt" + | Node(Empty, _, _, r, _) -> r + | 
Node(l, x, d, r, _) -> bal (remove_min_binding l) x d r + +let merge t1 t2 = + match (t1, t2) with + (Empty, t) -> t + | (t, Empty) -> t + | (_, _) -> + let (x, d) = min_binding_exn t2 in + bal t1 x d (remove_min_binding t2) + + +let rec iter x f = match x with + Empty -> () + | Node(l, v, d, r, _) -> + iter l f; f v d; iter r f + +let rec map x f = match x with + Empty -> + Empty + | Node(l, v, d, r, h) -> + let l' = map l f in + let d' = f d in + let r' = map r f in + Node(l', v, d', r', h) + +let rec mapi x f = match x with + Empty -> + Empty + | Node(l, v, d, r, h) -> + let l' = mapi l f in + let d' = f v d in + let r' = mapi r f in + Node(l', v, d', r', h) + +let rec fold m accu f = + match m with + Empty -> accu + | Node(l, v, d, r, _) -> + fold r (f v d (fold l accu f)) f + +let rec for_all x p = match x with + Empty -> true + | Node(l, v, d, r, _) -> p v d && for_all l p && for_all r p + +let rec exists x p = match x with + Empty -> false + | Node(l, v, d, r, _) -> p v d || exists l p || exists r p + +(* Beware: those two functions assume that the added k is *strictly* + smaller (or bigger) than all the present keys in the tree; it + does not test for equality with the current min (or max) key. + + Indeed, they are only used during the "join" operation which + respects this precondition. +*) + +let rec add_min_binding k v = function + | Empty -> singleton k v + | Node (l, x, d, r, _) -> + bal (add_min_binding k v l) x d r + +let rec add_max_binding k v = function + | Empty -> singleton k v + | Node (l, x, d, r, _) -> + bal l x d (add_max_binding k v r) + +(* Same as create and bal, but no assumptions are made on the + relative heights of l and r. *) + +let rec join l v d r = + match (l, r) with + (Empty, _) -> add_min_binding v d r + | (_, Empty) -> add_max_binding v d l + | (Node(ll, lv, ld, lr, lh), Node(rl, rv, rd, rr, rh)) -> + if lh > rh + 2 then bal ll lv ld (join lr v d r) else + if rh > lh + 2 then bal (join l v d rl) rv rd rr else + create l v d r + +(* Merge two trees l and r into one. + All elements of l must precede the elements of r. + No assumption on the heights of l and r. 
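(* The balanced tree keeps keys ordered, so maps built in different
   insertion orders are interchangeable: [equal] and [compare] (defined just
   below) walk both trees through the [enumeration] type. Sketch, using
   [Map_string] as instantiated later in this file. *)
let () =
  let m1 = Map_string.of_list [ ("a", 1); ("b", 2) ] in
  let m2 = Map_string.of_list [ ("b", 2); ("a", 1) ] in
  assert (Map_string.equal m1 m2 ( = ));
  assert (Map_string.compare m1 m2 compare = 0);
  assert (Map_string.bindings m1 = [ ("a", 1); ("b", 2) ])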
*) + +let concat t1 t2 = + match (t1, t2) with + (Empty, t) -> t + | (t, Empty) -> t + | (_, _) -> + let (x, d) = min_binding_exn t2 in + join t1 x d (remove_min_binding t2) + +let concat_or_join t1 v d t2 = + match d with + | Some d -> join t1 v d t2 + | None -> concat t1 t2 + +let rec filter x p = match x with + Empty -> Empty + | Node(l, v, d, r, _) -> + (* call [p] in the expected left-to-right order *) + let l' = filter l p in + let pvd = p v d in + let r' = filter r p in + if pvd then join l' v d r' else concat l' r' + +let rec partition x p = match x with + Empty -> (Empty, Empty) + | Node(l, v, d, r, _) -> + (* call [p] in the expected left-to-right order *) + let (lt, lf) = partition l p in + let pvd = p v d in + let (rt, rf) = partition r p in + if pvd + then (join lt v d rt, concat lf rf) + else (concat lt rt, join lf v d rf) + +let compare compare_key cmp_val m1 m2 = + let rec compare_aux e1 e2 = + match (e1, e2) with + (End, End) -> 0 + | (End, _) -> -1 + | (_, End) -> 1 + | (More(v1, d1, r1, e1), More(v2, d2, r2, e2)) -> + let c = compare_key v1 v2 in + if c <> 0 then c else + let c = cmp_val d1 d2 in + if c <> 0 then c else + compare_aux (cons_enum r1 e1) (cons_enum r2 e2) + in compare_aux (cons_enum m1 End) (cons_enum m2 End) + +let equal compare_key cmp m1 m2 = + let rec equal_aux e1 e2 = + match (e1, e2) with + (End, End) -> true + | (End, _) -> false + | (_, End) -> false + | (More(v1, d1, r1, e1), More(v2, d2, r2, e2)) -> + compare_key v1 v2 = 0 && cmp d1 d2 && + equal_aux (cons_enum r1 e1) (cons_enum r2 e2) + in equal_aux (cons_enum m1 End) (cons_enum m2 End) + + + + +module type S = + sig + type key + type +'a t + val empty: 'a t + val compare_key: key -> key -> int + val is_empty: 'a t -> bool + val mem: 'a t -> key -> bool + val to_sorted_array : + 'a t -> (key * 'a ) array + val to_sorted_array_with_f : + 'a t -> (key -> 'a -> 'b) -> 'b array + val add: 'a t -> key -> 'a -> 'a t + (** [add x y m] + If [x] was already bound in [m], its previous binding disappears. *) + + val adjust: 'a t -> key -> ('a option-> 'a) -> 'a t + (** [adjust acc k replace ] if not exist [add (replace None ], otherwise + [add k v (replace (Some old))] + *) + + val singleton: key -> 'a -> 'a t + + val remove: 'a t -> key -> 'a t + (** [remove x m] returns a map containing the same bindings as + [m], except for [x] which is unbound in the returned map. *) + + val merge: + 'a t -> 'b t -> + (key -> 'a option -> 'b option -> 'c option) -> 'c t + (** [merge f m1 m2] computes a map whose keys is a subset of keys of [m1] + and of [m2]. The presence of each such binding, and the corresponding + value, is determined with the function [f]. + @since 3.12.0 + *) + + val disjoint_merge : 'a t -> 'a t -> 'a t + (* merge two maps, will raise if they have the same key *) + val compare: 'a t -> 'a t -> ('a -> 'a -> int) -> int + (** Total ordering between maps. The first argument is a total ordering + used to compare data associated with equal keys in the two maps. *) + + val equal: 'a t -> 'a t -> ('a -> 'a -> bool) -> bool + + val iter: 'a t -> (key -> 'a -> unit) -> unit + (** [iter f m] applies [f] to all bindings in map [m]. + The bindings are passed to [f] in increasing order. *) + + val fold: 'a t -> 'b -> (key -> 'a -> 'b -> 'b) -> 'b + (** [fold f m a] computes [(f kN dN ... (f k1 d1 a)...)], + where [k1 ... kN] are the keys of all bindings in [m] + (in increasing order) *) + + val for_all: 'a t -> (key -> 'a -> bool) -> bool + (** [for_all p m] checks if all the bindings of the map. 
+ order unspecified + *) + + val exists: 'a t -> (key -> 'a -> bool) -> bool + (** [exists p m] checks if at least one binding of the map + satisfy the predicate [p]. + order unspecified + *) + + val filter: 'a t -> (key -> 'a -> bool) -> 'a t + (** [filter p m] returns the map with all the bindings in [m] + that satisfy predicate [p]. + order unspecified + *) + + val partition: 'a t -> (key -> 'a -> bool) -> 'a t * 'a t + (** [partition p m] returns a pair of maps [(m1, m2)], where + [m1] contains all the bindings of [s] that satisfy the + predicate [p], and [m2] is the map with all the bindings of + [s] that do not satisfy [p]. + *) + + val cardinal: 'a t -> int + (** Return the number of bindings of a map. *) + + val bindings: 'a t -> (key * 'a) list + (** Return the list of all bindings of the given map. + The returned list is sorted in increasing order with respect + to the ordering *) + val keys : 'a t -> key list + (* Increasing order *) + + val min_binding_exn: 'a t -> (key * 'a) + (** raise [Not_found] if the map is empty. *) + + val max_binding_exn: 'a t -> (key * 'a) + (** Same as {!Map.S.min_binding} *) + + val choose: 'a t -> (key * 'a) + (** Return one binding of the given map, or raise [Not_found] if + the map is empty. Which binding is chosen is unspecified, + but equal bindings will be chosen for equal maps. + *) + + val split: 'a t -> key -> 'a t * 'a option * 'a t + (** [split x m] returns a triple [(l, data, r)], where + [l] is the map with all the bindings of [m] whose key + is strictly less than [x]; + [r] is the map with all the bindings of [m] whose key + is strictly greater than [x]; + [data] is [None] if [m] contains no binding for [x], + or [Some v] if [m] binds [v] to [x]. + @since 3.12.0 + *) + + val find_exn: 'a t -> key -> 'a + (** [find x m] returns the current binding of [x] in [m], + or raises [Not_found] if no such binding exists. *) + val find_opt: 'a t -> key ->'a option + val find_default: 'a t -> key -> 'a -> 'a + val map: 'a t -> ('a -> 'b) -> 'b t + (** [map f m] returns a map with same domain as [m], where the + associated value [a] of all bindings of [m] has been + replaced by the result of the application of [f] to [a]. + The bindings are passed to [f] in increasing order + with respect to the ordering over the type of the keys. *) + + val mapi: 'a t -> (key -> 'a -> 'b) -> 'b t + (** Same as {!Map.S.map}, but the function receives as arguments both the + key and the associated value for each binding of the map. *) + + val of_list : (key * 'a) list -> 'a t + val of_array : (key * 'a ) array -> 'a t + val add_list : (key * 'b) list -> 'b t -> 'b t + + end + +end +module Map_string : sig +#1 "map_string.mli" +(* Copyright (C) 2015-2016 Bloomberg Finance L.P. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * In addition to the permissions granted to you by the LGPL, you may combine + * or link a "work that uses the Library" with a publicly distributed version + * of this file to produce a combined library or application, then distribute + * that combined work under the terms of your choosing, with no requirement + * to comply with the obligations normally placed on you by section 4 of the + * LGPL version 3 (or the corresponding section of a later version of the LGPL + * should you choose to use a later version). 
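(* Usage sketch for [adjust] above (insert-or-update in a single traversal),
   using [Map_string] as instantiated just below: count word occurrences,
   inserting 1 for a new key and incrementing an existing one. *)
let count_words words =
  List.fold_left
    (fun acc w -> Map_string.adjust acc w (function None -> 1 | Some n -> n + 1))
    Map_string.empty words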
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) + + +include Map_gen.S with type key = string + +end = struct +#1 "map_string.ml" + +# 2 "ext/map.cppo.ml" +(* we don't create [map_poly], since some operations require raise an exception which carries [key] *) + + + +# 10 "ext/map.cppo.ml" + type key = string + let compare_key = Ext_string.compare + +# 22 "ext/map.cppo.ml" +type 'a t = (key,'a) Map_gen.t +exception Duplicate_key of key + +let empty = Map_gen.empty +let is_empty = Map_gen.is_empty +let iter = Map_gen.iter +let fold = Map_gen.fold +let for_all = Map_gen.for_all +let exists = Map_gen.exists +let singleton = Map_gen.singleton +let cardinal = Map_gen.cardinal +let bindings = Map_gen.bindings +let to_sorted_array = Map_gen.to_sorted_array +let to_sorted_array_with_f = Map_gen.to_sorted_array_with_f +let keys = Map_gen.keys +let choose = Map_gen.choose +let partition = Map_gen.partition +let filter = Map_gen.filter +let map = Map_gen.map +let mapi = Map_gen.mapi +let bal = Map_gen.bal +let height = Map_gen.height +let max_binding_exn = Map_gen.max_binding_exn +let min_binding_exn = Map_gen.min_binding_exn + + +let rec add (tree : _ Map_gen.t as 'a) x data : 'a = match tree with + | Empty -> + Node(Empty, x, data, Empty, 1) + | Node(l, v, d, r, h) -> + let c = compare_key x v in + if c = 0 then + Node(l, x, data, r, h) + else if c < 0 then + bal (add l x data ) v d r + else + bal l v d (add r x data ) + + +let rec adjust (tree : _ Map_gen.t as 'a) x replace : 'a = + match tree with + | Empty -> + Node(Empty, x, replace None, Empty, 1) + | Node(l, v, d, r, h) -> + let c = compare_key x v in + if c = 0 then + Node(l, x, replace (Some d) , r, h) + else if c < 0 then + bal (adjust l x replace ) v d r + else + bal l v d (adjust r x replace ) + + +let rec find_exn (tree : _ Map_gen.t ) x = match tree with + | Empty -> + raise Not_found + | Node(l, v, d, r, _) -> + let c = compare_key x v in + if c = 0 then d + else find_exn (if c < 0 then l else r) x + +let rec find_opt (tree : _ Map_gen.t ) x = match tree with + | Empty -> None + | Node(l, v, d, r, _) -> + let c = compare_key x v in + if c = 0 then Some d + else find_opt (if c < 0 then l else r) x + +let rec find_default (tree : _ Map_gen.t ) x default = match tree with + | Empty -> default + | Node(l, v, d, r, _) -> + let c = compare_key x v in + if c = 0 then d + else find_default (if c < 0 then l else r) x default + +let rec mem (tree : _ Map_gen.t ) x= match tree with + | Empty -> + false + | Node(l, v, _, r, _) -> + let c = compare_key x v in + c = 0 || mem (if c < 0 then l else r) x + +let rec remove (tree : _ Map_gen.t as 'a) x : 'a = match tree with + | Empty -> + Empty + | Node(l, v, d, r, _) -> + let c = compare_key x v in + if c = 0 then + Map_gen.merge l r + else if c < 0 then + bal (remove l x) v d r + else + bal l v d (remove r x ) + + +let rec split (tree : _ Map_gen.t as 'a) x : 'a * _ option * 'a = match tree with + | Empty -> + (Empty, None, Empty) + | Node(l, v, d, r, _) -> + let c = compare_key x v in + if c = 0 then (l, Some d, r) + else if c < 0 then + let (ll, pres, rl) = split l x in 
(ll, pres, Map_gen.join rl v d r) + else + let (lr, pres, rr) = split r x in (Map_gen.join l v d lr, pres, rr) + +let rec merge (s1 : _ Map_gen.t) (s2 : _ Map_gen.t) f : _ Map_gen.t = + match (s1, s2) with + | (Empty, Empty) -> Empty + | (Node (l1, v1, d1, r1, h1), _) when h1 >= height s2 -> + let (l2, d2, r2) = split s2 v1 in + Map_gen.concat_or_join (merge l1 l2 f) v1 (f v1 (Some d1) d2) (merge r1 r2 f) + | (_, Node (l2, v2, d2, r2, _)) -> + let (l1, d1, r1) = split s1 v2 in + Map_gen.concat_or_join (merge l1 l2 f) v2 (f v2 d1 (Some d2)) (merge r1 r2 f) + | _ -> + assert false + +let rec disjoint_merge (s1 : _ Map_gen.t) (s2 : _ Map_gen.t) : _ Map_gen.t = + match (s1, s2) with + | (Empty, Empty) -> Empty + | (Node (l1, v1, d1, r1, h1), _) when h1 >= height s2 -> + begin match split s2 v1 with + | l2, None, r2 -> + Map_gen.join (disjoint_merge l1 l2) v1 d1 (disjoint_merge r1 r2) + | _, Some _, _ -> + raise (Duplicate_key v1) + end + | (_, Node (l2, v2, d2, r2, _)) -> + begin match split s1 v2 with + | (l1, None, r1) -> + Map_gen.join (disjoint_merge l1 l2) v2 d2 (disjoint_merge r1 r2) + | (_, Some _, _) -> + raise (Duplicate_key v2) + end + | _ -> + assert false + + + +let compare m1 m2 cmp = Map_gen.compare compare_key cmp m1 m2 + +let equal m1 m2 cmp = Map_gen.equal compare_key cmp m1 m2 + +let add_list (xs : _ list ) init = + Ext_list.fold_left xs init (fun acc (k,v) -> add acc k v ) + +let of_list xs = add_list xs empty + +let of_array xs = + Ext_array.fold_left xs empty (fun acc (k,v) -> add acc k v ) + +end +module Ml_binary : sig +#1 "ml_binary.mli" +(* Copyright (C) 2015-2016 Bloomberg Finance L.P. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * In addition to the permissions granted to you by the LGPL, you may combine + * or link a "work that uses the Library" with a publicly distributed version + * of this file to produce a combined library or application, then distribute + * that combined work under the terms of your choosing, with no requirement + * to comply with the obligations normally placed on you by section 4 of the + * LGPL version 3 (or the corresponding section of a later version of the LGPL + * should you choose to use a later version). + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) + + + +type _ kind = + | Ml : Parsetree.structure kind + | Mli : Parsetree.signature kind + + +val read_ast : 'a kind -> in_channel -> 'a + +val write_ast : + 'a kind -> string -> 'a -> out_channel -> unit +end = struct +#1 "ml_binary.ml" +(* Copyright (C) 2015-2016 Bloomberg Finance L.P. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. 
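(* Usage sketch for [Map_string] above: construction, lookup, and the
   key-wise [merge] combinator. *)
let () =
  let m = Map_string.add (Map_string.of_list [ ("b", 2); ("a", 1) ]) "c" 3 in
  assert (Map_string.find_opt m "a" = Some 1);
  assert (Map_string.keys m = [ "a"; "b"; "c" ]);
  let sum =
    Map_string.merge m
      (Map_string.of_list [ ("a", 10) ])
      (fun _key l r ->
        match (l, r) with
        | Some x, Some y -> Some (x + y)
        | (Some _ as v), None | None, (Some _ as v) -> v
        | None, None -> None)
  in
  assert (Map_string.find_opt sum "a" = Some 11);
  assert (Map_string.find_opt sum "c" = Some 3)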
+ * + * In addition to the permissions granted to you by the LGPL, you may combine + * or link a "work that uses the Library" with a publicly distributed version + * of this file to produce a combined library or application, then distribute + * that combined work under the terms of your choosing, with no requirement + * to comply with the obligations normally placed on you by section 4 of the + * LGPL version 3 (or the corresponding section of a later version of the LGPL + * should you choose to use a later version). + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) + + +type _ kind = + | Ml : Parsetree.structure kind + | Mli : Parsetree.signature kind + +(** [read_ast kind ic] assume [ic] channel is + in the right position *) +let read_ast (type t ) (kind : t kind) ic : t = + let magic = + match kind with + | Ml -> Config.ast_impl_magic_number + | Mli -> Config.ast_intf_magic_number in + let buffer = really_input_string ic (String.length magic) in + assert(buffer = magic); (* already checked by apply_rewriter *) + Location.set_input_name @@ input_value ic; + input_value ic + +let write_ast (type t) (kind : t kind) + (fname : string) + (pt : t) oc = + let magic = + match kind with + | Ml -> Config.ast_impl_magic_number + | Mli -> Config.ast_intf_magic_number in + output_string oc magic ; + output_value oc fname; + output_value oc pt +end +module Ast_extract : sig +#1 "ast_extract.mli" +(* Copyright (C) 2015-2016 Bloomberg Finance L.P. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * In addition to the permissions granted to you by the LGPL, you may combine + * or link a "work that uses the Library" with a publicly distributed version + * of this file to produce a combined library or application, then distribute + * that combined work under the terms of your choosing, with no requirement + * to comply with the obligations normally placed on you by section 4 of the + * LGPL version 3 (or the corresponding section of a later version of the LGPL + * should you choose to use a later version). + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
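(* Round-trip sketch for [Ml_binary] above, assuming compiler-libs
   ([Parsetree], [Config], [Location]) are in scope as in this bundle:
   a binary ast file is laid out as magic number, source file name, then
   the marshalled AST. *)
let dump_impl ~source_name (ast : Parsetree.structure) out_file =
  let oc = open_out_bin out_file in
  Ml_binary.write_ast Ml_binary.Ml source_name ast oc;
  close_out oc

let load_impl in_file : Parsetree.structure =
  let ic = open_in_bin in_file in
  let ast = Ml_binary.read_ast Ml_binary.Ml ic in
  close_in ic;
  ast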
*) + + + + + + + + + +module Set_string = Depend.StringSet + +val read_parse_and_extract : 'a Ml_binary.kind -> 'a -> Set_string.t + +type ('a,'b) t + +(* val sort_files_by_dependencies : + domain:Set_string.t -> Set_string.t Map_string.t -> string Queue.t *) + + +val sort : + ('a -> Parsetree.structure) -> + ('b -> Parsetree.signature) -> + ('a, 'b) t Map_string.t -> string Queue.t + + + +(** + [build fmt files parse_implementation parse_interface] + Given a list of files return an ast table +*) +val collect_ast_map : + Format.formatter -> + string list -> + (Format.formatter -> string -> 'a) -> + (Format.formatter -> string -> 'b) -> + ('a, 'b) t Map_string.t + +type dir_spec = + { dir : string ; + mutable excludes : string list + } + +(** If the genereated queue is empty, it means + 1. The main module does not exist (does not exist due to typo) + 2. It does exist but not in search path + The order matters from head to tail +*) +val collect_from_main : + ?extra_dirs:dir_spec list -> + ?excludes : string list -> + ?alias_map: string Hash_string.t -> + Format.formatter -> + (Format.formatter -> string -> 'a) -> + (Format.formatter -> string -> 'b) -> + ('a -> Parsetree.structure) -> + ('b -> Parsetree.signature) -> + string -> ('a, 'b) t Map_string.t * string Queue.t + +val build_queue : + Format.formatter -> + string Queue.t -> + ('b, 'c) t Map_string.t -> + (Format.formatter -> string -> string -> 'b -> unit) -> + (Format.formatter -> string -> string -> 'c -> unit) -> unit + +val handle_queue : + string Queue.t -> + ('a, 'b) t Map_string.t -> + (string -> string -> 'a -> unit) -> + (string -> string -> 'b -> unit) -> + (string -> string -> string -> 'b -> 'a -> unit) -> unit + + +val build_lazy_queue : + Format.formatter -> + string Queue.t -> + (Parsetree.structure lazy_t, Parsetree.signature lazy_t) t Map_string.t -> + (Format.formatter -> string -> string -> Parsetree.structure -> unit) -> + (Format.formatter -> string -> string -> Parsetree.signature -> unit) -> unit + + + +end = struct +#1 "ast_extract.ml" +(* Copyright (C) 2015-2016 Bloomberg Finance L.P. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * In addition to the permissions granted to you by the LGPL, you may combine + * or link a "work that uses the Library" with a publicly distributed version + * of this file to produce a combined library or application, then distribute + * that combined work under the terms of your choosing, with no requirement + * to comply with the obligations normally placed on you by section 4 of the + * LGPL version 3 (or the corresponding section of a later version of the LGPL + * should you choose to use a later version). + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
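(* Hypothetical driver sketch for [collect_from_main] above: both parser
   hooks are built from compiler-libs' [Parse], the projection functions are
   the identity, and [main_module] is a module name such as "Main". The
   helper names here are illustrative, not part of this bundle. *)
let parse_with parser _ppf file =
  let ic = open_in_bin file in
  let lexbuf = Lexing.from_channel ic in
  Location.init lexbuf file;
  let ast = parser lexbuf in
  close_in ic;
  ast

let modules_reachable_from main_module src_dir =
  let _ast_table, queue =
    Ast_extract.collect_from_main
      ~extra_dirs:[ { Ast_extract.dir = src_dir; excludes = [] } ]
      Format.err_formatter
      (parse_with Parse.implementation)
      (parse_with Parse.interface)
      (fun ast -> ast)
      (fun ast -> ast)
      main_module
  in
  (* the queue is already in dependency order: dependencies come first *)
  List.rev (Queue.fold (fun acc m -> m :: acc) [] queue)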
*) + +(* type module_name = private string *) + +module Set_string = Depend.StringSet + +(* FIXME: [Clflags.open_modules] seems not to be properly used *) +module SMap = Depend.StringMap +let bound_vars = SMap.empty + + +type 'a kind = 'a Ml_binary.kind + + +let read_parse_and_extract (type t) (k : t kind) (ast : t) : Set_string.t = + Depend.free_structure_names := Set_string.empty; + Ext_ref.protect Clflags.transparent_modules false begin fun _ -> + List.iter (* check *) + (fun modname -> + ignore @@ + Depend.open_module bound_vars (Longident.Lident modname)) + (!Clflags.open_modules); + (match k with + | Ml_binary.Ml -> Depend.add_implementation bound_vars ast + | Ml_binary.Mli -> Depend.add_signature bound_vars ast ); + !Depend.free_structure_names + end + +type ('a,'b) ast_info = + | Ml of + string * (* sourcefile *) + 'a * + string (* opref *) + | Mli of string * (* sourcefile *) + 'b * + string (* opref *) + | Ml_mli of + string * (* sourcefile *) + 'a * + string * (* opref1 *) + string * (* sourcefile *) + 'b * + string (* opref2*) + +type ('a,'b) t = + { module_name : string ; ast_info : ('a,'b) ast_info } + + +(* only visit nodes that are currently in the domain *) +(* https://en.wikipedia.org/wiki/Topological_sorting *) +(* dfs *) +let sort_files_by_dependencies ~(domain : Set_string.t) (dependency_graph : Set_string.t Map_string.t) : + string Queue.t = + let next current = + Map_string.find_exn dependency_graph current in + let worklist = ref domain in + let result = Queue.create () in + let rec visit (visiting : Set_string.t) path (current : string) = + let next_path = current :: path in + if Set_string.mem current visiting then + Bs_exception.error (Bs_cyclic_depends next_path) + else if Set_string.mem current !worklist then + begin + let next_set = Set_string.add current visiting in + next current |> + Set_string.iter + (fun node -> + if Map_string.mem dependency_graph node then + visit next_set next_path node) + ; + worklist := Set_string.remove current !worklist; + Queue.push current result ; + end in + while not (Set_string.is_empty !worklist) do + visit Set_string.empty [] (Set_string.choose !worklist) + done; + if Js_config.get_diagnose () then + Format.fprintf Format.err_formatter + "Order: @[%a@]@." 
+ (Ext_format.pp_print_queue + ~pp_sep:Format.pp_print_space + Format.pp_print_string) + result ; + result +;; + + + +let sort project_ml project_mli (ast_table : _ t Map_string.t) = + let domain = + Map_string.fold ast_table Set_string.empty + (fun k _ acc -> Set_string.add k acc) + in + let h = + Map_string.map ast_table + (fun + ({ast_info}) + -> + match ast_info with + | Ml (_, ast, _) + -> + read_parse_and_extract Ml (project_ml ast) + | Mli (_, ast, _) + -> + read_parse_and_extract Mli (project_mli ast) + | Ml_mli (_, impl, _, _, intf, _) + -> + Set_string.union + (read_parse_and_extract Ml (project_ml impl)) + (read_parse_and_extract Mli (project_mli intf)) + ) in + sort_files_by_dependencies ~domain h + +(** same as {!Ocaml_parse.check_suffix} but does not care with [-c -o] option*) +let check_suffix name = + if Ext_path.check_suffix_case name ".ml" then + `Ml, + Ext_filename.chop_extension_maybe name + else if Ext_path.check_suffix_case name !Config.interface_suffix then + `Mli, Ext_filename.chop_extension_maybe name + else + raise(Arg.Bad("don't know what to do with " ^ name)) + + +let collect_ast_map ppf files parse_implementation parse_interface = + Ext_list.fold_left files Map_string.empty + (fun acc source_file -> + match check_suffix source_file with + | `Ml, opref -> + let module_name = Ext_filename.module_name source_file in + begin match Map_string.find_exn acc module_name with + | exception Not_found -> + Map_string.add acc module_name + {ast_info = + (Ml (source_file, parse_implementation + ppf source_file, opref)); + module_name ; + } + | {ast_info = (Ml (source_file2, _, _) + | Ml_mli(source_file2, _, _,_,_,_))} -> + Bs_exception.error + (Bs_duplicated_module (source_file, source_file2)) + | {ast_info = Mli (source_file2, intf, opref2)} + -> + Map_string.add acc module_name + {ast_info = + Ml_mli (source_file, + parse_implementation ppf source_file, + opref, + source_file2, + intf, + opref2 + ); + module_name} + end + | `Mli, opref -> + let module_name = Ext_filename.module_name source_file in + begin match Map_string.find_exn acc module_name with + | exception Not_found -> + Map_string.add acc module_name + {ast_info = (Mli (source_file, parse_interface + ppf source_file, opref)); + module_name } + | {ast_info = + (Mli (source_file2, _, _) | + Ml_mli(_,_,_,source_file2,_,_)) } -> + Bs_exception.error + (Bs_duplicated_module (source_file, source_file2)) + | {ast_info = Ml (source_file2, impl, opref2)} + -> + Map_string.add acc module_name + {ast_info = + Ml_mli + (source_file2, + impl, + opref2, + source_file, + parse_interface ppf source_file, + opref + ); + module_name} + end + ) +;; +type dir_spec = + { dir : string ; + mutable excludes : string list + } + +let collect_from_main + ?(extra_dirs=[]) + ?(excludes=[]) + ?alias_map + (ppf : Format.formatter) + parse_implementation + parse_interface + project_impl + project_intf + main_module = + let files = + Ext_list.fold_left extra_dirs [] (fun acc dir_spec -> + let dirname, excludes = + match dir_spec with + | { dir = dirname; excludes = dir_excludes} -> + (* dirname, excludes *) + (* | `Dir_with_excludes (dirname, dir_excludes) -> *) + dirname, + (Ext_list.flat_map_append + dir_excludes excludes + (fun x -> [x ^ ".ml" ; x ^ ".mli" ]) + ) + in + Ext_array.fold_left (Sys.readdir dirname) acc (fun acc source_file -> + if (Ext_string.ends_with source_file ".ml" || + Ext_string.ends_with source_file ".mli" ) + && (* not_excluded source_file *) (not (Ext_list.mem_string excludes source_file )) + then + (Filename.concat 
dirname source_file) :: acc else acc + ) ) + in + let ast_table = collect_ast_map ppf files parse_implementation parse_interface in + let visited = Hash_string.create 31 in + let result = Queue.create () in + let next module_name : Set_string.t = + let module_set = + match Map_string.find_exn ast_table module_name with + | exception _ -> Set_string.empty + | {ast_info = Ml (_, impl, _)} -> + read_parse_and_extract Ml (project_impl impl) + | {ast_info = Mli (_, intf,_)} -> + read_parse_and_extract Mli (project_intf intf) + | {ast_info = Ml_mli(_, impl, _, _, intf, _)} + -> + Set_string.union + (read_parse_and_extract Ml (project_impl impl)) + (read_parse_and_extract Mli (project_intf intf)) + in + match alias_map with + | None -> module_set + | Some map -> + Set_string.fold (fun x acc -> Set_string.add (Hash_string.find_default map x x) acc ) module_set Set_string.empty + in + let rec visit visiting path current = + if Set_string.mem current visiting then + Bs_exception.error (Bs_cyclic_depends (current::path)) + else + if not (Hash_string.mem visited current) + && Map_string.mem ast_table current then + begin + Set_string.iter + (visit + (Set_string.add current visiting) + (current::path)) + (next current) ; + Queue.push current result; + Hash_string.add visited current (); + end in + visit (Set_string.empty) [] main_module ; + ast_table, result + + +let build_queue ppf queue + (ast_table : _ t Map_string.t) + after_parsing_impl + after_parsing_sig + = + queue + |> Queue.iter + (fun modname -> + match Map_string.find_exn ast_table modname with + | {ast_info = Ml(source_file,ast, opref)} + -> + after_parsing_impl ppf source_file + opref ast + | {ast_info = Mli (source_file,ast,opref) ; } + -> + after_parsing_sig ppf source_file + opref ast + | {ast_info = Ml_mli(source_file1,impl,opref1,source_file2,intf,opref2)} + -> + after_parsing_sig ppf source_file1 opref1 intf ; + after_parsing_impl ppf source_file2 opref2 impl + | exception Not_found -> assert false + ) + + +let handle_queue + queue ast_table + decorate_module_only + decorate_interface_only + decorate_module = + queue + |> Queue.iter + (fun base -> + match (Map_string.find_exn ast_table base ).ast_info with + | exception Not_found -> assert false + | Ml (ml_name, ml_content, _) + -> + decorate_module_only base ml_name ml_content + | Mli (mli_name , mli_content, _) -> + decorate_interface_only base mli_name mli_content + | Ml_mli (ml_name, ml_content, _, mli_name, mli_content, _) + -> + decorate_module base mli_name ml_name mli_content ml_content + + ) + + + +let build_lazy_queue ppf queue (ast_table : _ t Map_string.t) + after_parsing_impl + after_parsing_sig + = + queue |> Queue.iter (fun modname -> + match Map_string.find_exn ast_table modname with + | {ast_info = Ml(source_file,lazy ast, opref)} + -> + after_parsing_impl ppf source_file opref ast + | {ast_info = Mli (source_file,lazy ast,opref) ; } + -> + after_parsing_sig ppf source_file opref ast + | {ast_info = Ml_mli(source_file1,lazy impl,opref1,source_file2,lazy intf,opref2)} + -> + after_parsing_sig ppf source_file1 opref1 intf ; + after_parsing_impl ppf source_file2 opref2 impl + | exception Not_found -> assert false + ) + + +end +module Ext_io : sig +#1 "ext_io.mli" +(* Copyright (C) 2015-2016 Bloomberg Finance L.P. 
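(* A self-contained mirror of the depth-first topological sort used by
   [sort_files_by_dependencies] above, written against the stdlib's
   [Set]/[Map] instead of the bundle's abstract ast table (illustrative
   only). Each module is emitted after everything it depends on. *)
module SS = Set.Make (String)
module SM = Map.Make (String)

let topo_sort (deps : SS.t SM.t) : string list =
  let worklist = ref (SM.fold (fun k _ acc -> SS.add k acc) deps SS.empty) in
  let result = Queue.create () in
  let rec visit visiting current =
    if SS.mem current visiting then failwith ("cyclic dependency at " ^ current)
    else if SS.mem current !worklist then begin
      SS.iter
        (fun dep -> if SM.mem dep deps then visit (SS.add current visiting) dep)
        (SM.find current deps);
      worklist := SS.remove current !worklist;
      Queue.push current result
    end
  in
  while not (SS.is_empty !worklist) do
    visit SS.empty (SS.choose !worklist)
  done;
  List.rev (Queue.fold (fun acc x -> x :: acc) [] result)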
+ * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * In addition to the permissions granted to you by the LGPL, you may combine + * or link a "work that uses the Library" with a publicly distributed version + * of this file to produce a combined library or application, then distribute + * that combined work under the terms of your choosing, with no requirement + * to comply with the obligations normally placed on you by section 4 of the + * LGPL version 3 (or the corresponding section of a later version of the LGPL + * should you choose to use a later version). + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) + +val load_file : string -> string + +val rev_lines_of_file : string -> string list + +val rev_lines_of_chann : in_channel -> string list + +val write_file : string -> string -> unit + +end = struct +#1 "ext_io.ml" +(* Copyright (C) 2015-2016 Bloomberg Finance L.P. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * In addition to the permissions granted to you by the LGPL, you may combine + * or link a "work that uses the Library" with a publicly distributed version + * of this file to produce a combined library or application, then distribute + * that combined work under the terms of your choosing, with no requirement + * to comply with the obligations normally placed on you by section 4 of the + * LGPL version 3 (or the corresponding section of a later version of the LGPL + * should you choose to use a later version). + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*) + + +(** on 32 bit , there are 16M limitation *) +let load_file f = + Ext_pervasives.finally (open_in_bin f) ~clean:close_in begin fun ic -> + let n = in_channel_length ic in + let s = Bytes.create n in + really_input ic s 0 n; + Bytes.unsafe_to_string s + end + + +let rev_lines_of_chann chan = + let rec loop acc chan = + match input_line chan with + | line -> loop (line :: acc) chan + | exception End_of_file -> close_in chan ; acc in + loop [] chan + + +let rev_lines_of_file file = + Ext_pervasives.finally + ~clean:close_in + (open_in_bin file) rev_lines_of_chann + + +let write_file f content = + Ext_pervasives.finally ~clean:close_out + (open_out_bin f) begin fun oc -> + output_string oc content + end + +end +module Docstrings : sig +#1 "docstrings.mli" +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Leo White *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Documentation comments *) + +(** (Re)Initialise all docstring state *) +val init : unit -> unit + +(** Emit warnings for unattached and ambiguous docstrings *) +val warn_bad_docstrings : unit -> unit + +(** {2 Docstrings} *) + +(** Documentation comments *) +type docstring + +(** Create a docstring *) +val docstring : string -> Location.t -> docstring + +(** Register a docstring *) +val register : docstring -> unit + +(** Get the text of a docstring *) +val docstring_body : docstring -> string + +(** Get the location of a docstring *) +val docstring_loc : docstring -> Location.t + +(** {2 Set functions} + + These functions are used by the lexer to associate docstrings to + the locations of tokens. *) + +(** Docstrings immediately preceding a token *) +val set_pre_docstrings : Lexing.position -> docstring list -> unit + +(** Docstrings immediately following a token *) +val set_post_docstrings : Lexing.position -> docstring list -> unit + +(** Docstrings not immediately adjacent to a token *) +val set_floating_docstrings : Lexing.position -> docstring list -> unit + +(** Docstrings immediately following the token which precedes this one *) +val set_pre_extra_docstrings : Lexing.position -> docstring list -> unit + +(** Docstrings immediately preceding the token which follows this one *) +val set_post_extra_docstrings : Lexing.position -> docstring list -> unit + +(** {2 Items} + + The {!docs} type represents documentation attached to an item. *) + +type docs = + { docs_pre: docstring option; + docs_post: docstring option; } + +val empty_docs : docs + +val docs_attr : docstring -> Parsetree.attribute + +(** Convert item documentation to attributes and add them to an + attribute list *) +val add_docs_attrs : docs -> Parsetree.attributes -> Parsetree.attributes + +(** Fetch the item documentation for the current symbol. This also + marks this documentation (for ambiguity warnings). *) +val symbol_docs : unit -> docs +val symbol_docs_lazy : unit -> docs Lazy.t + +(** Fetch the item documentation for the symbols between two + positions. This also marks this documentation (for ambiguity + warnings). 
*) +val rhs_docs : int -> int -> docs +val rhs_docs_lazy : int -> int -> docs Lazy.t + +(** Mark the item documentation for the current symbol (for ambiguity + warnings). *) +val mark_symbol_docs : unit -> unit + +(** Mark as associated the item documentation for the symbols between + two positions (for ambiguity warnings) *) +val mark_rhs_docs : int -> int -> unit + +(** {2 Fields and constructors} + + The {!info} type represents documentation attached to a field or + constructor. *) + +type info = docstring option + +val empty_info : info + +val info_attr : docstring -> Parsetree.attribute + +(** Convert field info to attributes and add them to an + attribute list *) +val add_info_attrs : info -> Parsetree.attributes -> Parsetree.attributes + +(** Fetch the field info for the current symbol. *) +val symbol_info : unit -> info + +(** Fetch the field info following the symbol at a given position. *) +val rhs_info : int -> info + +(** {2 Unattached comments} + + The {!text} type represents documentation which is not attached to + anything. *) + +type text = docstring list + +val empty_text : text +val empty_text_lazy : text Lazy.t + +val text_attr : docstring -> Parsetree.attribute + +(** Convert text to attributes and add them to an attribute list *) +val add_text_attrs : text -> Parsetree.attributes -> Parsetree.attributes + +(** Fetch the text preceding the current symbol. *) +val symbol_text : unit -> text +val symbol_text_lazy : unit -> text Lazy.t + +(** Fetch the text preceding the symbol at the given position. *) +val rhs_text : int -> text +val rhs_text_lazy : int -> text Lazy.t + +(** {2 Extra text} + + There may be additional text attached to the delimiters of a block + (e.g. [struct] and [end]). This is fetched by the following + functions, which are applied to the contents of the block rather + than the delimiters. *) + +(** Fetch additional text preceding the current symbol *) +val symbol_pre_extra_text : unit -> text + +(** Fetch additional text following the current symbol *) +val symbol_post_extra_text : unit -> text + +(** Fetch additional text preceding the symbol at the given position *) +val rhs_pre_extra_text : int -> text + +(** Fetch additional text following the symbol at the given position *) +val rhs_post_extra_text : int -> text + +end = struct +#1 "docstrings.ml" +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Leo White *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +open Location + +(* Docstrings *) + +(* A docstring is "attached" if it has been inserted in the AST. This + is used for generating unexpected docstring warnings. *) +type ds_attached = + | Unattached (* Not yet attached anything.*) + | Info (* Attached to a field or constructor. *) + | Docs (* Attached to an item or as floating text. *) + +(* A docstring is "associated" with an item if there are no blank lines between + them. This is used for generating docstring ambiguity warnings. 
*) +type ds_associated = + | Zero (* Not associated with an item *) + | One (* Associated with one item *) + | Many (* Associated with multiple items (ambiguity) *) + +type docstring = + { ds_body: string; + ds_loc: Location.t; + mutable ds_attached: ds_attached; + mutable ds_associated: ds_associated; } + +(* List of docstrings *) + +let docstrings : docstring list ref = ref [] + +(* Warn for unused and ambiguous docstrings *) + +let warn_bad_docstrings () = + if Warnings.is_active (Warnings.Bad_docstring true) then begin + List.iter + (fun ds -> + match ds.ds_attached with + | Info -> () + | Unattached -> + prerr_warning ds.ds_loc (Warnings.Bad_docstring true) + | Docs -> + match ds.ds_associated with + | Zero | One -> () + | Many -> + prerr_warning ds.ds_loc (Warnings.Bad_docstring false)) + (List.rev !docstrings) +end + +(* Docstring constructors and destructors *) + +let docstring body loc = + let ds = + { ds_body = body; + ds_loc = loc; + ds_attached = Unattached; + ds_associated = Zero; } + in + ds + +let register ds = + docstrings := ds :: !docstrings + +let docstring_body ds = ds.ds_body + +let docstring_loc ds = ds.ds_loc + +(* Docstrings attached to items *) + +type docs = + { docs_pre: docstring option; + docs_post: docstring option; } + +let empty_docs = { docs_pre = None; docs_post = None } + +let doc_loc = {txt = "ocaml.doc"; loc = Location.none} + +let docs_attr ds = + let open Parsetree in + let exp = + { pexp_desc = Pexp_constant (Pconst_string(ds.ds_body, None)); + pexp_loc = ds.ds_loc; + pexp_attributes = []; } + in + let item = + { pstr_desc = Pstr_eval (exp, []); pstr_loc = exp.pexp_loc } + in + (doc_loc, PStr [item]) + +let add_docs_attrs docs attrs = + let attrs = + match docs.docs_pre with + | None | Some { ds_body=""; _ } -> attrs + | Some ds -> docs_attr ds :: attrs + in + let attrs = + match docs.docs_post with + | None | Some { ds_body=""; _ } -> attrs + | Some ds -> attrs @ [docs_attr ds] + in + attrs + +(* Docstrings attached to constructors or fields *) + +type info = docstring option + +let empty_info = None + +let info_attr = docs_attr + +let add_info_attrs info attrs = + match info with + | None | Some {ds_body=""; _} -> attrs + | Some ds -> attrs @ [info_attr ds] + +(* Docstrings not attached to a specific item *) + +type text = docstring list + +let empty_text = [] +let empty_text_lazy = lazy [] + +let text_loc = {txt = "ocaml.text"; loc = Location.none} + +let text_attr ds = + let open Parsetree in + let exp = + { pexp_desc = Pexp_constant (Pconst_string(ds.ds_body, None)); + pexp_loc = ds.ds_loc; + pexp_attributes = []; } + in + let item = + { pstr_desc = Pstr_eval (exp, []); pstr_loc = exp.pexp_loc } + in + (text_loc, PStr [item]) + +let add_text_attrs dsl attrs = + let fdsl = List.filter (function {ds_body=""} -> false| _ ->true) dsl in + (List.map text_attr fdsl) @ attrs + +(* Find the first non-info docstring in a list, attach it and return it *) +let get_docstring ~info dsl = + let rec loop = function + | [] -> None + | {ds_attached = Info; _} :: rest -> loop rest + | ds :: _ -> + ds.ds_attached <- if info then Info else Docs; + Some ds + in + loop dsl + +(* Find all the non-info docstrings in a list, attach them and return them *) +let get_docstrings dsl = + let rec loop acc = function + | [] -> List.rev acc + | {ds_attached = Info; _} :: rest -> loop acc rest + | ds :: rest -> + ds.ds_attached <- Docs; + loop (ds :: acc) rest + in + loop [] dsl + +(* "Associate" all the docstrings in a list *) +let associate_docstrings dsl = + List.iter + (fun 
ds -> + match ds.ds_associated with + | Zero -> ds.ds_associated <- One + | (One | Many) -> ds.ds_associated <- Many) + dsl + +(* Map from positions to pre docstrings *) + +let pre_table : (Lexing.position, docstring list) Hashtbl.t = + Hashtbl.create 50 + +let set_pre_docstrings pos dsl = + if dsl <> [] then Hashtbl.add pre_table pos dsl + +let get_pre_docs pos = + try + let dsl = Hashtbl.find pre_table pos in + associate_docstrings dsl; + get_docstring ~info:false dsl + with Not_found -> None + +let mark_pre_docs pos = + try + let dsl = Hashtbl.find pre_table pos in + associate_docstrings dsl + with Not_found -> () + +(* Map from positions to post docstrings *) + +let post_table : (Lexing.position, docstring list) Hashtbl.t = + Hashtbl.create 50 + +let set_post_docstrings pos dsl = + if dsl <> [] then Hashtbl.add post_table pos dsl + +let get_post_docs pos = + try + let dsl = Hashtbl.find post_table pos in + associate_docstrings dsl; + get_docstring ~info:false dsl + with Not_found -> None + +let mark_post_docs pos = + try + let dsl = Hashtbl.find post_table pos in + associate_docstrings dsl + with Not_found -> () + +let get_info pos = + try + let dsl = Hashtbl.find post_table pos in + get_docstring ~info:true dsl + with Not_found -> None + +(* Map from positions to floating docstrings *) + +let floating_table : (Lexing.position, docstring list) Hashtbl.t = + Hashtbl.create 50 + +let set_floating_docstrings pos dsl = + if dsl <> [] then Hashtbl.add floating_table pos dsl + +let get_text pos = + try + let dsl = Hashtbl.find floating_table pos in + get_docstrings dsl + with Not_found -> [] + +(* Maps from positions to extra docstrings *) + +let pre_extra_table : (Lexing.position, docstring list) Hashtbl.t = + Hashtbl.create 50 + +let set_pre_extra_docstrings pos dsl = + if dsl <> [] then Hashtbl.add pre_extra_table pos dsl + +let get_pre_extra_text pos = + try + let dsl = Hashtbl.find pre_extra_table pos in + get_docstrings dsl + with Not_found -> [] + +let post_extra_table : (Lexing.position, docstring list) Hashtbl.t = + Hashtbl.create 50 + +let set_post_extra_docstrings pos dsl = + if dsl <> [] then Hashtbl.add post_extra_table pos dsl + +let get_post_extra_text pos = + try + let dsl = Hashtbl.find post_extra_table pos in + get_docstrings dsl + with Not_found -> [] + +(* Docstrings from parser actions *) + +let symbol_docs () = + { docs_pre = get_pre_docs (Parsing.symbol_start_pos ()); + docs_post = get_post_docs (Parsing.symbol_end_pos ()); } + +let symbol_docs_lazy () = + let p1 = Parsing.symbol_start_pos () in + let p2 = Parsing.symbol_end_pos () in + lazy { docs_pre = get_pre_docs p1; + docs_post = get_post_docs p2; } + +let rhs_docs pos1 pos2 = + { docs_pre = get_pre_docs (Parsing.rhs_start_pos pos1); + docs_post = get_post_docs (Parsing.rhs_end_pos pos2); } + +let rhs_docs_lazy pos1 pos2 = + let p1 = Parsing.rhs_start_pos pos1 in + let p2 = Parsing.rhs_end_pos pos2 in + lazy { docs_pre = get_pre_docs p1; + docs_post = get_post_docs p2; } + +let mark_symbol_docs () = + mark_pre_docs (Parsing.symbol_start_pos ()); + mark_post_docs (Parsing.symbol_end_pos ()) + +let mark_rhs_docs pos1 pos2 = + mark_pre_docs (Parsing.rhs_start_pos pos1); + mark_post_docs (Parsing.rhs_end_pos pos2) + +let symbol_info () = + get_info (Parsing.symbol_end_pos ()) + +let rhs_info pos = + get_info (Parsing.rhs_end_pos pos) + +let symbol_text () = + get_text (Parsing.symbol_start_pos ()) + +let symbol_text_lazy () = + let pos = Parsing.symbol_start_pos () in + lazy (get_text pos) + +let rhs_text pos = + 
get_text (Parsing.rhs_start_pos pos) + +let rhs_text_lazy pos = + let pos = Parsing.rhs_start_pos pos in + lazy (get_text pos) + +let symbol_pre_extra_text () = + get_pre_extra_text (Parsing.symbol_start_pos ()) + +let symbol_post_extra_text () = + get_post_extra_text (Parsing.symbol_end_pos ()) + +let rhs_pre_extra_text pos = + get_pre_extra_text (Parsing.rhs_start_pos pos) + +let rhs_post_extra_text pos = + get_post_extra_text (Parsing.rhs_end_pos pos) + + +(* (Re)Initialise all comment state *) + +let init () = + docstrings := []; + Hashtbl.reset pre_table; + Hashtbl.reset post_table; + Hashtbl.reset floating_table; + Hashtbl.reset pre_extra_table; + Hashtbl.reset post_extra_table + +end +module Syntaxerr : sig +#1 "syntaxerr.mli" +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1997 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Auxiliary type for reporting syntax errors *) + +open Format + +type error = + Unclosed of Location.t * string * Location.t * string + | Expecting of Location.t * string + | Not_expecting of Location.t * string + | Applicative_path of Location.t + | Variable_in_scope of Location.t * string + | Other of Location.t + | Ill_formed_ast of Location.t * string + | Invalid_package_type of Location.t * string + +exception Error of error +exception Escape_error + +val report_error: formatter -> error -> unit + (** @deprecated Use {!Location.error_of_exn}, {!Location.report_error}. *) + +val location_of_error: error -> Location.t +val ill_formed_ast: Location.t -> string -> 'a + +end = struct +#1 "syntaxerr.ml" +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1997 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Auxiliary type for reporting syntax errors *) + +type error = + Unclosed of Location.t * string * Location.t * string + | Expecting of Location.t * string + | Not_expecting of Location.t * string + | Applicative_path of Location.t + | Variable_in_scope of Location.t * string + | Other of Location.t + | Ill_formed_ast of Location.t * string + | Invalid_package_type of Location.t * string + +exception Error of error +exception Escape_error + +let prepare_error = function + | Unclosed(opening_loc, opening, closing_loc, closing) -> + Location.errorf ~loc:closing_loc + ~sub:[ + Location.errorf ~loc:opening_loc + "This '%s' might be unmatched" opening + ] + ~if_highlight: + (Printf.sprintf "Syntax error: '%s' expected, \ + the highlighted '%s' might be unmatched" + closing opening) + "Syntax error: '%s' expected" closing + + | Expecting (loc, nonterm) -> + Location.errorf ~loc "Syntax error: %s expected." 
nonterm + | Not_expecting (loc, nonterm) -> + Location.errorf ~loc "Syntax error: %s not expected." nonterm + | Applicative_path loc -> + Location.errorf ~loc + "Syntax error: applicative paths of the form F(X).t \ + are not supported when the option -no-app-func is set." + | Variable_in_scope (loc, var) -> + Location.errorf ~loc + "In this scoped type, variable '%s \ + is reserved for the local type %s." + var var + | Other loc -> + Location.errorf ~loc "Syntax error" + | Ill_formed_ast (loc, s) -> + Location.errorf ~loc "broken invariant in parsetree: %s" s + | Invalid_package_type (loc, s) -> + Location.errorf ~loc "invalid package type: %s" s + +let () = + Location.register_error_of_exn + (function + | Error err -> Some (prepare_error err) + | _ -> None + ) + + +let report_error ppf err = + Location.report_error ppf (prepare_error err) + +let location_of_error = function + | Unclosed(l,_,_,_) + | Applicative_path l + | Variable_in_scope(l,_) + | Other l + | Not_expecting (l, _) + | Ill_formed_ast (l, _) + | Invalid_package_type (l, _) + | Expecting (l, _) -> l + + +let ill_formed_ast loc s = + raise (Error (Ill_formed_ast (loc, s))) + +end +module Ast_helper : sig +#1 "ast_helper.mli" +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Alain Frisch, LexiFi *) +(* *) +(* Copyright 2012 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Helpers to produce Parsetree fragments *) + +open Asttypes +open Docstrings +open Parsetree + +type lid = Longident.t loc +type str = string loc +type loc = Location.t +type attrs = attribute list + +(** {1 Default locations} *) + +val default_loc: loc ref + (** Default value for all optional location arguments. *) + +val with_default_loc: loc -> (unit -> 'a) -> 'a + (** Set the [default_loc] within the scope of the execution + of the provided function. 
*) + +(** {1 Constants} *) + +module Const : sig + val char : char -> constant + val string : ?quotation_delimiter:string -> string -> constant + val integer : ?suffix:char -> string -> constant + val int : ?suffix:char -> int -> constant + val int32 : ?suffix:char -> int32 -> constant + val int64 : ?suffix:char -> int64 -> constant + val nativeint : ?suffix:char -> nativeint -> constant + val float : ?suffix:char -> string -> constant +end + +(** {1 Core language} *) + +(** Type expressions *) +module Typ : + sig + val mk: ?loc:loc -> ?attrs:attrs -> core_type_desc -> core_type + val attr: core_type -> attribute -> core_type + + val any: ?loc:loc -> ?attrs:attrs -> unit -> core_type + val var: ?loc:loc -> ?attrs:attrs -> string -> core_type + val arrow: ?loc:loc -> ?attrs:attrs -> arg_label -> core_type -> core_type + -> core_type + val tuple: ?loc:loc -> ?attrs:attrs -> core_type list -> core_type + val constr: ?loc:loc -> ?attrs:attrs -> lid -> core_type list -> core_type + val object_: ?loc:loc -> ?attrs:attrs -> object_field list + -> closed_flag -> core_type + val class_: ?loc:loc -> ?attrs:attrs -> lid -> core_type list -> core_type + val alias: ?loc:loc -> ?attrs:attrs -> core_type -> string -> core_type + val variant: ?loc:loc -> ?attrs:attrs -> row_field list -> closed_flag + -> label list option -> core_type + val poly: ?loc:loc -> ?attrs:attrs -> str list -> core_type -> core_type + val package: ?loc:loc -> ?attrs:attrs -> lid -> (lid * core_type) list + -> core_type + val extension: ?loc:loc -> ?attrs:attrs -> extension -> core_type + + val force_poly: core_type -> core_type + + val varify_constructors: str list -> core_type -> core_type + (** [varify_constructors newtypes te] is type expression [te], of which + any of nullary type constructor [tc] is replaced by type variable of + the same name, if [tc]'s name appears in [newtypes]. + Raise [Syntaxerr.Variable_in_scope] if any type variable inside [te] + appears in [newtypes]. 
+ @since 4.05 + *) + end + +(** Patterns *) +module Pat: + sig + val mk: ?loc:loc -> ?attrs:attrs -> pattern_desc -> pattern + val attr:pattern -> attribute -> pattern + + val any: ?loc:loc -> ?attrs:attrs -> unit -> pattern + val var: ?loc:loc -> ?attrs:attrs -> str -> pattern + val alias: ?loc:loc -> ?attrs:attrs -> pattern -> str -> pattern + val constant: ?loc:loc -> ?attrs:attrs -> constant -> pattern + val interval: ?loc:loc -> ?attrs:attrs -> constant -> constant -> pattern + val tuple: ?loc:loc -> ?attrs:attrs -> pattern list -> pattern + val construct: ?loc:loc -> ?attrs:attrs -> lid -> pattern option -> pattern + val variant: ?loc:loc -> ?attrs:attrs -> label -> pattern option -> pattern + val record: ?loc:loc -> ?attrs:attrs -> (lid * pattern) list -> closed_flag + -> pattern + val array: ?loc:loc -> ?attrs:attrs -> pattern list -> pattern + val or_: ?loc:loc -> ?attrs:attrs -> pattern -> pattern -> pattern + val constraint_: ?loc:loc -> ?attrs:attrs -> pattern -> core_type -> pattern + val type_: ?loc:loc -> ?attrs:attrs -> lid -> pattern + val lazy_: ?loc:loc -> ?attrs:attrs -> pattern -> pattern + val unpack: ?loc:loc -> ?attrs:attrs -> str -> pattern + val open_: ?loc:loc -> ?attrs:attrs -> lid -> pattern -> pattern + val exception_: ?loc:loc -> ?attrs:attrs -> pattern -> pattern + val extension: ?loc:loc -> ?attrs:attrs -> extension -> pattern + end + +(** Expressions *) +module Exp: + sig + val mk: ?loc:loc -> ?attrs:attrs -> expression_desc -> expression + val attr: expression -> attribute -> expression + + val ident: ?loc:loc -> ?attrs:attrs -> lid -> expression + val constant: ?loc:loc -> ?attrs:attrs -> constant -> expression + val let_: ?loc:loc -> ?attrs:attrs -> rec_flag -> value_binding list + -> expression -> expression + val fun_: ?loc:loc -> ?attrs:attrs -> arg_label -> expression option + -> pattern -> expression -> expression + val function_: ?loc:loc -> ?attrs:attrs -> case list -> expression + val apply: ?loc:loc -> ?attrs:attrs -> expression + -> (arg_label * expression) list -> expression + val match_: ?loc:loc -> ?attrs:attrs -> expression -> case list + -> expression + val try_: ?loc:loc -> ?attrs:attrs -> expression -> case list -> expression + val tuple: ?loc:loc -> ?attrs:attrs -> expression list -> expression + val construct: ?loc:loc -> ?attrs:attrs -> lid -> expression option + -> expression + val variant: ?loc:loc -> ?attrs:attrs -> label -> expression option + -> expression + val record: ?loc:loc -> ?attrs:attrs -> (lid * expression) list + -> expression option -> expression + val field: ?loc:loc -> ?attrs:attrs -> expression -> lid -> expression + val setfield: ?loc:loc -> ?attrs:attrs -> expression -> lid -> expression + -> expression + val array: ?loc:loc -> ?attrs:attrs -> expression list -> expression + val ifthenelse: ?loc:loc -> ?attrs:attrs -> expression -> expression + -> expression option -> expression + val sequence: ?loc:loc -> ?attrs:attrs -> expression -> expression + -> expression + val while_: ?loc:loc -> ?attrs:attrs -> expression -> expression + -> expression + val for_: ?loc:loc -> ?attrs:attrs -> pattern -> expression -> expression + -> direction_flag -> expression -> expression + val coerce: ?loc:loc -> ?attrs:attrs -> expression -> core_type option + -> core_type -> expression + val constraint_: ?loc:loc -> ?attrs:attrs -> expression -> core_type + -> expression + val send: ?loc:loc -> ?attrs:attrs -> expression -> str -> expression + val new_: ?loc:loc -> ?attrs:attrs -> lid -> expression + val setinstvar: ?loc:loc -> 
?attrs:attrs -> str -> expression -> expression + val override: ?loc:loc -> ?attrs:attrs -> (str * expression) list + -> expression + val letmodule: ?loc:loc -> ?attrs:attrs -> str -> module_expr -> expression + -> expression + val letexception: + ?loc:loc -> ?attrs:attrs -> extension_constructor -> expression + -> expression + val assert_: ?loc:loc -> ?attrs:attrs -> expression -> expression + val lazy_: ?loc:loc -> ?attrs:attrs -> expression -> expression + val poly: ?loc:loc -> ?attrs:attrs -> expression -> core_type option + -> expression + val object_: ?loc:loc -> ?attrs:attrs -> class_structure -> expression + val newtype: ?loc:loc -> ?attrs:attrs -> str -> expression -> expression + val pack: ?loc:loc -> ?attrs:attrs -> module_expr -> expression + val open_: ?loc:loc -> ?attrs:attrs -> override_flag -> lid -> expression + -> expression + val extension: ?loc:loc -> ?attrs:attrs -> extension -> expression + val unreachable: ?loc:loc -> ?attrs:attrs -> unit -> expression + + val case: pattern -> ?guard:expression -> expression -> case + end + +(** Value declarations *) +module Val: + sig + val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> + ?prim:string list -> str -> core_type -> value_description + end + +(** Type declarations *) +module Type: + sig + val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?text:text -> + ?params:(core_type * variance) list -> + ?cstrs:(core_type * core_type * loc) list -> + ?kind:type_kind -> ?priv:private_flag -> ?manifest:core_type -> str -> + type_declaration + + val constructor: ?loc:loc -> ?attrs:attrs -> ?info:info -> + ?args:constructor_arguments -> ?res:core_type -> str -> + constructor_declaration + val field: ?loc:loc -> ?attrs:attrs -> ?info:info -> + ?mut:mutable_flag -> str -> core_type -> label_declaration + end + +(** Type extensions *) +module Te: + sig + val mk: ?attrs:attrs -> ?docs:docs -> + ?params:(core_type * variance) list -> ?priv:private_flag -> + lid -> extension_constructor list -> type_extension + + val constructor: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?info:info -> + str -> extension_constructor_kind -> extension_constructor + + val decl: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?info:info -> + ?args:constructor_arguments -> ?res:core_type -> str -> + extension_constructor + val rebind: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?info:info -> + str -> lid -> extension_constructor + end + +(** {1 Module language} *) + +(** Module type expressions *) +module Mty: + sig + val mk: ?loc:loc -> ?attrs:attrs -> module_type_desc -> module_type + val attr: module_type -> attribute -> module_type + + val ident: ?loc:loc -> ?attrs:attrs -> lid -> module_type + val alias: ?loc:loc -> ?attrs:attrs -> lid -> module_type + val signature: ?loc:loc -> ?attrs:attrs -> signature -> module_type + val functor_: ?loc:loc -> ?attrs:attrs -> + str -> module_type option -> module_type -> module_type + val with_: ?loc:loc -> ?attrs:attrs -> module_type -> + with_constraint list -> module_type + val typeof_: ?loc:loc -> ?attrs:attrs -> module_expr -> module_type + val extension: ?loc:loc -> ?attrs:attrs -> extension -> module_type + end + +(** Module expressions *) +module Mod: + sig + val mk: ?loc:loc -> ?attrs:attrs -> module_expr_desc -> module_expr + val attr: module_expr -> attribute -> module_expr + + val ident: ?loc:loc -> ?attrs:attrs -> lid -> module_expr + val structure: ?loc:loc -> ?attrs:attrs -> structure -> module_expr + val functor_: ?loc:loc -> ?attrs:attrs -> + str -> module_type option -> module_expr -> module_expr + val apply: 
?loc:loc -> ?attrs:attrs -> module_expr -> module_expr -> + module_expr + val constraint_: ?loc:loc -> ?attrs:attrs -> module_expr -> module_type -> + module_expr + val unpack: ?loc:loc -> ?attrs:attrs -> expression -> module_expr + val extension: ?loc:loc -> ?attrs:attrs -> extension -> module_expr + end + +(** Signature items *) +module Sig: + sig + val mk: ?loc:loc -> signature_item_desc -> signature_item + + val value: ?loc:loc -> value_description -> signature_item + val type_: ?loc:loc -> rec_flag -> type_declaration list -> signature_item + val type_extension: ?loc:loc -> type_extension -> signature_item + val exception_: ?loc:loc -> extension_constructor -> signature_item + val module_: ?loc:loc -> module_declaration -> signature_item + val rec_module: ?loc:loc -> module_declaration list -> signature_item + val modtype: ?loc:loc -> module_type_declaration -> signature_item + val open_: ?loc:loc -> open_description -> signature_item + val include_: ?loc:loc -> include_description -> signature_item + val class_: ?loc:loc -> class_description list -> signature_item + val class_type: ?loc:loc -> class_type_declaration list -> signature_item + val extension: ?loc:loc -> ?attrs:attrs -> extension -> signature_item + val attribute: ?loc:loc -> attribute -> signature_item + val text: text -> signature_item list + end + +(** Structure items *) +module Str: + sig + val mk: ?loc:loc -> structure_item_desc -> structure_item + + val eval: ?loc:loc -> ?attrs:attributes -> expression -> structure_item + val value: ?loc:loc -> rec_flag -> value_binding list -> structure_item + val primitive: ?loc:loc -> value_description -> structure_item + val type_: ?loc:loc -> rec_flag -> type_declaration list -> structure_item + val type_extension: ?loc:loc -> type_extension -> structure_item + val exception_: ?loc:loc -> extension_constructor -> structure_item + val module_: ?loc:loc -> module_binding -> structure_item + val rec_module: ?loc:loc -> module_binding list -> structure_item + val modtype: ?loc:loc -> module_type_declaration -> structure_item + val open_: ?loc:loc -> open_description -> structure_item + val class_: ?loc:loc -> class_declaration list -> structure_item + val class_type: ?loc:loc -> class_type_declaration list -> structure_item + val include_: ?loc:loc -> include_declaration -> structure_item + val extension: ?loc:loc -> ?attrs:attrs -> extension -> structure_item + val attribute: ?loc:loc -> attribute -> structure_item + val text: text -> structure_item list + end + +(** Module declarations *) +module Md: + sig + val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?text:text -> + str -> module_type -> module_declaration + end + +(** Module type declarations *) +module Mtd: + sig + val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?text:text -> + ?typ:module_type -> str -> module_type_declaration + end + +(** Module bindings *) +module Mb: + sig + val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?text:text -> + str -> module_expr -> module_binding + end + +(** Opens *) +module Opn: + sig + val mk: ?loc: loc -> ?attrs:attrs -> ?docs:docs -> + ?override:override_flag -> lid -> open_description + end + +(** Includes *) +module Incl: + sig + val mk: ?loc: loc -> ?attrs:attrs -> ?docs:docs -> 'a -> 'a include_infos + end + +(** Value bindings *) +module Vb: + sig + val mk: ?loc: loc -> ?attrs:attrs -> ?docs:docs -> ?text:text -> + pattern -> expression -> value_binding + end + + +(** {1 Class language} *) + +(** Class type expressions *) +module Cty: + sig + val mk: ?loc:loc -> ?attrs:attrs 
-> class_type_desc -> class_type + val attr: class_type -> attribute -> class_type + + val constr: ?loc:loc -> ?attrs:attrs -> lid -> core_type list -> class_type + val signature: ?loc:loc -> ?attrs:attrs -> class_signature -> class_type + val arrow: ?loc:loc -> ?attrs:attrs -> arg_label -> core_type -> + class_type -> class_type + val extension: ?loc:loc -> ?attrs:attrs -> extension -> class_type + val open_: ?loc:loc -> ?attrs:attrs -> override_flag -> lid -> class_type + -> class_type + end + +(** Class type fields *) +module Ctf: + sig + val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> + class_type_field_desc -> class_type_field + val attr: class_type_field -> attribute -> class_type_field + + val inherit_: ?loc:loc -> ?attrs:attrs -> class_type -> class_type_field + val val_: ?loc:loc -> ?attrs:attrs -> str -> mutable_flag -> + virtual_flag -> core_type -> class_type_field + val method_: ?loc:loc -> ?attrs:attrs -> str -> private_flag -> + virtual_flag -> core_type -> class_type_field + val constraint_: ?loc:loc -> ?attrs:attrs -> core_type -> core_type -> + class_type_field + val extension: ?loc:loc -> ?attrs:attrs -> extension -> class_type_field + val attribute: ?loc:loc -> attribute -> class_type_field + val text: text -> class_type_field list + end + +(** Class expressions *) +module Cl: + sig + val mk: ?loc:loc -> ?attrs:attrs -> class_expr_desc -> class_expr + val attr: class_expr -> attribute -> class_expr + + val constr: ?loc:loc -> ?attrs:attrs -> lid -> core_type list -> class_expr + val structure: ?loc:loc -> ?attrs:attrs -> class_structure -> class_expr + val fun_: ?loc:loc -> ?attrs:attrs -> arg_label -> expression option -> + pattern -> class_expr -> class_expr + val apply: ?loc:loc -> ?attrs:attrs -> class_expr -> + (arg_label * expression) list -> class_expr + val let_: ?loc:loc -> ?attrs:attrs -> rec_flag -> value_binding list -> + class_expr -> class_expr + val constraint_: ?loc:loc -> ?attrs:attrs -> class_expr -> class_type -> + class_expr + val extension: ?loc:loc -> ?attrs:attrs -> extension -> class_expr + val open_: ?loc:loc -> ?attrs:attrs -> override_flag -> lid -> class_expr + -> class_expr + end + +(** Class fields *) +module Cf: + sig + val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> class_field_desc -> + class_field + val attr: class_field -> attribute -> class_field + + val inherit_: ?loc:loc -> ?attrs:attrs -> override_flag -> class_expr -> + str option -> class_field + val val_: ?loc:loc -> ?attrs:attrs -> str -> mutable_flag -> + class_field_kind -> class_field + val method_: ?loc:loc -> ?attrs:attrs -> str -> private_flag -> + class_field_kind -> class_field + val constraint_: ?loc:loc -> ?attrs:attrs -> core_type -> core_type -> + class_field + val initializer_: ?loc:loc -> ?attrs:attrs -> expression -> class_field + val extension: ?loc:loc -> ?attrs:attrs -> extension -> class_field + val attribute: ?loc:loc -> attribute -> class_field + val text: text -> class_field list + + val virtual_: core_type -> class_field_kind + val concrete: override_flag -> expression -> class_field_kind + + end + +(** Classes *) +module Ci: + sig + val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?text:text -> + ?virt:virtual_flag -> ?params:(core_type * variance) list -> + str -> 'a -> 'a class_infos + end + +(** Class signatures *) +module Csig: + sig + val mk: core_type -> class_type_field list -> class_signature + end + +(** Class structures *) +module Cstr: + sig + val mk: pattern -> class_field list -> class_structure + end + +end = struct +#1 "ast_helper.ml" 
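+(* A minimal usage sketch for the [Ast_helper] interface above, assuming the
+   4.06-era [Parsetree] bundled in this file: it builds the expression
+   [1 + 2] with default (ghost) locations.  The operator name ["+"] and the
+   use of [Location.mknoloc] are illustrative choices by the editor, not part
+   of the vendored API itself.
+
+   {[
+     open Ast_helper
+
+     let one  = Exp.constant (Const.int 1)
+     let two  = Exp.constant (Const.int 2)
+     let plus = Exp.ident (Location.mknoloc (Longident.Lident "+"))
+
+     (* [Exp.apply] takes the function expression and its labelled arguments *)
+     let sum  = Exp.apply plus [(Asttypes.Nolabel, one); (Asttypes.Nolabel, two)]
+   ]}
+*)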
+(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Alain Frisch, LexiFi *) +(* *) +(* Copyright 2012 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(** Helpers to produce Parsetree fragments *) + +open Asttypes +open Parsetree +open Docstrings + +type lid = Longident.t loc +type str = string loc +type loc = Location.t +type attrs = attribute list + +let default_loc = ref Location.none + +let with_default_loc l f = + let old = !default_loc in + default_loc := l; + try let r = f () in default_loc := old; r + with exn -> default_loc := old; raise exn + +module Const = struct + let integer ?suffix i = Pconst_integer (i, suffix) + let int ?suffix i = integer ?suffix (string_of_int i) + let int32 ?(suffix='l') i = integer ~suffix (Int32.to_string i) + let int64 ?(suffix='L') i = integer ~suffix (Int64.to_string i) + let nativeint ?(suffix='n') i = integer ~suffix (Nativeint.to_string i) + let float ?suffix f = Pconst_float (f, suffix) + let char c = Pconst_char c + let string ?quotation_delimiter s = Pconst_string (s, quotation_delimiter) +end + +module Typ = struct + let mk ?(loc = !default_loc) ?(attrs = []) d = + {ptyp_desc = d; ptyp_loc = loc; ptyp_attributes = attrs} + let attr d a = {d with ptyp_attributes = d.ptyp_attributes @ [a]} + + let any ?loc ?attrs () = mk ?loc ?attrs Ptyp_any + let var ?loc ?attrs a = mk ?loc ?attrs (Ptyp_var a) + let arrow ?loc ?attrs a b c = mk ?loc ?attrs (Ptyp_arrow (a, b, c)) + let tuple ?loc ?attrs a = mk ?loc ?attrs (Ptyp_tuple a) + let constr ?loc ?attrs a b = mk ?loc ?attrs (Ptyp_constr (a, b)) + let object_ ?loc ?attrs a b = mk ?loc ?attrs (Ptyp_object (a, b)) + let class_ ?loc ?attrs a b = mk ?loc ?attrs (Ptyp_class (a, b)) + let alias ?loc ?attrs a b = mk ?loc ?attrs (Ptyp_alias (a, b)) + let variant ?loc ?attrs a b c = mk ?loc ?attrs (Ptyp_variant (a, b, c)) + let poly ?loc ?attrs a b = mk ?loc ?attrs (Ptyp_poly (a, b)) + let package ?loc ?attrs a b = mk ?loc ?attrs (Ptyp_package (a, b)) + let extension ?loc ?attrs a = mk ?loc ?attrs (Ptyp_extension a) + + let force_poly t = + match t.ptyp_desc with + | Ptyp_poly _ -> t + | _ -> poly ~loc:t.ptyp_loc [] t (* -> ghost? 
*) + + let varify_constructors var_names t = + let check_variable vl loc v = + if List.mem v vl then + raise Syntaxerr.(Error(Variable_in_scope(loc,v))) in + let var_names = List.map (fun v -> v.txt) var_names in + let rec loop t = + let desc = + match t.ptyp_desc with + | Ptyp_any -> Ptyp_any + | Ptyp_var x -> + check_variable var_names t.ptyp_loc x; + Ptyp_var x + | Ptyp_arrow (label,core_type,core_type') -> + Ptyp_arrow(label, loop core_type, loop core_type') + | Ptyp_tuple lst -> Ptyp_tuple (List.map loop lst) + | Ptyp_constr( { txt = Longident.Lident s }, []) + when List.mem s var_names -> + Ptyp_var s + | Ptyp_constr(longident, lst) -> + Ptyp_constr(longident, List.map loop lst) + | Ptyp_object (lst, o) -> + Ptyp_object (List.map loop_object_field lst, o) + | Ptyp_class (longident, lst) -> + Ptyp_class (longident, List.map loop lst) + | Ptyp_alias(core_type, string) -> + check_variable var_names t.ptyp_loc string; + Ptyp_alias(loop core_type, string) + | Ptyp_variant(row_field_list, flag, lbl_lst_option) -> + Ptyp_variant(List.map loop_row_field row_field_list, + flag, lbl_lst_option) + | Ptyp_poly(string_lst, core_type) -> + List.iter (fun v -> + check_variable var_names t.ptyp_loc v.txt) string_lst; + Ptyp_poly(string_lst, loop core_type) + | Ptyp_package(longident,lst) -> + Ptyp_package(longident,List.map (fun (n,typ) -> (n,loop typ) ) lst) + | Ptyp_extension (s, arg) -> + Ptyp_extension (s, arg) + in + {t with ptyp_desc = desc} + and loop_row_field = + function + | Rtag(label,attrs,flag,lst) -> + Rtag(label,attrs,flag,List.map loop lst) + | Rinherit t -> + Rinherit (loop t) + and loop_object_field = + function + | Otag(label, attrs, t) -> + Otag(label, attrs, loop t) + | Oinherit t -> + Oinherit (loop t) + in + loop t + +end + +module Pat = struct + let mk ?(loc = !default_loc) ?(attrs = []) d = + {ppat_desc = d; ppat_loc = loc; ppat_attributes = attrs} + let attr d a = {d with ppat_attributes = d.ppat_attributes @ [a]} + + let any ?loc ?attrs () = mk ?loc ?attrs Ppat_any + let var ?loc ?attrs a = mk ?loc ?attrs (Ppat_var a) + let alias ?loc ?attrs a b = mk ?loc ?attrs (Ppat_alias (a, b)) + let constant ?loc ?attrs a = mk ?loc ?attrs (Ppat_constant a) + let interval ?loc ?attrs a b = mk ?loc ?attrs (Ppat_interval (a, b)) + let tuple ?loc ?attrs a = mk ?loc ?attrs (Ppat_tuple a) + let construct ?loc ?attrs a b = mk ?loc ?attrs (Ppat_construct (a, b)) + let variant ?loc ?attrs a b = mk ?loc ?attrs (Ppat_variant (a, b)) + let record ?loc ?attrs a b = mk ?loc ?attrs (Ppat_record (a, b)) + let array ?loc ?attrs a = mk ?loc ?attrs (Ppat_array a) + let or_ ?loc ?attrs a b = mk ?loc ?attrs (Ppat_or (a, b)) + let constraint_ ?loc ?attrs a b = mk ?loc ?attrs (Ppat_constraint (a, b)) + let type_ ?loc ?attrs a = mk ?loc ?attrs (Ppat_type a) + let lazy_ ?loc ?attrs a = mk ?loc ?attrs (Ppat_lazy a) + let unpack ?loc ?attrs a = mk ?loc ?attrs (Ppat_unpack a) + let open_ ?loc ?attrs a b = mk ?loc ?attrs (Ppat_open (a, b)) + let exception_ ?loc ?attrs a = mk ?loc ?attrs (Ppat_exception a) + let extension ?loc ?attrs a = mk ?loc ?attrs (Ppat_extension a) +end + +module Exp = struct + let mk ?(loc = !default_loc) ?(attrs = []) d = + {pexp_desc = d; pexp_loc = loc; pexp_attributes = attrs} + let attr d a = {d with pexp_attributes = d.pexp_attributes @ [a]} + + let ident ?loc ?attrs a = mk ?loc ?attrs (Pexp_ident a) + let constant ?loc ?attrs a = mk ?loc ?attrs (Pexp_constant a) + let let_ ?loc ?attrs a b c = mk ?loc ?attrs (Pexp_let (a, b, c)) + let fun_ ?loc ?attrs a b c d = mk ?loc ?attrs 
(Pexp_fun (a, b, c, d)) + let function_ ?loc ?attrs a = mk ?loc ?attrs (Pexp_function a) + let apply ?loc ?attrs a b = mk ?loc ?attrs (Pexp_apply (a, b)) + let match_ ?loc ?attrs a b = mk ?loc ?attrs (Pexp_match (a, b)) + let try_ ?loc ?attrs a b = mk ?loc ?attrs (Pexp_try (a, b)) + let tuple ?loc ?attrs a = mk ?loc ?attrs (Pexp_tuple a) + let construct ?loc ?attrs a b = mk ?loc ?attrs (Pexp_construct (a, b)) + let variant ?loc ?attrs a b = mk ?loc ?attrs (Pexp_variant (a, b)) + let record ?loc ?attrs a b = mk ?loc ?attrs (Pexp_record (a, b)) + let field ?loc ?attrs a b = mk ?loc ?attrs (Pexp_field (a, b)) + let setfield ?loc ?attrs a b c = mk ?loc ?attrs (Pexp_setfield (a, b, c)) + let array ?loc ?attrs a = mk ?loc ?attrs (Pexp_array a) + let ifthenelse ?loc ?attrs a b c = mk ?loc ?attrs (Pexp_ifthenelse (a, b, c)) + let sequence ?loc ?attrs a b = mk ?loc ?attrs (Pexp_sequence (a, b)) + let while_ ?loc ?attrs a b = mk ?loc ?attrs (Pexp_while (a, b)) + let for_ ?loc ?attrs a b c d e = mk ?loc ?attrs (Pexp_for (a, b, c, d, e)) + let constraint_ ?loc ?attrs a b = mk ?loc ?attrs (Pexp_constraint (a, b)) + let coerce ?loc ?attrs a b c = mk ?loc ?attrs (Pexp_coerce (a, b, c)) + let send ?loc ?attrs a b = mk ?loc ?attrs (Pexp_send (a, b)) + let new_ ?loc ?attrs a = mk ?loc ?attrs (Pexp_new a) + let setinstvar ?loc ?attrs a b = mk ?loc ?attrs (Pexp_setinstvar (a, b)) + let override ?loc ?attrs a = mk ?loc ?attrs (Pexp_override a) + let letmodule ?loc ?attrs a b c= mk ?loc ?attrs (Pexp_letmodule (a, b, c)) + let letexception ?loc ?attrs a b = mk ?loc ?attrs (Pexp_letexception (a, b)) + let assert_ ?loc ?attrs a = mk ?loc ?attrs (Pexp_assert a) + let lazy_ ?loc ?attrs a = mk ?loc ?attrs (Pexp_lazy a) + let poly ?loc ?attrs a b = mk ?loc ?attrs (Pexp_poly (a, b)) + let object_ ?loc ?attrs a = mk ?loc ?attrs (Pexp_object a) + let newtype ?loc ?attrs a b = mk ?loc ?attrs (Pexp_newtype (a, b)) + let pack ?loc ?attrs a = mk ?loc ?attrs (Pexp_pack a) + let open_ ?loc ?attrs a b c = mk ?loc ?attrs (Pexp_open (a, b, c)) + let extension ?loc ?attrs a = mk ?loc ?attrs (Pexp_extension a) + let unreachable ?loc ?attrs () = mk ?loc ?attrs Pexp_unreachable + + let case lhs ?guard rhs = + { + pc_lhs = lhs; + pc_guard = guard; + pc_rhs = rhs; + } +end + +module Mty = struct + let mk ?(loc = !default_loc) ?(attrs = []) d = + {pmty_desc = d; pmty_loc = loc; pmty_attributes = attrs} + let attr d a = {d with pmty_attributes = d.pmty_attributes @ [a]} + + let ident ?loc ?attrs a = mk ?loc ?attrs (Pmty_ident a) + let alias ?loc ?attrs a = mk ?loc ?attrs (Pmty_alias a) + let signature ?loc ?attrs a = mk ?loc ?attrs (Pmty_signature a) + let functor_ ?loc ?attrs a b c = mk ?loc ?attrs (Pmty_functor (a, b, c)) + let with_ ?loc ?attrs a b = mk ?loc ?attrs (Pmty_with (a, b)) + let typeof_ ?loc ?attrs a = mk ?loc ?attrs (Pmty_typeof a) + let extension ?loc ?attrs a = mk ?loc ?attrs (Pmty_extension a) +end + +module Mod = struct +let mk ?(loc = !default_loc) ?(attrs = []) d = + {pmod_desc = d; pmod_loc = loc; pmod_attributes = attrs} + let attr d a = {d with pmod_attributes = d.pmod_attributes @ [a]} + + let ident ?loc ?attrs x = mk ?loc ?attrs (Pmod_ident x) + let structure ?loc ?attrs x = mk ?loc ?attrs (Pmod_structure x) + let functor_ ?loc ?attrs arg arg_ty body = + mk ?loc ?attrs (Pmod_functor (arg, arg_ty, body)) + let apply ?loc ?attrs m1 m2 = mk ?loc ?attrs (Pmod_apply (m1, m2)) + let constraint_ ?loc ?attrs m mty = mk ?loc ?attrs (Pmod_constraint (m, mty)) + let unpack ?loc ?attrs e = mk ?loc ?attrs (Pmod_unpack e) + 
let extension ?loc ?attrs a = mk ?loc ?attrs (Pmod_extension a) +end + +module Sig = struct + let mk ?(loc = !default_loc) d = {psig_desc = d; psig_loc = loc} + + let value ?loc a = mk ?loc (Psig_value a) + let type_ ?loc rec_flag a = mk ?loc (Psig_type (rec_flag, a)) + let type_extension ?loc a = mk ?loc (Psig_typext a) + let exception_ ?loc a = mk ?loc (Psig_exception a) + let module_ ?loc a = mk ?loc (Psig_module a) + let rec_module ?loc a = mk ?loc (Psig_recmodule a) + let modtype ?loc a = mk ?loc (Psig_modtype a) + let open_ ?loc a = mk ?loc (Psig_open a) + let include_ ?loc a = mk ?loc (Psig_include a) + let class_ ?loc a = mk ?loc (Psig_class a) + let class_type ?loc a = mk ?loc (Psig_class_type a) + let extension ?loc ?(attrs = []) a = mk ?loc (Psig_extension (a, attrs)) + let attribute ?loc a = mk ?loc (Psig_attribute a) + let text txt = + let f_txt = List.filter (fun ds -> docstring_body ds <> "") txt in + List.map + (fun ds -> attribute ~loc:(docstring_loc ds) (text_attr ds)) + f_txt +end + +module Str = struct + let mk ?(loc = !default_loc) d = {pstr_desc = d; pstr_loc = loc} + + let eval ?loc ?(attrs = []) a = mk ?loc (Pstr_eval (a, attrs)) + let value ?loc a b = mk ?loc (Pstr_value (a, b)) + let primitive ?loc a = mk ?loc (Pstr_primitive a) + let type_ ?loc rec_flag a = mk ?loc (Pstr_type (rec_flag, a)) + let type_extension ?loc a = mk ?loc (Pstr_typext a) + let exception_ ?loc a = mk ?loc (Pstr_exception a) + let module_ ?loc a = mk ?loc (Pstr_module a) + let rec_module ?loc a = mk ?loc (Pstr_recmodule a) + let modtype ?loc a = mk ?loc (Pstr_modtype a) + let open_ ?loc a = mk ?loc (Pstr_open a) + let class_ ?loc a = mk ?loc (Pstr_class a) + let class_type ?loc a = mk ?loc (Pstr_class_type a) + let include_ ?loc a = mk ?loc (Pstr_include a) + let extension ?loc ?(attrs = []) a = mk ?loc (Pstr_extension (a, attrs)) + let attribute ?loc a = mk ?loc (Pstr_attribute a) + let text txt = + let f_txt = List.filter (fun ds -> docstring_body ds <> "") txt in + List.map + (fun ds -> attribute ~loc:(docstring_loc ds) (text_attr ds)) + f_txt +end + +module Cl = struct + let mk ?(loc = !default_loc) ?(attrs = []) d = + { + pcl_desc = d; + pcl_loc = loc; + pcl_attributes = attrs; + } + let attr d a = {d with pcl_attributes = d.pcl_attributes @ [a]} + + let constr ?loc ?attrs a b = mk ?loc ?attrs (Pcl_constr (a, b)) + let structure ?loc ?attrs a = mk ?loc ?attrs (Pcl_structure a) + let fun_ ?loc ?attrs a b c d = mk ?loc ?attrs (Pcl_fun (a, b, c, d)) + let apply ?loc ?attrs a b = mk ?loc ?attrs (Pcl_apply (a, b)) + let let_ ?loc ?attrs a b c = mk ?loc ?attrs (Pcl_let (a, b, c)) + let constraint_ ?loc ?attrs a b = mk ?loc ?attrs (Pcl_constraint (a, b)) + let extension ?loc ?attrs a = mk ?loc ?attrs (Pcl_extension a) + let open_ ?loc ?attrs a b c = mk ?loc ?attrs (Pcl_open (a, b, c)) +end + +module Cty = struct + let mk ?(loc = !default_loc) ?(attrs = []) d = + { + pcty_desc = d; + pcty_loc = loc; + pcty_attributes = attrs; + } + let attr d a = {d with pcty_attributes = d.pcty_attributes @ [a]} + + let constr ?loc ?attrs a b = mk ?loc ?attrs (Pcty_constr (a, b)) + let signature ?loc ?attrs a = mk ?loc ?attrs (Pcty_signature a) + let arrow ?loc ?attrs a b c = mk ?loc ?attrs (Pcty_arrow (a, b, c)) + let extension ?loc ?attrs a = mk ?loc ?attrs (Pcty_extension a) + let open_ ?loc ?attrs a b c = mk ?loc ?attrs (Pcty_open (a, b, c)) +end + +module Ctf = struct + let mk ?(loc = !default_loc) ?(attrs = []) + ?(docs = empty_docs) d = + { + pctf_desc = d; + pctf_loc = loc; + pctf_attributes = 
add_docs_attrs docs attrs; + } + + let inherit_ ?loc ?attrs a = mk ?loc ?attrs (Pctf_inherit a) + let val_ ?loc ?attrs a b c d = mk ?loc ?attrs (Pctf_val (a, b, c, d)) + let method_ ?loc ?attrs a b c d = mk ?loc ?attrs (Pctf_method (a, b, c, d)) + let constraint_ ?loc ?attrs a b = mk ?loc ?attrs (Pctf_constraint (a, b)) + let extension ?loc ?attrs a = mk ?loc ?attrs (Pctf_extension a) + let attribute ?loc a = mk ?loc (Pctf_attribute a) + let text txt = + let f_txt = List.filter (fun ds -> docstring_body ds <> "") txt in + List.map + (fun ds -> attribute ~loc:(docstring_loc ds) (text_attr ds)) + f_txt + + let attr d a = {d with pctf_attributes = d.pctf_attributes @ [a]} + +end + +module Cf = struct + let mk ?(loc = !default_loc) ?(attrs = []) + ?(docs = empty_docs) d = + { + pcf_desc = d; + pcf_loc = loc; + pcf_attributes = add_docs_attrs docs attrs; + } + + let inherit_ ?loc ?attrs a b c = mk ?loc ?attrs (Pcf_inherit (a, b, c)) + let val_ ?loc ?attrs a b c = mk ?loc ?attrs (Pcf_val (a, b, c)) + let method_ ?loc ?attrs a b c = mk ?loc ?attrs (Pcf_method (a, b, c)) + let constraint_ ?loc ?attrs a b = mk ?loc ?attrs (Pcf_constraint (a, b)) + let initializer_ ?loc ?attrs a = mk ?loc ?attrs (Pcf_initializer a) + let extension ?loc ?attrs a = mk ?loc ?attrs (Pcf_extension a) + let attribute ?loc a = mk ?loc (Pcf_attribute a) + let text txt = + let f_txt = List.filter (fun ds -> docstring_body ds <> "") txt in + List.map + (fun ds -> attribute ~loc:(docstring_loc ds) (text_attr ds)) + f_txt + + let virtual_ ct = Cfk_virtual ct + let concrete o e = Cfk_concrete (o, e) + + let attr d a = {d with pcf_attributes = d.pcf_attributes @ [a]} + +end + +module Val = struct + let mk ?(loc = !default_loc) ?(attrs = []) ?(docs = empty_docs) + ?(prim = []) name typ = + { + pval_name = name; + pval_type = typ; + pval_attributes = add_docs_attrs docs attrs; + pval_loc = loc; + pval_prim = prim; + } +end + +module Md = struct + let mk ?(loc = !default_loc) ?(attrs = []) + ?(docs = empty_docs) ?(text = []) name typ = + { + pmd_name = name; + pmd_type = typ; + pmd_attributes = + add_text_attrs text (add_docs_attrs docs attrs); + pmd_loc = loc; + } +end + +module Mtd = struct + let mk ?(loc = !default_loc) ?(attrs = []) + ?(docs = empty_docs) ?(text = []) ?typ name = + { + pmtd_name = name; + pmtd_type = typ; + pmtd_attributes = + add_text_attrs text (add_docs_attrs docs attrs); + pmtd_loc = loc; + } +end + +module Mb = struct + let mk ?(loc = !default_loc) ?(attrs = []) + ?(docs = empty_docs) ?(text = []) name expr = + { + pmb_name = name; + pmb_expr = expr; + pmb_attributes = + add_text_attrs text (add_docs_attrs docs attrs); + pmb_loc = loc; + } +end + +module Opn = struct + let mk ?(loc = !default_loc) ?(attrs = []) ?(docs = empty_docs) + ?(override = Fresh) lid = + { + popen_lid = lid; + popen_override = override; + popen_loc = loc; + popen_attributes = add_docs_attrs docs attrs; + } +end + +module Incl = struct + let mk ?(loc = !default_loc) ?(attrs = []) ?(docs = empty_docs) mexpr = + { + pincl_mod = mexpr; + pincl_loc = loc; + pincl_attributes = add_docs_attrs docs attrs; + } + +end + +module Vb = struct + let mk ?(loc = !default_loc) ?(attrs = []) ?(docs = empty_docs) + ?(text = []) pat expr = + { + pvb_pat = pat; + pvb_expr = expr; + pvb_attributes = + add_text_attrs text (add_docs_attrs docs attrs); + pvb_loc = loc; + } +end + +module Ci = struct + let mk ?(loc = !default_loc) ?(attrs = []) + ?(docs = empty_docs) ?(text = []) + ?(virt = Concrete) ?(params = []) name expr = + { + pci_virt = virt; + 
pci_params = params; + pci_name = name; + pci_expr = expr; + pci_attributes = + add_text_attrs text (add_docs_attrs docs attrs); + pci_loc = loc; + } +end + +module Type = struct + let mk ?(loc = !default_loc) ?(attrs = []) + ?(docs = empty_docs) ?(text = []) + ?(params = []) + ?(cstrs = []) + ?(kind = Ptype_abstract) + ?(priv = Public) + ?manifest + name = + { + ptype_name = name; + ptype_params = params; + ptype_cstrs = cstrs; + ptype_kind = kind; + ptype_private = priv; + ptype_manifest = manifest; + ptype_attributes = + add_text_attrs text (add_docs_attrs docs attrs); + ptype_loc = loc; + } + + let constructor ?(loc = !default_loc) ?(attrs = []) ?(info = empty_info) + ?(args = Pcstr_tuple []) ?res name = + { + pcd_name = name; + pcd_args = args; + pcd_res = res; + pcd_loc = loc; + pcd_attributes = add_info_attrs info attrs; + } + + let field ?(loc = !default_loc) ?(attrs = []) ?(info = empty_info) + ?(mut = Immutable) name typ = + { + pld_name = name; + pld_mutable = mut; + pld_type = typ; + pld_loc = loc; + pld_attributes = add_info_attrs info attrs; + } + +end + +(** Type extensions *) +module Te = struct + let mk ?(attrs = []) ?(docs = empty_docs) + ?(params = []) ?(priv = Public) path constructors = + { + ptyext_path = path; + ptyext_params = params; + ptyext_constructors = constructors; + ptyext_private = priv; + ptyext_attributes = add_docs_attrs docs attrs; + } + + let constructor ?(loc = !default_loc) ?(attrs = []) + ?(docs = empty_docs) ?(info = empty_info) name kind = + { + pext_name = name; + pext_kind = kind; + pext_loc = loc; + pext_attributes = add_docs_attrs docs (add_info_attrs info attrs); + } + + let decl ?(loc = !default_loc) ?(attrs = []) ?(docs = empty_docs) + ?(info = empty_info) ?(args = Pcstr_tuple []) ?res name = + { + pext_name = name; + pext_kind = Pext_decl(args, res); + pext_loc = loc; + pext_attributes = add_docs_attrs docs (add_info_attrs info attrs); + } + + let rebind ?(loc = !default_loc) ?(attrs = []) + ?(docs = empty_docs) ?(info = empty_info) name lid = + { + pext_name = name; + pext_kind = Pext_rebind lid; + pext_loc = loc; + pext_attributes = add_docs_attrs docs (add_info_attrs info attrs); + } + +end + +module Csig = struct + let mk self fields = + { + pcsig_self = self; + pcsig_fields = fields; + } +end + +module Cstr = struct + let mk self fields = + { + pcstr_self = self; + pcstr_fields = fields; + } +end + +end +module Parser : sig +#1 "parser.mli" +type token = + | AMPERAMPER + | AMPERSAND + | AND + | AS + | ASSERT + | BACKQUOTE + | BANG + | BAR + | BARBAR + | BARRBRACKET + | BEGIN + | CHAR of (char) + | CLASS + | COLON + | COLONCOLON + | COLONEQUAL + | COLONGREATER + | COMMA + | CONSTRAINT + | DO + | DONE + | DOT + | DOTDOT + | DOWNTO + | ELSE + | END + | EOF + | EQUAL + | EXCEPTION + | EXTERNAL + | FALSE + | FLOAT of (string * char option) + | FOR + | FUN + | FUNCTION + | FUNCTOR + | GREATER + | GREATERRBRACE + | GREATERRBRACKET + | IF + | IN + | INCLUDE + | INFIXOP0 of (string) + | INFIXOP1 of (string) + | INFIXOP2 of (string) + | INFIXOP3 of (string) + | INFIXOP4 of (string) + | DOTOP of (string) + | INHERIT + | INITIALIZER + | INT of (string * char option) + | LABEL of (string) + | LAZY + | LBRACE + | LBRACELESS + | LBRACKET + | LBRACKETBAR + | LBRACKETLESS + | LBRACKETGREATER + | LBRACKETPERCENT + | LBRACKETPERCENTPERCENT + | LESS + | LESSMINUS + | LET + | LIDENT of (string) + | LPAREN + | LBRACKETAT + | LBRACKETATAT + | LBRACKETATATAT + | MATCH + | METHOD + | MINUS + | MINUSDOT + | MINUSGREATER + | MODULE + | MUTABLE + | NEW + | 
NONREC + | OBJECT + | OF + | OPEN + | OPTLABEL of (string) + | OR + | PERCENT + | PLUS + | PLUSDOT + | PLUSEQ + | PREFIXOP of (string) + | PRIVATE + | QUESTION + | QUOTE + | RBRACE + | RBRACKET + | REC + | RPAREN + | SEMI + | SEMISEMI + | HASH + | HASHOP of (string) + | SIG + | STAR + | STRING of (string * string option) + | STRUCT + | THEN + | TILDE + | TO + | TRUE + | TRY + | TYPE + | UIDENT of (string) + | UNDERSCORE + | VAL + | VIRTUAL + | WHEN + | WHILE + | WITH + | COMMENT of (string * Location.t) + | DOCSTRING of (Docstrings.docstring) + | EOL + +val implementation : + (Lexing.lexbuf -> token) -> Lexing.lexbuf -> Parsetree.structure +val interface : + (Lexing.lexbuf -> token) -> Lexing.lexbuf -> Parsetree.signature +val toplevel_phrase : + (Lexing.lexbuf -> token) -> Lexing.lexbuf -> Parsetree.toplevel_phrase +val use_file : + (Lexing.lexbuf -> token) -> Lexing.lexbuf -> Parsetree.toplevel_phrase list +val parse_core_type : + (Lexing.lexbuf -> token) -> Lexing.lexbuf -> Parsetree.core_type +val parse_expression : + (Lexing.lexbuf -> token) -> Lexing.lexbuf -> Parsetree.expression +val parse_pattern : + (Lexing.lexbuf -> token) -> Lexing.lexbuf -> Parsetree.pattern + +end = struct +#1 "parser.ml" +type token = + | AMPERAMPER + | AMPERSAND + | AND + | AS + | ASSERT + | BACKQUOTE + | BANG + | BAR + | BARBAR + | BARRBRACKET + | BEGIN + | CHAR of (char) + | CLASS + | COLON + | COLONCOLON + | COLONEQUAL + | COLONGREATER + | COMMA + | CONSTRAINT + | DO + | DONE + | DOT + | DOTDOT + | DOWNTO + | ELSE + | END + | EOF + | EQUAL + | EXCEPTION + | EXTERNAL + | FALSE + | FLOAT of (string * char option) + | FOR + | FUN + | FUNCTION + | FUNCTOR + | GREATER + | GREATERRBRACE + | GREATERRBRACKET + | IF + | IN + | INCLUDE + | INFIXOP0 of (string) + | INFIXOP1 of (string) + | INFIXOP2 of (string) + | INFIXOP3 of (string) + | INFIXOP4 of (string) + | DOTOP of (string) + | INHERIT + | INITIALIZER + | INT of (string * char option) + | LABEL of (string) + | LAZY + | LBRACE + | LBRACELESS + | LBRACKET + | LBRACKETBAR + | LBRACKETLESS + | LBRACKETGREATER + | LBRACKETPERCENT + | LBRACKETPERCENTPERCENT + | LESS + | LESSMINUS + | LET + | LIDENT of (string) + | LPAREN + | LBRACKETAT + | LBRACKETATAT + | LBRACKETATATAT + | MATCH + | METHOD + | MINUS + | MINUSDOT + | MINUSGREATER + | MODULE + | MUTABLE + | NEW + | NONREC + | OBJECT + | OF + | OPEN + | OPTLABEL of (string) + | OR + | PERCENT + | PLUS + | PLUSDOT + | PLUSEQ + | PREFIXOP of (string) + | PRIVATE + | QUESTION + | QUOTE + | RBRACE + | RBRACKET + | REC + | RPAREN + | SEMI + | SEMISEMI + | HASH + | HASHOP of (string) + | SIG + | STAR + | STRING of (string * string option) + | STRUCT + | THEN + | TILDE + | TO + | TRUE + | TRY + | TYPE + | UIDENT of (string) + | UNDERSCORE + | VAL + | VIRTUAL + | WHEN + | WHILE + | WITH + | COMMENT of (string * Location.t) + | DOCSTRING of (Docstrings.docstring) + | EOL + +open Parsing;; +let _ = parse_error;; +# 19 "parsing/parser.mly" +open Location +open Asttypes +open Longident +open Parsetree +open Ast_helper +open Docstrings + +let mktyp d = Typ.mk ~loc:(symbol_rloc()) d +let mkpat d = Pat.mk ~loc:(symbol_rloc()) d +let mkexp d = Exp.mk ~loc:(symbol_rloc()) d +let mkmty ?attrs d = Mty.mk ~loc:(symbol_rloc()) ?attrs d +let mksig d = Sig.mk ~loc:(symbol_rloc()) d +let mkmod ?attrs d = Mod.mk ~loc:(symbol_rloc()) ?attrs d +let mkstr d = Str.mk ~loc:(symbol_rloc()) d +let mkclass ?attrs d = Cl.mk ~loc:(symbol_rloc()) ?attrs d +let mkcty ?attrs d = Cty.mk ~loc:(symbol_rloc()) ?attrs d +let mkctf ?attrs ?docs d = + Ctf.mk 
~loc:(symbol_rloc()) ?attrs ?docs d +let mkcf ?attrs ?docs d = + Cf.mk ~loc:(symbol_rloc()) ?attrs ?docs d + +let mkrhs rhs pos = mkloc rhs (rhs_loc pos) + +let reloc_pat x = { x with ppat_loc = symbol_rloc () };; +let reloc_exp x = { x with pexp_loc = symbol_rloc () };; + +let mkoperator name pos = + let loc = rhs_loc pos in + Exp.mk ~loc (Pexp_ident(mkloc (Lident name) loc)) + +let mkpatvar name pos = + Pat.mk ~loc:(rhs_loc pos) (Ppat_var (mkrhs name pos)) + +(* + Ghost expressions and patterns: + expressions and patterns that do not appear explicitly in the + source file they have the loc_ghost flag set to true. + Then the profiler will not try to instrument them and the + -annot option will not try to display their type. + + Every grammar rule that generates an element with a location must + make at most one non-ghost element, the topmost one. + + How to tell whether your location must be ghost: + A location corresponds to a range of characters in the source file. + If the location contains a piece of code that is syntactically + valid (according to the documentation), and corresponds to the + AST node, then the location must be real; in all other cases, + it must be ghost. +*) +let ghexp d = Exp.mk ~loc:(symbol_gloc ()) d +let ghpat d = Pat.mk ~loc:(symbol_gloc ()) d +let ghtyp d = Typ.mk ~loc:(symbol_gloc ()) d +let ghloc d = { txt = d; loc = symbol_gloc () } +let ghstr d = Str.mk ~loc:(symbol_gloc()) d +let ghsig d = Sig.mk ~loc:(symbol_gloc()) d + +let mkinfix arg1 name arg2 = + mkexp(Pexp_apply(mkoperator name 2, [Nolabel, arg1; Nolabel, arg2])) + +let neg_string f = + if String.length f > 0 && f.[0] = '-' + then String.sub f 1 (String.length f - 1) + else "-" ^ f + +let mkuminus name arg = + match name, arg.pexp_desc with + | "-", Pexp_constant(Pconst_integer (n,m)) -> + mkexp(Pexp_constant(Pconst_integer(neg_string n,m))) + | ("-" | "-."), Pexp_constant(Pconst_float (f, m)) -> + mkexp(Pexp_constant(Pconst_float(neg_string f, m))) + | _ -> + mkexp(Pexp_apply(mkoperator ("~" ^ name) 1, [Nolabel, arg])) + +let mkuplus name arg = + let desc = arg.pexp_desc in + match name, desc with + | "+", Pexp_constant(Pconst_integer _) + | ("+" | "+."), Pexp_constant(Pconst_float _) -> mkexp desc + | _ -> + mkexp(Pexp_apply(mkoperator ("~" ^ name) 1, [Nolabel, arg])) + +let mkexp_cons consloc args loc = + Exp.mk ~loc (Pexp_construct(mkloc (Lident "::") consloc, Some args)) + +let mkpat_cons consloc args loc = + Pat.mk ~loc (Ppat_construct(mkloc (Lident "::") consloc, Some args)) + +let rec mktailexp nilloc = function + [] -> + let loc = { nilloc with loc_ghost = true } in + let nil = { txt = Lident "[]"; loc = loc } in + Exp.mk ~loc (Pexp_construct (nil, None)) + | e1 :: el -> + let exp_el = mktailexp nilloc el in + let loc = {loc_start = e1.pexp_loc.loc_start; + loc_end = exp_el.pexp_loc.loc_end; + loc_ghost = true} + in + let arg = Exp.mk ~loc (Pexp_tuple [e1; exp_el]) in + mkexp_cons {loc with loc_ghost = true} arg loc + +let rec mktailpat nilloc = function + [] -> + let loc = { nilloc with loc_ghost = true } in + let nil = { txt = Lident "[]"; loc = loc } in + Pat.mk ~loc (Ppat_construct (nil, None)) + | p1 :: pl -> + let pat_pl = mktailpat nilloc pl in + let loc = {loc_start = p1.ppat_loc.loc_start; + loc_end = pat_pl.ppat_loc.loc_end; + loc_ghost = true} + in + let arg = Pat.mk ~loc (Ppat_tuple [p1; pat_pl]) in + mkpat_cons {loc with loc_ghost = true} arg loc + +let mkstrexp e attrs = + { pstr_desc = Pstr_eval (e, attrs); pstr_loc = e.pexp_loc } + +let mkexp_constraint e (t1, t2) = + match 
t1, t2 with + | Some t, None -> ghexp(Pexp_constraint(e, t)) + | _, Some t -> ghexp(Pexp_coerce(e, t1, t)) + | None, None -> assert false + +let mkexp_opt_constraint e = function + | None -> e + | Some constraint_ -> mkexp_constraint e constraint_ + +let mkpat_opt_constraint p = function + | None -> p + | Some typ -> mkpat (Ppat_constraint(p, typ)) + +let array_function str name = + ghloc (Ldot(Lident str, (if !Clflags.fast then "unsafe_" ^ name else name))) + +let syntax_error () = + raise Syntaxerr.Escape_error + +let unclosed opening_name opening_num closing_name closing_num = + raise(Syntaxerr.Error(Syntaxerr.Unclosed(rhs_loc opening_num, opening_name, + rhs_loc closing_num, closing_name))) + +let expecting pos nonterm = + raise Syntaxerr.(Error(Expecting(rhs_loc pos, nonterm))) + +let not_expecting pos nonterm = + raise Syntaxerr.(Error(Not_expecting(rhs_loc pos, nonterm))) + +let bigarray_function str name = + ghloc (Ldot(Ldot(Lident "Bigarray", str), name)) + +let bigarray_untuplify = function + { pexp_desc = Pexp_tuple explist; pexp_loc = _ } -> explist + | exp -> [exp] + +let bigarray_get arr arg = + let get = if !Clflags.fast then "unsafe_get" else "get" in + match bigarray_untuplify arg with + [c1] -> + mkexp(Pexp_apply(ghexp(Pexp_ident(bigarray_function "Array1" get)), + [Nolabel, arr; Nolabel, c1])) + | [c1;c2] -> + mkexp(Pexp_apply(ghexp(Pexp_ident(bigarray_function "Array2" get)), + [Nolabel, arr; Nolabel, c1; Nolabel, c2])) + | [c1;c2;c3] -> + mkexp(Pexp_apply(ghexp(Pexp_ident(bigarray_function "Array3" get)), + [Nolabel, arr; Nolabel, c1; Nolabel, c2; Nolabel, c3])) + | coords -> + mkexp(Pexp_apply(ghexp(Pexp_ident(bigarray_function "Genarray" "get")), + [Nolabel, arr; Nolabel, ghexp(Pexp_array coords)])) + +let bigarray_set arr arg newval = + let set = if !Clflags.fast then "unsafe_set" else "set" in + match bigarray_untuplify arg with + [c1] -> + mkexp(Pexp_apply(ghexp(Pexp_ident(bigarray_function "Array1" set)), + [Nolabel, arr; Nolabel, c1; Nolabel, newval])) + | [c1;c2] -> + mkexp(Pexp_apply(ghexp(Pexp_ident(bigarray_function "Array2" set)), + [Nolabel, arr; Nolabel, c1; + Nolabel, c2; Nolabel, newval])) + | [c1;c2;c3] -> + mkexp(Pexp_apply(ghexp(Pexp_ident(bigarray_function "Array3" set)), + [Nolabel, arr; Nolabel, c1; + Nolabel, c2; Nolabel, c3; Nolabel, newval])) + | coords -> + mkexp(Pexp_apply(ghexp(Pexp_ident(bigarray_function "Genarray" "set")), + [Nolabel, arr; + Nolabel, ghexp(Pexp_array coords); + Nolabel, newval])) + +let lapply p1 p2 = + if !Clflags.applicative_functors + then Lapply(p1, p2) + else raise (Syntaxerr.Error(Syntaxerr.Applicative_path (symbol_rloc()))) + +let exp_of_label lbl pos = + mkexp (Pexp_ident(mkrhs (Lident(Longident.last lbl)) pos)) + +let pat_of_label lbl pos = + mkpat (Ppat_var (mkrhs (Longident.last lbl) pos)) + +let mk_newtypes newtypes exp = + List.fold_right (fun newtype exp -> mkexp (Pexp_newtype (newtype, exp))) + newtypes exp + +let wrap_type_annotation newtypes core_type body = + let exp = mkexp(Pexp_constraint(body,core_type)) in + let exp = mk_newtypes newtypes exp in + (exp, ghtyp(Ptyp_poly(newtypes, Typ.varify_constructors newtypes core_type))) + +let wrap_exp_attrs body (ext, attrs) = + (* todo: keep exact location for the entire attribute *) + let body = {body with pexp_attributes = attrs @ body.pexp_attributes} in + match ext with + | None -> body + | Some id -> ghexp(Pexp_extension (id, PStr [mkstrexp body []])) + +let mkexp_attrs d attrs = + wrap_exp_attrs (mkexp d) attrs + +let wrap_typ_attrs typ (ext, attrs) = + 
(* todo: keep exact location for the entire attribute *) + let typ = {typ with ptyp_attributes = attrs @ typ.ptyp_attributes} in + match ext with + | None -> typ + | Some id -> ghtyp(Ptyp_extension (id, PTyp typ)) + +let mktyp_attrs d attrs = + wrap_typ_attrs (mktyp d) attrs + +let wrap_pat_attrs pat (ext, attrs) = + (* todo: keep exact location for the entire attribute *) + let pat = {pat with ppat_attributes = attrs @ pat.ppat_attributes} in + match ext with + | None -> pat + | Some id -> ghpat(Ppat_extension (id, PPat (pat, None))) + +let mkpat_attrs d attrs = + wrap_pat_attrs (mkpat d) attrs + +let wrap_class_attrs body attrs = + {body with pcl_attributes = attrs @ body.pcl_attributes} +let wrap_class_type_attrs body attrs = + {body with pcty_attributes = attrs @ body.pcty_attributes} +let wrap_mod_attrs body attrs = + {body with pmod_attributes = attrs @ body.pmod_attributes} +let wrap_mty_attrs body attrs = + {body with pmty_attributes = attrs @ body.pmty_attributes} + +let wrap_str_ext body ext = + match ext with + | None -> body + | Some id -> ghstr(Pstr_extension ((id, PStr [body]), [])) + +let mkstr_ext d ext = + wrap_str_ext (mkstr d) ext + +let wrap_sig_ext body ext = + match ext with + | None -> body + | Some id -> ghsig(Psig_extension ((id, PSig [body]), [])) + +let mksig_ext d ext = + wrap_sig_ext (mksig d) ext + +let text_str pos = Str.text (rhs_text pos) +let text_sig pos = Sig.text (rhs_text pos) +let text_cstr pos = Cf.text (rhs_text pos) +let text_csig pos = Ctf.text (rhs_text pos) +let text_def pos = [Ptop_def (Str.text (rhs_text pos))] + +let extra_text text pos items = + let pre_extras = rhs_pre_extra_text pos in + let post_extras = rhs_post_extra_text pos in + text pre_extras @ items @ text post_extras + +let extra_str pos items = extra_text Str.text pos items +let extra_sig pos items = extra_text Sig.text pos items +let extra_cstr pos items = extra_text Cf.text pos items +let extra_csig pos items = extra_text Ctf.text pos items +let extra_def pos items = + extra_text (fun txt -> [Ptop_def (Str.text txt)]) pos items + +let extra_rhs_core_type ct ~pos = + let docs = rhs_info pos in + { ct with ptyp_attributes = add_info_attrs docs ct.ptyp_attributes } + +type let_binding = + { lb_pattern: pattern; + lb_expression: expression; + lb_attributes: attributes; + lb_docs: docs Lazy.t; + lb_text: text Lazy.t; + lb_loc: Location.t; } + +type let_bindings = + { lbs_bindings: let_binding list; + lbs_rec: rec_flag; + lbs_extension: string Asttypes.loc option; + lbs_loc: Location.t } + +let mklb first (p, e) attrs = + { lb_pattern = p; + lb_expression = e; + lb_attributes = attrs; + lb_docs = symbol_docs_lazy (); + lb_text = if first then empty_text_lazy + else symbol_text_lazy (); + lb_loc = symbol_rloc (); } + +let mklbs ext rf lb = + { lbs_bindings = [lb]; + lbs_rec = rf; + lbs_extension = ext ; + lbs_loc = symbol_rloc (); } + +let addlb lbs lb = + { lbs with lbs_bindings = lb :: lbs.lbs_bindings } + +let val_of_let_bindings lbs = + let bindings = + List.map + (fun lb -> + Vb.mk ~loc:lb.lb_loc ~attrs:lb.lb_attributes + ~docs:(Lazy.force lb.lb_docs) + ~text:(Lazy.force lb.lb_text) + lb.lb_pattern lb.lb_expression) + lbs.lbs_bindings + in + let str = mkstr(Pstr_value(lbs.lbs_rec, List.rev bindings)) in + match lbs.lbs_extension with + | None -> str + | Some id -> ghstr (Pstr_extension((id, PStr [str]), [])) + +let expr_of_let_bindings lbs body = + let bindings = + List.map + (fun lb -> + Vb.mk ~loc:lb.lb_loc ~attrs:lb.lb_attributes + lb.lb_pattern lb.lb_expression) + 
lbs.lbs_bindings + in + mkexp_attrs (Pexp_let(lbs.lbs_rec, List.rev bindings, body)) + (lbs.lbs_extension, []) + +let class_of_let_bindings lbs body = + let bindings = + List.map + (fun lb -> + Vb.mk ~loc:lb.lb_loc ~attrs:lb.lb_attributes + lb.lb_pattern lb.lb_expression) + lbs.lbs_bindings + in + if lbs.lbs_extension <> None then + raise Syntaxerr.(Error(Not_expecting(lbs.lbs_loc, "extension"))); + mkclass(Pcl_let (lbs.lbs_rec, List.rev bindings, body)) + + +(* Alternatively, we could keep the generic module type in the Parsetree + and extract the package type during type-checking. In that case, + the assertions below should be turned into explicit checks. *) +let package_type_of_module_type pmty = + let err loc s = + raise (Syntaxerr.Error (Syntaxerr.Invalid_package_type (loc, s))) + in + let map_cstr = function + | Pwith_type (lid, ptyp) -> + let loc = ptyp.ptype_loc in + if ptyp.ptype_params <> [] then + err loc "parametrized types are not supported"; + if ptyp.ptype_cstrs <> [] then + err loc "constrained types are not supported"; + if ptyp.ptype_private <> Public then + err loc "private types are not supported"; + + (* restrictions below are checked by the 'with_constraint' rule *) + assert (ptyp.ptype_kind = Ptype_abstract); + assert (ptyp.ptype_attributes = []); + let ty = + match ptyp.ptype_manifest with + | Some ty -> ty + | None -> assert false + in + (lid, ty) + | _ -> + err pmty.pmty_loc "only 'with type t =' constraints are supported" + in + match pmty with + | {pmty_desc = Pmty_ident lid} -> (lid, []) + | {pmty_desc = Pmty_with({pmty_desc = Pmty_ident lid}, cstrs)} -> + (lid, List.map map_cstr cstrs) + | _ -> + err pmty.pmty_loc + "only module type identifier and 'with type' constraints are supported" + + +# 524 "parsing/parser.ml" +let yytransl_const = [| + 257 (* AMPERAMPER *); + 258 (* AMPERSAND *); + 259 (* AND *); + 260 (* AS *); + 261 (* ASSERT *); + 262 (* BACKQUOTE *); + 263 (* BANG *); + 264 (* BAR *); + 265 (* BARBAR *); + 266 (* BARRBRACKET *); + 267 (* BEGIN *); + 269 (* CLASS *); + 270 (* COLON *); + 271 (* COLONCOLON *); + 272 (* COLONEQUAL *); + 273 (* COLONGREATER *); + 274 (* COMMA *); + 275 (* CONSTRAINT *); + 276 (* DO *); + 277 (* DONE *); + 278 (* DOT *); + 279 (* DOTDOT *); + 280 (* DOWNTO *); + 281 (* ELSE *); + 282 (* END *); + 0 (* EOF *); + 283 (* EQUAL *); + 284 (* EXCEPTION *); + 285 (* EXTERNAL *); + 286 (* FALSE *); + 288 (* FOR *); + 289 (* FUN *); + 290 (* FUNCTION *); + 291 (* FUNCTOR *); + 292 (* GREATER *); + 293 (* GREATERRBRACE *); + 294 (* GREATERRBRACKET *); + 295 (* IF *); + 296 (* IN *); + 297 (* INCLUDE *); + 304 (* INHERIT *); + 305 (* INITIALIZER *); + 308 (* LAZY *); + 309 (* LBRACE *); + 310 (* LBRACELESS *); + 311 (* LBRACKET *); + 312 (* LBRACKETBAR *); + 313 (* LBRACKETLESS *); + 314 (* LBRACKETGREATER *); + 315 (* LBRACKETPERCENT *); + 316 (* LBRACKETPERCENTPERCENT *); + 317 (* LESS *); + 318 (* LESSMINUS *); + 319 (* LET *); + 321 (* LPAREN *); + 322 (* LBRACKETAT *); + 323 (* LBRACKETATAT *); + 324 (* LBRACKETATATAT *); + 325 (* MATCH *); + 326 (* METHOD *); + 327 (* MINUS *); + 328 (* MINUSDOT *); + 329 (* MINUSGREATER *); + 330 (* MODULE *); + 331 (* MUTABLE *); + 332 (* NEW *); + 333 (* NONREC *); + 334 (* OBJECT *); + 335 (* OF *); + 336 (* OPEN *); + 338 (* OR *); + 339 (* PERCENT *); + 340 (* PLUS *); + 341 (* PLUSDOT *); + 342 (* PLUSEQ *); + 344 (* PRIVATE *); + 345 (* QUESTION *); + 346 (* QUOTE *); + 347 (* RBRACE *); + 348 (* RBRACKET *); + 349 (* REC *); + 350 (* RPAREN *); + 351 (* SEMI *); + 352 (* SEMISEMI 
*); + 353 (* HASH *); + 355 (* SIG *); + 356 (* STAR *); + 358 (* STRUCT *); + 359 (* THEN *); + 360 (* TILDE *); + 361 (* TO *); + 362 (* TRUE *); + 363 (* TRY *); + 364 (* TYPE *); + 366 (* UNDERSCORE *); + 367 (* VAL *); + 368 (* VIRTUAL *); + 369 (* WHEN *); + 370 (* WHILE *); + 371 (* WITH *); + 374 (* EOL *); + 0|] + +let yytransl_block = [| + 268 (* CHAR *); + 287 (* FLOAT *); + 298 (* INFIXOP0 *); + 299 (* INFIXOP1 *); + 300 (* INFIXOP2 *); + 301 (* INFIXOP3 *); + 302 (* INFIXOP4 *); + 303 (* DOTOP *); + 306 (* INT *); + 307 (* LABEL *); + 320 (* LIDENT *); + 337 (* OPTLABEL *); + 343 (* PREFIXOP *); + 354 (* HASHOP *); + 357 (* STRING *); + 365 (* UIDENT *); + 372 (* COMMENT *); + 373 (* DOCSTRING *); + 0|] + +let yylhs = "\255\255\ +\001\000\002\000\003\000\003\000\003\000\010\000\010\000\014\000\ +\014\000\004\000\016\000\016\000\017\000\017\000\017\000\017\000\ +\017\000\017\000\017\000\005\000\006\000\007\000\020\000\020\000\ +\021\000\021\000\023\000\023\000\024\000\024\000\024\000\024\000\ +\024\000\024\000\024\000\024\000\024\000\027\000\027\000\027\000\ +\027\000\027\000\027\000\027\000\027\000\027\000\027\000\027\000\ +\008\000\008\000\032\000\032\000\032\000\015\000\015\000\015\000\ +\015\000\015\000\015\000\015\000\015\000\015\000\015\000\015\000\ +\015\000\015\000\015\000\015\000\045\000\049\000\049\000\049\000\ +\039\000\040\000\040\000\050\000\051\000\022\000\022\000\022\000\ +\022\000\022\000\022\000\022\000\022\000\022\000\022\000\022\000\ +\009\000\009\000\009\000\054\000\054\000\054\000\054\000\054\000\ +\054\000\054\000\054\000\054\000\054\000\054\000\054\000\054\000\ +\054\000\054\000\042\000\060\000\063\000\063\000\063\000\057\000\ +\058\000\059\000\059\000\064\000\065\000\066\000\066\000\041\000\ +\043\000\043\000\068\000\069\000\072\000\072\000\072\000\071\000\ +\071\000\077\000\077\000\073\000\073\000\073\000\073\000\073\000\ +\073\000\073\000\078\000\078\000\078\000\078\000\078\000\078\000\ +\078\000\078\000\082\000\083\000\083\000\083\000\084\000\084\000\ +\085\000\085\000\085\000\085\000\085\000\085\000\085\000\086\000\ +\086\000\087\000\087\000\087\000\087\000\088\000\088\000\088\000\ +\088\000\088\000\074\000\074\000\074\000\074\000\074\000\097\000\ +\097\000\097\000\097\000\097\000\097\000\097\000\100\000\101\000\ +\101\000\102\000\102\000\103\000\103\000\103\000\103\000\103\000\ +\103\000\104\000\104\000\104\000\106\000\089\000\061\000\061\000\ +\107\000\108\000\044\000\044\000\109\000\110\000\012\000\012\000\ +\012\000\012\000\075\000\075\000\075\000\075\000\075\000\075\000\ +\075\000\075\000\116\000\116\000\113\000\113\000\112\000\112\000\ +\114\000\115\000\115\000\030\000\030\000\030\000\030\000\030\000\ +\030\000\030\000\030\000\030\000\030\000\030\000\030\000\030\000\ +\030\000\030\000\030\000\030\000\030\000\030\000\030\000\030\000\ +\030\000\030\000\030\000\030\000\030\000\030\000\030\000\030\000\ +\030\000\030\000\030\000\030\000\030\000\030\000\030\000\030\000\ +\030\000\030\000\030\000\030\000\030\000\030\000\030\000\030\000\ +\030\000\030\000\030\000\030\000\030\000\030\000\030\000\030\000\ +\030\000\030\000\030\000\030\000\030\000\030\000\118\000\118\000\ +\118\000\118\000\118\000\118\000\118\000\118\000\118\000\118\000\ +\118\000\118\000\118\000\118\000\118\000\118\000\118\000\118\000\ +\118\000\118\000\118\000\118\000\118\000\118\000\118\000\118\000\ +\118\000\118\000\118\000\118\000\118\000\118\000\118\000\118\000\ +\118\000\118\000\118\000\118\000\118\000\118\000\118\000\118\000\ 
+\118\000\118\000\118\000\118\000\118\000\118\000\118\000\118\000\ +\118\000\118\000\118\000\118\000\118\000\118\000\118\000\118\000\ +\118\000\118\000\118\000\118\000\118\000\118\000\079\000\079\000\ +\136\000\136\000\137\000\137\000\137\000\137\000\138\000\096\000\ +\096\000\139\000\139\000\139\000\139\000\139\000\139\000\033\000\ +\033\000\144\000\145\000\147\000\147\000\095\000\095\000\095\000\ +\121\000\121\000\148\000\148\000\148\000\122\000\122\000\122\000\ +\122\000\123\000\123\000\132\000\132\000\150\000\150\000\150\000\ +\151\000\151\000\135\000\135\000\153\000\153\000\133\000\133\000\ +\092\000\092\000\092\000\092\000\092\000\152\000\152\000\019\000\ +\019\000\019\000\019\000\019\000\019\000\019\000\019\000\019\000\ +\019\000\142\000\142\000\142\000\142\000\142\000\142\000\142\000\ +\142\000\142\000\155\000\155\000\155\000\155\000\117\000\117\000\ +\143\000\143\000\143\000\143\000\143\000\143\000\143\000\143\000\ +\143\000\143\000\143\000\143\000\143\000\143\000\143\000\143\000\ +\143\000\143\000\143\000\143\000\143\000\143\000\159\000\159\000\ +\159\000\159\000\159\000\159\000\159\000\154\000\154\000\154\000\ +\156\000\156\000\156\000\161\000\161\000\160\000\160\000\160\000\ +\160\000\162\000\162\000\163\000\163\000\035\000\164\000\164\000\ +\034\000\036\000\036\000\165\000\166\000\170\000\170\000\169\000\ +\169\000\169\000\169\000\169\000\169\000\169\000\169\000\169\000\ +\169\000\169\000\168\000\168\000\168\000\173\000\174\000\174\000\ +\176\000\176\000\177\000\175\000\175\000\175\000\178\000\076\000\ +\076\000\171\000\171\000\171\000\179\000\180\000\038\000\038\000\ +\056\000\119\000\182\000\182\000\182\000\182\000\183\000\183\000\ +\172\000\172\000\172\000\185\000\186\000\037\000\055\000\188\000\ +\188\000\188\000\188\000\188\000\188\000\189\000\189\000\189\000\ +\190\000\191\000\192\000\193\000\053\000\053\000\194\000\194\000\ +\194\000\194\000\195\000\195\000\141\000\141\000\093\000\093\000\ +\187\000\187\000\018\000\018\000\196\000\196\000\198\000\198\000\ +\198\000\198\000\198\000\149\000\149\000\199\000\199\000\199\000\ +\199\000\199\000\199\000\199\000\199\000\199\000\199\000\199\000\ +\199\000\199\000\199\000\199\000\199\000\199\000\199\000\199\000\ +\031\000\202\000\202\000\203\000\203\000\201\000\201\000\205\000\ +\205\000\206\000\206\000\204\000\204\000\098\000\098\000\080\000\ +\080\000\184\000\184\000\200\000\200\000\200\000\200\000\200\000\ +\200\000\200\000\209\000\207\000\208\000\090\000\131\000\131\000\ +\131\000\131\000\157\000\157\000\157\000\157\000\157\000\067\000\ +\067\000\140\000\140\000\140\000\140\000\140\000\210\000\210\000\ +\210\000\210\000\210\000\210\000\210\000\210\000\210\000\210\000\ +\210\000\210\000\210\000\210\000\210\000\210\000\210\000\210\000\ +\210\000\210\000\210\000\210\000\210\000\210\000\210\000\210\000\ +\210\000\210\000\210\000\181\000\181\000\181\000\181\000\181\000\ +\181\000\130\000\130\000\124\000\124\000\124\000\124\000\124\000\ +\124\000\124\000\129\000\129\000\158\000\158\000\025\000\025\000\ +\197\000\197\000\197\000\052\000\052\000\099\000\099\000\081\000\ +\081\000\011\000\011\000\011\000\011\000\011\000\011\000\011\000\ +\125\000\146\000\146\000\167\000\167\000\126\000\126\000\094\000\ +\094\000\091\000\091\000\070\000\070\000\105\000\105\000\105\000\ +\105\000\105\000\062\000\062\000\120\000\120\000\134\000\134\000\ +\127\000\127\000\128\000\128\000\211\000\211\000\211\000\211\000\ +\211\000\211\000\211\000\211\000\211\000\211\000\211\000\211\000\ +\211\000\211\000\211\000\211\000\211\000\211\000\211\000\211\000\ 
+\211\000\211\000\211\000\211\000\211\000\211\000\211\000\211\000\ +\211\000\211\000\211\000\211\000\211\000\211\000\211\000\211\000\ +\211\000\211\000\211\000\211\000\211\000\211\000\211\000\211\000\ +\211\000\211\000\211\000\211\000\211\000\211\000\211\000\111\000\ +\111\000\028\000\213\000\047\000\013\000\013\000\026\000\026\000\ +\048\000\048\000\048\000\029\000\046\000\212\000\212\000\212\000\ +\212\000\212\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000" + +let yylen = "\002\000\ +\002\000\002\000\002\000\002\000\001\000\002\000\001\000\000\000\ +\002\000\001\000\001\000\003\000\001\000\002\000\004\000\003\000\ +\003\000\002\000\002\000\002\000\002\000\002\000\002\000\005\000\ +\001\000\001\000\002\000\001\000\001\000\004\000\004\000\005\000\ +\002\000\003\000\001\000\002\000\001\000\005\000\005\000\003\000\ +\003\000\005\000\007\000\009\000\007\000\006\000\006\000\005\000\ +\003\000\001\000\000\000\002\000\002\000\001\000\001\000\001\000\ +\001\000\001\000\001\000\001\000\001\000\001\000\001\000\001\000\ +\001\000\001\000\002\000\001\000\004\000\002\000\004\000\002\000\ +\005\000\001\000\002\000\006\000\005\000\001\000\004\000\004\000\ +\005\000\003\000\003\000\005\000\003\000\003\000\001\000\002\000\ +\000\000\002\000\002\000\001\000\001\000\001\000\001\000\001\000\ +\001\000\001\000\001\000\001\000\001\000\001\000\001\000\001\000\ +\002\000\001\000\005\000\004\000\002\000\006\000\003\000\005\000\ +\006\000\001\000\002\000\007\000\006\000\000\000\002\000\006\000\ +\001\000\002\000\007\000\007\000\002\000\004\000\002\000\000\000\ +\003\000\003\000\002\000\001\000\003\000\002\000\003\000\007\000\ +\002\000\001\000\004\000\001\000\004\000\004\000\005\000\005\000\ +\003\000\003\000\002\000\003\000\005\000\000\000\000\000\002\000\ +\006\000\003\000\003\000\004\000\004\000\002\000\001\000\002\000\ +\000\000\007\000\007\000\006\000\007\000\007\000\007\000\005\000\ +\008\000\011\000\001\000\006\000\004\000\005\000\003\000\004\000\ +\001\000\004\000\004\000\002\000\001\000\007\000\002\000\003\000\ +\000\000\000\000\002\000\004\000\004\000\007\000\004\000\002\000\ +\001\000\005\000\005\000\003\000\003\000\003\000\001\000\002\000\ +\008\000\008\000\001\000\002\000\009\000\008\000\001\000\002\000\ +\003\000\005\000\005\000\002\000\005\000\002\000\004\000\002\000\ +\002\000\001\000\001\000\001\000\000\000\002\000\001\000\003\000\ +\001\000\001\000\003\000\001\000\002\000\003\000\007\000\006\000\ +\007\000\004\000\004\000\007\000\006\000\006\000\005\000\001\000\ +\002\000\002\000\007\000\005\000\006\000\010\000\003\000\003\000\ +\003\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\ +\003\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\ +\003\000\003\000\003\000\002\000\002\000\005\000\007\000\007\000\ +\007\000\007\000\007\000\007\000\009\000\009\000\009\000\003\000\ +\003\000\003\000\004\000\004\000\002\000\001\000\001\000\001\000\ +\001\000\001\000\003\000\003\000\004\000\003\000\004\000\004\000\ +\003\000\005\000\004\000\005\000\005\000\005\000\005\000\005\000\ +\005\000\005\000\005\000\005\000\005\000\005\000\007\000\007\000\ +\007\000\007\000\007\000\007\000\005\000\005\000\003\000\003\000\ +\005\000\005\000\004\000\004\000\002\000\006\000\004\000\006\000\ +\004\000\004\000\006\000\004\000\006\000\002\000\002\000\003\000\ +\003\000\003\000\002\000\005\000\004\000\005\000\003\000\003\000\ +\005\000\007\000\006\000\009\000\008\000\001\000\001\000\002\000\ +\001\000\001\000\002\000\002\000\002\000\002\000\001\000\001\000\ 
+\002\000\002\000\004\000\007\000\008\000\003\000\005\000\001\000\ +\002\000\005\000\004\000\001\000\003\000\002\000\002\000\005\000\ +\001\000\003\000\003\000\005\000\003\000\002\000\004\000\002\000\ +\005\000\003\000\003\000\003\000\001\000\001\000\003\000\002\000\ +\004\000\002\000\002\000\003\000\003\000\001\000\001\000\003\000\ +\002\000\004\000\002\000\002\000\002\000\001\000\000\000\003\000\ +\003\000\001\000\003\000\003\000\003\000\003\000\003\000\002\000\ +\001\000\003\000\003\000\001\000\003\000\003\000\003\000\003\000\ +\002\000\001\000\001\000\002\000\002\000\003\000\001\000\001\000\ +\001\000\001\000\003\000\001\000\001\000\002\000\001\000\003\000\ +\004\000\004\000\005\000\005\000\004\000\003\000\003\000\005\000\ +\005\000\004\000\005\000\007\000\007\000\001\000\003\000\003\000\ +\004\000\004\000\004\000\002\000\004\000\003\000\003\000\003\000\ +\003\000\003\000\003\000\001\000\003\000\001\000\002\000\004\000\ +\003\000\004\000\002\000\002\000\000\000\006\000\001\000\002\000\ +\008\000\001\000\002\000\008\000\007\000\003\000\000\000\000\000\ +\002\000\003\000\002\000\003\000\002\000\003\000\005\000\005\000\ +\005\000\007\000\000\000\001\000\003\000\002\000\001\000\003\000\ +\002\000\001\000\002\000\000\000\001\000\001\000\002\000\001\000\ +\003\000\001\000\001\000\002\000\003\000\004\000\001\000\007\000\ +\006\000\003\000\000\000\002\000\004\000\002\000\001\000\003\000\ +\001\000\001\000\002\000\005\000\007\000\009\000\009\000\001\000\ +\001\000\001\000\001\000\002\000\002\000\001\000\001\000\002\000\ +\003\000\004\000\004\000\005\000\001\000\003\000\006\000\005\000\ +\004\000\004\000\001\000\002\000\002\000\003\000\001\000\003\000\ +\001\000\003\000\001\000\002\000\001\000\004\000\001\000\006\000\ +\004\000\005\000\003\000\001\000\003\000\002\000\001\000\001\000\ +\002\000\004\000\003\000\002\000\002\000\003\000\005\000\003\000\ +\004\000\005\000\004\000\002\000\004\000\006\000\005\000\001\000\ +\001\000\001\000\003\000\001\000\001\000\005\000\002\000\001\000\ +\000\000\001\000\003\000\001\000\002\000\001\000\003\000\001\000\ +\003\000\001\000\003\000\002\000\002\000\001\000\001\000\001\000\ +\001\000\001\000\004\000\006\000\002\000\001\000\001\000\001\000\ +\001\000\001\000\001\000\002\000\002\000\002\000\002\000\001\000\ +\001\000\001\000\003\000\003\000\002\000\003\000\001\000\001\000\ +\001\000\001\000\001\000\001\000\003\000\004\000\003\000\004\000\ +\003\000\004\000\001\000\001\000\001\000\001\000\001\000\001\000\ +\001\000\001\000\001\000\001\000\001\000\001\000\001\000\001\000\ +\001\000\001\000\001\000\001\000\002\000\002\000\003\000\001\000\ +\001\000\001\000\003\000\001\000\005\000\002\000\002\000\003\000\ +\001\000\001\000\001\000\003\000\001\000\003\000\001\000\003\000\ +\001\000\003\000\004\000\001\000\003\000\001\000\003\000\001\000\ +\003\000\002\000\003\000\003\000\003\000\003\000\003\000\003\000\ +\002\000\000\000\001\000\000\000\001\000\001\000\001\000\000\000\ +\001\000\000\000\001\000\000\000\001\000\000\000\001\000\001\000\ +\002\000\002\000\000\000\001\000\000\000\001\000\000\000\001\000\ +\001\000\001\000\001\000\001\000\001\000\001\000\001\000\001\000\ +\001\000\001\000\001\000\001\000\001\000\001\000\001\000\001\000\ +\001\000\001\000\001\000\001\000\001\000\001\000\001\000\001\000\ +\001\000\001\000\001\000\001\000\001\000\001\000\001\000\001\000\ +\001\000\001\000\001\000\001\000\001\000\001\000\001\000\001\000\ +\001\000\001\000\001\000\001\000\001\000\001\000\001\000\001\000\ +\001\000\001\000\001\000\001\000\001\000\001\000\001\000\001\000\ 
+\003\000\004\000\004\000\004\000\000\000\002\000\000\000\002\000\ +\000\000\002\000\003\000\004\000\004\000\001\000\002\000\002\000\ +\002\000\004\000\002\000\002\000\002\000\002\000\002\000\002\000\ +\002\000" + +let yydefred = "\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\112\002\000\000\000\000\000\000\ +\169\002\114\002\000\000\000\000\000\000\000\000\000\000\111\002\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\217\002\218\002\000\000\000\000\ +\000\000\000\000\219\002\220\002\000\000\000\000\113\002\170\002\ +\000\000\000\000\175\002\030\001\000\000\000\000\035\003\000\000\ +\000\000\000\000\000\000\094\001\000\000\050\000\000\000\055\000\ +\056\000\000\000\058\000\059\000\060\000\000\000\062\000\063\000\ +\000\000\000\000\066\000\000\000\068\000\074\000\007\002\121\000\ +\000\000\203\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\031\001\032\001\162\002\112\001\226\001\000\000\000\000\000\000\ +\000\000\000\000\000\000\036\003\000\000\093\000\092\000\000\000\ +\100\000\101\000\000\000\000\000\106\000\000\000\095\000\096\000\ +\097\000\098\000\000\000\102\000\000\000\114\000\199\000\005\000\ +\000\000\037\003\000\000\000\000\000\000\007\000\000\000\013\000\ +\000\000\038\003\000\000\000\000\000\000\010\000\011\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\177\002\063\002\039\003\000\000\080\002\055\002\000\000\ +\064\002\051\002\000\000\000\000\000\000\040\003\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\122\002\000\000\000\000\ +\000\000\000\000\177\001\041\003\000\000\000\000\198\001\171\001\ +\000\000\000\000\115\002\175\001\176\001\000\000\161\001\000\000\ +\183\001\000\000\000\000\000\000\000\000\121\002\120\002\193\002\ +\079\001\033\001\034\001\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\133\001\000\000\083\001\110\002\000\000\ +\000\000\000\000\166\002\000\000\000\000\069\001\000\000\223\002\ +\224\002\225\002\226\002\227\002\228\002\229\002\230\002\231\002\ +\232\002\233\002\234\002\235\002\236\002\237\002\238\002\239\002\ +\240\002\241\002\242\002\243\002\244\002\245\002\246\002\247\002\ +\221\002\248\002\249\002\250\002\251\002\252\002\253\002\254\002\ +\255\002\000\003\001\003\002\003\003\003\004\003\005\003\006\003\ +\007\003\008\003\009\003\010\003\222\002\011\003\012\003\013\003\ +\014\003\015\003\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\125\002\152\002\151\002\000\000\150\002\000\000\153\002\ +\146\002\148\002\128\002\129\002\130\002\131\002\132\002\000\000\ +\147\002\000\000\000\000\000\000\149\002\155\002\000\000\000\000\ +\154\002\000\000\167\002\139\002\145\002\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\212\002\000\000\078\001\ +\052\000\000\000\000\000\000\000\000\000\001\000\000\000\000\000\ +\000\000\000\000\053\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\029\001\000\000\000\000\113\001\ +\000\000\227\001\000\000\075\000\000\000\122\000\000\000\204\000\ +\067\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\095\001\098\001\000\000\000\000\ +\000\000\012\001\013\001\000\000\000\000\000\000\000\000\090\000\ 
+\000\000\002\000\105\000\091\000\000\000\115\000\000\000\200\000\ +\000\000\003\000\004\000\006\000\009\000\014\000\000\000\000\000\ +\000\000\019\000\000\000\018\000\000\000\173\002\000\000\085\002\ +\000\000\000\000\214\002\000\000\076\002\000\000\106\002\068\002\ +\000\000\000\000\000\000\000\000\000\000\000\000\103\002\000\000\ +\000\000\000\000\000\000\000\000\000\000\062\002\184\002\000\000\ +\069\002\020\000\052\002\000\000\000\000\000\000\000\000\000\000\ +\000\000\065\002\021\000\000\000\000\000\171\002\000\000\000\000\ +\000\000\000\000\000\000\000\000\204\001\000\000\140\002\000\000\ +\144\002\000\000\000\000\142\002\127\002\000\000\117\002\116\002\ +\119\002\118\002\182\001\000\000\000\000\000\000\000\000\022\000\ +\160\001\000\000\172\001\173\001\000\000\000\000\000\000\000\000\ +\026\003\000\000\000\000\000\000\000\000\038\001\000\000\000\000\ +\205\002\000\000\160\002\000\000\000\000\161\002\156\002\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\218\000\180\001\181\001\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\035\000\037\000\000\000\000\000\000\000\ +\000\000\000\000\150\001\000\000\064\001\063\001\000\000\000\000\ +\082\001\081\001\000\000\139\001\000\000\000\000\000\000\000\000\ +\000\000\030\003\000\000\000\000\000\000\000\000\000\000\000\000\ +\195\002\000\000\168\002\000\000\000\000\000\000\126\002\000\000\ +\036\001\035\001\000\000\124\002\123\002\000\000\000\000\000\000\ +\000\000\000\000\080\001\000\000\000\000\151\000\000\000\000\000\ +\197\002\000\000\000\000\000\000\000\000\049\000\022\003\000\000\ +\000\000\000\000\000\000\000\000\176\002\163\002\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\209\000\000\000\000\000\230\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\103\001\ +\101\001\087\001\000\000\100\001\096\001\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\087\000\078\000\ +\180\002\000\000\000\000\000\000\000\000\000\000\000\000\191\002\ +\188\002\187\002\192\002\000\000\189\002\017\000\000\000\016\000\ +\012\000\084\002\000\000\082\002\000\000\087\002\072\002\000\000\ +\000\000\000\000\000\000\109\002\067\002\100\002\101\002\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\098\002\000\000\ +\174\002\178\002\000\000\000\000\000\000\070\002\159\001\174\001\ +\000\000\000\000\000\000\200\001\199\001\000\000\000\000\000\000\ +\000\000\000\000\191\001\000\000\190\001\153\001\152\001\158\001\ +\000\000\156\001\000\000\208\001\000\000\000\000\000\000\184\001\ +\000\000\179\001\000\000\027\003\024\003\000\000\000\000\000\000\ +\000\000\041\001\000\000\000\000\000\000\039\001\037\001\000\000\ +\000\000\000\000\157\002\000\000\158\002\000\000\000\000\000\000\ +\000\000\143\002\000\000\141\002\000\000\000\000\217\000\000\000\ +\219\000\000\000\220\000\214\000\225\000\000\000\212\000\000\000\ +\216\000\000\000\000\000\000\000\000\000\235\000\000\000\000\000\ +\121\001\000\000\000\000\000\000\000\000\000\000\000\000\069\000\ +\033\000\036\000\000\000\000\000\132\001\148\001\000\000\149\001\ +\000\000\000\000\135\001\000\000\140\001\000\000\074\001\073\001\ +\068\001\067\001\031\003\000\000\000\000\028\003\017\003\029\003\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\170\001\000\000\000\000\000\000\000\000\000\000\040\001\020\003\ 
+\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\028\001\027\001\000\000\000\000\000\000\000\000\254\001\ +\253\001\000\000\244\001\000\000\000\000\000\000\000\000\000\000\ +\085\001\000\000\076\001\000\000\071\001\000\000\000\000\000\000\ +\043\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\108\000\088\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\015\000\000\000\073\002\088\002\ +\000\000\000\000\000\000\077\002\075\002\000\000\000\000\000\000\ +\049\002\000\000\000\000\000\000\000\000\000\000\066\002\000\000\ +\000\000\185\002\000\000\000\000\179\002\054\002\172\002\000\000\ +\000\000\000\000\217\001\000\000\202\001\201\001\205\001\203\001\ +\000\000\194\001\000\000\185\001\189\001\186\001\000\000\018\003\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\002\000\000\159\002\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\012\002\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\126\001\128\001\000\000\000\000\000\000\000\000\ +\028\000\000\000\000\000\041\000\000\000\040\000\000\000\034\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\114\001\ +\000\000\000\000\000\000\000\000\000\000\106\001\000\000\000\000\ +\000\000\000\000\000\000\169\001\000\000\000\000\138\002\136\002\ +\134\002\000\000\089\001\000\000\000\000\000\000\000\000\000\000\ +\000\000\023\000\025\000\026\000\000\000\072\000\073\000\000\000\ +\148\000\000\000\000\000\000\000\000\000\000\000\000\000\159\000\ +\152\000\107\000\239\000\000\000\247\001\000\000\000\000\000\000\ +\000\000\250\001\246\001\000\000\000\000\019\003\066\001\065\001\ +\086\001\084\001\000\000\000\000\165\002\000\000\044\001\042\001\ +\210\000\115\001\000\000\000\000\000\000\000\000\000\000\062\001\ +\048\001\000\000\046\001\000\000\000\000\000\000\000\000\000\000\ +\054\001\000\000\050\001\000\000\052\001\000\000\000\000\000\000\ +\086\000\085\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\037\002\000\000\181\002\000\000\000\000\000\000\000\000\000\000\ +\112\000\000\000\000\000\000\000\083\002\090\002\000\000\074\002\ +\092\002\000\000\000\000\000\000\000\000\000\000\000\000\079\002\ +\071\002\000\000\099\002\000\000\216\002\216\001\000\000\195\001\ +\193\001\192\001\188\001\187\001\061\001\047\001\045\001\000\000\ +\000\000\000\000\053\001\049\001\051\001\000\000\000\000\129\000\ +\000\000\251\001\000\000\000\000\000\000\000\000\203\002\000\000\ +\000\000\017\002\000\000\000\000\000\000\000\000\009\002\000\000\ +\199\002\198\002\000\000\105\001\000\000\000\000\000\000\000\000\ +\000\000\000\000\215\000\000\000\000\000\125\001\123\001\000\000\ +\122\001\000\000\000\000\027\000\000\000\000\000\031\000\030\000\ +\000\000\034\003\232\000\010\002\000\000\000\000\000\000\000\000\ +\118\001\000\000\000\000\116\001\119\001\000\000\163\001\162\001\ +\168\001\000\000\166\001\000\000\211\001\000\000\110\001\000\000\ +\000\000\091\001\000\000\000\000\000\000\120\000\076\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\158\000\000\000\000\000\245\001\000\000\231\001\000\000\ +\249\001\222\001\245\000\077\001\075\001\072\001\070\001\000\000\ +\231\001\077\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ 
+\080\000\079\000\000\000\000\000\000\000\000\000\113\000\111\000\ +\000\000\000\000\000\000\000\000\000\000\086\002\078\002\093\002\ +\050\002\046\002\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\001\002\255\001\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\177\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\138\000\000\000\000\000\000\000\ +\140\000\123\000\127\000\000\000\016\002\019\002\013\002\000\000\ +\008\002\000\000\000\000\000\000\236\000\000\000\222\000\213\000\ +\211\000\000\000\127\001\000\000\000\000\000\000\000\000\048\000\ +\000\000\000\000\042\000\039\000\038\000\231\000\233\000\000\000\ +\000\000\000\000\000\000\107\001\000\000\090\001\000\000\000\000\ +\149\000\000\000\000\000\000\000\000\000\000\000\155\000\000\000\ +\154\000\248\001\000\000\237\001\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\002\002\003\002\000\000\000\000\201\002\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\060\001\000\000\056\001\000\000\058\001\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\038\002\ +\116\000\000\000\000\000\117\000\000\000\091\002\108\002\197\001\ +\196\001\059\001\055\001\057\001\000\000\182\002\181\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\180\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\137\000\000\000\000\000\224\001\225\001\000\000\ +\129\001\124\001\046\000\000\000\047\000\000\000\000\000\000\000\ +\000\000\117\001\111\001\024\000\000\000\156\000\000\000\157\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\238\001\ +\000\000\000\000\000\000\000\000\004\002\000\000\000\000\228\001\ +\000\000\000\000\000\000\024\002\025\002\026\002\027\002\093\001\ +\000\000\229\001\124\000\000\000\000\000\000\000\000\000\201\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\030\002\031\002\000\000\205\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\186\000\000\000\000\000\000\000\175\000\ +\000\000\000\000\133\000\000\000\000\000\146\000\000\000\145\000\ +\000\000\000\000\000\000\000\000\000\000\043\000\045\000\000\000\ +\000\000\120\001\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\005\002\000\000\ +\230\001\000\000\000\000\000\000\022\002\028\002\029\002\092\001\ +\206\000\000\000\000\000\000\000\040\002\044\002\231\001\110\000\ +\000\000\023\002\032\002\202\000\183\002\176\000\000\000\000\000\ +\000\000\179\000\178\000\000\000\173\000\000\000\000\000\131\000\ +\139\000\000\000\000\000\142\000\141\000\000\000\246\000\000\000\ +\000\000\108\001\160\000\153\000\000\000\000\000\000\000\168\000\ +\000\000\000\000\000\000\000\000\006\002\241\001\000\000\000\000\ +\239\001\000\000\000\000\000\000\000\000\033\002\000\000\000\000\ +\174\000\184\000\000\000\000\000\000\000\000\000\000\000\193\000\ +\187\000\000\000\000\000\000\000\144\000\143\000\000\000\044\000\ +\109\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\164\000\000\000\000\000\000\000\000\000\034\002\035\002\ +\000\000\000\000\000\000\000\000\000\000\192\000\172\000\000\000\ +\021\002\166\000\167\000\000\000\000\000\000\000\000\000\000\000\ +\165\000\242\001\036\002\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\169\000\ +\000\000\191\000\188\000\209\002\210\002\000\000\000\000\000\000\ +\000\000\189\000\000\000\000\000\000\000\000\000\000\000\000\000\ 
+\000\000\000\000\000\000\170\000\190\000\000\000\000\000" + +let yydgoto = "\008\000\ +\055\000\100\000\122\000\130\000\148\000\158\000\172\000\034\002\ +\101\000\123\000\131\000\057\000\072\001\126\000\058\000\134\000\ +\135\000\178\001\214\001\055\003\245\003\131\003\202\003\005\003\ +\059\000\233\001\012\002\101\001\060\000\061\000\132\003\062\000\ +\160\000\064\000\065\000\066\000\067\000\068\000\069\000\070\000\ +\071\000\072\000\073\000\074\000\075\000\076\000\077\000\025\001\ +\056\003\078\000\108\001\136\002\056\004\110\000\111\000\079\000\ +\113\000\114\000\115\000\116\000\117\000\063\001\112\003\118\000\ +\142\001\238\003\137\002\080\000\110\001\242\001\226\002\109\004\ +\007\005\251\004\253\002\169\003\211\005\008\005\123\001\179\001\ +\009\005\061\002\062\002\060\003\001\004\229\005\185\004\183\004\ +\051\005\081\000\112\004\155\004\070\006\066\005\156\004\187\003\ +\252\004\151\000\254\004\203\005\204\005\012\006\057\006\109\006\ +\105\006\241\005\119\000\144\001\082\000\112\001\019\001\190\003\ +\128\004\191\003\189\003\244\002\176\000\083\000\033\003\164\001\ +\000\003\254\002\084\000\085\000\086\000\123\004\087\000\088\000\ +\210\000\089\000\090\000\211\000\221\000\028\002\217\000\125\001\ +\126\001\121\002\037\003\091\000\071\006\039\003\181\000\092\000\ +\104\001\042\002\157\004\001\003\152\000\212\000\213\000\020\002\ +\218\000\182\000\183\000\042\003\184\000\153\000\185\000\201\001\ +\204\001\202\001\187\002\019\005\093\000\106\001\066\002\066\003\ +\191\004\071\005\067\005\113\004\067\003\006\004\068\003\011\004\ +\171\003\106\004\068\005\069\005\070\005\233\002\176\003\177\003\ +\114\004\115\004\128\003\171\005\193\005\172\005\173\005\174\005\ +\175\005\057\004\189\005\154\000\155\000\156\000\157\000\172\001\ +\154\002\155\002\156\002\074\004\121\003\071\004\173\001\174\001\ +\175\001\055\001\020\001\035\002\073\001" + +let yysindex = "\141\009\ +\228\067\075\007\170\051\083\051\204\051\233\070\196\074\000\000\ +\155\005\110\002\080\074\155\005\000\000\184\003\155\005\155\005\ +\000\000\000\000\155\005\155\005\155\005\155\005\155\005\000\000\ +\155\005\225\076\083\004\058\068\146\068\170\063\170\063\014\004\ +\000\000\024\061\170\063\155\005\000\000\000\000\087\005\155\005\ +\155\005\134\255\000\000\000\000\080\074\228\067\000\000\000\000\ +\155\005\155\005\000\000\000\000\155\005\155\005\000\000\160\000\ +\229\255\096\018\048\000\000\000\151\080\000\000\108\004\000\000\ +\000\000\195\000\000\000\000\000\000\000\075\001\000\000\000\000\ +\110\001\176\001\000\000\229\255\000\000\000\000\000\000\000\000\ +\171\000\000\000\105\076\218\001\080\074\080\074\233\070\233\070\ +\000\000\000\000\000\000\000\000\000\000\184\003\155\005\155\005\ +\087\005\075\007\155\005\000\000\049\003\000\000\000\000\195\000\ +\000\000\000\000\176\001\229\255\000\000\075\007\000\000\000\000\ +\000\000\000\000\128\002\000\000\177\002\000\000\000\000\000\000\ +\110\002\000\000\137\002\160\002\229\255\000\000\143\005\000\000\ +\035\052\000\000\171\007\229\255\171\007\000\000\000\000\151\044\ +\000\004\085\255\082\013\202\003\041\048\204\051\206\003\110\002\ +\058\003\000\000\000\000\000\000\070\000\000\000\000\000\222\003\ +\000\000\000\000\030\002\126\001\097\003\000\000\071\005\108\004\ +\155\005\155\005\208\003\199\073\006\074\000\000\136\065\090\003\ +\235\005\069\004\000\000\000\000\175\000\106\004\000\000\000\000\ +\196\074\196\074\000\000\000\000\000\000\162\004\000\000\154\004\ +\000\000\170\063\170\063\116\004\080\074\000\000\000\000\000\000\ 
+\000\000\000\000\000\000\231\068\155\005\147\004\023\002\116\003\ +\196\074\244\072\000\004\233\070\143\002\080\074\000\000\002\005\ +\070\001\197\003\149\255\000\000\241\004\000\000\000\000\092\005\ +\146\004\046\005\000\000\110\081\057\005\000\000\057\005\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\141\067\192\005\141\067\155\005\155\005\134\255\ +\162\005\000\000\000\000\000\000\080\074\000\000\154\005\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\203\006\ +\000\000\000\000\000\000\131\001\000\000\000\000\000\000\000\000\ +\000\000\080\074\000\000\000\000\000\000\156\000\166\255\141\067\ +\233\070\155\005\184\005\058\003\001\006\000\000\155\005\000\000\ +\000\000\233\070\211\005\116\003\233\070\000\000\170\063\096\018\ +\229\255\155\005\000\000\075\006\209\005\233\070\233\070\233\070\ +\233\070\233\070\233\070\233\070\233\070\233\070\233\070\233\070\ +\233\070\233\070\233\070\233\070\233\070\233\070\233\070\233\070\ +\233\070\233\070\060\069\233\070\000\000\116\004\233\070\000\000\ +\116\004\000\000\116\004\000\000\116\004\000\000\116\004\000\000\ +\000\000\233\070\169\004\225\006\080\074\080\074\027\006\068\006\ +\080\074\027\006\165\076\043\002\000\000\000\000\233\070\043\002\ +\043\002\000\000\000\000\147\004\023\002\075\004\236\005\000\000\ +\211\005\000\000\000\000\000\000\116\004\000\000\116\004\000\000\ +\195\003\000\000\000\000\000\000\000\000\000\000\171\007\229\255\ +\171\007\000\000\171\007\000\000\136\012\000\000\130\005\000\000\ +\042\006\138\006\000\000\136\012\000\000\136\012\000\000\000\000\ +\000\000\134\006\060\006\133\006\170\040\170\040\000\000\204\051\ +\155\005\116\004\009\001\104\006\167\006\000\000\000\000\164\006\ +\000\000\000\000\000\000\216\041\077\004\081\006\098\006\204\051\ +\058\003\000\000\000\000\196\074\009\076\000\000\173\006\179\006\ +\206\255\107\006\070\005\109\006\000\000\109\006\000\000\090\003\ +\000\000\131\001\235\005\000\000\000\000\102\002\000\000\000\000\ +\000\000\000\000\000\000\033\002\031\066\092\066\153\066\000\000\ +\000\000\176\003\000\000\000\000\196\074\242\001\141\067\116\004\ +\000\000\116\004\043\002\058\005\231\006\000\000\041\003\147\004\ +\000\000\153\006\000\000\119\006\140\000\000\000\000\000\078\002\ +\188\077\208\006\178\003\009\076\059\064\125\002\245\005\050\006\ +\048\072\000\000\000\000\000\000\196\074\110\006\116\004\254\003\ +\116\004\026\007\205\006\000\000\000\000\043\002\255\007\208\003\ +\202\009\124\017\000\000\202\006\000\000\000\000\208\003\233\070\ +\000\000\000\000\068\006\000\000\233\070\104\255\211\004\013\082\ +\196\074\000\000\149\006\170\063\155\006\023\002\142\006\155\005\ +\000\000\116\075\000\000\157\006\161\006\175\006\000\000\143\002\ +\000\000\000\000\182\006\000\000\000\000\163\006\166\006\110\002\ +\158\006\084\003\000\000\196\074\120\003\000\000\186\006\189\006\ +\000\000\092\006\252\006\006\007\141\067\000\000\000\000\225\076\ +\038\005\145\069\233\069\135\061\000\000\000\000\235\081\235\081\ +\203\081\110\013\110\081\203\081\184\012\184\012\184\012\184\012\ +\053\004\255\006\255\006\184\012\053\004\053\004\203\081\255\006\ +\053\004\053\004\053\004\170\063\000\000\255\006\116\075\000\000\ 
+\092\006\207\006\147\004\147\004\110\081\233\070\233\070\233\070\ +\252\001\249\006\233\070\233\070\233\070\043\002\043\002\000\000\ +\000\000\000\000\065\004\000\000\000\000\203\081\153\006\151\255\ +\116\004\075\004\213\006\116\004\000\000\163\002\000\000\000\000\ +\000\000\175\002\216\006\041\004\092\006\221\006\147\004\000\000\ +\000\000\000\000\000\000\055\007\000\000\000\000\171\007\000\000\ +\000\000\000\000\253\255\000\000\078\007\000\000\000\000\136\012\ +\127\001\112\000\120\054\000\000\000\000\000\000\000\000\008\007\ +\075\004\204\051\015\005\204\051\204\051\135\004\000\000\239\006\ +\000\000\000\000\220\001\110\002\022\007\000\000\000\000\000\000\ +\194\004\204\051\070\007\000\000\000\000\203\004\196\074\174\000\ +\024\006\247\006\000\000\254\046\000\000\000\000\000\000\000\000\ +\141\002\000\000\091\007\000\000\035\002\067\074\226\065\000\000\ +\035\002\000\000\015\007\000\000\000\000\233\070\233\070\233\070\ +\160\005\000\000\233\070\233\070\233\070\000\000\000\000\153\006\ +\237\005\044\007\000\000\018\007\000\000\013\041\179\002\013\041\ +\116\004\000\000\114\007\000\000\204\051\233\070\000\000\051\007\ +\000\000\196\074\000\000\000\000\000\000\053\007\000\000\053\007\ +\000\000\216\041\170\064\233\070\048\072\000\000\182\255\111\007\ +\000\000\233\070\056\007\116\004\033\001\228\067\247\002\000\000\ +\000\000\000\000\017\007\000\000\000\000\000\000\108\255\000\000\ +\116\004\233\070\000\000\110\081\000\000\110\081\000\000\000\000\ +\000\000\000\000\000\000\116\004\247\000\000\000\000\000\000\000\ +\087\007\151\255\084\003\186\006\229\255\216\071\239\004\115\007\ +\000\000\110\007\068\007\069\007\071\007\139\001\000\000\000\000\ +\000\004\109\007\084\003\075\004\143\002\150\005\084\003\229\255\ +\210\002\000\000\000\000\167\001\183\002\018\000\237\005\000\000\ +\000\000\209\004\000\000\136\002\204\051\233\070\047\007\242\255\ +\000\000\251\004\000\000\057\005\000\000\057\005\046\007\131\001\ +\000\000\184\255\233\070\229\255\077\007\084\003\153\006\153\006\ +\212\080\151\001\192\000\194\255\245\006\233\070\011\078\043\078\ +\121\078\080\007\056\007\119\255\063\007\075\007\075\004\060\255\ +\000\000\000\000\028\005\131\007\075\004\186\006\205\005\229\255\ +\209\004\134\007\153\006\017\003\000\000\136\012\000\000\000\000\ +\204\051\218\000\144\007\000\000\000\000\110\002\111\001\116\004\ +\000\000\204\051\255\002\058\007\116\004\058\003\000\000\022\007\ +\081\007\000\000\216\041\048\007\000\000\000\000\000\000\116\004\ +\196\074\065\007\000\000\070\005\000\000\000\000\000\000\000\000\ +\087\001\000\000\129\255\000\000\000\000\000\000\053\003\000\000\ +\017\081\048\001\246\255\020\007\153\078\231\078\007\079\102\007\ +\129\001\082\007\000\000\129\072\000\000\092\007\000\000\095\007\ +\239\006\083\007\144\001\151\007\116\004\000\000\229\255\245\001\ +\102\000\051\007\084\007\108\006\150\007\150\007\165\007\093\007\ +\108\007\051\007\000\000\000\000\063\070\233\070\196\074\049\081\ +\000\000\201\003\233\070\000\000\075\004\000\000\150\003\000\000\ +\204\051\110\081\233\070\233\070\116\004\142\007\228\004\000\000\ +\162\015\233\070\025\065\021\072\164\007\000\000\152\002\214\066\ +\019\067\080\067\233\070\000\000\204\051\196\074\000\000\000\000\ +\000\000\122\000\000\000\196\074\075\004\229\255\229\255\107\001\ +\053\006\000\000\000\000\000\000\180\007\000\000\000\000\204\051\ +\000\000\116\004\134\255\116\004\134\255\134\255\229\255\000\000\ +\000\000\000\000\000\000\196\074\000\000\207\001\168\007\112\007\ +\110\002\000\000\000\000\151\006\175\007\000\000\000\000\000\000\ 
+\000\000\000\000\060\001\168\006\000\000\143\002\000\000\000\000\ +\000\000\000\000\168\007\229\255\137\007\139\007\147\007\000\000\ +\000\000\148\007\000\000\154\007\233\070\233\070\233\070\110\081\ +\000\000\157\007\000\000\158\007\000\000\159\007\199\007\033\006\ +\000\000\000\000\116\004\159\004\255\002\186\006\092\006\219\007\ +\000\000\000\000\000\000\075\004\255\002\183\002\098\002\211\007\ +\000\000\140\007\075\004\163\007\000\000\000\000\072\001\000\000\ +\000\000\172\255\000\000\204\051\110\002\138\007\022\007\000\000\ +\000\000\204\051\000\000\070\005\000\000\000\000\075\004\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\233\070\ +\233\070\233\070\000\000\000\000\000\000\202\007\237\005\000\000\ +\110\002\000\000\136\050\096\005\229\255\129\072\000\000\068\006\ +\141\007\000\000\092\007\216\041\009\002\229\255\000\000\135\007\ +\000\000\000\000\233\070\000\000\048\072\204\051\233\070\146\007\ +\149\007\204\051\000\000\233\070\152\007\000\000\000\000\162\007\ +\000\000\233\070\143\002\000\000\100\077\137\255\000\000\000\000\ +\116\004\000\000\000\000\000\000\233\070\233\070\051\007\142\001\ +\000\000\051\007\214\007\000\000\000\000\233\070\000\000\000\000\ +\000\000\141\002\000\000\091\007\000\000\035\002\000\000\121\002\ +\035\002\000\000\156\007\111\007\255\002\000\000\000\000\143\002\ +\075\004\248\003\204\051\116\004\233\070\116\004\229\255\116\004\ +\229\255\000\000\111\007\237\005\000\000\031\077\000\000\160\007\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\209\002\ +\000\000\000\000\129\072\215\007\233\070\233\070\233\070\094\079\ +\126\079\204\079\233\070\233\070\233\070\136\050\075\004\143\002\ +\000\000\000\000\148\006\208\003\060\255\163\002\000\000\000\000\ +\075\004\160\007\163\002\224\007\204\051\000\000\000\000\000\000\ +\000\000\000\000\116\004\022\007\059\000\236\079\058\080\090\080\ +\163\005\000\000\000\000\054\012\173\007\230\007\116\004\216\041\ +\190\007\000\000\231\007\116\004\186\007\000\000\190\002\116\004\ +\204\051\217\005\096\005\116\004\000\000\249\004\116\004\165\076\ +\000\000\000\000\000\000\246\007\000\000\000\000\000\000\247\007\ +\000\000\135\007\229\255\241\007\000\000\116\004\000\000\000\000\ +\000\000\116\004\000\000\048\072\233\070\110\081\053\006\000\000\ +\243\000\237\002\000\000\000\000\000\000\000\000\000\000\242\007\ +\204\051\172\007\233\070\000\000\233\070\000\000\053\006\100\005\ +\000\000\250\002\229\255\096\005\229\255\195\001\000\000\234\004\ +\000\000\000\000\023\002\000\000\127\049\148\014\097\047\000\000\ +\096\003\217\007\007\008\000\000\000\000\151\255\063\002\000\000\ +\150\255\078\003\063\002\229\255\163\005\110\081\110\081\110\081\ +\000\000\216\007\000\000\218\007\000\000\221\007\110\081\110\081\ +\110\081\229\255\255\002\053\006\081\006\081\006\043\005\000\000\ +\000\000\079\006\174\255\000\000\136\050\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\204\051\000\000\000\000\151\006\ +\229\002\190\001\222\003\134\255\216\041\208\007\203\007\014\008\ +\096\005\000\000\136\050\045\005\049\073\202\001\134\255\166\000\ +\001\006\096\005\000\000\165\076\120\054\000\000\000\000\233\070\ +\000\000\000\000\000\000\249\255\000\000\193\007\204\051\215\003\ +\021\072\000\000\000\000\000\000\204\051\000\000\025\001\000\000\ +\177\007\160\007\068\006\181\007\092\007\068\006\151\255\000\000\ +\116\004\007\008\160\007\092\007\000\000\116\004\204\051\000\000\ +\023\002\082\002\194\001\000\000\000\000\000\000\000\000\000\000\ +\201\007\000\000\000\000\151\006\233\070\233\070\233\070\000\000\ 
+\168\003\168\003\204\051\228\007\204\051\098\002\023\002\151\255\ +\248\001\000\000\000\000\229\255\000\000\053\005\068\005\116\004\ +\223\007\204\051\200\004\000\000\136\050\216\041\116\004\000\000\ +\000\000\217\072\000\000\058\003\116\004\000\000\136\050\000\000\ +\026\005\116\004\116\004\022\008\075\004\000\000\000\000\008\004\ +\233\070\000\000\116\004\238\007\229\255\068\006\068\006\156\072\ +\068\006\068\006\103\006\116\004\101\003\212\007\000\000\052\004\ +\000\000\246\002\179\002\116\004\000\000\000\000\000\000\000\000\ +\000\000\110\081\110\081\110\081\000\000\000\000\000\000\000\000\ +\151\255\000\000\000\000\000\000\000\000\000\000\186\006\136\050\ +\103\004\000\000\000\000\158\001\000\000\234\007\096\005\000\000\ +\000\000\186\006\159\000\000\000\000\000\222\007\000\000\227\007\ +\233\070\000\000\000\000\000\000\048\008\052\008\140\048\000\000\ +\054\008\056\008\233\070\050\008\000\000\000\000\092\007\007\008\ +\000\000\204\051\179\002\116\004\116\004\000\000\060\008\036\005\ +\000\000\000\000\116\004\116\004\116\004\116\004\229\255\000\000\ +\000\000\136\050\116\004\088\005\000\000\000\000\116\004\000\000\ +\000\000\120\054\120\054\051\007\116\004\053\008\238\001\204\051\ +\204\051\000\000\233\070\240\007\116\004\116\004\000\000\000\000\ +\163\005\204\051\163\005\220\003\033\003\000\000\000\000\096\005\ +\000\000\000\000\000\000\062\008\233\070\204\051\116\004\116\004\ +\000\000\000\000\000\000\116\004\229\255\151\006\225\007\250\007\ +\068\006\147\004\092\007\073\008\229\255\116\004\204\051\000\000\ +\116\004\000\000\000\000\000\000\000\000\074\008\068\006\068\006\ +\204\051\000\000\057\004\120\054\077\008\079\008\116\004\233\070\ +\229\255\204\051\204\051\000\000\000\000\116\004\116\004" + +let yyrindex = "\000\000\ +\094\009\095\009\000\008\000\000\000\000\000\000\000\000\000\000\ +\232\076\000\000\000\000\148\070\000\000\022\003\029\003\171\006\ +\000\000\000\000\001\075\076\073\135\074\062\071\230\002\000\000\ +\232\076\000\000\000\000\000\000\000\000\000\000\000\000\028\075\ +\012\019\000\000\000\000\062\071\000\000\000\000\246\005\069\005\ +\015\002\042\004\000\000\000\000\000\000\099\000\000\000\000\000\ +\062\071\149\008\000\000\000\000\171\006\062\071\000\000\000\000\ +\176\040\099\000\128\019\000\000\016\046\000\000\149\013\000\000\ +\000\000\114\015\000\000\000\000\000\000\113\059\000\000\000\000\ +\122\059\171\059\000\000\215\059\000\000\000\000\000\000\000\000\ +\000\000\000\000\058\027\174\027\081\026\197\026\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\022\003\029\003\131\004\ +\246\005\116\000\149\008\000\000\000\000\000\000\000\000\222\041\ +\000\000\000\000\065\042\012\043\000\000\116\000\000\000\000\000\ +\000\000\000\000\111\043\000\000\058\044\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\002\008\000\000\000\008\000\000\ +\000\000\000\000\000\000\247\008\000\000\000\000\000\000\000\000\ +\134\014\134\014\000\000\079\010\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\247\010\ +\000\000\000\000\000\000\060\049\114\018\000\000\000\000\000\000\ +\001\075\036\076\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\150\052\000\000\000\000\ +\253\002\225\005\000\000\000\000\000\000\139\006\000\000\002\053\ +\000\000\000\000\000\000\165\060\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\022\003\239\255\000\000\000\000\ +\000\000\000\000\089\075\000\000\000\000\000\000\067\002\124\002\ 
+\000\000\227\255\000\000\000\000\037\000\000\000\000\000\170\255\ +\000\000\142\005\000\000\117\255\095\001\000\000\199\006\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\008\008\052\060\008\008\029\003\251\007\042\004\ +\177\075\000\000\000\000\000\000\167\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\076\062\162\062\230\002\000\000\000\000\248\062\078\063\ +\000\000\185\000\000\000\000\000\000\000\000\000\000\000\008\008\ +\000\000\069\005\000\000\000\000\002\004\000\000\251\007\000\000\ +\000\000\000\000\079\005\000\000\000\000\000\000\000\000\099\000\ +\222\055\028\075\000\000\149\013\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\220\035\000\000\000\000\204\075\000\000\000\000\ +\212\004\000\000\252\007\000\000\108\003\000\000\108\003\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\237\017\244\024\000\000\000\000\000\000\034\028\ +\151\028\000\000\000\000\239\255\000\000\000\000\000\000\000\000\ +\079\005\000\000\000\000\000\000\252\007\000\000\108\003\000\000\ +\059\014\000\000\000\000\000\000\000\000\000\000\000\000\247\008\ +\000\000\000\000\000\000\000\000\000\000\000\000\153\001\000\000\ +\095\008\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\207\255\000\000\076\008\000\000\078\008\084\008\000\000\000\000\ +\131\004\096\001\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\036\000\ +\000\000\146\000\068\000\095\001\000\000\199\006\000\000\235\000\ +\000\000\251\007\238\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\008\008\165\060\ +\000\000\229\050\011\029\000\000\000\000\000\000\000\000\239\255\ +\000\000\045\008\000\000\000\000\000\000\000\000\000\000\221\057\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\057\008\000\000\ +\246\061\215\059\064\004\000\000\000\000\127\029\000\000\000\000\ +\000\000\000\000\000\000\146\255\000\000\000\000\228\000\000\000\ +\000\000\000\000\148\005\000\000\090\001\000\000\000\000\018\008\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\251\007\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\175\004\000\000\000\000\008\008\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\138\038\242\038\ +\090\039\034\016\245\040\194\039\080\036\197\036\057\037\173\037\ +\034\033\244\029\104\030\034\038\150\033\011\034\042\040\220\030\ +\127\034\243\034\104\035\000\000\000\000\081\031\000\000\000\000\ +\085\001\000\000\239\255\239\255\088\041\000\000\000\000\000\000\ +\000\000\244\019\000\000\000\000\000\000\104\025\221\025\000\000\ 
+\000\000\000\000\128\024\000\000\000\000\146\040\045\008\117\011\ +\057\008\000\000\000\000\124\012\096\007\012\043\000\000\000\000\ +\000\000\000\000\000\000\000\000\175\004\000\000\239\255\000\000\ +\000\000\000\000\000\000\061\014\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\173\061\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\230\046\000\000\000\000\000\000\000\000\073\047\ +\000\000\000\000\000\000\000\000\172\047\000\000\000\000\000\000\ +\000\000\000\000\156\255\000\000\000\000\245\000\090\001\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\016\001\000\000\067\006\000\000\202\000\000\000\000\000\000\000\ +\118\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\045\008\ +\023\008\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\139\058\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\197\031\ +\000\000\000\000\000\000\147\071\000\000\204\005\000\000\000\000\ +\000\000\000\000\000\000\086\002\000\000\000\000\217\255\000\000\ +\067\000\000\000\000\000\006\000\000\000\144\000\000\000\000\000\ +\000\000\000\000\000\000\155\006\029\008\000\000\000\000\000\000\ +\000\000\170\005\000\000\000\000\230\057\028\007\000\000\188\006\ +\000\000\019\004\003\001\018\001\062\001\000\000\000\000\000\000\ +\089\075\204\058\000\000\000\000\000\000\000\000\000\000\215\059\ +\000\000\000\000\000\000\216\005\215\059\089\075\159\005\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\095\001\000\000\199\006\000\000\230\002\ +\000\000\000\000\000\000\230\057\000\000\000\000\045\008\045\008\ +\000\000\142\081\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\219\005\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\012\043\ +\000\000\000\000\045\008\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\230\001\ +\000\000\000\000\008\001\000\000\147\001\000\000\000\000\017\048\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\182\000\ +\000\000\002\001\000\000\217\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\069\008\187\054\000\000\ +\106\055\000\000\000\000\189\007\139\058\000\000\215\059\000\000\ +\000\000\009\000\000\000\250\255\040\008\040\008\254\255\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\120\046\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\172\000\000\000\000\000\083\008\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\215\059\253\058\000\000\ +\138\008\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\148\077\018\005\147\071\079\002\134\003\168\004\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\182\009\000\000\ +\000\000\000\000\000\000\215\059\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\082\056\253\058\000\000\000\000\105\020\000\000\ 
+\000\000\221\020\000\000\081\021\000\000\000\000\000\000\192\041\ +\000\000\198\021\000\000\058\022\000\000\174\022\000\000\000\000\ +\000\000\000\000\252\004\000\000\197\006\000\000\175\004\246\006\ +\000\000\089\008\000\000\000\000\252\052\012\043\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\153\001\000\000\ +\000\000\000\000\238\063\000\000\000\000\099\008\116\048\000\000\ +\000\000\000\000\000\000\230\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\023\008\000\000\ +\000\000\000\000\000\000\000\000\253\058\000\000\000\000\000\000\ +\000\000\000\000\085\005\000\000\000\000\215\059\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\197\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\152\006\000\000\170\003\000\000\032\006\000\000\000\000\ +\117\006\000\000\000\000\057\032\007\059\000\000\000\000\000\000\ +\000\000\000\000\000\000\248\005\000\000\038\004\168\004\117\004\ +\168\004\000\000\174\032\159\005\000\000\087\008\000\000\208\255\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\019\041\000\000\000\000\ +\000\000\208\255\019\041\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\144\016\215\048\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\037\015\059\008\000\000\ +\000\000\236\054\000\000\189\011\000\000\000\000\000\000\137\073\ +\000\000\028\075\000\000\047\003\000\000\000\000\027\058\110\053\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\069\059\215\059\000\000\000\000\058\000\000\000\000\000\ +\000\000\017\002\000\000\000\000\000\000\035\042\001\017\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\131\056\000\000\ +\000\000\000\000\168\004\000\000\168\004\072\008\000\000\069\008\ +\000\000\000\000\000\000\000\000\000\000\000\000\100\008\245\011\ +\184\056\000\000\237\056\000\000\000\000\147\016\253\058\000\000\ +\000\000\000\000\253\058\253\058\000\000\134\042\238\042\081\043\ +\000\000\035\023\000\000\151\023\000\000\011\024\180\043\028\044\ +\127\044\019\041\079\017\116\050\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\253\058\ +\000\000\000\000\119\002\146\003\000\000\194\002\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\146\003\000\000\ +\002\004\000\000\000\000\182\053\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\020\006\000\000\ +\094\008\072\008\000\000\101\008\069\008\000\000\147\016\000\000\ +\056\057\109\057\162\003\069\008\000\000\024\056\000\000\000\000\ +\000\000\234\012\215\059\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\253\058\000\000\000\000\000\000\000\000\ +\136\049\194\049\000\000\010\078\000\000\000\000\000\000\118\057\ +\012\043\000\000\000\000\019\041\000\000\000\000\000\000\252\007\ 
+\000\000\000\000\000\000\000\000\000\000\000\000\080\058\000\000\ +\030\055\000\000\000\000\000\000\252\007\000\000\000\000\000\000\ +\000\000\240\053\219\002\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\172\006\000\000\168\004\000\000\000\000\000\000\ +\000\000\000\000\000\000\024\056\000\000\000\000\000\000\000\000\ +\000\000\234\012\000\000\129\058\000\000\000\000\000\000\000\000\ +\000\000\226\044\074\045\173\045\000\000\000\000\000\000\000\000\ +\118\057\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\253\005\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\046\008\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\069\008\168\057\ +\000\000\000\000\000\000\129\058\129\058\000\000\252\049\000\000\ +\000\000\000\000\148\077\223\005\038\004\117\004\005\004\000\000\ +\000\000\000\000\042\054\000\000\000\000\000\000\111\005\000\000\ +\000\000\000\000\000\000\000\000\193\004\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\058\050\129\058\000\000\000\000\ +\000\000\000\000\000\000\104\008\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\077\009\186\010\ +\000\000\000\000\000\000\164\055\005\004\005\004\107\008\109\008\ +\000\000\110\008\069\008\000\000\005\004\100\054\000\000\000\000\ +\164\005\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\095\004\000\000\ +\005\004\000\000\000\000\000\000\000\000\050\009\222\010" + +let yygindex = "\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\033\000\ +\215\255\000\000\089\000\072\000\013\006\049\009\060\000\000\000\ +\214\255\126\000\233\001\099\253\000\000\217\254\078\006\071\255\ +\127\008\195\013\029\254\247\255\098\004\194\013\074\252\051\000\ +\093\000\023\000\026\000\034\000\000\000\000\000\000\000\000\000\ +\045\000\047\000\000\000\049\000\000\000\002\000\013\000\088\007\ +\093\001\000\000\000\000\000\000\000\000\000\000\000\000\052\000\ +\000\000\000\000\000\000\000\000\000\000\014\255\005\252\000\000\ +\000\000\000\000\027\000\000\000\000\000\142\254\251\253\032\252\ +\115\251\156\251\083\255\000\000\226\003\000\000\176\004\175\251\ +\113\255\059\004\000\000\000\000\000\000\000\000\000\000\000\000\ +\107\003\015\000\026\251\047\255\103\253\199\251\017\253\135\252\ +\095\251\043\254\247\003\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\046\000\200\006\ +\003\006\006\006\000\000\000\000\078\255\022\000\000\000\168\255\ +\184\001\059\253\000\254\108\010\156\012\000\000\000\000\000\000\ +\110\255\049\008\009\013\119\007\031\000\094\255\207\000\159\254\ +\000\000\080\008\100\007\216\011\115\253\000\000\078\254\000\000\ +\000\000\000\000\050\004\009\006\163\255\164\004\000\000\000\000\ +\000\000\000\000\073\000\000\000\235\007\157\255\254\007\021\007\ +\045\009\000\000\000\000\198\004\000\000\000\000\085\008\213\253\ +\190\005\138\251\021\251\213\251\011\253\000\000\097\253\000\000\ +\122\005\000\000\000\000\046\251\066\255\001\253\251\006\041\008\ +\000\000\000\000\099\004\000\000\000\000\137\004\078\252\000\000\ +\066\004\017\005\000\000\146\253\235\012\133\255\000\000\071\006\ +\128\255\220\254\141\255\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\082\255\000\000" + +let yytablesize = 21372 +let yytable = "\188\000\ +\019\002\185\001\188\000\108\000\188\000\188\000\188\000\161\001\ 
+\248\001\188\000\188\000\188\000\188\000\188\000\109\000\188\000\ +\200\001\127\002\180\001\010\002\162\001\127\003\188\000\002\002\ +\102\000\125\002\188\000\103\000\001\002\188\000\188\000\188\000\ +\193\000\056\000\213\003\104\000\192\000\040\002\009\003\188\000\ +\188\000\216\000\160\001\188\000\188\000\171\001\105\000\209\000\ +\106\000\166\001\107\000\171\004\129\003\112\000\222\003\196\003\ +\136\001\194\001\030\002\223\000\031\002\085\003\127\000\133\000\ +\124\004\170\003\064\001\224\004\140\001\186\001\219\001\014\005\ +\133\004\005\004\125\000\132\000\021\001\159\000\065\001\112\005\ +\056\001\158\005\075\005\162\005\188\000\188\000\188\000\188\000\ +\154\001\188\000\156\001\124\000\163\001\063\000\134\002\063\000\ +\063\000\113\003\051\000\108\000\051\002\201\003\037\002\023\003\ +\124\001\054\001\128\001\129\001\075\001\090\003\109\000\108\000\ +\165\005\090\005\005\002\089\000\143\001\128\005\049\004\040\003\ +\102\000\098\003\109\000\103\000\209\003\224\002\143\001\161\002\ +\089\004\162\002\149\000\104\000\102\000\054\004\073\005\103\000\ +\036\005\054\002\063\000\187\001\062\001\011\005\105\000\104\000\ +\106\000\138\001\107\000\145\001\021\002\112\000\076\001\188\000\ +\188\000\170\001\105\000\219\001\106\000\169\005\107\000\070\001\ +\237\004\112\000\114\005\225\001\230\002\052\002\151\005\055\004\ +\099\005\142\001\182\001\180\005\059\002\186\000\224\001\126\005\ +\165\001\010\000\234\001\243\001\040\003\191\005\234\005\023\004\ +\186\000\220\001\127\000\188\000\153\001\221\001\133\000\103\003\ +\133\000\035\004\186\000\024\003\222\001\201\003\128\002\223\001\ +\152\001\207\001\186\000\243\001\244\001\188\002\142\001\165\005\ +\143\001\103\003\235\001\143\001\050\004\090\003\245\001\200\002\ +\145\001\151\001\168\003\076\001\110\002\063\000\090\004\076\001\ +\114\002\076\001\151\001\014\002\244\001\232\002\037\005\231\001\ +\232\001\104\003\076\005\207\005\138\001\200\002\245\001\022\002\ +\138\001\015\004\173\002\145\001\218\005\035\004\219\001\186\000\ +\087\003\088\003\219\001\104\003\155\005\151\001\197\003\246\001\ +\196\005\040\006\247\001\053\002\118\003\141\001\200\002\231\004\ +\142\001\221\005\070\004\239\001\188\000\188\000\173\002\157\001\ +\200\002\003\004\184\002\006\002\226\000\024\004\208\005\246\001\ +\223\000\163\001\247\001\114\005\115\003\040\005\104\001\036\004\ +\042\005\009\003\188\000\221\001\134\001\204\002\198\003\071\001\ +\189\002\192\002\141\001\193\002\140\004\173\002\204\002\173\002\ +\188\000\182\002\193\000\145\001\168\002\188\000\145\001\145\001\ +\211\002\200\002\104\005\173\002\200\002\151\001\221\001\160\001\ +\188\000\151\001\147\001\212\001\181\002\077\001\160\001\064\001\ +\160\001\239\002\009\003\152\004\016\004\212\001\002\002\171\001\ +\171\001\029\004\030\004\095\004\227\000\165\005\222\005\226\000\ +\119\003\218\002\100\003\223\000\234\001\147\001\175\002\234\001\ +\231\005\234\001\051\000\234\001\141\001\234\001\104\001\034\003\ +\013\006\237\005\140\005\142\005\150\002\068\004\152\002\063\000\ +\153\002\063\000\019\006\089\000\069\002\206\001\046\003\118\003\ +\238\005\170\004\070\002\122\003\051\000\121\004\221\001\134\001\ +\055\002\059\006\221\001\234\001\120\006\234\001\122\002\186\000\ +\047\006\064\002\118\002\119\002\068\002\089\000\123\002\144\001\ +\124\001\214\001\140\004\177\005\063\000\234\001\071\003\227\000\ +\105\005\144\001\228\002\049\002\129\002\147\001\061\006\212\001\ +\147\001\147\001\212\001\049\006\076\001\214\005\140\002\188\000\ +\187\001\017\002\101\002\137\001\018\002\149\003\104\002\100\006\ 
+\195\005\102\006\220\001\215\005\030\000\220\001\221\001\021\005\ +\127\002\190\000\130\002\170\001\170\001\222\001\051\000\033\004\ +\223\001\225\001\000\006\131\002\146\001\105\001\058\005\170\003\ +\188\000\207\001\110\006\125\003\225\001\207\001\122\004\089\000\ +\220\001\207\001\133\000\207\001\133\000\087\006\133\000\207\001\ +\213\001\225\001\225\001\207\001\132\002\233\004\234\001\146\001\ +\234\001\118\003\213\001\136\001\207\001\218\001\133\002\186\000\ +\114\001\229\002\143\002\144\001\214\001\141\002\144\001\173\000\ +\186\000\225\001\139\005\076\001\215\001\076\001\027\003\076\001\ +\246\005\050\002\220\001\076\006\062\006\234\001\221\001\234\001\ +\010\003\215\002\137\002\216\005\140\002\222\001\137\001\081\002\ +\223\001\150\003\137\001\220\004\240\003\009\003\003\006\157\001\ +\220\001\135\002\207\001\157\001\220\001\129\002\127\002\157\001\ +\081\002\157\001\170\002\034\004\228\005\157\001\188\000\146\001\ +\204\003\108\000\146\001\146\001\207\001\207\001\024\006\207\001\ +\207\001\216\000\157\001\200\001\109\000\030\000\205\003\033\004\ +\136\003\137\003\190\000\130\002\213\001\072\004\102\000\213\001\ +\186\000\103\000\207\001\196\004\131\002\133\002\136\001\053\004\ +\218\001\104\000\160\001\063\000\218\001\061\004\166\005\135\003\ +\143\002\229\004\002\002\141\002\105\000\107\001\106\000\215\001\ +\107\000\031\003\050\003\112\000\221\003\132\002\137\005\216\000\ +\157\001\216\002\186\000\236\001\215\002\209\000\215\002\133\002\ +\137\002\007\003\186\000\216\002\087\004\081\002\171\002\211\003\ +\076\003\078\003\157\001\157\001\117\003\157\001\157\001\135\002\ +\109\001\096\002\041\003\002\002\237\001\206\001\102\005\234\001\ +\220\003\206\001\234\001\241\003\106\003\206\001\206\003\206\001\ +\157\001\191\001\047\002\206\001\076\004\176\004\118\003\206\001\ +\175\003\186\000\175\002\094\004\203\001\203\001\015\003\017\003\ +\206\001\083\003\103\004\082\003\243\001\253\004\032\004\197\004\ +\234\003\227\001\228\001\133\002\193\003\028\003\031\001\236\005\ +\023\003\063\000\123\003\041\005\097\002\142\004\120\001\121\001\ +\127\001\019\004\244\005\020\004\186\000\225\001\252\001\041\003\ +\051\006\251\001\111\001\103\003\088\004\216\002\091\003\092\003\ +\016\002\250\003\215\002\096\002\023\003\096\002\206\001\225\001\ +\244\005\225\001\252\001\225\001\186\000\173\004\192\001\225\001\ +\077\004\169\005\092\006\007\003\186\000\052\006\142\003\170\002\ +\206\001\206\001\133\000\206\001\206\001\187\000\251\003\252\003\ +\236\003\031\000\124\003\170\002\104\004\104\003\008\004\234\001\ +\188\004\035\000\031\000\053\006\009\003\004\004\206\001\077\004\ +\235\003\021\003\035\000\127\001\253\003\051\001\097\002\086\004\ +\097\002\140\003\044\006\076\001\023\003\225\001\081\004\127\003\ +\032\006\002\002\234\001\010\003\180\001\013\000\110\004\191\005\ +\253\004\044\006\069\004\094\006\071\001\187\001\079\004\187\001\ +\180\004\023\003\182\004\184\004\054\006\066\004\025\003\120\004\ +\018\000\185\002\187\001\225\001\222\004\254\003\129\003\074\003\ +\160\001\199\005\153\005\227\004\190\001\228\003\162\003\163\003\ +\198\002\025\003\119\005\024\000\010\003\212\005\207\003\133\003\ +\025\003\002\002\093\003\224\000\189\004\083\004\220\003\225\001\ +\138\001\222\001\154\005\189\001\243\001\185\003\186\000\144\003\ +\168\000\141\003\071\001\009\003\052\004\255\003\025\003\025\003\ +\236\001\155\003\171\002\195\003\023\003\169\000\188\000\077\004\ +\000\004\016\005\025\003\009\003\253\005\244\001\255\005\025\003\ +\171\002\167\005\025\003\171\002\025\003\172\002\047\000\245\001\ 
+\122\002\237\001\106\003\230\002\200\004\171\002\190\001\230\002\ +\166\000\249\001\063\000\172\002\186\000\195\002\172\002\108\000\ +\231\002\220\001\184\003\122\002\243\005\221\001\224\000\109\003\ +\172\002\122\002\109\000\196\002\222\001\077\002\234\001\223\001\ +\009\003\106\003\094\002\187\001\102\000\025\003\085\002\103\000\ +\246\001\071\001\141\001\247\001\122\002\059\004\187\001\104\000\ +\094\002\048\005\225\001\120\001\121\001\013\004\211\002\253\004\ +\211\002\187\001\105\000\045\005\106\000\225\001\107\000\159\004\ +\075\004\112\000\025\004\222\001\232\002\171\002\223\001\201\005\ +\232\002\171\002\111\003\122\002\122\002\253\004\211\002\186\000\ +\090\006\091\006\164\002\234\001\183\002\190\000\187\001\091\005\ +\172\002\007\002\225\001\143\001\172\002\122\002\122\002\122\002\ +\094\002\098\005\186\000\002\002\241\002\242\002\211\002\094\002\ +\110\004\185\000\012\004\197\002\107\003\201\002\203\002\205\002\ +\122\002\030\000\002\002\234\001\011\003\209\002\186\000\008\002\ +\017\000\031\005\094\002\124\005\185\000\220\001\236\004\166\000\ +\249\001\221\001\191\000\185\000\164\002\164\002\074\005\248\003\ +\222\001\009\004\129\006\223\001\186\000\071\001\106\003\010\003\ +\146\001\178\003\243\002\103\003\141\005\255\002\164\002\190\001\ +\234\001\185\000\234\001\179\003\009\002\010\004\047\005\253\004\ +\014\006\071\001\198\005\051\000\180\001\185\000\190\001\147\001\ +\180\001\253\004\187\001\230\002\180\001\185\000\180\001\185\000\ +\025\003\029\003\180\001\180\001\135\004\136\004\180\001\129\002\ +\043\006\007\003\186\000\186\000\149\005\104\003\015\005\180\001\ +\002\002\007\002\146\004\147\004\048\000\110\004\092\005\051\000\ +\025\003\153\004\125\002\193\004\057\003\190\001\025\003\030\000\ +\222\002\234\001\167\004\106\003\190\000\130\002\023\003\249\003\ +\185\000\030\000\253\004\106\003\091\004\023\003\131\002\008\002\ +\220\001\023\003\025\003\186\000\221\001\234\001\180\001\029\000\ +\186\000\023\003\223\002\222\001\232\002\180\001\223\001\103\003\ +\023\003\095\005\225\001\025\003\025\003\176\005\145\004\132\002\ +\166\000\249\001\025\003\025\003\208\003\025\003\015\003\180\001\ +\180\001\133\002\180\001\180\001\009\002\025\003\023\003\023\003\ +\215\000\052\003\168\004\051\000\253\004\004\003\150\000\234\004\ +\175\000\002\002\023\003\106\006\059\005\180\001\053\003\023\003\ +\129\002\104\003\023\003\161\001\023\003\178\004\186\000\058\003\ +\215\001\183\001\163\005\038\006\159\005\077\002\012\005\214\003\ +\162\001\200\005\243\001\243\004\106\003\025\003\025\003\187\001\ +\030\000\025\003\170\005\216\001\213\005\190\000\130\002\239\003\ +\107\006\059\003\092\004\246\003\054\003\143\004\115\005\131\002\ +\225\001\039\006\225\001\244\001\225\001\023\003\187\001\225\001\ +\158\001\186\000\023\003\106\003\192\005\245\001\051\000\148\003\ +\187\001\200\002\234\001\023\003\234\001\165\001\234\001\144\004\ +\132\002\165\001\028\004\166\000\249\001\220\001\203\001\159\003\ +\200\002\221\001\133\002\165\001\161\001\140\003\010\003\200\002\ +\222\001\193\001\020\005\223\001\165\001\211\002\023\005\211\002\ +\006\005\162\001\002\002\027\005\238\002\146\000\246\001\210\005\ +\211\002\247\001\017\002\211\002\106\003\018\002\200\002\176\001\ +\200\002\106\003\188\003\023\003\038\005\039\005\002\002\160\001\ +\144\002\234\001\200\002\220\003\163\000\044\005\206\002\165\000\ +\190\001\150\000\242\005\165\001\150\000\234\001\150\000\150\000\ +\207\002\225\005\122\005\186\000\145\002\211\002\234\001\119\006\ +\188\000\186\000\234\001\022\005\053\005\131\005\211\002\026\005\ 
+\001\006\150\000\166\000\249\001\021\003\175\000\175\000\163\001\ +\175\000\054\003\187\000\200\002\187\001\181\001\200\002\198\001\ +\187\001\139\004\175\000\175\000\150\000\010\003\164\001\021\003\ +\186\000\036\006\164\001\150\000\077\002\124\001\021\003\002\002\ +\007\002\164\001\025\006\197\000\210\005\010\003\106\003\146\002\ +\187\001\022\001\175\000\175\000\147\002\164\001\013\002\051\000\ +\050\005\150\000\150\000\103\006\021\003\002\002\109\003\187\001\ +\030\000\186\000\220\003\065\005\051\000\150\000\008\002\029\000\ +\021\003\188\001\029\000\110\003\017\006\150\000\195\001\150\000\ +\021\003\186\000\021\003\104\006\029\000\029\000\042\006\186\000\ +\029\000\106\003\010\003\128\006\164\001\049\005\236\001\023\001\ +\106\003\029\000\029\000\029\000\029\000\024\001\196\000\006\005\ +\187\000\089\001\090\001\009\002\138\005\023\003\122\005\029\000\ +\029\000\111\003\051\000\211\002\004\003\129\002\102\001\237\001\ +\150\000\196\000\146\005\021\003\147\005\186\000\131\005\214\000\ +\196\000\084\004\186\000\029\000\211\002\023\003\029\000\226\001\ +\029\000\029\000\029\000\029\000\158\001\030\000\187\001\095\001\ +\029\000\029\000\190\000\130\002\177\002\131\005\196\000\029\000\ +\006\005\025\002\215\000\103\001\131\002\023\003\211\002\187\001\ +\100\001\124\001\196\000\029\000\234\001\029\000\217\004\029\000\ +\029\000\196\000\196\000\172\004\196\000\025\003\144\005\021\003\ +\186\000\232\005\122\005\029\000\235\005\132\002\029\000\255\002\ +\230\001\146\000\029\000\229\001\023\003\186\000\026\002\133\002\ +\218\004\178\002\021\003\187\004\161\005\025\003\234\001\023\003\ +\047\002\021\003\025\003\025\003\050\006\131\005\138\003\010\006\ +\162\004\164\004\166\004\234\001\025\003\196\000\169\004\220\005\ +\131\005\234\001\025\003\047\002\255\002\006\005\187\001\021\003\ +\021\003\187\001\047\002\047\002\026\003\110\002\006\005\111\002\ +\181\001\011\006\234\001\021\003\023\003\025\003\187\001\135\002\ +\198\001\112\002\234\001\021\003\255\002\021\003\243\001\025\003\ +\047\002\047\002\223\003\077\002\029\006\030\006\224\003\033\006\ +\034\006\011\003\017\004\102\001\047\002\225\003\150\000\187\001\ +\226\003\143\003\241\001\047\002\047\002\150\000\047\002\150\000\ +\252\001\227\003\198\001\150\004\224\005\055\006\150\000\150\000\ +\007\004\150\000\227\005\023\003\023\003\051\000\021\003\015\002\ +\056\006\020\006\023\003\243\001\252\001\150\000\023\003\018\004\ +\130\005\150\000\234\001\234\001\240\005\175\000\175\000\023\003\ +\026\006\234\001\234\001\234\001\234\001\023\003\077\002\047\002\ +\186\000\131\005\211\002\021\006\156\005\234\001\023\003\051\000\ +\146\003\011\003\187\005\187\001\023\003\146\000\175\000\175\000\ +\175\000\023\003\155\001\187\001\234\001\188\005\175\000\009\006\ +\211\002\220\001\073\003\081\006\211\002\221\001\158\001\023\002\ +\211\002\211\002\211\002\211\002\222\001\187\001\187\001\223\001\ +\113\001\157\005\122\005\190\000\122\005\175\000\175\000\211\002\ +\065\006\023\003\175\000\108\006\131\005\215\000\175\000\187\001\ +\023\003\013\002\074\006\006\005\209\005\011\003\214\002\134\003\ +\215\002\187\001\150\000\150\000\005\006\187\001\024\002\118\006\ +\139\001\198\001\216\002\146\000\187\001\187\001\211\002\088\006\ +\000\005\150\000\175\000\110\005\025\003\125\006\126\006\186\000\ +\058\004\148\001\154\001\175\000\027\002\215\002\196\002\196\002\ +\155\001\013\002\097\006\216\002\202\002\196\002\001\005\029\002\ +\186\000\178\002\030\000\014\000\069\006\175\000\002\005\183\001\ +\003\005\178\002\196\002\048\002\112\006\186\000\051\000\077\006\ 
+\196\002\187\000\015\000\016\000\103\003\004\005\023\003\018\002\ +\146\000\025\003\215\002\013\003\006\005\185\002\048\002\023\000\ +\216\002\023\003\019\003\196\002\196\002\048\002\048\002\069\006\ +\069\006\148\005\058\001\186\000\082\000\095\006\096\006\132\006\ +\175\000\023\003\031\000\051\000\051\000\074\001\164\003\050\005\ +\157\002\011\002\035\000\048\002\048\002\036\002\104\003\147\000\ +\039\000\109\005\089\000\113\006\186\000\030\000\042\000\048\002\ +\181\001\245\004\110\005\135\002\181\001\051\000\048\002\048\002\ +\181\001\048\002\181\001\011\002\123\006\187\000\181\001\181\001\ +\247\004\147\000\181\001\242\003\089\000\083\000\127\006\043\002\ +\252\001\069\006\050\000\181\001\183\000\053\000\041\002\134\006\ +\135\006\150\000\243\003\244\003\150\000\072\002\073\002\074\002\ +\075\002\217\001\135\002\150\000\252\001\150\000\150\000\146\000\ +\166\000\076\002\048\002\161\000\057\002\023\003\183\000\151\003\ +\023\003\023\003\186\000\150\000\218\001\023\003\023\003\065\002\ +\175\000\152\003\181\001\210\001\058\002\150\000\161\000\210\001\ +\127\005\181\001\063\004\187\000\023\003\161\000\023\003\175\000\ +\175\000\210\001\023\003\064\003\245\002\246\002\023\003\023\003\ +\023\003\064\004\210\001\181\001\181\001\077\002\181\001\181\001\ +\065\003\060\002\155\001\161\000\161\000\023\003\155\001\150\000\ +\139\002\150\000\155\001\023\003\155\001\102\001\150\000\161\000\ +\155\001\181\001\025\003\175\000\155\001\071\002\161\000\161\000\ +\140\002\161\000\120\002\150\000\175\000\155\001\175\000\120\002\ +\189\000\054\003\025\003\196\000\023\003\198\000\199\000\200\000\ +\013\002\215\004\201\000\202\000\203\000\204\000\205\000\220\001\ +\206\000\245\002\248\002\221\001\017\002\007\003\186\000\018\002\ +\209\001\126\004\222\001\057\001\209\001\223\001\059\001\060\001\ +\061\001\035\006\161\000\215\000\155\001\159\002\209\001\175\000\ +\066\001\067\001\154\001\155\001\068\001\069\001\154\001\209\001\ +\186\000\160\002\154\001\163\002\154\001\135\002\013\002\103\003\ +\154\001\154\001\164\002\167\001\063\003\155\001\155\001\167\001\ +\155\001\155\001\064\003\093\005\151\002\154\001\150\000\198\004\ +\165\002\011\003\128\000\198\000\190\005\186\000\094\005\065\003\ +\172\002\199\004\167\001\155\001\173\002\132\001\133\001\134\001\ +\135\001\174\002\137\001\180\002\164\002\146\000\198\000\176\001\ +\186\002\104\003\185\002\176\001\082\000\198\000\215\002\082\000\ +\135\002\190\002\176\001\191\002\154\001\176\001\135\002\225\002\ +\215\002\082\000\227\002\154\001\002\003\082\000\176\001\150\000\ +\186\000\071\001\150\000\198\000\198\000\237\002\082\000\082\000\ +\082\000\082\000\011\003\150\000\018\003\154\001\154\001\198\000\ +\154\001\154\001\025\003\025\003\150\000\082\000\198\000\198\000\ +\030\003\198\000\175\000\166\002\167\002\083\000\032\003\043\003\ +\196\001\197\001\035\003\154\001\044\003\176\001\048\003\044\002\ +\082\000\045\002\083\000\082\000\247\002\249\002\083\000\082\000\ +\082\000\069\003\051\003\046\002\045\003\175\000\082\000\083\000\ +\083\000\083\000\083\000\047\003\082\000\115\002\008\003\116\002\ +\049\003\070\003\198\000\219\002\240\001\220\002\083\000\175\001\ +\082\000\117\002\082\000\175\001\082\000\082\000\051\000\221\002\ +\175\000\037\004\175\001\038\004\090\001\175\001\135\002\062\003\ +\082\000\083\000\150\000\082\000\083\000\039\004\094\003\083\000\ +\083\000\083\000\150\000\086\003\175\000\175\000\083\000\083\000\ +\101\003\175\000\175\000\175\000\108\003\083\000\150\000\175\000\ +\096\004\114\003\097\004\135\002\116\003\175\000\135\002\120\003\ 
+\130\003\083\000\139\003\083\000\098\004\083\000\083\000\094\000\ +\120\002\150\000\007\003\186\000\071\001\175\001\192\001\120\002\ +\145\003\083\000\120\002\153\003\083\000\175\000\095\000\016\000\ +\083\000\222\001\160\003\172\003\120\002\038\002\039\002\173\003\ +\120\002\047\002\186\003\096\000\245\002\177\002\199\003\013\002\ +\054\003\120\002\120\002\120\002\120\002\077\002\212\003\230\003\ +\229\003\231\003\232\003\048\002\233\003\174\000\031\000\237\003\ +\120\002\070\000\014\004\021\004\027\004\051\004\035\000\047\004\ +\060\004\056\002\105\003\067\004\097\000\010\000\063\002\080\004\ +\208\000\082\004\042\000\120\002\178\002\135\002\120\002\085\004\ +\177\002\120\002\120\002\120\002\135\002\102\004\111\004\116\004\ +\120\002\120\002\098\000\105\004\117\004\150\000\219\000\120\002\ +\127\004\125\004\130\004\150\000\132\004\149\004\099\000\014\000\ +\135\002\053\000\131\004\120\002\164\002\120\002\158\004\120\002\ +\120\002\177\004\190\004\195\004\164\002\192\004\015\000\016\000\ +\203\004\164\002\204\004\120\002\250\004\005\005\120\002\175\000\ +\205\004\206\004\120\002\023\000\214\004\150\000\164\002\207\004\ +\164\002\164\002\211\004\212\004\213\004\221\004\175\000\150\000\ +\225\004\226\004\228\004\150\000\241\004\164\002\031\000\013\005\ +\235\004\074\001\029\005\018\005\013\002\101\005\035\000\024\005\ +\043\005\077\005\025\005\117\005\039\000\028\005\021\003\072\005\ +\164\002\046\005\042\000\164\002\116\005\120\005\164\002\164\002\ +\164\002\121\005\123\005\133\005\136\005\079\003\164\002\143\005\ +\169\002\145\005\129\000\121\000\164\002\164\005\059\005\184\001\ +\202\005\013\002\135\002\205\005\150\000\181\005\050\000\182\005\ +\164\002\053\000\183\005\206\005\164\002\164\002\223\005\150\000\ +\230\005\199\001\174\000\174\000\233\005\174\000\248\005\008\006\ +\164\002\194\002\023\006\164\002\175\000\027\006\041\006\174\000\ +\174\000\216\003\058\006\072\002\073\002\074\002\075\002\250\004\ +\135\002\013\002\023\003\254\005\063\006\066\006\012\003\076\002\ +\064\006\067\006\135\002\072\006\247\003\073\006\150\000\174\000\ +\174\000\002\004\098\006\011\002\075\006\023\003\167\005\093\006\ +\116\006\117\006\111\005\111\006\023\003\150\000\121\006\124\006\ +\120\002\150\000\130\006\120\002\131\006\051\000\089\000\008\000\ +\026\004\021\003\150\000\051\000\005\005\120\002\084\002\025\003\ +\023\003\120\002\023\003\077\002\128\000\089\000\178\002\105\002\ +\252\001\102\002\120\002\120\002\120\002\120\002\023\003\104\002\ +\033\003\023\003\023\003\023\003\065\004\175\000\023\003\036\003\ +\023\003\120\002\135\002\135\002\202\002\221\000\107\002\200\002\ +\020\002\070\000\150\000\200\002\070\000\001\000\002\000\003\000\ +\004\000\005\000\006\000\007\000\120\002\005\005\070\000\120\002\ +\201\002\178\002\120\002\120\002\120\002\201\002\150\000\150\000\ +\150\000\120\002\120\002\070\000\203\002\070\000\070\000\206\002\ +\120\002\023\003\207\002\135\002\208\002\204\002\111\005\149\001\ +\048\004\070\000\070\000\016\006\120\002\232\001\120\002\132\005\ +\120\002\120\002\184\001\217\005\101\006\006\006\072\003\192\003\ +\129\004\149\002\226\005\119\004\120\002\070\000\250\004\120\002\ +\070\000\124\002\084\003\120\002\070\000\070\000\150\000\137\004\ +\210\002\206\001\147\003\070\000\025\003\025\003\150\000\134\005\ +\201\004\070\000\005\005\025\003\250\004\141\002\175\000\208\002\ +\242\004\025\003\182\003\005\005\176\002\070\000\150\000\219\005\ +\025\003\070\000\070\000\194\005\247\005\096\005\025\003\000\000\ +\150\000\113\002\175\000\000\000\000\000\070\000\150\000\000\000\ 
+\070\000\000\000\174\004\175\004\000\000\000\000\000\000\000\000\ +\000\000\025\003\025\003\021\003\000\000\000\000\000\000\000\000\ +\150\000\000\000\000\000\186\004\000\000\000\000\000\000\148\002\ +\000\000\000\000\021\003\021\003\000\000\000\000\000\000\000\000\ +\194\004\000\000\000\000\000\000\150\000\000\000\150\000\021\003\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\202\004\000\000\000\000\150\000\000\000\000\000\250\004\150\000\ +\000\000\195\000\021\003\175\000\000\000\021\003\000\000\000\000\ +\250\004\000\000\021\003\000\000\000\000\000\000\135\002\184\001\ +\021\003\000\000\174\000\174\000\195\000\000\000\021\003\000\000\ +\000\000\175\000\223\004\195\000\162\000\000\000\173\002\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\021\003\021\003\ +\000\000\000\000\000\000\174\000\174\000\174\000\000\000\162\000\ +\000\000\195\000\021\003\174\000\000\000\021\003\162\000\000\000\ +\000\000\250\004\217\002\000\000\000\000\195\000\000\000\000\000\ +\005\005\000\000\177\000\000\000\195\000\195\000\194\000\195\000\ +\000\000\010\005\174\000\174\000\162\000\162\000\000\000\174\000\ +\150\000\000\000\017\005\174\000\000\000\194\000\011\002\000\000\ +\162\000\000\000\000\000\150\000\000\000\000\000\199\001\162\000\ +\162\000\000\000\162\000\000\000\000\000\199\001\000\000\000\000\ +\194\000\000\000\000\000\250\004\000\000\000\000\000\000\174\000\ +\195\000\000\000\000\000\150\000\150\000\000\000\000\000\022\004\ +\174\000\150\000\150\000\000\000\000\000\000\000\011\002\000\000\ +\000\000\000\000\111\005\150\000\111\005\232\001\000\000\000\000\ +\232\001\005\005\174\000\162\000\000\000\061\003\194\000\150\000\ +\194\000\194\000\232\001\055\005\000\000\057\005\208\000\000\000\ +\232\001\014\003\000\000\000\000\000\000\000\000\000\000\232\001\ +\150\000\232\001\232\001\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\150\000\000\000\000\000\150\000\232\001\000\000\ +\000\000\000\000\000\000\150\000\150\000\174\000\000\000\000\000\ +\000\000\000\000\097\005\000\000\000\000\000\000\000\000\100\005\ +\000\000\232\001\000\000\000\000\232\001\000\000\094\002\000\000\ +\232\001\232\001\000\000\000\000\000\000\000\000\000\000\232\001\ +\136\000\000\000\137\000\138\000\030\000\232\001\139\000\000\000\ +\000\000\140\000\141\000\173\002\000\000\000\000\000\000\177\000\ +\177\000\232\001\177\000\000\000\000\000\232\001\232\001\000\000\ +\000\000\000\000\142\000\000\000\177\000\177\000\000\000\135\005\ +\000\000\232\001\143\000\144\000\232\001\000\000\000\000\000\000\ +\194\000\000\000\145\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\177\000\003\002\146\000\147\000\ +\000\000\194\000\000\000\000\000\199\001\174\000\000\000\150\005\ +\000\000\152\005\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\174\000\174\000\173\002\000\000\ +\173\002\173\002\173\002\168\005\000\000\000\000\173\002\178\005\ +\179\005\000\000\000\000\173\002\000\000\000\000\000\000\173\002\ +\173\002\173\002\000\000\000\000\000\000\180\003\184\005\000\000\ +\173\002\173\002\173\002\173\002\000\000\000\000\000\000\000\000\ +\174\000\000\000\173\002\000\000\011\002\000\000\000\000\173\002\ +\000\000\174\000\000\000\174\000\197\005\000\000\173\002\173\002\ +\000\000\000\000\000\000\000\000\000\000\011\002\000\000\000\000\ +\194\000\000\000\173\002\000\000\000\000\173\002\173\002\000\000\ +\173\002\173\002\173\002\000\000\173\002\000\000\000\000\173\002\ +\173\002\000\000\000\000\000\000\000\000\194\000\173\002\000\000\ 
+\000\000\000\000\215\003\000\000\174\000\000\000\000\000\000\000\ +\000\000\173\002\173\002\000\000\173\002\173\002\173\002\173\002\ +\000\000\000\000\173\002\011\002\000\000\000\000\000\000\245\005\ +\000\000\163\000\173\002\173\002\171\000\173\002\000\000\000\000\ +\249\005\173\002\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\163\000\002\006\000\000\000\000\ +\004\006\000\000\000\000\163\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\194\000\180\000\000\000\ +\194\000\194\000\000\000\000\000\194\000\000\000\194\000\000\000\ +\000\000\163\000\163\000\000\000\062\004\000\000\000\000\000\000\ +\194\000\028\006\000\000\000\000\156\002\163\000\094\002\194\000\ +\094\002\094\002\094\002\000\000\163\000\163\000\094\002\163\000\ +\000\000\000\000\000\000\094\002\184\001\000\000\000\000\094\002\ +\094\002\094\002\000\000\000\000\000\000\194\000\000\000\174\000\ +\094\002\094\002\094\002\094\002\000\000\000\000\000\000\000\000\ +\000\000\194\000\094\002\000\000\000\000\000\000\000\000\094\002\ +\194\000\194\000\000\000\194\000\000\000\000\000\094\002\094\002\ +\163\000\000\000\174\000\000\000\000\000\000\000\000\000\177\000\ +\003\002\000\000\094\002\000\000\000\000\094\002\000\000\000\000\ +\094\002\094\002\094\002\000\000\094\002\000\000\000\000\094\002\ +\094\002\000\000\000\000\086\006\000\000\174\000\094\002\000\000\ +\177\000\177\000\177\000\000\000\194\000\000\000\000\000\000\000\ +\177\000\094\002\094\002\000\000\094\002\094\002\094\002\094\002\ +\000\000\174\000\174\000\000\000\000\000\000\000\174\000\174\000\ +\174\000\000\000\094\002\000\000\174\000\094\002\000\000\003\002\ +\177\000\094\002\174\000\000\000\003\002\000\000\000\000\000\000\ +\177\000\114\006\115\006\000\000\011\002\000\000\000\000\000\000\ +\000\000\122\006\000\000\180\000\180\000\000\000\180\000\000\000\ +\000\000\011\002\174\000\000\000\000\000\000\000\000\000\000\000\ +\180\000\180\000\000\000\000\000\177\000\133\006\011\002\000\000\ +\011\002\011\002\000\000\000\000\011\002\177\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\011\002\000\000\250\001\ +\180\000\180\000\178\000\000\000\000\000\000\000\195\000\177\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\011\002\000\000\000\000\194\000\219\004\195\000\011\002\011\002\ +\011\002\000\000\000\000\000\000\171\000\000\000\011\002\171\000\ +\000\000\000\000\000\000\000\000\011\002\000\000\000\000\000\000\ +\195\000\171\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\011\002\000\000\177\000\000\000\011\002\000\000\171\000\171\000\ +\171\000\171\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\011\002\000\000\000\000\011\002\000\000\171\000\000\000\000\000\ +\000\000\011\002\184\001\000\000\174\000\000\000\195\000\000\000\ +\195\000\195\000\000\000\000\000\156\002\000\000\000\000\156\002\ +\171\000\000\000\000\000\174\000\156\002\000\000\000\000\171\000\ +\171\000\156\002\156\002\000\000\000\000\000\000\171\000\156\002\ +\000\000\011\002\177\002\000\000\171\000\000\000\156\002\179\000\ +\156\002\156\002\000\000\000\000\000\000\000\000\000\000\000\000\ +\171\000\000\000\171\000\067\002\171\000\156\002\000\000\000\000\ +\000\000\000\000\000\000\159\001\078\002\000\000\000\000\000\000\ +\171\000\000\000\177\000\171\000\000\000\000\000\011\002\000\000\ +\156\002\000\000\000\000\156\002\000\000\177\002\156\002\156\002\ +\156\002\177\000\177\000\010\000\000\000\157\001\156\002\178\000\ +\178\000\000\000\178\000\156\002\156\002\000\000\000\000\000\000\ 
+\000\000\174\000\000\000\000\000\178\000\178\000\000\000\000\000\ +\156\002\000\000\181\003\000\000\156\002\156\002\011\002\000\000\ +\195\000\000\000\199\001\000\000\000\000\177\000\000\000\000\000\ +\156\002\000\000\000\000\156\002\178\000\004\002\177\000\000\000\ +\003\002\195\000\000\000\000\000\136\000\000\000\137\000\138\000\ +\030\000\000\000\139\000\000\000\000\000\140\000\141\000\000\000\ +\000\000\000\000\000\000\023\003\000\000\000\000\000\000\000\000\ +\000\000\184\001\000\000\000\000\000\000\000\000\142\000\000\000\ +\023\003\000\000\000\000\000\000\000\000\010\000\143\000\144\000\ +\000\000\003\002\000\000\000\000\054\000\023\003\145\000\023\003\ +\023\003\000\000\174\000\180\000\180\000\000\000\000\000\000\000\ +\000\000\000\000\146\000\147\000\023\003\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\179\000\179\000\000\000\179\000\ +\000\000\000\000\184\001\199\002\180\000\180\000\180\000\023\003\ +\195\000\179\000\179\000\000\000\180\000\000\000\136\000\023\003\ +\137\000\138\000\030\000\000\000\139\000\023\003\081\001\158\001\ +\141\000\000\000\000\000\023\003\000\000\195\000\000\000\000\000\ +\000\000\179\000\179\000\180\000\180\000\000\000\000\000\000\000\ +\180\000\000\000\000\000\023\003\180\000\220\000\220\000\000\000\ +\000\000\144\000\087\001\088\001\089\001\090\001\078\002\023\003\ +\145\000\011\002\023\003\000\000\000\000\000\000\000\000\000\000\ +\000\000\011\002\000\000\000\000\146\000\147\000\011\002\184\001\ +\180\000\186\000\000\000\174\000\177\000\000\000\092\001\093\001\ +\184\001\038\003\000\000\011\002\000\000\011\002\011\002\000\000\ +\000\000\000\000\095\001\096\001\097\001\098\001\000\000\174\000\ +\195\000\195\000\011\002\180\000\195\000\000\000\195\000\003\002\ +\130\001\131\001\000\000\100\001\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\011\002\000\000\000\000\ +\011\002\000\000\000\000\011\002\011\002\011\002\000\000\000\000\ +\000\000\000\000\177\000\011\002\000\000\000\000\000\000\000\000\ +\159\001\011\002\186\002\000\000\190\002\000\000\038\003\159\001\ +\000\000\159\001\000\000\000\000\000\000\011\002\177\000\003\002\ +\000\000\011\002\011\002\177\000\177\000\177\000\000\000\000\000\ +\174\000\177\000\184\001\000\000\000\000\011\002\000\000\177\000\ +\011\002\000\000\000\000\000\000\000\000\000\000\000\000\178\000\ +\004\002\000\000\000\000\000\000\000\000\000\000\174\000\000\000\ +\167\001\000\000\000\000\000\000\000\000\000\000\000\000\177\000\ +\138\002\180\003\000\000\000\000\000\000\168\001\000\000\000\000\ +\178\000\178\000\178\000\000\000\081\001\000\000\000\000\000\000\ +\178\000\000\000\000\000\000\000\000\000\048\006\000\000\000\000\ +\136\000\000\000\137\000\138\000\030\000\184\001\139\000\000\000\ +\060\006\169\001\141\000\000\000\054\000\000\000\180\000\004\002\ +\178\000\088\001\089\001\090\001\004\002\000\000\000\000\000\000\ +\178\000\054\000\000\000\000\000\000\000\180\000\180\000\000\000\ +\179\002\180\003\000\000\144\000\000\000\000\000\054\000\000\000\ +\054\000\054\000\145\000\000\000\092\001\093\001\000\000\000\000\ +\000\000\000\000\000\000\000\000\178\000\054\000\146\000\147\000\ +\095\001\096\001\097\001\098\001\000\000\178\000\000\000\000\000\ +\000\000\180\000\000\000\000\000\179\000\179\000\000\000\000\000\ +\054\000\100\001\180\000\054\000\180\000\000\000\184\001\178\000\ +\054\000\003\002\000\000\000\000\000\000\000\000\054\000\000\000\ +\000\000\000\000\000\000\195\000\054\000\179\000\179\000\179\000\ +\003\002\000\000\000\000\000\000\000\000\179\000\179\000\000\000\ 
+\054\000\000\000\000\000\000\000\054\000\054\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\180\000\000\000\000\000\ +\054\000\000\000\178\000\054\000\179\000\179\000\000\000\000\000\ +\000\000\179\000\000\000\000\000\000\000\179\000\000\000\079\002\ +\080\002\081\002\082\002\083\002\084\002\085\002\086\002\087\002\ +\088\002\089\002\090\002\091\002\092\002\093\002\094\002\095\002\ +\096\002\097\002\098\002\099\002\182\002\102\002\000\000\000\000\ +\103\002\179\000\000\000\105\002\000\000\106\002\000\000\107\002\ +\000\000\108\002\179\000\109\002\000\000\000\000\003\002\000\000\ +\000\000\000\000\000\000\159\001\000\000\000\000\000\000\000\000\ +\126\002\000\000\000\000\000\000\179\000\000\000\000\000\186\002\ +\000\000\190\002\000\000\078\002\000\000\000\000\000\000\142\002\ +\000\000\143\002\000\000\000\000\000\000\000\000\186\002\186\002\ +\190\002\190\002\178\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\158\002\000\000\186\002\000\000\190\002\000\000\000\000\ +\180\000\178\000\178\000\000\000\138\002\000\000\000\000\179\000\ +\000\000\057\000\000\000\194\000\000\000\000\000\186\002\000\000\ +\190\002\186\002\000\000\190\002\000\000\000\000\186\002\000\000\ +\190\002\000\000\000\000\180\000\186\002\000\000\190\002\003\002\ +\000\000\000\000\186\002\213\002\190\002\178\000\000\000\000\000\ +\000\000\000\000\000\000\138\002\000\000\000\000\178\000\000\000\ +\004\002\000\000\186\002\186\002\190\002\190\002\180\000\000\000\ +\000\000\000\000\228\002\000\000\000\000\000\000\186\002\000\000\ +\190\002\186\002\212\002\190\002\213\002\000\000\000\000\000\000\ +\000\000\000\000\180\000\180\000\000\000\000\000\160\004\180\000\ +\180\000\180\000\000\000\000\000\213\002\180\000\213\002\213\002\ +\213\002\004\002\213\002\180\000\000\000\213\002\213\002\179\000\ +\000\000\003\003\136\000\006\003\137\000\138\000\030\000\000\000\ +\139\000\000\000\000\000\140\000\141\000\000\000\179\000\179\000\ +\000\000\020\003\000\000\180\000\000\000\177\001\022\003\213\002\ +\000\000\000\000\000\000\000\000\142\000\000\000\213\002\000\000\ +\003\002\000\000\000\000\000\000\143\000\144\000\000\000\194\000\ +\000\000\229\002\213\002\213\002\145\000\000\000\000\000\000\000\ +\000\000\000\000\179\000\000\000\003\002\000\000\000\000\000\000\ +\146\000\147\000\000\000\179\000\000\000\179\000\000\000\000\000\ +\000\000\000\000\000\000\220\000\220\000\000\000\000\000\000\000\ +\000\000\159\001\000\000\000\000\000\000\000\000\073\004\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\138\002\000\000\ +\000\000\247\000\000\000\000\000\182\002\000\000\000\000\182\002\ +\000\000\000\000\000\000\000\000\178\000\000\000\179\000\089\003\ +\000\000\182\002\000\000\000\000\095\003\096\003\097\003\000\000\ +\000\000\000\000\000\000\000\000\000\000\003\002\182\002\182\002\ +\182\002\182\002\000\000\099\003\000\000\180\000\102\003\004\002\ +\000\000\000\000\000\000\000\000\000\000\182\002\000\000\000\000\ +\000\000\138\002\000\000\003\002\180\000\000\000\000\000\138\002\ +\000\000\000\000\000\000\000\000\000\000\000\000\045\006\000\000\ +\182\002\000\000\178\000\000\000\173\002\000\000\182\002\182\002\ +\182\002\000\000\000\000\000\000\000\000\173\002\182\002\000\000\ +\000\000\057\000\000\000\000\000\182\002\000\000\178\000\004\002\ +\000\000\000\000\000\000\178\000\178\000\178\000\057\000\000\000\ +\182\002\178\000\182\002\000\000\182\002\173\002\000\000\178\000\ +\173\002\000\000\000\000\057\000\000\000\057\000\057\000\000\000\ +\182\002\173\002\011\002\182\002\000\000\000\000\078\006\161\003\ 
+\000\000\179\000\057\000\000\000\165\003\166\003\167\003\178\000\ +\000\000\014\003\180\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\183\003\000\000\057\000\000\000\000\000\ +\057\000\000\000\000\000\000\000\179\000\057\000\000\000\138\002\ +\000\000\000\000\000\000\057\000\000\000\000\000\000\000\000\000\ +\000\000\057\000\000\000\200\003\000\000\000\000\203\003\000\000\ +\000\000\000\000\000\000\000\000\000\000\057\000\000\000\179\000\ +\000\000\057\000\057\000\210\003\138\002\000\000\000\000\138\002\ +\136\000\000\000\137\000\138\000\030\000\057\000\139\000\000\000\ +\057\000\140\000\141\000\179\000\179\000\232\004\000\000\000\000\ +\179\000\179\000\179\000\000\000\000\000\000\000\179\000\000\000\ +\000\000\000\000\142\000\180\000\179\000\000\000\000\000\000\000\ +\000\000\000\000\143\000\126\003\000\000\000\000\000\000\000\000\ +\032\000\000\000\145\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\004\002\000\000\000\000\179\000\151\004\146\000\147\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\004\002\000\000\000\000\000\000\000\000\000\000\000\000\040\004\ +\000\000\247\000\247\000\247\000\247\000\000\000\138\002\000\000\ +\000\000\247\000\247\000\247\000\000\000\138\002\247\000\247\000\ +\000\000\247\000\247\000\247\000\247\000\247\000\247\000\000\000\ +\000\000\247\000\247\000\247\000\247\000\247\000\247\000\000\000\ +\000\000\138\002\078\004\000\000\000\000\247\000\247\000\000\000\ +\000\000\247\000\247\000\247\000\247\000\000\000\081\000\000\000\ +\000\000\247\000\247\000\000\000\180\000\255\004\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\247\000\247\000\000\000\ +\247\000\000\000\000\000\247\000\247\000\247\000\004\002\247\000\ +\180\000\000\000\247\000\247\000\000\000\000\000\000\000\000\000\ +\000\000\247\000\000\000\247\000\000\000\000\000\179\000\118\004\ +\000\000\000\000\000\000\000\000\247\000\247\000\000\000\247\000\ +\247\000\247\000\247\000\000\000\000\000\179\000\000\000\000\000\ +\247\000\000\000\247\000\000\000\141\004\247\000\000\000\159\001\ +\247\000\000\000\011\002\000\000\247\000\011\002\000\000\148\004\ +\000\000\000\000\011\002\138\002\000\000\000\000\000\000\011\002\ +\000\000\000\000\000\000\195\000\000\000\011\002\023\003\000\000\ +\000\000\180\000\000\000\000\000\011\002\000\000\011\002\011\002\ +\000\000\000\000\000\000\023\003\000\000\000\000\000\000\004\002\ +\000\000\000\000\000\000\011\002\179\004\000\000\181\004\180\000\ +\255\004\138\002\000\000\000\000\000\000\000\000\023\003\000\000\ +\023\003\023\003\023\003\138\002\023\003\000\000\011\002\023\003\ +\023\003\011\002\000\000\179\000\011\002\011\002\011\002\000\000\ +\159\001\000\000\000\000\113\005\011\002\000\000\000\000\000\000\ +\000\000\000\000\011\002\000\000\000\000\000\000\208\004\209\004\ +\210\004\023\003\000\000\000\000\229\000\000\000\011\002\000\000\ +\023\003\000\000\011\002\011\002\000\000\216\004\000\000\000\000\ +\000\000\000\000\000\000\000\000\023\003\023\003\011\002\000\000\ +\032\000\011\002\000\000\032\000\000\000\000\000\000\000\000\000\ +\000\000\230\004\000\000\138\002\138\002\032\000\032\000\000\000\ +\000\000\032\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\004\002\000\000\032\000\032\000\032\000\032\000\000\000\195\000\ +\000\000\238\004\239\004\240\004\179\000\000\000\000\000\000\000\ +\032\000\032\000\000\000\000\000\004\002\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\138\002\000\000\000\000\113\005\ +\000\000\000\000\000\000\000\000\032\000\000\000\000\000\032\000\ 
+\000\000\000\000\000\000\032\000\032\000\000\000\000\000\185\005\ +\186\005\032\000\032\000\030\005\000\000\000\000\081\000\255\004\ +\032\000\081\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\081\000\032\000\000\000\032\000\081\000\ +\032\000\032\000\000\000\000\000\000\000\255\004\000\000\000\000\ +\081\000\081\000\081\000\081\000\032\000\004\002\000\000\032\000\ +\000\000\060\002\000\000\032\000\000\000\000\000\052\005\081\000\ +\054\005\000\000\056\005\016\003\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\004\002\000\000\179\000\000\000\000\000\ +\000\000\000\000\081\000\000\000\000\000\081\000\078\005\079\005\ +\080\005\081\000\081\000\000\000\087\005\088\005\089\005\000\000\ +\081\000\179\000\000\000\000\000\000\000\000\000\081\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\081\000\000\000\081\000\103\005\081\000\081\000\ +\000\000\113\005\136\000\000\000\137\000\138\000\030\000\255\004\ +\139\000\118\005\081\000\140\000\141\000\081\000\000\000\000\000\ +\000\000\255\004\125\005\000\000\000\000\000\000\129\005\138\002\ +\000\000\000\000\000\000\000\000\142\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\143\000\144\000\000\000\000\000\ +\000\000\000\000\179\000\000\000\145\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\146\000\147\000\000\000\000\000\229\000\229\000\229\000\229\000\ +\179\000\000\000\255\004\000\000\229\000\229\000\229\000\000\000\ +\000\000\229\000\229\000\229\000\229\000\229\000\229\000\229\000\ +\229\000\229\000\000\000\000\000\229\000\229\000\229\000\229\000\ +\229\000\229\000\000\000\122\002\000\000\000\000\000\000\000\000\ +\229\000\229\000\000\000\000\000\229\000\229\000\229\000\229\000\ +\229\000\229\000\229\000\000\000\229\000\229\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\255\004\000\000\000\000\000\000\ +\229\000\229\000\000\000\229\000\000\000\000\000\229\000\229\000\ +\229\000\000\000\229\000\229\000\229\000\229\000\229\000\000\000\ +\000\000\000\000\000\000\113\005\229\000\113\005\229\000\229\000\ +\229\000\229\000\229\000\000\000\000\000\000\000\000\000\229\000\ +\229\000\000\000\229\000\229\000\229\000\229\000\000\000\000\000\ +\229\000\000\000\000\000\229\000\000\000\229\000\000\000\000\000\ +\229\000\000\000\000\000\229\000\000\000\000\000\000\000\229\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\239\005\000\000\000\000\000\000\014\000\000\000\000\000\000\000\ +\000\000\060\002\000\000\060\002\060\002\060\002\250\005\251\005\ +\252\005\060\002\000\000\015\000\016\000\000\000\060\002\164\002\ +\000\000\000\000\060\002\060\002\060\002\000\000\000\000\000\000\ +\023\000\000\000\007\006\060\002\060\002\060\002\060\002\000\000\ +\000\000\000\000\000\000\000\000\000\000\060\002\000\000\018\006\ +\000\000\060\002\060\002\031\000\000\000\022\006\074\001\000\000\ +\000\000\060\002\060\002\035\000\000\000\000\000\000\000\000\000\ +\000\000\039\000\000\000\000\000\000\000\060\002\037\006\042\000\ +\060\002\000\000\000\000\060\002\060\002\060\002\046\006\060\002\ +\000\000\000\000\060\002\060\002\000\000\000\000\000\000\046\000\ +\000\000\060\002\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\050\000\060\002\060\002\053\000\060\002\ +\060\002\060\002\000\000\000\000\000\000\060\002\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\060\002\000\000\000\000\ +\060\002\000\000\000\000\000\000\060\002\000\000\000\000\000\000\ 
+\000\000\000\000\000\000\000\000\000\000\000\000\079\006\080\006\ +\000\000\000\000\000\000\041\001\000\000\082\006\083\006\084\006\ +\085\006\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\089\006\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\122\002\122\002\122\002\122\002\000\000\ +\099\006\122\002\122\002\122\002\122\002\122\002\122\002\122\002\ +\122\002\122\002\122\002\122\002\122\002\122\002\122\002\122\002\ +\122\002\122\002\000\000\122\002\122\002\122\002\122\002\122\002\ +\122\002\122\002\122\002\000\000\000\000\000\000\000\000\122\002\ +\122\002\000\000\000\000\122\002\122\002\122\002\122\002\122\002\ +\122\002\122\002\122\002\122\002\122\002\122\002\122\002\000\000\ +\122\002\122\002\122\002\122\002\000\000\000\000\122\002\122\002\ +\122\002\110\002\122\002\122\002\122\002\122\002\122\002\122\002\ +\000\000\122\002\122\002\122\002\122\002\122\002\000\000\122\002\ +\000\000\000\000\000\000\122\002\122\002\122\002\122\002\122\002\ +\122\002\122\002\122\002\000\000\122\002\000\000\122\002\122\002\ +\061\001\122\002\122\002\122\002\122\002\122\002\000\000\122\002\ +\122\002\000\000\122\002\122\002\122\002\122\002\000\000\122\002\ +\122\002\000\000\122\002\000\000\000\000\000\000\122\002\164\002\ +\164\002\164\002\164\002\164\002\000\000\164\002\164\002\164\002\ +\164\002\164\002\164\002\164\002\164\002\164\002\164\002\164\002\ +\164\002\164\002\164\002\164\002\164\002\000\000\000\000\164\002\ +\164\002\164\002\164\002\164\002\164\002\164\002\164\002\000\000\ +\000\000\000\000\000\000\164\002\164\002\000\000\000\000\164\002\ +\164\002\164\002\164\002\164\002\164\002\164\002\164\002\164\002\ +\164\002\164\002\164\002\000\000\164\002\164\002\164\002\164\002\ +\000\000\000\000\164\002\164\002\164\002\000\000\164\002\164\002\ +\164\002\164\002\164\002\164\002\000\000\164\002\164\002\164\002\ +\164\002\164\002\000\000\164\002\000\000\000\000\000\000\164\002\ +\164\002\164\002\164\002\164\002\164\002\164\002\164\002\000\000\ +\164\002\000\000\164\002\164\002\047\001\164\002\164\002\164\002\ +\164\002\164\002\000\000\164\002\164\002\000\000\164\002\164\002\ +\164\002\164\002\000\000\164\002\164\002\000\000\164\002\000\000\ +\000\000\000\000\164\002\041\001\041\001\041\001\041\001\000\000\ +\000\000\041\001\041\001\041\001\041\001\041\001\041\001\041\001\ +\041\001\041\001\041\001\041\001\041\001\041\001\041\001\041\001\ +\041\001\041\001\000\000\041\001\041\001\041\001\041\001\041\001\ +\041\001\041\001\041\001\000\000\000\000\000\000\000\000\041\001\ +\041\001\000\000\000\000\041\001\041\001\041\001\041\001\041\001\ +\041\001\041\001\041\001\041\001\041\001\041\001\041\001\000\000\ +\041\001\041\001\041\001\041\001\000\000\000\000\041\001\041\001\ +\041\001\000\000\041\001\041\001\041\001\041\001\041\001\041\001\ +\000\000\041\001\041\001\041\001\041\001\041\001\000\000\041\001\ +\000\000\000\000\000\000\041\001\041\001\041\001\041\001\041\001\ +\041\001\041\001\041\001\000\000\041\001\000\000\041\001\041\001\ +\045\001\041\001\041\001\041\001\041\001\041\001\000\000\041\001\ +\041\001\000\000\041\001\041\001\041\001\041\001\000\000\041\001\ +\041\001\000\000\041\001\000\000\000\000\000\000\041\001\000\000\ +\061\001\061\001\061\001\061\001\000\000\000\000\061\001\061\001\ +\061\001\061\001\061\001\061\001\061\001\061\001\061\001\061\001\ +\061\001\061\001\061\001\061\001\061\001\061\001\061\001\000\000\ +\061\001\061\001\061\001\061\001\061\001\061\001\061\001\061\001\ +\000\000\000\000\000\000\000\000\061\001\061\001\000\000\000\000\ 
+\061\001\061\001\061\001\061\001\061\001\061\001\061\001\061\001\ +\061\001\061\001\061\001\061\001\000\000\061\001\061\001\061\001\ +\061\001\000\000\000\000\061\001\061\001\061\001\000\000\061\001\ +\061\001\061\001\061\001\061\001\061\001\000\000\061\001\061\001\ +\061\001\061\001\061\001\000\000\061\001\000\000\000\000\000\000\ +\061\001\061\001\061\001\061\001\061\001\061\001\061\001\061\001\ +\000\000\061\001\000\000\061\001\061\001\053\001\061\001\061\001\ +\061\001\061\001\061\001\000\000\061\001\061\001\000\000\061\001\ +\061\001\061\001\061\001\000\000\061\001\061\001\000\000\061\001\ +\000\000\000\000\000\000\061\001\047\001\047\001\047\001\047\001\ +\000\000\000\000\047\001\047\001\047\001\047\001\047\001\047\001\ +\047\001\047\001\047\001\047\001\047\001\047\001\047\001\047\001\ +\047\001\047\001\047\001\000\000\047\001\047\001\047\001\047\001\ +\047\001\047\001\047\001\047\001\000\000\000\000\000\000\000\000\ +\047\001\047\001\000\000\000\000\047\001\047\001\047\001\047\001\ +\047\001\047\001\047\001\047\001\047\001\047\001\047\001\047\001\ +\000\000\047\001\047\001\047\001\047\001\000\000\000\000\047\001\ +\047\001\047\001\000\000\047\001\047\001\047\001\047\001\047\001\ +\047\001\000\000\047\001\047\001\047\001\047\001\047\001\000\000\ +\047\001\000\000\000\000\000\000\047\001\047\001\047\001\047\001\ +\047\001\047\001\047\001\047\001\000\000\047\001\000\000\047\001\ +\047\001\049\001\047\001\047\001\047\001\047\001\047\001\000\000\ +\047\001\047\001\000\000\047\001\047\001\047\001\047\001\000\000\ +\047\001\047\001\000\000\047\001\000\000\000\000\000\000\047\001\ +\045\001\045\001\045\001\045\001\000\000\000\000\045\001\045\001\ +\045\001\045\001\045\001\045\001\045\001\045\001\045\001\045\001\ +\045\001\045\001\045\001\045\001\045\001\045\001\045\001\000\000\ +\045\001\045\001\045\001\045\001\045\001\045\001\045\001\045\001\ +\000\000\000\000\000\000\000\000\045\001\045\001\000\000\000\000\ +\045\001\045\001\045\001\045\001\045\001\045\001\045\001\045\001\ +\045\001\045\001\045\001\045\001\000\000\045\001\045\001\045\001\ +\045\001\000\000\000\000\045\001\045\001\045\001\000\000\045\001\ +\045\001\045\001\045\001\045\001\045\001\000\000\045\001\045\001\ +\045\001\045\001\045\001\000\000\045\001\000\000\000\000\000\000\ +\045\001\045\001\045\001\045\001\045\001\045\001\045\001\045\001\ +\000\000\045\001\000\000\045\001\045\001\051\001\045\001\045\001\ +\045\001\045\001\045\001\000\000\045\001\045\001\000\000\045\001\ +\045\001\045\001\045\001\000\000\045\001\045\001\000\000\045\001\ +\000\000\000\000\000\000\045\001\000\000\053\001\053\001\053\001\ +\053\001\000\000\000\000\053\001\053\001\053\001\053\001\053\001\ +\053\001\053\001\053\001\053\001\053\001\053\001\053\001\053\001\ +\053\001\053\001\053\001\053\001\000\000\053\001\053\001\053\001\ +\053\001\053\001\053\001\053\001\053\001\000\000\000\000\000\000\ +\000\000\053\001\053\001\000\000\000\000\053\001\053\001\053\001\ +\053\001\053\001\053\001\053\001\053\001\053\001\053\001\053\001\ +\053\001\000\000\053\001\053\001\053\001\053\001\000\000\000\000\ +\053\001\053\001\053\001\000\000\053\001\053\001\053\001\053\001\ +\053\001\053\001\000\000\053\001\053\001\053\001\053\001\053\001\ +\000\000\053\001\000\000\000\000\000\000\053\001\053\001\053\001\ +\053\001\053\001\053\001\053\001\053\001\000\000\053\001\000\000\ +\053\001\053\001\059\001\053\001\053\001\053\001\053\001\053\001\ +\000\000\053\001\053\001\000\000\053\001\053\001\053\001\053\001\ +\000\000\053\001\053\001\000\000\053\001\000\000\000\000\000\000\ 
+\053\001\049\001\049\001\049\001\049\001\000\000\000\000\049\001\ +\049\001\049\001\049\001\049\001\049\001\049\001\049\001\049\001\ +\049\001\049\001\049\001\049\001\049\001\049\001\049\001\049\001\ +\000\000\049\001\049\001\049\001\049\001\049\001\049\001\049\001\ +\049\001\000\000\000\000\000\000\000\000\049\001\049\001\000\000\ +\000\000\049\001\049\001\049\001\049\001\049\001\049\001\049\001\ +\049\001\049\001\049\001\049\001\049\001\000\000\049\001\049\001\ +\049\001\049\001\000\000\000\000\049\001\049\001\049\001\000\000\ +\049\001\049\001\049\001\049\001\049\001\049\001\000\000\049\001\ +\049\001\049\001\049\001\049\001\000\000\049\001\000\000\000\000\ +\000\000\049\001\049\001\049\001\049\001\049\001\049\001\049\001\ +\049\001\000\000\049\001\000\000\049\001\049\001\055\001\049\001\ +\049\001\049\001\049\001\049\001\000\000\049\001\049\001\000\000\ +\049\001\049\001\049\001\049\001\000\000\049\001\049\001\000\000\ +\049\001\000\000\000\000\000\000\049\001\051\001\051\001\051\001\ +\051\001\000\000\000\000\051\001\051\001\051\001\051\001\051\001\ +\051\001\051\001\051\001\051\001\051\001\051\001\051\001\051\001\ +\051\001\051\001\051\001\051\001\000\000\051\001\051\001\051\001\ +\051\001\051\001\051\001\051\001\051\001\000\000\000\000\000\000\ +\000\000\051\001\051\001\000\000\000\000\051\001\051\001\051\001\ +\051\001\051\001\051\001\051\001\051\001\051\001\051\001\051\001\ +\051\001\000\000\051\001\051\001\051\001\051\001\000\000\000\000\ +\051\001\051\001\051\001\000\000\051\001\051\001\051\001\051\001\ +\051\001\051\001\000\000\051\001\051\001\051\001\051\001\051\001\ +\000\000\051\001\000\000\000\000\000\000\051\001\051\001\051\001\ +\051\001\051\001\051\001\051\001\051\001\000\000\051\001\000\000\ +\051\001\051\001\057\001\051\001\051\001\051\001\051\001\051\001\ +\000\000\051\001\051\001\000\000\051\001\051\001\051\001\051\001\ +\000\000\051\001\051\001\000\000\051\001\000\000\000\000\000\000\ +\051\001\000\000\059\001\059\001\059\001\059\001\000\000\000\000\ +\059\001\059\001\059\001\059\001\059\001\059\001\059\001\059\001\ +\059\001\059\001\059\001\059\001\059\001\059\001\059\001\059\001\ +\059\001\000\000\059\001\059\001\059\001\059\001\059\001\059\001\ +\059\001\059\001\000\000\000\000\000\000\000\000\059\001\059\001\ +\000\000\000\000\059\001\059\001\059\001\059\001\059\001\059\001\ +\059\001\059\001\059\001\059\001\059\001\059\001\000\000\059\001\ +\059\001\059\001\059\001\000\000\000\000\059\001\059\001\059\001\ +\000\000\059\001\059\001\059\001\059\001\059\001\059\001\000\000\ +\059\001\059\001\059\001\059\001\059\001\000\000\059\001\000\000\ +\000\000\000\000\059\001\059\001\059\001\059\001\059\001\059\001\ +\059\001\059\001\000\000\059\001\000\000\059\001\059\001\088\001\ +\059\001\059\001\059\001\059\001\059\001\000\000\059\001\059\001\ +\000\000\059\001\059\001\059\001\059\001\000\000\059\001\059\001\ +\000\000\059\001\000\000\000\000\000\000\059\001\055\001\055\001\ +\055\001\055\001\000\000\000\000\055\001\055\001\055\001\055\001\ +\055\001\055\001\055\001\055\001\055\001\055\001\055\001\055\001\ +\055\001\055\001\055\001\055\001\055\001\000\000\055\001\055\001\ +\055\001\055\001\055\001\055\001\055\001\055\001\000\000\000\000\ +\000\000\000\000\055\001\055\001\000\000\000\000\055\001\055\001\ +\055\001\055\001\055\001\055\001\055\001\055\001\055\001\055\001\ +\055\001\055\001\000\000\055\001\055\001\055\001\055\001\000\000\ +\000\000\055\001\055\001\055\001\000\000\055\001\055\001\055\001\ +\055\001\055\001\055\001\000\000\055\001\055\001\055\001\055\001\ 
+\055\001\000\000\055\001\000\000\000\000\000\000\055\001\055\001\ +\055\001\055\001\055\001\055\001\055\001\055\001\000\000\055\001\ +\000\000\055\001\055\001\097\001\055\001\055\001\055\001\055\001\ +\055\001\000\000\055\001\055\001\000\000\055\001\055\001\055\001\ +\055\001\000\000\055\001\055\001\000\000\055\001\000\000\000\000\ +\000\000\055\001\057\001\057\001\057\001\057\001\000\000\000\000\ +\057\001\057\001\057\001\057\001\057\001\057\001\057\001\057\001\ +\057\001\057\001\057\001\057\001\057\001\057\001\057\001\057\001\ +\057\001\000\000\057\001\057\001\057\001\057\001\057\001\057\001\ +\057\001\057\001\000\000\000\000\000\000\000\000\057\001\057\001\ +\000\000\000\000\057\001\057\001\057\001\057\001\057\001\057\001\ +\057\001\057\001\057\001\057\001\057\001\057\001\000\000\057\001\ +\057\001\057\001\057\001\000\000\000\000\057\001\057\001\057\001\ +\000\000\057\001\057\001\057\001\057\001\057\001\057\001\000\000\ +\057\001\057\001\057\001\057\001\057\001\000\000\057\001\000\000\ +\000\000\000\000\057\001\057\001\057\001\057\001\057\001\057\001\ +\057\001\057\001\000\000\057\001\000\000\057\001\057\001\099\001\ +\057\001\057\001\057\001\057\001\057\001\000\000\057\001\057\001\ +\000\000\057\001\057\001\057\001\057\001\000\000\057\001\057\001\ +\000\000\057\001\000\000\000\000\000\000\057\001\000\000\088\001\ +\088\001\088\001\088\001\088\001\000\000\088\001\088\001\088\001\ +\088\001\088\001\088\001\088\001\088\001\088\001\088\001\088\001\ +\088\001\088\001\088\001\088\001\088\001\000\000\000\000\088\001\ +\088\001\088\001\088\001\088\001\088\001\088\001\088\001\000\000\ +\000\000\000\000\000\000\088\001\088\001\000\000\000\000\088\001\ +\088\001\088\001\088\001\088\001\088\001\088\001\000\000\088\001\ +\088\001\088\001\088\001\000\000\088\001\088\001\088\001\088\001\ +\000\000\000\000\088\001\088\001\088\001\000\000\088\001\088\001\ +\088\001\088\001\088\001\088\001\000\000\088\001\088\001\088\001\ +\088\001\088\001\000\000\088\001\000\000\000\000\000\000\088\001\ +\088\001\088\001\088\001\088\001\088\001\088\001\088\001\000\000\ +\088\001\000\000\088\001\088\001\102\001\088\001\088\001\088\001\ +\088\001\088\001\000\000\088\001\088\001\000\000\088\001\088\001\ +\088\001\088\001\000\000\088\001\088\001\000\000\088\001\000\000\ +\000\000\000\000\088\001\097\001\097\001\097\001\097\001\097\001\ +\000\000\097\001\097\001\097\001\097\001\097\001\097\001\097\001\ +\097\001\097\001\097\001\097\001\097\001\097\001\097\001\097\001\ +\097\001\000\000\000\000\097\001\097\001\097\001\097\001\097\001\ +\097\001\097\001\097\001\000\000\000\000\000\000\000\000\097\001\ +\097\001\000\000\000\000\097\001\097\001\097\001\097\001\097\001\ +\097\001\097\001\000\000\097\001\097\001\097\001\097\001\000\000\ +\097\001\097\001\097\001\097\001\000\000\000\000\097\001\097\001\ +\097\001\000\000\097\001\097\001\097\001\097\001\097\001\097\001\ +\000\000\097\001\097\001\097\001\097\001\097\001\000\000\097\001\ +\000\000\000\000\000\000\097\001\097\001\097\001\097\001\097\001\ +\097\001\097\001\097\001\000\000\097\001\000\000\097\001\097\001\ +\033\001\097\001\097\001\097\001\000\000\000\000\000\000\097\001\ +\097\001\000\000\097\001\097\001\097\001\097\001\000\000\097\001\ +\097\001\000\000\097\001\000\000\000\000\000\000\097\001\099\001\ +\099\001\099\001\099\001\099\001\000\000\099\001\099\001\099\001\ +\099\001\099\001\099\001\099\001\099\001\099\001\099\001\099\001\ +\099\001\099\001\099\001\099\001\099\001\000\000\000\000\099\001\ +\099\001\099\001\099\001\099\001\099\001\099\001\099\001\000\000\ 
+\000\000\000\000\000\000\099\001\099\001\000\000\000\000\099\001\ +\099\001\099\001\099\001\099\001\099\001\099\001\000\000\099\001\ +\099\001\099\001\099\001\000\000\099\001\099\001\099\001\099\001\ +\000\000\000\000\099\001\099\001\099\001\000\000\099\001\099\001\ +\099\001\099\001\099\001\099\001\000\000\099\001\099\001\099\001\ +\099\001\099\001\000\000\099\001\000\000\000\000\000\000\099\001\ +\099\001\099\001\099\001\099\001\099\001\099\001\099\001\000\000\ +\099\001\000\000\099\001\099\001\034\001\099\001\099\001\099\001\ +\000\000\000\000\000\000\099\001\099\001\000\000\099\001\099\001\ +\099\001\099\001\000\000\099\001\099\001\000\000\099\001\000\000\ +\000\000\000\000\099\001\000\000\102\001\102\001\102\001\102\001\ +\102\001\000\000\102\001\102\001\102\001\102\001\102\001\102\001\ +\102\001\102\001\102\001\102\001\102\001\102\001\102\001\102\001\ +\102\001\102\001\000\000\000\000\102\001\102\001\102\001\102\001\ +\102\001\102\001\102\001\102\001\000\000\000\000\000\000\000\000\ +\102\001\102\001\000\000\000\000\102\001\102\001\102\001\102\001\ +\102\001\102\001\102\001\000\000\102\001\102\001\102\001\102\001\ +\000\000\102\001\102\001\102\001\102\001\000\000\000\000\102\001\ +\102\001\102\001\000\000\102\001\102\001\102\001\102\001\102\001\ +\102\001\000\000\102\001\102\001\102\001\102\001\102\001\000\000\ +\102\001\000\000\000\000\000\000\102\001\102\001\102\001\102\001\ +\102\001\102\001\102\001\102\001\000\000\102\001\000\000\102\001\ +\102\001\228\000\102\001\102\001\102\001\000\000\000\000\000\000\ +\102\001\102\001\000\000\102\001\102\001\102\001\102\001\000\000\ +\102\001\102\001\000\000\102\001\000\000\000\000\000\000\102\001\ +\033\001\033\001\033\001\033\001\000\000\000\000\000\000\000\000\ +\033\001\033\001\033\001\000\000\000\000\033\001\033\001\033\001\ +\033\001\033\001\033\001\033\001\033\001\033\001\033\001\000\000\ +\033\001\033\001\033\001\033\001\033\001\033\001\000\000\000\000\ +\000\000\000\000\000\000\000\000\033\001\033\001\000\000\000\000\ +\033\001\033\001\033\001\033\001\033\001\033\001\033\001\033\001\ +\033\001\033\001\000\000\033\001\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\033\001\033\001\000\000\033\001\ +\000\000\000\000\033\001\033\001\033\001\000\000\033\001\033\001\ +\033\001\033\001\033\001\000\000\000\000\000\000\000\000\000\000\ +\033\001\033\001\033\001\033\001\033\001\033\001\033\001\000\000\ +\000\000\033\001\000\000\033\001\033\001\240\000\033\001\033\001\ +\033\001\033\001\033\001\000\000\033\001\000\000\000\000\033\001\ +\033\001\033\001\000\000\000\000\033\001\000\000\000\000\033\001\ +\000\000\000\000\000\000\033\001\034\001\034\001\034\001\034\001\ +\000\000\000\000\000\000\000\000\034\001\034\001\034\001\000\000\ +\000\000\034\001\034\001\034\001\034\001\034\001\034\001\034\001\ +\034\001\034\001\034\001\000\000\034\001\034\001\034\001\034\001\ +\034\001\034\001\000\000\000\000\000\000\000\000\000\000\000\000\ +\034\001\034\001\000\000\000\000\034\001\034\001\034\001\034\001\ +\034\001\034\001\034\001\034\001\034\001\034\001\000\000\034\001\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\034\001\034\001\000\000\034\001\000\000\000\000\034\001\034\001\ +\034\001\000\000\034\001\034\001\034\001\034\001\034\001\000\000\ +\000\000\000\000\000\000\000\000\034\001\034\001\034\001\034\001\ +\034\001\034\001\034\001\000\000\000\000\034\001\000\000\034\001\ +\034\001\241\000\034\001\034\001\034\001\034\001\034\001\000\000\ +\034\001\000\000\000\000\034\001\034\001\034\001\000\000\000\000\ 
+\034\001\000\000\000\000\034\001\000\000\000\000\000\000\034\001\ +\000\000\228\000\228\000\228\000\228\000\000\000\000\000\000\000\ +\000\000\228\000\228\000\228\000\000\000\000\000\228\000\228\000\ +\228\000\228\000\228\000\228\000\228\000\228\000\228\000\000\000\ +\000\000\228\000\228\000\228\000\228\000\228\000\228\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\228\000\228\000\000\000\ +\000\000\228\000\228\000\228\000\228\000\228\000\228\000\228\000\ +\000\000\228\000\228\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\228\000\228\000\000\000\ +\228\000\000\000\000\000\228\000\228\000\228\000\000\000\228\000\ +\228\000\228\000\228\000\228\000\000\000\000\000\000\000\000\000\ +\000\000\228\000\000\000\228\000\228\000\228\000\228\000\228\000\ +\000\000\000\000\000\000\000\000\228\000\228\000\242\000\228\000\ +\228\000\228\000\000\000\000\000\000\000\228\000\000\000\000\000\ +\228\000\000\000\228\000\000\000\000\000\228\000\000\000\000\000\ +\228\000\000\000\000\000\000\000\228\000\240\000\240\000\240\000\ +\240\000\000\000\000\000\000\000\000\000\240\000\240\000\240\000\ +\000\000\000\000\240\000\240\000\240\000\240\000\240\000\000\000\ +\240\000\240\000\240\000\000\000\000\000\240\000\240\000\240\000\ +\240\000\240\000\240\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\240\000\240\000\000\000\000\000\240\000\240\000\240\000\ +\240\000\240\000\240\000\240\000\000\000\240\000\240\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\240\000\240\000\000\000\240\000\000\000\000\000\240\000\ +\240\000\240\000\000\000\240\000\240\000\240\000\240\000\240\000\ +\000\000\000\000\000\000\000\000\000\000\240\000\000\000\240\000\ +\240\000\240\000\240\000\240\000\000\000\000\000\000\000\000\000\ +\240\000\240\000\025\001\240\000\240\000\240\000\240\000\000\000\ +\000\000\240\000\000\000\000\000\240\000\000\000\240\000\000\000\ +\000\000\240\000\000\000\000\000\240\000\000\000\000\000\000\000\ +\240\000\241\000\241\000\241\000\241\000\000\000\000\000\000\000\ +\000\000\241\000\241\000\241\000\000\000\000\000\241\000\241\000\ +\241\000\241\000\241\000\241\000\241\000\241\000\241\000\000\000\ +\000\000\241\000\241\000\241\000\241\000\241\000\241\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\241\000\241\000\000\000\ +\000\000\241\000\241\000\241\000\241\000\241\000\241\000\241\000\ +\000\000\241\000\241\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\241\000\241\000\000\000\ +\241\000\000\000\000\000\241\000\241\000\241\000\000\000\241\000\ +\241\000\241\000\241\000\241\000\000\000\000\000\000\000\000\000\ +\000\000\241\000\000\000\241\000\241\000\241\000\241\000\241\000\ +\000\000\000\000\000\000\000\000\241\000\241\000\026\001\241\000\ +\241\000\241\000\000\000\000\000\000\000\241\000\000\000\000\000\ +\241\000\000\000\241\000\000\000\000\000\241\000\000\000\000\000\ +\241\000\000\000\000\000\000\000\241\000\000\000\242\000\242\000\ +\242\000\242\000\000\000\000\000\000\000\000\000\242\000\242\000\ +\242\000\000\000\000\000\242\000\242\000\242\000\242\000\242\000\ +\242\000\242\000\242\000\242\000\000\000\000\000\242\000\242\000\ +\242\000\242\000\242\000\242\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\242\000\242\000\000\000\000\000\242\000\242\000\ +\242\000\242\000\242\000\242\000\242\000\000\000\242\000\242\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\242\000\242\000\000\000\242\000\000\000\000\000\ 
+\242\000\242\000\242\000\000\000\242\000\242\000\242\000\242\000\ +\242\000\000\000\000\000\000\000\000\000\000\000\242\000\000\000\ +\242\000\242\000\242\000\242\000\242\000\000\000\000\000\000\000\ +\000\000\242\000\242\000\251\000\242\000\242\000\242\000\000\000\ +\000\000\000\000\242\000\000\000\000\000\242\000\000\000\242\000\ +\000\000\000\000\242\000\000\000\000\000\242\000\000\000\000\000\ +\000\000\242\000\025\001\025\001\025\001\025\001\000\000\000\000\ +\000\000\000\000\025\001\025\001\025\001\000\000\000\000\025\001\ +\025\001\025\001\025\001\025\001\025\001\025\001\025\001\025\001\ +\000\000\000\000\025\001\025\001\025\001\025\001\025\001\025\001\ +\000\000\000\000\000\000\000\000\000\000\000\000\025\001\025\001\ +\000\000\000\000\025\001\025\001\025\001\025\001\025\001\025\001\ +\025\001\000\000\025\001\025\001\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\025\001\025\001\ +\000\000\025\001\000\000\000\000\025\001\025\001\025\001\000\000\ +\025\001\025\001\025\001\025\001\025\001\000\000\000\000\000\000\ +\000\000\000\000\025\001\000\000\025\001\025\001\025\001\025\001\ +\025\001\000\000\000\000\000\000\000\000\025\001\025\001\252\000\ +\025\001\025\001\025\001\000\000\000\000\000\000\025\001\000\000\ +\000\000\025\001\000\000\025\001\000\000\000\000\025\001\000\000\ +\000\000\025\001\000\000\000\000\000\000\025\001\026\001\026\001\ +\026\001\026\001\000\000\000\000\000\000\000\000\026\001\026\001\ +\026\001\000\000\000\000\026\001\026\001\026\001\026\001\026\001\ +\026\001\026\001\026\001\026\001\000\000\000\000\026\001\026\001\ +\026\001\026\001\026\001\026\001\000\000\000\000\000\000\000\000\ +\000\000\000\000\026\001\026\001\000\000\000\000\026\001\026\001\ +\026\001\026\001\026\001\026\001\026\001\000\000\026\001\026\001\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\026\001\026\001\000\000\026\001\000\000\000\000\ +\026\001\026\001\026\001\000\000\026\001\026\001\026\001\026\001\ +\026\001\000\000\000\000\000\000\000\000\000\000\026\001\000\000\ +\026\001\026\001\026\001\026\001\026\001\000\000\000\000\000\000\ +\000\000\026\001\026\001\003\001\026\001\026\001\026\001\000\000\ +\000\000\000\000\026\001\000\000\000\000\026\001\000\000\026\001\ +\000\000\000\000\026\001\000\000\000\000\026\001\000\000\000\000\ +\000\000\026\001\000\000\251\000\251\000\251\000\251\000\000\000\ +\000\000\000\000\000\000\251\000\251\000\251\000\000\000\000\000\ +\251\000\251\000\251\000\251\000\251\000\251\000\251\000\251\000\ +\251\000\000\000\000\000\251\000\251\000\251\000\251\000\251\000\ +\251\000\000\000\000\000\000\000\000\000\000\000\000\000\251\000\ +\251\000\000\000\000\000\251\000\251\000\251\000\251\000\251\000\ +\251\000\000\000\000\000\251\000\251\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\251\000\ +\251\000\000\000\251\000\000\000\000\000\251\000\251\000\251\000\ +\000\000\251\000\251\000\251\000\251\000\251\000\000\000\000\000\ +\000\000\000\000\000\000\251\000\000\000\251\000\251\000\251\000\ +\251\000\251\000\000\000\000\000\000\000\000\000\251\000\251\000\ +\002\001\251\000\251\000\251\000\251\000\000\000\000\000\251\000\ +\000\000\000\000\251\000\000\000\251\000\000\000\000\000\251\000\ +\000\000\000\000\251\000\000\000\000\000\000\000\251\000\252\000\ +\252\000\252\000\252\000\000\000\000\000\000\000\000\000\252\000\ +\252\000\252\000\000\000\000\000\252\000\252\000\252\000\252\000\ +\252\000\252\000\252\000\252\000\252\000\000\000\000\000\252\000\ 
+\252\000\252\000\252\000\252\000\252\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\252\000\252\000\000\000\000\000\252\000\ +\252\000\252\000\252\000\252\000\252\000\000\000\000\000\252\000\ +\252\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\252\000\252\000\000\000\252\000\000\000\ +\000\000\252\000\252\000\252\000\000\000\252\000\252\000\252\000\ +\252\000\252\000\000\000\000\000\000\000\000\000\000\000\252\000\ +\000\000\252\000\252\000\252\000\252\000\252\000\000\000\000\000\ +\000\000\000\000\252\000\252\000\234\000\252\000\252\000\252\000\ +\252\000\000\000\000\000\252\000\000\000\000\000\252\000\000\000\ +\252\000\000\000\000\000\252\000\000\000\000\000\252\000\000\000\ +\000\000\000\000\252\000\003\001\003\001\003\001\003\001\000\000\ +\000\000\000\000\000\000\003\001\003\001\003\001\000\000\000\000\ +\003\001\003\001\003\001\003\001\003\001\003\001\003\001\003\001\ +\003\001\000\000\000\000\003\001\003\001\003\001\003\001\003\001\ +\003\001\000\000\000\000\000\000\000\000\000\000\000\000\003\001\ +\003\001\000\000\000\000\003\001\003\001\003\001\003\001\003\001\ +\003\001\000\000\000\000\003\001\003\001\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\003\001\ +\003\001\000\000\003\001\000\000\000\000\003\001\003\001\003\001\ +\000\000\003\001\003\001\003\001\003\001\003\001\000\000\000\000\ +\000\000\000\000\000\000\003\001\000\000\003\001\003\001\003\001\ +\003\001\003\001\000\000\000\000\000\000\000\000\003\001\003\001\ +\237\000\003\001\003\001\003\001\003\001\000\000\000\000\003\001\ +\000\000\000\000\003\001\000\000\003\001\000\000\000\000\003\001\ +\000\000\000\000\003\001\000\000\000\000\000\000\003\001\000\000\ +\002\001\002\001\002\001\002\001\000\000\000\000\000\000\000\000\ +\002\001\002\001\002\001\000\000\000\000\002\001\002\001\002\001\ +\002\001\002\001\002\001\002\001\002\001\002\001\000\000\000\000\ +\002\001\002\001\002\001\002\001\002\001\002\001\000\000\000\000\ +\000\000\000\000\000\000\000\000\002\001\002\001\000\000\000\000\ +\002\001\002\001\002\001\002\001\002\001\002\001\000\000\000\000\ +\002\001\002\001\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\002\001\002\001\000\000\002\001\ +\000\000\000\000\002\001\002\001\002\001\000\000\002\001\002\001\ +\002\001\002\001\002\001\000\000\000\000\000\000\000\000\000\000\ +\002\001\000\000\002\001\002\001\002\001\002\001\002\001\000\000\ +\000\000\000\000\000\000\002\001\002\001\238\000\002\001\002\001\ +\002\001\002\001\000\000\000\000\002\001\000\000\000\000\002\001\ +\000\000\002\001\000\000\000\000\002\001\000\000\000\000\002\001\ +\000\000\000\000\000\000\002\001\234\000\234\000\234\000\234\000\ +\000\000\000\000\000\000\000\000\000\000\234\000\234\000\000\000\ +\000\000\234\000\234\000\234\000\234\000\234\000\234\000\234\000\ +\234\000\234\000\000\000\000\000\234\000\234\000\234\000\234\000\ +\234\000\234\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\234\000\234\000\000\000\000\000\234\000\234\000\234\000\234\000\ +\234\000\234\000\234\000\000\000\234\000\234\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\234\000\234\000\000\000\234\000\000\000\000\000\234\000\234\000\ +\234\000\000\000\234\000\234\000\234\000\234\000\234\000\000\000\ +\000\000\000\000\000\000\000\000\234\000\000\000\234\000\234\000\ +\234\000\234\000\234\000\000\000\000\000\000\000\000\000\234\000\ +\234\000\250\000\234\000\234\000\234\000\234\000\000\000\000\000\ 
+\234\000\000\000\000\000\234\000\000\000\234\000\000\000\000\000\ +\234\000\000\000\000\000\234\000\000\000\000\000\000\000\234\000\ +\237\000\237\000\237\000\237\000\000\000\000\000\000\000\000\000\ +\000\000\237\000\237\000\000\000\000\000\237\000\237\000\237\000\ +\237\000\237\000\237\000\237\000\237\000\237\000\000\000\000\000\ +\237\000\237\000\237\000\237\000\237\000\237\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\237\000\237\000\000\000\000\000\ +\237\000\237\000\237\000\237\000\237\000\237\000\237\000\000\000\ +\237\000\237\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\237\000\237\000\000\000\237\000\ +\000\000\000\000\237\000\237\000\237\000\000\000\237\000\237\000\ +\237\000\237\000\237\000\000\000\000\000\000\000\000\000\000\000\ +\237\000\000\000\237\000\237\000\237\000\237\000\237\000\000\000\ +\000\000\000\000\000\000\237\000\237\000\000\001\237\000\237\000\ +\237\000\237\000\000\000\000\000\237\000\000\000\000\000\237\000\ +\000\000\237\000\000\000\000\000\237\000\000\000\000\000\237\000\ +\000\000\000\000\000\000\237\000\000\000\238\000\238\000\238\000\ +\238\000\000\000\000\000\000\000\000\000\000\000\238\000\238\000\ +\000\000\000\000\238\000\238\000\238\000\238\000\238\000\238\000\ +\238\000\238\000\238\000\000\000\000\000\238\000\238\000\238\000\ +\238\000\238\000\238\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\238\000\238\000\000\000\000\000\238\000\238\000\238\000\ +\238\000\238\000\238\000\238\000\000\000\238\000\238\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\238\000\238\000\000\000\238\000\000\000\000\000\238\000\ +\238\000\238\000\000\000\238\000\238\000\238\000\238\000\238\000\ +\000\000\000\000\000\000\000\000\000\000\238\000\000\000\238\000\ +\238\000\238\000\238\000\238\000\000\000\000\000\000\000\000\000\ +\238\000\238\000\001\001\238\000\238\000\238\000\238\000\000\000\ +\000\000\238\000\000\000\000\000\238\000\000\000\238\000\000\000\ +\000\000\238\000\000\000\000\000\238\000\000\000\000\000\000\000\ +\238\000\250\000\250\000\250\000\250\000\000\000\000\000\000\000\ +\000\000\250\000\250\000\250\000\000\000\000\000\250\000\250\000\ +\250\000\250\000\250\000\250\000\250\000\250\000\250\000\000\000\ +\000\000\250\000\250\000\250\000\250\000\250\000\250\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\250\000\250\000\000\000\ +\000\000\250\000\250\000\250\000\250\000\250\000\000\000\000\000\ +\000\000\250\000\250\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\250\000\250\000\000\000\ +\250\000\000\000\000\000\250\000\250\000\250\000\000\000\250\000\ +\250\000\250\000\250\000\250\000\000\000\000\000\000\000\000\000\ +\000\000\250\000\000\000\250\000\000\000\250\000\250\000\250\000\ +\000\000\000\000\000\000\000\000\250\000\250\000\253\000\250\000\ +\250\000\250\000\250\000\000\000\000\000\000\000\000\000\000\000\ +\250\000\000\000\250\000\000\000\000\000\250\000\000\000\000\000\ +\250\000\000\000\000\000\000\000\250\000\000\001\000\001\000\001\ +\000\001\000\000\000\000\000\000\000\000\000\001\000\001\000\001\ +\000\000\000\000\000\001\000\001\000\001\000\001\000\001\000\001\ +\000\001\000\001\000\001\000\000\000\000\000\001\000\001\000\001\ +\000\001\000\001\000\001\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\001\000\001\000\000\000\000\000\001\000\001\000\001\ +\000\001\000\001\000\000\000\000\000\000\000\001\000\001\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ 
+\000\000\000\001\000\001\000\000\000\001\000\000\000\000\000\001\ +\000\001\000\001\000\000\000\001\000\001\000\001\000\001\000\001\ +\000\000\000\000\000\000\000\000\000\000\000\001\000\000\000\001\ +\000\000\000\001\000\001\000\001\000\000\000\000\000\000\000\000\ +\000\001\000\001\254\000\000\001\000\001\000\001\000\001\000\000\ +\000\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\ +\000\000\000\001\000\000\000\000\000\001\000\000\000\000\000\000\ +\000\001\000\000\001\001\001\001\001\001\001\001\000\000\000\000\ +\000\000\000\000\001\001\001\001\001\001\000\000\000\000\001\001\ +\001\001\001\001\001\001\001\001\001\001\001\001\001\001\001\001\ +\000\000\000\000\001\001\001\001\001\001\001\001\001\001\001\001\ +\000\000\000\000\000\000\000\000\000\000\000\000\001\001\001\001\ +\000\000\000\000\001\001\001\001\001\001\001\001\001\001\000\000\ +\000\000\000\000\001\001\001\001\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\001\001\001\001\ +\000\000\001\001\000\000\000\000\001\001\001\001\001\001\000\000\ +\001\001\001\001\001\001\001\001\001\001\000\000\000\000\000\000\ +\000\000\000\000\001\001\000\000\001\001\000\000\001\001\001\001\ +\001\001\000\000\000\000\000\000\000\000\001\001\001\001\255\000\ +\001\001\001\001\001\001\001\001\000\000\000\000\000\000\000\000\ +\000\000\001\001\000\000\001\001\000\000\000\000\001\001\000\000\ +\000\000\001\001\000\000\000\000\000\000\001\001\253\000\253\000\ +\253\000\253\000\000\000\000\000\000\000\000\000\253\000\253\000\ +\253\000\000\000\000\000\253\000\253\000\253\000\253\000\253\000\ +\253\000\253\000\253\000\253\000\000\000\000\000\253\000\253\000\ +\253\000\253\000\253\000\253\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\253\000\253\000\000\000\000\000\253\000\253\000\ +\253\000\253\000\253\000\000\000\000\000\000\000\253\000\253\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\253\000\253\000\000\000\253\000\000\000\000\000\ +\253\000\253\000\253\000\000\000\253\000\253\000\253\000\253\000\ +\253\000\000\000\000\000\000\000\000\000\000\000\253\000\000\000\ +\253\000\000\000\253\000\253\000\253\000\000\000\000\000\000\000\ +\000\000\253\000\253\000\208\000\253\000\253\000\253\000\253\000\ +\000\000\000\000\000\000\000\000\000\000\253\000\000\000\253\000\ +\000\000\000\000\253\000\000\000\000\000\253\000\000\000\000\000\ +\000\000\253\000\254\000\254\000\254\000\254\000\000\000\000\000\ +\000\000\000\000\254\000\254\000\254\000\000\000\000\000\254\000\ +\254\000\254\000\254\000\254\000\254\000\254\000\254\000\254\000\ +\000\000\000\000\254\000\254\000\254\000\254\000\254\000\254\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\254\000\254\000\ +\000\000\000\000\254\000\254\000\254\000\254\000\254\000\000\000\ +\000\000\000\000\254\000\254\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\254\000\254\000\ +\000\000\254\000\000\000\000\000\254\000\254\000\254\000\000\000\ +\254\000\254\000\254\000\254\000\254\000\000\000\000\000\000\000\ +\000\000\000\000\254\000\000\000\254\000\000\000\254\000\254\000\ +\254\000\000\000\000\000\000\000\000\000\254\000\254\000\004\001\ +\254\000\254\000\254\000\254\000\000\000\000\000\000\000\000\000\ +\000\000\254\000\000\000\254\000\000\000\000\000\254\000\000\000\ +\000\000\254\000\000\000\000\000\000\000\254\000\000\000\255\000\ +\255\000\255\000\255\000\000\000\000\000\000\000\000\000\255\000\ +\255\000\255\000\000\000\000\000\255\000\255\000\255\000\255\000\ 
+\255\000\255\000\255\000\255\000\255\000\000\000\000\000\255\000\ +\255\000\255\000\255\000\255\000\255\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\255\000\255\000\000\000\000\000\255\000\ +\255\000\255\000\255\000\255\000\000\000\000\000\000\000\255\000\ +\255\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\255\000\255\000\000\000\255\000\000\000\ +\000\000\255\000\255\000\255\000\000\000\255\000\255\000\255\000\ +\255\000\255\000\000\000\000\000\000\000\000\000\000\000\255\000\ +\000\000\255\000\000\000\255\000\255\000\255\000\000\000\000\000\ +\000\000\000\000\255\000\255\000\006\001\255\000\255\000\255\000\ +\255\000\000\000\000\000\000\000\000\000\000\000\255\000\000\000\ +\255\000\000\000\000\000\255\000\000\000\000\000\255\000\000\000\ +\000\000\000\000\255\000\208\000\208\000\208\000\208\000\000\000\ +\000\000\000\000\000\000\208\000\208\000\208\000\000\000\000\000\ +\208\000\208\000\208\000\208\000\208\000\208\000\208\000\208\000\ +\208\000\000\000\000\000\208\000\208\000\208\000\208\000\208\000\ +\208\000\000\000\000\000\000\000\000\000\000\000\000\000\208\000\ +\208\000\000\000\000\000\208\000\208\000\208\000\208\000\208\000\ +\208\000\208\000\000\000\208\000\208\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\208\000\ +\208\000\000\000\000\000\000\000\000\000\208\000\208\000\208\000\ +\000\000\208\000\000\000\000\000\208\000\208\000\000\000\000\000\ +\000\000\000\000\000\000\208\000\000\000\208\000\000\000\000\000\ +\000\000\208\000\000\000\000\000\000\000\000\000\208\000\208\000\ +\248\000\208\000\208\000\208\000\208\000\000\000\000\000\208\000\ +\000\000\000\000\208\000\000\000\208\000\000\000\000\000\208\000\ +\000\000\000\000\208\000\000\000\000\000\000\000\208\000\004\001\ +\004\001\004\001\004\001\000\000\000\000\000\000\000\000\004\001\ +\004\001\004\001\000\000\000\000\004\001\004\001\000\000\004\001\ +\004\001\004\001\004\001\004\001\004\001\000\000\000\000\004\001\ +\004\001\004\001\004\001\004\001\004\001\000\000\000\000\000\000\ +\000\000\000\000\000\000\004\001\004\001\000\000\000\000\004\001\ +\004\001\004\001\000\000\000\000\000\000\000\000\000\000\004\001\ +\004\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\004\001\004\001\000\000\004\001\000\000\ +\000\000\000\000\004\001\004\001\000\000\004\001\000\000\000\000\ +\004\001\004\001\000\000\000\000\000\000\000\000\000\000\004\001\ +\000\000\004\001\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\004\001\004\001\249\000\004\001\004\001\004\001\ +\004\001\000\000\000\000\000\000\000\000\000\000\004\001\000\000\ +\004\001\000\000\000\000\004\001\000\000\000\000\004\001\000\000\ +\000\000\000\000\004\001\000\000\006\001\006\001\006\001\006\001\ +\000\000\000\000\000\000\000\000\006\001\006\001\006\001\000\000\ +\000\000\006\001\006\001\000\000\006\001\006\001\006\001\006\001\ +\006\001\006\001\000\000\000\000\006\001\006\001\006\001\006\001\ +\006\001\006\001\000\000\000\000\000\000\000\000\000\000\000\000\ +\006\001\006\001\000\000\000\000\006\001\006\001\006\001\000\000\ +\000\000\000\000\000\000\000\000\006\001\006\001\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\006\001\006\001\000\000\006\001\000\000\000\000\000\000\006\001\ +\006\001\000\000\006\001\000\000\000\000\006\001\006\001\000\000\ +\000\000\000\000\000\000\000\000\006\001\000\000\006\001\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\006\001\ 
+\006\001\005\001\006\001\006\001\006\001\006\001\000\000\000\000\ +\000\000\000\000\000\000\006\001\000\000\006\001\000\000\000\000\ +\006\001\000\000\000\000\006\001\000\000\000\000\000\000\006\001\ +\248\000\248\000\248\000\248\000\000\000\000\000\000\000\000\000\ +\248\000\248\000\248\000\000\000\000\000\248\000\248\000\000\000\ +\248\000\248\000\248\000\248\000\248\000\248\000\000\000\000\000\ +\248\000\248\000\248\000\248\000\248\000\248\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\248\000\248\000\000\000\000\000\ +\248\000\248\000\248\000\000\000\000\000\000\000\000\000\000\000\ +\248\000\248\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\248\000\248\000\000\000\248\000\ +\000\000\000\000\000\000\248\000\248\000\000\000\248\000\000\000\ +\000\000\248\000\248\000\000\000\000\000\000\000\000\000\000\000\ +\248\000\010\001\248\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\248\000\248\000\000\000\248\000\248\000\ +\248\000\248\000\000\000\000\000\000\000\000\000\000\000\248\000\ +\000\000\248\000\000\000\000\000\248\000\000\000\000\000\248\000\ +\000\000\000\000\000\000\248\000\249\000\249\000\249\000\249\000\ +\000\000\000\000\000\000\000\000\249\000\249\000\249\000\000\000\ +\000\000\249\000\249\000\000\000\249\000\249\000\249\000\249\000\ +\249\000\249\000\000\000\000\000\249\000\249\000\249\000\249\000\ +\249\000\249\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\249\000\249\000\000\000\000\000\249\000\249\000\249\000\000\000\ +\000\000\000\000\000\000\000\000\249\000\249\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\249\000\249\000\000\000\249\000\000\000\000\000\000\000\249\000\ +\249\000\009\001\249\000\000\000\000\000\249\000\249\000\000\000\ +\000\000\000\000\000\000\000\000\249\000\000\000\249\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\249\000\ +\249\000\000\000\249\000\249\000\249\000\249\000\000\000\000\000\ +\000\000\000\000\000\000\249\000\000\000\249\000\000\000\000\000\ +\249\000\000\000\000\000\249\000\000\000\000\000\000\000\249\000\ +\000\000\005\001\005\001\005\001\005\001\000\000\000\000\000\000\ +\000\000\005\001\005\001\005\001\000\000\000\000\005\001\005\001\ +\000\000\005\001\005\001\005\001\005\001\005\001\005\001\000\000\ +\000\000\005\001\005\001\005\001\005\001\005\001\005\001\000\000\ +\000\000\000\000\000\000\000\000\000\000\005\001\005\001\000\000\ +\000\000\005\001\005\001\005\001\000\000\000\000\000\000\000\000\ +\000\000\005\001\005\001\000\000\000\000\000\000\000\000\000\000\ +\000\000\008\001\000\000\000\000\000\000\005\001\005\001\000\000\ +\005\001\000\000\000\000\000\000\005\001\005\001\000\000\005\001\ +\000\000\000\000\005\001\005\001\000\000\000\000\000\000\000\000\ +\000\000\005\001\000\000\005\001\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\005\001\005\001\000\000\005\001\ +\005\001\005\001\005\001\000\000\000\000\000\000\000\000\000\000\ +\005\001\010\001\005\001\000\000\010\001\005\001\000\000\000\000\ +\005\001\010\001\010\001\010\001\005\001\000\000\010\001\010\001\ +\000\000\010\001\010\001\010\001\010\001\010\001\010\001\000\000\ +\000\000\010\001\010\001\010\001\000\000\010\001\010\001\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\010\001\000\000\ +\000\000\010\001\010\001\000\000\000\000\000\000\000\000\000\000\ +\000\000\010\001\010\001\000\000\000\000\000\000\000\000\000\000\ +\000\000\131\001\000\000\000\000\000\000\010\001\000\000\000\000\ 
+\010\001\000\000\000\000\000\000\010\001\010\001\000\000\010\001\ +\000\000\000\000\010\001\010\001\000\000\000\000\000\000\000\000\ +\000\000\010\001\000\000\010\001\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\010\001\010\001\000\000\010\001\ +\010\001\010\001\010\001\000\000\000\000\000\000\000\000\000\000\ +\010\001\009\001\010\001\000\000\009\001\010\001\000\000\000\000\ +\010\001\009\001\009\001\009\001\010\001\000\000\009\001\009\001\ +\000\000\009\001\009\001\009\001\009\001\009\001\009\001\000\000\ +\000\000\009\001\009\001\009\001\000\000\009\001\009\001\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\009\001\000\000\ +\000\000\009\001\009\001\000\000\000\000\000\000\000\000\000\000\ +\000\000\009\001\009\001\000\000\000\000\000\000\000\000\000\000\ +\000\000\007\001\000\000\000\000\000\000\009\001\000\000\000\000\ +\009\001\000\000\000\000\000\000\009\001\009\001\000\000\009\001\ +\000\000\000\000\009\001\009\001\000\000\000\000\000\000\000\000\ +\000\000\009\001\000\000\009\001\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\009\001\009\001\000\000\009\001\ +\009\001\009\001\009\001\000\000\000\000\000\000\000\000\000\000\ +\009\001\008\001\009\001\000\000\008\001\009\001\000\000\000\000\ +\009\001\008\001\000\000\008\001\009\001\000\000\008\001\008\001\ +\000\000\008\001\008\001\008\001\008\001\008\001\008\001\000\000\ +\000\000\008\001\008\001\008\001\000\000\008\001\008\001\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\008\001\000\000\ +\000\000\008\001\008\001\000\000\000\000\000\000\000\000\000\000\ +\000\000\008\001\008\001\000\000\000\000\000\000\000\000\000\000\ +\000\000\130\001\000\000\000\000\000\000\008\001\000\000\000\000\ +\008\001\000\000\000\000\000\000\008\001\008\001\000\000\008\001\ +\000\000\000\000\008\001\008\001\000\000\000\000\000\000\000\000\ +\000\000\008\001\000\000\000\000\000\000\000\000\000\000\021\003\ +\000\000\000\000\000\000\000\000\008\001\008\001\000\000\008\001\ +\008\001\008\001\008\001\000\000\000\000\000\000\000\000\000\000\ +\008\001\131\001\008\001\000\000\131\001\008\001\000\000\000\000\ +\008\001\131\001\000\000\131\001\008\001\000\000\131\001\131\001\ +\000\000\131\001\131\001\131\001\131\001\131\001\131\001\000\000\ +\000\000\131\001\131\001\131\001\000\000\131\001\131\001\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\131\001\000\000\ +\000\000\131\001\131\001\000\000\000\000\000\000\000\000\000\000\ +\000\000\131\001\131\001\000\000\011\001\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\131\001\000\000\000\000\ +\131\001\000\000\000\000\000\000\131\001\131\001\000\000\131\001\ +\000\000\000\000\131\001\131\001\000\000\000\000\000\000\000\000\ +\000\000\131\001\021\003\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\131\001\131\001\000\000\131\001\ +\131\001\131\001\131\001\000\000\000\000\000\000\000\000\000\000\ +\131\001\007\001\131\001\000\000\007\001\131\001\000\000\000\000\ +\131\001\007\001\000\000\007\001\131\001\000\000\007\001\007\001\ +\000\000\007\001\007\001\007\001\007\001\007\001\007\001\000\000\ +\000\000\007\001\007\001\007\001\000\000\007\001\007\001\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\007\001\000\000\ +\000\000\007\001\007\001\000\000\000\000\000\000\000\000\024\001\ +\000\000\007\001\007\001\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\007\001\000\000\000\000\ +\007\001\000\000\000\000\000\000\007\001\007\001\000\000\007\001\ 
+\000\000\000\000\007\001\007\001\000\000\000\000\000\000\000\000\ +\000\000\007\001\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\007\001\007\001\000\000\007\001\ +\007\001\007\001\007\001\000\000\000\000\000\000\000\000\000\000\ +\007\001\130\001\007\001\000\000\130\001\007\001\000\000\000\000\ +\007\001\130\001\000\000\130\001\007\001\000\000\130\001\130\001\ +\000\000\130\001\130\001\130\001\130\001\130\001\130\001\000\000\ +\000\000\130\001\130\001\130\001\000\000\130\001\130\001\021\003\ +\000\000\000\000\000\000\000\000\000\000\000\000\130\001\000\000\ +\000\000\130\001\130\001\000\000\021\003\000\000\000\000\014\001\ +\167\001\130\001\130\001\000\000\000\000\000\000\000\000\000\000\ +\000\000\021\003\000\000\021\003\021\003\130\001\000\000\000\000\ +\130\001\000\000\000\000\000\000\130\001\130\001\000\000\130\001\ +\021\003\000\000\130\001\130\001\000\000\094\000\000\000\000\000\ +\136\000\130\001\137\000\138\000\030\000\000\000\139\000\000\000\ +\000\000\169\001\141\000\021\003\130\001\130\001\021\003\130\001\ +\130\001\130\001\130\001\021\003\011\001\000\000\000\000\011\001\ +\130\001\021\003\130\001\000\000\011\001\130\001\011\001\021\003\ +\130\001\011\001\011\001\144\000\130\001\011\001\000\000\011\001\ +\011\001\011\001\145\000\021\003\011\001\011\001\011\001\021\003\ +\011\001\011\001\021\003\000\000\000\000\021\003\146\000\147\000\ +\000\000\011\001\000\000\021\003\011\001\011\001\021\003\021\003\ +\000\000\000\000\243\000\000\000\011\001\011\001\000\000\000\000\ +\000\000\000\000\000\000\000\000\021\003\000\000\021\003\021\003\ +\011\001\000\000\000\000\011\001\000\000\000\000\000\000\011\001\ +\011\001\000\000\011\001\021\003\000\000\011\001\011\001\000\000\ +\104\000\174\003\000\000\136\000\011\001\137\000\138\000\030\000\ +\000\000\139\000\000\000\000\000\158\001\141\000\021\003\011\001\ +\011\001\000\000\011\001\011\001\011\001\011\001\021\003\024\001\ +\000\000\000\000\024\001\011\001\021\003\011\001\000\000\024\001\ +\011\001\024\001\021\003\011\001\024\001\024\001\144\000\011\001\ +\024\001\000\000\024\001\024\001\024\001\145\000\021\003\024\001\ +\024\001\024\001\021\003\024\001\024\001\000\000\000\000\000\000\ +\000\000\146\000\147\000\000\000\024\001\000\000\021\003\024\001\ +\024\001\021\003\000\000\000\000\000\000\017\001\000\000\024\001\ +\024\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\024\001\000\000\000\000\024\001\000\000\ +\000\000\000\000\024\001\024\001\000\000\024\001\000\000\000\000\ +\024\001\024\001\000\000\000\000\000\000\000\000\000\000\024\001\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\024\001\024\001\000\000\024\001\024\001\024\001\ +\024\001\000\000\000\000\000\000\000\000\000\000\024\001\014\001\ +\024\001\000\000\014\001\024\001\000\000\000\000\024\001\014\001\ +\000\000\014\001\024\001\000\000\014\001\014\001\000\000\000\000\ +\014\001\000\000\014\001\014\001\014\001\000\000\000\000\014\001\ +\014\001\014\001\000\000\014\001\014\001\094\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\014\001\000\000\000\000\014\001\ +\014\001\000\000\094\000\000\000\000\000\016\001\000\000\014\001\ +\014\001\000\000\000\000\000\000\000\000\000\000\000\000\094\000\ +\000\000\094\000\094\000\014\001\000\000\000\000\014\001\000\000\ +\000\000\000\000\014\001\014\001\000\000\014\001\094\000\000\000\ +\014\001\014\001\000\000\021\003\000\000\000\000\136\000\014\001\ +\137\000\138\000\030\000\000\000\139\000\000\000\000\000\158\001\ 
+\141\000\094\000\014\001\014\001\000\000\014\001\014\001\014\001\ +\014\001\094\000\243\000\000\000\000\000\243\000\014\001\094\000\ +\014\001\000\000\243\000\014\001\243\000\094\000\014\001\243\000\ +\243\000\144\000\014\001\243\000\000\000\243\000\243\000\243\000\ +\145\000\094\000\243\000\243\000\243\000\094\000\243\000\243\000\ +\104\000\000\000\000\000\000\000\146\000\147\000\000\000\243\000\ +\000\000\094\000\243\000\243\000\094\000\104\000\000\000\000\000\ +\015\001\000\000\243\000\243\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\104\000\000\000\104\000\104\000\243\000\000\000\ +\000\000\243\000\000\000\000\000\000\000\243\000\243\000\000\000\ +\243\000\104\000\000\000\243\000\243\000\000\000\099\000\000\000\ +\000\000\000\000\243\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\104\000\243\000\243\000\000\000\ +\243\000\243\000\243\000\243\000\104\000\017\001\000\000\000\000\ +\017\001\243\000\104\000\243\000\000\000\017\001\243\000\017\001\ +\104\000\243\000\017\001\017\001\000\000\243\000\017\001\000\000\ +\017\001\017\001\017\001\000\000\104\000\017\001\017\001\017\001\ +\104\000\017\001\017\001\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\017\001\000\000\104\000\017\001\017\001\104\000\ +\000\000\000\000\000\000\020\001\000\000\017\001\017\001\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\017\001\000\000\000\000\017\001\000\000\000\000\000\000\ +\017\001\017\001\000\000\017\001\000\000\000\000\017\001\017\001\ +\000\000\000\000\000\000\000\000\000\000\017\001\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\017\001\017\001\000\000\017\001\017\001\017\001\017\001\000\000\ +\000\000\000\000\000\000\000\000\017\001\016\001\017\001\000\000\ +\016\001\017\001\000\000\000\000\017\001\016\001\000\000\016\001\ +\017\001\000\000\016\001\016\001\000\000\000\000\016\001\000\000\ +\016\001\016\001\016\001\000\000\000\000\016\001\016\001\016\001\ +\000\000\016\001\016\001\021\003\000\000\000\000\000\000\000\000\ +\000\000\000\000\016\001\000\000\000\000\016\001\016\001\000\000\ +\021\003\000\000\000\000\018\001\000\000\016\001\016\001\000\000\ +\000\000\000\000\000\000\000\000\000\000\021\003\000\000\021\003\ +\021\003\016\001\000\000\000\000\016\001\000\000\000\000\000\000\ +\016\001\016\001\000\000\016\001\021\003\000\000\016\001\016\001\ +\000\000\103\000\000\000\000\000\000\000\016\001\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\021\003\ +\016\001\016\001\000\000\016\001\016\001\016\001\016\001\021\003\ +\015\001\000\000\000\000\015\001\016\001\021\003\016\001\000\000\ +\015\001\016\001\015\001\021\003\016\001\015\001\015\001\000\000\ +\016\001\015\001\000\000\015\001\015\001\015\001\000\000\021\003\ +\015\001\015\001\015\001\021\003\015\001\015\001\099\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\015\001\000\000\021\003\ +\015\001\015\001\021\003\099\000\000\000\000\000\019\001\000\000\ +\015\001\015\001\000\000\000\000\000\000\000\000\000\000\000\000\ +\099\000\000\000\099\000\099\000\015\001\000\000\000\000\015\001\ +\000\000\000\000\000\000\015\001\015\001\000\000\015\001\099\000\ +\000\000\015\001\015\001\000\000\000\000\000\000\000\000\000\000\ +\015\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\099\000\015\001\015\001\000\000\015\001\015\001\ +\015\001\015\001\099\000\020\001\000\000\000\000\020\001\015\001\ +\099\000\015\001\000\000\020\001\015\001\020\001\099\000\015\001\ 
+\020\001\020\001\000\000\015\001\020\001\000\000\020\001\020\001\ +\020\001\000\000\099\000\020\001\020\001\020\001\099\000\020\001\ +\020\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\020\001\000\000\099\000\020\001\020\001\099\000\000\000\000\000\ +\000\000\023\001\000\000\020\001\020\001\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\020\001\ +\000\000\000\000\020\001\000\000\000\000\000\000\020\001\020\001\ +\000\000\020\001\000\000\000\000\020\001\020\001\000\000\000\000\ +\000\000\000\000\000\000\020\001\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\020\001\020\001\ +\000\000\020\001\020\001\020\001\020\001\000\000\000\000\000\000\ +\000\000\000\000\020\001\018\001\020\001\000\000\018\001\020\001\ +\000\000\000\000\020\001\018\001\000\000\018\001\020\001\000\000\ +\018\001\018\001\000\000\000\000\018\001\000\000\018\001\018\001\ +\018\001\000\000\000\000\018\001\018\001\018\001\000\000\018\001\ +\018\001\103\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\018\001\000\000\000\000\018\001\018\001\000\000\103\000\000\000\ +\000\000\021\001\000\000\018\001\018\001\000\000\000\000\000\000\ +\000\000\000\000\000\000\103\000\000\000\103\000\103\000\018\001\ +\000\000\000\000\018\001\000\000\000\000\000\000\018\001\018\001\ +\000\000\018\001\103\000\000\000\018\001\018\001\000\000\000\000\ +\000\000\000\000\000\000\018\001\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\103\000\018\001\018\001\ +\000\000\018\001\018\001\018\001\018\001\103\000\019\001\000\000\ +\000\000\019\001\018\001\103\000\018\001\000\000\019\001\018\001\ +\019\001\103\000\018\001\019\001\019\001\000\000\018\001\019\001\ +\000\000\019\001\019\001\019\001\000\000\103\000\019\001\019\001\ +\019\001\103\000\019\001\019\001\010\000\000\000\157\001\000\000\ +\000\000\000\000\000\000\019\001\000\000\103\000\019\001\019\001\ +\103\000\000\000\000\000\000\000\022\001\000\000\019\001\019\001\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\019\001\000\000\000\000\019\001\000\000\000\000\ +\000\000\019\001\019\001\000\000\019\001\000\000\000\000\019\001\ +\019\001\000\000\000\000\000\000\000\000\136\000\019\001\137\000\ +\138\000\030\000\000\000\139\000\000\000\000\000\158\001\141\000\ +\000\000\019\001\019\001\000\000\019\001\019\001\019\001\019\001\ +\000\000\023\001\000\000\000\000\023\001\019\001\000\000\019\001\ +\000\000\023\001\019\001\023\001\000\000\019\001\023\001\023\001\ +\144\000\019\001\023\001\000\000\023\001\023\001\023\001\145\000\ +\000\000\023\001\023\001\023\001\000\000\023\001\023\001\000\000\ +\000\000\000\000\000\000\146\000\147\000\000\000\023\001\000\000\ +\000\000\023\001\023\001\000\000\000\000\000\000\000\000\207\000\ +\000\000\023\001\023\001\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\023\001\000\000\000\000\ +\023\001\000\000\000\000\000\000\023\001\023\001\000\000\023\001\ +\000\000\000\000\023\001\023\001\000\000\000\000\000\000\000\000\ +\000\000\023\001\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\023\001\023\001\000\000\023\001\ +\023\001\023\001\023\001\000\000\000\000\000\000\000\000\000\000\ +\023\001\021\001\023\001\000\000\021\001\023\001\000\000\000\000\ +\023\001\021\001\000\000\021\001\023\001\000\000\021\001\021\001\ +\000\000\000\000\021\001\000\000\021\001\021\001\021\001\000\000\ +\000\000\021\001\021\001\021\001\000\000\021\001\021\001\000\000\ 
+\000\000\000\000\000\000\000\000\000\000\000\000\021\001\000\000\ +\000\000\021\001\021\001\000\000\000\000\000\000\000\000\244\000\ +\000\000\021\001\021\001\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\021\001\000\000\000\000\ +\021\001\000\000\000\000\000\000\021\001\021\001\000\000\021\001\ +\000\000\000\000\021\001\021\001\000\000\000\000\000\000\000\000\ +\000\000\021\001\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\021\001\021\001\000\000\021\001\ +\021\001\021\001\021\001\000\000\022\001\000\000\000\000\022\001\ +\021\001\000\000\021\001\000\000\022\001\021\001\022\001\000\000\ +\021\001\022\001\022\001\000\000\021\001\022\001\000\000\022\001\ +\022\001\022\001\000\000\000\000\022\001\022\001\022\001\000\000\ +\022\001\022\001\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\022\001\000\000\000\000\022\001\022\001\000\000\000\000\ +\000\000\000\000\000\000\000\000\022\001\022\001\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\061\002\000\000\000\000\ +\022\001\000\000\000\000\022\001\000\000\000\000\000\000\022\001\ +\022\001\000\000\022\001\000\000\000\000\022\001\022\001\000\000\ +\000\000\000\000\000\000\000\000\022\001\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\022\001\ +\022\001\000\000\022\001\022\001\022\001\022\001\000\000\207\000\ +\000\000\000\000\207\000\022\001\000\000\022\001\000\000\207\000\ +\022\001\207\000\000\000\022\001\207\000\207\000\000\000\022\001\ +\207\000\000\000\207\000\207\000\207\000\000\000\000\000\207\000\ +\207\000\207\000\000\000\207\000\207\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\207\000\000\000\000\000\207\000\ +\207\000\000\000\000\000\000\000\000\000\000\000\000\000\207\000\ +\207\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\095\002\000\000\000\000\207\000\000\000\000\000\207\000\000\000\ +\000\000\000\000\207\000\207\000\000\000\207\000\000\000\000\000\ +\207\000\207\000\000\000\000\000\000\000\000\000\000\000\207\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\207\000\207\000\000\000\207\000\000\000\207\000\ +\207\000\000\000\000\000\000\000\000\000\000\000\207\000\244\000\ +\207\000\000\000\244\000\207\000\000\000\000\000\207\000\244\000\ +\000\000\244\000\207\000\000\000\244\000\244\000\000\000\000\000\ +\244\000\000\000\244\000\244\000\244\000\000\000\000\000\244\000\ +\000\000\244\000\000\000\244\000\244\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\244\000\000\000\000\000\244\000\ +\244\000\000\000\000\000\000\000\000\000\000\000\000\000\244\000\ +\244\000\000\000\000\000\059\002\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\244\000\000\000\000\000\244\000\000\000\ +\000\000\000\000\244\000\244\000\000\000\244\000\000\000\000\000\ +\244\000\244\000\000\000\000\000\000\000\000\000\000\000\244\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\244\000\244\000\000\000\244\000\244\000\244\000\ +\244\000\000\000\000\000\000\000\000\000\000\000\244\000\000\000\ +\244\000\000\000\000\000\244\000\000\000\061\002\244\000\061\002\ +\061\002\061\002\244\000\000\000\000\000\061\002\000\000\000\000\ +\000\000\000\000\061\002\000\000\000\000\000\000\061\002\061\002\ +\061\002\000\000\000\000\000\000\000\000\154\003\000\000\061\002\ +\061\002\061\002\061\002\000\000\000\000\000\000\000\000\000\000\ +\000\000\061\002\000\000\000\000\000\000\061\002\061\002\000\000\ 
+\057\002\000\000\000\000\000\000\000\000\061\002\061\002\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\061\002\000\000\000\000\061\002\000\000\000\000\061\002\ +\061\002\061\002\000\000\061\002\000\000\000\000\061\002\061\002\ +\000\000\000\000\000\000\000\000\136\000\061\002\137\000\138\000\ +\030\000\000\000\139\000\000\000\000\000\140\000\141\000\000\000\ +\061\002\061\002\000\000\061\002\061\002\061\002\000\000\000\000\ +\095\002\061\002\095\002\095\002\095\002\000\000\142\000\000\000\ +\095\002\061\002\000\000\000\000\061\002\095\002\143\000\144\000\ +\061\002\095\002\095\002\095\002\000\000\000\000\145\000\000\000\ +\000\000\000\000\095\002\095\002\095\002\095\002\000\000\000\000\ +\059\005\000\000\146\000\147\000\095\002\000\000\000\000\000\000\ +\000\000\095\002\000\000\058\002\000\000\000\000\000\000\160\005\ +\095\002\095\002\000\000\000\000\000\000\000\000\243\001\000\000\ +\000\000\000\000\000\000\000\000\095\002\000\000\000\000\095\002\ +\000\000\000\000\095\002\095\002\095\002\000\000\095\002\000\000\ +\000\000\095\002\095\002\000\000\000\000\000\000\000\000\061\005\ +\095\002\137\000\138\000\030\000\000\000\139\000\000\000\000\000\ +\140\000\062\005\000\000\095\002\095\002\000\000\095\002\095\002\ +\095\002\095\002\000\000\059\002\000\000\059\002\059\002\059\002\ +\000\000\142\000\000\000\059\002\095\002\000\000\000\000\095\002\ +\059\002\143\000\144\000\095\002\059\002\059\002\059\002\000\000\ +\000\000\145\000\000\000\000\000\000\000\059\002\059\002\059\002\ +\059\002\000\000\246\001\000\000\000\000\064\005\147\000\059\002\ +\000\000\000\000\000\000\000\000\059\002\000\000\056\002\000\000\ +\000\000\000\000\000\000\059\002\059\002\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\059\002\ +\000\000\000\000\059\002\000\000\000\000\059\002\059\002\059\002\ +\000\000\059\002\000\000\000\000\000\000\059\002\000\000\000\000\ +\000\000\000\000\000\000\059\002\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\059\002\059\002\ +\000\000\059\002\059\002\059\002\059\002\000\000\000\000\000\000\ +\057\002\000\000\057\002\057\002\057\002\000\000\000\000\059\002\ +\057\002\000\000\059\002\000\000\000\000\057\002\059\002\000\000\ +\000\000\057\002\057\002\057\002\000\000\000\000\000\000\000\000\ +\000\000\000\000\057\002\057\002\057\002\057\002\000\000\000\000\ +\000\000\000\000\000\000\000\000\057\002\000\000\000\000\000\000\ +\000\000\057\002\000\000\053\002\000\000\000\000\000\000\000\000\ +\057\002\057\002\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\057\002\000\000\000\000\057\002\ +\000\000\000\000\057\002\057\002\057\002\000\000\057\002\000\000\ +\000\000\000\000\057\002\000\000\000\000\000\000\000\000\136\000\ +\057\002\137\000\138\000\030\000\000\000\139\000\000\000\000\000\ +\140\000\141\000\000\000\057\002\057\002\000\000\057\002\057\002\ +\057\002\057\002\177\001\058\002\000\000\058\002\058\002\058\002\ +\000\000\142\000\000\000\058\002\057\002\000\000\000\000\057\002\ +\058\002\143\000\144\000\057\002\058\002\058\002\058\002\042\002\ +\000\000\145\000\000\000\000\000\000\000\058\002\058\002\058\002\ +\058\002\000\000\000\000\000\000\000\000\146\000\147\000\058\002\ +\000\000\000\000\000\000\000\000\058\002\000\000\000\000\000\000\ +\000\000\000\000\000\000\058\002\058\002\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\058\002\ +\000\000\000\000\058\002\000\000\000\000\058\002\058\002\058\002\ 
+\000\000\058\002\000\000\000\000\000\000\058\002\000\000\000\000\ +\000\000\041\002\136\000\058\002\137\000\138\000\030\000\000\000\ +\139\000\000\000\000\000\140\000\141\000\000\000\058\002\058\002\ +\000\000\058\002\058\002\058\002\058\002\000\000\056\002\000\000\ +\056\002\056\002\056\002\000\000\142\000\000\000\056\002\058\002\ +\000\000\000\000\058\002\056\002\143\000\126\003\058\002\056\002\ +\056\002\056\002\000\000\000\000\145\000\000\000\000\000\000\000\ +\056\002\056\002\056\002\056\002\000\000\000\000\000\000\068\006\ +\146\000\147\000\056\002\039\002\000\000\000\000\000\000\056\002\ +\000\000\000\000\000\000\000\000\000\000\000\000\056\002\056\002\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\056\002\000\000\000\000\056\002\000\000\000\000\ +\056\002\056\002\056\002\000\000\056\002\000\000\000\000\000\000\ +\056\002\000\000\000\000\000\000\000\000\000\000\056\002\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\056\002\056\002\000\000\056\002\056\002\056\002\056\002\ +\000\000\197\000\000\000\053\002\000\000\053\002\053\002\000\000\ +\000\000\000\000\056\002\053\002\000\000\056\002\000\000\000\000\ +\053\002\056\002\000\000\000\000\053\002\053\002\053\002\000\000\ +\000\000\000\000\000\000\000\000\000\000\053\002\053\002\053\002\ +\053\002\000\000\000\000\000\000\000\000\000\000\000\000\053\002\ +\000\000\000\000\000\000\000\000\053\002\000\000\000\000\000\000\ +\000\000\000\000\000\000\053\002\053\002\000\000\000\000\000\000\ +\000\000\000\000\000\000\084\000\000\000\000\000\000\000\053\002\ +\000\000\000\000\053\002\000\000\000\000\053\002\053\002\053\002\ +\000\000\053\002\000\000\000\000\010\000\053\002\157\001\042\002\ +\000\000\000\000\042\002\053\002\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\042\002\000\000\053\002\053\002\ +\042\002\053\002\053\002\053\002\053\002\000\000\000\000\000\000\ +\000\000\042\002\042\002\042\002\042\002\000\000\000\000\053\002\ +\000\000\000\000\053\002\000\000\000\000\000\000\053\002\000\000\ +\042\002\000\000\000\000\000\000\000\000\136\000\000\000\137\000\ +\138\000\030\000\000\000\139\000\000\000\000\000\158\001\141\000\ +\000\000\041\002\000\000\042\002\041\002\000\000\042\002\000\000\ +\000\000\042\002\042\002\042\002\000\000\000\000\041\002\000\000\ +\042\002\042\002\041\002\000\000\000\000\000\000\000\000\042\002\ +\144\000\000\000\227\002\041\002\041\002\041\002\041\002\145\000\ +\000\000\000\000\000\000\042\002\023\003\042\002\000\000\042\002\ +\042\002\000\000\041\002\146\000\147\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\042\002\000\000\000\000\042\002\000\000\ +\000\000\000\000\042\002\039\002\000\000\041\002\039\002\000\000\ +\041\002\000\000\000\000\041\002\041\002\041\002\000\000\000\000\ +\039\002\000\000\041\002\041\002\039\002\000\000\000\000\000\000\ +\000\000\041\002\000\000\000\000\000\000\039\002\039\002\039\002\ +\039\002\000\000\000\000\000\000\000\000\041\002\000\000\041\002\ +\000\000\041\002\041\002\000\000\039\002\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\041\002\000\000\000\000\ +\041\002\000\000\000\000\000\000\041\002\000\000\000\000\039\002\ +\000\000\197\000\039\002\000\000\197\000\039\002\039\002\039\002\ +\000\000\000\000\000\000\000\000\039\002\039\002\197\000\000\000\ +\000\000\000\000\197\000\039\002\197\000\000\000\000\000\000\000\ +\000\000\000\000\128\000\197\000\197\000\197\000\197\000\039\002\ +\000\000\039\002\000\000\039\002\039\002\000\000\000\000\000\000\ 
+\000\000\000\000\197\000\000\000\000\000\000\000\000\000\039\002\ +\000\000\000\000\039\002\000\000\000\000\000\000\039\002\000\000\ +\000\000\000\000\000\000\084\000\000\000\197\000\084\000\000\000\ +\197\000\000\000\000\000\000\000\197\000\197\000\000\000\000\000\ +\084\000\000\000\197\000\197\000\084\000\000\000\000\000\000\000\ +\000\000\197\000\000\000\000\000\000\000\084\000\084\000\084\000\ +\084\000\000\000\000\000\000\000\000\000\197\000\000\000\197\000\ +\000\000\197\000\197\000\000\000\084\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\197\000\000\000\000\000\ +\197\000\120\000\000\000\000\000\197\000\000\000\000\000\084\000\ +\000\000\000\000\084\000\000\000\000\000\000\000\084\000\084\000\ +\000\000\000\000\000\000\000\000\084\000\084\000\244\004\000\000\ +\137\000\138\000\030\000\084\000\139\000\000\000\245\004\246\004\ +\141\000\000\000\000\000\000\000\000\000\000\000\000\000\084\000\ +\000\000\084\000\000\000\084\000\084\000\247\004\000\000\000\000\ +\248\004\000\000\000\000\000\000\000\000\000\000\000\000\084\000\ +\249\004\144\000\084\000\000\000\023\003\000\000\084\000\023\003\ +\145\000\023\003\023\003\023\003\023\003\000\000\000\000\023\003\ +\023\003\023\003\000\000\000\000\146\000\147\000\000\000\023\003\ +\000\000\000\000\000\000\023\003\000\000\000\000\023\003\000\000\ +\023\003\023\003\023\003\023\003\023\003\023\003\023\003\023\003\ +\023\003\000\000\000\000\023\003\023\003\023\003\000\000\000\000\ +\000\000\000\000\000\000\000\000\023\003\023\003\023\003\023\003\ +\023\003\023\003\023\003\023\003\023\003\023\003\023\003\023\003\ +\023\003\023\003\150\001\023\003\023\003\023\003\000\000\023\003\ +\023\003\023\003\023\003\023\003\023\003\000\000\023\003\023\003\ +\023\003\023\003\023\003\000\000\023\003\023\003\000\000\000\000\ +\023\003\023\003\000\000\023\003\023\003\023\003\023\003\023\003\ +\023\003\023\003\000\000\023\003\023\003\023\003\000\000\023\003\ +\000\000\023\003\023\003\000\000\023\003\000\000\023\003\023\003\ +\023\003\023\003\023\003\023\003\023\003\000\000\023\003\009\000\ +\010\000\011\000\000\000\000\000\000\000\012\000\013\000\014\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\015\000\016\000\ +\017\000\018\000\019\000\020\000\021\000\000\000\000\000\000\000\ +\000\000\022\000\000\000\023\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\024\000\000\000\025\000\026\000\ +\027\000\028\000\029\000\000\000\000\000\030\000\031\000\000\000\ +\000\000\032\000\033\000\034\000\000\000\164\002\035\000\036\000\ +\000\000\037\000\038\000\000\000\039\000\000\000\040\000\000\000\ +\041\000\000\000\042\000\000\000\000\000\000\000\043\000\044\000\ +\000\000\045\000\000\000\000\000\000\000\000\000\009\000\010\000\ +\011\000\000\000\129\000\121\000\012\000\013\000\014\000\047\000\ +\000\000\000\000\000\000\000\000\048\000\049\000\050\000\051\000\ +\052\000\053\000\000\000\000\000\054\000\015\000\016\000\017\000\ +\018\000\019\000\020\000\021\000\000\000\000\000\000\000\000\000\ +\022\000\000\000\023\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\024\000\000\000\025\000\026\000\027\000\ +\028\000\029\000\000\000\000\000\030\000\031\000\000\000\000\000\ +\032\000\033\000\034\000\000\000\000\000\035\000\036\000\000\000\ +\037\000\038\000\000\000\039\000\000\000\040\000\000\000\041\000\ +\000\000\042\000\000\000\109\000\000\000\043\000\044\000\000\000\ +\045\000\178\001\136\000\000\000\137\000\138\000\030\000\000\000\ 
+\139\000\000\000\121\000\140\000\141\000\000\000\047\000\000\000\ +\000\000\000\000\000\000\048\000\049\000\050\000\051\000\052\000\ +\053\000\000\000\000\000\054\000\142\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\143\000\144\000\000\000\009\000\ +\010\000\011\000\000\000\000\000\145\000\012\000\013\000\014\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\146\000\147\000\000\000\000\000\000\000\000\000\015\000\016\000\ +\017\000\018\000\019\000\020\000\021\000\000\000\000\000\000\000\ +\000\000\022\000\000\000\023\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\024\000\000\000\025\000\026\000\ +\027\000\028\000\029\000\000\000\000\000\030\000\031\000\000\000\ +\000\000\032\000\033\000\034\000\000\000\000\000\035\000\036\000\ +\000\000\037\000\038\000\000\000\039\000\132\000\040\000\000\000\ +\041\000\000\000\042\000\000\000\000\000\000\000\043\000\044\000\ +\000\000\045\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\121\000\000\000\000\000\000\000\047\000\ +\000\000\000\000\000\000\000\000\048\000\049\000\050\000\051\000\ +\052\000\053\000\000\000\000\000\054\000\164\002\000\000\000\000\ +\000\000\164\002\000\000\164\002\000\000\164\002\000\000\164\002\ +\000\000\164\002\000\000\164\002\164\002\000\000\164\002\164\002\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\164\002\164\002\000\000\164\002\164\002\134\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\164\002\ +\164\002\164\002\164\002\000\000\164\002\164\002\000\000\000\000\ +\164\002\000\000\000\000\000\000\000\000\164\002\164\002\164\002\ +\000\000\000\000\000\000\000\000\164\002\000\000\164\002\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\164\002\000\000\ +\000\000\164\002\000\000\000\000\000\000\000\000\164\002\135\000\ +\164\002\164\002\000\000\164\002\164\002\000\000\164\002\000\000\ +\000\000\000\000\164\002\109\000\000\000\164\002\000\000\164\002\ +\000\000\178\001\164\002\164\002\000\000\178\001\164\002\178\001\ +\109\000\178\001\000\000\178\001\000\000\178\001\000\000\178\001\ +\178\001\000\000\178\001\178\001\000\000\109\000\000\000\109\000\ +\109\000\000\000\000\000\000\000\178\001\000\000\000\000\178\001\ +\178\001\000\000\000\000\000\000\109\000\000\000\000\000\000\000\ +\000\000\130\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\178\001\178\001\000\000\178\001\109\000\ +\178\001\178\001\000\000\000\000\178\001\000\000\109\000\109\000\ +\000\000\178\001\178\001\178\001\000\000\109\000\000\000\000\000\ +\178\001\000\000\178\001\109\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\178\001\000\000\000\000\178\001\000\000\109\000\ +\000\000\000\000\178\001\109\000\178\001\178\001\000\000\178\001\ +\178\001\000\000\178\001\136\000\000\000\000\000\178\001\109\000\ +\000\000\178\001\109\000\178\001\000\000\132\000\178\001\178\001\ +\132\000\132\000\178\001\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\132\000\132\000\000\000\000\000\000\000\000\000\ +\132\000\000\000\000\000\000\000\000\000\000\000\000\000\132\000\ +\000\000\132\000\132\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\132\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\132\000\132\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\132\000\000\000\000\000\132\000\000\000\000\000\132\000\ 
+\132\000\132\000\000\000\132\000\000\000\134\000\000\000\132\000\ +\134\000\134\000\014\002\000\000\000\000\132\000\000\000\000\000\ +\000\000\000\000\134\000\134\000\000\000\000\000\000\000\000\000\ +\134\000\132\000\000\000\132\000\000\000\132\000\132\000\134\000\ +\000\000\134\000\134\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\132\000\000\000\000\000\132\000\000\000\134\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\134\000\134\000\000\000\ +\000\000\000\000\000\000\181\000\000\000\000\000\000\000\135\000\ +\000\000\134\000\135\000\135\000\134\000\000\000\000\000\134\000\ +\134\000\134\000\000\000\134\000\135\000\135\000\000\000\134\000\ +\000\000\000\000\135\000\000\000\000\000\134\000\000\000\000\000\ +\000\000\135\000\000\000\135\000\135\000\000\000\000\000\000\000\ +\000\000\134\000\000\000\134\000\000\000\134\000\134\000\000\000\ +\135\000\000\000\000\000\000\000\000\000\183\002\000\000\135\000\ +\135\000\134\000\000\000\000\000\134\000\000\000\000\000\000\000\ +\000\000\130\000\000\000\135\000\130\000\130\000\135\000\000\000\ +\000\000\000\000\135\000\135\000\000\000\135\000\130\000\130\000\ +\000\000\135\000\000\000\000\000\130\000\000\000\000\000\135\000\ +\000\000\000\000\000\000\130\000\000\000\130\000\130\000\000\000\ +\000\000\000\000\000\000\135\000\000\000\135\000\000\000\135\000\ +\135\000\000\000\130\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\130\000\130\000\135\000\000\000\000\000\135\000\000\000\ +\000\000\000\000\000\000\136\000\000\000\130\000\136\000\136\000\ +\130\000\015\002\000\000\000\000\130\000\130\000\000\000\130\000\ +\136\000\136\000\000\000\130\000\000\000\000\000\136\000\000\000\ +\000\000\130\000\000\000\000\000\000\000\136\000\000\000\136\000\ +\136\000\000\000\000\000\000\000\000\000\130\000\000\000\130\000\ +\000\000\130\000\130\000\000\000\136\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\136\000\136\000\130\000\000\000\000\000\ +\130\000\000\000\000\000\000\000\000\000\000\000\000\000\136\000\ +\000\000\000\000\136\000\182\000\000\000\000\000\136\000\136\000\ +\000\000\136\000\000\000\000\000\000\000\136\000\136\000\000\000\ +\137\000\138\000\030\000\136\000\139\000\000\000\000\000\140\000\ +\141\000\000\000\014\002\000\000\000\000\014\002\000\000\136\000\ +\000\000\136\000\014\002\136\000\136\000\000\000\000\000\014\002\ +\142\000\000\000\000\000\000\000\000\000\014\002\000\000\136\000\ +\143\000\126\003\136\000\000\000\014\002\000\000\014\002\014\002\ +\145\000\000\000\000\000\000\000\000\000\021\003\000\000\000\000\ +\000\000\000\000\014\002\014\002\146\000\147\000\000\000\000\000\ +\000\000\000\000\000\000\181\000\000\000\000\000\181\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\014\002\000\000\ +\181\000\014\002\000\000\000\000\014\002\014\002\014\002\000\000\ +\000\000\000\000\000\000\098\002\014\002\181\000\181\000\181\000\ +\181\000\000\000\014\002\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\181\000\000\000\014\002\023\003\ +\000\000\000\000\014\002\014\002\000\000\183\002\098\002\000\000\ +\183\002\000\000\000\000\000\000\000\000\000\000\014\002\181\000\ +\000\000\014\002\183\002\080\002\000\000\181\000\181\000\181\000\ +\000\000\000\000\000\000\000\000\080\002\181\000\000\000\183\002\ +\183\002\183\002\183\002\181\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\183\002\181\000\ +\000\000\181\000\000\000\181\000\080\002\000\000\000\000\080\002\ +\000\000\232\001\000\000\000\000\000\000\000\000\000\000\181\000\ 
+\080\002\183\002\181\000\000\000\000\000\174\002\000\000\183\002\ +\183\002\183\002\000\000\000\000\000\000\000\000\174\002\183\002\ +\000\000\015\002\000\000\000\000\015\002\183\002\000\000\000\000\ +\000\000\015\002\000\000\000\000\000\000\000\000\015\002\000\000\ +\000\000\183\002\000\000\183\002\015\002\183\002\174\002\000\000\ +\000\000\174\002\071\000\015\002\000\000\015\002\015\002\000\000\ +\000\000\183\002\174\002\000\000\183\002\000\000\000\000\000\000\ +\000\000\015\002\015\002\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\182\000\000\000\015\002\182\000\000\000\ +\015\002\000\000\000\000\015\002\015\002\015\002\000\000\000\000\ +\182\000\000\000\015\002\015\002\000\000\000\000\182\000\233\001\ +\000\000\015\002\000\000\000\000\000\000\182\000\182\000\182\000\ +\182\000\000\000\000\000\000\000\000\000\015\002\000\000\000\000\ +\000\000\015\002\015\002\000\000\182\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\182\000\000\000\015\002\000\000\000\000\ +\015\002\000\000\000\000\000\000\000\000\021\003\000\000\182\000\ +\021\003\000\000\182\000\000\000\000\000\000\000\182\000\182\000\ +\000\000\182\000\021\003\000\000\235\001\182\000\000\000\000\000\ +\021\003\000\000\000\000\182\000\000\000\000\000\000\000\021\003\ +\000\000\021\003\021\003\000\000\000\000\000\000\000\000\182\000\ +\000\000\182\000\000\000\182\000\182\000\021\003\021\003\000\000\ +\000\000\000\000\000\000\000\000\000\000\021\003\021\003\182\000\ +\000\000\000\000\182\000\000\000\000\000\000\000\000\000\023\003\ +\000\000\021\003\023\003\000\000\021\003\000\000\000\000\023\003\ +\000\000\021\003\000\000\021\003\023\003\000\000\000\000\021\003\ +\000\000\000\000\023\003\000\000\000\000\021\003\000\000\000\000\ +\000\000\023\003\000\000\023\003\023\003\000\000\000\000\234\001\ +\000\000\021\003\000\000\000\000\000\000\021\003\021\003\000\000\ +\023\003\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\021\003\000\000\000\000\021\003\000\000\000\000\000\000\ +\000\000\232\001\000\000\023\003\232\001\000\000\023\003\000\000\ +\000\000\000\000\023\003\023\003\000\000\000\000\232\001\000\000\ +\000\000\023\003\000\000\000\000\232\001\000\000\000\000\023\003\ +\000\000\000\000\000\000\232\001\236\001\232\001\232\001\000\000\ +\000\000\000\000\000\000\023\003\000\000\011\002\000\000\023\003\ +\023\003\000\000\232\001\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\071\000\023\003\000\000\071\000\023\003\000\000\ +\000\000\000\000\000\000\000\000\000\000\232\001\000\000\071\000\ +\232\001\000\000\000\000\000\000\232\001\232\001\000\000\000\000\ +\000\000\000\000\000\000\232\001\071\000\000\000\071\000\071\000\ +\000\000\232\001\000\000\000\000\000\000\000\000\000\000\240\001\ +\000\000\000\000\071\000\071\000\000\000\232\001\000\000\000\000\ +\000\000\232\001\232\001\000\000\000\000\000\000\000\000\233\001\ +\000\000\000\000\233\001\000\000\000\000\232\001\071\000\000\000\ +\232\001\071\000\000\000\000\000\233\001\071\000\071\000\000\000\ +\000\000\000\000\233\001\000\000\071\000\000\000\000\000\000\000\ +\000\000\233\001\071\000\233\001\233\001\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\011\002\000\000\071\000\000\000\ +\233\001\000\000\071\000\071\000\000\000\021\003\000\000\000\000\ +\000\000\000\000\000\000\000\000\235\001\000\000\071\000\235\001\ +\000\000\071\000\000\000\233\001\000\000\000\000\233\001\000\000\ +\000\000\235\001\233\001\233\001\000\000\000\000\000\000\235\001\ 
+\000\000\233\001\000\000\000\000\000\000\000\000\235\001\233\001\ +\235\001\235\001\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\233\001\000\000\235\001\000\000\233\001\ +\233\001\000\000\125\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\233\001\000\000\000\000\233\001\000\000\ +\235\001\000\000\000\000\235\001\000\000\000\000\000\000\235\001\ +\235\001\000\000\000\000\000\000\000\000\000\000\235\001\234\001\ +\000\000\000\000\234\001\000\000\235\001\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\234\001\000\000\000\000\000\000\ +\235\001\000\000\234\001\000\000\235\001\235\001\000\000\126\000\ +\000\000\234\001\000\000\234\001\234\001\000\000\000\000\000\000\ +\235\001\000\000\000\000\235\001\000\000\000\000\000\000\000\000\ +\234\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\236\001\000\000\000\000\236\001\ +\000\000\000\000\000\000\234\001\000\000\011\002\234\001\000\000\ +\000\000\236\001\234\001\234\001\000\000\011\002\000\000\236\001\ +\023\003\234\001\011\002\000\000\000\000\000\000\236\001\234\001\ +\236\001\236\001\023\003\000\000\000\000\000\000\000\000\011\002\ +\000\000\011\002\011\002\234\001\000\000\236\001\000\000\234\001\ +\234\001\000\000\000\000\000\000\000\000\000\000\011\002\000\000\ +\000\000\000\000\000\000\234\001\000\000\000\000\234\001\240\001\ +\236\001\000\000\240\001\236\001\000\000\000\000\000\000\236\001\ +\236\001\011\002\000\000\000\000\240\001\000\000\236\001\011\002\ +\011\002\011\002\240\001\000\000\236\001\000\000\000\000\011\002\ +\000\000\240\001\000\000\240\001\240\001\011\002\000\000\000\000\ +\236\001\000\000\000\000\118\000\236\001\236\001\000\000\000\000\ +\240\001\011\002\000\000\000\000\000\000\011\002\000\000\000\000\ +\236\001\000\000\000\000\236\001\011\002\000\000\000\000\000\000\ +\000\000\011\002\000\000\240\001\011\002\021\003\240\001\000\000\ +\021\003\011\002\240\001\240\001\000\000\000\000\000\000\000\000\ +\000\000\240\001\021\003\000\000\000\000\000\000\011\002\240\001\ +\011\002\011\002\000\000\000\000\021\003\000\000\000\000\021\003\ +\000\000\021\003\021\003\240\001\000\000\011\002\119\000\240\001\ +\240\001\000\000\000\000\000\000\000\000\021\003\021\003\000\000\ +\000\000\000\000\000\000\240\001\000\000\000\000\240\001\000\000\ +\011\002\000\000\125\000\011\002\000\000\125\000\011\002\011\002\ +\011\002\021\003\000\000\000\000\021\003\000\000\011\002\125\000\ +\000\000\021\003\000\000\000\000\011\002\000\000\000\000\021\003\ +\000\000\000\000\000\000\000\000\125\000\021\003\125\000\125\000\ +\011\002\000\000\000\000\000\000\011\002\011\002\000\000\000\000\ +\000\000\021\003\000\000\125\000\223\001\021\003\021\003\000\000\ +\011\002\000\000\000\000\011\002\000\000\000\000\000\000\126\000\ +\000\000\021\003\126\000\000\000\021\003\000\000\125\000\000\000\ +\000\000\125\000\000\000\000\000\126\000\125\000\125\000\000\000\ +\000\000\000\000\000\000\000\000\125\000\000\000\000\000\000\000\ +\000\000\126\000\125\000\126\000\126\000\000\000\000\000\000\000\ +\061\000\000\000\000\000\000\000\000\000\000\000\125\000\000\000\ +\126\000\064\000\125\000\125\000\000\000\000\000\000\000\000\000\ +\023\003\000\000\000\000\000\000\000\000\000\000\125\000\000\000\ +\023\003\125\000\023\003\126\000\000\000\023\003\126\000\000\000\ +\000\000\000\000\126\000\126\000\000\000\000\000\000\000\023\003\ +\000\000\126\000\023\003\000\000\023\003\023\003\000\000\126\000\ +\000\000\000\000\000\000\000\000\023\003\000\000\023\003\023\003\ 
+\000\000\023\003\065\000\126\000\000\000\000\000\000\000\126\000\ +\126\000\000\000\000\000\023\003\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\126\000\023\003\000\000\126\000\023\003\ +\000\000\000\000\000\000\023\003\023\003\000\000\023\003\000\000\ +\000\000\023\003\023\003\118\000\000\000\023\003\023\003\000\000\ +\023\003\000\000\000\000\000\000\023\003\000\000\021\003\000\000\ +\118\000\000\000\023\003\000\000\023\003\000\000\000\000\000\000\ +\023\003\023\003\000\000\000\000\000\000\118\000\023\003\118\000\ +\118\000\000\000\023\003\023\003\023\003\000\000\000\000\023\003\ +\000\000\000\000\000\000\000\000\118\000\000\000\023\003\000\000\ +\000\000\023\003\000\000\000\000\021\003\000\000\000\000\021\003\ +\000\000\000\000\000\000\000\000\000\000\000\000\119\000\118\000\ +\000\000\021\003\118\000\000\000\000\000\000\000\118\000\118\000\ +\000\000\000\000\000\000\119\000\000\000\118\000\021\003\000\000\ +\021\003\021\003\000\000\118\000\000\000\000\000\000\000\000\000\ +\119\000\000\000\119\000\119\000\000\000\021\003\000\000\118\000\ +\000\000\000\000\000\000\118\000\118\000\000\000\000\000\119\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\118\000\ +\021\003\000\000\118\000\021\003\000\000\000\000\000\000\000\000\ +\021\003\000\000\119\000\000\000\223\001\119\000\021\003\000\000\ +\000\000\119\000\119\000\000\000\021\003\000\000\000\000\000\000\ +\119\000\223\001\000\000\000\000\000\000\000\000\119\000\000\000\ +\021\003\000\000\000\000\000\000\021\003\021\003\223\001\000\000\ +\223\001\223\001\119\000\000\000\000\000\000\000\119\000\119\000\ +\021\003\000\000\000\000\021\003\000\000\223\001\000\000\000\000\ +\061\000\000\000\119\000\000\000\000\000\119\000\000\000\000\000\ +\000\000\064\000\000\000\000\000\000\000\061\000\000\000\000\000\ +\223\001\000\000\000\000\223\001\000\000\000\000\064\000\223\001\ +\223\001\000\000\061\000\000\000\061\000\061\000\223\001\000\000\ +\000\000\000\000\000\000\064\000\223\001\064\000\064\000\000\000\ +\000\000\061\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\223\001\000\000\064\000\000\000\223\001\223\001\000\000\000\000\ +\000\000\000\000\065\000\000\000\061\000\000\000\000\000\061\000\ +\223\001\000\000\000\000\223\001\061\000\064\000\000\000\065\000\ +\064\000\000\000\061\000\000\000\000\000\064\000\000\000\000\000\ +\061\000\000\000\000\000\064\000\065\000\000\000\065\000\065\000\ +\000\000\064\000\000\000\000\000\061\000\000\000\000\000\000\000\ +\061\000\061\000\000\000\065\000\000\000\064\000\021\003\000\000\ +\000\000\064\000\064\000\000\000\061\000\000\000\000\000\061\000\ +\000\000\000\000\000\000\021\003\000\000\064\000\065\000\000\000\ +\064\000\065\000\000\000\000\000\000\000\000\000\065\000\000\000\ +\021\003\000\000\021\003\021\003\065\000\000\000\000\000\000\000\ +\000\000\000\000\065\000\000\000\000\000\000\000\000\000\021\003\ +\000\000\000\000\000\000\000\000\000\000\000\000\065\000\000\000\ +\000\000\000\000\065\000\065\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\021\003\000\000\000\000\021\003\065\000\000\000\ +\000\000\065\000\021\003\000\000\000\000\000\000\000\000\000\000\ +\021\003\000\000\000\000\000\000\000\000\000\000\021\003\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\021\003\016\003\000\000\000\000\021\003\021\003\ +\016\003\016\003\016\003\016\003\000\000\000\000\016\003\016\003\ +\016\003\016\003\021\003\000\000\000\000\021\003\016\003\000\000\ +\000\000\000\000\000\000\000\000\000\000\016\003\000\000\016\003\ 
+\016\003\016\003\016\003\016\003\016\003\016\003\016\003\000\000\ +\000\000\000\000\016\003\000\000\016\003\000\000\000\000\000\000\ +\000\000\000\000\000\000\016\003\016\003\016\003\016\003\016\003\ +\016\003\016\003\016\003\016\003\000\000\000\000\016\003\016\003\ +\000\000\000\000\016\003\016\003\016\003\016\003\000\000\016\003\ +\016\003\016\003\016\003\016\003\000\000\016\003\000\000\016\003\ +\016\003\016\003\000\000\016\003\016\003\000\000\000\000\016\003\ +\016\003\000\000\016\003\000\000\016\003\016\003\000\000\016\003\ +\016\003\000\000\000\000\016\003\016\003\000\000\016\003\000\000\ +\016\003\016\003\000\000\016\003\000\000\016\003\016\003\016\003\ +\016\003\016\003\016\003\016\003\023\003\016\003\000\000\000\000\ +\000\000\023\003\023\003\023\003\023\003\000\000\000\000\023\003\ +\023\003\000\000\000\000\000\000\000\000\000\000\000\000\023\003\ +\000\000\000\000\000\000\000\000\000\000\000\000\023\003\000\000\ +\023\003\000\000\023\003\023\003\023\003\023\003\023\003\023\003\ +\000\000\000\000\000\000\023\003\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\023\003\023\003\023\003\023\003\ +\023\003\023\003\023\003\023\003\023\003\000\000\000\000\023\003\ +\023\003\000\000\000\000\023\003\023\003\023\003\000\000\000\000\ +\023\003\023\003\023\003\023\003\023\003\000\000\023\003\000\000\ +\023\003\023\003\023\003\000\000\000\000\023\003\000\000\000\000\ +\023\003\023\003\000\000\023\003\000\000\023\003\023\003\000\000\ +\000\000\023\003\000\000\000\000\000\000\023\003\000\000\023\003\ +\000\000\023\003\023\003\000\000\023\003\000\000\023\003\023\003\ +\000\000\023\003\023\003\023\003\023\003\000\000\023\003\026\001\ +\027\001\028\001\000\000\000\000\009\000\010\000\029\001\000\000\ +\030\001\000\000\012\000\013\000\000\000\000\000\031\001\032\001\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\033\001\000\000\000\000\017\000\018\000\019\000\ +\020\000\021\000\000\000\034\001\000\000\000\000\022\000\000\000\ +\000\000\035\001\036\001\037\001\038\001\039\001\040\001\000\000\ +\000\000\024\000\000\000\025\000\026\000\027\000\028\000\029\000\ +\000\000\000\000\030\000\000\000\041\001\000\000\032\000\033\000\ +\034\000\000\000\000\000\000\000\036\000\000\000\042\001\043\001\ +\000\000\044\001\000\000\040\000\000\000\041\000\000\000\000\000\ +\000\000\045\001\046\001\047\001\048\001\049\001\050\001\000\000\ +\000\000\000\000\000\000\000\000\000\000\051\001\000\000\000\000\ +\000\000\052\001\000\000\053\001\047\000\000\000\000\000\000\000\ +\000\000\048\000\049\000\000\000\051\000\052\000\026\001\027\001\ +\028\001\054\000\000\000\009\000\010\000\029\001\000\000\030\001\ +\000\000\012\000\013\000\000\000\000\000\079\003\032\001\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\033\001\000\000\000\000\017\000\018\000\019\000\020\000\ +\021\000\000\000\034\001\000\000\000\000\022\000\000\000\000\000\ +\035\001\036\001\037\001\038\001\039\001\040\001\000\000\000\000\ +\024\000\000\000\025\000\026\000\027\000\028\000\029\000\000\000\ +\000\000\030\000\000\000\041\001\000\000\032\000\033\000\034\000\ +\000\000\000\000\000\000\036\000\000\000\042\001\043\001\000\000\ +\080\003\000\000\040\000\000\000\041\000\000\000\000\000\000\000\ +\045\001\046\001\047\001\048\001\049\001\050\001\000\000\000\000\ +\000\000\000\000\000\000\089\002\081\003\089\002\089\002\089\002\ +\052\001\089\002\053\001\047\000\089\002\089\002\000\000\000\000\ +\048\000\049\000\000\000\051\000\052\000\023\003\000\000\000\000\ 
+\054\000\000\000\023\003\023\003\023\003\089\002\000\000\000\000\ +\023\003\023\003\023\003\000\000\000\000\089\002\089\002\000\000\ +\000\000\000\000\000\000\000\000\000\000\089\002\000\000\023\003\ +\000\000\023\003\023\003\023\003\023\003\023\003\023\003\023\003\ +\000\000\089\002\089\002\000\000\023\003\000\000\023\003\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\023\003\ +\000\000\023\003\023\003\023\003\023\003\023\003\000\000\000\000\ +\023\003\023\003\000\000\000\000\023\003\023\003\023\003\000\000\ +\000\000\023\003\023\003\000\000\023\003\023\003\000\000\023\003\ +\000\000\023\003\000\000\023\003\000\000\023\003\000\000\000\000\ +\000\000\023\003\023\003\143\002\023\003\000\000\000\000\000\000\ +\217\002\217\002\217\002\000\000\000\000\023\003\217\002\217\002\ +\000\000\000\000\023\003\000\000\000\000\000\000\000\000\023\003\ +\023\003\023\003\023\003\023\003\023\003\000\000\000\000\023\003\ +\000\000\217\002\217\002\217\002\217\002\217\002\000\000\000\000\ +\000\000\000\000\217\002\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\217\002\000\000\217\002\ +\217\002\217\002\217\002\217\002\000\000\000\000\217\002\000\000\ +\000\000\000\000\217\002\217\002\217\002\000\000\000\000\000\000\ +\217\002\000\000\217\002\217\002\000\000\000\000\000\000\217\002\ +\000\000\217\002\000\000\000\000\000\000\000\000\000\000\217\002\ +\217\002\144\002\217\002\000\000\000\000\000\000\218\002\218\002\ +\218\002\143\002\000\000\000\000\218\002\218\002\000\000\000\000\ +\217\002\000\000\000\000\000\000\000\000\217\002\217\002\000\000\ +\217\002\217\002\000\000\000\000\000\000\217\002\000\000\218\002\ +\218\002\218\002\218\002\218\002\000\000\000\000\000\000\000\000\ +\218\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\218\002\000\000\218\002\218\002\218\002\ +\218\002\218\002\000\000\000\000\218\002\000\000\000\000\000\000\ +\218\002\218\002\218\002\000\000\000\000\000\000\218\002\000\000\ +\218\002\218\002\000\000\000\000\000\000\218\002\000\000\218\002\ +\000\000\000\000\000\000\000\000\000\000\218\002\218\002\141\002\ +\218\002\000\000\000\000\000\000\219\002\219\002\219\002\144\002\ +\000\000\000\000\219\002\219\002\000\000\000\000\218\002\000\000\ +\000\000\000\000\000\000\218\002\218\002\000\000\218\002\218\002\ +\000\000\000\000\000\000\218\002\000\000\219\002\219\002\219\002\ +\219\002\219\002\000\000\000\000\000\000\000\000\219\002\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\219\002\000\000\219\002\219\002\219\002\219\002\219\002\ +\000\000\000\000\219\002\000\000\000\000\000\000\219\002\219\002\ +\219\002\000\000\000\000\000\000\219\002\000\000\219\002\219\002\ +\000\000\000\000\000\000\219\002\000\000\219\002\000\000\000\000\ +\000\000\000\000\000\000\219\002\219\002\142\002\219\002\000\000\ +\000\000\000\000\220\002\220\002\220\002\141\002\000\000\000\000\ +\220\002\220\002\000\000\000\000\219\002\000\000\000\000\000\000\ +\000\000\219\002\219\002\000\000\219\002\219\002\000\000\000\000\ +\000\000\219\002\000\000\220\002\220\002\220\002\220\002\220\002\ +\000\000\000\000\000\000\000\000\220\002\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\220\002\ +\000\000\220\002\220\002\220\002\220\002\220\002\000\000\000\000\ +\220\002\000\000\000\000\000\000\220\002\220\002\220\002\000\000\ +\000\000\000\000\220\002\000\000\220\002\220\002\000\000\000\000\ +\000\000\220\002\000\000\220\002\000\000\000\000\000\000\000\000\ 
+\000\000\220\002\220\002\000\000\220\002\000\000\000\000\000\000\ +\000\000\000\000\000\000\142\002\224\000\225\000\226\000\000\000\ +\000\000\000\000\220\002\000\000\227\000\000\000\228\000\220\002\ +\220\002\000\000\220\002\220\002\229\000\230\000\231\000\220\002\ +\000\000\232\000\233\000\234\000\000\000\235\000\236\000\237\000\ +\000\000\238\000\239\000\240\000\241\000\000\000\000\000\000\000\ +\242\000\243\000\244\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\245\000\246\000\000\000\000\000\247\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\248\000\249\000\000\000\000\000\000\000\062\002\250\000\251\000\ +\000\000\062\002\000\000\252\000\253\000\254\000\255\000\000\001\ +\001\001\002\001\000\000\003\001\000\000\000\000\062\002\000\000\ +\062\002\004\001\000\000\045\002\000\000\000\000\005\001\062\002\ +\062\002\000\000\000\000\000\000\006\001\000\000\000\000\007\001\ +\008\001\062\002\009\001\010\001\011\001\012\001\013\001\000\000\ +\014\001\015\001\016\001\017\001\018\001\062\002\062\002\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\062\002\000\000\000\000\000\000\062\002\000\000\062\002\ +\062\002\062\002\000\000\062\002\000\000\000\000\062\002\000\000\ +\000\000\000\000\026\001\027\001\028\001\000\000\000\000\000\000\ +\010\000\207\001\000\000\030\001\000\000\000\000\013\000\045\002\ +\062\002\031\001\032\001\000\000\062\002\000\000\062\002\000\000\ +\000\000\062\002\000\000\000\000\000\000\033\001\161\000\000\000\ +\017\000\018\000\062\002\000\000\062\002\000\000\034\001\000\000\ +\000\000\000\000\000\000\000\000\035\001\036\001\037\001\038\001\ +\039\001\040\001\000\000\000\000\024\000\000\000\162\000\163\000\ +\000\000\164\000\165\000\000\000\000\000\030\000\000\000\041\001\ +\000\000\000\000\166\000\167\000\000\000\000\000\000\000\000\000\ +\000\000\208\001\209\001\000\000\210\001\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\045\001\046\001\211\001\212\001\ +\049\001\213\001\000\000\000\000\000\000\000\000\000\000\000\000\ +\051\001\000\000\000\000\170\000\052\001\000\000\053\001\047\000\ +\000\000\000\000\000\000\000\000\048\000\000\000\240\002\051\000\ +\171\000\026\001\027\001\028\001\000\000\000\000\000\000\010\000\ +\207\001\000\000\030\001\000\000\000\000\013\000\000\000\000\000\ +\031\001\032\001\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\033\001\161\000\000\000\017\000\ +\018\000\000\000\000\000\000\000\000\000\034\001\000\000\000\000\ +\000\000\000\000\000\000\035\001\036\001\037\001\038\001\039\001\ +\040\001\000\000\000\000\024\000\000\000\162\000\163\000\000\000\ +\164\000\165\000\000\000\000\000\030\000\000\000\041\001\000\000\ +\000\000\166\000\167\000\000\000\000\000\000\000\000\000\000\000\ +\208\001\209\001\000\000\210\001\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\045\001\046\001\211\001\212\001\049\001\ +\213\001\000\000\000\000\000\000\000\000\000\000\000\000\051\001\ +\000\000\000\000\170\000\052\001\000\000\053\001\047\000\000\000\ +\000\000\000\000\000\000\048\000\000\000\194\003\051\000\171\000\ +\026\001\027\001\028\001\000\000\000\000\000\000\010\000\207\001\ +\000\000\030\001\000\000\000\000\013\000\000\000\000\000\031\001\ +\032\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\033\001\161\000\000\000\017\000\018\000\ +\000\000\000\000\000\000\000\000\034\001\000\000\000\000\000\000\ +\000\000\000\000\035\001\036\001\037\001\038\001\039\001\040\001\ 
+\000\000\000\000\024\000\000\000\162\000\163\000\000\000\164\000\ +\165\000\000\000\000\000\030\000\000\000\041\001\000\000\000\000\ +\166\000\167\000\000\000\000\000\000\000\000\000\000\000\208\001\ +\209\001\000\000\210\001\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\045\001\046\001\211\001\212\001\049\001\213\001\ +\000\000\000\000\000\000\000\000\000\000\000\000\051\001\000\000\ +\000\000\170\000\052\001\000\000\053\001\047\000\000\000\000\000\ +\000\000\000\000\048\000\000\000\154\004\051\000\171\000\026\001\ +\027\001\028\001\000\000\000\000\000\000\010\000\207\001\000\000\ +\030\001\000\000\000\000\013\000\000\000\000\000\031\001\032\001\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\033\001\161\000\000\000\017\000\018\000\000\000\ +\000\000\000\000\000\000\034\001\000\000\000\000\000\000\000\000\ +\000\000\035\001\036\001\037\001\038\001\039\001\040\001\000\000\ +\000\000\024\000\000\000\162\000\163\000\000\000\164\000\165\000\ +\000\000\000\000\030\000\000\000\041\001\000\000\000\000\166\000\ +\167\000\000\000\000\000\000\000\000\000\000\000\208\001\209\001\ +\000\000\210\001\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\045\001\046\001\211\001\212\001\049\001\213\001\000\000\ +\000\000\157\003\000\000\000\000\000\000\051\001\000\000\010\000\ +\170\000\052\001\000\000\053\001\047\000\013\000\000\000\000\000\ +\079\003\048\000\000\000\000\000\051\000\171\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\161\000\000\000\017\000\ +\018\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\024\000\000\000\162\000\163\000\000\000\ +\164\000\165\000\000\000\000\000\030\000\000\000\200\002\000\000\ +\000\000\166\000\167\000\000\000\010\000\000\000\000\000\000\000\ +\168\000\000\000\013\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\169\000\000\000\000\000\ +\000\000\000\000\161\000\000\000\017\000\018\000\000\000\158\003\ +\000\000\000\000\170\000\000\000\000\000\000\000\047\000\000\000\ +\000\000\000\000\000\000\048\000\000\000\000\000\051\000\171\000\ +\024\000\000\000\162\000\163\000\000\000\164\000\165\000\000\000\ +\000\000\030\000\000\000\202\002\000\000\000\000\166\000\167\000\ +\000\000\010\000\000\000\000\000\000\000\168\000\000\000\013\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\169\000\000\000\000\000\000\000\000\000\161\000\ +\000\000\017\000\018\000\000\000\000\000\000\000\000\000\170\000\ +\000\000\000\000\000\000\047\000\000\000\000\000\000\000\000\000\ +\048\000\000\000\000\000\051\000\171\000\024\000\000\000\162\000\ +\163\000\000\000\164\000\165\000\000\000\000\000\030\000\000\000\ +\204\002\000\000\000\000\166\000\167\000\000\000\010\000\000\000\ +\000\000\000\000\168\000\000\000\013\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\169\000\ +\000\000\000\000\000\000\000\000\161\000\000\000\017\000\018\000\ +\000\000\000\000\000\000\000\000\170\000\000\000\000\000\000\000\ +\047\000\000\000\000\000\000\000\000\000\048\000\000\000\000\000\ +\051\000\171\000\024\000\000\000\162\000\163\000\000\000\164\000\ +\165\000\000\000\000\000\030\000\000\000\161\004\000\000\000\000\ +\166\000\167\000\000\000\010\000\000\000\000\000\000\000\168\000\ +\000\000\013\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\169\000\000\000\000\000\000\000\ 
+\000\000\161\000\000\000\017\000\018\000\000\000\000\000\000\000\ +\000\000\170\000\000\000\000\000\000\000\047\000\000\000\000\000\ +\000\000\000\000\048\000\000\000\000\000\051\000\171\000\024\000\ +\000\000\162\000\163\000\000\000\164\000\165\000\000\000\000\000\ +\030\000\000\000\163\004\000\000\000\000\166\000\167\000\000\000\ +\010\000\000\000\000\000\000\000\168\000\000\000\013\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\169\000\000\000\000\000\000\000\000\000\161\000\000\000\ +\017\000\018\000\000\000\000\000\000\000\000\000\170\000\000\000\ +\000\000\000\000\047\000\000\000\000\000\000\000\000\000\048\000\ +\000\000\000\000\051\000\171\000\024\000\000\000\162\000\163\000\ +\000\000\164\000\165\000\000\000\000\000\030\000\000\000\165\004\ +\000\000\000\000\166\000\167\000\000\000\010\000\000\000\000\000\ +\000\000\168\000\000\000\013\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\169\000\000\000\ +\000\000\000\000\000\000\161\000\000\000\017\000\018\000\000\000\ +\000\000\000\000\000\000\170\000\000\000\000\000\000\000\047\000\ +\000\000\000\000\000\000\000\000\048\000\000\000\000\000\051\000\ +\171\000\024\000\000\000\162\000\163\000\000\000\164\000\165\000\ +\000\000\000\000\030\000\000\000\000\000\000\000\000\000\166\000\ +\167\000\009\000\010\000\011\000\000\000\000\000\168\000\012\000\ +\013\000\014\000\032\002\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\169\000\000\000\000\000\000\000\000\000\ +\015\000\016\000\017\000\018\000\019\000\020\000\021\000\000\000\ +\170\000\000\000\000\000\022\000\047\000\023\000\000\000\000\000\ +\000\000\048\000\000\000\000\000\051\000\171\000\024\000\000\000\ +\025\000\026\000\027\000\028\000\029\000\000\000\000\000\030\000\ +\031\000\000\000\000\000\032\000\033\000\034\000\000\000\000\000\ +\035\000\036\000\000\000\037\000\038\000\000\000\039\000\000\000\ +\040\000\000\000\041\000\000\000\042\000\000\000\000\000\000\000\ +\043\000\044\000\000\000\045\000\000\000\033\002\000\000\000\000\ +\009\000\010\000\011\000\000\000\046\000\000\000\012\000\013\000\ +\014\000\047\000\000\000\000\000\000\000\000\000\048\000\049\000\ +\050\000\051\000\052\000\053\000\000\000\000\000\054\000\015\000\ +\016\000\017\000\018\000\019\000\020\000\021\000\000\000\000\000\ +\000\000\000\000\022\000\000\000\023\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\024\000\000\000\025\000\ +\026\000\027\000\028\000\029\000\000\000\000\000\030\000\031\000\ +\000\000\000\000\032\000\033\000\034\000\000\000\000\000\035\000\ +\036\000\000\000\037\000\038\000\000\000\039\000\000\000\040\000\ +\000\000\041\000\000\000\042\000\000\000\000\000\000\000\043\000\ +\044\000\000\000\045\000\000\000\000\000\000\000\009\000\010\000\ +\011\000\000\000\000\000\046\000\012\000\013\000\000\000\000\000\ +\047\000\000\000\000\000\000\000\000\000\048\000\049\000\050\000\ +\051\000\052\000\053\000\000\000\000\000\054\000\000\000\017\000\ +\018\000\019\000\020\000\021\000\000\000\000\000\000\000\000\000\ +\022\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\024\000\000\000\025\000\026\000\027\000\ +\028\000\029\000\000\000\000\000\030\000\000\000\000\000\000\000\ +\032\000\033\000\034\000\000\000\000\000\000\000\036\000\000\000\ +\037\000\038\000\000\000\000\000\000\000\040\000\000\000\041\000\ +\000\000\000\000\000\000\000\000\000\000\043\000\044\000\000\000\ +\045\000\000\000\000\000\000\000\000\000\219\000\009\000\010\000\ 
+\011\000\000\000\000\000\222\000\012\000\013\000\047\000\000\000\ +\000\000\000\000\000\000\048\000\049\000\000\000\051\000\052\000\ +\000\000\000\000\000\000\054\000\000\000\000\000\000\000\017\000\ +\018\000\019\000\020\000\021\000\000\000\000\000\000\000\000\000\ +\022\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\024\000\000\000\025\000\026\000\027\000\ +\028\000\029\000\000\000\000\000\030\000\000\000\000\000\000\000\ +\032\000\033\000\034\000\000\000\000\000\000\000\036\000\000\000\ +\037\000\038\000\000\000\000\000\000\000\040\000\000\000\041\000\ +\000\000\000\000\000\000\000\000\000\000\043\000\044\000\000\000\ +\045\000\000\000\000\000\009\000\010\000\011\000\000\000\000\000\ +\000\000\012\000\013\000\000\000\000\000\000\000\047\000\000\000\ +\000\000\000\000\000\000\048\000\049\000\000\000\051\000\052\000\ +\238\001\000\000\000\000\054\000\017\000\018\000\019\000\020\000\ +\021\000\000\000\000\000\000\000\000\000\022\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\024\000\000\000\025\000\026\000\027\000\028\000\029\000\000\000\ +\000\000\030\000\000\000\000\000\000\000\032\000\033\000\034\000\ +\000\000\000\000\000\000\036\000\000\000\037\000\038\000\000\000\ +\000\000\000\000\040\000\000\000\041\000\000\000\000\000\000\000\ +\000\000\000\000\043\000\044\000\000\000\045\000\000\000\000\000\ +\009\000\010\000\011\000\000\000\000\000\000\000\012\000\013\000\ +\000\000\000\000\000\000\047\000\000\000\000\000\000\000\000\000\ +\048\000\049\000\000\000\051\000\052\000\000\000\000\000\000\000\ +\054\000\017\000\018\000\019\000\020\000\021\000\000\000\000\000\ +\000\000\000\000\022\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\024\000\000\000\025\000\ +\026\000\027\000\028\000\029\000\000\000\000\000\030\000\000\000\ +\000\000\000\000\032\000\033\000\034\000\000\000\000\000\000\000\ +\036\000\000\000\037\000\038\000\000\000\000\000\000\000\040\000\ +\000\000\041\000\000\000\000\000\000\000\000\000\100\002\043\000\ +\044\000\000\000\045\000\000\000\000\000\009\000\010\000\011\000\ +\000\000\000\000\000\000\012\000\013\000\000\000\000\000\000\000\ +\047\000\000\000\000\000\000\000\000\000\048\000\049\000\000\000\ +\051\000\052\000\000\000\000\000\000\000\054\000\017\000\018\000\ +\019\000\020\000\021\000\000\000\000\000\000\000\000\000\022\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\024\000\000\000\025\000\026\000\027\000\028\000\ +\029\000\000\000\000\000\030\000\000\000\000\000\000\000\032\000\ +\033\000\034\000\000\000\000\000\000\000\036\000\000\000\037\000\ +\038\000\000\000\000\000\000\000\040\000\000\000\041\000\000\000\ +\000\000\000\000\000\000\000\000\043\000\044\000\000\000\045\000\ +\000\000\000\000\000\000\000\000\075\003\009\000\010\000\011\000\ +\000\000\000\000\077\003\012\000\013\000\047\000\000\000\000\000\ +\000\000\000\000\048\000\049\000\000\000\051\000\052\000\000\000\ +\000\000\000\000\054\000\000\000\000\000\000\000\017\000\018\000\ +\019\000\020\000\021\000\000\000\000\000\000\000\000\000\022\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\024\000\000\000\025\000\026\000\027\000\028\000\ +\029\000\000\000\000\000\030\000\000\000\000\000\000\000\032\000\ +\033\000\034\000\000\000\000\000\000\000\036\000\000\000\037\000\ +\038\000\000\000\000\000\000\000\040\000\000\000\041\000\000\000\ +\000\000\000\000\000\000\000\000\043\000\044\000\000\000\045\000\ 
+\000\000\000\000\000\000\009\000\010\000\011\000\000\000\000\000\ +\000\000\012\000\013\000\000\000\000\000\047\000\000\000\000\000\ +\000\000\000\000\048\000\049\000\134\004\051\000\052\000\000\000\ +\000\000\000\000\054\000\000\000\017\000\018\000\019\000\020\000\ +\021\000\000\000\000\000\000\000\000\000\022\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\024\000\000\000\025\000\026\000\027\000\028\000\029\000\000\000\ +\000\000\030\000\000\000\000\000\000\000\032\000\033\000\034\000\ +\000\000\000\000\000\000\036\000\000\000\037\000\038\000\000\000\ +\000\000\000\000\040\000\000\000\041\000\000\000\000\000\000\000\ +\000\000\000\000\043\000\044\000\000\000\045\000\000\000\000\000\ +\025\003\025\003\025\003\000\000\000\000\000\000\025\003\025\003\ +\000\000\000\000\000\000\047\000\000\000\000\000\000\000\000\000\ +\048\000\049\000\000\000\051\000\052\000\025\003\000\000\000\000\ +\054\000\025\003\025\003\025\003\025\003\025\003\000\000\000\000\ +\000\000\000\000\025\003\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\025\003\000\000\025\003\ +\025\003\025\003\025\003\025\003\000\000\000\000\025\003\000\000\ +\000\000\000\000\025\003\025\003\025\003\000\000\000\000\000\000\ +\025\003\000\000\025\003\025\003\000\000\000\000\000\000\025\003\ +\000\000\025\003\000\000\000\000\000\000\000\000\000\000\025\003\ +\025\003\000\000\025\003\000\000\000\000\009\000\010\000\011\000\ +\000\000\000\000\000\000\012\000\013\000\000\000\000\000\000\000\ +\025\003\000\000\000\000\000\000\000\000\025\003\025\003\000\000\ +\025\003\025\003\000\000\000\000\000\000\025\003\017\000\018\000\ +\019\000\020\000\021\000\000\000\000\000\000\000\000\000\022\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\024\000\000\000\025\000\026\000\027\000\028\000\ +\029\000\000\000\000\000\030\000\000\000\000\000\000\000\032\000\ +\033\000\034\000\000\000\000\000\000\000\036\000\000\000\037\000\ +\038\000\000\000\000\000\000\000\040\000\000\000\041\000\000\000\ +\000\000\000\000\000\000\000\000\043\000\044\000\000\000\045\000\ +\000\000\000\000\025\003\025\003\025\003\000\000\000\000\000\000\ +\025\003\025\003\000\000\000\000\000\000\047\000\000\000\000\000\ +\000\000\000\000\048\000\049\000\000\000\051\000\052\000\000\000\ +\000\000\000\000\054\000\025\003\025\003\025\003\025\003\025\003\ +\000\000\000\000\000\000\000\000\025\003\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\025\003\ +\000\000\025\003\025\003\025\003\025\003\025\003\000\000\000\000\ +\025\003\000\000\000\000\000\000\025\003\025\003\025\003\000\000\ +\000\000\000\000\025\003\000\000\025\003\025\003\000\000\000\000\ +\000\000\025\003\000\000\025\003\000\000\000\000\000\000\000\000\ +\000\000\025\003\025\003\000\000\025\003\000\000\000\000\023\003\ +\023\003\023\003\000\000\000\000\000\000\023\003\023\003\000\000\ +\000\000\000\000\025\003\000\000\000\000\000\000\000\000\025\003\ +\025\003\000\000\025\003\025\003\000\000\000\000\000\000\025\003\ +\023\003\023\003\023\003\023\003\023\003\000\000\000\000\000\000\ +\000\000\023\003\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\023\003\000\000\023\003\023\003\ +\023\003\023\003\023\003\000\000\000\000\023\003\000\000\000\000\ +\000\000\023\003\023\003\023\003\000\000\000\000\000\000\023\003\ +\000\000\023\003\023\003\000\000\000\000\010\000\023\003\000\000\ +\023\003\000\000\000\000\013\000\000\000\217\003\023\003\023\003\ 
+\018\002\023\003\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\218\003\000\000\000\000\017\000\018\000\023\003\ +\000\000\000\000\000\000\000\000\023\003\023\003\000\000\023\003\ +\023\003\000\000\000\000\000\000\023\003\000\000\000\000\000\000\ +\000\000\024\000\252\001\000\000\163\000\000\000\164\000\165\000\ +\000\000\000\000\030\000\000\000\000\000\000\000\000\000\166\000\ +\219\003\000\000\010\000\000\000\000\000\000\000\168\000\000\000\ +\013\000\000\000\017\002\000\000\000\000\018\002\000\000\000\000\ +\254\001\000\000\000\000\169\000\000\000\000\000\000\000\218\003\ +\255\001\000\000\017\000\018\000\000\000\010\000\000\000\000\000\ +\170\000\000\000\000\000\013\000\047\000\250\002\000\000\000\002\ +\000\000\048\000\000\000\000\000\051\000\171\000\024\000\252\001\ +\000\000\163\000\000\000\164\000\165\000\017\000\018\000\030\000\ +\000\000\000\000\000\000\000\000\166\000\219\003\000\000\000\000\ +\000\000\000\000\000\000\168\000\000\000\000\000\000\000\000\000\ +\000\000\024\000\252\001\000\000\163\000\254\001\164\000\165\000\ +\169\000\000\000\030\000\000\000\000\000\255\001\000\000\166\000\ +\251\002\000\000\000\000\000\000\000\000\170\000\168\000\000\000\ +\252\002\047\000\000\000\000\000\000\002\000\000\048\000\000\000\ +\254\001\051\000\171\000\169\000\000\000\000\000\010\000\000\000\ +\255\001\000\000\000\000\000\000\013\000\000\000\107\004\000\000\ +\170\000\000\000\000\000\000\000\047\000\000\000\000\000\000\002\ +\000\000\048\000\000\000\108\004\051\000\171\000\017\000\018\000\ +\000\000\010\000\000\000\000\000\000\000\000\000\000\000\013\000\ +\000\000\031\006\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\024\000\252\001\000\000\163\000\218\003\164\000\ +\165\000\017\000\018\000\030\000\000\000\000\000\000\000\000\000\ +\166\000\167\000\000\000\000\000\000\000\000\000\000\000\168\000\ +\000\000\000\000\000\000\000\000\000\000\024\000\252\001\000\000\ +\163\000\254\001\164\000\165\000\169\000\000\000\030\000\000\000\ +\000\000\255\001\000\000\166\000\219\003\000\000\010\000\000\000\ +\000\000\170\000\168\000\000\000\013\000\047\000\000\000\000\000\ +\000\002\000\000\048\000\000\000\254\001\051\000\171\000\169\000\ +\000\000\000\000\000\000\000\000\255\001\000\000\017\000\018\000\ +\000\000\010\000\000\000\000\000\170\000\000\000\000\000\013\000\ +\047\000\000\000\000\000\000\002\000\000\048\000\000\000\000\000\ +\051\000\171\000\024\000\252\001\000\000\163\000\000\000\164\000\ +\165\000\017\000\018\000\030\000\000\000\000\000\000\000\000\000\ +\166\000\167\000\000\000\000\000\000\000\000\000\000\000\168\000\ +\000\000\015\006\000\000\000\000\000\000\024\000\252\001\000\000\ +\163\000\254\001\164\000\165\000\169\000\000\000\030\000\000\000\ +\000\000\255\001\000\000\166\000\253\001\000\000\010\000\000\000\ +\000\000\170\000\168\000\000\000\013\000\047\000\000\000\000\000\ +\000\002\000\000\048\000\000\000\254\001\051\000\171\000\169\000\ +\000\000\000\000\000\000\000\000\255\001\000\000\017\000\018\000\ +\000\000\025\003\000\000\000\000\170\000\000\000\000\000\025\003\ +\047\000\000\000\000\000\000\002\000\000\048\000\000\000\000\000\ +\051\000\171\000\024\000\252\001\000\000\163\000\000\000\164\000\ +\165\000\025\003\025\003\030\000\000\000\000\000\000\000\000\000\ +\166\000\167\000\000\000\000\000\000\000\000\000\000\000\168\000\ +\000\000\000\000\000\000\000\000\000\000\025\003\025\003\000\000\ +\025\003\254\001\025\003\025\003\169\000\000\000\025\003\000\000\ +\000\000\255\001\000\000\025\003\025\003\000\000\023\003\000\000\ 
+\000\000\170\000\025\003\000\000\023\003\047\000\000\000\000\000\ +\000\002\000\000\048\000\000\000\025\003\051\000\171\000\025\003\ +\000\000\000\000\000\000\000\000\025\003\000\000\023\003\023\003\ +\000\000\000\000\000\000\000\000\025\003\000\000\000\000\000\000\ +\025\003\000\000\000\000\025\003\000\000\025\003\000\000\000\000\ +\025\003\025\003\023\003\023\003\000\000\023\003\000\000\023\003\ +\023\003\000\000\000\000\023\003\000\000\000\000\000\000\000\000\ +\023\003\023\003\000\000\000\000\010\000\000\000\000\000\023\003\ +\000\000\000\000\013\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\023\003\000\000\000\000\023\003\000\000\000\000\000\000\ +\000\000\023\003\161\000\000\000\017\000\018\000\000\000\000\000\ +\000\000\023\003\000\000\000\000\000\000\023\003\000\000\000\000\ +\023\003\000\000\023\003\000\000\000\000\023\003\023\003\000\000\ +\024\000\000\000\162\000\163\000\000\000\164\000\165\000\000\000\ +\000\000\030\000\000\000\000\000\000\000\000\000\166\000\167\000\ +\000\000\000\000\000\000\010\000\000\000\168\000\000\000\205\001\ +\000\000\013\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\169\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\161\000\219\000\017\000\018\000\000\000\000\000\170\000\ +\000\000\000\000\000\000\047\000\000\000\000\000\000\000\000\000\ +\048\000\000\000\000\000\051\000\171\000\000\000\000\000\024\000\ +\000\000\162\000\163\000\000\000\164\000\165\000\000\000\000\000\ +\030\000\000\000\000\000\000\000\000\000\166\000\167\000\000\000\ +\010\000\000\000\000\000\000\000\168\000\000\000\013\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\010\000\011\000\000\000\ +\000\000\169\000\012\000\013\000\000\000\000\000\161\000\000\000\ +\017\000\018\000\000\000\000\000\000\000\000\000\170\000\000\000\ +\000\000\000\000\047\000\000\000\000\000\017\000\018\000\048\000\ +\000\000\000\000\051\000\171\000\024\000\000\000\162\000\163\000\ +\000\000\164\000\165\000\000\000\000\000\030\000\000\000\000\000\ +\000\000\024\000\166\000\167\000\026\000\027\000\028\000\029\000\ +\000\000\168\000\030\000\000\000\025\003\000\000\025\003\166\000\ +\034\000\000\000\025\003\000\000\000\000\000\000\169\000\000\000\ +\000\000\000\000\000\000\040\000\000\000\000\000\156\003\000\000\ +\000\000\000\000\025\003\170\000\025\003\025\003\045\000\047\000\ +\000\000\000\000\000\000\000\000\048\000\000\000\000\000\051\000\ +\171\000\000\000\000\000\000\000\047\000\000\000\000\000\000\000\ +\025\003\048\000\025\003\025\003\051\000\025\003\025\003\000\000\ +\000\000\025\003\000\000\000\000\000\000\000\000\025\003\025\003\ +\000\000\010\000\000\000\000\000\000\000\025\003\000\000\013\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\025\003\000\000\000\000\000\000\000\000\161\000\ +\000\000\017\000\018\000\000\000\000\000\000\000\000\000\025\003\ +\000\000\000\000\000\000\025\003\000\000\000\000\000\000\000\000\ +\025\003\000\000\000\000\025\003\025\003\024\000\000\000\162\000\ +\163\000\000\000\164\000\165\000\000\000\000\000\030\000\000\000\ +\000\000\000\000\000\000\166\000\167\000\000\000\025\003\000\000\ +\000\000\000\000\168\000\000\000\025\003\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\169\000\ +\000\000\000\000\000\000\000\000\025\003\000\000\025\003\025\003\ +\000\000\025\003\000\000\000\000\170\000\000\000\000\000\025\003\ +\047\000\000\000\000\000\000\000\000\000\048\000\000\000\000\000\ +\051\000\171\000\025\003\000\000\025\003\025\003\000\000\025\003\ 
+\025\003\025\003\025\003\025\003\000\000\000\000\000\000\000\000\ +\025\003\025\003\000\000\000\000\000\000\000\000\000\000\025\003\ +\000\000\000\000\000\000\000\000\000\000\025\003\000\000\025\003\ +\025\003\000\000\025\003\025\003\025\003\000\000\025\003\000\000\ +\000\000\000\000\000\000\025\003\025\003\000\000\213\002\000\000\ +\000\000\025\003\025\003\000\000\213\002\025\003\000\000\000\000\ +\000\000\000\000\025\003\000\000\000\000\025\003\025\003\025\003\ +\000\000\000\000\000\000\000\000\213\002\000\000\213\002\213\002\ +\025\003\010\000\000\000\000\000\025\003\000\000\000\000\013\000\ +\025\003\000\000\000\000\000\000\000\000\025\003\000\000\000\000\ +\025\003\025\003\213\002\000\000\213\002\213\002\000\000\213\002\ +\213\002\017\000\018\000\213\002\000\000\000\000\000\000\000\000\ +\213\002\213\002\000\000\000\000\000\000\000\000\000\000\213\002\ +\000\000\000\000\000\000\000\000\000\000\024\000\000\000\162\000\ +\163\000\000\000\164\000\165\000\213\002\000\000\030\000\000\000\ +\000\000\000\000\000\000\166\000\167\000\000\000\194\002\000\000\ +\000\000\213\002\168\000\000\000\194\002\213\002\000\000\000\000\ +\000\000\000\000\213\002\000\000\000\000\213\002\213\002\169\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\194\002\194\002\ +\000\000\023\003\000\000\000\000\170\000\000\000\000\000\023\003\ +\047\000\000\000\000\000\000\000\000\000\048\000\000\000\000\000\ +\051\000\171\000\194\002\000\000\194\002\194\002\000\000\194\002\ +\194\002\023\003\023\003\194\002\000\000\000\000\000\000\000\000\ +\194\002\194\002\000\000\000\000\000\000\000\000\000\000\194\002\ +\000\000\000\000\000\000\000\000\000\000\023\003\000\000\023\003\ +\023\003\000\000\023\003\023\003\194\002\000\000\023\003\000\000\ +\000\000\000\000\000\000\023\003\023\003\000\000\010\000\000\000\ +\000\000\194\002\023\003\000\000\013\000\194\002\000\000\000\000\ +\000\000\000\000\194\002\000\000\000\000\194\002\194\002\023\003\ +\000\000\000\000\000\000\000\000\000\000\000\000\017\000\018\000\ +\000\000\025\003\000\000\000\000\023\003\000\000\000\000\025\003\ +\023\003\000\000\000\000\000\000\000\000\023\003\000\000\000\000\ +\023\003\023\003\024\000\000\000\000\000\163\000\000\000\164\000\ +\165\000\025\003\025\003\030\000\000\000\000\000\000\000\000\000\ +\166\000\167\000\000\000\000\000\000\000\000\000\000\000\168\000\ +\000\000\000\000\000\000\000\000\000\000\025\003\000\000\000\000\ +\025\003\000\000\025\003\025\003\169\000\000\000\025\003\000\000\ +\000\000\000\000\000\000\025\003\025\003\000\000\000\000\000\000\ +\000\000\170\000\025\003\000\000\000\000\047\000\010\000\011\000\ +\000\000\000\000\048\000\012\000\013\000\051\000\171\000\025\003\ +\000\000\000\000\000\000\000\000\000\000\000\000\115\001\000\000\ +\000\000\000\000\000\000\000\000\025\003\000\000\017\000\018\000\ +\025\003\000\000\000\000\000\000\000\000\025\003\000\000\000\000\ +\025\003\025\003\000\000\000\000\000\000\000\000\000\000\116\001\ +\000\000\000\000\024\000\117\001\000\000\026\000\027\000\028\000\ +\029\000\000\000\000\000\030\000\000\000\000\000\000\000\000\000\ +\166\000\034\000\010\000\011\000\000\000\000\000\000\000\012\000\ +\013\000\000\000\000\000\000\000\040\000\000\000\000\000\000\000\ +\000\000\118\001\000\000\000\000\000\000\000\000\000\000\045\000\ +\000\000\119\001\017\000\018\000\000\000\000\000\000\000\000\000\ +\000\000\120\001\121\001\000\000\000\000\047\000\000\000\000\000\ +\122\001\000\000\048\000\000\000\000\000\051\000\024\000\117\001\ +\000\000\026\000\027\000\028\000\029\000\000\000\000\000\030\000\ 
+\000\000\000\000\000\000\000\000\166\000\034\000\010\000\011\000\ +\000\000\000\000\000\000\012\000\013\000\025\003\025\003\000\000\ +\040\000\000\000\025\003\025\003\000\000\118\001\000\000\000\000\ +\000\000\000\000\000\000\045\000\000\000\119\001\017\000\018\000\ +\000\000\000\000\000\000\000\000\000\000\025\003\025\003\000\000\ +\000\000\047\000\000\000\000\000\122\001\000\000\048\000\000\000\ +\000\000\051\000\024\000\000\000\000\000\026\000\027\000\028\000\ +\029\000\025\003\000\000\030\000\025\003\025\003\025\003\025\003\ +\207\000\034\000\025\003\000\000\000\000\000\000\059\005\025\003\ +\025\003\000\000\000\000\000\000\040\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\025\003\000\000\060\005\000\000\045\000\ +\000\000\000\000\000\000\000\000\243\001\000\000\025\003\000\000\ +\000\000\000\000\000\000\000\000\000\000\047\000\000\000\000\000\ +\000\000\000\000\048\000\000\000\025\003\051\000\000\000\000\000\ +\000\000\025\003\000\000\000\000\025\003\061\005\000\000\137\000\ +\138\000\030\000\000\000\139\000\000\000\000\000\140\000\062\005\ +\000\000\000\000\000\000\032\005\078\001\079\001\000\000\000\000\ +\000\000\000\000\000\000\000\000\080\001\000\000\000\000\142\000\ +\000\000\033\005\081\001\082\001\034\005\083\001\063\005\143\000\ +\144\000\000\000\000\000\000\000\000\000\000\000\084\001\145\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\085\001\ +\246\001\000\000\000\000\064\005\147\000\086\001\087\001\088\001\ +\089\001\090\001\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\091\001\000\000\000\000\000\000\000\000\186\000\000\000\000\000\ +\000\000\000\000\092\001\093\001\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\094\001\095\001\096\001\ +\097\001\098\001\000\000\026\001\027\001\028\001\000\000\000\000\ +\000\000\035\005\207\001\000\000\030\001\000\000\000\000\100\001\ +\000\000\000\000\023\003\032\001\023\003\023\003\023\003\000\000\ +\023\003\000\000\000\000\023\003\023\003\000\000\033\001\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\034\001\ +\000\000\000\000\000\000\000\000\023\003\035\001\036\001\037\001\ +\038\001\039\001\040\001\000\000\023\003\023\003\000\000\000\000\ +\000\000\000\000\000\000\000\000\023\003\000\000\000\000\000\000\ +\041\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\023\003\023\003\234\002\209\001\000\000\235\002\000\000\000\000\ +\000\000\000\000\041\004\078\001\079\001\045\001\046\001\236\002\ +\212\001\049\001\213\001\080\001\000\000\000\000\000\000\000\000\ +\000\000\081\001\082\001\000\000\083\001\052\001\000\000\053\001\ +\000\000\000\000\000\000\000\000\000\000\084\001\000\000\000\000\ +\000\000\000\000\043\004\078\001\079\001\000\000\085\001\000\000\ +\000\000\000\000\000\000\080\001\086\001\087\001\088\001\089\001\ +\090\001\081\001\082\001\000\000\083\001\000\000\000\000\000\000\ +\043\002\000\000\043\002\043\002\043\002\084\001\043\002\091\001\ +\000\000\043\002\043\002\000\000\186\000\000\000\085\001\000\000\ +\000\000\092\001\093\001\000\000\086\001\087\001\088\001\089\001\ +\090\001\000\000\043\002\000\000\094\001\095\001\096\001\097\001\ +\098\001\000\000\043\002\043\002\000\000\042\004\000\000\091\001\ +\000\000\000\000\043\002\000\000\186\000\000\000\100\001\000\000\ +\000\000\092\001\093\001\000\000\000\000\000\000\043\002\043\002\ +\045\004\078\001\079\001\000\000\094\001\095\001\096\001\097\001\ +\098\001\080\001\000\000\000\000\000\000\000\000\044\004\081\001\ 
+\082\001\000\000\083\001\000\000\000\000\000\000\100\001\000\000\ +\000\000\000\000\000\000\084\001\000\000\000\000\000\000\000\000\ +\041\004\078\001\079\001\000\000\085\001\000\000\000\000\000\000\ +\000\000\080\001\086\001\087\001\088\001\089\001\090\001\081\001\ +\082\001\000\000\083\001\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\084\001\000\000\091\001\000\000\000\000\ +\000\000\000\000\186\000\000\000\085\001\000\000\000\000\092\001\ +\093\001\000\000\086\001\087\001\088\001\089\001\090\001\000\000\ +\000\000\000\000\094\001\095\001\096\001\097\001\098\001\000\000\ +\000\000\000\000\000\000\000\000\000\000\091\001\046\004\000\000\ +\000\000\000\000\186\000\000\000\100\001\000\000\000\000\092\001\ +\093\001\000\000\000\000\000\000\000\000\000\000\043\004\078\001\ +\079\001\000\000\094\001\095\001\096\001\097\001\098\001\080\001\ +\000\000\000\000\000\000\099\004\000\000\081\001\082\001\000\000\ +\083\001\000\000\000\000\000\000\100\001\000\000\000\000\000\000\ +\000\000\084\001\000\000\000\000\000\000\000\000\045\004\078\001\ +\079\001\000\000\085\001\000\000\000\000\000\000\000\000\080\001\ +\086\001\087\001\088\001\089\001\090\001\081\001\082\001\000\000\ +\083\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\084\001\000\000\091\001\000\000\000\000\000\000\000\000\ +\186\000\000\000\085\001\000\000\000\000\092\001\093\001\000\000\ +\086\001\087\001\088\001\089\001\090\001\000\000\000\000\000\000\ +\094\001\095\001\096\001\097\001\098\001\000\000\000\000\000\000\ +\000\000\000\000\100\004\091\001\000\000\000\000\000\000\000\000\ +\186\000\000\000\100\001\000\000\000\000\092\001\093\001\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\094\001\095\001\096\001\097\001\098\001\081\005\078\001\079\001\ +\000\000\000\000\000\000\000\000\101\004\000\000\080\001\000\000\ +\000\000\000\000\100\001\000\000\081\001\082\001\000\000\083\001\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\084\001\000\000\000\000\000\000\000\000\083\005\078\001\079\001\ +\000\000\085\001\000\000\000\000\000\000\000\000\080\001\086\001\ +\087\001\088\001\089\001\090\001\081\001\082\001\000\000\083\001\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\084\001\000\000\091\001\000\000\000\000\000\000\000\000\186\000\ +\000\000\085\001\000\000\000\000\092\001\093\001\000\000\086\001\ +\087\001\088\001\089\001\090\001\000\000\000\000\000\000\094\001\ +\095\001\096\001\097\001\098\001\000\000\000\000\000\000\000\000\ +\082\005\000\000\091\001\000\000\000\000\000\000\000\000\186\000\ +\000\000\100\001\000\000\000\000\092\001\093\001\000\000\000\000\ +\000\000\000\000\000\000\085\005\078\001\079\001\000\000\094\001\ +\095\001\096\001\097\001\098\001\080\001\000\000\000\000\000\000\ +\000\000\084\005\081\001\082\001\000\000\083\001\000\000\000\000\ +\000\000\100\001\000\000\000\000\000\000\000\000\084\001\000\000\ +\000\000\000\000\000\000\081\005\078\001\079\001\000\000\085\001\ +\000\000\000\000\000\000\000\000\080\001\086\001\087\001\088\001\ +\089\001\090\001\081\001\082\001\000\000\083\001\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\084\001\000\000\ +\091\001\000\000\000\000\000\000\000\000\186\000\000\000\085\001\ +\000\000\000\000\092\001\093\001\000\000\086\001\087\001\088\001\ +\089\001\090\001\000\000\000\000\000\000\094\001\095\001\096\001\ +\097\001\098\001\000\000\000\000\000\000\000\000\000\000\000\000\ +\091\001\086\005\000\000\000\000\000\000\186\000\000\000\100\001\ 
+\000\000\000\000\092\001\093\001\000\000\000\000\000\000\000\000\ +\000\000\083\005\078\001\079\001\000\000\094\001\095\001\096\001\ +\097\001\098\001\080\001\000\000\000\000\000\000\106\005\000\000\ +\081\001\082\001\000\000\083\001\000\000\000\000\000\000\100\001\ +\000\000\000\000\000\000\000\000\084\001\000\000\000\000\000\000\ +\000\000\085\005\078\001\079\001\000\000\085\001\000\000\000\000\ +\000\000\000\000\080\001\086\001\087\001\088\001\089\001\090\001\ +\081\001\082\001\000\000\083\001\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\084\001\000\000\091\001\000\000\ +\000\000\000\000\000\000\186\000\000\000\085\001\000\000\000\000\ +\092\001\093\001\000\000\086\001\087\001\088\001\089\001\090\001\ +\000\000\000\000\000\000\094\001\095\001\096\001\097\001\098\001\ +\000\000\000\000\000\000\000\000\000\000\107\005\091\001\078\001\ +\079\001\000\000\000\000\186\000\000\000\100\001\000\000\080\001\ +\092\001\093\001\000\000\000\000\000\000\081\001\082\001\000\000\ +\083\001\000\000\000\000\094\001\095\001\096\001\097\001\098\001\ +\000\000\084\001\000\000\000\000\000\000\000\000\000\000\108\005\ +\000\000\000\000\085\001\000\000\000\000\100\001\000\000\000\000\ +\086\001\087\001\088\001\089\001\090\001\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\091\001\078\001\079\001\000\000\000\000\ +\186\000\000\000\000\000\000\000\080\001\092\001\093\001\000\000\ +\000\000\000\000\081\001\082\001\000\000\083\001\000\000\000\000\ +\094\001\095\001\096\001\097\001\098\001\000\000\084\001\000\000\ +\000\000\000\000\000\000\000\000\000\000\099\001\000\000\085\001\ +\000\000\000\000\100\001\000\000\000\000\086\001\087\001\088\001\ +\089\001\090\001\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\091\001\078\001\079\001\000\000\000\000\186\000\000\000\000\000\ +\000\000\080\001\092\001\093\001\000\000\000\000\000\000\081\001\ +\082\001\000\000\083\001\000\000\000\000\094\001\095\001\096\001\ +\097\001\098\001\000\000\084\001\000\000\000\000\031\004\000\000\ +\000\000\078\001\079\001\000\000\085\001\000\000\000\000\100\001\ +\000\000\080\001\086\001\087\001\088\001\089\001\090\001\081\001\ +\082\001\000\000\083\001\000\000\000\000\000\000\000\000\000\000\ +\000\000\138\004\000\000\084\001\000\000\091\001\000\000\000\000\ +\000\000\000\000\186\000\000\000\085\001\000\000\000\000\092\001\ +\093\001\000\000\086\001\087\001\088\001\089\001\090\001\000\000\ +\000\000\000\000\094\001\095\001\096\001\097\001\098\001\000\000\ +\000\000\000\000\000\000\093\004\000\000\091\001\078\001\079\001\ +\000\000\000\000\186\000\000\000\100\001\000\000\080\001\092\001\ +\093\001\000\000\000\000\000\000\081\001\082\001\000\000\083\001\ +\000\000\000\000\094\001\095\001\096\001\097\001\098\001\000\000\ +\084\001\000\000\000\000\000\000\000\000\000\000\240\000\240\000\ +\000\000\085\001\000\000\000\000\100\001\000\000\240\000\086\001\ +\087\001\088\001\089\001\090\001\240\000\240\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\240\000\000\000\091\001\000\000\000\000\000\000\000\000\186\000\ +\000\000\240\000\000\000\000\000\092\001\093\001\000\000\240\000\ +\240\000\240\000\240\000\240\000\000\000\000\000\000\000\094\001\ +\095\001\096\001\097\001\098\001\000\000\000\000\000\000\000\000\ +\000\000\000\000\240\000\078\001\079\001\000\000\000\000\240\000\ +\000\000\100\001\000\000\080\001\240\000\240\000\000\000\000\000\ 
+\000\000\081\001\000\000\000\000\000\000\000\000\000\000\240\000\ +\240\000\240\000\240\000\240\000\000\000\084\001\000\000\000\000\ +\240\000\000\000\000\000\078\001\079\001\000\000\085\001\000\000\ +\000\000\240\000\000\000\000\000\086\001\087\001\088\001\089\001\ +\090\001\081\001\000\000\000\000\000\000\000\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\084\001\000\000\091\001\ +\000\000\000\000\000\000\000\000\186\000\000\000\085\001\000\000\ +\000\000\092\001\093\001\000\000\086\001\087\001\088\001\089\001\ +\090\001\094\000\000\000\000\000\094\001\095\001\096\001\097\001\ +\098\001\000\000\000\000\000\000\000\000\000\000\000\000\091\001\ +\095\000\016\000\000\000\000\000\186\000\000\000\100\001\000\000\ +\000\000\092\001\093\001\000\000\000\000\096\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\095\001\096\001\097\001\ +\098\001\000\000\000\000\136\000\000\000\137\000\138\000\030\000\ +\031\000\139\000\000\000\000\000\140\000\141\000\100\001\000\000\ +\035\000\000\000\000\000\000\000\000\000\000\000\097\000\000\000\ +\000\000\000\000\000\000\000\000\042\000\142\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\143\000\144\000\000\000\ +\000\000\000\000\000\000\000\000\098\000\145\000\000\000\000\000\ +\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ +\099\000\146\000\147\000\053\000" + +let yycheck = "\009\000\ +\210\000\145\000\012\000\002\000\014\000\015\000\016\000\136\000\ +\199\000\019\000\020\000\021\000\022\000\023\000\002\000\025\000\ +\163\000\132\001\142\000\205\000\136\000\163\002\032\000\202\000\ +\002\000\123\001\036\000\002\000\202\000\039\000\040\000\041\000\ +\011\000\001\000\034\003\002\000\010\000\024\001\010\002\049\000\ +\050\000\027\000\136\000\053\000\054\000\139\000\002\000\026\000\ +\002\000\138\000\002\000\234\003\163\002\002\000\038\003\253\002\ +\098\000\157\000\221\000\029\000\223\000\105\002\003\000\004\000\ +\186\003\225\002\045\000\063\004\110\000\000\000\170\000\115\004\ +\194\003\063\003\003\000\004\000\031\000\006\000\046\000\241\004\ +\035\000\056\005\201\004\063\005\094\000\095\000\096\000\097\000\ +\131\000\099\000\133\000\003\000\008\001\001\000\134\001\003\000\ +\004\000\141\002\000\000\098\000\054\001\003\003\021\001\000\001\ +\083\000\034\000\085\000\086\000\058\000\110\002\098\000\110\000\ +\067\005\214\004\203\000\000\000\000\001\003\005\000\001\042\002\ +\098\000\127\002\110\000\098\000\017\001\240\001\010\001\164\001\ +\000\001\166\001\005\000\098\000\110\000\074\001\192\004\110\000\ +\000\001\056\001\046\000\149\000\007\001\110\004\098\000\110\000\ +\098\000\000\001\098\000\121\000\000\001\098\000\058\000\161\000\ +\162\000\139\000\110\000\000\001\110\000\008\001\110\000\000\000\ +\087\004\110\000\244\004\173\000\014\001\000\001\052\005\108\001\ +\226\004\000\001\144\000\077\005\060\001\066\001\000\000\001\005\ +\092\001\006\001\188\000\030\001\103\002\008\001\157\005\000\001\ +\066\001\004\001\127\000\197\000\129\000\008\001\131\000\073\001\ +\133\000\000\001\066\001\092\001\015\001\099\003\133\001\018\001\ +\129\000\000\000\066\001\030\001\055\001\000\001\037\001\162\005\ +\092\001\073\001\189\000\095\001\094\001\214\002\065\001\008\001\ +\000\001\129\000\224\002\127\000\014\001\129\000\094\001\131\000\ +\115\001\133\000\000\001\206\000\055\001\079\001\094\001\186\000\ +\187\000\115\001\203\004\121\005\091\001\030\001\065\001\091\001\ +\095\001\000\001\036\001\027\001\130\005\000\001\091\001\066\001\ +\107\002\108\002\095\001\115\001\054\005\027\001\073\001\106\001\ 
+\101\005\237\005\109\001\094\001\008\001\000\001\055\001\092\001\ +\095\001\017\001\121\003\196\000\022\001\023\001\064\001\000\000\ +\065\001\000\001\197\001\204\000\027\001\094\001\123\005\106\001\ +\027\001\008\001\109\001\109\005\143\002\151\004\022\001\094\001\ +\154\004\005\003\044\001\000\001\000\001\055\001\113\001\067\001\ +\091\001\204\001\037\001\206\001\202\003\095\001\064\001\097\001\ +\058\001\193\001\029\001\091\001\176\001\063\001\094\001\095\001\ +\231\001\106\001\000\001\109\001\109\001\091\001\027\001\157\001\ +\074\001\095\001\000\001\000\001\192\001\022\001\164\001\050\001\ +\166\001\252\001\046\003\217\003\091\001\010\001\001\002\173\001\ +\174\001\087\003\088\003\094\001\027\001\040\006\094\001\094\001\ +\092\001\236\001\130\002\094\001\102\001\027\001\188\001\105\001\ +\154\005\107\001\000\001\109\001\095\001\111\001\094\001\038\002\ +\205\005\163\005\033\005\034\005\151\001\115\003\153\001\019\001\ +\155\001\021\001\215\005\000\001\071\001\000\000\048\002\008\001\ +\164\005\000\001\072\001\160\002\026\001\024\001\091\001\091\001\ +\057\001\015\006\095\001\141\001\107\006\143\001\120\001\066\001\ +\255\005\066\001\117\001\118\001\069\001\026\001\121\001\000\001\ +\123\001\000\001\048\004\074\005\056\001\159\001\069\002\094\001\ +\094\001\010\001\015\001\000\001\035\001\091\001\000\001\092\001\ +\094\001\095\001\095\001\008\006\072\001\000\001\000\001\177\001\ +\178\001\014\001\099\001\000\001\017\001\000\001\103\001\081\006\ +\099\005\083\006\004\001\014\001\059\001\000\001\008\001\125\004\ +\000\001\064\001\065\001\173\001\174\001\015\001\092\001\000\001\ +\018\001\203\001\190\005\074\001\000\001\003\001\188\004\103\004\ +\210\001\000\001\088\006\092\001\214\001\004\001\105\001\092\001\ +\027\001\008\001\151\001\010\001\153\001\058\006\155\001\014\001\ +\000\001\227\001\228\001\018\001\099\001\076\004\232\001\027\001\ +\234\001\008\001\010\001\000\001\027\001\000\001\109\001\066\001\ +\062\001\094\001\000\001\092\001\091\001\000\001\095\001\007\000\ +\066\001\251\001\000\001\151\001\000\001\153\001\032\002\155\001\ +\171\005\094\001\004\001\039\006\094\001\007\002\008\001\009\002\ +\010\002\000\001\000\001\094\001\094\001\015\001\091\001\000\001\ +\018\001\092\001\095\001\055\004\052\003\241\003\193\005\000\001\ +\091\001\000\001\073\001\004\001\095\001\035\001\094\001\008\001\ +\017\001\010\001\018\001\092\001\004\001\014\001\040\002\091\001\ +\000\001\032\002\094\001\095\001\091\001\092\001\221\005\094\001\ +\095\001\027\002\027\001\190\002\032\002\059\001\014\001\000\001\ +\172\002\173\002\064\001\065\001\092\001\092\001\032\002\095\001\ +\066\001\032\002\113\001\000\001\074\001\000\001\091\001\103\003\ +\091\001\032\002\160\002\231\001\095\001\109\003\070\005\171\002\ +\094\001\002\001\253\002\094\001\032\002\003\001\032\002\091\001\ +\032\002\036\002\056\002\032\002\038\003\099\001\028\005\073\002\ +\073\001\000\001\066\001\022\001\091\001\072\002\000\001\109\001\ +\094\001\065\001\066\001\010\001\014\001\094\001\094\001\113\001\ +\074\002\075\002\091\001\092\001\151\002\094\001\095\001\094\001\ +\003\001\018\001\042\002\038\003\047\001\000\001\229\004\129\002\ +\038\003\004\001\132\002\053\003\134\002\008\001\094\001\010\001\ +\113\001\004\001\000\001\014\001\022\001\027\001\008\001\018\001\ +\230\002\066\001\232\002\092\001\164\000\165\000\017\002\018\002\ +\027\001\100\002\018\001\076\002\064\001\107\004\000\001\092\001\ +\014\001\177\000\178\000\094\001\250\002\032\002\015\001\159\005\ +\008\001\069\002\036\001\022\001\018\001\205\003\097\001\098\001\ 
+\018\001\076\003\170\005\078\003\066\001\183\002\090\001\103\002\ +\019\001\201\000\003\001\073\001\094\001\092\001\111\002\112\002\ +\115\001\019\001\092\001\092\001\036\001\094\001\073\001\201\002\ +\192\005\203\002\110\001\205\002\066\001\237\003\073\001\209\002\ +\090\001\008\001\068\006\065\001\066\001\048\001\180\002\018\001\ +\091\001\092\001\151\002\094\001\095\001\083\001\048\001\049\001\ +\049\003\060\001\092\001\018\001\092\001\115\001\066\003\233\002\ +\018\001\068\001\060\001\070\001\200\004\062\003\113\001\090\001\ +\094\001\027\002\068\001\018\001\070\001\094\001\092\001\146\003\ +\094\001\022\001\242\005\151\002\092\001\255\002\134\003\133\005\ +\232\005\172\003\004\003\005\003\000\000\012\001\172\003\008\001\ +\214\004\001\006\118\003\022\001\067\001\015\003\130\003\017\003\ +\251\003\036\001\253\003\254\003\111\001\113\003\000\001\027\001\ +\031\001\022\001\028\003\029\003\060\004\111\001\133\005\073\002\ +\118\003\092\001\088\001\067\004\065\001\039\003\215\002\216\002\ +\000\001\019\001\248\004\050\001\046\003\092\001\006\003\170\002\ +\026\001\220\003\047\001\027\001\094\001\139\003\220\003\057\003\ +\000\000\015\001\112\001\022\001\030\001\238\002\066\001\186\002\ +\071\001\094\001\067\001\031\005\102\003\060\003\048\001\049\001\ +\022\001\196\002\000\001\252\002\095\001\084\001\080\003\090\001\ +\060\003\065\001\060\001\047\005\187\005\055\001\189\005\065\001\ +\014\001\019\001\068\001\017\001\070\001\000\001\101\001\065\001\ +\022\001\047\001\100\003\014\001\022\004\027\001\065\001\014\001\ +\064\001\065\001\006\003\014\001\066\001\000\001\017\001\102\003\ +\027\001\004\001\237\002\022\001\027\001\008\001\094\001\014\001\ +\027\001\047\001\102\003\014\001\015\001\109\001\128\003\018\001\ +\092\005\131\003\004\001\133\003\102\003\111\001\008\001\102\003\ +\106\001\067\001\003\001\109\001\047\001\107\003\144\003\102\003\ +\018\001\177\004\148\003\097\001\098\001\070\003\064\001\101\005\ +\066\001\155\003\102\003\027\001\102\003\159\003\102\003\000\001\ +\126\003\102\003\083\003\015\001\079\001\091\001\018\001\117\005\ +\079\001\095\001\065\001\097\001\098\001\123\005\088\001\066\001\ +\066\006\067\006\047\001\181\003\196\001\064\001\184\003\215\004\ +\091\001\035\001\188\003\003\001\095\001\115\001\097\001\098\001\ +\066\001\225\004\066\001\110\004\064\001\065\001\112\001\073\001\ +\110\004\000\001\069\003\094\001\022\001\221\001\222\001\223\001\ +\115\001\059\001\125\004\213\003\022\001\229\001\066\001\065\001\ +\030\001\139\004\092\001\022\001\019\001\004\001\082\004\064\001\ +\065\001\008\001\109\001\026\001\097\001\098\001\014\001\014\001\ +\015\001\090\001\124\006\018\001\066\001\067\001\240\003\241\003\ +\096\001\055\001\110\001\073\001\000\001\005\002\115\001\065\001\ +\250\003\048\001\252\003\065\001\102\001\110\001\176\004\205\005\ +\206\005\067\001\022\001\109\001\000\001\060\001\065\001\096\001\ +\004\001\215\005\012\004\014\001\008\001\068\001\010\001\070\001\ +\035\001\033\002\014\001\015\001\197\003\198\003\018\001\035\001\ +\027\001\065\001\066\001\066\001\027\001\115\001\116\004\027\001\ +\203\004\035\001\211\003\212\003\106\001\203\004\216\004\109\001\ +\059\001\218\003\132\005\009\004\060\002\065\001\065\001\059\001\ +\000\001\051\004\227\003\053\004\064\001\065\001\000\001\094\001\ +\111\001\059\001\008\006\061\004\000\001\091\001\074\001\065\001\ +\004\001\095\001\030\001\066\001\008\001\071\004\066\001\000\000\ +\066\001\019\001\026\001\015\001\079\001\073\001\018\001\073\001\ +\026\001\220\004\084\004\102\001\055\001\000\001\209\003\099\001\ 
+\064\001\065\001\109\001\055\001\094\001\064\001\217\003\091\001\ +\092\001\109\001\094\001\095\001\102\001\065\001\048\001\049\001\ +\064\001\014\001\229\003\109\001\058\006\111\001\005\000\077\004\ +\007\000\028\005\060\001\075\001\008\001\113\001\027\001\065\001\ +\035\001\115\001\068\001\244\004\070\001\248\003\066\001\000\001\ +\031\001\064\001\027\001\023\001\059\005\109\001\112\004\035\003\ +\244\004\116\005\030\001\105\004\142\004\112\001\106\001\145\004\ +\059\001\109\001\073\005\050\001\127\005\064\001\065\001\051\003\ +\112\001\026\001\094\001\055\003\065\001\000\001\244\004\074\001\ +\162\004\053\001\164\004\055\001\166\004\111\001\168\004\169\004\ +\064\001\066\001\055\001\173\004\099\005\065\001\109\001\191\002\ +\178\004\008\001\180\004\064\001\182\004\004\001\184\004\026\001\ +\099\001\008\001\086\003\064\001\065\001\004\001\206\002\207\002\ +\023\001\008\001\109\001\018\001\061\005\022\001\200\004\030\001\ +\015\001\097\001\123\004\018\001\027\001\064\001\127\004\066\001\ +\108\004\061\005\125\005\132\004\027\001\109\001\106\001\125\005\ +\075\001\109\001\014\001\066\001\222\004\017\001\053\001\014\001\ +\055\001\227\004\242\002\112\001\149\004\150\004\145\005\061\005\ +\030\001\235\004\065\001\145\005\053\001\158\004\055\001\056\001\ +\065\001\136\000\169\005\066\001\139\000\247\004\141\000\142\000\ +\065\001\027\001\252\004\066\001\050\001\112\001\000\005\106\006\ +\002\005\066\001\004\005\126\004\181\004\007\005\109\001\130\004\ +\191\005\000\001\064\001\065\001\000\001\164\000\165\000\008\001\ +\167\000\065\001\083\001\106\001\022\005\064\001\109\001\064\001\ +\026\005\073\001\177\000\178\000\019\001\031\005\004\001\019\001\ +\066\001\235\005\008\001\026\001\109\001\008\005\026\001\210\005\ +\035\001\015\001\027\001\108\001\210\005\047\005\048\005\101\001\ +\050\005\028\001\201\000\202\000\106\001\027\001\205\000\109\001\ +\179\004\048\001\049\001\088\001\048\001\232\005\014\001\065\005\ +\059\001\066\001\232\005\190\004\109\001\060\001\065\001\000\001\ +\060\001\100\001\003\001\027\001\212\005\068\001\000\000\070\001\ +\068\001\066\001\070\001\112\001\013\001\014\001\027\001\066\001\ +\017\001\091\005\092\005\027\001\066\001\094\001\022\001\074\001\ +\098\005\026\001\027\001\028\001\029\001\080\001\000\001\003\005\ +\083\001\045\001\046\001\102\001\029\005\064\001\112\005\040\001\ +\041\001\065\001\109\001\066\001\111\001\035\001\003\001\047\001\ +\111\001\019\001\043\005\111\001\045\005\066\001\128\005\037\001\ +\026\001\145\003\066\001\060\001\083\001\088\001\063\001\022\001\ +\065\001\066\001\067\001\068\001\064\001\059\001\144\005\083\001\ +\073\001\074\001\064\001\065\001\064\001\151\005\048\001\080\001\ +\052\005\000\001\064\001\040\001\074\001\112\001\109\001\161\005\ +\100\001\132\005\060\001\092\001\166\005\094\001\000\001\096\001\ +\097\001\067\001\068\001\236\003\070\001\035\001\041\005\000\001\ +\066\001\155\005\180\005\108\001\158\005\099\001\111\001\199\003\ +\023\001\109\001\115\001\018\001\064\001\066\001\037\001\109\001\ +\026\001\109\001\019\001\004\004\063\005\059\001\200\005\075\001\ +\000\001\026\001\064\001\065\001\094\001\207\005\064\001\000\001\ +\224\003\225\003\226\003\213\005\074\001\111\001\230\003\136\005\ +\218\005\219\005\000\001\019\001\236\003\121\005\224\005\048\001\ +\049\001\227\005\026\001\027\001\010\001\053\001\130\005\055\001\ +\000\000\026\001\236\005\060\001\112\001\099\001\240\005\134\001\ +\064\001\065\001\244\005\068\001\004\004\070\001\064\001\109\001\ +\048\001\049\001\004\001\109\001\230\005\231\005\008\001\233\005\ 
+\234\005\022\001\000\001\003\001\060\001\015\001\157\001\009\006\ +\018\001\064\001\112\001\067\001\068\001\164\001\070\001\166\001\ +\090\001\027\001\064\001\040\001\143\005\012\006\173\001\174\001\ +\064\001\176\001\149\005\064\001\065\001\109\001\111\001\022\001\ +\012\006\000\001\071\001\109\001\110\001\188\001\035\001\037\001\ +\040\001\192\001\044\006\045\006\167\005\196\001\197\001\084\001\ +\225\005\051\006\052\006\053\006\054\006\090\001\109\001\111\001\ +\066\001\059\006\033\001\026\001\075\001\063\006\059\001\109\001\ +\110\001\022\001\016\001\069\006\065\001\109\001\221\001\222\001\ +\223\001\110\001\000\000\077\006\078\006\027\001\229\001\202\005\ +\055\001\004\001\037\001\040\001\059\001\008\001\064\001\095\001\ +\063\001\064\001\065\001\066\001\015\001\095\006\096\006\018\001\ +\076\000\112\001\100\006\064\001\102\006\252\001\253\001\078\001\ +\025\006\102\001\001\002\085\006\110\006\064\001\005\002\113\006\ +\109\001\008\002\035\006\015\006\064\001\022\001\053\001\097\001\ +\055\001\123\006\017\002\018\002\064\001\127\006\027\001\105\006\ +\108\000\064\001\065\001\109\001\134\006\135\006\109\001\040\001\ +\033\001\032\002\033\002\064\001\064\001\119\006\120\006\066\001\ +\109\001\125\000\000\000\042\002\095\001\000\001\064\001\065\001\ +\132\000\048\002\075\006\000\001\064\001\071\001\055\001\095\001\ +\066\001\109\001\059\001\013\001\031\006\060\002\063\001\064\001\ +\065\001\109\001\084\001\000\001\093\006\066\001\109\001\042\006\ +\090\001\083\001\028\001\029\001\073\001\078\001\064\001\091\001\ +\109\001\109\001\037\001\016\002\088\006\022\001\019\001\041\001\ +\037\001\075\001\023\002\109\001\110\001\026\001\027\001\066\006\ +\067\006\094\001\108\001\066\001\000\000\072\006\073\006\128\006\ +\103\002\091\001\060\001\000\001\109\001\063\001\047\001\082\006\ +\079\001\040\001\068\001\048\001\049\001\022\001\115\001\000\001\ +\074\001\055\001\000\001\094\006\066\001\059\001\080\001\060\001\ +\000\001\063\001\064\001\130\002\004\001\026\001\067\001\068\001\ +\008\001\070\001\010\001\066\001\111\006\083\001\014\001\015\001\ +\078\001\026\001\018\001\094\001\026\001\000\000\121\006\094\001\ +\090\001\124\006\108\001\027\001\000\001\111\001\093\001\130\006\ +\131\006\160\002\109\001\110\001\163\002\053\001\054\001\055\001\ +\056\001\031\001\169\002\170\002\110\001\172\002\173\002\109\001\ +\064\001\065\001\111\001\000\001\093\001\055\001\026\001\000\001\ +\033\001\059\001\066\001\186\002\050\001\063\001\064\001\077\001\ +\191\002\010\001\066\001\004\001\109\001\196\002\019\001\008\001\ +\080\001\073\001\094\001\083\001\078\001\026\001\055\001\206\002\ +\207\002\018\001\059\001\071\001\064\001\065\001\063\001\064\001\ +\065\001\109\001\027\001\091\001\092\001\109\001\094\001\095\001\ +\084\001\065\001\000\001\048\001\049\001\078\001\004\001\230\002\ +\093\001\232\002\008\001\109\001\010\001\003\001\237\002\060\001\ +\014\001\113\001\093\001\242\002\018\001\073\001\067\001\068\001\ +\109\001\070\001\064\001\250\002\251\002\027\001\253\002\000\000\ +\009\000\065\001\109\001\012\000\109\001\014\000\015\000\016\000\ +\007\003\073\001\019\000\020\000\021\000\022\000\023\000\004\001\ +\025\000\064\001\065\001\008\001\014\001\065\001\066\001\017\001\ +\004\001\014\001\015\001\036\000\008\001\018\001\039\000\040\000\ +\041\000\027\001\111\001\064\001\066\001\092\001\018\001\038\003\ +\049\000\050\000\000\001\073\001\053\000\054\000\004\001\027\001\ +\066\001\008\001\008\001\014\001\010\001\052\003\053\003\073\001\ +\014\001\015\001\095\001\004\001\065\001\091\001\092\001\008\001\ 
+\094\001\095\001\071\001\016\001\152\001\027\001\069\003\000\001\ +\036\001\022\001\000\000\000\001\094\001\066\001\027\001\084\001\ +\073\001\010\001\027\001\113\001\014\001\094\000\095\000\096\000\ +\097\000\022\001\099\000\090\001\000\000\109\001\019\001\004\001\ +\014\001\115\001\022\001\008\001\000\001\026\001\000\001\003\001\ +\103\003\095\001\015\001\095\001\066\001\018\001\109\003\055\001\ +\010\001\013\001\092\001\073\001\103\001\017\001\027\001\118\003\ +\066\001\067\001\121\003\048\001\049\001\014\001\026\001\027\001\ +\028\001\029\001\022\001\130\003\027\001\091\001\092\001\060\001\ +\094\001\095\001\064\001\065\001\139\003\041\001\067\001\068\001\ +\092\001\070\001\145\003\173\001\174\001\000\001\092\001\091\001\ +\161\000\162\000\109\001\113\001\092\001\066\001\092\001\053\001\ +\060\001\055\001\013\001\063\001\255\001\000\002\017\001\067\001\ +\068\001\014\001\109\001\065\001\094\001\172\003\074\001\026\001\ +\027\001\028\001\029\001\094\001\080\001\053\001\010\002\055\001\ +\115\001\020\001\111\001\053\001\197\000\055\001\041\001\004\001\ +\092\001\065\001\094\001\008\001\096\001\097\001\109\001\065\001\ +\199\003\053\001\015\001\055\001\046\001\018\001\205\003\115\001\ +\108\001\060\001\209\003\111\001\063\001\065\001\062\001\066\001\ +\067\001\068\001\217\003\109\001\219\003\220\003\073\001\074\001\ +\108\001\224\003\225\003\226\003\109\001\080\001\229\003\230\003\ +\053\001\109\001\055\001\234\003\022\001\236\003\237\003\002\001\ +\073\001\092\001\100\001\094\001\065\001\096\001\097\001\013\001\ +\000\000\248\003\065\001\066\001\067\001\066\001\073\001\000\001\ +\027\001\108\001\003\001\109\001\111\001\004\004\028\001\029\001\ +\115\001\015\001\092\001\064\001\013\001\022\001\023\001\094\001\ +\017\001\000\001\064\001\041\001\064\001\022\001\008\001\022\004\ +\065\001\026\001\027\001\028\001\029\001\109\001\040\001\018\001\ +\014\001\062\001\062\001\044\001\062\001\007\000\060\001\027\001\ +\041\001\000\000\092\001\094\001\064\001\079\001\068\001\064\001\ +\014\001\058\001\134\002\014\001\074\001\006\001\063\001\094\001\ +\026\000\073\001\080\001\060\001\109\001\060\004\063\001\095\001\ +\065\001\066\001\067\001\068\001\067\004\064\001\075\001\073\001\ +\073\001\074\001\096\001\090\001\022\001\076\004\092\001\080\001\ +\027\001\094\001\014\001\082\004\073\001\040\001\108\001\013\001\ +\087\004\111\001\094\001\092\001\000\001\094\001\027\001\096\001\ +\097\001\014\001\027\001\021\001\008\001\086\001\028\001\029\001\ +\064\001\013\001\064\001\108\001\107\004\108\004\111\001\110\004\ +\062\001\062\001\115\001\041\001\014\001\116\004\026\001\062\001\ +\028\001\029\001\062\001\062\001\062\001\003\001\125\004\126\004\ +\014\001\086\001\064\001\130\004\027\001\041\001\060\001\091\001\ +\095\001\063\001\073\001\101\001\139\004\014\001\068\001\094\001\ +\027\001\027\001\094\001\014\001\074\001\094\001\000\000\088\001\ +\060\001\094\001\080\001\063\001\080\001\064\001\066\001\067\001\ +\068\001\027\001\073\001\014\001\020\001\015\001\074\001\022\001\ +\177\001\094\001\096\001\097\001\080\001\053\001\008\001\145\000\ +\065\001\176\004\177\004\073\001\179\004\062\001\108\001\062\001\ +\092\001\111\001\062\001\014\001\096\001\097\001\094\001\190\004\ +\112\001\163\000\164\000\165\000\112\001\167\000\094\001\073\001\ +\108\001\210\001\021\001\111\001\203\004\064\001\091\001\177\000\ +\178\000\037\003\073\001\053\001\054\001\055\001\056\001\214\004\ +\215\004\216\004\000\001\088\001\095\001\014\001\064\001\065\001\ +\094\001\014\001\225\004\014\001\056\003\014\001\229\004\201\000\ 
+\202\000\061\003\091\001\205\000\027\001\019\001\019\001\027\001\ +\112\001\088\001\241\004\022\001\026\001\244\004\014\001\014\001\ +\000\001\248\004\014\001\003\001\014\001\000\000\000\000\096\001\ +\084\003\096\001\001\005\092\001\003\005\013\001\008\001\109\001\ +\109\001\017\001\048\001\109\001\064\001\092\001\022\001\036\001\ +\090\001\036\001\026\001\027\001\028\001\029\001\060\001\036\001\ +\092\001\065\001\040\001\065\001\112\003\028\005\068\001\040\002\ +\070\001\041\001\033\005\034\005\064\001\094\001\036\001\064\001\ +\091\001\000\001\041\005\053\001\003\001\001\000\002\000\003\000\ +\004\000\005\000\006\000\007\000\060\001\052\005\013\001\063\001\ +\053\001\065\001\066\001\067\001\068\001\064\001\061\005\062\005\ +\063\005\073\001\074\001\026\001\064\001\028\001\029\001\064\001\ +\080\001\111\001\064\001\074\005\064\001\064\001\077\005\127\000\ +\099\003\040\001\041\001\210\005\092\001\000\000\094\001\008\005\ +\096\001\097\001\060\001\129\005\082\006\199\005\072\002\248\002\ +\190\003\145\001\145\005\183\003\108\001\060\001\101\005\111\001\ +\063\001\122\001\103\002\115\001\067\001\068\001\109\005\199\003\ +\230\001\165\000\190\002\074\001\064\001\065\001\117\005\018\005\ +\027\004\080\001\121\005\071\001\123\005\137\001\125\005\226\001\ +\103\004\077\001\232\002\130\005\188\001\092\001\133\005\133\005\ +\084\001\096\001\097\001\099\005\171\005\221\004\090\001\255\255\ +\143\005\115\001\145\005\255\255\255\255\108\001\149\005\255\255\ +\111\001\255\255\238\003\239\003\255\255\255\255\255\255\255\255\ +\255\255\109\001\110\001\013\001\255\255\255\255\255\255\255\255\ +\167\005\255\255\255\255\255\003\255\255\255\255\255\255\145\001\ +\255\255\255\255\028\001\029\001\255\255\255\255\255\255\255\255\ +\012\004\255\255\255\255\255\255\187\005\255\255\189\005\041\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\028\004\255\255\255\255\202\005\255\255\255\255\205\005\206\005\ +\255\255\000\001\060\001\210\005\255\255\063\001\255\255\255\255\ +\215\005\255\255\068\001\255\255\255\255\255\255\221\005\193\001\ +\074\001\255\255\196\001\197\001\019\001\255\255\080\001\255\255\ +\255\255\232\005\062\004\026\001\000\001\255\255\000\000\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\096\001\097\001\ +\255\255\255\255\255\255\221\001\222\001\223\001\255\255\019\001\ +\255\255\048\001\108\001\229\001\255\255\111\001\026\001\255\255\ +\255\255\008\006\236\001\255\255\255\255\060\001\255\255\255\255\ +\015\006\255\255\007\000\255\255\067\001\068\001\011\000\070\001\ +\255\255\109\004\252\001\253\001\048\001\049\001\255\255\001\002\ +\031\006\255\255\118\004\005\002\255\255\026\000\008\002\255\255\ +\060\001\255\255\255\255\042\006\255\255\255\255\016\002\067\001\ +\068\001\255\255\070\001\255\255\255\255\023\002\255\255\255\255\ +\045\000\255\255\255\255\058\006\255\255\255\255\255\255\033\002\ +\111\001\255\255\255\255\066\006\067\006\255\255\255\255\080\003\ +\042\002\072\006\073\006\255\255\255\255\255\255\048\002\255\255\ +\255\255\255\255\081\006\082\006\083\006\000\001\255\255\255\255\ +\003\001\088\006\060\002\111\001\255\255\063\002\083\000\094\006\ +\085\000\086\000\013\001\183\004\255\255\185\004\072\002\255\255\ +\019\001\000\001\255\255\255\255\255\255\255\255\255\255\026\001\ +\111\006\028\001\029\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\121\006\255\255\255\255\124\006\041\001\255\255\ +\255\255\255\255\255\255\130\006\131\006\103\002\255\255\255\255\ +\255\255\255\255\222\004\255\255\255\255\255\255\255\255\227\004\ 
+\255\255\060\001\255\255\255\255\063\001\255\255\000\000\255\255\ +\067\001\068\001\255\255\255\255\255\255\255\255\255\255\074\001\ +\055\001\255\255\057\001\058\001\059\001\080\001\061\001\255\255\ +\255\255\064\001\065\001\086\001\255\255\255\255\255\255\164\000\ +\165\000\092\001\167\000\255\255\255\255\096\001\097\001\255\255\ +\255\255\255\255\081\001\255\255\177\000\178\000\255\255\019\005\ +\255\255\108\001\089\001\090\001\111\001\255\255\255\255\255\255\ +\189\000\255\255\097\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\201\000\202\000\109\001\110\001\ +\255\255\206\000\255\255\255\255\190\002\191\002\255\255\051\005\ +\255\255\053\005\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\206\002\207\002\000\001\255\255\ +\002\001\003\001\004\001\071\005\255\255\255\255\008\001\075\005\ +\076\005\255\255\255\255\013\001\255\255\255\255\255\255\017\001\ +\018\001\019\001\255\255\255\255\255\255\231\002\090\005\255\255\ +\026\001\027\001\028\001\029\001\255\255\255\255\255\255\255\255\ +\242\002\255\255\036\001\255\255\000\000\255\255\255\255\041\001\ +\255\255\251\002\255\255\253\002\112\005\255\255\048\001\049\001\ +\255\255\255\255\255\255\255\255\255\255\007\003\255\255\255\255\ +\029\001\255\255\060\001\255\255\255\255\063\001\064\001\255\255\ +\066\001\067\001\068\001\255\255\070\001\255\255\255\255\073\001\ +\074\001\255\255\255\255\255\255\255\255\050\001\080\001\255\255\ +\255\255\255\255\036\003\255\255\038\003\255\255\255\255\255\255\ +\255\255\091\001\092\001\255\255\094\001\095\001\096\001\097\001\ +\255\255\255\255\100\001\053\003\255\255\255\255\255\255\171\005\ +\255\255\000\001\108\001\109\001\000\000\111\001\255\255\255\255\ +\180\005\115\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\019\001\193\005\255\255\255\255\ +\196\005\255\255\255\255\026\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\000\001\007\000\255\255\ +\117\001\118\001\255\255\255\255\121\001\255\255\123\001\255\255\ +\255\255\048\001\049\001\255\255\110\003\255\255\255\255\255\255\ +\019\001\229\005\255\255\255\255\000\000\060\001\000\001\026\001\ +\002\001\003\001\004\001\255\255\067\001\068\001\008\001\070\001\ +\255\255\255\255\255\255\013\001\134\003\255\255\255\255\017\001\ +\018\001\019\001\255\255\255\255\255\255\048\001\255\255\145\003\ +\026\001\027\001\028\001\029\001\255\255\255\255\255\255\255\255\ +\255\255\060\001\036\001\255\255\255\255\255\255\255\255\041\001\ +\067\001\068\001\255\255\070\001\255\255\255\255\048\001\049\001\ +\111\001\255\255\172\003\255\255\255\255\255\255\255\255\196\001\ +\197\001\255\255\060\001\255\255\255\255\063\001\255\255\255\255\ +\066\001\067\001\068\001\255\255\070\001\255\255\255\255\073\001\ +\074\001\255\255\255\255\055\006\255\255\199\003\080\001\255\255\ +\221\001\222\001\223\001\255\255\111\001\255\255\255\255\255\255\ +\229\001\091\001\092\001\255\255\094\001\095\001\096\001\097\001\ +\255\255\219\003\220\003\255\255\255\255\255\255\224\003\225\003\ +\226\003\255\255\108\001\255\255\230\003\111\001\255\255\252\001\ +\253\001\115\001\236\003\255\255\001\002\255\255\255\255\255\255\ +\005\002\101\006\102\006\255\255\000\001\255\255\255\255\255\255\ +\255\255\109\006\255\255\164\000\165\000\255\255\167\000\255\255\ +\255\255\013\001\004\004\255\255\255\255\255\255\255\255\255\255\ +\177\000\178\000\255\255\255\255\033\002\129\006\026\001\255\255\ +\028\001\029\001\255\255\255\255\022\004\042\002\255\255\255\255\ 
+\255\255\255\255\255\255\255\255\255\255\041\001\255\255\200\000\ +\201\000\202\000\007\000\255\255\255\255\255\255\011\000\060\002\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\060\001\255\255\255\255\072\002\054\004\026\000\066\001\067\001\ +\068\001\255\255\255\255\255\255\000\001\255\255\074\001\003\001\ +\255\255\255\255\255\255\255\255\080\001\255\255\255\255\255\255\ +\045\000\013\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\092\001\255\255\103\002\255\255\096\001\255\255\026\001\027\001\ +\028\001\029\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\108\001\255\255\255\255\111\001\255\255\041\001\255\255\255\255\ +\255\255\000\000\108\004\255\255\110\004\255\255\083\000\255\255\ +\085\000\086\000\255\255\255\255\000\001\255\255\255\255\003\001\ +\060\001\255\255\255\255\125\004\008\001\255\255\255\255\067\001\ +\068\001\013\001\014\001\255\255\255\255\255\255\074\001\019\001\ +\255\255\139\004\022\001\255\255\080\001\255\255\026\001\007\000\ +\028\001\029\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\092\001\255\255\094\001\068\001\096\001\041\001\255\255\255\255\ +\255\255\255\255\255\255\136\000\077\001\255\255\255\255\255\255\ +\108\001\255\255\191\002\111\001\255\255\255\255\176\004\255\255\ +\060\001\255\255\255\255\063\001\255\255\065\001\066\001\067\001\ +\068\001\206\002\207\002\006\001\255\255\008\001\074\001\164\000\ +\165\000\255\255\167\000\079\001\080\001\255\255\255\255\255\255\ +\255\255\203\004\255\255\255\255\177\000\178\000\255\255\255\255\ +\092\001\255\255\231\002\255\255\096\001\097\001\216\004\255\255\ +\189\000\255\255\220\004\255\255\255\255\242\002\255\255\255\255\ +\108\001\255\255\255\255\111\001\201\000\202\000\251\002\255\255\ +\253\002\206\000\255\255\255\255\055\001\255\255\057\001\058\001\ +\059\001\255\255\061\001\255\255\255\255\064\001\065\001\255\255\ +\255\255\255\255\255\255\000\001\255\255\255\255\255\255\255\255\ +\255\255\003\005\255\255\255\255\255\255\255\255\081\001\255\255\ +\013\001\255\255\255\255\255\255\255\255\006\001\089\001\090\001\ +\255\255\038\003\255\255\255\255\000\000\026\001\097\001\028\001\ +\029\001\255\255\028\005\196\001\197\001\255\255\255\255\255\255\ +\255\255\255\255\109\001\110\001\041\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\164\000\165\000\255\255\167\000\ +\255\255\255\255\052\005\220\001\221\001\222\001\223\001\060\001\ +\029\001\177\000\178\000\255\255\229\001\255\255\055\001\068\001\ +\057\001\058\001\059\001\255\255\061\001\074\001\015\001\064\001\ +\065\001\255\255\255\255\080\001\255\255\050\001\255\255\255\255\ +\255\255\201\000\202\000\252\001\253\001\255\255\255\255\255\255\ +\001\002\255\255\255\255\096\001\005\002\028\000\029\000\255\255\ +\255\255\090\001\043\001\044\001\045\001\046\001\015\002\108\001\ +\097\001\000\001\111\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\008\001\255\255\255\255\109\001\110\001\013\001\121\005\ +\033\002\066\001\255\255\125\005\145\003\255\255\071\001\072\001\ +\130\005\042\002\255\255\026\001\255\255\028\001\029\001\255\255\ +\255\255\255\255\083\001\084\001\085\001\086\001\255\255\145\005\ +\117\001\118\001\041\001\060\002\121\001\255\255\123\001\172\003\ +\087\000\088\000\255\255\100\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\060\001\255\255\255\255\ +\063\001\255\255\255\255\066\001\067\001\068\001\255\255\255\255\ +\255\255\255\255\199\003\074\001\255\255\255\255\255\255\255\255\ +\157\001\080\001\000\000\255\255\000\000\255\255\103\002\164\001\ 
+\255\255\166\001\255\255\255\255\255\255\092\001\219\003\220\003\ +\255\255\096\001\097\001\224\003\225\003\226\003\255\255\255\255\ +\210\005\230\003\212\005\255\255\255\255\108\001\255\255\236\003\ +\111\001\255\255\255\255\255\255\255\255\255\255\255\255\196\001\ +\197\001\255\255\255\255\255\255\255\255\255\255\232\005\255\255\ +\023\001\255\255\255\255\255\255\255\255\255\255\255\255\004\004\ +\134\001\243\005\255\255\255\255\255\255\036\001\255\255\255\255\ +\221\001\222\001\223\001\255\255\015\001\255\255\255\255\255\255\ +\229\001\255\255\255\255\255\255\255\255\007\006\255\255\255\255\ +\055\001\255\255\057\001\058\001\059\001\015\006\061\001\255\255\ +\018\006\064\001\065\001\255\255\000\001\255\255\191\002\252\001\ +\253\001\044\001\045\001\046\001\001\002\255\255\255\255\255\255\ +\005\002\013\001\255\255\255\255\255\255\206\002\207\002\255\255\ +\190\001\043\006\255\255\090\001\255\255\255\255\026\001\255\255\ +\028\001\029\001\097\001\255\255\071\001\072\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\033\002\041\001\109\001\110\001\ +\083\001\084\001\085\001\086\001\255\255\042\002\255\255\255\255\ +\255\255\242\002\255\255\255\255\196\001\197\001\255\255\255\255\ +\060\001\100\001\251\002\063\001\253\002\255\255\088\006\060\002\ +\068\001\110\004\255\255\255\255\255\255\255\255\074\001\255\255\ +\255\255\255\255\255\255\072\002\080\001\221\001\222\001\223\001\ +\125\004\255\255\255\255\255\255\255\255\229\001\230\001\255\255\ +\092\001\255\255\255\255\255\255\096\001\097\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\038\003\255\255\255\255\ +\108\001\255\255\103\002\111\001\252\001\253\001\255\255\255\255\ +\255\255\001\002\255\255\255\255\255\255\005\002\255\255\078\001\ +\079\001\080\001\081\001\082\001\083\001\084\001\085\001\086\001\ +\087\001\088\001\089\001\090\001\091\001\092\001\093\001\094\001\ +\095\001\096\001\097\001\098\001\000\000\100\001\255\255\255\255\ +\102\001\033\002\255\255\105\001\255\255\107\001\255\255\109\001\ +\255\255\111\001\042\002\114\001\255\255\255\255\203\004\255\255\ +\255\255\255\255\255\255\160\002\255\255\255\255\255\255\255\255\ +\127\001\255\255\255\255\255\255\060\002\255\255\255\255\013\001\ +\255\255\013\001\255\255\116\003\255\255\255\255\255\255\141\001\ +\255\255\143\001\255\255\255\255\255\255\255\255\028\001\029\001\ +\028\001\029\001\191\002\255\255\255\255\255\255\255\255\255\255\ +\255\255\159\001\255\255\041\001\255\255\041\001\255\255\255\255\ +\145\003\206\002\207\002\255\255\130\002\255\255\255\255\103\002\ +\255\255\000\000\255\255\008\005\255\255\255\255\060\001\255\255\ +\060\001\063\001\255\255\063\001\255\255\255\255\068\001\255\255\ +\068\001\255\255\255\255\172\003\074\001\255\255\074\001\028\005\ +\255\255\255\255\080\001\006\001\080\001\242\002\255\255\255\255\ +\255\255\255\255\255\255\169\002\255\255\255\255\251\002\255\255\ +\253\002\255\255\096\001\097\001\096\001\097\001\199\003\255\255\ +\255\255\255\255\015\001\255\255\255\255\255\255\108\001\255\255\ +\108\001\111\001\232\001\111\001\234\001\255\255\255\255\255\255\ +\255\255\255\255\219\003\220\003\255\255\255\255\223\003\224\003\ +\225\003\226\003\255\255\255\255\055\001\230\003\057\001\058\001\ +\059\001\038\003\061\001\236\003\255\255\064\001\065\001\191\002\ +\255\255\007\002\055\001\009\002\057\001\058\001\059\001\255\255\ +\061\001\255\255\255\255\064\001\065\001\255\255\206\002\207\002\ +\255\255\024\002\255\255\004\004\255\255\074\001\029\002\090\001\ +\255\255\255\255\255\255\255\255\081\001\255\255\097\001\255\255\ 
+\125\005\255\255\255\255\255\255\089\001\090\001\255\255\132\005\ +\255\255\094\001\109\001\110\001\097\001\255\255\255\255\255\255\ +\255\255\255\255\242\002\255\255\145\005\255\255\255\255\255\255\ +\109\001\110\001\255\255\251\002\255\255\253\002\255\255\255\255\ +\255\255\255\255\255\255\074\002\075\002\255\255\255\255\255\255\ +\255\255\118\003\255\255\255\255\255\255\255\255\123\003\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\052\003\255\255\ +\255\255\000\000\255\255\255\255\000\001\255\255\255\255\003\001\ +\255\255\255\255\255\255\255\255\145\003\255\255\038\003\110\002\ +\255\255\013\001\255\255\255\255\115\002\116\002\117\002\255\255\ +\255\255\255\255\255\255\255\255\255\255\210\005\026\001\027\001\ +\028\001\029\001\255\255\129\002\255\255\110\004\132\002\172\003\ +\255\255\255\255\255\255\255\255\255\255\041\001\255\255\255\255\ +\255\255\103\003\255\255\232\005\125\004\255\255\255\255\109\003\ +\255\255\255\255\255\255\255\255\255\255\255\255\243\005\255\255\ +\060\001\255\255\199\003\255\255\064\001\255\255\066\001\067\001\ +\068\001\255\255\255\255\255\255\255\255\073\001\074\001\255\255\ +\255\255\000\001\255\255\255\255\080\001\255\255\219\003\220\003\ +\255\255\255\255\255\255\224\003\225\003\226\003\013\001\255\255\ +\092\001\230\003\094\001\255\255\096\001\097\001\255\255\236\003\ +\100\001\255\255\255\255\026\001\255\255\028\001\029\001\255\255\ +\108\001\109\001\000\000\111\001\255\255\255\255\043\006\214\002\ +\255\255\145\003\041\001\255\255\219\002\220\002\221\002\004\004\ +\255\255\000\001\203\004\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\233\002\255\255\060\001\255\255\255\255\ +\063\001\255\255\255\255\255\255\172\003\068\001\255\255\205\003\ +\255\255\255\255\255\255\074\001\255\255\255\255\255\255\255\255\ +\255\255\080\001\255\255\002\003\255\255\255\255\004\003\255\255\ +\255\255\255\255\255\255\255\255\255\255\092\001\255\255\199\003\ +\255\255\096\001\097\001\018\003\234\003\255\255\255\255\237\003\ +\055\001\255\255\057\001\058\001\059\001\108\001\061\001\255\255\ +\111\001\064\001\065\001\219\003\220\003\074\004\255\255\255\255\ +\224\003\225\003\226\003\255\255\255\255\255\255\230\003\255\255\ +\255\255\255\255\081\001\028\005\236\003\255\255\255\255\255\255\ +\255\255\255\255\089\001\090\001\255\255\255\255\255\255\255\255\ +\000\000\255\255\097\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\110\004\255\255\255\255\004\004\108\001\109\001\110\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\125\004\255\255\255\255\255\255\255\255\255\255\255\255\094\003\ +\255\255\000\001\001\001\002\001\003\001\255\255\060\004\255\255\ +\255\255\008\001\009\001\010\001\255\255\067\004\013\001\014\001\ +\255\255\016\001\017\001\018\001\019\001\020\001\021\001\255\255\ +\255\255\024\001\025\001\026\001\027\001\028\001\029\001\255\255\ +\255\255\087\004\128\003\255\255\255\255\036\001\037\001\255\255\ +\255\255\040\001\041\001\042\001\043\001\255\255\000\000\255\255\ +\255\255\048\001\049\001\255\255\125\005\107\004\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\060\001\061\001\255\255\ +\063\001\255\255\255\255\066\001\067\001\068\001\203\004\070\001\ +\145\005\255\255\073\001\074\001\255\255\255\255\255\255\255\255\ +\255\255\080\001\255\255\082\001\255\255\255\255\110\004\181\003\ +\255\255\255\255\255\255\255\255\091\001\092\001\255\255\094\001\ +\095\001\096\001\097\001\255\255\255\255\125\004\255\255\255\255\ +\103\001\255\255\105\001\255\255\203\003\108\001\255\255\244\004\ 
+\111\001\255\255\000\001\255\255\115\001\003\001\255\255\213\003\ +\255\255\255\255\008\001\177\004\255\255\255\255\255\255\013\001\ +\255\255\255\255\255\255\008\005\255\255\019\001\023\001\255\255\ +\255\255\210\005\255\255\255\255\026\001\255\255\028\001\029\001\ +\255\255\255\255\255\255\036\001\255\255\255\255\255\255\028\005\ +\255\255\255\255\255\255\041\001\250\003\255\255\252\003\232\005\ +\214\004\215\004\255\255\255\255\255\255\255\255\055\001\255\255\ +\057\001\058\001\059\001\225\004\061\001\255\255\060\001\064\001\ +\065\001\063\001\255\255\203\004\066\001\067\001\068\001\255\255\ +\061\005\255\255\255\255\241\004\074\001\255\255\255\255\255\255\ +\255\255\255\255\080\001\255\255\255\255\255\255\037\004\038\004\ +\039\004\090\001\255\255\255\255\000\000\255\255\092\001\255\255\ +\097\001\255\255\096\001\097\001\255\255\051\004\255\255\255\255\ +\255\255\255\255\255\255\255\255\109\001\110\001\108\001\255\255\ +\000\001\111\001\255\255\003\001\255\255\255\255\255\255\255\255\ +\255\255\071\004\255\255\033\005\034\005\013\001\014\001\255\255\ +\255\255\017\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\125\005\255\255\026\001\027\001\028\001\029\001\255\255\132\005\ +\255\255\096\004\097\004\098\004\028\005\255\255\255\255\255\255\ +\040\001\041\001\255\255\255\255\145\005\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\074\005\255\255\255\255\077\005\ +\255\255\255\255\255\255\255\255\060\001\255\255\255\255\063\001\ +\255\255\255\255\255\255\067\001\068\001\255\255\255\255\093\005\ +\094\005\073\001\074\001\138\004\255\255\255\255\000\001\101\005\ +\080\001\003\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\013\001\092\001\255\255\094\001\017\001\ +\096\001\097\001\255\255\255\255\255\255\123\005\255\255\255\255\ +\026\001\027\001\028\001\029\001\108\001\210\005\255\255\111\001\ +\255\255\000\000\255\255\115\001\255\255\255\255\180\004\041\001\ +\182\004\255\255\184\004\000\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\232\005\255\255\125\005\255\255\255\255\ +\255\255\255\255\060\001\255\255\255\255\063\001\205\004\206\004\ +\207\004\067\001\068\001\255\255\211\004\212\004\213\004\255\255\ +\074\001\145\005\255\255\255\255\255\255\255\255\080\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\092\001\255\255\094\001\235\004\096\001\097\001\ +\255\255\199\005\055\001\255\255\057\001\058\001\059\001\205\005\ +\061\001\247\004\108\001\064\001\065\001\111\001\255\255\255\255\ +\255\255\215\005\000\005\255\255\255\255\255\255\004\005\221\005\ +\255\255\255\255\255\255\255\255\081\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\089\001\090\001\255\255\255\255\ +\255\255\255\255\210\005\255\255\097\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\109\001\110\001\255\255\255\255\000\001\001\001\002\001\003\001\ +\232\005\255\255\008\006\255\255\008\001\009\001\010\001\255\255\ +\255\255\013\001\014\001\015\001\016\001\017\001\018\001\019\001\ +\020\001\021\001\255\255\255\255\024\001\025\001\026\001\027\001\ +\028\001\029\001\255\255\000\000\255\255\255\255\255\255\255\255\ +\036\001\037\001\255\255\255\255\040\001\041\001\042\001\043\001\ +\044\001\045\001\046\001\255\255\048\001\049\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\058\006\255\255\255\255\255\255\ +\060\001\061\001\255\255\063\001\255\255\255\255\066\001\067\001\ +\068\001\255\255\070\001\071\001\072\001\073\001\074\001\255\255\ 
+\255\255\255\255\255\255\081\006\080\001\083\006\082\001\083\001\ +\084\001\085\001\086\001\255\255\255\255\255\255\255\255\091\001\ +\092\001\255\255\094\001\095\001\096\001\097\001\255\255\255\255\ +\100\001\255\255\255\255\103\001\255\255\105\001\255\255\255\255\ +\108\001\255\255\255\255\111\001\255\255\255\255\255\255\115\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\166\005\255\255\255\255\255\255\013\001\255\255\255\255\255\255\ +\255\255\000\001\255\255\002\001\003\001\004\001\181\005\182\005\ +\183\005\008\001\255\255\028\001\029\001\255\255\013\001\000\000\ +\255\255\255\255\017\001\018\001\019\001\255\255\255\255\255\255\ +\041\001\255\255\200\005\026\001\027\001\028\001\029\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\036\001\255\255\213\005\ +\255\255\040\001\041\001\060\001\255\255\219\005\063\001\255\255\ +\255\255\048\001\049\001\068\001\255\255\255\255\255\255\255\255\ +\255\255\074\001\255\255\255\255\255\255\060\001\236\005\080\001\ +\063\001\255\255\255\255\066\001\067\001\068\001\244\005\070\001\ +\255\255\255\255\073\001\074\001\255\255\255\255\255\255\096\001\ +\255\255\080\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\108\001\091\001\092\001\111\001\094\001\ +\095\001\096\001\255\255\255\255\255\255\100\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\108\001\255\255\255\255\ +\111\001\255\255\255\255\255\255\115\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\044\006\045\006\ +\255\255\255\255\255\255\000\000\255\255\051\006\052\006\053\006\ +\054\006\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\063\006\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\000\001\001\001\002\001\003\001\255\255\ +\078\006\006\001\007\001\008\001\009\001\010\001\011\001\012\001\ +\013\001\014\001\015\001\016\001\017\001\018\001\019\001\020\001\ +\021\001\022\001\255\255\024\001\025\001\026\001\027\001\028\001\ +\029\001\030\001\031\001\255\255\255\255\255\255\255\255\036\001\ +\037\001\255\255\255\255\040\001\041\001\042\001\043\001\044\001\ +\045\001\046\001\047\001\048\001\049\001\050\001\051\001\255\255\ +\053\001\054\001\055\001\056\001\255\255\255\255\059\001\060\001\ +\061\001\062\001\063\001\064\001\065\001\066\001\067\001\068\001\ +\255\255\070\001\071\001\072\001\073\001\074\001\255\255\076\001\ +\255\255\255\255\255\255\080\001\081\001\082\001\083\001\084\001\ +\085\001\086\001\087\001\255\255\089\001\255\255\091\001\092\001\ +\000\000\094\001\095\001\096\001\097\001\098\001\255\255\100\001\ +\101\001\255\255\103\001\104\001\105\001\106\001\255\255\108\001\ +\109\001\255\255\111\001\255\255\255\255\255\255\115\001\000\001\ +\001\001\002\001\003\001\004\001\255\255\006\001\007\001\008\001\ +\009\001\010\001\011\001\012\001\013\001\014\001\015\001\016\001\ +\017\001\018\001\019\001\020\001\021\001\255\255\255\255\024\001\ +\025\001\026\001\027\001\028\001\029\001\030\001\031\001\255\255\ +\255\255\255\255\255\255\036\001\037\001\255\255\255\255\040\001\ +\041\001\042\001\043\001\044\001\045\001\046\001\047\001\048\001\ +\049\001\050\001\051\001\255\255\053\001\054\001\055\001\056\001\ +\255\255\255\255\059\001\060\001\061\001\255\255\063\001\064\001\ +\065\001\066\001\067\001\068\001\255\255\070\001\071\001\072\001\ +\073\001\074\001\255\255\076\001\255\255\255\255\255\255\080\001\ +\081\001\082\001\083\001\084\001\085\001\086\001\087\001\255\255\ +\089\001\255\255\091\001\092\001\000\000\094\001\095\001\096\001\ 
+\097\001\098\001\255\255\100\001\101\001\255\255\103\001\104\001\ +\105\001\106\001\255\255\108\001\109\001\255\255\111\001\255\255\ +\255\255\255\255\115\001\000\001\001\001\002\001\003\001\255\255\ +\255\255\006\001\007\001\008\001\009\001\010\001\011\001\012\001\ +\013\001\014\001\015\001\016\001\017\001\018\001\019\001\020\001\ +\021\001\022\001\255\255\024\001\025\001\026\001\027\001\028\001\ +\029\001\030\001\031\001\255\255\255\255\255\255\255\255\036\001\ +\037\001\255\255\255\255\040\001\041\001\042\001\043\001\044\001\ +\045\001\046\001\047\001\048\001\049\001\050\001\051\001\255\255\ +\053\001\054\001\055\001\056\001\255\255\255\255\059\001\060\001\ +\061\001\255\255\063\001\064\001\065\001\066\001\067\001\068\001\ +\255\255\070\001\071\001\072\001\073\001\074\001\255\255\076\001\ +\255\255\255\255\255\255\080\001\081\001\082\001\083\001\084\001\ +\085\001\086\001\087\001\255\255\089\001\255\255\091\001\092\001\ +\000\000\094\001\095\001\096\001\097\001\098\001\255\255\100\001\ +\101\001\255\255\103\001\104\001\105\001\106\001\255\255\108\001\ +\109\001\255\255\111\001\255\255\255\255\255\255\115\001\255\255\ +\000\001\001\001\002\001\003\001\255\255\255\255\006\001\007\001\ +\008\001\009\001\010\001\011\001\012\001\013\001\014\001\015\001\ +\016\001\017\001\018\001\019\001\020\001\021\001\022\001\255\255\ +\024\001\025\001\026\001\027\001\028\001\029\001\030\001\031\001\ +\255\255\255\255\255\255\255\255\036\001\037\001\255\255\255\255\ +\040\001\041\001\042\001\043\001\044\001\045\001\046\001\047\001\ +\048\001\049\001\050\001\051\001\255\255\053\001\054\001\055\001\ +\056\001\255\255\255\255\059\001\060\001\061\001\255\255\063\001\ +\064\001\065\001\066\001\067\001\068\001\255\255\070\001\071\001\ +\072\001\073\001\074\001\255\255\076\001\255\255\255\255\255\255\ +\080\001\081\001\082\001\083\001\084\001\085\001\086\001\087\001\ +\255\255\089\001\255\255\091\001\092\001\000\000\094\001\095\001\ +\096\001\097\001\098\001\255\255\100\001\101\001\255\255\103\001\ +\104\001\105\001\106\001\255\255\108\001\109\001\255\255\111\001\ +\255\255\255\255\255\255\115\001\000\001\001\001\002\001\003\001\ +\255\255\255\255\006\001\007\001\008\001\009\001\010\001\011\001\ +\012\001\013\001\014\001\015\001\016\001\017\001\018\001\019\001\ +\020\001\021\001\022\001\255\255\024\001\025\001\026\001\027\001\ +\028\001\029\001\030\001\031\001\255\255\255\255\255\255\255\255\ +\036\001\037\001\255\255\255\255\040\001\041\001\042\001\043\001\ +\044\001\045\001\046\001\047\001\048\001\049\001\050\001\051\001\ +\255\255\053\001\054\001\055\001\056\001\255\255\255\255\059\001\ +\060\001\061\001\255\255\063\001\064\001\065\001\066\001\067\001\ +\068\001\255\255\070\001\071\001\072\001\073\001\074\001\255\255\ +\076\001\255\255\255\255\255\255\080\001\081\001\082\001\083\001\ +\084\001\085\001\086\001\087\001\255\255\089\001\255\255\091\001\ +\092\001\000\000\094\001\095\001\096\001\097\001\098\001\255\255\ +\100\001\101\001\255\255\103\001\104\001\105\001\106\001\255\255\ +\108\001\109\001\255\255\111\001\255\255\255\255\255\255\115\001\ +\000\001\001\001\002\001\003\001\255\255\255\255\006\001\007\001\ +\008\001\009\001\010\001\011\001\012\001\013\001\014\001\015\001\ +\016\001\017\001\018\001\019\001\020\001\021\001\022\001\255\255\ +\024\001\025\001\026\001\027\001\028\001\029\001\030\001\031\001\ +\255\255\255\255\255\255\255\255\036\001\037\001\255\255\255\255\ +\040\001\041\001\042\001\043\001\044\001\045\001\046\001\047\001\ +\048\001\049\001\050\001\051\001\255\255\053\001\054\001\055\001\ 
+\056\001\255\255\255\255\059\001\060\001\061\001\255\255\063\001\ +\064\001\065\001\066\001\067\001\068\001\255\255\070\001\071\001\ +\072\001\073\001\074\001\255\255\076\001\255\255\255\255\255\255\ +\080\001\081\001\082\001\083\001\084\001\085\001\086\001\087\001\ +\255\255\089\001\255\255\091\001\092\001\000\000\094\001\095\001\ +\096\001\097\001\098\001\255\255\100\001\101\001\255\255\103\001\ +\104\001\105\001\106\001\255\255\108\001\109\001\255\255\111\001\ +\255\255\255\255\255\255\115\001\255\255\000\001\001\001\002\001\ +\003\001\255\255\255\255\006\001\007\001\008\001\009\001\010\001\ +\011\001\012\001\013\001\014\001\015\001\016\001\017\001\018\001\ +\019\001\020\001\021\001\022\001\255\255\024\001\025\001\026\001\ +\027\001\028\001\029\001\030\001\031\001\255\255\255\255\255\255\ +\255\255\036\001\037\001\255\255\255\255\040\001\041\001\042\001\ +\043\001\044\001\045\001\046\001\047\001\048\001\049\001\050\001\ +\051\001\255\255\053\001\054\001\055\001\056\001\255\255\255\255\ +\059\001\060\001\061\001\255\255\063\001\064\001\065\001\066\001\ +\067\001\068\001\255\255\070\001\071\001\072\001\073\001\074\001\ +\255\255\076\001\255\255\255\255\255\255\080\001\081\001\082\001\ +\083\001\084\001\085\001\086\001\087\001\255\255\089\001\255\255\ +\091\001\092\001\000\000\094\001\095\001\096\001\097\001\098\001\ +\255\255\100\001\101\001\255\255\103\001\104\001\105\001\106\001\ +\255\255\108\001\109\001\255\255\111\001\255\255\255\255\255\255\ +\115\001\000\001\001\001\002\001\003\001\255\255\255\255\006\001\ +\007\001\008\001\009\001\010\001\011\001\012\001\013\001\014\001\ +\015\001\016\001\017\001\018\001\019\001\020\001\021\001\022\001\ +\255\255\024\001\025\001\026\001\027\001\028\001\029\001\030\001\ +\031\001\255\255\255\255\255\255\255\255\036\001\037\001\255\255\ +\255\255\040\001\041\001\042\001\043\001\044\001\045\001\046\001\ +\047\001\048\001\049\001\050\001\051\001\255\255\053\001\054\001\ +\055\001\056\001\255\255\255\255\059\001\060\001\061\001\255\255\ +\063\001\064\001\065\001\066\001\067\001\068\001\255\255\070\001\ +\071\001\072\001\073\001\074\001\255\255\076\001\255\255\255\255\ +\255\255\080\001\081\001\082\001\083\001\084\001\085\001\086\001\ +\087\001\255\255\089\001\255\255\091\001\092\001\000\000\094\001\ +\095\001\096\001\097\001\098\001\255\255\100\001\101\001\255\255\ +\103\001\104\001\105\001\106\001\255\255\108\001\109\001\255\255\ +\111\001\255\255\255\255\255\255\115\001\000\001\001\001\002\001\ +\003\001\255\255\255\255\006\001\007\001\008\001\009\001\010\001\ +\011\001\012\001\013\001\014\001\015\001\016\001\017\001\018\001\ +\019\001\020\001\021\001\022\001\255\255\024\001\025\001\026\001\ +\027\001\028\001\029\001\030\001\031\001\255\255\255\255\255\255\ +\255\255\036\001\037\001\255\255\255\255\040\001\041\001\042\001\ +\043\001\044\001\045\001\046\001\047\001\048\001\049\001\050\001\ +\051\001\255\255\053\001\054\001\055\001\056\001\255\255\255\255\ +\059\001\060\001\061\001\255\255\063\001\064\001\065\001\066\001\ +\067\001\068\001\255\255\070\001\071\001\072\001\073\001\074\001\ +\255\255\076\001\255\255\255\255\255\255\080\001\081\001\082\001\ +\083\001\084\001\085\001\086\001\087\001\255\255\089\001\255\255\ +\091\001\092\001\000\000\094\001\095\001\096\001\097\001\098\001\ +\255\255\100\001\101\001\255\255\103\001\104\001\105\001\106\001\ +\255\255\108\001\109\001\255\255\111\001\255\255\255\255\255\255\ +\115\001\255\255\000\001\001\001\002\001\003\001\255\255\255\255\ +\006\001\007\001\008\001\009\001\010\001\011\001\012\001\013\001\ 
+\014\001\015\001\016\001\017\001\018\001\019\001\020\001\021\001\ +\022\001\255\255\024\001\025\001\026\001\027\001\028\001\029\001\ +\030\001\031\001\255\255\255\255\255\255\255\255\036\001\037\001\ +\255\255\255\255\040\001\041\001\042\001\043\001\044\001\045\001\ +\046\001\047\001\048\001\049\001\050\001\051\001\255\255\053\001\ +\054\001\055\001\056\001\255\255\255\255\059\001\060\001\061\001\ +\255\255\063\001\064\001\065\001\066\001\067\001\068\001\255\255\ +\070\001\071\001\072\001\073\001\074\001\255\255\076\001\255\255\ +\255\255\255\255\080\001\081\001\082\001\083\001\084\001\085\001\ +\086\001\087\001\255\255\089\001\255\255\091\001\092\001\000\000\ +\094\001\095\001\096\001\097\001\098\001\255\255\100\001\101\001\ +\255\255\103\001\104\001\105\001\106\001\255\255\108\001\109\001\ +\255\255\111\001\255\255\255\255\255\255\115\001\000\001\001\001\ +\002\001\003\001\255\255\255\255\006\001\007\001\008\001\009\001\ +\010\001\011\001\012\001\013\001\014\001\015\001\016\001\017\001\ +\018\001\019\001\020\001\021\001\022\001\255\255\024\001\025\001\ +\026\001\027\001\028\001\029\001\030\001\031\001\255\255\255\255\ +\255\255\255\255\036\001\037\001\255\255\255\255\040\001\041\001\ +\042\001\043\001\044\001\045\001\046\001\047\001\048\001\049\001\ +\050\001\051\001\255\255\053\001\054\001\055\001\056\001\255\255\ +\255\255\059\001\060\001\061\001\255\255\063\001\064\001\065\001\ +\066\001\067\001\068\001\255\255\070\001\071\001\072\001\073\001\ +\074\001\255\255\076\001\255\255\255\255\255\255\080\001\081\001\ +\082\001\083\001\084\001\085\001\086\001\087\001\255\255\089\001\ +\255\255\091\001\092\001\000\000\094\001\095\001\096\001\097\001\ +\098\001\255\255\100\001\101\001\255\255\103\001\104\001\105\001\ +\106\001\255\255\108\001\109\001\255\255\111\001\255\255\255\255\ +\255\255\115\001\000\001\001\001\002\001\003\001\255\255\255\255\ +\006\001\007\001\008\001\009\001\010\001\011\001\012\001\013\001\ +\014\001\015\001\016\001\017\001\018\001\019\001\020\001\021\001\ +\022\001\255\255\024\001\025\001\026\001\027\001\028\001\029\001\ +\030\001\031\001\255\255\255\255\255\255\255\255\036\001\037\001\ +\255\255\255\255\040\001\041\001\042\001\043\001\044\001\045\001\ +\046\001\047\001\048\001\049\001\050\001\051\001\255\255\053\001\ +\054\001\055\001\056\001\255\255\255\255\059\001\060\001\061\001\ +\255\255\063\001\064\001\065\001\066\001\067\001\068\001\255\255\ +\070\001\071\001\072\001\073\001\074\001\255\255\076\001\255\255\ +\255\255\255\255\080\001\081\001\082\001\083\001\084\001\085\001\ +\086\001\087\001\255\255\089\001\255\255\091\001\092\001\000\000\ +\094\001\095\001\096\001\097\001\098\001\255\255\100\001\101\001\ +\255\255\103\001\104\001\105\001\106\001\255\255\108\001\109\001\ +\255\255\111\001\255\255\255\255\255\255\115\001\255\255\000\001\ +\001\001\002\001\003\001\004\001\255\255\006\001\007\001\008\001\ +\009\001\010\001\011\001\012\001\013\001\014\001\015\001\016\001\ +\017\001\018\001\019\001\020\001\021\001\255\255\255\255\024\001\ +\025\001\026\001\027\001\028\001\029\001\030\001\031\001\255\255\ +\255\255\255\255\255\255\036\001\037\001\255\255\255\255\040\001\ +\041\001\042\001\043\001\044\001\045\001\046\001\255\255\048\001\ +\049\001\050\001\051\001\255\255\053\001\054\001\055\001\056\001\ +\255\255\255\255\059\001\060\001\061\001\255\255\063\001\064\001\ +\065\001\066\001\067\001\068\001\255\255\070\001\071\001\072\001\ +\073\001\074\001\255\255\076\001\255\255\255\255\255\255\080\001\ +\081\001\082\001\083\001\084\001\085\001\086\001\087\001\255\255\ 
+\089\001\255\255\091\001\092\001\000\000\094\001\095\001\096\001\ +\097\001\098\001\255\255\100\001\101\001\255\255\103\001\104\001\ +\105\001\106\001\255\255\108\001\109\001\255\255\111\001\255\255\ +\255\255\255\255\115\001\000\001\001\001\002\001\003\001\004\001\ +\255\255\006\001\007\001\008\001\009\001\010\001\011\001\012\001\ +\013\001\014\001\015\001\016\001\017\001\018\001\019\001\020\001\ +\021\001\255\255\255\255\024\001\025\001\026\001\027\001\028\001\ +\029\001\030\001\031\001\255\255\255\255\255\255\255\255\036\001\ +\037\001\255\255\255\255\040\001\041\001\042\001\043\001\044\001\ +\045\001\046\001\255\255\048\001\049\001\050\001\051\001\255\255\ +\053\001\054\001\055\001\056\001\255\255\255\255\059\001\060\001\ +\061\001\255\255\063\001\064\001\065\001\066\001\067\001\068\001\ +\255\255\070\001\071\001\072\001\073\001\074\001\255\255\076\001\ +\255\255\255\255\255\255\080\001\081\001\082\001\083\001\084\001\ +\085\001\086\001\087\001\255\255\089\001\255\255\091\001\092\001\ +\000\000\094\001\095\001\096\001\255\255\255\255\255\255\100\001\ +\101\001\255\255\103\001\104\001\105\001\106\001\255\255\108\001\ +\109\001\255\255\111\001\255\255\255\255\255\255\115\001\000\001\ +\001\001\002\001\003\001\004\001\255\255\006\001\007\001\008\001\ +\009\001\010\001\011\001\012\001\013\001\014\001\015\001\016\001\ +\017\001\018\001\019\001\020\001\021\001\255\255\255\255\024\001\ +\025\001\026\001\027\001\028\001\029\001\030\001\031\001\255\255\ +\255\255\255\255\255\255\036\001\037\001\255\255\255\255\040\001\ +\041\001\042\001\043\001\044\001\045\001\046\001\255\255\048\001\ +\049\001\050\001\051\001\255\255\053\001\054\001\055\001\056\001\ +\255\255\255\255\059\001\060\001\061\001\255\255\063\001\064\001\ +\065\001\066\001\067\001\068\001\255\255\070\001\071\001\072\001\ +\073\001\074\001\255\255\076\001\255\255\255\255\255\255\080\001\ +\081\001\082\001\083\001\084\001\085\001\086\001\087\001\255\255\ +\089\001\255\255\091\001\092\001\000\000\094\001\095\001\096\001\ +\255\255\255\255\255\255\100\001\101\001\255\255\103\001\104\001\ +\105\001\106\001\255\255\108\001\109\001\255\255\111\001\255\255\ +\255\255\255\255\115\001\255\255\000\001\001\001\002\001\003\001\ +\004\001\255\255\006\001\007\001\008\001\009\001\010\001\011\001\ +\012\001\013\001\014\001\015\001\016\001\017\001\018\001\019\001\ +\020\001\021\001\255\255\255\255\024\001\025\001\026\001\027\001\ +\028\001\029\001\030\001\031\001\255\255\255\255\255\255\255\255\ +\036\001\037\001\255\255\255\255\040\001\041\001\042\001\043\001\ +\044\001\045\001\046\001\255\255\048\001\049\001\050\001\051\001\ +\255\255\053\001\054\001\055\001\056\001\255\255\255\255\059\001\ +\060\001\061\001\255\255\063\001\064\001\065\001\066\001\067\001\ +\068\001\255\255\070\001\071\001\072\001\073\001\074\001\255\255\ +\076\001\255\255\255\255\255\255\080\001\081\001\082\001\083\001\ +\084\001\085\001\086\001\087\001\255\255\089\001\255\255\091\001\ +\092\001\000\000\094\001\095\001\096\001\255\255\255\255\255\255\ +\100\001\101\001\255\255\103\001\104\001\105\001\106\001\255\255\ +\108\001\109\001\255\255\111\001\255\255\255\255\255\255\115\001\ +\000\001\001\001\002\001\003\001\255\255\255\255\255\255\255\255\ +\008\001\009\001\010\001\255\255\255\255\013\001\014\001\015\001\ +\016\001\017\001\018\001\019\001\020\001\021\001\022\001\255\255\ +\024\001\025\001\026\001\027\001\028\001\029\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\036\001\037\001\255\255\255\255\ +\040\001\041\001\042\001\043\001\044\001\045\001\046\001\047\001\ 
+\048\001\049\001\255\255\051\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\060\001\061\001\255\255\063\001\ +\255\255\255\255\066\001\067\001\068\001\255\255\070\001\071\001\ +\072\001\073\001\074\001\255\255\255\255\255\255\255\255\255\255\ +\080\001\081\001\082\001\083\001\084\001\085\001\086\001\255\255\ +\255\255\089\001\255\255\091\001\092\001\000\000\094\001\095\001\ +\096\001\097\001\098\001\255\255\100\001\255\255\255\255\103\001\ +\104\001\105\001\255\255\255\255\108\001\255\255\255\255\111\001\ +\255\255\255\255\255\255\115\001\000\001\001\001\002\001\003\001\ +\255\255\255\255\255\255\255\255\008\001\009\001\010\001\255\255\ +\255\255\013\001\014\001\015\001\016\001\017\001\018\001\019\001\ +\020\001\021\001\022\001\255\255\024\001\025\001\026\001\027\001\ +\028\001\029\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\036\001\037\001\255\255\255\255\040\001\041\001\042\001\043\001\ +\044\001\045\001\046\001\047\001\048\001\049\001\255\255\051\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\060\001\061\001\255\255\063\001\255\255\255\255\066\001\067\001\ +\068\001\255\255\070\001\071\001\072\001\073\001\074\001\255\255\ +\255\255\255\255\255\255\255\255\080\001\081\001\082\001\083\001\ +\084\001\085\001\086\001\255\255\255\255\089\001\255\255\091\001\ +\092\001\000\000\094\001\095\001\096\001\097\001\098\001\255\255\ +\100\001\255\255\255\255\103\001\104\001\105\001\255\255\255\255\ +\108\001\255\255\255\255\111\001\255\255\255\255\255\255\115\001\ +\255\255\000\001\001\001\002\001\003\001\255\255\255\255\255\255\ +\255\255\008\001\009\001\010\001\255\255\255\255\013\001\014\001\ +\015\001\016\001\017\001\018\001\019\001\020\001\021\001\255\255\ +\255\255\024\001\025\001\026\001\027\001\028\001\029\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\036\001\037\001\255\255\ +\255\255\040\001\041\001\042\001\043\001\044\001\045\001\046\001\ +\255\255\048\001\049\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\060\001\061\001\255\255\ +\063\001\255\255\255\255\066\001\067\001\068\001\255\255\070\001\ +\071\001\072\001\073\001\074\001\255\255\255\255\255\255\255\255\ +\255\255\080\001\255\255\082\001\083\001\084\001\085\001\086\001\ +\255\255\255\255\255\255\255\255\091\001\092\001\000\000\094\001\ +\095\001\096\001\255\255\255\255\255\255\100\001\255\255\255\255\ +\103\001\255\255\105\001\255\255\255\255\108\001\255\255\255\255\ +\111\001\255\255\255\255\255\255\115\001\000\001\001\001\002\001\ +\003\001\255\255\255\255\255\255\255\255\008\001\009\001\010\001\ +\255\255\255\255\013\001\014\001\015\001\016\001\017\001\255\255\ +\019\001\020\001\021\001\255\255\255\255\024\001\025\001\026\001\ +\027\001\028\001\029\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\036\001\037\001\255\255\255\255\040\001\041\001\042\001\ +\043\001\044\001\045\001\046\001\255\255\048\001\049\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\060\001\061\001\255\255\063\001\255\255\255\255\066\001\ +\067\001\068\001\255\255\070\001\071\001\072\001\073\001\074\001\ +\255\255\255\255\255\255\255\255\255\255\080\001\255\255\082\001\ +\083\001\084\001\085\001\086\001\255\255\255\255\255\255\255\255\ +\091\001\092\001\000\000\094\001\095\001\096\001\097\001\255\255\ +\255\255\100\001\255\255\255\255\103\001\255\255\105\001\255\255\ +\255\255\108\001\255\255\255\255\111\001\255\255\255\255\255\255\ +\115\001\000\001\001\001\002\001\003\001\255\255\255\255\255\255\ 
+\255\255\008\001\009\001\010\001\255\255\255\255\013\001\014\001\ +\015\001\016\001\017\001\018\001\019\001\020\001\021\001\255\255\ +\255\255\024\001\025\001\026\001\027\001\028\001\029\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\036\001\037\001\255\255\ +\255\255\040\001\041\001\042\001\043\001\044\001\045\001\046\001\ +\255\255\048\001\049\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\060\001\061\001\255\255\ +\063\001\255\255\255\255\066\001\067\001\068\001\255\255\070\001\ +\071\001\072\001\073\001\074\001\255\255\255\255\255\255\255\255\ +\255\255\080\001\255\255\082\001\083\001\084\001\085\001\086\001\ +\255\255\255\255\255\255\255\255\091\001\092\001\000\000\094\001\ +\095\001\096\001\255\255\255\255\255\255\100\001\255\255\255\255\ +\103\001\255\255\105\001\255\255\255\255\108\001\255\255\255\255\ +\111\001\255\255\255\255\255\255\115\001\255\255\000\001\001\001\ +\002\001\003\001\255\255\255\255\255\255\255\255\008\001\009\001\ +\010\001\255\255\255\255\013\001\014\001\015\001\016\001\017\001\ +\018\001\019\001\020\001\021\001\255\255\255\255\024\001\025\001\ +\026\001\027\001\028\001\029\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\036\001\037\001\255\255\255\255\040\001\041\001\ +\042\001\043\001\044\001\045\001\046\001\255\255\048\001\049\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\060\001\061\001\255\255\063\001\255\255\255\255\ +\066\001\067\001\068\001\255\255\070\001\071\001\072\001\073\001\ +\074\001\255\255\255\255\255\255\255\255\255\255\080\001\255\255\ +\082\001\083\001\084\001\085\001\086\001\255\255\255\255\255\255\ +\255\255\091\001\092\001\000\000\094\001\095\001\096\001\255\255\ +\255\255\255\255\100\001\255\255\255\255\103\001\255\255\105\001\ +\255\255\255\255\108\001\255\255\255\255\111\001\255\255\255\255\ +\255\255\115\001\000\001\001\001\002\001\003\001\255\255\255\255\ +\255\255\255\255\008\001\009\001\010\001\255\255\255\255\013\001\ +\014\001\015\001\016\001\017\001\018\001\019\001\020\001\021\001\ +\255\255\255\255\024\001\025\001\026\001\027\001\028\001\029\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\036\001\037\001\ +\255\255\255\255\040\001\041\001\042\001\043\001\044\001\045\001\ +\046\001\255\255\048\001\049\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\060\001\061\001\ +\255\255\063\001\255\255\255\255\066\001\067\001\068\001\255\255\ +\070\001\071\001\072\001\073\001\074\001\255\255\255\255\255\255\ +\255\255\255\255\080\001\255\255\082\001\083\001\084\001\085\001\ +\086\001\255\255\255\255\255\255\255\255\091\001\092\001\000\000\ +\094\001\095\001\096\001\255\255\255\255\255\255\100\001\255\255\ +\255\255\103\001\255\255\105\001\255\255\255\255\108\001\255\255\ +\255\255\111\001\255\255\255\255\255\255\115\001\000\001\001\001\ +\002\001\003\001\255\255\255\255\255\255\255\255\008\001\009\001\ +\010\001\255\255\255\255\013\001\014\001\015\001\016\001\017\001\ +\018\001\019\001\020\001\021\001\255\255\255\255\024\001\025\001\ +\026\001\027\001\028\001\029\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\036\001\037\001\255\255\255\255\040\001\041\001\ +\042\001\043\001\044\001\045\001\046\001\255\255\048\001\049\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\060\001\061\001\255\255\063\001\255\255\255\255\ +\066\001\067\001\068\001\255\255\070\001\071\001\072\001\073\001\ +\074\001\255\255\255\255\255\255\255\255\255\255\080\001\255\255\ 
+\082\001\083\001\084\001\085\001\086\001\255\255\255\255\255\255\ +\255\255\091\001\092\001\000\000\094\001\095\001\096\001\255\255\ +\255\255\255\255\100\001\255\255\255\255\103\001\255\255\105\001\ +\255\255\255\255\108\001\255\255\255\255\111\001\255\255\255\255\ +\255\255\115\001\255\255\000\001\001\001\002\001\003\001\255\255\ +\255\255\255\255\255\255\008\001\009\001\010\001\255\255\255\255\ +\013\001\014\001\015\001\016\001\017\001\018\001\019\001\020\001\ +\021\001\255\255\255\255\024\001\025\001\026\001\027\001\028\001\ +\029\001\255\255\255\255\255\255\255\255\255\255\255\255\036\001\ +\037\001\255\255\255\255\040\001\041\001\042\001\043\001\044\001\ +\045\001\255\255\255\255\048\001\049\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\060\001\ +\061\001\255\255\063\001\255\255\255\255\066\001\067\001\068\001\ +\255\255\070\001\071\001\072\001\073\001\074\001\255\255\255\255\ +\255\255\255\255\255\255\080\001\255\255\082\001\083\001\084\001\ +\085\001\086\001\255\255\255\255\255\255\255\255\091\001\092\001\ +\000\000\094\001\095\001\096\001\097\001\255\255\255\255\100\001\ +\255\255\255\255\103\001\255\255\105\001\255\255\255\255\108\001\ +\255\255\255\255\111\001\255\255\255\255\255\255\115\001\000\001\ +\001\001\002\001\003\001\255\255\255\255\255\255\255\255\008\001\ +\009\001\010\001\255\255\255\255\013\001\014\001\015\001\016\001\ +\017\001\018\001\019\001\020\001\021\001\255\255\255\255\024\001\ +\025\001\026\001\027\001\028\001\029\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\036\001\037\001\255\255\255\255\040\001\ +\041\001\042\001\043\001\044\001\045\001\255\255\255\255\048\001\ +\049\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\060\001\061\001\255\255\063\001\255\255\ +\255\255\066\001\067\001\068\001\255\255\070\001\071\001\072\001\ +\073\001\074\001\255\255\255\255\255\255\255\255\255\255\080\001\ +\255\255\082\001\083\001\084\001\085\001\086\001\255\255\255\255\ +\255\255\255\255\091\001\092\001\000\000\094\001\095\001\096\001\ +\097\001\255\255\255\255\100\001\255\255\255\255\103\001\255\255\ +\105\001\255\255\255\255\108\001\255\255\255\255\111\001\255\255\ +\255\255\255\255\115\001\000\001\001\001\002\001\003\001\255\255\ +\255\255\255\255\255\255\008\001\009\001\010\001\255\255\255\255\ +\013\001\014\001\015\001\016\001\017\001\018\001\019\001\020\001\ +\021\001\255\255\255\255\024\001\025\001\026\001\027\001\028\001\ +\029\001\255\255\255\255\255\255\255\255\255\255\255\255\036\001\ +\037\001\255\255\255\255\040\001\041\001\042\001\043\001\044\001\ +\045\001\255\255\255\255\048\001\049\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\060\001\ +\061\001\255\255\063\001\255\255\255\255\066\001\067\001\068\001\ +\255\255\070\001\071\001\072\001\073\001\074\001\255\255\255\255\ +\255\255\255\255\255\255\080\001\255\255\082\001\083\001\084\001\ +\085\001\086\001\255\255\255\255\255\255\255\255\091\001\092\001\ +\000\000\094\001\095\001\096\001\097\001\255\255\255\255\100\001\ +\255\255\255\255\103\001\255\255\105\001\255\255\255\255\108\001\ +\255\255\255\255\111\001\255\255\255\255\255\255\115\001\255\255\ +\000\001\001\001\002\001\003\001\255\255\255\255\255\255\255\255\ +\008\001\009\001\010\001\255\255\255\255\013\001\014\001\015\001\ +\016\001\017\001\018\001\019\001\020\001\021\001\255\255\255\255\ +\024\001\025\001\026\001\027\001\028\001\029\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\036\001\037\001\255\255\255\255\ 
+\040\001\041\001\042\001\043\001\044\001\045\001\255\255\255\255\ +\048\001\049\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\060\001\061\001\255\255\063\001\ +\255\255\255\255\066\001\067\001\068\001\255\255\070\001\071\001\ +\072\001\073\001\074\001\255\255\255\255\255\255\255\255\255\255\ +\080\001\255\255\082\001\083\001\084\001\085\001\086\001\255\255\ +\255\255\255\255\255\255\091\001\092\001\000\000\094\001\095\001\ +\096\001\097\001\255\255\255\255\100\001\255\255\255\255\103\001\ +\255\255\105\001\255\255\255\255\108\001\255\255\255\255\111\001\ +\255\255\255\255\255\255\115\001\000\001\001\001\002\001\003\001\ +\255\255\255\255\255\255\255\255\255\255\009\001\010\001\255\255\ +\255\255\013\001\014\001\015\001\016\001\017\001\018\001\019\001\ +\020\001\021\001\255\255\255\255\024\001\025\001\026\001\027\001\ +\028\001\029\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\036\001\037\001\255\255\255\255\040\001\041\001\042\001\043\001\ +\044\001\045\001\046\001\255\255\048\001\049\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\060\001\061\001\255\255\063\001\255\255\255\255\066\001\067\001\ +\068\001\255\255\070\001\071\001\072\001\073\001\074\001\255\255\ +\255\255\255\255\255\255\255\255\080\001\255\255\082\001\083\001\ +\084\001\085\001\086\001\255\255\255\255\255\255\255\255\091\001\ +\092\001\000\000\094\001\095\001\096\001\097\001\255\255\255\255\ +\100\001\255\255\255\255\103\001\255\255\105\001\255\255\255\255\ +\108\001\255\255\255\255\111\001\255\255\255\255\255\255\115\001\ +\000\001\001\001\002\001\003\001\255\255\255\255\255\255\255\255\ +\255\255\009\001\010\001\255\255\255\255\013\001\014\001\015\001\ +\016\001\017\001\018\001\019\001\020\001\021\001\255\255\255\255\ +\024\001\025\001\026\001\027\001\028\001\029\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\036\001\037\001\255\255\255\255\ +\040\001\041\001\042\001\043\001\044\001\045\001\046\001\255\255\ +\048\001\049\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\060\001\061\001\255\255\063\001\ +\255\255\255\255\066\001\067\001\068\001\255\255\070\001\071\001\ +\072\001\073\001\074\001\255\255\255\255\255\255\255\255\255\255\ +\080\001\255\255\082\001\083\001\084\001\085\001\086\001\255\255\ +\255\255\255\255\255\255\091\001\092\001\000\000\094\001\095\001\ +\096\001\097\001\255\255\255\255\100\001\255\255\255\255\103\001\ +\255\255\105\001\255\255\255\255\108\001\255\255\255\255\111\001\ +\255\255\255\255\255\255\115\001\255\255\000\001\001\001\002\001\ +\003\001\255\255\255\255\255\255\255\255\255\255\009\001\010\001\ +\255\255\255\255\013\001\014\001\015\001\016\001\017\001\018\001\ +\019\001\020\001\021\001\255\255\255\255\024\001\025\001\026\001\ +\027\001\028\001\029\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\036\001\037\001\255\255\255\255\040\001\041\001\042\001\ +\043\001\044\001\045\001\046\001\255\255\048\001\049\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\060\001\061\001\255\255\063\001\255\255\255\255\066\001\ +\067\001\068\001\255\255\070\001\071\001\072\001\073\001\074\001\ +\255\255\255\255\255\255\255\255\255\255\080\001\255\255\082\001\ +\083\001\084\001\085\001\086\001\255\255\255\255\255\255\255\255\ +\091\001\092\001\000\000\094\001\095\001\096\001\097\001\255\255\ +\255\255\100\001\255\255\255\255\103\001\255\255\105\001\255\255\ +\255\255\108\001\255\255\255\255\111\001\255\255\255\255\255\255\ 
+\115\001\000\001\001\001\002\001\003\001\255\255\255\255\255\255\ +\255\255\008\001\009\001\010\001\255\255\255\255\013\001\014\001\ +\015\001\016\001\017\001\018\001\019\001\020\001\021\001\255\255\ +\255\255\024\001\025\001\026\001\027\001\028\001\029\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\036\001\037\001\255\255\ +\255\255\040\001\041\001\042\001\043\001\044\001\255\255\255\255\ +\255\255\048\001\049\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\060\001\061\001\255\255\ +\063\001\255\255\255\255\066\001\067\001\068\001\255\255\070\001\ +\071\001\072\001\073\001\074\001\255\255\255\255\255\255\255\255\ +\255\255\080\001\255\255\082\001\255\255\084\001\085\001\086\001\ +\255\255\255\255\255\255\255\255\091\001\092\001\000\000\094\001\ +\095\001\096\001\097\001\255\255\255\255\255\255\255\255\255\255\ +\103\001\255\255\105\001\255\255\255\255\108\001\255\255\255\255\ +\111\001\255\255\255\255\255\255\115\001\000\001\001\001\002\001\ +\003\001\255\255\255\255\255\255\255\255\008\001\009\001\010\001\ +\255\255\255\255\013\001\014\001\015\001\016\001\017\001\018\001\ +\019\001\020\001\021\001\255\255\255\255\024\001\025\001\026\001\ +\027\001\028\001\029\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\036\001\037\001\255\255\255\255\040\001\041\001\042\001\ +\043\001\044\001\255\255\255\255\255\255\048\001\049\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\060\001\061\001\255\255\063\001\255\255\255\255\066\001\ +\067\001\068\001\255\255\070\001\071\001\072\001\073\001\074\001\ +\255\255\255\255\255\255\255\255\255\255\080\001\255\255\082\001\ +\255\255\084\001\085\001\086\001\255\255\255\255\255\255\255\255\ +\091\001\092\001\000\000\094\001\095\001\096\001\097\001\255\255\ +\255\255\255\255\255\255\255\255\103\001\255\255\105\001\255\255\ +\255\255\108\001\255\255\255\255\111\001\255\255\255\255\255\255\ +\115\001\255\255\000\001\001\001\002\001\003\001\255\255\255\255\ +\255\255\255\255\008\001\009\001\010\001\255\255\255\255\013\001\ +\014\001\015\001\016\001\017\001\018\001\019\001\020\001\021\001\ +\255\255\255\255\024\001\025\001\026\001\027\001\028\001\029\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\036\001\037\001\ +\255\255\255\255\040\001\041\001\042\001\043\001\044\001\255\255\ +\255\255\255\255\048\001\049\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\060\001\061\001\ +\255\255\063\001\255\255\255\255\066\001\067\001\068\001\255\255\ +\070\001\071\001\072\001\073\001\074\001\255\255\255\255\255\255\ +\255\255\255\255\080\001\255\255\082\001\255\255\084\001\085\001\ +\086\001\255\255\255\255\255\255\255\255\091\001\092\001\000\000\ +\094\001\095\001\096\001\097\001\255\255\255\255\255\255\255\255\ +\255\255\103\001\255\255\105\001\255\255\255\255\108\001\255\255\ +\255\255\111\001\255\255\255\255\255\255\115\001\000\001\001\001\ +\002\001\003\001\255\255\255\255\255\255\255\255\008\001\009\001\ +\010\001\255\255\255\255\013\001\014\001\015\001\016\001\017\001\ +\018\001\019\001\020\001\021\001\255\255\255\255\024\001\025\001\ +\026\001\027\001\028\001\029\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\036\001\037\001\255\255\255\255\040\001\041\001\ +\042\001\043\001\044\001\255\255\255\255\255\255\048\001\049\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\060\001\061\001\255\255\063\001\255\255\255\255\ +\066\001\067\001\068\001\255\255\070\001\071\001\072\001\073\001\ 
+\074\001\255\255\255\255\255\255\255\255\255\255\080\001\255\255\ +\082\001\255\255\084\001\085\001\086\001\255\255\255\255\255\255\ +\255\255\091\001\092\001\000\000\094\001\095\001\096\001\097\001\ +\255\255\255\255\255\255\255\255\255\255\103\001\255\255\105\001\ +\255\255\255\255\108\001\255\255\255\255\111\001\255\255\255\255\ +\255\255\115\001\000\001\001\001\002\001\003\001\255\255\255\255\ +\255\255\255\255\008\001\009\001\010\001\255\255\255\255\013\001\ +\014\001\015\001\016\001\017\001\018\001\019\001\020\001\021\001\ +\255\255\255\255\024\001\025\001\026\001\027\001\028\001\029\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\036\001\037\001\ +\255\255\255\255\040\001\041\001\042\001\043\001\044\001\255\255\ +\255\255\255\255\048\001\049\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\060\001\061\001\ +\255\255\063\001\255\255\255\255\066\001\067\001\068\001\255\255\ +\070\001\071\001\072\001\073\001\074\001\255\255\255\255\255\255\ +\255\255\255\255\080\001\255\255\082\001\255\255\084\001\085\001\ +\086\001\255\255\255\255\255\255\255\255\091\001\092\001\000\000\ +\094\001\095\001\096\001\097\001\255\255\255\255\255\255\255\255\ +\255\255\103\001\255\255\105\001\255\255\255\255\108\001\255\255\ +\255\255\111\001\255\255\255\255\255\255\115\001\255\255\000\001\ +\001\001\002\001\003\001\255\255\255\255\255\255\255\255\008\001\ +\009\001\010\001\255\255\255\255\013\001\014\001\015\001\016\001\ +\017\001\018\001\019\001\020\001\021\001\255\255\255\255\024\001\ +\025\001\026\001\027\001\028\001\029\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\036\001\037\001\255\255\255\255\040\001\ +\041\001\042\001\043\001\044\001\255\255\255\255\255\255\048\001\ +\049\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\060\001\061\001\255\255\063\001\255\255\ +\255\255\066\001\067\001\068\001\255\255\070\001\071\001\072\001\ +\073\001\074\001\255\255\255\255\255\255\255\255\255\255\080\001\ +\255\255\082\001\255\255\084\001\085\001\086\001\255\255\255\255\ +\255\255\255\255\091\001\092\001\000\000\094\001\095\001\096\001\ +\097\001\255\255\255\255\255\255\255\255\255\255\103\001\255\255\ +\105\001\255\255\255\255\108\001\255\255\255\255\111\001\255\255\ +\255\255\255\255\115\001\000\001\001\001\002\001\003\001\255\255\ +\255\255\255\255\255\255\008\001\009\001\010\001\255\255\255\255\ +\013\001\014\001\015\001\016\001\017\001\018\001\019\001\020\001\ +\021\001\255\255\255\255\024\001\025\001\026\001\027\001\028\001\ +\029\001\255\255\255\255\255\255\255\255\255\255\255\255\036\001\ +\037\001\255\255\255\255\040\001\041\001\042\001\043\001\044\001\ +\045\001\046\001\255\255\048\001\049\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\060\001\ +\061\001\255\255\255\255\255\255\255\255\066\001\067\001\068\001\ +\255\255\070\001\255\255\255\255\073\001\074\001\255\255\255\255\ +\255\255\255\255\255\255\080\001\255\255\082\001\255\255\255\255\ +\255\255\086\001\255\255\255\255\255\255\255\255\091\001\092\001\ +\000\000\094\001\095\001\096\001\097\001\255\255\255\255\100\001\ +\255\255\255\255\103\001\255\255\105\001\255\255\255\255\108\001\ +\255\255\255\255\111\001\255\255\255\255\255\255\115\001\000\001\ +\001\001\002\001\003\001\255\255\255\255\255\255\255\255\008\001\ +\009\001\010\001\255\255\255\255\013\001\014\001\255\255\016\001\ +\017\001\018\001\019\001\020\001\021\001\255\255\255\255\024\001\ +\025\001\026\001\027\001\028\001\029\001\255\255\255\255\255\255\ 
+\255\255\255\255\255\255\036\001\037\001\255\255\255\255\040\001\ +\041\001\042\001\255\255\255\255\255\255\255\255\255\255\048\001\ +\049\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\060\001\061\001\255\255\063\001\255\255\ +\255\255\255\255\067\001\068\001\255\255\070\001\255\255\255\255\ +\073\001\074\001\255\255\255\255\255\255\255\255\255\255\080\001\ +\255\255\082\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\091\001\092\001\000\000\094\001\095\001\096\001\ +\097\001\255\255\255\255\255\255\255\255\255\255\103\001\255\255\ +\105\001\255\255\255\255\108\001\255\255\255\255\111\001\255\255\ +\255\255\255\255\115\001\255\255\000\001\001\001\002\001\003\001\ +\255\255\255\255\255\255\255\255\008\001\009\001\010\001\255\255\ +\255\255\013\001\014\001\255\255\016\001\017\001\018\001\019\001\ +\020\001\021\001\255\255\255\255\024\001\025\001\026\001\027\001\ +\028\001\029\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\036\001\037\001\255\255\255\255\040\001\041\001\042\001\255\255\ +\255\255\255\255\255\255\255\255\048\001\049\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\060\001\061\001\255\255\063\001\255\255\255\255\255\255\067\001\ +\068\001\255\255\070\001\255\255\255\255\073\001\074\001\255\255\ +\255\255\255\255\255\255\255\255\080\001\255\255\082\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\091\001\ +\092\001\000\000\094\001\095\001\096\001\097\001\255\255\255\255\ +\255\255\255\255\255\255\103\001\255\255\105\001\255\255\255\255\ +\108\001\255\255\255\255\111\001\255\255\255\255\255\255\115\001\ +\000\001\001\001\002\001\003\001\255\255\255\255\255\255\255\255\ +\008\001\009\001\010\001\255\255\255\255\013\001\014\001\255\255\ +\016\001\017\001\018\001\019\001\020\001\021\001\255\255\255\255\ +\024\001\025\001\026\001\027\001\028\001\029\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\036\001\037\001\255\255\255\255\ +\040\001\041\001\042\001\255\255\255\255\255\255\255\255\255\255\ +\048\001\049\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\060\001\061\001\255\255\063\001\ +\255\255\255\255\255\255\067\001\068\001\255\255\070\001\255\255\ +\255\255\073\001\074\001\255\255\255\255\255\255\255\255\255\255\ +\080\001\000\000\082\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\091\001\092\001\255\255\094\001\095\001\ +\096\001\097\001\255\255\255\255\255\255\255\255\255\255\103\001\ +\255\255\105\001\255\255\255\255\108\001\255\255\255\255\111\001\ +\255\255\255\255\255\255\115\001\000\001\001\001\002\001\003\001\ +\255\255\255\255\255\255\255\255\008\001\009\001\010\001\255\255\ +\255\255\013\001\014\001\255\255\016\001\017\001\018\001\019\001\ +\020\001\021\001\255\255\255\255\024\001\025\001\026\001\027\001\ +\028\001\029\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\036\001\037\001\255\255\255\255\040\001\041\001\042\001\255\255\ +\255\255\255\255\255\255\255\255\048\001\049\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\060\001\061\001\255\255\063\001\255\255\255\255\255\255\067\001\ +\068\001\000\000\070\001\255\255\255\255\073\001\074\001\255\255\ +\255\255\255\255\255\255\255\255\080\001\255\255\082\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\091\001\ +\092\001\255\255\094\001\095\001\096\001\097\001\255\255\255\255\ +\255\255\255\255\255\255\103\001\255\255\105\001\255\255\255\255\ 
+\108\001\255\255\255\255\111\001\255\255\255\255\255\255\115\001\ +\255\255\000\001\001\001\002\001\003\001\255\255\255\255\255\255\ +\255\255\008\001\009\001\010\001\255\255\255\255\013\001\014\001\ +\255\255\016\001\017\001\018\001\019\001\020\001\021\001\255\255\ +\255\255\024\001\025\001\026\001\027\001\028\001\029\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\036\001\037\001\255\255\ +\255\255\040\001\041\001\042\001\255\255\255\255\255\255\255\255\ +\255\255\048\001\049\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\000\000\255\255\255\255\255\255\060\001\061\001\255\255\ +\063\001\255\255\255\255\255\255\067\001\068\001\255\255\070\001\ +\255\255\255\255\073\001\074\001\255\255\255\255\255\255\255\255\ +\255\255\080\001\255\255\082\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\091\001\092\001\255\255\094\001\ +\095\001\096\001\097\001\255\255\255\255\255\255\255\255\255\255\ +\103\001\000\001\105\001\255\255\003\001\108\001\255\255\255\255\ +\111\001\008\001\009\001\010\001\115\001\255\255\013\001\014\001\ +\255\255\016\001\017\001\018\001\019\001\020\001\021\001\255\255\ +\255\255\024\001\025\001\026\001\255\255\028\001\029\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\037\001\255\255\ +\255\255\040\001\041\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\048\001\049\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\000\000\255\255\255\255\255\255\060\001\255\255\255\255\ +\063\001\255\255\255\255\255\255\067\001\068\001\255\255\070\001\ +\255\255\255\255\073\001\074\001\255\255\255\255\255\255\255\255\ +\255\255\080\001\255\255\082\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\091\001\092\001\255\255\094\001\ +\095\001\096\001\097\001\255\255\255\255\255\255\255\255\255\255\ +\103\001\000\001\105\001\255\255\003\001\108\001\255\255\255\255\ +\111\001\008\001\009\001\010\001\115\001\255\255\013\001\014\001\ +\255\255\016\001\017\001\018\001\019\001\020\001\021\001\255\255\ +\255\255\024\001\025\001\026\001\255\255\028\001\029\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\037\001\255\255\ +\255\255\040\001\041\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\048\001\049\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\000\000\255\255\255\255\255\255\060\001\255\255\255\255\ +\063\001\255\255\255\255\255\255\067\001\068\001\255\255\070\001\ +\255\255\255\255\073\001\074\001\255\255\255\255\255\255\255\255\ +\255\255\080\001\255\255\082\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\091\001\092\001\255\255\094\001\ +\095\001\096\001\097\001\255\255\255\255\255\255\255\255\255\255\ +\103\001\000\001\105\001\255\255\003\001\108\001\255\255\255\255\ +\111\001\008\001\255\255\010\001\115\001\255\255\013\001\014\001\ +\255\255\016\001\017\001\018\001\019\001\020\001\021\001\255\255\ +\255\255\024\001\025\001\026\001\255\255\028\001\029\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\037\001\255\255\ +\255\255\040\001\041\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\048\001\049\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\000\000\255\255\255\255\255\255\060\001\255\255\255\255\ +\063\001\255\255\255\255\255\255\067\001\068\001\255\255\070\001\ +\255\255\255\255\073\001\074\001\255\255\255\255\255\255\255\255\ +\255\255\080\001\255\255\255\255\255\255\255\255\255\255\000\000\ +\255\255\255\255\255\255\255\255\091\001\092\001\255\255\094\001\ +\095\001\096\001\097\001\255\255\255\255\255\255\255\255\255\255\ 
+\103\001\000\001\105\001\255\255\003\001\108\001\255\255\255\255\ +\111\001\008\001\255\255\010\001\115\001\255\255\013\001\014\001\ +\255\255\016\001\017\001\018\001\019\001\020\001\021\001\255\255\ +\255\255\024\001\025\001\026\001\255\255\028\001\029\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\037\001\255\255\ +\255\255\040\001\041\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\048\001\049\001\255\255\000\000\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\060\001\255\255\255\255\ +\063\001\255\255\255\255\255\255\067\001\068\001\255\255\070\001\ +\255\255\255\255\073\001\074\001\255\255\255\255\255\255\255\255\ +\255\255\080\001\000\000\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\091\001\092\001\255\255\094\001\ +\095\001\096\001\097\001\255\255\255\255\255\255\255\255\255\255\ +\103\001\000\001\105\001\255\255\003\001\108\001\255\255\255\255\ +\111\001\008\001\255\255\010\001\115\001\255\255\013\001\014\001\ +\255\255\016\001\017\001\018\001\019\001\020\001\021\001\255\255\ +\255\255\024\001\025\001\026\001\255\255\028\001\029\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\037\001\255\255\ +\255\255\040\001\041\001\255\255\255\255\255\255\255\255\000\000\ +\255\255\048\001\049\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\060\001\255\255\255\255\ +\063\001\255\255\255\255\255\255\067\001\068\001\255\255\070\001\ +\255\255\255\255\073\001\074\001\255\255\255\255\255\255\255\255\ +\255\255\080\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\091\001\092\001\255\255\094\001\ +\095\001\096\001\097\001\255\255\255\255\255\255\255\255\255\255\ +\103\001\000\001\105\001\255\255\003\001\108\001\255\255\255\255\ +\111\001\008\001\255\255\010\001\115\001\255\255\013\001\014\001\ +\255\255\016\001\017\001\018\001\019\001\020\001\021\001\255\255\ +\255\255\024\001\025\001\026\001\255\255\028\001\029\001\000\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\037\001\255\255\ +\255\255\040\001\041\001\255\255\013\001\255\255\255\255\000\000\ +\023\001\048\001\049\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\026\001\255\255\028\001\029\001\060\001\255\255\255\255\ +\063\001\255\255\255\255\255\255\067\001\068\001\255\255\070\001\ +\041\001\255\255\073\001\074\001\255\255\000\000\255\255\255\255\ +\055\001\080\001\057\001\058\001\059\001\255\255\061\001\255\255\ +\255\255\064\001\065\001\060\001\091\001\092\001\063\001\094\001\ +\095\001\096\001\097\001\068\001\000\001\255\255\255\255\003\001\ +\103\001\074\001\105\001\255\255\008\001\108\001\010\001\080\001\ +\111\001\013\001\014\001\090\001\115\001\017\001\255\255\019\001\ +\020\001\021\001\097\001\092\001\024\001\025\001\026\001\096\001\ +\028\001\029\001\000\001\255\255\255\255\003\001\109\001\110\001\ +\255\255\037\001\255\255\108\001\040\001\041\001\111\001\013\001\ +\255\255\255\255\000\000\255\255\048\001\049\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\026\001\255\255\028\001\029\001\ +\060\001\255\255\255\255\063\001\255\255\255\255\255\255\067\001\ +\068\001\255\255\070\001\041\001\255\255\073\001\074\001\255\255\ +\000\000\053\001\255\255\055\001\080\001\057\001\058\001\059\001\ +\255\255\061\001\255\255\255\255\064\001\065\001\060\001\091\001\ +\092\001\255\255\094\001\095\001\096\001\097\001\068\001\000\001\ +\255\255\255\255\003\001\103\001\074\001\105\001\255\255\008\001\ +\108\001\010\001\080\001\111\001\013\001\014\001\090\001\115\001\ 
+\017\001\255\255\019\001\020\001\021\001\097\001\092\001\024\001\ +\025\001\026\001\096\001\028\001\029\001\255\255\255\255\255\255\ +\255\255\109\001\110\001\255\255\037\001\255\255\108\001\040\001\ +\041\001\111\001\255\255\255\255\255\255\000\000\255\255\048\001\ +\049\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\060\001\255\255\255\255\063\001\255\255\ +\255\255\255\255\067\001\068\001\255\255\070\001\255\255\255\255\ +\073\001\074\001\255\255\255\255\255\255\255\255\255\255\080\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\091\001\092\001\255\255\094\001\095\001\096\001\ +\097\001\255\255\255\255\255\255\255\255\255\255\103\001\000\001\ +\105\001\255\255\003\001\108\001\255\255\255\255\111\001\008\001\ +\255\255\010\001\115\001\255\255\013\001\014\001\255\255\255\255\ +\017\001\255\255\019\001\020\001\021\001\255\255\255\255\024\001\ +\025\001\026\001\255\255\028\001\029\001\000\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\037\001\255\255\255\255\040\001\ +\041\001\255\255\013\001\255\255\255\255\000\000\255\255\048\001\ +\049\001\255\255\255\255\255\255\255\255\255\255\255\255\026\001\ +\255\255\028\001\029\001\060\001\255\255\255\255\063\001\255\255\ +\255\255\255\255\067\001\068\001\255\255\070\001\041\001\255\255\ +\073\001\074\001\255\255\000\000\255\255\255\255\055\001\080\001\ +\057\001\058\001\059\001\255\255\061\001\255\255\255\255\064\001\ +\065\001\060\001\091\001\092\001\255\255\094\001\095\001\096\001\ +\097\001\068\001\000\001\255\255\255\255\003\001\103\001\074\001\ +\105\001\255\255\008\001\108\001\010\001\080\001\111\001\013\001\ +\014\001\090\001\115\001\017\001\255\255\019\001\020\001\021\001\ +\097\001\092\001\024\001\025\001\026\001\096\001\028\001\029\001\ +\000\001\255\255\255\255\255\255\109\001\110\001\255\255\037\001\ +\255\255\108\001\040\001\041\001\111\001\013\001\255\255\255\255\ +\000\000\255\255\048\001\049\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\026\001\255\255\028\001\029\001\060\001\255\255\ +\255\255\063\001\255\255\255\255\255\255\067\001\068\001\255\255\ +\070\001\041\001\255\255\073\001\074\001\255\255\000\000\255\255\ +\255\255\255\255\080\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\060\001\091\001\092\001\255\255\ +\094\001\095\001\096\001\097\001\068\001\000\001\255\255\255\255\ +\003\001\103\001\074\001\105\001\255\255\008\001\108\001\010\001\ +\080\001\111\001\013\001\014\001\255\255\115\001\017\001\255\255\ +\019\001\020\001\021\001\255\255\092\001\024\001\025\001\026\001\ +\096\001\028\001\029\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\037\001\255\255\108\001\040\001\041\001\111\001\ +\255\255\255\255\255\255\000\000\255\255\048\001\049\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\060\001\255\255\255\255\063\001\255\255\255\255\255\255\ +\067\001\068\001\255\255\070\001\255\255\255\255\073\001\074\001\ +\255\255\255\255\255\255\255\255\255\255\080\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\091\001\092\001\255\255\094\001\095\001\096\001\097\001\255\255\ +\255\255\255\255\255\255\255\255\103\001\000\001\105\001\255\255\ +\003\001\108\001\255\255\255\255\111\001\008\001\255\255\010\001\ +\115\001\255\255\013\001\014\001\255\255\255\255\017\001\255\255\ +\019\001\020\001\021\001\255\255\255\255\024\001\025\001\026\001\ +\255\255\028\001\029\001\000\001\255\255\255\255\255\255\255\255\ 
+\255\255\255\255\037\001\255\255\255\255\040\001\041\001\255\255\ +\013\001\255\255\255\255\000\000\255\255\048\001\049\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\026\001\255\255\028\001\ +\029\001\060\001\255\255\255\255\063\001\255\255\255\255\255\255\ +\067\001\068\001\255\255\070\001\041\001\255\255\073\001\074\001\ +\255\255\000\000\255\255\255\255\255\255\080\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\060\001\ +\091\001\092\001\255\255\094\001\095\001\096\001\097\001\068\001\ +\000\001\255\255\255\255\003\001\103\001\074\001\105\001\255\255\ +\008\001\108\001\010\001\080\001\111\001\013\001\014\001\255\255\ +\115\001\017\001\255\255\019\001\020\001\021\001\255\255\092\001\ +\024\001\025\001\026\001\096\001\028\001\029\001\000\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\037\001\255\255\108\001\ +\040\001\041\001\111\001\013\001\255\255\255\255\000\000\255\255\ +\048\001\049\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\026\001\255\255\028\001\029\001\060\001\255\255\255\255\063\001\ +\255\255\255\255\255\255\067\001\068\001\255\255\070\001\041\001\ +\255\255\073\001\074\001\255\255\255\255\255\255\255\255\255\255\ +\080\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\060\001\091\001\092\001\255\255\094\001\095\001\ +\096\001\097\001\068\001\000\001\255\255\255\255\003\001\103\001\ +\074\001\105\001\255\255\008\001\108\001\010\001\080\001\111\001\ +\013\001\014\001\255\255\115\001\017\001\255\255\019\001\020\001\ +\021\001\255\255\092\001\024\001\025\001\026\001\096\001\028\001\ +\029\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\037\001\255\255\108\001\040\001\041\001\111\001\255\255\255\255\ +\255\255\000\000\255\255\048\001\049\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\060\001\ +\255\255\255\255\063\001\255\255\255\255\255\255\067\001\068\001\ +\255\255\070\001\255\255\255\255\073\001\074\001\255\255\255\255\ +\255\255\255\255\255\255\080\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\091\001\092\001\ +\255\255\094\001\095\001\096\001\097\001\255\255\255\255\255\255\ +\255\255\255\255\103\001\000\001\105\001\255\255\003\001\108\001\ +\255\255\255\255\111\001\008\001\255\255\010\001\115\001\255\255\ +\013\001\014\001\255\255\255\255\017\001\255\255\019\001\020\001\ +\021\001\255\255\255\255\024\001\025\001\026\001\255\255\028\001\ +\029\001\000\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\037\001\255\255\255\255\040\001\041\001\255\255\013\001\255\255\ +\255\255\000\000\255\255\048\001\049\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\026\001\255\255\028\001\029\001\060\001\ +\255\255\255\255\063\001\255\255\255\255\255\255\067\001\068\001\ +\255\255\070\001\041\001\255\255\073\001\074\001\255\255\255\255\ +\255\255\255\255\255\255\080\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\060\001\091\001\092\001\ +\255\255\094\001\095\001\096\001\097\001\068\001\000\001\255\255\ +\255\255\003\001\103\001\074\001\105\001\255\255\008\001\108\001\ +\010\001\080\001\111\001\013\001\014\001\255\255\115\001\017\001\ +\255\255\019\001\020\001\021\001\255\255\092\001\024\001\025\001\ +\026\001\096\001\028\001\029\001\006\001\255\255\008\001\255\255\ +\255\255\255\255\255\255\037\001\255\255\108\001\040\001\041\001\ +\111\001\255\255\255\255\255\255\000\000\255\255\048\001\049\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ 
+\255\255\255\255\060\001\255\255\255\255\063\001\255\255\255\255\ +\255\255\067\001\068\001\255\255\070\001\255\255\255\255\073\001\ +\074\001\255\255\255\255\255\255\255\255\055\001\080\001\057\001\ +\058\001\059\001\255\255\061\001\255\255\255\255\064\001\065\001\ +\255\255\091\001\092\001\255\255\094\001\095\001\096\001\097\001\ +\255\255\000\001\255\255\255\255\003\001\103\001\255\255\105\001\ +\255\255\008\001\108\001\010\001\255\255\111\001\013\001\014\001\ +\090\001\115\001\017\001\255\255\019\001\020\001\021\001\097\001\ +\255\255\024\001\025\001\026\001\255\255\028\001\029\001\255\255\ +\255\255\255\255\255\255\109\001\110\001\255\255\037\001\255\255\ +\255\255\040\001\041\001\255\255\255\255\255\255\255\255\000\000\ +\255\255\048\001\049\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\060\001\255\255\255\255\ +\063\001\255\255\255\255\255\255\067\001\068\001\255\255\070\001\ +\255\255\255\255\073\001\074\001\255\255\255\255\255\255\255\255\ +\255\255\080\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\091\001\092\001\255\255\094\001\ +\095\001\096\001\097\001\255\255\255\255\255\255\255\255\255\255\ +\103\001\000\001\105\001\255\255\003\001\108\001\255\255\255\255\ +\111\001\008\001\255\255\010\001\115\001\255\255\013\001\014\001\ +\255\255\255\255\017\001\255\255\019\001\020\001\021\001\255\255\ +\255\255\024\001\025\001\026\001\255\255\028\001\029\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\037\001\255\255\ +\255\255\040\001\041\001\255\255\255\255\255\255\255\255\000\000\ +\255\255\048\001\049\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\060\001\255\255\255\255\ +\063\001\255\255\255\255\255\255\067\001\068\001\255\255\070\001\ +\255\255\255\255\073\001\074\001\255\255\255\255\255\255\255\255\ +\255\255\080\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\091\001\092\001\255\255\094\001\ +\095\001\096\001\097\001\255\255\000\001\255\255\255\255\003\001\ +\103\001\255\255\105\001\255\255\008\001\108\001\010\001\255\255\ +\111\001\013\001\014\001\255\255\115\001\017\001\255\255\019\001\ +\020\001\021\001\255\255\255\255\024\001\025\001\026\001\255\255\ +\028\001\029\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\037\001\255\255\255\255\040\001\041\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\048\001\049\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\000\000\255\255\255\255\ +\060\001\255\255\255\255\063\001\255\255\255\255\255\255\067\001\ +\068\001\255\255\070\001\255\255\255\255\073\001\074\001\255\255\ +\255\255\255\255\255\255\255\255\080\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\091\001\ +\092\001\255\255\094\001\095\001\096\001\097\001\255\255\000\001\ +\255\255\255\255\003\001\103\001\255\255\105\001\255\255\008\001\ +\108\001\010\001\255\255\111\001\013\001\014\001\255\255\115\001\ +\017\001\255\255\019\001\020\001\021\001\255\255\255\255\024\001\ +\025\001\026\001\255\255\028\001\029\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\037\001\255\255\255\255\040\001\ +\041\001\255\255\255\255\255\255\255\255\255\255\255\255\048\001\ +\049\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\000\000\255\255\255\255\060\001\255\255\255\255\063\001\255\255\ +\255\255\255\255\067\001\068\001\255\255\070\001\255\255\255\255\ +\073\001\074\001\255\255\255\255\255\255\255\255\255\255\080\001\ 
+\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\091\001\092\001\255\255\094\001\255\255\096\001\ +\097\001\255\255\255\255\255\255\255\255\255\255\103\001\000\001\ +\105\001\255\255\003\001\108\001\255\255\255\255\111\001\008\001\ +\255\255\010\001\115\001\255\255\013\001\014\001\255\255\255\255\ +\017\001\255\255\019\001\020\001\021\001\255\255\255\255\024\001\ +\255\255\026\001\255\255\028\001\029\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\037\001\255\255\255\255\040\001\ +\041\001\255\255\255\255\255\255\255\255\255\255\255\255\048\001\ +\049\001\255\255\255\255\000\000\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\060\001\255\255\255\255\063\001\255\255\ +\255\255\255\255\067\001\068\001\255\255\070\001\255\255\255\255\ +\073\001\074\001\255\255\255\255\255\255\255\255\255\255\080\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\091\001\092\001\255\255\094\001\095\001\096\001\ +\097\001\255\255\255\255\255\255\255\255\255\255\103\001\255\255\ +\105\001\255\255\255\255\108\001\255\255\000\001\111\001\002\001\ +\003\001\004\001\115\001\255\255\255\255\008\001\255\255\255\255\ +\255\255\255\255\013\001\255\255\255\255\255\255\017\001\018\001\ +\019\001\255\255\255\255\255\255\255\255\000\001\255\255\026\001\ +\027\001\028\001\029\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\036\001\255\255\255\255\255\255\040\001\041\001\255\255\ +\000\000\255\255\255\255\255\255\255\255\048\001\049\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\060\001\255\255\255\255\063\001\255\255\255\255\066\001\ +\067\001\068\001\255\255\070\001\255\255\255\255\073\001\074\001\ +\255\255\255\255\255\255\255\255\055\001\080\001\057\001\058\001\ +\059\001\255\255\061\001\255\255\255\255\064\001\065\001\255\255\ +\091\001\092\001\255\255\094\001\095\001\096\001\255\255\255\255\ +\000\001\100\001\002\001\003\001\004\001\255\255\081\001\255\255\ +\008\001\108\001\255\255\255\255\111\001\013\001\089\001\090\001\ +\115\001\017\001\018\001\019\001\255\255\255\255\097\001\255\255\ +\255\255\255\255\026\001\027\001\028\001\029\001\255\255\255\255\ +\008\001\255\255\109\001\110\001\036\001\255\255\255\255\255\255\ +\255\255\041\001\255\255\000\000\255\255\255\255\255\255\023\001\ +\048\001\049\001\255\255\255\255\255\255\255\255\030\001\255\255\ +\255\255\255\255\255\255\255\255\060\001\255\255\255\255\063\001\ +\255\255\255\255\066\001\067\001\068\001\255\255\070\001\255\255\ +\255\255\073\001\074\001\255\255\255\255\255\255\255\255\055\001\ +\080\001\057\001\058\001\059\001\255\255\061\001\255\255\255\255\ +\064\001\065\001\255\255\091\001\092\001\255\255\094\001\095\001\ +\096\001\097\001\255\255\000\001\255\255\002\001\003\001\004\001\ +\255\255\081\001\255\255\008\001\108\001\255\255\255\255\111\001\ +\013\001\089\001\090\001\115\001\017\001\018\001\019\001\255\255\ +\255\255\097\001\255\255\255\255\255\255\026\001\027\001\028\001\ +\029\001\255\255\106\001\255\255\255\255\109\001\110\001\036\001\ +\255\255\255\255\255\255\255\255\041\001\255\255\000\000\255\255\ +\255\255\255\255\255\255\048\001\049\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\060\001\ +\255\255\255\255\063\001\255\255\255\255\066\001\067\001\068\001\ +\255\255\070\001\255\255\255\255\255\255\074\001\255\255\255\255\ +\255\255\255\255\255\255\080\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\091\001\092\001\ 
+\255\255\094\001\095\001\096\001\097\001\255\255\255\255\255\255\ +\000\001\255\255\002\001\003\001\004\001\255\255\255\255\108\001\ +\008\001\255\255\111\001\255\255\255\255\013\001\115\001\255\255\ +\255\255\017\001\018\001\019\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\026\001\027\001\028\001\029\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\036\001\255\255\255\255\255\255\ +\255\255\041\001\255\255\000\000\255\255\255\255\255\255\255\255\ +\048\001\049\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\060\001\255\255\255\255\063\001\ +\255\255\255\255\066\001\067\001\068\001\255\255\070\001\255\255\ +\255\255\255\255\074\001\255\255\255\255\255\255\255\255\055\001\ +\080\001\057\001\058\001\059\001\255\255\061\001\255\255\255\255\ +\064\001\065\001\255\255\091\001\092\001\255\255\094\001\095\001\ +\096\001\097\001\074\001\000\001\255\255\002\001\003\001\004\001\ +\255\255\081\001\255\255\008\001\108\001\255\255\255\255\111\001\ +\013\001\089\001\090\001\115\001\017\001\018\001\019\001\000\000\ +\255\255\097\001\255\255\255\255\255\255\026\001\027\001\028\001\ +\029\001\255\255\255\255\255\255\255\255\109\001\110\001\036\001\ +\255\255\255\255\255\255\255\255\041\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\048\001\049\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\060\001\ +\255\255\255\255\063\001\255\255\255\255\066\001\067\001\068\001\ +\255\255\070\001\255\255\255\255\255\255\074\001\255\255\255\255\ +\255\255\000\000\055\001\080\001\057\001\058\001\059\001\255\255\ +\061\001\255\255\255\255\064\001\065\001\255\255\091\001\092\001\ +\255\255\094\001\095\001\096\001\097\001\255\255\000\001\255\255\ +\002\001\003\001\004\001\255\255\081\001\255\255\008\001\108\001\ +\255\255\255\255\111\001\013\001\089\001\090\001\115\001\017\001\ +\018\001\019\001\255\255\255\255\097\001\255\255\255\255\255\255\ +\026\001\027\001\028\001\029\001\255\255\255\255\255\255\108\001\ +\109\001\110\001\036\001\000\000\255\255\255\255\255\255\041\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\048\001\049\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\060\001\255\255\255\255\063\001\255\255\255\255\ +\066\001\067\001\068\001\255\255\070\001\255\255\255\255\255\255\ +\074\001\255\255\255\255\255\255\255\255\255\255\080\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\091\001\092\001\255\255\094\001\095\001\096\001\097\001\ +\255\255\000\000\255\255\000\001\255\255\002\001\003\001\255\255\ +\255\255\255\255\108\001\008\001\255\255\111\001\255\255\255\255\ +\013\001\115\001\255\255\255\255\017\001\018\001\019\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\026\001\027\001\028\001\ +\029\001\255\255\255\255\255\255\255\255\255\255\255\255\036\001\ +\255\255\255\255\255\255\255\255\041\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\048\001\049\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\000\000\255\255\255\255\255\255\060\001\ +\255\255\255\255\063\001\255\255\255\255\066\001\067\001\068\001\ +\255\255\070\001\255\255\255\255\006\001\074\001\008\001\000\001\ +\255\255\255\255\003\001\080\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\013\001\255\255\091\001\092\001\ +\017\001\094\001\095\001\096\001\097\001\255\255\255\255\255\255\ +\255\255\026\001\027\001\028\001\029\001\255\255\255\255\108\001\ +\255\255\255\255\111\001\255\255\255\255\255\255\115\001\255\255\ 
+\041\001\255\255\255\255\255\255\255\255\055\001\255\255\057\001\ +\058\001\059\001\255\255\061\001\255\255\255\255\064\001\065\001\ +\255\255\000\001\255\255\060\001\003\001\255\255\063\001\255\255\ +\255\255\066\001\067\001\068\001\255\255\255\255\013\001\255\255\ +\073\001\074\001\017\001\255\255\255\255\255\255\255\255\080\001\ +\090\001\255\255\092\001\026\001\027\001\028\001\029\001\097\001\ +\255\255\255\255\255\255\092\001\000\000\094\001\255\255\096\001\ +\097\001\255\255\041\001\109\001\110\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\108\001\255\255\255\255\111\001\255\255\ +\255\255\255\255\115\001\000\001\255\255\060\001\003\001\255\255\ +\063\001\255\255\255\255\066\001\067\001\068\001\255\255\255\255\ +\013\001\255\255\073\001\074\001\017\001\255\255\255\255\255\255\ +\255\255\080\001\255\255\255\255\255\255\026\001\027\001\028\001\ +\029\001\255\255\255\255\255\255\255\255\092\001\255\255\094\001\ +\255\255\096\001\097\001\255\255\041\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\108\001\255\255\255\255\ +\111\001\255\255\255\255\255\255\115\001\255\255\255\255\060\001\ +\255\255\000\001\063\001\255\255\003\001\066\001\067\001\068\001\ +\255\255\255\255\255\255\255\255\073\001\074\001\013\001\255\255\ +\255\255\255\255\017\001\080\001\019\001\255\255\255\255\255\255\ +\255\255\255\255\000\000\026\001\027\001\028\001\029\001\092\001\ +\255\255\094\001\255\255\096\001\097\001\255\255\255\255\255\255\ +\255\255\255\255\041\001\255\255\255\255\255\255\255\255\108\001\ +\255\255\255\255\111\001\255\255\255\255\255\255\115\001\255\255\ +\255\255\255\255\255\255\000\001\255\255\060\001\003\001\255\255\ +\063\001\255\255\255\255\255\255\067\001\068\001\255\255\255\255\ +\013\001\255\255\073\001\074\001\017\001\255\255\255\255\255\255\ +\255\255\080\001\255\255\255\255\255\255\026\001\027\001\028\001\ +\029\001\255\255\255\255\255\255\255\255\092\001\255\255\094\001\ +\255\255\096\001\097\001\255\255\041\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\108\001\255\255\255\255\ +\111\001\000\000\255\255\255\255\115\001\255\255\255\255\060\001\ +\255\255\255\255\063\001\255\255\255\255\255\255\067\001\068\001\ +\255\255\255\255\255\255\255\255\073\001\074\001\055\001\255\255\ +\057\001\058\001\059\001\080\001\061\001\255\255\063\001\064\001\ +\065\001\255\255\255\255\255\255\255\255\255\255\255\255\092\001\ +\255\255\094\001\255\255\096\001\097\001\078\001\255\255\255\255\ +\081\001\255\255\255\255\255\255\255\255\255\255\255\255\108\001\ +\089\001\090\001\111\001\255\255\000\001\255\255\115\001\003\001\ +\097\001\005\001\006\001\007\001\008\001\255\255\255\255\011\001\ +\012\001\013\001\255\255\255\255\109\001\110\001\255\255\019\001\ +\255\255\255\255\255\255\023\001\255\255\255\255\026\001\255\255\ +\028\001\029\001\030\001\031\001\032\001\033\001\034\001\035\001\ +\036\001\255\255\255\255\039\001\040\001\041\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\048\001\049\001\050\001\051\001\ +\052\001\053\001\054\001\055\001\056\001\057\001\058\001\059\001\ +\060\001\061\001\000\000\063\001\064\001\065\001\255\255\067\001\ +\068\001\069\001\070\001\071\001\072\001\255\255\074\001\075\001\ +\076\001\077\001\078\001\255\255\080\001\081\001\255\255\255\255\ +\084\001\085\001\255\255\087\001\088\001\089\001\090\001\091\001\ +\092\001\093\001\255\255\095\001\096\001\097\001\255\255\099\001\ +\255\255\101\001\102\001\255\255\104\001\255\255\106\001\107\001\ +\108\001\109\001\110\001\111\001\112\001\255\255\114\001\005\001\ 
+\006\001\007\001\255\255\255\255\255\255\011\001\012\001\013\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\028\001\029\001\ +\030\001\031\001\032\001\033\001\034\001\255\255\255\255\255\255\ +\255\255\039\001\255\255\041\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\050\001\255\255\052\001\053\001\ +\054\001\055\001\056\001\255\255\255\255\059\001\060\001\255\255\ +\255\255\063\001\064\001\065\001\255\255\000\000\068\001\069\001\ +\255\255\071\001\072\001\255\255\074\001\255\255\076\001\255\255\ +\078\001\255\255\080\001\255\255\255\255\255\255\084\001\085\001\ +\255\255\087\001\255\255\255\255\255\255\255\255\005\001\006\001\ +\007\001\255\255\096\001\097\001\011\001\012\001\013\001\101\001\ +\255\255\255\255\255\255\255\255\106\001\107\001\108\001\109\001\ +\110\001\111\001\255\255\255\255\114\001\028\001\029\001\030\001\ +\031\001\032\001\033\001\034\001\255\255\255\255\255\255\255\255\ +\039\001\255\255\041\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\050\001\255\255\052\001\053\001\054\001\ +\055\001\056\001\255\255\255\255\059\001\060\001\255\255\255\255\ +\063\001\064\001\065\001\255\255\255\255\068\001\069\001\255\255\ +\071\001\072\001\255\255\074\001\255\255\076\001\255\255\078\001\ +\255\255\080\001\255\255\000\000\255\255\084\001\085\001\255\255\ +\087\001\000\000\055\001\255\255\057\001\058\001\059\001\255\255\ +\061\001\255\255\097\001\064\001\065\001\255\255\101\001\255\255\ +\255\255\255\255\255\255\106\001\107\001\108\001\109\001\110\001\ +\111\001\255\255\255\255\114\001\081\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\089\001\090\001\255\255\005\001\ +\006\001\007\001\255\255\255\255\097\001\011\001\012\001\013\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\109\001\110\001\255\255\255\255\255\255\255\255\028\001\029\001\ +\030\001\031\001\032\001\033\001\034\001\255\255\255\255\255\255\ +\255\255\039\001\255\255\041\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\050\001\255\255\052\001\053\001\ +\054\001\055\001\056\001\255\255\255\255\059\001\060\001\255\255\ +\255\255\063\001\064\001\065\001\255\255\255\255\068\001\069\001\ +\255\255\071\001\072\001\255\255\074\001\000\000\076\001\255\255\ +\078\001\255\255\080\001\255\255\255\255\255\255\084\001\085\001\ +\255\255\087\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\097\001\255\255\255\255\255\255\101\001\ +\255\255\255\255\255\255\255\255\106\001\107\001\108\001\109\001\ +\110\001\111\001\255\255\255\255\114\001\000\001\255\255\255\255\ +\255\255\004\001\255\255\006\001\255\255\008\001\255\255\010\001\ +\255\255\012\001\255\255\014\001\015\001\255\255\017\001\018\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\027\001\028\001\255\255\030\001\031\001\000\000\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\050\001\ +\051\001\052\001\053\001\255\255\055\001\056\001\255\255\255\255\ +\059\001\255\255\255\255\255\255\255\255\064\001\065\001\066\001\ +\255\255\255\255\255\255\255\255\071\001\255\255\073\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\081\001\255\255\ +\255\255\084\001\255\255\255\255\255\255\255\255\089\001\000\000\ +\091\001\092\001\255\255\094\001\095\001\255\255\097\001\255\255\ +\255\255\255\255\101\001\000\001\255\255\104\001\255\255\106\001\ 
+\255\255\000\001\109\001\110\001\255\255\004\001\113\001\006\001\ +\013\001\008\001\255\255\010\001\255\255\012\001\255\255\014\001\ +\015\001\255\255\017\001\018\001\255\255\026\001\255\255\028\001\ +\029\001\255\255\255\255\255\255\027\001\255\255\255\255\030\001\ +\031\001\255\255\255\255\255\255\041\001\255\255\255\255\255\255\ +\255\255\000\000\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\050\001\051\001\255\255\053\001\060\001\ +\055\001\056\001\255\255\255\255\059\001\255\255\067\001\068\001\ +\255\255\064\001\065\001\066\001\255\255\074\001\255\255\255\255\ +\071\001\255\255\073\001\080\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\081\001\255\255\255\255\084\001\255\255\092\001\ +\255\255\255\255\089\001\096\001\091\001\092\001\255\255\094\001\ +\095\001\255\255\097\001\000\000\255\255\255\255\101\001\108\001\ +\255\255\104\001\111\001\106\001\255\255\000\001\109\001\110\001\ +\003\001\004\001\113\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\013\001\014\001\255\255\255\255\255\255\255\255\ +\019\001\255\255\255\255\255\255\255\255\255\255\255\255\026\001\ +\255\255\028\001\029\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\041\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\048\001\049\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\060\001\255\255\255\255\063\001\255\255\255\255\066\001\ +\067\001\068\001\255\255\070\001\255\255\000\001\255\255\074\001\ +\003\001\004\001\000\000\255\255\255\255\080\001\255\255\255\255\ +\255\255\255\255\013\001\014\001\255\255\255\255\255\255\255\255\ +\019\001\092\001\255\255\094\001\255\255\096\001\097\001\026\001\ +\255\255\028\001\029\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\108\001\255\255\255\255\111\001\255\255\041\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\048\001\049\001\255\255\ +\255\255\255\255\255\255\000\000\255\255\255\255\255\255\000\001\ +\255\255\060\001\003\001\004\001\063\001\255\255\255\255\066\001\ +\067\001\068\001\255\255\070\001\013\001\014\001\255\255\074\001\ +\255\255\255\255\019\001\255\255\255\255\080\001\255\255\255\255\ +\255\255\026\001\255\255\028\001\029\001\255\255\255\255\255\255\ +\255\255\092\001\255\255\094\001\255\255\096\001\097\001\255\255\ +\041\001\255\255\255\255\255\255\255\255\000\000\255\255\048\001\ +\049\001\108\001\255\255\255\255\111\001\255\255\255\255\255\255\ +\255\255\000\001\255\255\060\001\003\001\004\001\063\001\255\255\ +\255\255\255\255\067\001\068\001\255\255\070\001\013\001\014\001\ +\255\255\074\001\255\255\255\255\019\001\255\255\255\255\080\001\ +\255\255\255\255\255\255\026\001\255\255\028\001\029\001\255\255\ +\255\255\255\255\255\255\092\001\255\255\094\001\255\255\096\001\ +\097\001\255\255\041\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\048\001\049\001\108\001\255\255\255\255\111\001\255\255\ +\255\255\255\255\255\255\000\001\255\255\060\001\003\001\004\001\ +\063\001\000\000\255\255\255\255\067\001\068\001\255\255\070\001\ +\013\001\014\001\255\255\074\001\255\255\255\255\019\001\255\255\ +\255\255\080\001\255\255\255\255\255\255\026\001\255\255\028\001\ +\029\001\255\255\255\255\255\255\255\255\092\001\255\255\094\001\ +\255\255\096\001\097\001\255\255\041\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\048\001\049\001\108\001\255\255\255\255\ +\111\001\255\255\255\255\255\255\255\255\255\255\255\255\060\001\ +\255\255\255\255\063\001\000\000\255\255\255\255\067\001\068\001\ 
+\255\255\070\001\255\255\255\255\255\255\074\001\055\001\255\255\ +\057\001\058\001\059\001\080\001\061\001\255\255\255\255\064\001\ +\065\001\255\255\000\001\255\255\255\255\003\001\255\255\092\001\ +\255\255\094\001\008\001\096\001\097\001\255\255\255\255\013\001\ +\081\001\255\255\255\255\255\255\255\255\019\001\255\255\108\001\ +\089\001\090\001\111\001\255\255\026\001\255\255\028\001\029\001\ +\097\001\255\255\255\255\255\255\255\255\000\000\255\255\255\255\ +\255\255\255\255\040\001\041\001\109\001\110\001\255\255\255\255\ +\255\255\255\255\255\255\000\001\255\255\255\255\003\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\060\001\255\255\ +\013\001\063\001\255\255\255\255\066\001\067\001\068\001\255\255\ +\255\255\255\255\255\255\073\001\074\001\026\001\027\001\028\001\ +\029\001\255\255\080\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\041\001\255\255\092\001\000\000\ +\255\255\255\255\096\001\097\001\255\255\000\001\100\001\255\255\ +\003\001\255\255\255\255\255\255\255\255\255\255\108\001\060\001\ +\255\255\111\001\013\001\064\001\255\255\066\001\067\001\068\001\ +\255\255\255\255\255\255\255\255\073\001\074\001\255\255\026\001\ +\027\001\028\001\029\001\080\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\041\001\092\001\ +\255\255\094\001\255\255\096\001\097\001\255\255\255\255\100\001\ +\255\255\000\000\255\255\255\255\255\255\255\255\255\255\108\001\ +\109\001\060\001\111\001\255\255\255\255\064\001\255\255\066\001\ +\067\001\068\001\255\255\255\255\255\255\255\255\073\001\074\001\ +\255\255\000\001\255\255\255\255\003\001\080\001\255\255\255\255\ +\255\255\008\001\255\255\255\255\255\255\255\255\013\001\255\255\ +\255\255\092\001\255\255\094\001\019\001\096\001\097\001\255\255\ +\255\255\100\001\000\000\026\001\255\255\028\001\029\001\255\255\ +\255\255\108\001\109\001\255\255\111\001\255\255\255\255\255\255\ +\255\255\040\001\041\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\000\001\255\255\060\001\003\001\255\255\ +\063\001\255\255\255\255\066\001\067\001\068\001\255\255\255\255\ +\013\001\255\255\073\001\074\001\255\255\255\255\019\001\000\000\ +\255\255\080\001\255\255\255\255\255\255\026\001\027\001\028\001\ +\029\001\255\255\255\255\255\255\255\255\092\001\255\255\255\255\ +\255\255\096\001\097\001\255\255\041\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\048\001\255\255\108\001\255\255\255\255\ +\111\001\255\255\255\255\255\255\255\255\000\001\255\255\060\001\ +\003\001\255\255\063\001\255\255\255\255\255\255\067\001\068\001\ +\255\255\070\001\013\001\255\255\000\000\074\001\255\255\255\255\ +\019\001\255\255\255\255\080\001\255\255\255\255\255\255\026\001\ +\255\255\028\001\029\001\255\255\255\255\255\255\255\255\092\001\ +\255\255\094\001\255\255\096\001\097\001\040\001\041\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\048\001\049\001\108\001\ +\255\255\255\255\111\001\255\255\255\255\255\255\255\255\000\001\ +\255\255\060\001\003\001\255\255\063\001\255\255\255\255\008\001\ +\255\255\068\001\255\255\070\001\013\001\255\255\255\255\074\001\ +\255\255\255\255\019\001\255\255\255\255\080\001\255\255\255\255\ +\255\255\026\001\255\255\028\001\029\001\255\255\255\255\000\000\ +\255\255\092\001\255\255\255\255\255\255\096\001\097\001\255\255\ +\041\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\108\001\255\255\255\255\111\001\255\255\255\255\255\255\ 
+\255\255\000\001\255\255\060\001\003\001\255\255\063\001\255\255\ +\255\255\255\255\067\001\068\001\255\255\255\255\013\001\255\255\ +\255\255\074\001\255\255\255\255\019\001\255\255\255\255\080\001\ +\255\255\255\255\255\255\026\001\000\000\028\001\029\001\255\255\ +\255\255\255\255\255\255\092\001\255\255\000\000\255\255\096\001\ +\097\001\255\255\041\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\000\001\108\001\255\255\003\001\111\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\060\001\255\255\013\001\ +\063\001\255\255\255\255\255\255\067\001\068\001\255\255\255\255\ +\255\255\255\255\255\255\074\001\026\001\255\255\028\001\029\001\ +\255\255\080\001\255\255\255\255\255\255\255\255\255\255\000\000\ +\255\255\255\255\040\001\041\001\255\255\092\001\255\255\255\255\ +\255\255\096\001\097\001\255\255\255\255\255\255\255\255\000\001\ +\255\255\255\255\003\001\255\255\255\255\108\001\060\001\255\255\ +\111\001\063\001\255\255\255\255\013\001\067\001\068\001\255\255\ +\255\255\255\255\019\001\255\255\074\001\255\255\255\255\255\255\ +\255\255\026\001\080\001\028\001\029\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\000\000\255\255\092\001\255\255\ +\041\001\255\255\096\001\097\001\255\255\000\000\255\255\255\255\ +\255\255\255\255\255\255\255\255\000\001\255\255\108\001\003\001\ +\255\255\111\001\255\255\060\001\255\255\255\255\063\001\255\255\ +\255\255\013\001\067\001\068\001\255\255\255\255\255\255\019\001\ +\255\255\074\001\255\255\255\255\255\255\255\255\026\001\080\001\ +\028\001\029\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\092\001\255\255\041\001\255\255\096\001\ +\097\001\255\255\000\000\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\108\001\255\255\255\255\111\001\255\255\ +\060\001\255\255\255\255\063\001\255\255\255\255\255\255\067\001\ +\068\001\255\255\255\255\255\255\255\255\255\255\074\001\000\001\ +\255\255\255\255\003\001\255\255\080\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\013\001\255\255\255\255\255\255\ +\092\001\255\255\019\001\255\255\096\001\097\001\255\255\000\000\ +\255\255\026\001\255\255\028\001\029\001\255\255\255\255\255\255\ +\108\001\255\255\255\255\111\001\255\255\255\255\255\255\255\255\ +\041\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\000\001\255\255\255\255\003\001\ +\255\255\255\255\255\255\060\001\255\255\000\001\063\001\255\255\ +\255\255\013\001\067\001\068\001\255\255\008\001\255\255\019\001\ +\000\000\074\001\013\001\255\255\255\255\255\255\026\001\080\001\ +\028\001\029\001\000\000\255\255\255\255\255\255\255\255\026\001\ +\255\255\028\001\029\001\092\001\255\255\041\001\255\255\096\001\ +\097\001\255\255\255\255\255\255\255\255\255\255\041\001\255\255\ +\255\255\255\255\255\255\108\001\255\255\255\255\111\001\000\001\ +\060\001\255\255\003\001\063\001\255\255\255\255\255\255\067\001\ +\068\001\060\001\255\255\255\255\013\001\255\255\074\001\066\001\ +\067\001\068\001\019\001\255\255\080\001\255\255\255\255\074\001\ +\255\255\026\001\255\255\028\001\029\001\080\001\255\255\255\255\ +\092\001\255\255\255\255\000\000\096\001\097\001\255\255\255\255\ +\041\001\092\001\255\255\255\255\255\255\096\001\255\255\255\255\ +\108\001\255\255\255\255\111\001\000\001\255\255\255\255\255\255\ +\255\255\108\001\255\255\060\001\111\001\000\001\063\001\255\255\ +\003\001\013\001\067\001\068\001\255\255\255\255\255\255\255\255\ +\255\255\074\001\013\001\255\255\255\255\255\255\026\001\080\001\ 
+\028\001\029\001\255\255\255\255\000\000\255\255\255\255\026\001\ +\255\255\028\001\029\001\092\001\255\255\041\001\000\000\096\001\ +\097\001\255\255\255\255\255\255\255\255\040\001\041\001\255\255\ +\255\255\255\255\255\255\108\001\255\255\255\255\111\001\255\255\ +\060\001\255\255\000\001\063\001\255\255\003\001\066\001\067\001\ +\068\001\060\001\255\255\255\255\063\001\255\255\074\001\013\001\ +\255\255\068\001\255\255\255\255\080\001\255\255\255\255\074\001\ +\255\255\255\255\255\255\255\255\026\001\080\001\028\001\029\001\ +\092\001\255\255\255\255\255\255\096\001\097\001\255\255\255\255\ +\255\255\092\001\255\255\041\001\000\000\096\001\097\001\255\255\ +\108\001\255\255\255\255\111\001\255\255\255\255\255\255\000\001\ +\255\255\108\001\003\001\255\255\111\001\255\255\060\001\255\255\ +\255\255\063\001\255\255\255\255\013\001\067\001\068\001\255\255\ +\255\255\255\255\255\255\255\255\074\001\255\255\255\255\255\255\ +\255\255\026\001\080\001\028\001\029\001\255\255\255\255\255\255\ +\000\000\255\255\255\255\255\255\255\255\255\255\092\001\255\255\ +\041\001\000\000\096\001\097\001\255\255\255\255\255\255\255\255\ +\000\001\255\255\255\255\255\255\255\255\255\255\108\001\255\255\ +\008\001\111\001\000\001\060\001\255\255\013\001\063\001\255\255\ +\255\255\255\255\067\001\068\001\255\255\255\255\255\255\013\001\ +\255\255\074\001\026\001\255\255\028\001\029\001\255\255\080\001\ +\255\255\255\255\255\255\255\255\026\001\255\255\028\001\029\001\ +\255\255\041\001\000\000\092\001\255\255\255\255\255\255\096\001\ +\097\001\255\255\255\255\041\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\108\001\060\001\255\255\111\001\063\001\ +\255\255\255\255\255\255\067\001\068\001\255\255\060\001\255\255\ +\255\255\063\001\074\001\000\001\255\255\067\001\068\001\255\255\ +\080\001\255\255\255\255\255\255\074\001\255\255\000\000\255\255\ +\013\001\255\255\080\001\255\255\092\001\255\255\255\255\255\255\ +\096\001\097\001\255\255\255\255\255\255\026\001\092\001\028\001\ +\029\001\255\255\096\001\097\001\108\001\255\255\255\255\111\001\ +\255\255\255\255\255\255\255\255\041\001\255\255\108\001\255\255\ +\255\255\111\001\255\255\255\255\000\001\255\255\255\255\003\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\000\001\060\001\ +\255\255\013\001\063\001\255\255\255\255\255\255\067\001\068\001\ +\255\255\255\255\255\255\013\001\255\255\074\001\026\001\255\255\ +\028\001\029\001\255\255\080\001\255\255\255\255\255\255\255\255\ +\026\001\255\255\028\001\029\001\255\255\041\001\255\255\092\001\ +\255\255\255\255\255\255\096\001\097\001\255\255\255\255\041\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\108\001\ +\060\001\255\255\111\001\063\001\255\255\255\255\255\255\255\255\ +\068\001\255\255\060\001\255\255\000\001\063\001\074\001\255\255\ +\255\255\067\001\068\001\255\255\080\001\255\255\255\255\255\255\ +\074\001\013\001\255\255\255\255\255\255\255\255\080\001\255\255\ +\092\001\255\255\255\255\255\255\096\001\097\001\026\001\255\255\ +\028\001\029\001\092\001\255\255\255\255\255\255\096\001\097\001\ +\108\001\255\255\255\255\111\001\255\255\041\001\255\255\255\255\ +\000\001\255\255\108\001\255\255\255\255\111\001\255\255\255\255\ +\255\255\000\001\255\255\255\255\255\255\013\001\255\255\255\255\ +\060\001\255\255\255\255\063\001\255\255\255\255\013\001\067\001\ +\068\001\255\255\026\001\255\255\028\001\029\001\074\001\255\255\ +\255\255\255\255\255\255\026\001\080\001\028\001\029\001\255\255\ +\255\255\041\001\255\255\255\255\255\255\255\255\255\255\255\255\ 
+\092\001\255\255\041\001\255\255\096\001\097\001\255\255\255\255\ +\255\255\255\255\000\001\255\255\060\001\255\255\255\255\063\001\ +\108\001\255\255\255\255\111\001\068\001\060\001\255\255\013\001\ +\063\001\255\255\074\001\255\255\255\255\068\001\255\255\255\255\ +\080\001\255\255\255\255\074\001\026\001\255\255\028\001\029\001\ +\255\255\080\001\255\255\255\255\092\001\255\255\255\255\255\255\ +\096\001\097\001\255\255\041\001\255\255\092\001\000\001\255\255\ +\255\255\096\001\097\001\255\255\108\001\255\255\255\255\111\001\ +\255\255\255\255\255\255\013\001\255\255\108\001\060\001\255\255\ +\111\001\063\001\255\255\255\255\255\255\255\255\068\001\255\255\ +\026\001\255\255\028\001\029\001\074\001\255\255\255\255\255\255\ +\255\255\255\255\080\001\255\255\255\255\255\255\255\255\041\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\092\001\255\255\ +\255\255\255\255\096\001\097\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\060\001\255\255\255\255\063\001\108\001\255\255\ +\255\255\111\001\068\001\255\255\255\255\255\255\255\255\255\255\ +\074\001\255\255\255\255\255\255\255\255\255\255\080\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\092\001\000\001\255\255\255\255\096\001\097\001\ +\005\001\006\001\007\001\008\001\255\255\255\255\011\001\012\001\ +\013\001\014\001\108\001\255\255\255\255\111\001\019\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\026\001\255\255\028\001\ +\029\001\030\001\031\001\032\001\033\001\034\001\035\001\255\255\ +\255\255\255\255\039\001\255\255\041\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\048\001\049\001\050\001\051\001\052\001\ +\053\001\054\001\055\001\056\001\255\255\255\255\059\001\060\001\ +\255\255\255\255\063\001\064\001\065\001\066\001\255\255\068\001\ +\069\001\070\001\071\001\072\001\255\255\074\001\255\255\076\001\ +\077\001\078\001\255\255\080\001\081\001\255\255\255\255\084\001\ +\085\001\255\255\087\001\255\255\089\001\090\001\255\255\092\001\ +\093\001\255\255\255\255\096\001\097\001\255\255\099\001\255\255\ +\101\001\102\001\255\255\104\001\255\255\106\001\107\001\108\001\ +\109\001\110\001\111\001\112\001\000\001\114\001\255\255\255\255\ +\255\255\005\001\006\001\007\001\008\001\255\255\255\255\011\001\ +\012\001\255\255\255\255\255\255\255\255\255\255\255\255\019\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\026\001\255\255\ +\028\001\255\255\030\001\031\001\032\001\033\001\034\001\035\001\ +\255\255\255\255\255\255\039\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\048\001\049\001\050\001\051\001\ +\052\001\053\001\054\001\055\001\056\001\255\255\255\255\059\001\ +\060\001\255\255\255\255\063\001\064\001\065\001\255\255\255\255\ +\068\001\069\001\070\001\071\001\072\001\255\255\074\001\255\255\ +\076\001\077\001\078\001\255\255\255\255\081\001\255\255\255\255\ +\084\001\085\001\255\255\087\001\255\255\089\001\090\001\255\255\ +\255\255\093\001\255\255\255\255\255\255\097\001\255\255\099\001\ +\255\255\101\001\102\001\255\255\104\001\255\255\106\001\107\001\ +\255\255\109\001\110\001\111\001\112\001\255\255\114\001\000\001\ +\001\001\002\001\255\255\255\255\005\001\006\001\007\001\255\255\ +\009\001\255\255\011\001\012\001\255\255\255\255\015\001\016\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\027\001\255\255\255\255\030\001\031\001\032\001\ +\033\001\034\001\255\255\036\001\255\255\255\255\039\001\255\255\ +\255\255\042\001\043\001\044\001\045\001\046\001\047\001\255\255\ 
+\255\255\050\001\255\255\052\001\053\001\054\001\055\001\056\001\ +\255\255\255\255\059\001\255\255\061\001\255\255\063\001\064\001\ +\065\001\255\255\255\255\255\255\069\001\255\255\071\001\072\001\ +\255\255\074\001\255\255\076\001\255\255\078\001\255\255\255\255\ +\255\255\082\001\083\001\084\001\085\001\086\001\087\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\094\001\255\255\255\255\ +\255\255\098\001\255\255\100\001\101\001\255\255\255\255\255\255\ +\255\255\106\001\107\001\255\255\109\001\110\001\000\001\001\001\ +\002\001\114\001\255\255\005\001\006\001\007\001\255\255\009\001\ +\255\255\011\001\012\001\255\255\255\255\015\001\016\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\027\001\255\255\255\255\030\001\031\001\032\001\033\001\ +\034\001\255\255\036\001\255\255\255\255\039\001\255\255\255\255\ +\042\001\043\001\044\001\045\001\046\001\047\001\255\255\255\255\ +\050\001\255\255\052\001\053\001\054\001\055\001\056\001\255\255\ +\255\255\059\001\255\255\061\001\255\255\063\001\064\001\065\001\ +\255\255\255\255\255\255\069\001\255\255\071\001\072\001\255\255\ +\074\001\255\255\076\001\255\255\078\001\255\255\255\255\255\255\ +\082\001\083\001\084\001\085\001\086\001\087\001\255\255\255\255\ +\255\255\255\255\255\255\055\001\094\001\057\001\058\001\059\001\ +\098\001\061\001\100\001\101\001\064\001\065\001\255\255\255\255\ +\106\001\107\001\255\255\109\001\110\001\000\001\255\255\255\255\ +\114\001\255\255\005\001\006\001\007\001\081\001\255\255\255\255\ +\011\001\012\001\013\001\255\255\255\255\089\001\090\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\097\001\255\255\026\001\ +\255\255\028\001\029\001\030\001\031\001\032\001\033\001\034\001\ +\255\255\109\001\110\001\255\255\039\001\255\255\041\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\050\001\ +\255\255\052\001\053\001\054\001\055\001\056\001\255\255\255\255\ +\059\001\060\001\255\255\255\255\063\001\064\001\065\001\255\255\ +\255\255\068\001\069\001\255\255\071\001\072\001\255\255\074\001\ +\255\255\076\001\255\255\078\001\255\255\080\001\255\255\255\255\ +\255\255\084\001\085\001\000\001\087\001\255\255\255\255\255\255\ +\005\001\006\001\007\001\255\255\255\255\096\001\011\001\012\001\ +\255\255\255\255\101\001\255\255\255\255\255\255\255\255\106\001\ +\107\001\108\001\109\001\110\001\111\001\255\255\255\255\114\001\ +\255\255\030\001\031\001\032\001\033\001\034\001\255\255\255\255\ +\255\255\255\255\039\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\050\001\255\255\052\001\ +\053\001\054\001\055\001\056\001\255\255\255\255\059\001\255\255\ +\255\255\255\255\063\001\064\001\065\001\255\255\255\255\255\255\ +\069\001\255\255\071\001\072\001\255\255\255\255\255\255\076\001\ +\255\255\078\001\255\255\255\255\255\255\255\255\255\255\084\001\ +\085\001\000\001\087\001\255\255\255\255\255\255\005\001\006\001\ +\007\001\094\001\255\255\255\255\011\001\012\001\255\255\255\255\ +\101\001\255\255\255\255\255\255\255\255\106\001\107\001\255\255\ +\109\001\110\001\255\255\255\255\255\255\114\001\255\255\030\001\ +\031\001\032\001\033\001\034\001\255\255\255\255\255\255\255\255\ +\039\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\050\001\255\255\052\001\053\001\054\001\ +\055\001\056\001\255\255\255\255\059\001\255\255\255\255\255\255\ +\063\001\064\001\065\001\255\255\255\255\255\255\069\001\255\255\ +\071\001\072\001\255\255\255\255\255\255\076\001\255\255\078\001\ 
+\255\255\255\255\255\255\255\255\255\255\084\001\085\001\000\001\ +\087\001\255\255\255\255\255\255\005\001\006\001\007\001\094\001\ +\255\255\255\255\011\001\012\001\255\255\255\255\101\001\255\255\ +\255\255\255\255\255\255\106\001\107\001\255\255\109\001\110\001\ +\255\255\255\255\255\255\114\001\255\255\030\001\031\001\032\001\ +\033\001\034\001\255\255\255\255\255\255\255\255\039\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\050\001\255\255\052\001\053\001\054\001\055\001\056\001\ +\255\255\255\255\059\001\255\255\255\255\255\255\063\001\064\001\ +\065\001\255\255\255\255\255\255\069\001\255\255\071\001\072\001\ +\255\255\255\255\255\255\076\001\255\255\078\001\255\255\255\255\ +\255\255\255\255\255\255\084\001\085\001\000\001\087\001\255\255\ +\255\255\255\255\005\001\006\001\007\001\094\001\255\255\255\255\ +\011\001\012\001\255\255\255\255\101\001\255\255\255\255\255\255\ +\255\255\106\001\107\001\255\255\109\001\110\001\255\255\255\255\ +\255\255\114\001\255\255\030\001\031\001\032\001\033\001\034\001\ +\255\255\255\255\255\255\255\255\039\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\050\001\ +\255\255\052\001\053\001\054\001\055\001\056\001\255\255\255\255\ +\059\001\255\255\255\255\255\255\063\001\064\001\065\001\255\255\ +\255\255\255\255\069\001\255\255\071\001\072\001\255\255\255\255\ +\255\255\076\001\255\255\078\001\255\255\255\255\255\255\255\255\ +\255\255\084\001\085\001\255\255\087\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\094\001\003\001\004\001\005\001\255\255\ +\255\255\255\255\101\001\255\255\011\001\255\255\013\001\106\001\ +\107\001\255\255\109\001\110\001\019\001\020\001\021\001\114\001\ +\255\255\024\001\025\001\026\001\255\255\028\001\029\001\030\001\ +\255\255\032\001\033\001\034\001\035\001\255\255\255\255\255\255\ +\039\001\040\001\041\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\048\001\049\001\255\255\255\255\052\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\063\001\064\001\255\255\255\255\255\255\000\001\069\001\070\001\ +\255\255\004\001\255\255\074\001\075\001\076\001\077\001\078\001\ +\079\001\080\001\255\255\082\001\255\255\255\255\017\001\255\255\ +\019\001\088\001\255\255\022\001\255\255\255\255\093\001\026\001\ +\027\001\255\255\255\255\255\255\099\001\255\255\255\255\102\001\ +\103\001\036\001\105\001\106\001\107\001\108\001\109\001\255\255\ +\111\001\112\001\113\001\114\001\115\001\048\001\049\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\060\001\255\255\255\255\255\255\064\001\255\255\066\001\ +\067\001\068\001\255\255\070\001\255\255\255\255\073\001\255\255\ +\255\255\255\255\000\001\001\001\002\001\255\255\255\255\255\255\ +\006\001\007\001\255\255\009\001\255\255\255\255\012\001\090\001\ +\091\001\015\001\016\001\255\255\095\001\255\255\097\001\255\255\ +\255\255\100\001\255\255\255\255\255\255\027\001\028\001\255\255\ +\030\001\031\001\109\001\255\255\111\001\255\255\036\001\255\255\ +\255\255\255\255\255\255\255\255\042\001\043\001\044\001\045\001\ +\046\001\047\001\255\255\255\255\050\001\255\255\052\001\053\001\ +\255\255\055\001\056\001\255\255\255\255\059\001\255\255\061\001\ +\255\255\255\255\064\001\065\001\255\255\255\255\255\255\255\255\ +\255\255\071\001\072\001\255\255\074\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\082\001\083\001\084\001\085\001\ +\086\001\087\001\255\255\255\255\255\255\255\255\255\255\255\255\ 
+\094\001\255\255\255\255\097\001\098\001\255\255\100\001\101\001\ +\255\255\255\255\255\255\255\255\106\001\255\255\108\001\109\001\ +\110\001\000\001\001\001\002\001\255\255\255\255\255\255\006\001\ +\007\001\255\255\009\001\255\255\255\255\012\001\255\255\255\255\ +\015\001\016\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\027\001\028\001\255\255\030\001\ +\031\001\255\255\255\255\255\255\255\255\036\001\255\255\255\255\ +\255\255\255\255\255\255\042\001\043\001\044\001\045\001\046\001\ +\047\001\255\255\255\255\050\001\255\255\052\001\053\001\255\255\ +\055\001\056\001\255\255\255\255\059\001\255\255\061\001\255\255\ +\255\255\064\001\065\001\255\255\255\255\255\255\255\255\255\255\ +\071\001\072\001\255\255\074\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\082\001\083\001\084\001\085\001\086\001\ +\087\001\255\255\255\255\255\255\255\255\255\255\255\255\094\001\ +\255\255\255\255\097\001\098\001\255\255\100\001\101\001\255\255\ +\255\255\255\255\255\255\106\001\255\255\108\001\109\001\110\001\ +\000\001\001\001\002\001\255\255\255\255\255\255\006\001\007\001\ +\255\255\009\001\255\255\255\255\012\001\255\255\255\255\015\001\ +\016\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\027\001\028\001\255\255\030\001\031\001\ +\255\255\255\255\255\255\255\255\036\001\255\255\255\255\255\255\ +\255\255\255\255\042\001\043\001\044\001\045\001\046\001\047\001\ +\255\255\255\255\050\001\255\255\052\001\053\001\255\255\055\001\ +\056\001\255\255\255\255\059\001\255\255\061\001\255\255\255\255\ +\064\001\065\001\255\255\255\255\255\255\255\255\255\255\071\001\ +\072\001\255\255\074\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\082\001\083\001\084\001\085\001\086\001\087\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\094\001\255\255\ +\255\255\097\001\098\001\255\255\100\001\101\001\255\255\255\255\ +\255\255\255\255\106\001\255\255\108\001\109\001\110\001\000\001\ +\001\001\002\001\255\255\255\255\255\255\006\001\007\001\255\255\ +\009\001\255\255\255\255\012\001\255\255\255\255\015\001\016\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\027\001\028\001\255\255\030\001\031\001\255\255\ +\255\255\255\255\255\255\036\001\255\255\255\255\255\255\255\255\ +\255\255\042\001\043\001\044\001\045\001\046\001\047\001\255\255\ +\255\255\050\001\255\255\052\001\053\001\255\255\055\001\056\001\ +\255\255\255\255\059\001\255\255\061\001\255\255\255\255\064\001\ +\065\001\255\255\255\255\255\255\255\255\255\255\071\001\072\001\ +\255\255\074\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\082\001\083\001\084\001\085\001\086\001\087\001\255\255\ +\255\255\000\001\255\255\255\255\255\255\094\001\255\255\006\001\ +\097\001\098\001\255\255\100\001\101\001\012\001\255\255\255\255\ +\015\001\106\001\255\255\255\255\109\001\110\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\028\001\255\255\030\001\ +\031\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\050\001\255\255\052\001\053\001\255\255\ +\055\001\056\001\255\255\255\255\059\001\255\255\000\001\255\255\ +\255\255\064\001\065\001\255\255\006\001\255\255\255\255\255\255\ +\071\001\255\255\012\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\084\001\255\255\255\255\ +\255\255\255\255\028\001\255\255\030\001\031\001\255\255\094\001\ 
+\255\255\255\255\097\001\255\255\255\255\255\255\101\001\255\255\ +\255\255\255\255\255\255\106\001\255\255\255\255\109\001\110\001\ +\050\001\255\255\052\001\053\001\255\255\055\001\056\001\255\255\ +\255\255\059\001\255\255\000\001\255\255\255\255\064\001\065\001\ +\255\255\006\001\255\255\255\255\255\255\071\001\255\255\012\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\084\001\255\255\255\255\255\255\255\255\028\001\ +\255\255\030\001\031\001\255\255\255\255\255\255\255\255\097\001\ +\255\255\255\255\255\255\101\001\255\255\255\255\255\255\255\255\ +\106\001\255\255\255\255\109\001\110\001\050\001\255\255\052\001\ +\053\001\255\255\055\001\056\001\255\255\255\255\059\001\255\255\ +\000\001\255\255\255\255\064\001\065\001\255\255\006\001\255\255\ +\255\255\255\255\071\001\255\255\012\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\084\001\ +\255\255\255\255\255\255\255\255\028\001\255\255\030\001\031\001\ +\255\255\255\255\255\255\255\255\097\001\255\255\255\255\255\255\ +\101\001\255\255\255\255\255\255\255\255\106\001\255\255\255\255\ +\109\001\110\001\050\001\255\255\052\001\053\001\255\255\055\001\ +\056\001\255\255\255\255\059\001\255\255\000\001\255\255\255\255\ +\064\001\065\001\255\255\006\001\255\255\255\255\255\255\071\001\ +\255\255\012\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\084\001\255\255\255\255\255\255\ +\255\255\028\001\255\255\030\001\031\001\255\255\255\255\255\255\ +\255\255\097\001\255\255\255\255\255\255\101\001\255\255\255\255\ +\255\255\255\255\106\001\255\255\255\255\109\001\110\001\050\001\ +\255\255\052\001\053\001\255\255\055\001\056\001\255\255\255\255\ +\059\001\255\255\000\001\255\255\255\255\064\001\065\001\255\255\ +\006\001\255\255\255\255\255\255\071\001\255\255\012\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\084\001\255\255\255\255\255\255\255\255\028\001\255\255\ +\030\001\031\001\255\255\255\255\255\255\255\255\097\001\255\255\ +\255\255\255\255\101\001\255\255\255\255\255\255\255\255\106\001\ +\255\255\255\255\109\001\110\001\050\001\255\255\052\001\053\001\ +\255\255\055\001\056\001\255\255\255\255\059\001\255\255\000\001\ +\255\255\255\255\064\001\065\001\255\255\006\001\255\255\255\255\ +\255\255\071\001\255\255\012\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\084\001\255\255\ +\255\255\255\255\255\255\028\001\255\255\030\001\031\001\255\255\ +\255\255\255\255\255\255\097\001\255\255\255\255\255\255\101\001\ +\255\255\255\255\255\255\255\255\106\001\255\255\255\255\109\001\ +\110\001\050\001\255\255\052\001\053\001\255\255\055\001\056\001\ +\255\255\255\255\059\001\255\255\255\255\255\255\255\255\064\001\ +\065\001\005\001\006\001\007\001\255\255\255\255\071\001\011\001\ +\012\001\013\001\014\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\084\001\255\255\255\255\255\255\255\255\ +\028\001\029\001\030\001\031\001\032\001\033\001\034\001\255\255\ +\097\001\255\255\255\255\039\001\101\001\041\001\255\255\255\255\ +\255\255\106\001\255\255\255\255\109\001\110\001\050\001\255\255\ +\052\001\053\001\054\001\055\001\056\001\255\255\255\255\059\001\ +\060\001\255\255\255\255\063\001\064\001\065\001\255\255\255\255\ +\068\001\069\001\255\255\071\001\072\001\255\255\074\001\255\255\ +\076\001\255\255\078\001\255\255\080\001\255\255\255\255\255\255\ +\084\001\085\001\255\255\087\001\255\255\089\001\255\255\255\255\ 
+\005\001\006\001\007\001\255\255\096\001\255\255\011\001\012\001\ +\013\001\101\001\255\255\255\255\255\255\255\255\106\001\107\001\ +\108\001\109\001\110\001\111\001\255\255\255\255\114\001\028\001\ +\029\001\030\001\031\001\032\001\033\001\034\001\255\255\255\255\ +\255\255\255\255\039\001\255\255\041\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\050\001\255\255\052\001\ +\053\001\054\001\055\001\056\001\255\255\255\255\059\001\060\001\ +\255\255\255\255\063\001\064\001\065\001\255\255\255\255\068\001\ +\069\001\255\255\071\001\072\001\255\255\074\001\255\255\076\001\ +\255\255\078\001\255\255\080\001\255\255\255\255\255\255\084\001\ +\085\001\255\255\087\001\255\255\255\255\255\255\005\001\006\001\ +\007\001\255\255\255\255\096\001\011\001\012\001\255\255\255\255\ +\101\001\255\255\255\255\255\255\255\255\106\001\107\001\108\001\ +\109\001\110\001\111\001\255\255\255\255\114\001\255\255\030\001\ +\031\001\032\001\033\001\034\001\255\255\255\255\255\255\255\255\ +\039\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\050\001\255\255\052\001\053\001\054\001\ +\055\001\056\001\255\255\255\255\059\001\255\255\255\255\255\255\ +\063\001\064\001\065\001\255\255\255\255\255\255\069\001\255\255\ +\071\001\072\001\255\255\255\255\255\255\076\001\255\255\078\001\ +\255\255\255\255\255\255\255\255\255\255\084\001\085\001\255\255\ +\087\001\255\255\255\255\255\255\255\255\092\001\005\001\006\001\ +\007\001\255\255\255\255\010\001\011\001\012\001\101\001\255\255\ +\255\255\255\255\255\255\106\001\107\001\255\255\109\001\110\001\ +\255\255\255\255\255\255\114\001\255\255\255\255\255\255\030\001\ +\031\001\032\001\033\001\034\001\255\255\255\255\255\255\255\255\ +\039\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\050\001\255\255\052\001\053\001\054\001\ +\055\001\056\001\255\255\255\255\059\001\255\255\255\255\255\255\ +\063\001\064\001\065\001\255\255\255\255\255\255\069\001\255\255\ +\071\001\072\001\255\255\255\255\255\255\076\001\255\255\078\001\ +\255\255\255\255\255\255\255\255\255\255\084\001\085\001\255\255\ +\087\001\255\255\255\255\005\001\006\001\007\001\255\255\255\255\ +\255\255\011\001\012\001\255\255\255\255\255\255\101\001\255\255\ +\255\255\255\255\255\255\106\001\107\001\255\255\109\001\110\001\ +\026\001\255\255\255\255\114\001\030\001\031\001\032\001\033\001\ +\034\001\255\255\255\255\255\255\255\255\039\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\050\001\255\255\052\001\053\001\054\001\055\001\056\001\255\255\ +\255\255\059\001\255\255\255\255\255\255\063\001\064\001\065\001\ +\255\255\255\255\255\255\069\001\255\255\071\001\072\001\255\255\ +\255\255\255\255\076\001\255\255\078\001\255\255\255\255\255\255\ +\255\255\255\255\084\001\085\001\255\255\087\001\255\255\255\255\ +\005\001\006\001\007\001\255\255\255\255\255\255\011\001\012\001\ +\255\255\255\255\255\255\101\001\255\255\255\255\255\255\255\255\ +\106\001\107\001\255\255\109\001\110\001\255\255\255\255\255\255\ +\114\001\030\001\031\001\032\001\033\001\034\001\255\255\255\255\ +\255\255\255\255\039\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\050\001\255\255\052\001\ +\053\001\054\001\055\001\056\001\255\255\255\255\059\001\255\255\ +\255\255\255\255\063\001\064\001\065\001\255\255\255\255\255\255\ +\069\001\255\255\071\001\072\001\255\255\255\255\255\255\076\001\ +\255\255\078\001\255\255\255\255\255\255\255\255\083\001\084\001\ 
+\085\001\255\255\087\001\255\255\255\255\005\001\006\001\007\001\ +\255\255\255\255\255\255\011\001\012\001\255\255\255\255\255\255\ +\101\001\255\255\255\255\255\255\255\255\106\001\107\001\255\255\ +\109\001\110\001\255\255\255\255\255\255\114\001\030\001\031\001\ +\032\001\033\001\034\001\255\255\255\255\255\255\255\255\039\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\050\001\255\255\052\001\053\001\054\001\055\001\ +\056\001\255\255\255\255\059\001\255\255\255\255\255\255\063\001\ +\064\001\065\001\255\255\255\255\255\255\069\001\255\255\071\001\ +\072\001\255\255\255\255\255\255\076\001\255\255\078\001\255\255\ +\255\255\255\255\255\255\255\255\084\001\085\001\255\255\087\001\ +\255\255\255\255\255\255\255\255\092\001\005\001\006\001\007\001\ +\255\255\255\255\010\001\011\001\012\001\101\001\255\255\255\255\ +\255\255\255\255\106\001\107\001\255\255\109\001\110\001\255\255\ +\255\255\255\255\114\001\255\255\255\255\255\255\030\001\031\001\ +\032\001\033\001\034\001\255\255\255\255\255\255\255\255\039\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\050\001\255\255\052\001\053\001\054\001\055\001\ +\056\001\255\255\255\255\059\001\255\255\255\255\255\255\063\001\ +\064\001\065\001\255\255\255\255\255\255\069\001\255\255\071\001\ +\072\001\255\255\255\255\255\255\076\001\255\255\078\001\255\255\ +\255\255\255\255\255\255\255\255\084\001\085\001\255\255\087\001\ +\255\255\255\255\255\255\005\001\006\001\007\001\255\255\255\255\ +\255\255\011\001\012\001\255\255\255\255\101\001\255\255\255\255\ +\255\255\255\255\106\001\107\001\022\001\109\001\110\001\255\255\ +\255\255\255\255\114\001\255\255\030\001\031\001\032\001\033\001\ +\034\001\255\255\255\255\255\255\255\255\039\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\050\001\255\255\052\001\053\001\054\001\055\001\056\001\255\255\ +\255\255\059\001\255\255\255\255\255\255\063\001\064\001\065\001\ +\255\255\255\255\255\255\069\001\255\255\071\001\072\001\255\255\ +\255\255\255\255\076\001\255\255\078\001\255\255\255\255\255\255\ +\255\255\255\255\084\001\085\001\255\255\087\001\255\255\255\255\ +\005\001\006\001\007\001\255\255\255\255\255\255\011\001\012\001\ +\255\255\255\255\255\255\101\001\255\255\255\255\255\255\255\255\ +\106\001\107\001\255\255\109\001\110\001\026\001\255\255\255\255\ +\114\001\030\001\031\001\032\001\033\001\034\001\255\255\255\255\ +\255\255\255\255\039\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\050\001\255\255\052\001\ +\053\001\054\001\055\001\056\001\255\255\255\255\059\001\255\255\ +\255\255\255\255\063\001\064\001\065\001\255\255\255\255\255\255\ +\069\001\255\255\071\001\072\001\255\255\255\255\255\255\076\001\ +\255\255\078\001\255\255\255\255\255\255\255\255\255\255\084\001\ +\085\001\255\255\087\001\255\255\255\255\005\001\006\001\007\001\ +\255\255\255\255\255\255\011\001\012\001\255\255\255\255\255\255\ +\101\001\255\255\255\255\255\255\255\255\106\001\107\001\255\255\ +\109\001\110\001\255\255\255\255\255\255\114\001\030\001\031\001\ +\032\001\033\001\034\001\255\255\255\255\255\255\255\255\039\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\050\001\255\255\052\001\053\001\054\001\055\001\ +\056\001\255\255\255\255\059\001\255\255\255\255\255\255\063\001\ +\064\001\065\001\255\255\255\255\255\255\069\001\255\255\071\001\ +\072\001\255\255\255\255\255\255\076\001\255\255\078\001\255\255\ 
+\255\255\255\255\255\255\255\255\084\001\085\001\255\255\087\001\ +\255\255\255\255\005\001\006\001\007\001\255\255\255\255\255\255\ +\011\001\012\001\255\255\255\255\255\255\101\001\255\255\255\255\ +\255\255\255\255\106\001\107\001\255\255\109\001\110\001\255\255\ +\255\255\255\255\114\001\030\001\031\001\032\001\033\001\034\001\ +\255\255\255\255\255\255\255\255\039\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\050\001\ +\255\255\052\001\053\001\054\001\055\001\056\001\255\255\255\255\ +\059\001\255\255\255\255\255\255\063\001\064\001\065\001\255\255\ +\255\255\255\255\069\001\255\255\071\001\072\001\255\255\255\255\ +\255\255\076\001\255\255\078\001\255\255\255\255\255\255\255\255\ +\255\255\084\001\085\001\255\255\087\001\255\255\255\255\005\001\ +\006\001\007\001\255\255\255\255\255\255\011\001\012\001\255\255\ +\255\255\255\255\101\001\255\255\255\255\255\255\255\255\106\001\ +\107\001\255\255\109\001\110\001\255\255\255\255\255\255\114\001\ +\030\001\031\001\032\001\033\001\034\001\255\255\255\255\255\255\ +\255\255\039\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\050\001\255\255\052\001\053\001\ +\054\001\055\001\056\001\255\255\255\255\059\001\255\255\255\255\ +\255\255\063\001\064\001\065\001\255\255\255\255\255\255\069\001\ +\255\255\071\001\072\001\255\255\255\255\006\001\076\001\255\255\ +\078\001\255\255\255\255\012\001\255\255\014\001\084\001\085\001\ +\017\001\087\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\027\001\255\255\255\255\030\001\031\001\101\001\ +\255\255\255\255\255\255\255\255\106\001\107\001\255\255\109\001\ +\110\001\255\255\255\255\255\255\114\001\255\255\255\255\255\255\ +\255\255\050\001\051\001\255\255\053\001\255\255\055\001\056\001\ +\255\255\255\255\059\001\255\255\255\255\255\255\255\255\064\001\ +\065\001\255\255\006\001\255\255\255\255\255\255\071\001\255\255\ +\012\001\255\255\014\001\255\255\255\255\017\001\255\255\255\255\ +\081\001\255\255\255\255\084\001\255\255\255\255\255\255\027\001\ +\089\001\255\255\030\001\031\001\255\255\006\001\255\255\255\255\ +\097\001\255\255\255\255\012\001\101\001\014\001\255\255\104\001\ +\255\255\106\001\255\255\255\255\109\001\110\001\050\001\051\001\ +\255\255\053\001\255\255\055\001\056\001\030\001\031\001\059\001\ +\255\255\255\255\255\255\255\255\064\001\065\001\255\255\255\255\ +\255\255\255\255\255\255\071\001\255\255\255\255\255\255\255\255\ +\255\255\050\001\051\001\255\255\053\001\081\001\055\001\056\001\ +\084\001\255\255\059\001\255\255\255\255\089\001\255\255\064\001\ +\065\001\255\255\255\255\255\255\255\255\097\001\071\001\255\255\ +\073\001\101\001\255\255\255\255\104\001\255\255\106\001\255\255\ +\081\001\109\001\110\001\084\001\255\255\255\255\006\001\255\255\ +\089\001\255\255\255\255\255\255\012\001\255\255\014\001\255\255\ +\097\001\255\255\255\255\255\255\101\001\255\255\255\255\104\001\ +\255\255\106\001\255\255\027\001\109\001\110\001\030\001\031\001\ +\255\255\006\001\255\255\255\255\255\255\255\255\255\255\012\001\ +\255\255\014\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\050\001\051\001\255\255\053\001\027\001\055\001\ +\056\001\030\001\031\001\059\001\255\255\255\255\255\255\255\255\ +\064\001\065\001\255\255\255\255\255\255\255\255\255\255\071\001\ +\255\255\255\255\255\255\255\255\255\255\050\001\051\001\255\255\ +\053\001\081\001\055\001\056\001\084\001\255\255\059\001\255\255\ +\255\255\089\001\255\255\064\001\065\001\255\255\006\001\255\255\ 
+\255\255\097\001\071\001\255\255\012\001\101\001\255\255\255\255\ +\104\001\255\255\106\001\255\255\081\001\109\001\110\001\084\001\ +\255\255\255\255\255\255\255\255\089\001\255\255\030\001\031\001\ +\255\255\006\001\255\255\255\255\097\001\255\255\255\255\012\001\ +\101\001\255\255\255\255\104\001\255\255\106\001\255\255\255\255\ +\109\001\110\001\050\001\051\001\255\255\053\001\255\255\055\001\ +\056\001\030\001\031\001\059\001\255\255\255\255\255\255\255\255\ +\064\001\065\001\255\255\255\255\255\255\255\255\255\255\071\001\ +\255\255\073\001\255\255\255\255\255\255\050\001\051\001\255\255\ +\053\001\081\001\055\001\056\001\084\001\255\255\059\001\255\255\ +\255\255\089\001\255\255\064\001\065\001\255\255\006\001\255\255\ +\255\255\097\001\071\001\255\255\012\001\101\001\255\255\255\255\ +\104\001\255\255\106\001\255\255\081\001\109\001\110\001\084\001\ +\255\255\255\255\255\255\255\255\089\001\255\255\030\001\031\001\ +\255\255\006\001\255\255\255\255\097\001\255\255\255\255\012\001\ +\101\001\255\255\255\255\104\001\255\255\106\001\255\255\255\255\ +\109\001\110\001\050\001\051\001\255\255\053\001\255\255\055\001\ +\056\001\030\001\031\001\059\001\255\255\255\255\255\255\255\255\ +\064\001\065\001\255\255\255\255\255\255\255\255\255\255\071\001\ +\255\255\255\255\255\255\255\255\255\255\050\001\051\001\255\255\ +\053\001\081\001\055\001\056\001\084\001\255\255\059\001\255\255\ +\255\255\089\001\255\255\064\001\065\001\255\255\006\001\255\255\ +\255\255\097\001\071\001\255\255\012\001\101\001\255\255\255\255\ +\104\001\255\255\106\001\255\255\081\001\109\001\110\001\084\001\ +\255\255\255\255\255\255\255\255\089\001\255\255\030\001\031\001\ +\255\255\255\255\255\255\255\255\097\001\255\255\255\255\255\255\ +\101\001\255\255\255\255\104\001\255\255\106\001\255\255\255\255\ +\109\001\110\001\050\001\051\001\255\255\053\001\255\255\055\001\ +\056\001\255\255\255\255\059\001\255\255\255\255\255\255\255\255\ +\064\001\065\001\255\255\255\255\006\001\255\255\255\255\071\001\ +\255\255\255\255\012\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\081\001\255\255\255\255\084\001\255\255\255\255\255\255\ +\255\255\089\001\028\001\255\255\030\001\031\001\255\255\255\255\ +\255\255\097\001\255\255\255\255\255\255\101\001\255\255\255\255\ +\104\001\255\255\106\001\255\255\255\255\109\001\110\001\255\255\ +\050\001\255\255\052\001\053\001\255\255\055\001\056\001\255\255\ +\255\255\059\001\255\255\255\255\255\255\255\255\064\001\065\001\ +\255\255\255\255\255\255\006\001\255\255\071\001\255\255\010\001\ +\255\255\012\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\084\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\028\001\092\001\030\001\031\001\255\255\255\255\097\001\ +\255\255\255\255\255\255\101\001\255\255\255\255\255\255\255\255\ +\106\001\255\255\255\255\109\001\110\001\255\255\255\255\050\001\ +\255\255\052\001\053\001\255\255\055\001\056\001\255\255\255\255\ +\059\001\255\255\255\255\255\255\255\255\064\001\065\001\255\255\ +\006\001\255\255\255\255\255\255\071\001\255\255\012\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\006\001\007\001\255\255\ +\255\255\084\001\011\001\012\001\255\255\255\255\028\001\255\255\ +\030\001\031\001\255\255\255\255\255\255\255\255\097\001\255\255\ +\255\255\255\255\101\001\255\255\255\255\030\001\031\001\106\001\ +\255\255\255\255\109\001\110\001\050\001\255\255\052\001\053\001\ +\255\255\055\001\056\001\255\255\255\255\059\001\255\255\255\255\ +\255\255\050\001\064\001\065\001\053\001\054\001\055\001\056\001\ 
+\255\255\071\001\059\001\255\255\006\001\255\255\008\001\064\001\ +\065\001\255\255\012\001\255\255\255\255\255\255\084\001\255\255\ +\255\255\255\255\255\255\076\001\255\255\255\255\092\001\255\255\ +\255\255\255\255\028\001\097\001\030\001\031\001\087\001\101\001\ +\255\255\255\255\255\255\255\255\106\001\255\255\255\255\109\001\ +\110\001\255\255\255\255\255\255\101\001\255\255\255\255\255\255\ +\050\001\106\001\052\001\053\001\109\001\055\001\056\001\255\255\ +\255\255\059\001\255\255\255\255\255\255\255\255\064\001\065\001\ +\255\255\006\001\255\255\255\255\255\255\071\001\255\255\012\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\084\001\255\255\255\255\255\255\255\255\028\001\ +\255\255\030\001\031\001\255\255\255\255\255\255\255\255\097\001\ +\255\255\255\255\255\255\101\001\255\255\255\255\255\255\255\255\ +\106\001\255\255\255\255\109\001\110\001\050\001\255\255\052\001\ +\053\001\255\255\055\001\056\001\255\255\255\255\059\001\255\255\ +\255\255\255\255\255\255\064\001\065\001\255\255\006\001\255\255\ +\255\255\255\255\071\001\255\255\012\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\084\001\ +\255\255\255\255\255\255\255\255\028\001\255\255\030\001\031\001\ +\255\255\006\001\255\255\255\255\097\001\255\255\255\255\012\001\ +\101\001\255\255\255\255\255\255\255\255\106\001\255\255\255\255\ +\109\001\110\001\050\001\255\255\052\001\053\001\255\255\055\001\ +\056\001\030\001\031\001\059\001\255\255\255\255\255\255\255\255\ +\064\001\065\001\255\255\255\255\255\255\255\255\255\255\071\001\ +\255\255\255\255\255\255\255\255\255\255\050\001\255\255\052\001\ +\053\001\255\255\055\001\056\001\084\001\255\255\059\001\255\255\ +\255\255\255\255\255\255\064\001\065\001\255\255\006\001\255\255\ +\255\255\097\001\071\001\255\255\012\001\101\001\255\255\255\255\ +\255\255\255\255\106\001\255\255\255\255\109\001\110\001\084\001\ +\255\255\255\255\255\255\255\255\028\001\255\255\030\001\031\001\ +\093\001\006\001\255\255\255\255\097\001\255\255\255\255\012\001\ +\101\001\255\255\255\255\255\255\255\255\106\001\255\255\255\255\ +\109\001\110\001\050\001\255\255\052\001\053\001\255\255\055\001\ +\056\001\030\001\031\001\059\001\255\255\255\255\255\255\255\255\ +\064\001\065\001\255\255\255\255\255\255\255\255\255\255\071\001\ +\255\255\255\255\255\255\255\255\255\255\050\001\255\255\052\001\ +\053\001\255\255\055\001\056\001\084\001\255\255\059\001\255\255\ +\255\255\255\255\255\255\064\001\065\001\255\255\006\001\255\255\ +\255\255\097\001\071\001\255\255\012\001\101\001\255\255\255\255\ +\255\255\255\255\106\001\255\255\255\255\109\001\110\001\084\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\030\001\031\001\ +\255\255\006\001\255\255\255\255\097\001\255\255\255\255\012\001\ +\101\001\255\255\255\255\255\255\255\255\106\001\255\255\255\255\ +\109\001\110\001\050\001\255\255\052\001\053\001\255\255\055\001\ +\056\001\030\001\031\001\059\001\255\255\255\255\255\255\255\255\ +\064\001\065\001\255\255\255\255\255\255\255\255\255\255\071\001\ +\255\255\255\255\255\255\255\255\255\255\050\001\255\255\052\001\ +\053\001\255\255\055\001\056\001\084\001\255\255\059\001\255\255\ +\255\255\255\255\255\255\064\001\065\001\255\255\006\001\255\255\ +\255\255\097\001\071\001\255\255\012\001\101\001\255\255\255\255\ +\255\255\255\255\106\001\255\255\255\255\109\001\110\001\084\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\030\001\031\001\ +\255\255\006\001\255\255\255\255\097\001\255\255\255\255\012\001\ 
+\101\001\255\255\255\255\255\255\255\255\106\001\255\255\255\255\ +\109\001\110\001\050\001\255\255\255\255\053\001\255\255\055\001\ +\056\001\030\001\031\001\059\001\255\255\255\255\255\255\255\255\ +\064\001\065\001\255\255\255\255\255\255\255\255\255\255\071\001\ +\255\255\255\255\255\255\255\255\255\255\050\001\255\255\255\255\ +\053\001\255\255\055\001\056\001\084\001\255\255\059\001\255\255\ +\255\255\255\255\255\255\064\001\065\001\255\255\255\255\255\255\ +\255\255\097\001\071\001\255\255\255\255\101\001\006\001\007\001\ +\255\255\255\255\106\001\011\001\012\001\109\001\110\001\084\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\022\001\255\255\ +\255\255\255\255\255\255\255\255\097\001\255\255\030\001\031\001\ +\101\001\255\255\255\255\255\255\255\255\106\001\255\255\255\255\ +\109\001\110\001\255\255\255\255\255\255\255\255\255\255\047\001\ +\255\255\255\255\050\001\051\001\255\255\053\001\054\001\055\001\ +\056\001\255\255\255\255\059\001\255\255\255\255\255\255\255\255\ +\064\001\065\001\006\001\007\001\255\255\255\255\255\255\011\001\ +\012\001\255\255\255\255\255\255\076\001\255\255\255\255\255\255\ +\255\255\081\001\255\255\255\255\255\255\255\255\255\255\087\001\ +\255\255\089\001\030\001\031\001\255\255\255\255\255\255\255\255\ +\255\255\097\001\098\001\255\255\255\255\101\001\255\255\255\255\ +\104\001\255\255\106\001\255\255\255\255\109\001\050\001\051\001\ +\255\255\053\001\054\001\055\001\056\001\255\255\255\255\059\001\ +\255\255\255\255\255\255\255\255\064\001\065\001\006\001\007\001\ +\255\255\255\255\255\255\011\001\012\001\006\001\007\001\255\255\ +\076\001\255\255\011\001\012\001\255\255\081\001\255\255\255\255\ +\255\255\255\255\255\255\087\001\255\255\089\001\030\001\031\001\ +\255\255\255\255\255\255\255\255\255\255\030\001\031\001\255\255\ +\255\255\101\001\255\255\255\255\104\001\255\255\106\001\255\255\ +\255\255\109\001\050\001\255\255\255\255\053\001\054\001\055\001\ +\056\001\050\001\255\255\059\001\053\001\054\001\055\001\056\001\ +\064\001\065\001\059\001\255\255\255\255\255\255\008\001\064\001\ +\065\001\255\255\255\255\255\255\076\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\076\001\255\255\023\001\255\255\087\001\ +\255\255\255\255\255\255\255\255\030\001\255\255\087\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\101\001\255\255\255\255\ +\255\255\255\255\106\001\255\255\101\001\109\001\255\255\255\255\ +\255\255\106\001\255\255\255\255\109\001\055\001\255\255\057\001\ +\058\001\059\001\255\255\061\001\255\255\255\255\064\001\065\001\ +\255\255\255\255\255\255\000\001\001\001\002\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\009\001\255\255\255\255\081\001\ +\255\255\014\001\015\001\016\001\017\001\018\001\088\001\089\001\ +\090\001\255\255\255\255\255\255\255\255\255\255\027\001\097\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\036\001\ +\106\001\255\255\255\255\109\001\110\001\042\001\043\001\044\001\ +\045\001\046\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\061\001\255\255\255\255\255\255\255\255\066\001\255\255\255\255\ +\255\255\255\255\071\001\072\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\082\001\083\001\084\001\ +\085\001\086\001\255\255\000\001\001\001\002\001\255\255\255\255\ +\255\255\094\001\007\001\255\255\009\001\255\255\255\255\100\001\ +\255\255\255\255\055\001\016\001\057\001\058\001\059\001\255\255\ +\061\001\255\255\255\255\064\001\065\001\255\255\027\001\255\255\ 
+\255\255\255\255\255\255\255\255\255\255\255\255\255\255\036\001\ +\255\255\255\255\255\255\255\255\081\001\042\001\043\001\044\001\ +\045\001\046\001\047\001\255\255\089\001\090\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\097\001\255\255\255\255\255\255\ +\061\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\109\001\110\001\071\001\072\001\255\255\074\001\255\255\255\255\ +\255\255\255\255\000\001\001\001\002\001\082\001\083\001\084\001\ +\085\001\086\001\087\001\009\001\255\255\255\255\255\255\255\255\ +\255\255\015\001\016\001\255\255\018\001\098\001\255\255\100\001\ +\255\255\255\255\255\255\255\255\255\255\027\001\255\255\255\255\ +\255\255\255\255\000\001\001\001\002\001\255\255\036\001\255\255\ +\255\255\255\255\255\255\009\001\042\001\043\001\044\001\045\001\ +\046\001\015\001\016\001\255\255\018\001\255\255\255\255\255\255\ +\055\001\255\255\057\001\058\001\059\001\027\001\061\001\061\001\ +\255\255\064\001\065\001\255\255\066\001\255\255\036\001\255\255\ +\255\255\071\001\072\001\255\255\042\001\043\001\044\001\045\001\ +\046\001\255\255\081\001\255\255\082\001\083\001\084\001\085\001\ +\086\001\255\255\089\001\090\001\255\255\091\001\255\255\061\001\ +\255\255\255\255\097\001\255\255\066\001\255\255\100\001\255\255\ +\255\255\071\001\072\001\255\255\255\255\255\255\109\001\110\001\ +\000\001\001\001\002\001\255\255\082\001\083\001\084\001\085\001\ +\086\001\009\001\255\255\255\255\255\255\255\255\092\001\015\001\ +\016\001\255\255\018\001\255\255\255\255\255\255\100\001\255\255\ +\255\255\255\255\255\255\027\001\255\255\255\255\255\255\255\255\ +\000\001\001\001\002\001\255\255\036\001\255\255\255\255\255\255\ +\255\255\009\001\042\001\043\001\044\001\045\001\046\001\015\001\ +\016\001\255\255\018\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\027\001\255\255\061\001\255\255\255\255\ +\255\255\255\255\066\001\255\255\036\001\255\255\255\255\071\001\ +\072\001\255\255\042\001\043\001\044\001\045\001\046\001\255\255\ +\255\255\255\255\082\001\083\001\084\001\085\001\086\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\061\001\094\001\255\255\ +\255\255\255\255\066\001\255\255\100\001\255\255\255\255\071\001\ +\072\001\255\255\255\255\255\255\255\255\255\255\000\001\001\001\ +\002\001\255\255\082\001\083\001\084\001\085\001\086\001\009\001\ +\255\255\255\255\255\255\091\001\255\255\015\001\016\001\255\255\ +\018\001\255\255\255\255\255\255\100\001\255\255\255\255\255\255\ +\255\255\027\001\255\255\255\255\255\255\255\255\000\001\001\001\ +\002\001\255\255\036\001\255\255\255\255\255\255\255\255\009\001\ +\042\001\043\001\044\001\045\001\046\001\015\001\016\001\255\255\ +\018\001\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\027\001\255\255\061\001\255\255\255\255\255\255\255\255\ +\066\001\255\255\036\001\255\255\255\255\071\001\072\001\255\255\ +\042\001\043\001\044\001\045\001\046\001\255\255\255\255\255\255\ +\082\001\083\001\084\001\085\001\086\001\255\255\255\255\255\255\ +\255\255\255\255\092\001\061\001\255\255\255\255\255\255\255\255\ +\066\001\255\255\100\001\255\255\255\255\071\001\072\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\082\001\083\001\084\001\085\001\086\001\000\001\001\001\002\001\ +\255\255\255\255\255\255\255\255\094\001\255\255\009\001\255\255\ +\255\255\255\255\100\001\255\255\015\001\016\001\255\255\018\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\027\001\255\255\255\255\255\255\255\255\000\001\001\001\002\001\ 
+\255\255\036\001\255\255\255\255\255\255\255\255\009\001\042\001\ +\043\001\044\001\045\001\046\001\015\001\016\001\255\255\018\001\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\027\001\255\255\061\001\255\255\255\255\255\255\255\255\066\001\ +\255\255\036\001\255\255\255\255\071\001\072\001\255\255\042\001\ +\043\001\044\001\045\001\046\001\255\255\255\255\255\255\082\001\ +\083\001\084\001\085\001\086\001\255\255\255\255\255\255\255\255\ +\091\001\255\255\061\001\255\255\255\255\255\255\255\255\066\001\ +\255\255\100\001\255\255\255\255\071\001\072\001\255\255\255\255\ +\255\255\255\255\255\255\000\001\001\001\002\001\255\255\082\001\ +\083\001\084\001\085\001\086\001\009\001\255\255\255\255\255\255\ +\255\255\092\001\015\001\016\001\255\255\018\001\255\255\255\255\ +\255\255\100\001\255\255\255\255\255\255\255\255\027\001\255\255\ +\255\255\255\255\255\255\000\001\001\001\002\001\255\255\036\001\ +\255\255\255\255\255\255\255\255\009\001\042\001\043\001\044\001\ +\045\001\046\001\015\001\016\001\255\255\018\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\027\001\255\255\ +\061\001\255\255\255\255\255\255\255\255\066\001\255\255\036\001\ +\255\255\255\255\071\001\072\001\255\255\042\001\043\001\044\001\ +\045\001\046\001\255\255\255\255\255\255\082\001\083\001\084\001\ +\085\001\086\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\061\001\094\001\255\255\255\255\255\255\066\001\255\255\100\001\ +\255\255\255\255\071\001\072\001\255\255\255\255\255\255\255\255\ +\255\255\000\001\001\001\002\001\255\255\082\001\083\001\084\001\ +\085\001\086\001\009\001\255\255\255\255\255\255\091\001\255\255\ +\015\001\016\001\255\255\018\001\255\255\255\255\255\255\100\001\ +\255\255\255\255\255\255\255\255\027\001\255\255\255\255\255\255\ +\255\255\000\001\001\001\002\001\255\255\036\001\255\255\255\255\ +\255\255\255\255\009\001\042\001\043\001\044\001\045\001\046\001\ +\015\001\016\001\255\255\018\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\027\001\255\255\061\001\255\255\ +\255\255\255\255\255\255\066\001\255\255\036\001\255\255\255\255\ +\071\001\072\001\255\255\042\001\043\001\044\001\045\001\046\001\ +\255\255\255\255\255\255\082\001\083\001\084\001\085\001\086\001\ +\255\255\255\255\255\255\255\255\255\255\092\001\061\001\001\001\ +\002\001\255\255\255\255\066\001\255\255\100\001\255\255\009\001\ +\071\001\072\001\255\255\255\255\255\255\015\001\016\001\255\255\ +\018\001\255\255\255\255\082\001\083\001\084\001\085\001\086\001\ +\255\255\027\001\255\255\255\255\255\255\255\255\255\255\094\001\ +\255\255\255\255\036\001\255\255\255\255\100\001\255\255\255\255\ +\042\001\043\001\044\001\045\001\046\001\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\061\001\001\001\002\001\255\255\255\255\ +\066\001\255\255\255\255\255\255\009\001\071\001\072\001\255\255\ +\255\255\255\255\015\001\016\001\255\255\018\001\255\255\255\255\ +\082\001\083\001\084\001\085\001\086\001\255\255\027\001\255\255\ +\255\255\255\255\255\255\255\255\255\255\095\001\255\255\036\001\ +\255\255\255\255\100\001\255\255\255\255\042\001\043\001\044\001\ +\045\001\046\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\061\001\001\001\002\001\255\255\255\255\066\001\255\255\255\255\ +\255\255\009\001\071\001\072\001\255\255\255\255\255\255\015\001\ +\016\001\255\255\018\001\255\255\255\255\082\001\083\001\084\001\ 
+\085\001\086\001\255\255\027\001\255\255\255\255\091\001\255\255\ +\255\255\001\001\002\001\255\255\036\001\255\255\255\255\100\001\ +\255\255\009\001\042\001\043\001\044\001\045\001\046\001\015\001\ +\016\001\255\255\018\001\255\255\255\255\255\255\255\255\255\255\ +\255\255\025\001\255\255\027\001\255\255\061\001\255\255\255\255\ +\255\255\255\255\066\001\255\255\036\001\255\255\255\255\071\001\ +\072\001\255\255\042\001\043\001\044\001\045\001\046\001\255\255\ +\255\255\255\255\082\001\083\001\084\001\085\001\086\001\255\255\ +\255\255\255\255\255\255\091\001\255\255\061\001\001\001\002\001\ +\255\255\255\255\066\001\255\255\100\001\255\255\009\001\071\001\ +\072\001\255\255\255\255\255\255\015\001\016\001\255\255\018\001\ +\255\255\255\255\082\001\083\001\084\001\085\001\086\001\255\255\ +\027\001\255\255\255\255\255\255\255\255\255\255\001\001\002\001\ +\255\255\036\001\255\255\255\255\100\001\255\255\009\001\042\001\ +\043\001\044\001\045\001\046\001\015\001\016\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\027\001\255\255\061\001\255\255\255\255\255\255\255\255\066\001\ +\255\255\036\001\255\255\255\255\071\001\072\001\255\255\042\001\ +\043\001\044\001\045\001\046\001\255\255\255\255\255\255\082\001\ +\083\001\084\001\085\001\086\001\255\255\255\255\255\255\255\255\ +\255\255\255\255\061\001\001\001\002\001\255\255\255\255\066\001\ +\255\255\100\001\255\255\009\001\071\001\072\001\255\255\255\255\ +\255\255\015\001\255\255\255\255\255\255\255\255\255\255\082\001\ +\083\001\084\001\085\001\086\001\255\255\027\001\255\255\255\255\ +\091\001\255\255\255\255\001\001\002\001\255\255\036\001\255\255\ +\255\255\100\001\255\255\255\255\042\001\043\001\044\001\045\001\ +\046\001\015\001\255\255\255\255\255\255\255\255\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\027\001\255\255\061\001\ +\255\255\255\255\255\255\255\255\066\001\255\255\036\001\255\255\ +\255\255\071\001\072\001\255\255\042\001\043\001\044\001\045\001\ +\046\001\013\001\255\255\255\255\082\001\083\001\084\001\085\001\ +\086\001\255\255\255\255\255\255\255\255\255\255\255\255\061\001\ +\028\001\029\001\255\255\255\255\066\001\255\255\100\001\255\255\ +\255\255\071\001\072\001\255\255\255\255\041\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\083\001\084\001\085\001\ +\086\001\255\255\255\255\055\001\255\255\057\001\058\001\059\001\ +\060\001\061\001\255\255\255\255\064\001\065\001\100\001\255\255\ +\068\001\255\255\255\255\255\255\255\255\255\255\074\001\255\255\ +\255\255\255\255\255\255\255\255\080\001\081\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\089\001\090\001\255\255\ +\255\255\255\255\255\255\255\255\096\001\097\001\255\255\255\255\ +\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ +\108\001\109\001\110\001\111\001" + +let yynames_const = "\ + AMPERAMPER\000\ + AMPERSAND\000\ + AND\000\ + AS\000\ + ASSERT\000\ + BACKQUOTE\000\ + BANG\000\ + BAR\000\ + BARBAR\000\ + BARRBRACKET\000\ + BEGIN\000\ + CLASS\000\ + COLON\000\ + COLONCOLON\000\ + COLONEQUAL\000\ + COLONGREATER\000\ + COMMA\000\ + CONSTRAINT\000\ + DO\000\ + DONE\000\ + DOT\000\ + DOTDOT\000\ + DOWNTO\000\ + ELSE\000\ + END\000\ + EOF\000\ + EQUAL\000\ + EXCEPTION\000\ + EXTERNAL\000\ + FALSE\000\ + FOR\000\ + FUN\000\ + FUNCTION\000\ + FUNCTOR\000\ + GREATER\000\ + GREATERRBRACE\000\ + GREATERRBRACKET\000\ + IF\000\ + IN\000\ + INCLUDE\000\ + INHERIT\000\ + INITIALIZER\000\ + LAZY\000\ + LBRACE\000\ + LBRACELESS\000\ + LBRACKET\000\ + 
LBRACKETBAR\000\ + LBRACKETLESS\000\ + LBRACKETGREATER\000\ + LBRACKETPERCENT\000\ + LBRACKETPERCENTPERCENT\000\ + LESS\000\ + LESSMINUS\000\ + LET\000\ + LPAREN\000\ + LBRACKETAT\000\ + LBRACKETATAT\000\ + LBRACKETATATAT\000\ + MATCH\000\ + METHOD\000\ + MINUS\000\ + MINUSDOT\000\ + MINUSGREATER\000\ + MODULE\000\ + MUTABLE\000\ + NEW\000\ + NONREC\000\ + OBJECT\000\ + OF\000\ + OPEN\000\ + OR\000\ + PERCENT\000\ + PLUS\000\ + PLUSDOT\000\ + PLUSEQ\000\ + PRIVATE\000\ + QUESTION\000\ + QUOTE\000\ + RBRACE\000\ + RBRACKET\000\ + REC\000\ + RPAREN\000\ + SEMI\000\ + SEMISEMI\000\ + HASH\000\ + SIG\000\ + STAR\000\ + STRUCT\000\ + THEN\000\ + TILDE\000\ + TO\000\ + TRUE\000\ + TRY\000\ + TYPE\000\ + UNDERSCORE\000\ + VAL\000\ + VIRTUAL\000\ + WHEN\000\ + WHILE\000\ + WITH\000\ + EOL\000\ + " + +let yynames_block = "\ + CHAR\000\ + FLOAT\000\ + INFIXOP0\000\ + INFIXOP1\000\ + INFIXOP2\000\ + INFIXOP3\000\ + INFIXOP4\000\ + DOTOP\000\ + INT\000\ + LABEL\000\ + LIDENT\000\ + OPTLABEL\000\ + PREFIXOP\000\ + HASHOP\000\ + STRING\000\ + UIDENT\000\ + COMMENT\000\ + DOCSTRING\000\ + " + +let yyact = [| + (fun _ -> failwith "parser") +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'structure) in + Obj.repr( +# 630 "parsing/parser.mly" + ( extra_str 1 _1 ) +# 7030 "parsing/parser.ml" + : Parsetree.structure)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'signature) in + Obj.repr( +# 633 "parsing/parser.mly" + ( extra_sig 1 _1 ) +# 7037 "parsing/parser.ml" + : Parsetree.signature)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'top_structure) in + Obj.repr( +# 636 "parsing/parser.mly" + ( Ptop_def (extra_str 1 _1) ) +# 7044 "parsing/parser.ml" + : Parsetree.toplevel_phrase)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'toplevel_directive) in + Obj.repr( +# 637 "parsing/parser.mly" + ( _1 ) +# 7051 "parsing/parser.ml" + : Parsetree.toplevel_phrase)) +; (fun __caml_parser_env -> + Obj.repr( +# 638 "parsing/parser.mly" + ( raise End_of_file ) +# 7057 "parsing/parser.ml" + : Parsetree.toplevel_phrase)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'seq_expr) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 642 "parsing/parser.mly" + ( (text_str 1) @ [mkstrexp _1 _2] ) +# 7065 "parsing/parser.ml" + : 'top_structure)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'top_structure_tail) in + Obj.repr( +# 644 "parsing/parser.mly" + ( _1 ) +# 7072 "parsing/parser.ml" + : 'top_structure)) +; (fun __caml_parser_env -> + Obj.repr( +# 647 "parsing/parser.mly" + ( [] ) +# 7078 "parsing/parser.ml" + : 'top_structure_tail)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'structure_item) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'top_structure_tail) in + Obj.repr( +# 648 "parsing/parser.mly" + ( (text_str 1) @ _1 :: _2 ) +# 7086 "parsing/parser.ml" + : 'top_structure_tail)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'use_file_body) in + Obj.repr( +# 651 "parsing/parser.mly" + ( extra_def 1 _1 ) +# 7093 "parsing/parser.ml" + : Parsetree.toplevel_phrase list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'use_file_tail) in + Obj.repr( +# 654 "parsing/parser.mly" + ( _1 ) +# 7100 "parsing/parser.ml" + : 'use_file_body)) +; (fun __caml_parser_env -> + let _1 = 
(Parsing.peek_val __caml_parser_env 2 : 'seq_expr) in + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'post_item_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'use_file_tail) in + Obj.repr( +# 656 "parsing/parser.mly" + ( (text_def 1) @ Ptop_def[mkstrexp _1 _2] :: _3 ) +# 7109 "parsing/parser.ml" + : 'use_file_body)) +; (fun __caml_parser_env -> + Obj.repr( +# 660 "parsing/parser.mly" + ( [] ) +# 7115 "parsing/parser.ml" + : 'use_file_tail)) +; (fun __caml_parser_env -> + Obj.repr( +# 662 "parsing/parser.mly" + ( text_def 1 ) +# 7121 "parsing/parser.ml" + : 'use_file_tail)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'seq_expr) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'post_item_attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'use_file_tail) in + Obj.repr( +# 664 "parsing/parser.mly" + ( mark_rhs_docs 2 3; + (text_def 1) @ (text_def 2) @ Ptop_def[mkstrexp _2 _3] :: _4 ) +# 7131 "parsing/parser.ml" + : 'use_file_tail)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'structure_item) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'use_file_tail) in + Obj.repr( +# 667 "parsing/parser.mly" + ( (text_def 1) @ (text_def 2) @ Ptop_def[_2] :: _3 ) +# 7139 "parsing/parser.ml" + : 'use_file_tail)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'toplevel_directive) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'use_file_tail) in + Obj.repr( +# 669 "parsing/parser.mly" + ( mark_rhs_docs 2 3; + (text_def 1) @ (text_def 2) @ _2 :: _3 ) +# 7148 "parsing/parser.ml" + : 'use_file_tail)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'structure_item) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'use_file_tail) in + Obj.repr( +# 672 "parsing/parser.mly" + ( (text_def 1) @ Ptop_def[_1] :: _2 ) +# 7156 "parsing/parser.ml" + : 'use_file_tail)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'toplevel_directive) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'use_file_tail) in + Obj.repr( +# 674 "parsing/parser.mly" + ( mark_rhs_docs 1 1; + (text_def 1) @ _1 :: _2 ) +# 7165 "parsing/parser.ml" + : 'use_file_tail)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'core_type) in + Obj.repr( +# 678 "parsing/parser.mly" + ( _1 ) +# 7172 "parsing/parser.ml" + : Parsetree.core_type)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'seq_expr) in + Obj.repr( +# 681 "parsing/parser.mly" + ( _1 ) +# 7179 "parsing/parser.ml" + : Parsetree.expression)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'pattern) in + Obj.repr( +# 684 "parsing/parser.mly" + ( _1 ) +# 7186 "parsing/parser.ml" + : Parsetree.pattern)) +; (fun __caml_parser_env -> + Obj.repr( +# 691 "parsing/parser.mly" + ( mkrhs "*" 2, None ) +# 7192 "parsing/parser.ml" + : 'functor_arg)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'functor_arg_name) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'module_type) in + Obj.repr( +# 693 "parsing/parser.mly" + ( mkrhs _2 2, Some _4 ) +# 7200 "parsing/parser.ml" + : 'functor_arg)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 697 "parsing/parser.mly" + ( _1 ) +# 7207 "parsing/parser.ml" + : 'functor_arg_name)) +; (fun __caml_parser_env -> + Obj.repr( +# 698 "parsing/parser.mly" + 
( "_" ) +# 7213 "parsing/parser.ml" + : 'functor_arg_name)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'functor_args) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'functor_arg) in + Obj.repr( +# 703 "parsing/parser.mly" + ( _2 :: _1 ) +# 7221 "parsing/parser.ml" + : 'functor_args)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'functor_arg) in + Obj.repr( +# 705 "parsing/parser.mly" + ( [ _1 ] ) +# 7228 "parsing/parser.ml" + : 'functor_args)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'mod_longident) in + Obj.repr( +# 710 "parsing/parser.mly" + ( mkmod(Pmod_ident (mkrhs _1 1)) ) +# 7235 "parsing/parser.ml" + : 'module_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'structure) in + Obj.repr( +# 712 "parsing/parser.mly" + ( mkmod ~attrs:_2 (Pmod_structure(extra_str 3 _3)) ) +# 7243 "parsing/parser.ml" + : 'module_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'structure) in + Obj.repr( +# 714 "parsing/parser.mly" + ( unclosed "struct" 1 "end" 4 ) +# 7251 "parsing/parser.ml" + : 'module_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'functor_args) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'module_expr) in + Obj.repr( +# 716 "parsing/parser.mly" + ( let modexp = + List.fold_left + (fun acc (n, t) -> mkmod(Pmod_functor(n, t, acc))) + _5 _3 + in wrap_mod_attrs modexp _2 ) +# 7264 "parsing/parser.ml" + : 'module_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'module_expr) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'paren_module_expr) in + Obj.repr( +# 722 "parsing/parser.mly" + ( mkmod(Pmod_apply(_1, _2)) ) +# 7272 "parsing/parser.ml" + : 'module_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'module_expr) in + Obj.repr( +# 724 "parsing/parser.mly" + ( mkmod(Pmod_apply(_1, mkmod (Pmod_structure []))) ) +# 7279 "parsing/parser.ml" + : 'module_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'paren_module_expr) in + Obj.repr( +# 726 "parsing/parser.mly" + ( _1 ) +# 7286 "parsing/parser.ml" + : 'module_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'module_expr) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'attribute) in + Obj.repr( +# 728 "parsing/parser.mly" + ( Mod.attr _1 _2 ) +# 7294 "parsing/parser.ml" + : 'module_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'extension) in + Obj.repr( +# 730 "parsing/parser.mly" + ( mkmod(Pmod_extension _1) ) +# 7301 "parsing/parser.ml" + : 'module_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'module_expr) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'module_type) in + Obj.repr( +# 735 "parsing/parser.mly" + ( mkmod(Pmod_constraint(_2, _4)) ) +# 7309 "parsing/parser.ml" + : 'paren_module_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'module_expr) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'module_type) in + Obj.repr( +# 737 "parsing/parser.mly" + ( unclosed "(" 1 ")" 5 ) +# 7317 "parsing/parser.ml" + : 
'paren_module_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'module_expr) in + Obj.repr( +# 739 "parsing/parser.mly" + ( _2 ) +# 7324 "parsing/parser.ml" + : 'paren_module_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'module_expr) in + Obj.repr( +# 741 "parsing/parser.mly" + ( unclosed "(" 1 ")" 3 ) +# 7331 "parsing/parser.ml" + : 'paren_module_expr)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in + Obj.repr( +# 743 "parsing/parser.mly" + ( mkmod ~attrs:_3 (Pmod_unpack _4)) +# 7339 "parsing/parser.ml" + : 'paren_module_expr)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : 'expr) in + let _6 = (Parsing.peek_val __caml_parser_env 1 : 'package_type) in + Obj.repr( +# 745 "parsing/parser.mly" + ( mkmod ~attrs:_3 + (Pmod_unpack( + ghexp(Pexp_constraint(_4, ghtyp(Ptyp_package _6))))) ) +# 7350 "parsing/parser.ml" + : 'paren_module_expr)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 6 : 'attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 5 : 'expr) in + let _6 = (Parsing.peek_val __caml_parser_env 3 : 'package_type) in + let _8 = (Parsing.peek_val __caml_parser_env 1 : 'package_type) in + Obj.repr( +# 750 "parsing/parser.mly" + ( mkmod ~attrs:_3 + (Pmod_unpack( + ghexp(Pexp_coerce(_4, Some(ghtyp(Ptyp_package _6)), + ghtyp(Ptyp_package _8))))) ) +# 7363 "parsing/parser.ml" + : 'paren_module_expr)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : 'expr) in + let _6 = (Parsing.peek_val __caml_parser_env 1 : 'package_type) in + Obj.repr( +# 755 "parsing/parser.mly" + ( mkmod ~attrs:_3 + (Pmod_unpack( + ghexp(Pexp_coerce(_4, None, ghtyp(Ptyp_package _6))))) ) +# 7374 "parsing/parser.ml" + : 'paren_module_expr)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 3 : 'attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + Obj.repr( +# 759 "parsing/parser.mly" + ( unclosed "(" 1 ")" 6 ) +# 7382 "parsing/parser.ml" + : 'paren_module_expr)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 3 : 'attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + Obj.repr( +# 761 "parsing/parser.mly" + ( unclosed "(" 1 ")" 6 ) +# 7390 "parsing/parser.ml" + : 'paren_module_expr)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in + Obj.repr( +# 763 "parsing/parser.mly" + ( unclosed "(" 1 ")" 5 ) +# 7398 "parsing/parser.ml" + : 'paren_module_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'seq_expr) in + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'post_item_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'structure_tail) in + Obj.repr( +# 768 "parsing/parser.mly" + ( mark_rhs_docs 1 2; + (text_str 1) @ mkstrexp _1 _2 :: _3 ) +# 7408 "parsing/parser.ml" + : 'structure)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'structure_tail) in + Obj.repr( +# 770 "parsing/parser.mly" + ( _1 ) +# 7415 "parsing/parser.ml" + : 'structure)) +; (fun __caml_parser_env -> + Obj.repr( +# 773 "parsing/parser.mly" + ( [] ) 
+# 7421 "parsing/parser.ml" + : 'structure_tail)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'structure) in + Obj.repr( +# 774 "parsing/parser.mly" + ( (text_str 1) @ _2 ) +# 7428 "parsing/parser.ml" + : 'structure_tail)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'structure_item) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'structure_tail) in + Obj.repr( +# 775 "parsing/parser.mly" + ( (text_str 1) @ _1 :: _2 ) +# 7436 "parsing/parser.ml" + : 'structure_tail)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'let_bindings) in + Obj.repr( +# 779 "parsing/parser.mly" + ( val_of_let_bindings _1 ) +# 7443 "parsing/parser.ml" + : 'structure_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'primitive_declaration) in + Obj.repr( +# 781 "parsing/parser.mly" + ( let (body, ext) = _1 in mkstr_ext (Pstr_primitive body) ext ) +# 7450 "parsing/parser.ml" + : 'structure_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'value_description) in + Obj.repr( +# 783 "parsing/parser.mly" + ( let (body, ext) = _1 in mkstr_ext (Pstr_primitive body) ext ) +# 7457 "parsing/parser.ml" + : 'structure_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'type_declarations) in + Obj.repr( +# 785 "parsing/parser.mly" + ( let (nr, l, ext ) = _1 in mkstr_ext (Pstr_type (nr, List.rev l)) ext ) +# 7464 "parsing/parser.ml" + : 'structure_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'str_type_extension) in + Obj.repr( +# 787 "parsing/parser.mly" + ( let (l, ext) = _1 in mkstr_ext (Pstr_typext l) ext ) +# 7471 "parsing/parser.ml" + : 'structure_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'str_exception_declaration) in + Obj.repr( +# 789 "parsing/parser.mly" + ( let (l, ext) = _1 in mkstr_ext (Pstr_exception l) ext ) +# 7478 "parsing/parser.ml" + : 'structure_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'module_binding) in + Obj.repr( +# 791 "parsing/parser.mly" + ( let (body, ext) = _1 in mkstr_ext (Pstr_module body) ext ) +# 7485 "parsing/parser.ml" + : 'structure_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'rec_module_bindings) in + Obj.repr( +# 793 "parsing/parser.mly" + ( let (l, ext) = _1 in mkstr_ext (Pstr_recmodule(List.rev l)) ext ) +# 7492 "parsing/parser.ml" + : 'structure_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'module_type_declaration) in + Obj.repr( +# 795 "parsing/parser.mly" + ( let (body, ext) = _1 in mkstr_ext (Pstr_modtype body) ext ) +# 7499 "parsing/parser.ml" + : 'structure_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'open_statement) in + Obj.repr( +# 797 "parsing/parser.mly" + ( let (body, ext) = _1 in mkstr_ext (Pstr_open body) ext ) +# 7506 "parsing/parser.ml" + : 'structure_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'class_declarations) in + Obj.repr( +# 799 "parsing/parser.mly" + ( let (l, ext) = _1 in mkstr_ext (Pstr_class (List.rev l)) ext ) +# 7513 "parsing/parser.ml" + : 'structure_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'class_type_declarations) in + Obj.repr( +# 801 "parsing/parser.mly" + ( let (l, ext) = _1 in mkstr_ext 
(Pstr_class_type (List.rev l)) ext ) +# 7520 "parsing/parser.ml" + : 'structure_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'str_include_statement) in + Obj.repr( +# 803 "parsing/parser.mly" + ( let (body, ext) = _1 in mkstr_ext (Pstr_include body) ext ) +# 7527 "parsing/parser.ml" + : 'structure_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'item_extension) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 805 "parsing/parser.mly" + ( mkstr(Pstr_extension (_1, (add_docs_attrs (symbol_docs ()) _2))) ) +# 7535 "parsing/parser.ml" + : 'structure_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'floating_attribute) in + Obj.repr( +# 807 "parsing/parser.mly" + ( mark_symbol_docs (); + mkstr(Pstr_attribute _1) ) +# 7543 "parsing/parser.ml" + : 'structure_item)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'module_expr) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 812 "parsing/parser.mly" + ( let (ext, attrs) = _2 in + Incl.mk _3 ~attrs:(attrs@_4) + ~loc:(symbol_rloc()) ~docs:(symbol_docs ()) + , ext ) +# 7555 "parsing/parser.ml" + : 'str_include_statement)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'module_expr) in + Obj.repr( +# 819 "parsing/parser.mly" + ( _2 ) +# 7562 "parsing/parser.ml" + : 'module_binding_body)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'module_type) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'module_expr) in + Obj.repr( +# 821 "parsing/parser.mly" + ( mkmod(Pmod_constraint(_4, _2)) ) +# 7570 "parsing/parser.ml" + : 'module_binding_body)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'functor_arg) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'module_binding_body) in + Obj.repr( +# 823 "parsing/parser.mly" + ( mkmod(Pmod_functor(fst _1, snd _1, _2)) ) +# 7578 "parsing/parser.ml" + : 'module_binding_body)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 2 : string) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'module_binding_body) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 827 "parsing/parser.mly" + ( let (ext, attrs) = _2 in + Mb.mk (mkrhs _3 3) _4 ~attrs:(attrs@_5) + ~loc:(symbol_rloc ()) ~docs:(symbol_docs ()) + , ext ) +# 7591 "parsing/parser.ml" + : 'module_binding)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'rec_module_binding) in + Obj.repr( +# 833 "parsing/parser.mly" + ( let (b, ext) = _1 in ([b], ext) ) +# 7598 "parsing/parser.ml" + : 'rec_module_bindings)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'rec_module_bindings) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'and_module_binding) in + Obj.repr( +# 835 "parsing/parser.mly" + ( let (l, ext) = _1 in (_2 :: l, ext) ) +# 7606 "parsing/parser.ml" + : 'rec_module_bindings)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 4 : 'ext_attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 2 : string) in + let _5 = (Parsing.peek_val __caml_parser_env 1 : 'module_binding_body) in + let _6 = 
(Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 839 "parsing/parser.mly" + ( let (ext, attrs) = _2 in + Mb.mk (mkrhs _4 4) _5 ~attrs:(attrs@_6) + ~loc:(symbol_rloc ()) ~docs:(symbol_docs ()) + , ext ) +# 7619 "parsing/parser.ml" + : 'rec_module_binding)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 2 : string) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'module_binding_body) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 846 "parsing/parser.mly" + ( Mb.mk (mkrhs _3 3) _4 ~attrs:(_2@_5) ~loc:(symbol_rloc ()) + ~text:(symbol_text ()) ~docs:(symbol_docs ()) ) +# 7630 "parsing/parser.ml" + : 'and_module_binding)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'mty_longident) in + Obj.repr( +# 854 "parsing/parser.mly" + ( mkmty(Pmty_ident (mkrhs _1 1)) ) +# 7637 "parsing/parser.ml" + : 'module_type)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'signature) in + Obj.repr( +# 856 "parsing/parser.mly" + ( mkmty ~attrs:_2 (Pmty_signature (extra_sig 3 _3)) ) +# 7645 "parsing/parser.ml" + : 'module_type)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'signature) in + Obj.repr( +# 858 "parsing/parser.mly" + ( unclosed "sig" 1 "end" 4 ) +# 7653 "parsing/parser.ml" + : 'module_type)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'functor_args) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'module_type) in + Obj.repr( +# 861 "parsing/parser.mly" + ( let mty = + List.fold_left + (fun acc (n, t) -> mkmty(Pmty_functor(n, t, acc))) + _5 _3 + in wrap_mty_attrs mty _2 ) +# 7666 "parsing/parser.ml" + : 'module_type)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'module_type) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'module_type) in + Obj.repr( +# 868 "parsing/parser.mly" + ( mkmty(Pmty_functor(mknoloc "_", Some _1, _3)) ) +# 7674 "parsing/parser.ml" + : 'module_type)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'module_type) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'with_constraints) in + Obj.repr( +# 870 "parsing/parser.mly" + ( mkmty(Pmty_with(_1, List.rev _3)) ) +# 7682 "parsing/parser.ml" + : 'module_type)) +; (fun __caml_parser_env -> + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'attributes) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'module_expr) in + Obj.repr( +# 872 "parsing/parser.mly" + ( mkmty ~attrs:_4 (Pmty_typeof _5) ) +# 7690 "parsing/parser.ml" + : 'module_type)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'module_type) in + Obj.repr( +# 876 "parsing/parser.mly" + ( _2 ) +# 7697 "parsing/parser.ml" + : 'module_type)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'module_type) in + Obj.repr( +# 878 "parsing/parser.mly" + ( unclosed "(" 1 ")" 3 ) +# 7704 "parsing/parser.ml" + : 'module_type)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'extension) in + Obj.repr( +# 880 "parsing/parser.mly" + ( mkmty(Pmty_extension _1) ) +# 7711 "parsing/parser.ml" + : 
'module_type)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'module_type) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'attribute) in + Obj.repr( +# 882 "parsing/parser.mly" + ( Mty.attr _1 _2 ) +# 7719 "parsing/parser.ml" + : 'module_type)) +; (fun __caml_parser_env -> + Obj.repr( +# 885 "parsing/parser.mly" + ( [] ) +# 7725 "parsing/parser.ml" + : 'signature)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'signature) in + Obj.repr( +# 886 "parsing/parser.mly" + ( (text_sig 1) @ _2 ) +# 7732 "parsing/parser.ml" + : 'signature)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'signature_item) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'signature) in + Obj.repr( +# 887 "parsing/parser.mly" + ( (text_sig 1) @ _1 :: _2 ) +# 7740 "parsing/parser.ml" + : 'signature)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'value_description) in + Obj.repr( +# 891 "parsing/parser.mly" + ( let (body, ext) = _1 in mksig_ext (Psig_value body) ext ) +# 7747 "parsing/parser.ml" + : 'signature_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'primitive_declaration) in + Obj.repr( +# 893 "parsing/parser.mly" + ( let (body, ext) = _1 in mksig_ext (Psig_value body) ext) +# 7754 "parsing/parser.ml" + : 'signature_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'type_declarations) in + Obj.repr( +# 895 "parsing/parser.mly" + ( let (nr, l, ext) = _1 in mksig_ext (Psig_type (nr, List.rev l)) ext ) +# 7761 "parsing/parser.ml" + : 'signature_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'sig_type_extension) in + Obj.repr( +# 897 "parsing/parser.mly" + ( let (l, ext) = _1 in mksig_ext (Psig_typext l) ext ) +# 7768 "parsing/parser.ml" + : 'signature_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'sig_exception_declaration) in + Obj.repr( +# 899 "parsing/parser.mly" + ( let (l, ext) = _1 in mksig_ext (Psig_exception l) ext ) +# 7775 "parsing/parser.ml" + : 'signature_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'module_declaration) in + Obj.repr( +# 901 "parsing/parser.mly" + ( let (body, ext) = _1 in mksig_ext (Psig_module body) ext ) +# 7782 "parsing/parser.ml" + : 'signature_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'module_alias) in + Obj.repr( +# 903 "parsing/parser.mly" + ( let (body, ext) = _1 in mksig_ext (Psig_module body) ext ) +# 7789 "parsing/parser.ml" + : 'signature_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'rec_module_declarations) in + Obj.repr( +# 905 "parsing/parser.mly" + ( let (l, ext) = _1 in mksig_ext (Psig_recmodule (List.rev l)) ext ) +# 7796 "parsing/parser.ml" + : 'signature_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'module_type_declaration) in + Obj.repr( +# 907 "parsing/parser.mly" + ( let (body, ext) = _1 in mksig_ext (Psig_modtype body) ext ) +# 7803 "parsing/parser.ml" + : 'signature_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'open_statement) in + Obj.repr( +# 909 "parsing/parser.mly" + ( let (body, ext) = _1 in mksig_ext (Psig_open body) ext ) +# 7810 "parsing/parser.ml" + : 'signature_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 
0 : 'sig_include_statement) in + Obj.repr( +# 911 "parsing/parser.mly" + ( let (body, ext) = _1 in mksig_ext (Psig_include body) ext ) +# 7817 "parsing/parser.ml" + : 'signature_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'class_descriptions) in + Obj.repr( +# 913 "parsing/parser.mly" + ( let (l, ext) = _1 in mksig_ext (Psig_class (List.rev l)) ext ) +# 7824 "parsing/parser.ml" + : 'signature_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'class_type_declarations) in + Obj.repr( +# 915 "parsing/parser.mly" + ( let (l, ext) = _1 in mksig_ext (Psig_class_type (List.rev l)) ext ) +# 7831 "parsing/parser.ml" + : 'signature_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'item_extension) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 917 "parsing/parser.mly" + ( mksig(Psig_extension (_1, (add_docs_attrs (symbol_docs ()) _2))) ) +# 7839 "parsing/parser.ml" + : 'signature_item)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'floating_attribute) in + Obj.repr( +# 919 "parsing/parser.mly" + ( mark_symbol_docs (); + mksig(Psig_attribute _1) ) +# 7847 "parsing/parser.ml" + : 'signature_item)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'override_flag) in + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'ext_attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'mod_longident) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 924 "parsing/parser.mly" + ( let (ext, attrs) = _3 in + Opn.mk (mkrhs _4 4) ~override:_2 ~attrs:(attrs@_5) + ~loc:(symbol_rloc()) ~docs:(symbol_docs ()) + , ext) +# 7860 "parsing/parser.ml" + : 'open_statement)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'module_type) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 931 "parsing/parser.mly" + ( let (ext, attrs) = _2 in + Incl.mk _3 ~attrs:(attrs@_4) + ~loc:(symbol_rloc()) ~docs:(symbol_docs ()) + , ext) +# 7872 "parsing/parser.ml" + : 'sig_include_statement)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'module_type) in + Obj.repr( +# 938 "parsing/parser.mly" + ( _2 ) +# 7879 "parsing/parser.ml" + : 'module_declaration_body)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 4 : string) in + let _4 = (Parsing.peek_val __caml_parser_env 2 : 'module_type) in + let _6 = (Parsing.peek_val __caml_parser_env 0 : 'module_declaration_body) in + Obj.repr( +# 940 "parsing/parser.mly" + ( mkmty(Pmty_functor(mkrhs _2 2, Some _4, _6)) ) +# 7888 "parsing/parser.ml" + : 'module_declaration_body)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'module_declaration_body) in + Obj.repr( +# 942 "parsing/parser.mly" + ( mkmty(Pmty_functor(mkrhs "*" 1, None, _3)) ) +# 7895 "parsing/parser.ml" + : 'module_declaration_body)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 2 : string) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'module_declaration_body) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 946 "parsing/parser.mly" + ( let (ext, attrs) = _2 in + Md.mk 
(mkrhs _3 3) _4 ~attrs:(attrs@_5) + ~loc:(symbol_rloc()) ~docs:(symbol_docs ()) + , ext ) +# 7908 "parsing/parser.ml" + : 'module_declaration)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 4 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _5 = (Parsing.peek_val __caml_parser_env 1 : 'mod_longident) in + let _6 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 953 "parsing/parser.mly" + ( let (ext, attrs) = _2 in + Md.mk (mkrhs _3 3) + (Mty.alias ~loc:(rhs_loc 5) (mkrhs _5 5)) ~attrs:(attrs@_6) + ~loc:(symbol_rloc()) ~docs:(symbol_docs ()) + , ext ) +# 7922 "parsing/parser.ml" + : 'module_alias)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'rec_module_declaration) in + Obj.repr( +# 961 "parsing/parser.mly" + ( let (body, ext) = _1 in ([body], ext) ) +# 7929 "parsing/parser.ml" + : 'rec_module_declarations)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'rec_module_declarations) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'and_module_declaration) in + Obj.repr( +# 963 "parsing/parser.mly" + ( let (l, ext) = _1 in (_2 :: l, ext) ) +# 7937 "parsing/parser.ml" + : 'rec_module_declarations)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 5 : 'ext_attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _6 = (Parsing.peek_val __caml_parser_env 1 : 'module_type) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 967 "parsing/parser.mly" + ( let (ext, attrs) = _2 in + Md.mk (mkrhs _4 4) _6 ~attrs:(attrs@_7) + ~loc:(symbol_rloc()) ~docs:(symbol_docs ()) + , ext) +# 7950 "parsing/parser.ml" + : 'rec_module_declaration)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 4 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _5 = (Parsing.peek_val __caml_parser_env 1 : 'module_type) in + let _6 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 974 "parsing/parser.mly" + ( Md.mk (mkrhs _3 3) _5 ~attrs:(_2@_6) ~loc:(symbol_rloc()) + ~text:(symbol_text()) ~docs:(symbol_docs()) ) +# 7961 "parsing/parser.ml" + : 'and_module_declaration)) +; (fun __caml_parser_env -> + Obj.repr( +# 978 "parsing/parser.mly" + ( None ) +# 7967 "parsing/parser.ml" + : 'module_type_declaration_body)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'module_type) in + Obj.repr( +# 979 "parsing/parser.mly" + ( Some _2 ) +# 7974 "parsing/parser.ml" + : 'module_type_declaration_body)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 3 : 'ext_attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 2 : 'ident) in + let _5 = (Parsing.peek_val __caml_parser_env 1 : 'module_type_declaration_body) in + let _6 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 984 "parsing/parser.mly" + ( let (ext, attrs) = _3 in + Mtd.mk (mkrhs _4 4) ?typ:_5 ~attrs:(attrs@_6) + ~loc:(symbol_rloc()) ~docs:(symbol_docs ()) + , ext ) +# 7987 "parsing/parser.ml" + : 'module_type_declaration)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'class_declaration) in + Obj.repr( +# 993 "parsing/parser.mly" + ( let (body, ext) = _1 in ([body], ext) ) +# 7994 "parsing/parser.ml" + : 'class_declarations)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val 
__caml_parser_env 1 : 'class_declarations) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'and_class_declaration) in + Obj.repr( +# 995 "parsing/parser.mly" + ( let (l, ext) = _1 in (_2 :: l, ext) ) +# 8002 "parsing/parser.ml" + : 'class_declarations)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 5 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'virtual_flag) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : 'class_type_parameters) in + let _5 = (Parsing.peek_val __caml_parser_env 2 : string) in + let _6 = (Parsing.peek_val __caml_parser_env 1 : 'class_fun_binding) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 1000 "parsing/parser.mly" + ( let (ext, attrs) = _2 in + Ci.mk (mkrhs _5 5) _6 ~virt:_3 ~params:_4 ~attrs:(attrs@_7) + ~loc:(symbol_rloc ()) ~docs:(symbol_docs ()) + , ext ) +# 8017 "parsing/parser.ml" + : 'class_declaration)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 5 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'virtual_flag) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : 'class_type_parameters) in + let _5 = (Parsing.peek_val __caml_parser_env 2 : string) in + let _6 = (Parsing.peek_val __caml_parser_env 1 : 'class_fun_binding) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 1008 "parsing/parser.mly" + ( Ci.mk (mkrhs _5 5) _6 ~virt:_3 ~params:_4 + ~attrs:(_2@_7) ~loc:(symbol_rloc ()) + ~text:(symbol_text ()) ~docs:(symbol_docs ()) ) +# 8031 "parsing/parser.ml" + : 'and_class_declaration)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'class_expr) in + Obj.repr( +# 1014 "parsing/parser.mly" + ( _2 ) +# 8038 "parsing/parser.ml" + : 'class_fun_binding)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'class_type) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'class_expr) in + Obj.repr( +# 1016 "parsing/parser.mly" + ( mkclass(Pcl_constraint(_4, _2)) ) +# 8046 "parsing/parser.ml" + : 'class_fun_binding)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'labeled_simple_pattern) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'class_fun_binding) in + Obj.repr( +# 1018 "parsing/parser.mly" + ( let (l,o,p) = _1 in mkclass(Pcl_fun(l, o, p, _2)) ) +# 8054 "parsing/parser.ml" + : 'class_fun_binding)) +; (fun __caml_parser_env -> + Obj.repr( +# 1021 "parsing/parser.mly" + ( [] ) +# 8060 "parsing/parser.ml" + : 'class_type_parameters)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'type_parameter_list) in + Obj.repr( +# 1022 "parsing/parser.mly" + ( List.rev _2 ) +# 8067 "parsing/parser.ml" + : 'class_type_parameters)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'labeled_simple_pattern) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'class_expr) in + Obj.repr( +# 1026 "parsing/parser.mly" + ( let (l,o,p) = _1 in mkclass(Pcl_fun(l, o, p, _3)) ) +# 8075 "parsing/parser.ml" + : 'class_fun_def)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'labeled_simple_pattern) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'class_fun_def) in + Obj.repr( +# 1028 "parsing/parser.mly" + ( let (l,o,p) = _1 in mkclass(Pcl_fun(l, o, p, _2)) ) +# 8083 "parsing/parser.ml" + : 'class_fun_def)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 
0 : 'class_simple_expr) in + Obj.repr( +# 1032 "parsing/parser.mly" + ( _1 ) +# 8090 "parsing/parser.ml" + : 'class_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'class_fun_def) in + Obj.repr( +# 1034 "parsing/parser.mly" + ( wrap_class_attrs _3 _2 ) +# 8098 "parsing/parser.ml" + : 'class_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'class_simple_expr) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'simple_labeled_expr_list) in + Obj.repr( +# 1036 "parsing/parser.mly" + ( mkclass(Pcl_apply(_1, List.rev _2)) ) +# 8106 "parsing/parser.ml" + : 'class_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'let_bindings) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'class_expr) in + Obj.repr( +# 1038 "parsing/parser.mly" + ( class_of_let_bindings _1 _3 ) +# 8114 "parsing/parser.ml" + : 'class_expr)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'override_flag) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : 'attributes) in + let _5 = (Parsing.peek_val __caml_parser_env 2 : 'mod_longident) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'class_expr) in + Obj.repr( +# 1040 "parsing/parser.mly" + ( wrap_class_attrs (mkclass(Pcl_open(_3, mkrhs _5 5, _7))) _4 ) +# 8124 "parsing/parser.ml" + : 'class_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'class_expr) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'attribute) in + Obj.repr( +# 1042 "parsing/parser.mly" + ( Cl.attr _1 _2 ) +# 8132 "parsing/parser.ml" + : 'class_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'extension) in + Obj.repr( +# 1044 "parsing/parser.mly" + ( mkclass(Pcl_extension _1) ) +# 8139 "parsing/parser.ml" + : 'class_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'core_type_comma_list) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'class_longident) in + Obj.repr( +# 1048 "parsing/parser.mly" + ( mkclass(Pcl_constr(mkloc _4 (rhs_loc 4), List.rev _2)) ) +# 8147 "parsing/parser.ml" + : 'class_simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'class_longident) in + Obj.repr( +# 1050 "parsing/parser.mly" + ( mkclass(Pcl_constr(mkrhs _1 1, [])) ) +# 8154 "parsing/parser.ml" + : 'class_simple_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'class_structure) in + Obj.repr( +# 1052 "parsing/parser.mly" + ( mkclass ~attrs:_2 (Pcl_structure _3) ) +# 8162 "parsing/parser.ml" + : 'class_simple_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'class_structure) in + Obj.repr( +# 1054 "parsing/parser.mly" + ( unclosed "object" 1 "end" 4 ) +# 8170 "parsing/parser.ml" + : 'class_simple_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'class_expr) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'class_type) in + Obj.repr( +# 1056 "parsing/parser.mly" + ( mkclass(Pcl_constraint(_2, _4)) ) +# 8178 "parsing/parser.ml" + : 'class_simple_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'class_expr) in + let _4 = (Parsing.peek_val __caml_parser_env 1 
: 'class_type) in + Obj.repr( +# 1058 "parsing/parser.mly" + ( unclosed "(" 1 ")" 5 ) +# 8186 "parsing/parser.ml" + : 'class_simple_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'class_expr) in + Obj.repr( +# 1060 "parsing/parser.mly" + ( _2 ) +# 8193 "parsing/parser.ml" + : 'class_simple_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'class_expr) in + Obj.repr( +# 1062 "parsing/parser.mly" + ( unclosed "(" 1 ")" 3 ) +# 8200 "parsing/parser.ml" + : 'class_simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'class_self_pattern) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'class_fields) in + Obj.repr( +# 1066 "parsing/parser.mly" + ( Cstr.mk _1 (extra_cstr 2 (List.rev _2)) ) +# 8208 "parsing/parser.ml" + : 'class_structure)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'pattern) in + Obj.repr( +# 1070 "parsing/parser.mly" + ( reloc_pat _2 ) +# 8215 "parsing/parser.ml" + : 'class_self_pattern)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'pattern) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'core_type) in + Obj.repr( +# 1072 "parsing/parser.mly" + ( mkpat(Ppat_constraint(_2, _4)) ) +# 8223 "parsing/parser.ml" + : 'class_self_pattern)) +; (fun __caml_parser_env -> + Obj.repr( +# 1074 "parsing/parser.mly" + ( ghpat(Ppat_any) ) +# 8229 "parsing/parser.ml" + : 'class_self_pattern)) +; (fun __caml_parser_env -> + Obj.repr( +# 1078 "parsing/parser.mly" + ( [] ) +# 8235 "parsing/parser.ml" + : 'class_fields)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'class_fields) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'class_field) in + Obj.repr( +# 1080 "parsing/parser.mly" + ( _2 :: (text_cstr 2) @ _1 ) +# 8243 "parsing/parser.ml" + : 'class_fields)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 4 : 'override_flag) in + let _3 = (Parsing.peek_val __caml_parser_env 3 : 'attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 2 : 'class_expr) in + let _5 = (Parsing.peek_val __caml_parser_env 1 : 'parent_binder) in + let _6 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 1085 "parsing/parser.mly" + ( mkcf (Pcf_inherit (_2, _4, _5)) ~attrs:(_3@_6) ~docs:(symbol_docs ()) ) +# 8254 "parsing/parser.ml" + : 'class_field)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'value) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 1087 "parsing/parser.mly" + ( let v, attrs = _2 in + mkcf (Pcf_val v) ~attrs:(attrs@_3) ~docs:(symbol_docs ()) ) +# 8263 "parsing/parser.ml" + : 'class_field)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'method_) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 1090 "parsing/parser.mly" + ( let meth, attrs = _2 in + mkcf (Pcf_method meth) ~attrs:(attrs@_3) ~docs:(symbol_docs ()) ) +# 8272 "parsing/parser.ml" + : 'class_field)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'constrain_field) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 1093 "parsing/parser.mly" + ( mkcf (Pcf_constraint _3) ~attrs:(_2@_4) ~docs:(symbol_docs ()) ) +# 8281 "parsing/parser.ml" + : 
'class_field)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'seq_expr) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 1095 "parsing/parser.mly" + ( mkcf (Pcf_initializer _3) ~attrs:(_2@_4) ~docs:(symbol_docs ()) ) +# 8290 "parsing/parser.ml" + : 'class_field)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'item_extension) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 1097 "parsing/parser.mly" + ( mkcf (Pcf_extension _1) ~attrs:_2 ~docs:(symbol_docs ()) ) +# 8298 "parsing/parser.ml" + : 'class_field)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'floating_attribute) in + Obj.repr( +# 1099 "parsing/parser.mly" + ( mark_symbol_docs (); + mkcf (Pcf_attribute _1) ) +# 8306 "parsing/parser.ml" + : 'class_field)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 1104 "parsing/parser.mly" + ( Some (mkrhs _2 2) ) +# 8313 "parsing/parser.ml" + : 'parent_binder)) +; (fun __caml_parser_env -> + Obj.repr( +# 1106 "parsing/parser.mly" + ( None ) +# 8319 "parsing/parser.ml" + : 'parent_binder)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 6 : 'override_flag) in + let _2 = (Parsing.peek_val __caml_parser_env 5 : 'attributes) in + let _5 = (Parsing.peek_val __caml_parser_env 2 : 'label) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in + Obj.repr( +# 1111 "parsing/parser.mly" + ( if _1 = Override then syntax_error (); + (mkloc _5 (rhs_loc 5), Mutable, Cfk_virtual _7), _2 ) +# 8330 "parsing/parser.ml" + : 'value)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 6 : 'override_flag) in + let _2 = (Parsing.peek_val __caml_parser_env 5 : 'attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : 'mutable_flag) in + let _5 = (Parsing.peek_val __caml_parser_env 2 : 'label) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in + Obj.repr( +# 1114 "parsing/parser.mly" + ( if _1 = Override then syntax_error (); + (mkrhs _5 5, _4, Cfk_virtual _7), _2 ) +# 8342 "parsing/parser.ml" + : 'value)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 5 : 'override_flag) in + let _2 = (Parsing.peek_val __caml_parser_env 4 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 3 : 'mutable_flag) in + let _4 = (Parsing.peek_val __caml_parser_env 2 : 'label) in + let _6 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 1117 "parsing/parser.mly" + ( (mkrhs _4 4, _3, Cfk_concrete (_1, _6)), _2 ) +# 8353 "parsing/parser.ml" + : 'value)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 6 : 'override_flag) in + let _2 = (Parsing.peek_val __caml_parser_env 5 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'mutable_flag) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : 'label) in + let _5 = (Parsing.peek_val __caml_parser_env 2 : 'type_constraint) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 1119 "parsing/parser.mly" + ( + let e = mkexp_constraint _7 _5 in + (mkrhs _4 4, _3, Cfk_concrete (_1, e)), _2 + ) +# 8368 "parsing/parser.ml" + : 'value)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 6 : 'override_flag) in + let _2 = (Parsing.peek_val 
__caml_parser_env 5 : 'attributes) in + let _5 = (Parsing.peek_val __caml_parser_env 2 : 'label) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'poly_type) in + Obj.repr( +# 1127 "parsing/parser.mly" + ( if _1 = Override then syntax_error (); + (mkloc _5 (rhs_loc 5), Private, Cfk_virtual _7), _2 ) +# 8379 "parsing/parser.ml" + : 'method_)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 6 : 'override_flag) in + let _2 = (Parsing.peek_val __caml_parser_env 5 : 'attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : 'private_flag) in + let _5 = (Parsing.peek_val __caml_parser_env 2 : 'label) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'poly_type) in + Obj.repr( +# 1130 "parsing/parser.mly" + ( if _1 = Override then syntax_error (); + (mkloc _5 (rhs_loc 5), _4, Cfk_virtual _7), _2 ) +# 8391 "parsing/parser.ml" + : 'method_)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'override_flag) in + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'private_flag) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'label) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'strict_binding) in + Obj.repr( +# 1133 "parsing/parser.mly" + ( (mkloc _4 (rhs_loc 4), _3, + Cfk_concrete (_1, ghexp(Pexp_poly (_5, None)))), _2 ) +# 8403 "parsing/parser.ml" + : 'method_)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 7 : 'override_flag) in + let _2 = (Parsing.peek_val __caml_parser_env 6 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 5 : 'private_flag) in + let _4 = (Parsing.peek_val __caml_parser_env 4 : 'label) in + let _6 = (Parsing.peek_val __caml_parser_env 2 : 'poly_type) in + let _8 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 1136 "parsing/parser.mly" + ( (mkloc _4 (rhs_loc 4), _3, + Cfk_concrete (_1, ghexp(Pexp_poly(_8, Some _6)))), _2 ) +# 8416 "parsing/parser.ml" + : 'method_)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 10 : 'override_flag) in + let _2 = (Parsing.peek_val __caml_parser_env 9 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 8 : 'private_flag) in + let _4 = (Parsing.peek_val __caml_parser_env 7 : 'label) in + let _7 = (Parsing.peek_val __caml_parser_env 4 : 'lident_list) in + let _9 = (Parsing.peek_val __caml_parser_env 2 : 'core_type) in + let _11 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 1140 "parsing/parser.mly" + ( let exp, poly = wrap_type_annotation _7 _9 _11 in + (mkloc _4 (rhs_loc 4), _3, + Cfk_concrete (_1, ghexp(Pexp_poly(exp, Some poly)))), _2 ) +# 8431 "parsing/parser.ml" + : 'method_)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'class_signature) in + Obj.repr( +# 1149 "parsing/parser.mly" + ( _1 ) +# 8438 "parsing/parser.ml" + : 'class_type)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 4 : string) in + let _4 = (Parsing.peek_val __caml_parser_env 2 : 'simple_core_type_or_tuple) in + let _6 = (Parsing.peek_val __caml_parser_env 0 : 'class_type) in + Obj.repr( +# 1152 "parsing/parser.mly" + ( mkcty(Pcty_arrow(Optional _2 , _4, _6)) ) +# 8447 "parsing/parser.ml" + : 'class_type)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'simple_core_type_or_tuple) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 
'class_type) in + Obj.repr( +# 1154 "parsing/parser.mly" + ( mkcty(Pcty_arrow(Optional _1, _2, _4)) ) +# 8456 "parsing/parser.ml" + : 'class_type)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : string) in + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'simple_core_type_or_tuple) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'class_type) in + Obj.repr( +# 1156 "parsing/parser.mly" + ( mkcty(Pcty_arrow(Labelled _1, _3, _5)) ) +# 8465 "parsing/parser.ml" + : 'class_type)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'simple_core_type_or_tuple) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'class_type) in + Obj.repr( +# 1158 "parsing/parser.mly" + ( mkcty(Pcty_arrow(Nolabel, _1, _3)) ) +# 8473 "parsing/parser.ml" + : 'class_type)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'core_type_comma_list) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'clty_longident) in + Obj.repr( +# 1162 "parsing/parser.mly" + ( mkcty(Pcty_constr (mkloc _4 (rhs_loc 4), List.rev _2)) ) +# 8481 "parsing/parser.ml" + : 'class_signature)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'clty_longident) in + Obj.repr( +# 1164 "parsing/parser.mly" + ( mkcty(Pcty_constr (mkrhs _1 1, [])) ) +# 8488 "parsing/parser.ml" + : 'class_signature)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'class_sig_body) in + Obj.repr( +# 1166 "parsing/parser.mly" + ( mkcty ~attrs:_2 (Pcty_signature _3) ) +# 8496 "parsing/parser.ml" + : 'class_signature)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'class_sig_body) in + Obj.repr( +# 1168 "parsing/parser.mly" + ( unclosed "object" 1 "end" 4 ) +# 8504 "parsing/parser.ml" + : 'class_signature)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'class_signature) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'attribute) in + Obj.repr( +# 1170 "parsing/parser.mly" + ( Cty.attr _1 _2 ) +# 8512 "parsing/parser.ml" + : 'class_signature)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'extension) in + Obj.repr( +# 1172 "parsing/parser.mly" + ( mkcty(Pcty_extension _1) ) +# 8519 "parsing/parser.ml" + : 'class_signature)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'override_flag) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : 'attributes) in + let _5 = (Parsing.peek_val __caml_parser_env 2 : 'mod_longident) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'class_signature) in + Obj.repr( +# 1174 "parsing/parser.mly" + ( wrap_class_type_attrs (mkcty(Pcty_open(_3, mkrhs _5 5, _7))) _4 ) +# 8529 "parsing/parser.ml" + : 'class_signature)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'class_self_type) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'class_sig_fields) in + Obj.repr( +# 1178 "parsing/parser.mly" + ( Csig.mk _1 (extra_csig 2 (List.rev _2)) ) +# 8537 "parsing/parser.ml" + : 'class_sig_body)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'core_type) in + Obj.repr( +# 1182 "parsing/parser.mly" + ( _2 ) +# 8544 "parsing/parser.ml" + : 'class_self_type)) +; (fun __caml_parser_env -> + Obj.repr( +# 1184 "parsing/parser.mly" + ( mktyp(Ptyp_any) ) +# 
8550 "parsing/parser.ml" + : 'class_self_type)) +; (fun __caml_parser_env -> + Obj.repr( +# 1187 "parsing/parser.mly" + ( [] ) +# 8556 "parsing/parser.ml" + : 'class_sig_fields)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'class_sig_fields) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'class_sig_field) in + Obj.repr( +# 1188 "parsing/parser.mly" + ( _2 :: (text_csig 2) @ _1 ) +# 8564 "parsing/parser.ml" + : 'class_sig_fields)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'class_signature) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 1192 "parsing/parser.mly" + ( mkctf (Pctf_inherit _3) ~attrs:(_2@_4) ~docs:(symbol_docs ()) ) +# 8573 "parsing/parser.ml" + : 'class_sig_field)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'value_type) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 1194 "parsing/parser.mly" + ( mkctf (Pctf_val _3) ~attrs:(_2@_4) ~docs:(symbol_docs ()) ) +# 8582 "parsing/parser.ml" + : 'class_sig_field)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 5 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'private_virtual_flags) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : 'label) in + let _6 = (Parsing.peek_val __caml_parser_env 1 : 'poly_type) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 1197 "parsing/parser.mly" + ( + let (p, v) = _3 in + mkctf (Pctf_method (mkrhs _4 4, p, v, _6)) ~attrs:(_2@_7) ~docs:(symbol_docs ()) + ) +# 8596 "parsing/parser.ml" + : 'class_sig_field)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'constrain_field) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 1202 "parsing/parser.mly" + ( mkctf (Pctf_constraint _3) ~attrs:(_2@_4) ~docs:(symbol_docs ()) ) +# 8605 "parsing/parser.ml" + : 'class_sig_field)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'item_extension) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 1204 "parsing/parser.mly" + ( mkctf (Pctf_extension _1) ~attrs:_2 ~docs:(symbol_docs ()) ) +# 8613 "parsing/parser.ml" + : 'class_sig_field)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'floating_attribute) in + Obj.repr( +# 1206 "parsing/parser.mly" + ( mark_symbol_docs (); + mkctf(Pctf_attribute _1) ) +# 8621 "parsing/parser.ml" + : 'class_sig_field)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'mutable_flag) in + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'label) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in + Obj.repr( +# 1211 "parsing/parser.mly" + ( mkrhs _3 3, _2, Virtual, _5 ) +# 8630 "parsing/parser.ml" + : 'value_type)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'virtual_flag) in + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'label) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in + Obj.repr( +# 1213 "parsing/parser.mly" + ( mkrhs _3 3, Mutable, _2, _5 ) +# 8639 "parsing/parser.ml" + : 'value_type)) +; (fun 
__caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'label) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in + Obj.repr( +# 1215 "parsing/parser.mly" + ( mkrhs _1 1, Immutable, Concrete, _3 ) +# 8647 "parsing/parser.ml" + : 'value_type)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'core_type) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in + Obj.repr( +# 1218 "parsing/parser.mly" + ( _1, _3, symbol_rloc() ) +# 8655 "parsing/parser.ml" + : 'constrain)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'core_type) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in + Obj.repr( +# 1221 "parsing/parser.mly" + ( _1, _3 ) +# 8663 "parsing/parser.ml" + : 'constrain_field)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'class_description) in + Obj.repr( +# 1225 "parsing/parser.mly" + ( let (body, ext) = _1 in ([body],ext) ) +# 8670 "parsing/parser.ml" + : 'class_descriptions)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'class_descriptions) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'and_class_description) in + Obj.repr( +# 1227 "parsing/parser.mly" + ( let (l, ext) = _1 in (_2 :: l, ext) ) +# 8678 "parsing/parser.ml" + : 'class_descriptions)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 6 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 5 : 'virtual_flag) in + let _4 = (Parsing.peek_val __caml_parser_env 4 : 'class_type_parameters) in + let _5 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _7 = (Parsing.peek_val __caml_parser_env 1 : 'class_type) in + let _8 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 1232 "parsing/parser.mly" + ( let (ext, attrs) = _2 in + Ci.mk (mkrhs _5 5) _7 ~virt:_3 ~params:_4 ~attrs:(attrs @ _8) + ~loc:(symbol_rloc ()) ~docs:(symbol_docs ()) + , ext ) +# 8693 "parsing/parser.ml" + : 'class_description)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 6 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 5 : 'virtual_flag) in + let _4 = (Parsing.peek_val __caml_parser_env 4 : 'class_type_parameters) in + let _5 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _7 = (Parsing.peek_val __caml_parser_env 1 : 'class_type) in + let _8 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 1240 "parsing/parser.mly" + ( Ci.mk (mkrhs _5 5) _7 ~virt:_3 ~params:_4 + ~attrs:(_2@_8) ~loc:(symbol_rloc ()) + ~text:(symbol_text ()) ~docs:(symbol_docs ()) ) +# 8707 "parsing/parser.ml" + : 'and_class_description)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'class_type_declaration) in + Obj.repr( +# 1246 "parsing/parser.mly" + ( let (body, ext) = _1 in ([body],ext) ) +# 8714 "parsing/parser.ml" + : 'class_type_declarations)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'class_type_declarations) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'and_class_type_declaration) in + Obj.repr( +# 1248 "parsing/parser.mly" + ( let (l, ext) = _1 in (_2 :: l, ext) ) +# 8722 "parsing/parser.ml" + : 'class_type_declarations)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 6 : 'ext_attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 5 : 'virtual_flag) in + let _5 = (Parsing.peek_val 
__caml_parser_env 4 : 'class_type_parameters) in + let _6 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _8 = (Parsing.peek_val __caml_parser_env 1 : 'class_signature) in + let _9 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 1253 "parsing/parser.mly" + ( let (ext, attrs) = _3 in + Ci.mk (mkrhs _6 6) _8 ~virt:_4 ~params:_5 ~attrs:(attrs@_9) + ~loc:(symbol_rloc ()) ~docs:(symbol_docs ()) + , ext) +# 8737 "parsing/parser.ml" + : 'class_type_declaration)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 6 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 5 : 'virtual_flag) in + let _4 = (Parsing.peek_val __caml_parser_env 4 : 'class_type_parameters) in + let _5 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _7 = (Parsing.peek_val __caml_parser_env 1 : 'class_signature) in + let _8 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 1261 "parsing/parser.mly" + ( Ci.mk (mkrhs _5 5) _7 ~virt:_3 ~params:_4 + ~attrs:(_2@_8) ~loc:(symbol_rloc ()) + ~text:(symbol_text ()) ~docs:(symbol_docs ()) ) +# 8751 "parsing/parser.ml" + : 'and_class_type_declaration)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1269 "parsing/parser.mly" + ( _1 ) +# 8758 "parsing/parser.ml" + : 'seq_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in + Obj.repr( +# 1270 "parsing/parser.mly" + ( _1 ) +# 8765 "parsing/parser.ml" + : 'seq_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 1271 "parsing/parser.mly" + ( mkexp(Pexp_sequence(_1, _3)) ) +# 8773 "parsing/parser.ml" + : 'seq_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'expr) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'attr_id) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 1273 "parsing/parser.mly" + ( let seq = mkexp(Pexp_sequence (_1, _5)) in + let payload = PStr [mkstrexp seq []] in + mkexp (Pexp_extension (_4, payload)) ) +# 8784 "parsing/parser.ml" + : 'seq_expr)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'label_let_pattern) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'opt_default) in + Obj.repr( +# 1279 "parsing/parser.mly" + ( (Optional (fst _3), _4, snd _3) ) +# 8792 "parsing/parser.ml" + : 'labeled_simple_pattern)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'label_var) in + Obj.repr( +# 1281 "parsing/parser.mly" + ( (Optional (fst _2), None, snd _2) ) +# 8799 "parsing/parser.ml" + : 'labeled_simple_pattern)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : string) in + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'let_pattern) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'opt_default) in + Obj.repr( +# 1283 "parsing/parser.mly" + ( (Optional _1, _4, _3) ) +# 8808 "parsing/parser.ml" + : 'labeled_simple_pattern)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : string) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'pattern_var) in + Obj.repr( +# 1285 "parsing/parser.mly" + ( (Optional _1, None, _2) ) +# 8816 "parsing/parser.ml" + : 'labeled_simple_pattern)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 1 : 
'label_let_pattern) in + Obj.repr( +# 1287 "parsing/parser.mly" + ( (Labelled (fst _3), None, snd _3) ) +# 8823 "parsing/parser.ml" + : 'labeled_simple_pattern)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'label_var) in + Obj.repr( +# 1289 "parsing/parser.mly" + ( (Labelled (fst _2), None, snd _2) ) +# 8830 "parsing/parser.ml" + : 'labeled_simple_pattern)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : string) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'simple_pattern) in + Obj.repr( +# 1291 "parsing/parser.mly" + ( (Labelled _1, None, _2) ) +# 8838 "parsing/parser.ml" + : 'labeled_simple_pattern)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'simple_pattern) in + Obj.repr( +# 1293 "parsing/parser.mly" + ( (Nolabel, None, _1) ) +# 8845 "parsing/parser.ml" + : 'labeled_simple_pattern)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 1296 "parsing/parser.mly" + ( mkpat(Ppat_var (mkrhs _1 1)) ) +# 8852 "parsing/parser.ml" + : 'pattern_var)) +; (fun __caml_parser_env -> + Obj.repr( +# 1297 "parsing/parser.mly" + ( mkpat Ppat_any ) +# 8858 "parsing/parser.ml" + : 'pattern_var)) +; (fun __caml_parser_env -> + Obj.repr( +# 1300 "parsing/parser.mly" + ( None ) +# 8864 "parsing/parser.ml" + : 'opt_default)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 1301 "parsing/parser.mly" + ( Some _2 ) +# 8871 "parsing/parser.ml" + : 'opt_default)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'label_var) in + Obj.repr( +# 1305 "parsing/parser.mly" + ( _1 ) +# 8878 "parsing/parser.ml" + : 'label_let_pattern)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'label_var) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in + Obj.repr( +# 1307 "parsing/parser.mly" + ( let (lab, pat) = _1 in (lab, mkpat(Ppat_constraint(pat, _3))) ) +# 8886 "parsing/parser.ml" + : 'label_let_pattern)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 1310 "parsing/parser.mly" + ( (_1, mkpat(Ppat_var (mkrhs _1 1))) ) +# 8893 "parsing/parser.ml" + : 'label_var)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in + Obj.repr( +# 1314 "parsing/parser.mly" + ( _1 ) +# 8900 "parsing/parser.ml" + : 'let_pattern)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in + Obj.repr( +# 1316 "parsing/parser.mly" + ( mkpat(Ppat_constraint(_1, _3)) ) +# 8908 "parsing/parser.ml" + : 'let_pattern)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'simple_expr) in + Obj.repr( +# 1320 "parsing/parser.mly" + ( _1 ) +# 8915 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'simple_expr) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'simple_labeled_expr_list) in + Obj.repr( +# 1322 "parsing/parser.mly" + ( mkexp(Pexp_apply(_1, List.rev _2)) ) +# 8923 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'let_bindings) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 1324 "parsing/parser.mly" + ( expr_of_let_bindings _1 _3 ) +# 8931 "parsing/parser.ml" + 
: 'expr)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'ext_attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _5 = (Parsing.peek_val __caml_parser_env 2 : 'module_binding_body) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 1326 "parsing/parser.mly" + ( mkexp_attrs (Pexp_letmodule(mkrhs _4 4, _5, _7)) _3 ) +# 8941 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 3 : 'ext_attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 2 : 'let_exception_declaration) in + let _6 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 1328 "parsing/parser.mly" + ( mkexp_attrs (Pexp_letexception(_4, _6)) _3 ) +# 8950 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'override_flag) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : 'ext_attributes) in + let _5 = (Parsing.peek_val __caml_parser_env 2 : 'mod_longident) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 1330 "parsing/parser.mly" + ( mkexp_attrs (Pexp_open(_3, mkrhs _5 5, _7)) _4 ) +# 8960 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'opt_bar) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'match_cases) in + Obj.repr( +# 1332 "parsing/parser.mly" + ( mkexp_attrs (Pexp_function(List.rev _4)) _2 ) +# 8969 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'labeled_simple_pattern) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'fun_def) in + Obj.repr( +# 1334 "parsing/parser.mly" + ( let (l,o,p) = _3 in + mkexp_attrs (Pexp_fun(l, o, p, _4)) _2 ) +# 8979 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 5 : 'ext_attributes) in + let _5 = (Parsing.peek_val __caml_parser_env 2 : 'lident_list) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'fun_def) in + Obj.repr( +# 1337 "parsing/parser.mly" + ( mkexp_attrs (mk_newtypes _5 _7).pexp_desc _2 ) +# 8988 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 4 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 3 : 'seq_expr) in + let _5 = (Parsing.peek_val __caml_parser_env 1 : 'opt_bar) in + let _6 = (Parsing.peek_val __caml_parser_env 0 : 'match_cases) in + Obj.repr( +# 1339 "parsing/parser.mly" + ( mkexp_attrs (Pexp_match(_3, List.rev _6)) _2 ) +# 8998 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 4 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 3 : 'seq_expr) in + let _5 = (Parsing.peek_val __caml_parser_env 1 : 'opt_bar) in + let _6 = (Parsing.peek_val __caml_parser_env 0 : 'match_cases) in + Obj.repr( +# 1341 "parsing/parser.mly" + ( mkexp_attrs (Pexp_try(_3, List.rev _6)) _2 ) +# 9008 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'seq_expr) in + Obj.repr( +# 1343 "parsing/parser.mly" + ( syntax_error() ) +# 9016 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = 
(Parsing.peek_val __caml_parser_env 0 : 'expr_comma_list) in + Obj.repr( +# 1345 "parsing/parser.mly" + ( mkexp(Pexp_tuple(List.rev _1)) ) +# 9023 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'constr_longident) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'simple_expr) in + Obj.repr( +# 1347 "parsing/parser.mly" + ( mkexp(Pexp_construct(mkrhs _1 1, Some _2)) ) +# 9031 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'name_tag) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'simple_expr) in + Obj.repr( +# 1349 "parsing/parser.mly" + ( mkexp(Pexp_variant(_1, Some _2)) ) +# 9039 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 5 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'seq_expr) in + let _5 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1351 "parsing/parser.mly" + ( mkexp_attrs(Pexp_ifthenelse(_3, _5, Some _7)) _2 ) +# 9049 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'seq_expr) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1353 "parsing/parser.mly" + ( mkexp_attrs (Pexp_ifthenelse(_3, _5, None)) _2 ) +# 9058 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 4 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 3 : 'seq_expr) in + let _5 = (Parsing.peek_val __caml_parser_env 1 : 'seq_expr) in + Obj.repr( +# 1355 "parsing/parser.mly" + ( mkexp_attrs (Pexp_while(_3, _5)) _2 ) +# 9067 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 8 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 7 : 'pattern) in + let _5 = (Parsing.peek_val __caml_parser_env 5 : 'seq_expr) in + let _6 = (Parsing.peek_val __caml_parser_env 4 : 'direction_flag) in + let _7 = (Parsing.peek_val __caml_parser_env 3 : 'seq_expr) in + let _9 = (Parsing.peek_val __caml_parser_env 1 : 'seq_expr) in + Obj.repr( +# 1358 "parsing/parser.mly" + ( mkexp_attrs(Pexp_for(_3, _5, _7, _6, _9)) _2 ) +# 9079 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1360 "parsing/parser.mly" + ( mkexp_cons (rhs_loc 2) (ghexp(Pexp_tuple[_1;_3])) (symbol_rloc()) ) +# 9087 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1362 "parsing/parser.mly" + ( mkinfix _1 _2 _3 ) +# 9096 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1364 "parsing/parser.mly" + ( mkinfix _1 _2 _3 ) +# 9105 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in + let 
_3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1366 "parsing/parser.mly" + ( mkinfix _1 _2 _3 ) +# 9114 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1368 "parsing/parser.mly" + ( mkinfix _1 _2 _3 ) +# 9123 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1370 "parsing/parser.mly" + ( mkinfix _1 _2 _3 ) +# 9132 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1372 "parsing/parser.mly" + ( mkinfix _1 "+" _3 ) +# 9140 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1374 "parsing/parser.mly" + ( mkinfix _1 "+." _3 ) +# 9148 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1376 "parsing/parser.mly" + ( mkinfix _1 "+=" _3 ) +# 9156 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1378 "parsing/parser.mly" + ( mkinfix _1 "-" _3 ) +# 9164 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1380 "parsing/parser.mly" + ( mkinfix _1 "-." 
_3 ) +# 9172 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1382 "parsing/parser.mly" + ( mkinfix _1 "*" _3 ) +# 9180 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1384 "parsing/parser.mly" + ( mkinfix _1 "%" _3 ) +# 9188 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1386 "parsing/parser.mly" + ( mkinfix _1 "=" _3 ) +# 9196 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1388 "parsing/parser.mly" + ( mkinfix _1 "<" _3 ) +# 9204 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1390 "parsing/parser.mly" + ( mkinfix _1 ">" _3 ) +# 9212 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1392 "parsing/parser.mly" + ( mkinfix _1 "or" _3 ) +# 9220 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1394 "parsing/parser.mly" + ( mkinfix _1 "||" _3 ) +# 9228 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1396 "parsing/parser.mly" + ( mkinfix _1 "&" _3 ) +# 9236 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1398 "parsing/parser.mly" + ( mkinfix _1 "&&" _3 ) +# 9244 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1400 "parsing/parser.mly" + ( mkinfix _1 ":=" _3 ) +# 9252 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'subtractive) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1402 "parsing/parser.mly" + ( mkuminus _1 _2 ) +# 9260 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'additive) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1404 "parsing/parser.mly" + ( mkuplus _1 _2 ) +# 9268 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'simple_expr) in + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'label_longident) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1406 "parsing/parser.mly" + ( mkexp(Pexp_setfield(_1, mkrhs _3 3, _5)) ) +# 9277 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val 
__caml_parser_env 6 : 'simple_expr) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : 'seq_expr) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1408 "parsing/parser.mly" + ( mkexp(Pexp_apply(ghexp(Pexp_ident(array_function "Array" "set")), + [Nolabel,_1; Nolabel,_4; Nolabel,_7])) ) +# 9287 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 6 : 'simple_expr) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : 'seq_expr) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1411 "parsing/parser.mly" + ( mkexp(Pexp_apply(ghexp(Pexp_ident(array_function "String" "set")), + [Nolabel,_1; Nolabel,_4; Nolabel,_7])) ) +# 9297 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 6 : 'simple_expr) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : 'expr) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1414 "parsing/parser.mly" + ( bigarray_set _1 _4 _7 ) +# 9306 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 6 : 'simple_expr) in + let _2 = (Parsing.peek_val __caml_parser_env 5 : string) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : 'expr) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1416 "parsing/parser.mly" + ( let id = mkexp @@ Pexp_ident( ghloc @@ Lident ("." ^ _2 ^ "[]<-")) in + mkexp @@ Pexp_apply(id , [Nolabel, _1; Nolabel, _4; Nolabel, _7]) ) +# 9317 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 6 : 'simple_expr) in + let _2 = (Parsing.peek_val __caml_parser_env 5 : string) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : 'expr) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1419 "parsing/parser.mly" + ( let id = mkexp @@ Pexp_ident( ghloc @@ Lident ("." ^ _2 ^ "()<-")) in + mkexp @@ Pexp_apply(id , [Nolabel, _1; Nolabel, _4; Nolabel, _7]) ) +# 9328 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 6 : 'simple_expr) in + let _2 = (Parsing.peek_val __caml_parser_env 5 : string) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : 'expr) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1422 "parsing/parser.mly" + ( let id = mkexp @@ Pexp_ident( ghloc @@ Lident ("." ^ _2 ^ "{}<-")) in + mkexp @@ Pexp_apply(id , [Nolabel, _1; Nolabel, _4; Nolabel, _7]) ) +# 9339 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 8 : 'simple_expr) in + let _3 = (Parsing.peek_val __caml_parser_env 6 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 5 : string) in + let _6 = (Parsing.peek_val __caml_parser_env 3 : 'expr) in + let _9 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1425 "parsing/parser.mly" + ( let id = mkexp @@ Pexp_ident( ghloc @@ Ldot(_3,"." 
^ _4 ^ "[]<-")) in + mkexp @@ Pexp_apply(id , [Nolabel, _1; Nolabel, _6; Nolabel, _9]) ) +# 9351 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 8 : 'simple_expr) in + let _3 = (Parsing.peek_val __caml_parser_env 6 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 5 : string) in + let _6 = (Parsing.peek_val __caml_parser_env 3 : 'expr) in + let _9 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1428 "parsing/parser.mly" + ( let id = mkexp @@ Pexp_ident( ghloc @@ Ldot(_3, "." ^ _4 ^ "()<-")) in + mkexp @@ Pexp_apply(id , [Nolabel, _1; Nolabel, _6; Nolabel, _9]) ) +# 9363 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 8 : 'simple_expr) in + let _3 = (Parsing.peek_val __caml_parser_env 6 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 5 : string) in + let _6 = (Parsing.peek_val __caml_parser_env 3 : 'expr) in + let _9 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1431 "parsing/parser.mly" + ( let id = mkexp @@ Pexp_ident( ghloc @@ Ldot(_3, "." ^ _4 ^ "{}<-")) in + mkexp @@ Pexp_apply(id , [Nolabel, _1; Nolabel, _6; Nolabel, _9]) ) +# 9375 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'label) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1434 "parsing/parser.mly" + ( mkexp(Pexp_setinstvar(mkrhs _1 1, _3)) ) +# 9383 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'simple_expr) in + Obj.repr( +# 1436 "parsing/parser.mly" + ( mkexp_attrs (Pexp_assert _3) _2 ) +# 9391 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'simple_expr) in + Obj.repr( +# 1438 "parsing/parser.mly" + ( mkexp_attrs (Pexp_lazy _3) _2 ) +# 9399 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'class_structure) in + Obj.repr( +# 1440 "parsing/parser.mly" + ( mkexp_attrs (Pexp_object _3) _2 ) +# 9407 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'class_structure) in + Obj.repr( +# 1442 "parsing/parser.mly" + ( unclosed "object" 1 "end" 4 ) +# 9415 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'attribute) in + Obj.repr( +# 1444 "parsing/parser.mly" + ( Exp.attr _1 _2 ) +# 9423 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + Obj.repr( +# 1446 "parsing/parser.mly" + ( not_expecting 1 "wildcard \"_\"" ) +# 9429 "parsing/parser.ml" + : 'expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'val_longident) in + Obj.repr( +# 1450 "parsing/parser.mly" + ( mkexp(Pexp_ident (mkrhs _1 1)) ) +# 9436 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'constant) in + Obj.repr( +# 1452 "parsing/parser.mly" + ( mkexp(Pexp_constant _1) ) +# 9443 "parsing/parser.ml" + : 'simple_expr)) 
+; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'constr_longident) in + Obj.repr( +# 1454 "parsing/parser.mly" + ( mkexp(Pexp_construct(mkrhs _1 1, None)) ) +# 9450 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'name_tag) in + Obj.repr( +# 1456 "parsing/parser.mly" + ( mkexp(Pexp_variant(_1, None)) ) +# 9457 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'seq_expr) in + Obj.repr( +# 1458 "parsing/parser.mly" + ( reloc_exp _2 ) +# 9464 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'seq_expr) in + Obj.repr( +# 1460 "parsing/parser.mly" + ( unclosed "(" 1 ")" 3 ) +# 9471 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'seq_expr) in + Obj.repr( +# 1462 "parsing/parser.mly" + ( wrap_exp_attrs (reloc_exp _3) _2 (* check location *) ) +# 9479 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'ext_attributes) in + Obj.repr( +# 1464 "parsing/parser.mly" + ( mkexp_attrs (Pexp_construct (mkloc (Lident "()") (symbol_rloc ()), + None)) _2 ) +# 9487 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'seq_expr) in + Obj.repr( +# 1467 "parsing/parser.mly" + ( unclosed "begin" 1 "end" 4 ) +# 9495 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'seq_expr) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'type_constraint) in + Obj.repr( +# 1469 "parsing/parser.mly" + ( mkexp_constraint _2 _3 ) +# 9503 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'simple_expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'label_longident) in + Obj.repr( +# 1471 "parsing/parser.mly" + ( mkexp(Pexp_field(_1, mkrhs _3 3)) ) +# 9511 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'seq_expr) in + Obj.repr( +# 1473 "parsing/parser.mly" + ( mkexp(Pexp_open(Fresh, mkrhs _1 1, _4)) ) +# 9519 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 3 : 'mod_longident) in + Obj.repr( +# 1475 "parsing/parser.mly" + ( mkexp(Pexp_open(Fresh, mkrhs _1 1, + mkexp(Pexp_construct(mkrhs (Lident "()") 1, None)))) ) +# 9527 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'seq_expr) in + Obj.repr( +# 1478 "parsing/parser.mly" + ( unclosed "(" 3 ")" 5 ) +# 9535 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'simple_expr) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'seq_expr) in + Obj.repr( +# 1480 "parsing/parser.mly" + ( mkexp(Pexp_apply(ghexp(Pexp_ident(array_function "Array" "get")), + [Nolabel,_1; Nolabel,_4])) ) +# 9544 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env 
-> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'simple_expr) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'seq_expr) in + Obj.repr( +# 1483 "parsing/parser.mly" + ( unclosed "(" 3 ")" 5 ) +# 9552 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'simple_expr) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'seq_expr) in + Obj.repr( +# 1485 "parsing/parser.mly" + ( mkexp(Pexp_apply(ghexp(Pexp_ident(array_function "String" "get")), + [Nolabel,_1; Nolabel,_4])) ) +# 9561 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'simple_expr) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'seq_expr) in + Obj.repr( +# 1488 "parsing/parser.mly" + ( unclosed "[" 3 "]" 5 ) +# 9569 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'simple_expr) in + let _2 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in + Obj.repr( +# 1490 "parsing/parser.mly" + ( let id = mkexp @@ Pexp_ident( ghloc @@ Lident ("." ^ _2 ^ "[]")) in + mkexp @@ Pexp_apply(id, [Nolabel, _1; Nolabel, _4]) ) +# 9579 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'simple_expr) in + let _2 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in + Obj.repr( +# 1493 "parsing/parser.mly" + ( unclosed "[" 3 "]" 5 ) +# 9588 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'simple_expr) in + let _2 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in + Obj.repr( +# 1495 "parsing/parser.mly" + ( let id = mkexp @@ Pexp_ident( ghloc @@ Lident ("." ^ _2 ^ "()")) in + mkexp @@ Pexp_apply(id, [Nolabel, _1; Nolabel, _4]) ) +# 9598 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'simple_expr) in + let _2 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in + Obj.repr( +# 1498 "parsing/parser.mly" + ( unclosed "(" 3 ")" 5 ) +# 9607 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'simple_expr) in + let _2 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in + Obj.repr( +# 1500 "parsing/parser.mly" + ( let id = mkexp @@ Pexp_ident( ghloc @@ Lident ("." 
^ _2 ^ "{}")) in + mkexp @@ Pexp_apply(id, [Nolabel, _1; Nolabel, _4]) ) +# 9617 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'simple_expr) in + let _2 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in + Obj.repr( +# 1503 "parsing/parser.mly" + ( unclosed "{" 3 "}" 5 ) +# 9626 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 6 : 'simple_expr) in + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _6 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in + Obj.repr( +# 1505 "parsing/parser.mly" + ( let id = mkexp @@ Pexp_ident( ghloc @@ Ldot(_3, "." ^ _4 ^ "[]")) in + mkexp @@ Pexp_apply(id, [Nolabel, _1; Nolabel, _6]) ) +# 9637 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 6 : 'simple_expr) in + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _6 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in + Obj.repr( +# 1508 "parsing/parser.mly" + ( unclosed "[" 5 "]" 7 ) +# 9647 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 6 : 'simple_expr) in + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _6 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in + Obj.repr( +# 1510 "parsing/parser.mly" + ( let id = mkexp @@ Pexp_ident( ghloc @@ Ldot(_3, "." ^ _4 ^ "()")) in + mkexp @@ Pexp_apply(id, [Nolabel, _1; Nolabel, _6]) ) +# 9658 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 6 : 'simple_expr) in + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _6 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in + Obj.repr( +# 1513 "parsing/parser.mly" + ( unclosed "(" 5 ")" 7 ) +# 9668 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 6 : 'simple_expr) in + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _6 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in + Obj.repr( +# 1515 "parsing/parser.mly" + ( let id = mkexp @@ Pexp_ident( ghloc @@ Ldot(_3, "." 
^ _4 ^ "{}")) in + mkexp @@ Pexp_apply(id, [Nolabel, _1; Nolabel, _6]) ) +# 9679 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 6 : 'simple_expr) in + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _6 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in + Obj.repr( +# 1518 "parsing/parser.mly" + ( unclosed "{" 5 "}" 7 ) +# 9689 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'simple_expr) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in + Obj.repr( +# 1520 "parsing/parser.mly" + ( bigarray_get _1 _4 ) +# 9697 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'simple_expr) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'expr_comma_list) in + Obj.repr( +# 1522 "parsing/parser.mly" + ( unclosed "{" 3 "}" 5 ) +# 9705 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'record_expr) in + Obj.repr( +# 1524 "parsing/parser.mly" + ( let (exten, fields) = _2 in mkexp (Pexp_record(fields, exten)) ) +# 9712 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'record_expr) in + Obj.repr( +# 1526 "parsing/parser.mly" + ( unclosed "{" 1 "}" 3 ) +# 9719 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'record_expr) in + Obj.repr( +# 1528 "parsing/parser.mly" + ( let (exten, fields) = _4 in + let rec_exp = mkexp(Pexp_record(fields, exten)) in + mkexp(Pexp_open(Fresh, mkrhs _1 1, rec_exp)) ) +# 9729 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'record_expr) in + Obj.repr( +# 1532 "parsing/parser.mly" + ( unclosed "{" 3 "}" 5 ) +# 9737 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'expr_semi_list) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'opt_semi) in + Obj.repr( +# 1534 "parsing/parser.mly" + ( mkexp (Pexp_array(List.rev _2)) ) +# 9745 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'expr_semi_list) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'opt_semi) in + Obj.repr( +# 1536 "parsing/parser.mly" + ( unclosed "[|" 1 "|]" 4 ) +# 9753 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + Obj.repr( +# 1538 "parsing/parser.mly" + ( mkexp (Pexp_array []) ) +# 9759 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 5 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 2 : 'expr_semi_list) in + let _5 = (Parsing.peek_val __caml_parser_env 1 : 'opt_semi) in + Obj.repr( +# 1540 "parsing/parser.mly" + ( mkexp(Pexp_open(Fresh, mkrhs _1 1, mkexp(Pexp_array(List.rev _4)))) ) +# 9768 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 3 : 'mod_longident) in + Obj.repr( +# 1542 "parsing/parser.mly" + ( mkexp(Pexp_open(Fresh, mkrhs _1 1, mkexp(Pexp_array []))) ) +# 9775 
"parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 5 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 2 : 'expr_semi_list) in + let _5 = (Parsing.peek_val __caml_parser_env 1 : 'opt_semi) in + Obj.repr( +# 1544 "parsing/parser.mly" + ( unclosed "[|" 3 "|]" 6 ) +# 9784 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'expr_semi_list) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'opt_semi) in + Obj.repr( +# 1546 "parsing/parser.mly" + ( reloc_exp (mktailexp (rhs_loc 4) (List.rev _2)) ) +# 9792 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'expr_semi_list) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'opt_semi) in + Obj.repr( +# 1548 "parsing/parser.mly" + ( unclosed "[" 1 "]" 4 ) +# 9800 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 5 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 2 : 'expr_semi_list) in + let _5 = (Parsing.peek_val __caml_parser_env 1 : 'opt_semi) in + Obj.repr( +# 1550 "parsing/parser.mly" + ( let list_exp = reloc_exp (mktailexp (rhs_loc 6) (List.rev _4)) in + mkexp(Pexp_open(Fresh, mkrhs _1 1, list_exp)) ) +# 9810 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 3 : 'mod_longident) in + Obj.repr( +# 1553 "parsing/parser.mly" + ( mkexp(Pexp_open(Fresh, mkrhs _1 1, + mkexp(Pexp_construct(mkrhs (Lident "[]") 1, None)))) ) +# 9818 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 5 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 2 : 'expr_semi_list) in + let _5 = (Parsing.peek_val __caml_parser_env 1 : 'opt_semi) in + Obj.repr( +# 1556 "parsing/parser.mly" + ( unclosed "[" 3 "]" 6 ) +# 9827 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : string) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'simple_expr) in + Obj.repr( +# 1558 "parsing/parser.mly" + ( mkexp(Pexp_apply(mkoperator _1 1, [Nolabel,_2])) ) +# 9835 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'simple_expr) in + Obj.repr( +# 1560 "parsing/parser.mly" + ( mkexp(Pexp_apply(mkoperator "!" 
1, [Nolabel,_2])) ) +# 9842 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'class_longident) in + Obj.repr( +# 1562 "parsing/parser.mly" + ( mkexp_attrs (Pexp_new(mkrhs _3 3)) _2 ) +# 9850 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'field_expr_list) in + Obj.repr( +# 1564 "parsing/parser.mly" + ( mkexp (Pexp_override _2) ) +# 9857 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'field_expr_list) in + Obj.repr( +# 1566 "parsing/parser.mly" + ( unclosed "{<" 1 ">}" 3 ) +# 9864 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + Obj.repr( +# 1568 "parsing/parser.mly" + ( mkexp (Pexp_override [])) +# 9870 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'field_expr_list) in + Obj.repr( +# 1570 "parsing/parser.mly" + ( mkexp(Pexp_open(Fresh, mkrhs _1 1, mkexp (Pexp_override _4)))) +# 9878 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 3 : 'mod_longident) in + Obj.repr( +# 1572 "parsing/parser.mly" + ( mkexp(Pexp_open(Fresh, mkrhs _1 1, mkexp (Pexp_override [])))) +# 9885 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'field_expr_list) in + Obj.repr( +# 1574 "parsing/parser.mly" + ( unclosed "{<" 3 ">}" 5 ) +# 9893 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'simple_expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'label) in + Obj.repr( +# 1576 "parsing/parser.mly" + ( mkexp(Pexp_send(_1, mkrhs _3 3)) ) +# 9901 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'simple_expr) in + let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'simple_expr) in + Obj.repr( +# 1578 "parsing/parser.mly" + ( mkinfix _1 _2 _3 ) +# 9910 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'ext_attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'module_expr) in + Obj.repr( +# 1580 "parsing/parser.mly" + ( mkexp_attrs (Pexp_pack _4) _3 ) +# 9918 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'ext_attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : 'module_expr) in + let _6 = (Parsing.peek_val __caml_parser_env 1 : 'package_type) in + Obj.repr( +# 1582 "parsing/parser.mly" + ( mkexp_attrs (Pexp_constraint (ghexp (Pexp_pack _4), + ghtyp (Ptyp_package _6))) + _3 ) +# 9929 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 3 : 'ext_attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 2 : 'module_expr) in + Obj.repr( +# 1586 "parsing/parser.mly" + ( unclosed "(" 1 ")" 6 ) +# 9937 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 8 : 'mod_longident) in + let _5 = 
(Parsing.peek_val __caml_parser_env 4 : 'ext_attributes) in + let _6 = (Parsing.peek_val __caml_parser_env 3 : 'module_expr) in + let _8 = (Parsing.peek_val __caml_parser_env 1 : 'package_type) in + Obj.repr( +# 1589 "parsing/parser.mly" + ( mkexp(Pexp_open(Fresh, mkrhs _1 1, + mkexp_attrs (Pexp_constraint (ghexp (Pexp_pack _6), + ghtyp (Ptyp_package _8))) + _5 )) ) +# 9950 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 7 : 'mod_longident) in + let _5 = (Parsing.peek_val __caml_parser_env 3 : 'ext_attributes) in + let _6 = (Parsing.peek_val __caml_parser_env 2 : 'module_expr) in + Obj.repr( +# 1594 "parsing/parser.mly" + ( unclosed "(" 3 ")" 8 ) +# 9959 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'extension) in + Obj.repr( +# 1596 "parsing/parser.mly" + ( mkexp (Pexp_extension _1) ) +# 9966 "parsing/parser.ml" + : 'simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'labeled_simple_expr) in + Obj.repr( +# 1600 "parsing/parser.mly" + ( [_1] ) +# 9973 "parsing/parser.ml" + : 'simple_labeled_expr_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'simple_labeled_expr_list) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'labeled_simple_expr) in + Obj.repr( +# 1602 "parsing/parser.mly" + ( _2 :: _1 ) +# 9981 "parsing/parser.ml" + : 'simple_labeled_expr_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'simple_expr) in + Obj.repr( +# 1606 "parsing/parser.mly" + ( (Nolabel, _1) ) +# 9988 "parsing/parser.ml" + : 'labeled_simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'label_expr) in + Obj.repr( +# 1608 "parsing/parser.mly" + ( _1 ) +# 9995 "parsing/parser.ml" + : 'labeled_simple_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : string) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'simple_expr) in + Obj.repr( +# 1612 "parsing/parser.mly" + ( (Labelled _1, _2) ) +# 10003 "parsing/parser.ml" + : 'label_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'label_ident) in + Obj.repr( +# 1614 "parsing/parser.mly" + ( (Labelled (fst _2), snd _2) ) +# 10010 "parsing/parser.ml" + : 'label_expr)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'label_ident) in + Obj.repr( +# 1616 "parsing/parser.mly" + ( (Optional (fst _2), snd _2) ) +# 10017 "parsing/parser.ml" + : 'label_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : string) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'simple_expr) in + Obj.repr( +# 1618 "parsing/parser.mly" + ( (Optional _1, _2) ) +# 10025 "parsing/parser.ml" + : 'label_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 1621 "parsing/parser.mly" + ( (_1, mkexp(Pexp_ident(mkrhs (Lident _1) 1))) ) +# 10032 "parsing/parser.ml" + : 'label_ident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 1624 "parsing/parser.mly" + ( [mkrhs _1 1] ) +# 10039 "parsing/parser.ml" + : 'lident_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : string) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'lident_list) in + Obj.repr( +# 1625 "parsing/parser.mly" + ( mkrhs _1 1 :: _2 ) +# 
10047 "parsing/parser.ml" + : 'lident_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'val_ident) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'strict_binding) in + Obj.repr( +# 1629 "parsing/parser.mly" + ( (mkpatvar _1 1, _2) ) +# 10055 "parsing/parser.ml" + : 'let_binding_body)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 3 : 'val_ident) in + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'type_constraint) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 1631 "parsing/parser.mly" + ( let v = mkpatvar _1 1 in (* PR#7344 *) + let t = + match _2 with + Some t, None -> t + | _, Some t -> t + | _ -> assert false + in + (ghpat(Ppat_constraint(v, ghtyp(Ptyp_poly([],t)))), + mkexp_constraint _4 _2) ) +# 10072 "parsing/parser.ml" + : 'let_binding_body)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 6 : 'val_ident) in + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'typevar_list) in + let _5 = (Parsing.peek_val __caml_parser_env 2 : 'core_type) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 1641 "parsing/parser.mly" + ( (ghpat(Ppat_constraint(mkpatvar _1 1, + ghtyp(Ptyp_poly(List.rev _3,_5)))), + _7) ) +# 10084 "parsing/parser.ml" + : 'let_binding_body)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 7 : 'val_ident) in + let _4 = (Parsing.peek_val __caml_parser_env 4 : 'lident_list) in + let _6 = (Parsing.peek_val __caml_parser_env 2 : 'core_type) in + let _8 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 1645 "parsing/parser.mly" + ( let exp, poly = wrap_type_annotation _4 _6 _8 in + (ghpat(Ppat_constraint(mkpatvar _1 1, poly)), exp) ) +# 10095 "parsing/parser.ml" + : 'let_binding_body)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_no_exn) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 1648 "parsing/parser.mly" + ( (_1, _3) ) +# 10103 "parsing/parser.ml" + : 'let_binding_body)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'simple_pattern_not_ident) in + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'core_type) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 1650 "parsing/parser.mly" + ( (ghpat(Ppat_constraint(_1, _3)), _5) ) +# 10112 "parsing/parser.ml" + : 'let_binding_body)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'let_binding) in + Obj.repr( +# 1653 "parsing/parser.mly" + ( _1 ) +# 10119 "parsing/parser.ml" + : 'let_bindings)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'let_bindings) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'and_let_binding) in + Obj.repr( +# 1654 "parsing/parser.mly" + ( addlb _1 _2 ) +# 10127 "parsing/parser.ml" + : 'let_bindings)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'rec_flag) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'let_binding_body) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 1658 "parsing/parser.mly" + ( let (ext, attr) = _2 in + mklbs ext _3 (mklb true _4 (attr@_5)) ) +# 10138 "parsing/parser.ml" + : 'let_binding)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in + let _3 = 
(Parsing.peek_val __caml_parser_env 1 : 'let_binding_body) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 1663 "parsing/parser.mly" + ( mklb false _3 (_2@_4) ) +# 10147 "parsing/parser.ml" + : 'and_let_binding)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'strict_binding) in + Obj.repr( +# 1667 "parsing/parser.mly" + ( _1 ) +# 10154 "parsing/parser.ml" + : 'fun_binding)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'type_constraint) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 1669 "parsing/parser.mly" + ( mkexp_constraint _3 _1 ) +# 10162 "parsing/parser.ml" + : 'fun_binding)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 1673 "parsing/parser.mly" + ( _2 ) +# 10169 "parsing/parser.ml" + : 'strict_binding)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'labeled_simple_pattern) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'fun_binding) in + Obj.repr( +# 1675 "parsing/parser.mly" + ( let (l, o, p) = _1 in ghexp(Pexp_fun(l, o, p, _2)) ) +# 10177 "parsing/parser.ml" + : 'strict_binding)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'lident_list) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'fun_binding) in + Obj.repr( +# 1677 "parsing/parser.mly" + ( mk_newtypes _3 _5 ) +# 10185 "parsing/parser.ml" + : 'strict_binding)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'match_case) in + Obj.repr( +# 1680 "parsing/parser.mly" + ( [_1] ) +# 10192 "parsing/parser.ml" + : 'match_cases)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'match_cases) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'match_case) in + Obj.repr( +# 1681 "parsing/parser.mly" + ( _3 :: _1 ) +# 10200 "parsing/parser.ml" + : 'match_cases)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 1685 "parsing/parser.mly" + ( Exp.case _1 _3 ) +# 10208 "parsing/parser.ml" + : 'match_case)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'pattern) in + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'seq_expr) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 1687 "parsing/parser.mly" + ( Exp.case _1 ~guard:_3 _5 ) +# 10217 "parsing/parser.ml" + : 'match_case)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in + Obj.repr( +# 1689 "parsing/parser.mly" + ( Exp.case _1 (Exp.unreachable ~loc:(rhs_loc 3) ())) +# 10224 "parsing/parser.ml" + : 'match_case)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 1693 "parsing/parser.mly" + ( _2 ) +# 10231 "parsing/parser.ml" + : 'fun_def)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'simple_core_type) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 1695 "parsing/parser.mly" + ( mkexp (Pexp_constraint (_4, _2)) ) +# 10239 "parsing/parser.ml" + : 'fun_def)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'labeled_simple_pattern) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'fun_def) in + Obj.repr( +# 1698 "parsing/parser.mly" + ( + let 
(l,o,p) = _1 in + ghexp(Pexp_fun(l, o, p, _2)) + ) +# 10250 "parsing/parser.ml" + : 'fun_def)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'lident_list) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'fun_def) in + Obj.repr( +# 1703 "parsing/parser.mly" + ( mk_newtypes _3 _5 ) +# 10258 "parsing/parser.ml" + : 'fun_def)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr_comma_list) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1706 "parsing/parser.mly" + ( _3 :: _1 ) +# 10266 "parsing/parser.ml" + : 'expr_comma_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1707 "parsing/parser.mly" + ( [_3; _1] ) +# 10274 "parsing/parser.ml" + : 'expr_comma_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'simple_expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'lbl_expr_list) in + Obj.repr( +# 1710 "parsing/parser.mly" + ( (Some _1, _3) ) +# 10282 "parsing/parser.ml" + : 'record_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'lbl_expr_list) in + Obj.repr( +# 1711 "parsing/parser.mly" + ( (None, _1) ) +# 10289 "parsing/parser.ml" + : 'record_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'lbl_expr) in + Obj.repr( +# 1714 "parsing/parser.mly" + ( [_1] ) +# 10296 "parsing/parser.ml" + : 'lbl_expr_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'lbl_expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'lbl_expr_list) in + Obj.repr( +# 1715 "parsing/parser.mly" + ( _1 :: _3 ) +# 10304 "parsing/parser.ml" + : 'lbl_expr_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'lbl_expr) in + Obj.repr( +# 1716 "parsing/parser.mly" + ( [_1] ) +# 10311 "parsing/parser.ml" + : 'lbl_expr_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 3 : 'label_longident) in + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'opt_type_constraint) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1720 "parsing/parser.mly" + ( (mkrhs _1 1, mkexp_opt_constraint _4 _2) ) +# 10320 "parsing/parser.ml" + : 'lbl_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'label_longident) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'opt_type_constraint) in + Obj.repr( +# 1722 "parsing/parser.mly" + ( (mkrhs _1 1, mkexp_opt_constraint (exp_of_label _1 1) _2) ) +# 10328 "parsing/parser.ml" + : 'lbl_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'field_expr) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'opt_semi) in + Obj.repr( +# 1725 "parsing/parser.mly" + ( [_1] ) +# 10336 "parsing/parser.ml" + : 'field_expr_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'field_expr) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'field_expr_list) in + Obj.repr( +# 1726 "parsing/parser.mly" + ( _1 :: _3 ) +# 10344 "parsing/parser.ml" + : 'field_expr_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'label) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1730 "parsing/parser.mly" + ( (mkrhs _1 1, _3) ) +# 10352 "parsing/parser.ml" + : 'field_expr)) +; (fun 
__caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'label) in + Obj.repr( +# 1732 "parsing/parser.mly" + ( (mkrhs _1 1, exp_of_label (Lident _1) 1) ) +# 10359 "parsing/parser.ml" + : 'field_expr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1735 "parsing/parser.mly" + ( [_1] ) +# 10366 "parsing/parser.ml" + : 'expr_semi_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr_semi_list) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in + Obj.repr( +# 1736 "parsing/parser.mly" + ( _3 :: _1 ) +# 10374 "parsing/parser.ml" + : 'expr_semi_list)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in + Obj.repr( +# 1739 "parsing/parser.mly" + ( (Some _2, None) ) +# 10381 "parsing/parser.ml" + : 'type_constraint)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'core_type) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in + Obj.repr( +# 1740 "parsing/parser.mly" + ( (Some _2, Some _4) ) +# 10389 "parsing/parser.ml" + : 'type_constraint)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in + Obj.repr( +# 1741 "parsing/parser.mly" + ( (None, Some _2) ) +# 10396 "parsing/parser.ml" + : 'type_constraint)) +; (fun __caml_parser_env -> + Obj.repr( +# 1742 "parsing/parser.mly" + ( syntax_error() ) +# 10402 "parsing/parser.ml" + : 'type_constraint)) +; (fun __caml_parser_env -> + Obj.repr( +# 1743 "parsing/parser.mly" + ( syntax_error() ) +# 10408 "parsing/parser.ml" + : 'type_constraint)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'type_constraint) in + Obj.repr( +# 1746 "parsing/parser.mly" + ( Some _1 ) +# 10415 "parsing/parser.ml" + : 'opt_type_constraint)) +; (fun __caml_parser_env -> + Obj.repr( +# 1747 "parsing/parser.mly" + ( None ) +# 10421 "parsing/parser.ml" + : 'opt_type_constraint)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'val_ident) in + Obj.repr( +# 1754 "parsing/parser.mly" + ( mkpat(Ppat_alias(_1, mkrhs _3 3)) ) +# 10429 "parsing/parser.ml" + : 'pattern)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in + Obj.repr( +# 1756 "parsing/parser.mly" + ( expecting 3 "identifier" ) +# 10436 "parsing/parser.ml" + : 'pattern)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'pattern_comma_list) in + Obj.repr( +# 1758 "parsing/parser.mly" + ( mkpat(Ppat_tuple(List.rev _1)) ) +# 10443 "parsing/parser.ml" + : 'pattern)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in + Obj.repr( +# 1760 "parsing/parser.mly" + ( mkpat_cons (rhs_loc 2) (ghpat(Ppat_tuple[_1;_3])) (symbol_rloc()) ) +# 10451 "parsing/parser.ml" + : 'pattern)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in + Obj.repr( +# 1762 "parsing/parser.mly" + ( expecting 3 "pattern" ) +# 10458 "parsing/parser.ml" + : 'pattern)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in + Obj.repr( +# 1764 "parsing/parser.mly" + ( mkpat(Ppat_or(_1, _3)) ) +# 10466 "parsing/parser.ml" + : 'pattern)) +; (fun __caml_parser_env -> + 
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in + Obj.repr( +# 1766 "parsing/parser.mly" + ( expecting 3 "pattern" ) +# 10473 "parsing/parser.ml" + : 'pattern)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in + Obj.repr( +# 1768 "parsing/parser.mly" + ( mkpat_attrs (Ppat_exception _3) _2) +# 10481 "parsing/parser.ml" + : 'pattern)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'pattern) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'attribute) in + Obj.repr( +# 1770 "parsing/parser.mly" + ( Pat.attr _1 _2 ) +# 10489 "parsing/parser.ml" + : 'pattern)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'pattern_gen) in + Obj.repr( +# 1771 "parsing/parser.mly" + ( _1 ) +# 10496 "parsing/parser.ml" + : 'pattern)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_no_exn) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'val_ident) in + Obj.repr( +# 1775 "parsing/parser.mly" + ( mkpat(Ppat_alias(_1, mkrhs _3 3)) ) +# 10504 "parsing/parser.ml" + : 'pattern_no_exn)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_no_exn) in + Obj.repr( +# 1777 "parsing/parser.mly" + ( expecting 3 "identifier" ) +# 10511 "parsing/parser.ml" + : 'pattern_no_exn)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'pattern_no_exn_comma_list) in + Obj.repr( +# 1779 "parsing/parser.mly" + ( mkpat(Ppat_tuple(List.rev _1)) ) +# 10518 "parsing/parser.ml" + : 'pattern_no_exn)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_no_exn) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in + Obj.repr( +# 1781 "parsing/parser.mly" + ( mkpat_cons (rhs_loc 2) (ghpat(Ppat_tuple[_1;_3])) (symbol_rloc()) ) +# 10526 "parsing/parser.ml" + : 'pattern_no_exn)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_no_exn) in + Obj.repr( +# 1783 "parsing/parser.mly" + ( expecting 3 "pattern" ) +# 10533 "parsing/parser.ml" + : 'pattern_no_exn)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_no_exn) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in + Obj.repr( +# 1785 "parsing/parser.mly" + ( mkpat(Ppat_or(_1, _3)) ) +# 10541 "parsing/parser.ml" + : 'pattern_no_exn)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_no_exn) in + Obj.repr( +# 1787 "parsing/parser.mly" + ( expecting 3 "pattern" ) +# 10548 "parsing/parser.ml" + : 'pattern_no_exn)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'pattern_no_exn) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'attribute) in + Obj.repr( +# 1789 "parsing/parser.mly" + ( Pat.attr _1 _2 ) +# 10556 "parsing/parser.ml" + : 'pattern_no_exn)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'pattern_gen) in + Obj.repr( +# 1790 "parsing/parser.mly" + ( _1 ) +# 10563 "parsing/parser.ml" + : 'pattern_no_exn)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'simple_pattern) in + Obj.repr( +# 1794 "parsing/parser.mly" + ( _1 ) +# 10570 "parsing/parser.ml" + : 'pattern_gen)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'constr_longident) in + let _2 = (Parsing.peek_val 
__caml_parser_env 0 : 'pattern) in + Obj.repr( +# 1796 "parsing/parser.mly" + ( mkpat(Ppat_construct(mkrhs _1 1, Some _2)) ) +# 10578 "parsing/parser.ml" + : 'pattern_gen)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'name_tag) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in + Obj.repr( +# 1798 "parsing/parser.mly" + ( mkpat(Ppat_variant(_1, Some _2)) ) +# 10586 "parsing/parser.ml" + : 'pattern_gen)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'simple_pattern) in + Obj.repr( +# 1800 "parsing/parser.mly" + ( mkpat_attrs (Ppat_lazy _3) _2) +# 10594 "parsing/parser.ml" + : 'pattern_gen)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'val_ident) in + Obj.repr( +# 1804 "parsing/parser.mly" + ( mkpat(Ppat_var (mkrhs _1 1)) ) +# 10601 "parsing/parser.ml" + : 'simple_pattern)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'simple_pattern_not_ident) in + Obj.repr( +# 1805 "parsing/parser.mly" + ( _1 ) +# 10608 "parsing/parser.ml" + : 'simple_pattern)) +; (fun __caml_parser_env -> + Obj.repr( +# 1809 "parsing/parser.mly" + ( mkpat(Ppat_any) ) +# 10614 "parsing/parser.ml" + : 'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'signed_constant) in + Obj.repr( +# 1811 "parsing/parser.mly" + ( mkpat(Ppat_constant _1) ) +# 10621 "parsing/parser.ml" + : 'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'signed_constant) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'signed_constant) in + Obj.repr( +# 1813 "parsing/parser.mly" + ( mkpat(Ppat_interval (_1, _3)) ) +# 10629 "parsing/parser.ml" + : 'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'constr_longident) in + Obj.repr( +# 1815 "parsing/parser.mly" + ( mkpat(Ppat_construct(mkrhs _1 1, None)) ) +# 10636 "parsing/parser.ml" + : 'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'name_tag) in + Obj.repr( +# 1817 "parsing/parser.mly" + ( mkpat(Ppat_variant(_1, None)) ) +# 10643 "parsing/parser.ml" + : 'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'type_longident) in + Obj.repr( +# 1819 "parsing/parser.mly" + ( mkpat(Ppat_type (mkrhs _2 2)) ) +# 10650 "parsing/parser.ml" + : 'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'simple_delimited_pattern) in + Obj.repr( +# 1821 "parsing/parser.mly" + ( _1 ) +# 10657 "parsing/parser.ml" + : 'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'mod_longident) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'simple_delimited_pattern) in + Obj.repr( +# 1823 "parsing/parser.mly" + ( mkpat @@ Ppat_open(mkrhs _1 1, _3) ) +# 10665 "parsing/parser.ml" + : 'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 3 : 'mod_longident) in + Obj.repr( +# 1825 "parsing/parser.mly" + ( mkpat @@ Ppat_open(mkrhs _1 1, mkpat @@ + Ppat_construct ( mkrhs (Lident "[]") 4, None)) ) +# 10673 "parsing/parser.ml" + : 'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 3 : 'mod_longident) in + Obj.repr( 
+# 1828 "parsing/parser.mly" + ( mkpat @@ Ppat_open( mkrhs _1 1, mkpat @@ + Ppat_construct ( mkrhs (Lident "()") 4, None) ) ) +# 10681 "parsing/parser.ml" + : 'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'pattern) in + Obj.repr( +# 1831 "parsing/parser.mly" + ( mkpat @@ Ppat_open (mkrhs _1 1, _4)) +# 10689 "parsing/parser.ml" + : 'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'pattern) in + Obj.repr( +# 1833 "parsing/parser.mly" + (unclosed "(" 3 ")" 5 ) +# 10697 "parsing/parser.ml" + : 'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 3 : 'mod_longident) in + Obj.repr( +# 1835 "parsing/parser.mly" + ( expecting 4 "pattern" ) +# 10704 "parsing/parser.ml" + : 'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'pattern) in + Obj.repr( +# 1837 "parsing/parser.mly" + ( reloc_pat _2 ) +# 10711 "parsing/parser.ml" + : 'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'pattern) in + Obj.repr( +# 1839 "parsing/parser.mly" + ( unclosed "(" 1 ")" 3 ) +# 10718 "parsing/parser.ml" + : 'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'pattern) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'core_type) in + Obj.repr( +# 1841 "parsing/parser.mly" + ( mkpat(Ppat_constraint(_2, _4)) ) +# 10726 "parsing/parser.ml" + : 'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'pattern) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'core_type) in + Obj.repr( +# 1843 "parsing/parser.mly" + ( unclosed "(" 1 ")" 5 ) +# 10734 "parsing/parser.ml" + : 'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in + Obj.repr( +# 1845 "parsing/parser.mly" + ( expecting 4 "type" ) +# 10741 "parsing/parser.ml" + : 'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'ext_attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : string) in + Obj.repr( +# 1847 "parsing/parser.mly" + ( mkpat_attrs (Ppat_unpack (mkrhs _4 4)) _3 ) +# 10749 "parsing/parser.ml" + : 'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'ext_attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _6 = (Parsing.peek_val __caml_parser_env 1 : 'package_type) in + Obj.repr( +# 1849 "parsing/parser.mly" + ( mkpat_attrs + (Ppat_constraint(mkpat(Ppat_unpack (mkrhs _4 4)), + ghtyp(Ptyp_package _6))) + _3 ) +# 10761 "parsing/parser.ml" + : 'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'ext_attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _6 = (Parsing.peek_val __caml_parser_env 1 : 'package_type) in + Obj.repr( +# 1854 "parsing/parser.mly" + ( unclosed "(" 1 ")" 7 ) +# 10770 "parsing/parser.ml" + : 'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'extension) in + Obj.repr( +# 1856 "parsing/parser.mly" + ( mkpat(Ppat_extension _1) ) +# 10777 
"parsing/parser.ml" + : 'simple_pattern_not_ident)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'lbl_pattern_list) in + Obj.repr( +# 1861 "parsing/parser.mly" + ( let (fields, closed) = _2 in mkpat(Ppat_record(fields, closed)) ) +# 10784 "parsing/parser.ml" + : 'simple_delimited_pattern)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'lbl_pattern_list) in + Obj.repr( +# 1863 "parsing/parser.mly" + ( unclosed "{" 1 "}" 3 ) +# 10791 "parsing/parser.ml" + : 'simple_delimited_pattern)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_semi_list) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'opt_semi) in + Obj.repr( +# 1865 "parsing/parser.mly" + ( reloc_pat (mktailpat (rhs_loc 4) (List.rev _2)) ) +# 10799 "parsing/parser.ml" + : 'simple_delimited_pattern)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_semi_list) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'opt_semi) in + Obj.repr( +# 1867 "parsing/parser.mly" + ( unclosed "[" 1 "]" 4 ) +# 10807 "parsing/parser.ml" + : 'simple_delimited_pattern)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_semi_list) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'opt_semi) in + Obj.repr( +# 1869 "parsing/parser.mly" + ( mkpat(Ppat_array(List.rev _2)) ) +# 10815 "parsing/parser.ml" + : 'simple_delimited_pattern)) +; (fun __caml_parser_env -> + Obj.repr( +# 1871 "parsing/parser.mly" + ( mkpat(Ppat_array []) ) +# 10821 "parsing/parser.ml" + : 'simple_delimited_pattern)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_semi_list) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'opt_semi) in + Obj.repr( +# 1873 "parsing/parser.mly" + ( unclosed "[|" 1 "|]" 4 ) +# 10829 "parsing/parser.ml" + : 'simple_delimited_pattern)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_comma_list) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in + Obj.repr( +# 1876 "parsing/parser.mly" + ( _3 :: _1 ) +# 10837 "parsing/parser.ml" + : 'pattern_comma_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in + Obj.repr( +# 1877 "parsing/parser.mly" + ( [_3; _1] ) +# 10845 "parsing/parser.ml" + : 'pattern_comma_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in + Obj.repr( +# 1878 "parsing/parser.mly" + ( expecting 3 "pattern" ) +# 10852 "parsing/parser.ml" + : 'pattern_comma_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_no_exn_comma_list) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in + Obj.repr( +# 1881 "parsing/parser.mly" + ( _3 :: _1 ) +# 10860 "parsing/parser.ml" + : 'pattern_no_exn_comma_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_no_exn) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in + Obj.repr( +# 1882 "parsing/parser.mly" + ( [_3; _1] ) +# 10868 "parsing/parser.ml" + : 'pattern_no_exn_comma_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_no_exn) in + Obj.repr( +# 1883 "parsing/parser.mly" + ( expecting 3 "pattern" ) +# 10875 "parsing/parser.ml" + : 'pattern_no_exn_comma_list)) +; (fun __caml_parser_env -> + let _1 = 
(Parsing.peek_val __caml_parser_env 0 : 'pattern) in + Obj.repr( +# 1886 "parsing/parser.mly" + ( [_1] ) +# 10882 "parsing/parser.ml" + : 'pattern_semi_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_semi_list) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in + Obj.repr( +# 1887 "parsing/parser.mly" + ( _3 :: _1 ) +# 10890 "parsing/parser.ml" + : 'pattern_semi_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'lbl_pattern) in + Obj.repr( +# 1890 "parsing/parser.mly" + ( [_1], Closed ) +# 10897 "parsing/parser.ml" + : 'lbl_pattern_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'lbl_pattern) in + Obj.repr( +# 1891 "parsing/parser.mly" + ( [_1], Closed ) +# 10904 "parsing/parser.ml" + : 'lbl_pattern_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 3 : 'lbl_pattern) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'opt_semi) in + Obj.repr( +# 1892 "parsing/parser.mly" + ( [_1], Open ) +# 10912 "parsing/parser.ml" + : 'lbl_pattern_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'lbl_pattern) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'lbl_pattern_list) in + Obj.repr( +# 1894 "parsing/parser.mly" + ( let (fields, closed) = _3 in _1 :: fields, closed ) +# 10920 "parsing/parser.ml" + : 'lbl_pattern_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 3 : 'label_longident) in + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'opt_pattern_type_constraint) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in + Obj.repr( +# 1898 "parsing/parser.mly" + ( (mkrhs _1 1, mkpat_opt_constraint _4 _2) ) +# 10929 "parsing/parser.ml" + : 'lbl_pattern)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'label_longident) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'opt_pattern_type_constraint) in + Obj.repr( +# 1900 "parsing/parser.mly" + ( (mkrhs _1 1, mkpat_opt_constraint (pat_of_label _1 1) _2) ) +# 10937 "parsing/parser.ml" + : 'lbl_pattern)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in + Obj.repr( +# 1903 "parsing/parser.mly" + ( Some _2 ) +# 10944 "parsing/parser.ml" + : 'opt_pattern_type_constraint)) +; (fun __caml_parser_env -> + Obj.repr( +# 1904 "parsing/parser.mly" + ( None ) +# 10950 "parsing/parser.ml" + : 'opt_pattern_type_constraint)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 4 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 3 : 'val_ident) in + let _5 = (Parsing.peek_val __caml_parser_env 1 : 'core_type) in + let _6 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 1911 "parsing/parser.mly" + ( let (ext, attrs) = _2 in + Val.mk (mkrhs _3 3) _5 ~attrs:(attrs@_6) + ~loc:(symbol_rloc()) ~docs:(symbol_docs ()) + , ext ) +# 10963 "parsing/parser.ml" + : 'value_description)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string * string option) in + Obj.repr( +# 1920 "parsing/parser.mly" + ( [fst _1] ) +# 10970 "parsing/parser.ml" + : 'primitive_declaration_body)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : string * string option) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'primitive_declaration_body) in + Obj.repr( +# 1921 "parsing/parser.mly" + ( fst _1 :: _2 ) +# 10978 
"parsing/parser.ml" + : 'primitive_declaration_body)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 6 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 5 : 'val_ident) in + let _5 = (Parsing.peek_val __caml_parser_env 3 : 'core_type) in + let _7 = (Parsing.peek_val __caml_parser_env 1 : 'primitive_declaration_body) in + let _8 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 1926 "parsing/parser.mly" + ( let (ext, attrs) = _2 in + Val.mk (mkrhs _3 3) _5 ~prim:_7 ~attrs:(attrs@_8) + ~loc:(symbol_rloc ()) ~docs:(symbol_docs ()) + , ext ) +# 10992 "parsing/parser.ml" + : 'primitive_declaration)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'type_declaration) in + Obj.repr( +# 1936 "parsing/parser.mly" + ( let (nonrec_flag, ty, ext) = _1 in (nonrec_flag, [ty], ext) ) +# 10999 "parsing/parser.ml" + : 'type_declarations)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'type_declarations) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'and_type_declaration) in + Obj.repr( +# 1938 "parsing/parser.mly" + ( let (nonrec_flag, tys, ext) = _1 in (nonrec_flag, _2 :: tys, ext) ) +# 11007 "parsing/parser.ml" + : 'type_declarations)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 6 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 5 : 'nonrec_flag) in + let _4 = (Parsing.peek_val __caml_parser_env 4 : 'optional_type_parameters) in + let _5 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _6 = (Parsing.peek_val __caml_parser_env 2 : 'type_kind) in + let _7 = (Parsing.peek_val __caml_parser_env 1 : 'constraints) in + let _8 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 1944 "parsing/parser.mly" + ( let (kind, priv, manifest) = _6 in + let (ext, attrs) = _2 in + let ty = + Type.mk (mkrhs _5 5) ~params:_4 ~cstrs:(List.rev _7) ~kind + ~priv ?manifest ~attrs:(attrs@_8) + ~loc:(symbol_rloc ()) ~docs:(symbol_docs ()) + in + (_3, ty, ext) ) +# 11027 "parsing/parser.ml" + : 'type_declaration)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 5 : 'attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'optional_type_parameters) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _5 = (Parsing.peek_val __caml_parser_env 2 : 'type_kind) in + let _6 = (Parsing.peek_val __caml_parser_env 1 : 'constraints) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 1956 "parsing/parser.mly" + ( let (kind, priv, manifest) = _5 in + Type.mk (mkrhs _4 4) ~params:_3 ~cstrs:(List.rev _6) + ~kind ~priv ?manifest ~attrs:(_2@_7) ~loc:(symbol_rloc ()) + ~text:(symbol_text ()) ~docs:(symbol_docs ()) ) +# 11042 "parsing/parser.ml" + : 'and_type_declaration)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'constraints) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'constrain) in + Obj.repr( +# 1962 "parsing/parser.mly" + ( _3 :: _1 ) +# 11050 "parsing/parser.ml" + : 'constraints)) +; (fun __caml_parser_env -> + Obj.repr( +# 1963 "parsing/parser.mly" + ( [] ) +# 11056 "parsing/parser.ml" + : 'constraints)) +; (fun __caml_parser_env -> + Obj.repr( +# 1967 "parsing/parser.mly" + ( (Ptype_abstract, Public, None) ) +# 11062 "parsing/parser.ml" + : 'type_kind)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in + 
Obj.repr( +# 1969 "parsing/parser.mly" + ( (Ptype_abstract, Public, Some _2) ) +# 11069 "parsing/parser.ml" + : 'type_kind)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in + Obj.repr( +# 1971 "parsing/parser.mly" + ( (Ptype_abstract, Private, Some _3) ) +# 11076 "parsing/parser.ml" + : 'type_kind)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'constructor_declarations) in + Obj.repr( +# 1973 "parsing/parser.mly" + ( (Ptype_variant(List.rev _2), Public, None) ) +# 11083 "parsing/parser.ml" + : 'type_kind)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'constructor_declarations) in + Obj.repr( +# 1975 "parsing/parser.mly" + ( (Ptype_variant(List.rev _3), Private, None) ) +# 11090 "parsing/parser.ml" + : 'type_kind)) +; (fun __caml_parser_env -> + Obj.repr( +# 1977 "parsing/parser.mly" + ( (Ptype_open, Public, None) ) +# 11096 "parsing/parser.ml" + : 'type_kind)) +; (fun __caml_parser_env -> + Obj.repr( +# 1979 "parsing/parser.mly" + ( (Ptype_open, Private, None) ) +# 11102 "parsing/parser.ml" + : 'type_kind)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'private_flag) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'label_declarations) in + Obj.repr( +# 1981 "parsing/parser.mly" + ( (Ptype_record _4, _2, None) ) +# 11110 "parsing/parser.ml" + : 'type_kind)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'core_type) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'private_flag) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'constructor_declarations) in + Obj.repr( +# 1983 "parsing/parser.mly" + ( (Ptype_variant(List.rev _5), _4, Some _2) ) +# 11119 "parsing/parser.ml" + : 'type_kind)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'core_type) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'private_flag) in + Obj.repr( +# 1985 "parsing/parser.mly" + ( (Ptype_open, _4, Some _2) ) +# 11127 "parsing/parser.ml" + : 'type_kind)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 5 : 'core_type) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : 'private_flag) in + let _6 = (Parsing.peek_val __caml_parser_env 1 : 'label_declarations) in + Obj.repr( +# 1987 "parsing/parser.mly" + ( (Ptype_record _6, _4, Some _2) ) +# 11136 "parsing/parser.ml" + : 'type_kind)) +; (fun __caml_parser_env -> + Obj.repr( +# 1990 "parsing/parser.mly" + ( [] ) +# 11142 "parsing/parser.ml" + : 'optional_type_parameters)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'optional_type_parameter) in + Obj.repr( +# 1991 "parsing/parser.mly" + ( [_1] ) +# 11149 "parsing/parser.ml" + : 'optional_type_parameters)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'optional_type_parameter_list) in + Obj.repr( +# 1992 "parsing/parser.mly" + ( List.rev _2 ) +# 11156 "parsing/parser.ml" + : 'optional_type_parameters)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'type_variance) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'optional_type_variable) in + Obj.repr( +# 1995 "parsing/parser.mly" + ( _2, _1 ) +# 11164 "parsing/parser.ml" + : 'optional_type_parameter)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'optional_type_parameter) in + Obj.repr( +# 1998 "parsing/parser.mly" + ( [_1] ) +# 11171 "parsing/parser.ml" + : 
'optional_type_parameter_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'optional_type_parameter_list) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'optional_type_parameter) in + Obj.repr( +# 1999 "parsing/parser.mly" + ( _3 :: _1 ) +# 11179 "parsing/parser.ml" + : 'optional_type_parameter_list)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'ident) in + Obj.repr( +# 2002 "parsing/parser.mly" + ( mktyp(Ptyp_var _2) ) +# 11186 "parsing/parser.ml" + : 'optional_type_variable)) +; (fun __caml_parser_env -> + Obj.repr( +# 2003 "parsing/parser.mly" + ( mktyp(Ptyp_any) ) +# 11192 "parsing/parser.ml" + : 'optional_type_variable)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'type_variance) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'type_variable) in + Obj.repr( +# 2008 "parsing/parser.mly" + ( _2, _1 ) +# 11200 "parsing/parser.ml" + : 'type_parameter)) +; (fun __caml_parser_env -> + Obj.repr( +# 2011 "parsing/parser.mly" + ( Invariant ) +# 11206 "parsing/parser.ml" + : 'type_variance)) +; (fun __caml_parser_env -> + Obj.repr( +# 2012 "parsing/parser.mly" + ( Covariant ) +# 11212 "parsing/parser.ml" + : 'type_variance)) +; (fun __caml_parser_env -> + Obj.repr( +# 2013 "parsing/parser.mly" + ( Contravariant ) +# 11218 "parsing/parser.ml" + : 'type_variance)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'ident) in + Obj.repr( +# 2016 "parsing/parser.mly" + ( mktyp(Ptyp_var _2) ) +# 11225 "parsing/parser.ml" + : 'type_variable)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'type_parameter) in + Obj.repr( +# 2019 "parsing/parser.mly" + ( [_1] ) +# 11232 "parsing/parser.ml" + : 'type_parameter_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'type_parameter_list) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'type_parameter) in + Obj.repr( +# 2020 "parsing/parser.mly" + ( _3 :: _1 ) +# 11240 "parsing/parser.ml" + : 'type_parameter_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'constructor_declaration) in + Obj.repr( +# 2023 "parsing/parser.mly" + ( [_1] ) +# 11247 "parsing/parser.ml" + : 'constructor_declarations)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'bar_constructor_declaration) in + Obj.repr( +# 2024 "parsing/parser.mly" + ( [_1] ) +# 11254 "parsing/parser.ml" + : 'constructor_declarations)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'constructor_declarations) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'bar_constructor_declaration) in + Obj.repr( +# 2025 "parsing/parser.mly" + ( _2 :: _1 ) +# 11262 "parsing/parser.ml" + : 'constructor_declarations)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'constr_ident) in + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'generalized_constructor_arguments) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in + Obj.repr( +# 2029 "parsing/parser.mly" + ( + let args,res = _2 in + Type.constructor (mkrhs _1 1) ~args ?res ~attrs:_3 + ~loc:(symbol_rloc()) ~info:(symbol_info ()) + ) +# 11275 "parsing/parser.ml" + : 'constructor_declaration)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'constr_ident) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'generalized_constructor_arguments) in + let _4 = 
(Parsing.peek_val __caml_parser_env 0 : 'attributes) in + Obj.repr( +# 2037 "parsing/parser.mly" + ( + let args,res = _3 in + Type.constructor (mkrhs _2 2) ~args ?res ~attrs:_4 + ~loc:(symbol_rloc()) ~info:(symbol_info ()) + ) +# 11288 "parsing/parser.ml" + : 'bar_constructor_declaration)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'sig_exception_declaration) in + Obj.repr( +# 2044 "parsing/parser.mly" + ( _1 ) +# 11295 "parsing/parser.ml" + : 'str_exception_declaration)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 5 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 4 : 'constr_ident) in + let _5 = (Parsing.peek_val __caml_parser_env 2 : 'constr_longident) in + let _6 = (Parsing.peek_val __caml_parser_env 1 : 'attributes) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 2047 "parsing/parser.mly" + ( let (ext,attrs) = _2 in + Te.rebind (mkrhs _3 3) (mkrhs _5 5) ~attrs:(attrs @ _6 @ _7) + ~loc:(symbol_rloc()) ~docs:(symbol_docs ()) + , ext ) +# 11309 "parsing/parser.ml" + : 'str_exception_declaration)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 4 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 3 : 'constr_ident) in + let _4 = (Parsing.peek_val __caml_parser_env 2 : 'generalized_constructor_arguments) in + let _5 = (Parsing.peek_val __caml_parser_env 1 : 'attributes) in + let _6 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 2055 "parsing/parser.mly" + ( let args, res = _4 in + let (ext,attrs) = _2 in + Te.decl (mkrhs _3 3) ~args ?res ~attrs:(attrs @ _5 @ _6) + ~loc:(symbol_rloc()) ~docs:(symbol_docs ()) + , ext ) +# 11324 "parsing/parser.ml" + : 'sig_exception_declaration)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'constr_ident) in + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'generalized_constructor_arguments) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in + Obj.repr( +# 2063 "parsing/parser.mly" + ( let args, res = _2 in + Te.decl (mkrhs _1 1) ~args ?res ~attrs:_3 ~loc:(symbol_rloc()) ) +# 11334 "parsing/parser.ml" + : 'let_exception_declaration)) +; (fun __caml_parser_env -> + Obj.repr( +# 2067 "parsing/parser.mly" + ( (Pcstr_tuple [],None) ) +# 11340 "parsing/parser.ml" + : 'generalized_constructor_arguments)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'constructor_arguments) in + Obj.repr( +# 2068 "parsing/parser.mly" + ( (_2,None) ) +# 11347 "parsing/parser.ml" + : 'generalized_constructor_arguments)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'constructor_arguments) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'simple_core_type) in + Obj.repr( +# 2070 "parsing/parser.mly" + ( (_2,Some _4) ) +# 11355 "parsing/parser.ml" + : 'generalized_constructor_arguments)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'simple_core_type) in + Obj.repr( +# 2072 "parsing/parser.mly" + ( (Pcstr_tuple [],Some _2) ) +# 11362 "parsing/parser.ml" + : 'generalized_constructor_arguments)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'core_type_list) in + Obj.repr( +# 2076 "parsing/parser.mly" + ( Pcstr_tuple (List.rev _1) ) +# 11369 "parsing/parser.ml" + : 'constructor_arguments)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 
'label_declarations) in + Obj.repr( +# 2077 "parsing/parser.mly" + ( Pcstr_record _2 ) +# 11376 "parsing/parser.ml" + : 'constructor_arguments)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'label_declaration) in + Obj.repr( +# 2080 "parsing/parser.mly" + ( [_1] ) +# 11383 "parsing/parser.ml" + : 'label_declarations)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'label_declaration_semi) in + Obj.repr( +# 2081 "parsing/parser.mly" + ( [_1] ) +# 11390 "parsing/parser.ml" + : 'label_declarations)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'label_declaration_semi) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'label_declarations) in + Obj.repr( +# 2082 "parsing/parser.mly" + ( _1 :: _2 ) +# 11398 "parsing/parser.ml" + : 'label_declarations)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'mutable_flag) in + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'label) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'poly_type_no_attr) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in + Obj.repr( +# 2086 "parsing/parser.mly" + ( + Type.field (mkrhs _2 2) _4 ~mut:_1 ~attrs:_5 + ~loc:(symbol_rloc()) ~info:(symbol_info ()) + ) +# 11411 "parsing/parser.ml" + : 'label_declaration)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 6 : 'mutable_flag) in + let _2 = (Parsing.peek_val __caml_parser_env 5 : 'label) in + let _4 = (Parsing.peek_val __caml_parser_env 3 : 'poly_type_no_attr) in + let _5 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in + let _7 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in + Obj.repr( +# 2093 "parsing/parser.mly" + ( + let info = + match rhs_info 5 with + | Some _ as info_before_semi -> info_before_semi + | None -> symbol_info () + in + Type.field (mkrhs _2 2) _4 ~mut:_1 ~attrs:(_5 @ _7) + ~loc:(symbol_rloc()) ~info + ) +# 11430 "parsing/parser.ml" + : 'label_declaration_semi)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 7 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 6 : 'nonrec_flag) in + let _4 = (Parsing.peek_val __caml_parser_env 5 : 'optional_type_parameters) in + let _5 = (Parsing.peek_val __caml_parser_env 4 : 'type_longident) in + let _7 = (Parsing.peek_val __caml_parser_env 2 : 'private_flag) in + let _8 = (Parsing.peek_val __caml_parser_env 1 : 'str_extension_constructors) in + let _9 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 2109 "parsing/parser.mly" + ( let (ext, attrs) = _2 in + if _3 <> Recursive then not_expecting 3 "nonrec flag"; + Te.mk (mkrhs _5 5) (List.rev _8) ~params:_4 ~priv:_7 + ~attrs:(attrs@_9) ~docs:(symbol_docs ()) + , ext ) +# 11447 "parsing/parser.ml" + : 'str_type_extension)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 7 : 'ext_attributes) in + let _3 = (Parsing.peek_val __caml_parser_env 6 : 'nonrec_flag) in + let _4 = (Parsing.peek_val __caml_parser_env 5 : 'optional_type_parameters) in + let _5 = (Parsing.peek_val __caml_parser_env 4 : 'type_longident) in + let _7 = (Parsing.peek_val __caml_parser_env 2 : 'private_flag) in + let _8 = (Parsing.peek_val __caml_parser_env 1 : 'sig_extension_constructors) in + let _9 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 2118 "parsing/parser.mly" + ( let (ext, attrs) = _2 in + if _3 <> Recursive then not_expecting 3 "nonrec flag"; 
+ Te.mk (mkrhs _5 5) (List.rev _8) ~params:_4 ~priv:_7 + ~attrs:(attrs @ _9) ~docs:(symbol_docs ()) + , ext ) +# 11464 "parsing/parser.ml" + : 'sig_type_extension)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'extension_constructor_declaration) in + Obj.repr( +# 2125 "parsing/parser.mly" + ( [_1] ) +# 11471 "parsing/parser.ml" + : 'str_extension_constructors)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'bar_extension_constructor_declaration) in + Obj.repr( +# 2126 "parsing/parser.mly" + ( [_1] ) +# 11478 "parsing/parser.ml" + : 'str_extension_constructors)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'extension_constructor_rebind) in + Obj.repr( +# 2127 "parsing/parser.mly" + ( [_1] ) +# 11485 "parsing/parser.ml" + : 'str_extension_constructors)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'bar_extension_constructor_rebind) in + Obj.repr( +# 2128 "parsing/parser.mly" + ( [_1] ) +# 11492 "parsing/parser.ml" + : 'str_extension_constructors)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'str_extension_constructors) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'bar_extension_constructor_declaration) in + Obj.repr( +# 2130 "parsing/parser.mly" + ( _2 :: _1 ) +# 11500 "parsing/parser.ml" + : 'str_extension_constructors)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'str_extension_constructors) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'bar_extension_constructor_rebind) in + Obj.repr( +# 2132 "parsing/parser.mly" + ( _2 :: _1 ) +# 11508 "parsing/parser.ml" + : 'str_extension_constructors)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'extension_constructor_declaration) in + Obj.repr( +# 2135 "parsing/parser.mly" + ( [_1] ) +# 11515 "parsing/parser.ml" + : 'sig_extension_constructors)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'bar_extension_constructor_declaration) in + Obj.repr( +# 2136 "parsing/parser.mly" + ( [_1] ) +# 11522 "parsing/parser.ml" + : 'sig_extension_constructors)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'sig_extension_constructors) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'bar_extension_constructor_declaration) in + Obj.repr( +# 2138 "parsing/parser.mly" + ( _2 :: _1 ) +# 11530 "parsing/parser.ml" + : 'sig_extension_constructors)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'constr_ident) in + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'generalized_constructor_arguments) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in + Obj.repr( +# 2142 "parsing/parser.mly" + ( let args, res = _2 in + Te.decl (mkrhs _1 1) ~args ?res ~attrs:_3 + ~loc:(symbol_rloc()) ~info:(symbol_info ()) ) +# 11541 "parsing/parser.ml" + : 'extension_constructor_declaration)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'constr_ident) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'generalized_constructor_arguments) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in + Obj.repr( +# 2148 "parsing/parser.mly" + ( let args, res = _3 in + Te.decl (mkrhs _2 2) ~args ?res ~attrs:_4 + ~loc:(symbol_rloc()) ~info:(symbol_info ()) ) +# 11552 "parsing/parser.ml" + : 'bar_extension_constructor_declaration)) +; (fun __caml_parser_env -> + let _1 = 
(Parsing.peek_val __caml_parser_env 3 : 'constr_ident) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'constr_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in + Obj.repr( +# 2154 "parsing/parser.mly" + ( Te.rebind (mkrhs _1 1) (mkrhs _3 3) ~attrs:_4 + ~loc:(symbol_rloc()) ~info:(symbol_info ()) ) +# 11562 "parsing/parser.ml" + : 'extension_constructor_rebind)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'constr_ident) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'constr_longident) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in + Obj.repr( +# 2159 "parsing/parser.mly" + ( Te.rebind (mkrhs _2 2) (mkrhs _4 4) ~attrs:_5 + ~loc:(symbol_rloc()) ~info:(symbol_info ()) ) +# 11572 "parsing/parser.ml" + : 'bar_extension_constructor_rebind)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'with_constraint) in + Obj.repr( +# 2166 "parsing/parser.mly" + ( [_1] ) +# 11579 "parsing/parser.ml" + : 'with_constraints)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'with_constraints) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'with_constraint) in + Obj.repr( +# 2167 "parsing/parser.mly" + ( _3 :: _1 ) +# 11587 "parsing/parser.ml" + : 'with_constraints)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 4 : 'optional_type_parameters) in + let _3 = (Parsing.peek_val __caml_parser_env 3 : 'label_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 2 : 'with_type_binder) in + let _5 = (Parsing.peek_val __caml_parser_env 1 : 'core_type_no_attr) in + let _6 = (Parsing.peek_val __caml_parser_env 0 : 'constraints) in + Obj.repr( +# 2172 "parsing/parser.mly" + ( Pwith_type + (mkrhs _3 3, + (Type.mk (mkrhs (Longident.last _3) 3) + ~params:_2 + ~cstrs:(List.rev _6) + ~manifest:_5 + ~priv:_4 + ~loc:(symbol_rloc()))) ) +# 11605 "parsing/parser.ml" + : 'with_constraint)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'optional_type_parameters) in + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'label_longident) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'core_type_no_attr) in + Obj.repr( +# 2183 "parsing/parser.mly" + ( Pwith_typesubst + (mkrhs _3 3, + (Type.mk (mkrhs (Longident.last _3) 3) + ~params:_2 + ~manifest:_5 + ~loc:(symbol_rloc()))) ) +# 11619 "parsing/parser.ml" + : 'with_constraint)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'mod_ext_longident) in + Obj.repr( +# 2190 "parsing/parser.mly" + ( Pwith_module (mkrhs _2 2, mkrhs _4 4) ) +# 11627 "parsing/parser.ml" + : 'with_constraint)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'mod_longident) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'mod_ext_longident) in + Obj.repr( +# 2192 "parsing/parser.mly" + ( Pwith_modsubst (mkrhs _2 2, mkrhs _4 4) ) +# 11635 "parsing/parser.ml" + : 'with_constraint)) +; (fun __caml_parser_env -> + Obj.repr( +# 2195 "parsing/parser.mly" + ( Public ) +# 11641 "parsing/parser.ml" + : 'with_type_binder)) +; (fun __caml_parser_env -> + Obj.repr( +# 2196 "parsing/parser.mly" + ( Private ) +# 11647 "parsing/parser.ml" + : 'with_type_binder)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'ident) in + Obj.repr( +# 2202 "parsing/parser.mly" + ( [mkrhs _2 2] ) +# 11654 "parsing/parser.ml" + : 
'typevar_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'typevar_list) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'ident) in + Obj.repr( +# 2203 "parsing/parser.mly" + ( mkrhs _3 3 :: _1 ) +# 11662 "parsing/parser.ml" + : 'typevar_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in + Obj.repr( +# 2207 "parsing/parser.mly" + ( _1 ) +# 11669 "parsing/parser.ml" + : 'poly_type)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'typevar_list) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in + Obj.repr( +# 2209 "parsing/parser.mly" + ( mktyp(Ptyp_poly(List.rev _1, _3)) ) +# 11677 "parsing/parser.ml" + : 'poly_type)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'core_type_no_attr) in + Obj.repr( +# 2213 "parsing/parser.mly" + ( _1 ) +# 11684 "parsing/parser.ml" + : 'poly_type_no_attr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'typevar_list) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'core_type_no_attr) in + Obj.repr( +# 2215 "parsing/parser.mly" + ( mktyp(Ptyp_poly(List.rev _1, _3)) ) +# 11692 "parsing/parser.ml" + : 'poly_type_no_attr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'core_type_no_attr) in + Obj.repr( +# 2222 "parsing/parser.mly" + ( _1 ) +# 11699 "parsing/parser.ml" + : 'core_type)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'core_type) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'attribute) in + Obj.repr( +# 2224 "parsing/parser.mly" + ( Typ.attr _1 _2 ) +# 11707 "parsing/parser.ml" + : 'core_type)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'core_type2) in + Obj.repr( +# 2228 "parsing/parser.mly" + ( _1 ) +# 11714 "parsing/parser.ml" + : 'core_type_no_attr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 3 : 'core_type2) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'ident) in + Obj.repr( +# 2230 "parsing/parser.mly" + ( mktyp(Ptyp_alias(_1, _4)) ) +# 11722 "parsing/parser.ml" + : 'core_type_no_attr)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'simple_core_type_or_tuple) in + Obj.repr( +# 2234 "parsing/parser.mly" + ( _1 ) +# 11729 "parsing/parser.ml" + : 'core_type2)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 4 : string) in + let _4 = (Parsing.peek_val __caml_parser_env 2 : 'core_type2) in + let _6 = (Parsing.peek_val __caml_parser_env 0 : 'core_type2) in + Obj.repr( +# 2236 "parsing/parser.mly" + ( let param = extra_rhs_core_type _4 ~pos:4 in + mktyp (Ptyp_arrow(Optional _2 , param, _6)) ) +# 11739 "parsing/parser.ml" + : 'core_type2)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 3 : string) in + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'core_type2) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'core_type2) in + Obj.repr( +# 2239 "parsing/parser.mly" + ( let param = extra_rhs_core_type _2 ~pos:2 in + mktyp(Ptyp_arrow(Optional _1 , param, _4)) + ) +# 11750 "parsing/parser.ml" + : 'core_type2)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : string) in + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'core_type2) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'core_type2) in + Obj.repr( +# 2243 "parsing/parser.mly" + ( let param = 
extra_rhs_core_type _3 ~pos:3 in + mktyp(Ptyp_arrow(Labelled _1, param, _5)) ) +# 11760 "parsing/parser.ml" + : 'core_type2)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'core_type2) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'core_type2) in + Obj.repr( +# 2246 "parsing/parser.mly" + ( let param = extra_rhs_core_type _1 ~pos:1 in + mktyp(Ptyp_arrow(Nolabel, param, _3)) ) +# 11769 "parsing/parser.ml" + : 'core_type2)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'simple_core_type2) in + Obj.repr( +# 2252 "parsing/parser.mly" + ( _1 ) +# 11776 "parsing/parser.ml" + : 'simple_core_type)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'core_type_comma_list) in + Obj.repr( +# 2254 "parsing/parser.mly" + ( match _2 with [sty] -> sty | _ -> raise Parse_error ) +# 11783 "parsing/parser.ml" + : 'simple_core_type)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'ident) in + Obj.repr( +# 2259 "parsing/parser.mly" + ( mktyp(Ptyp_var _2) ) +# 11790 "parsing/parser.ml" + : 'simple_core_type2)) +; (fun __caml_parser_env -> + Obj.repr( +# 2261 "parsing/parser.mly" + ( mktyp(Ptyp_any) ) +# 11796 "parsing/parser.ml" + : 'simple_core_type2)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'type_longident) in + Obj.repr( +# 2263 "parsing/parser.mly" + ( mktyp(Ptyp_constr(mkrhs _1 1, [])) ) +# 11803 "parsing/parser.ml" + : 'simple_core_type2)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'simple_core_type2) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'type_longident) in + Obj.repr( +# 2265 "parsing/parser.mly" + ( mktyp(Ptyp_constr(mkrhs _2 2, [_1])) ) +# 11811 "parsing/parser.ml" + : 'simple_core_type2)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'core_type_comma_list) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'type_longident) in + Obj.repr( +# 2267 "parsing/parser.mly" + ( mktyp(Ptyp_constr(mkrhs _4 4, List.rev _2)) ) +# 11819 "parsing/parser.ml" + : 'simple_core_type2)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'meth_list) in + Obj.repr( +# 2269 "parsing/parser.mly" + ( let (f, c) = _2 in mktyp(Ptyp_object (f, c)) ) +# 11826 "parsing/parser.ml" + : 'simple_core_type2)) +; (fun __caml_parser_env -> + Obj.repr( +# 2271 "parsing/parser.mly" + ( mktyp(Ptyp_object ([], Closed)) ) +# 11832 "parsing/parser.ml" + : 'simple_core_type2)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'class_longident) in + Obj.repr( +# 2273 "parsing/parser.mly" + ( mktyp(Ptyp_class(mkrhs _2 2, [])) ) +# 11839 "parsing/parser.ml" + : 'simple_core_type2)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'simple_core_type2) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'class_longident) in + Obj.repr( +# 2275 "parsing/parser.mly" + ( mktyp(Ptyp_class(mkrhs _3 3, [_1])) ) +# 11847 "parsing/parser.ml" + : 'simple_core_type2)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'core_type_comma_list) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'class_longident) in + Obj.repr( +# 2277 "parsing/parser.mly" + ( mktyp(Ptyp_class(mkrhs _5 5, List.rev _2)) ) +# 11855 "parsing/parser.ml" + : 'simple_core_type2)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'tag_field) in + Obj.repr( +# 2279 
"parsing/parser.mly" + ( mktyp(Ptyp_variant([_2], Closed, None)) ) +# 11862 "parsing/parser.ml" + : 'simple_core_type2)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'row_field_list) in + Obj.repr( +# 2285 "parsing/parser.mly" + ( mktyp(Ptyp_variant(List.rev _3, Closed, None)) ) +# 11869 "parsing/parser.ml" + : 'simple_core_type2)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 3 : 'row_field) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'row_field_list) in + Obj.repr( +# 2287 "parsing/parser.mly" + ( mktyp(Ptyp_variant(_2 :: List.rev _4, Closed, None)) ) +# 11877 "parsing/parser.ml" + : 'simple_core_type2)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'opt_bar) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'row_field_list) in + Obj.repr( +# 2289 "parsing/parser.mly" + ( mktyp(Ptyp_variant(List.rev _3, Open, None)) ) +# 11885 "parsing/parser.ml" + : 'simple_core_type2)) +; (fun __caml_parser_env -> + Obj.repr( +# 2291 "parsing/parser.mly" + ( mktyp(Ptyp_variant([], Open, None)) ) +# 11891 "parsing/parser.ml" + : 'simple_core_type2)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'opt_bar) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'row_field_list) in + Obj.repr( +# 2293 "parsing/parser.mly" + ( mktyp(Ptyp_variant(List.rev _3, Closed, Some [])) ) +# 11899 "parsing/parser.ml" + : 'simple_core_type2)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 4 : 'opt_bar) in + let _3 = (Parsing.peek_val __caml_parser_env 3 : 'row_field_list) in + let _5 = (Parsing.peek_val __caml_parser_env 1 : 'name_tag_list) in + Obj.repr( +# 2295 "parsing/parser.mly" + ( mktyp(Ptyp_variant(List.rev _3, Closed, Some (List.rev _5))) ) +# 11908 "parsing/parser.ml" + : 'simple_core_type2)) +; (fun __caml_parser_env -> + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'ext_attributes) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'package_type) in + Obj.repr( +# 2297 "parsing/parser.mly" + ( mktyp_attrs (Ptyp_package _4) _3 ) +# 11916 "parsing/parser.ml" + : 'simple_core_type2)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'extension) in + Obj.repr( +# 2299 "parsing/parser.mly" + ( mktyp (Ptyp_extension _1) ) +# 11923 "parsing/parser.ml" + : 'simple_core_type2)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'module_type) in + Obj.repr( +# 2302 "parsing/parser.mly" + ( package_type_of_module_type _1 ) +# 11930 "parsing/parser.ml" + : 'package_type)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'row_field) in + Obj.repr( +# 2305 "parsing/parser.mly" + ( [_1] ) +# 11937 "parsing/parser.ml" + : 'row_field_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'row_field_list) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'row_field) in + Obj.repr( +# 2306 "parsing/parser.mly" + ( _3 :: _1 ) +# 11945 "parsing/parser.ml" + : 'row_field_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tag_field) in + Obj.repr( +# 2309 "parsing/parser.mly" + ( _1 ) +# 11952 "parsing/parser.ml" + : 'row_field)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'simple_core_type) in + Obj.repr( +# 2310 "parsing/parser.mly" + ( Rinherit _1 ) +# 11959 "parsing/parser.ml" + : 'row_field)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val 
__caml_parser_env 4 : 'name_tag) in + let _3 = (Parsing.peek_val __caml_parser_env 2 : 'opt_ampersand) in + let _4 = (Parsing.peek_val __caml_parser_env 1 : 'amper_type_list) in + let _5 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in + Obj.repr( +# 2314 "parsing/parser.mly" + ( Rtag (mkrhs _1 1, add_info_attrs (symbol_info ()) _5, + _3, List.rev _4) ) +# 11970 "parsing/parser.ml" + : 'tag_field)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'name_tag) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in + Obj.repr( +# 2317 "parsing/parser.mly" + ( Rtag (mkrhs _1 1, add_info_attrs (symbol_info ()) _2, true, []) ) +# 11978 "parsing/parser.ml" + : 'tag_field)) +; (fun __caml_parser_env -> + Obj.repr( +# 2320 "parsing/parser.mly" + ( true ) +# 11984 "parsing/parser.ml" + : 'opt_ampersand)) +; (fun __caml_parser_env -> + Obj.repr( +# 2321 "parsing/parser.mly" + ( false ) +# 11990 "parsing/parser.ml" + : 'opt_ampersand)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'core_type_no_attr) in + Obj.repr( +# 2324 "parsing/parser.mly" + ( [_1] ) +# 11997 "parsing/parser.ml" + : 'amper_type_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'amper_type_list) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'core_type_no_attr) in + Obj.repr( +# 2325 "parsing/parser.mly" + ( _3 :: _1 ) +# 12005 "parsing/parser.ml" + : 'amper_type_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'name_tag) in + Obj.repr( +# 2328 "parsing/parser.mly" + ( [_1] ) +# 12012 "parsing/parser.ml" + : 'name_tag_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'name_tag_list) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'name_tag) in + Obj.repr( +# 2329 "parsing/parser.mly" + ( _2 :: _1 ) +# 12020 "parsing/parser.ml" + : 'name_tag_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'simple_core_type) in + Obj.repr( +# 2332 "parsing/parser.mly" + ( _1 ) +# 12027 "parsing/parser.ml" + : 'simple_core_type_or_tuple)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'simple_core_type) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'core_type_list) in + Obj.repr( +# 2334 "parsing/parser.mly" + ( mktyp(Ptyp_tuple(_1 :: List.rev _3)) ) +# 12035 "parsing/parser.ml" + : 'simple_core_type_or_tuple)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in + Obj.repr( +# 2337 "parsing/parser.mly" + ( [_1] ) +# 12042 "parsing/parser.ml" + : 'core_type_comma_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'core_type_comma_list) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in + Obj.repr( +# 2338 "parsing/parser.mly" + ( _3 :: _1 ) +# 12050 "parsing/parser.ml" + : 'core_type_comma_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'simple_core_type) in + Obj.repr( +# 2341 "parsing/parser.mly" + ( [_1] ) +# 12057 "parsing/parser.ml" + : 'core_type_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'core_type_list) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'simple_core_type) in + Obj.repr( +# 2342 "parsing/parser.mly" + ( _3 :: _1 ) +# 12065 "parsing/parser.ml" + : 'core_type_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'field_semi) in + 
let _2 = (Parsing.peek_val __caml_parser_env 0 : 'meth_list) in + Obj.repr( +# 2345 "parsing/parser.mly" + ( let (f, c) = _2 in (_1 :: f, c) ) +# 12073 "parsing/parser.ml" + : 'meth_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'inherit_field_semi) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'meth_list) in + Obj.repr( +# 2346 "parsing/parser.mly" + ( let (f, c) = _2 in (_1 :: f, c) ) +# 12081 "parsing/parser.ml" + : 'meth_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'field_semi) in + Obj.repr( +# 2347 "parsing/parser.mly" + ( [_1], Closed ) +# 12088 "parsing/parser.ml" + : 'meth_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'field) in + Obj.repr( +# 2348 "parsing/parser.mly" + ( [_1], Closed ) +# 12095 "parsing/parser.ml" + : 'meth_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'inherit_field_semi) in + Obj.repr( +# 2349 "parsing/parser.mly" + ( [_1], Closed ) +# 12102 "parsing/parser.ml" + : 'meth_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'simple_core_type) in + Obj.repr( +# 2350 "parsing/parser.mly" + ( [Oinherit _1], Closed ) +# 12109 "parsing/parser.ml" + : 'meth_list)) +; (fun __caml_parser_env -> + Obj.repr( +# 2351 "parsing/parser.mly" + ( [], Open ) +# 12115 "parsing/parser.ml" + : 'meth_list)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 3 : 'label) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'poly_type_no_attr) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in + Obj.repr( +# 2355 "parsing/parser.mly" + ( Otag (mkrhs _1 1, add_info_attrs (symbol_info ()) _4, _3) ) +# 12124 "parsing/parser.ml" + : 'field)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 5 : 'label) in + let _3 = (Parsing.peek_val __caml_parser_env 3 : 'poly_type_no_attr) in + let _4 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in + let _6 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in + Obj.repr( +# 2360 "parsing/parser.mly" + ( let info = + match rhs_info 4 with + | Some _ as info_before_semi -> info_before_semi + | None -> symbol_info () + in + ( Otag (mkrhs _1 1, add_info_attrs info (_4 @ _6), _3)) ) +# 12139 "parsing/parser.ml" + : 'field_semi)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'simple_core_type) in + Obj.repr( +# 2369 "parsing/parser.mly" + ( Oinherit _1 ) +# 12146 "parsing/parser.ml" + : 'inherit_field_semi)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2372 "parsing/parser.mly" + ( _1 ) +# 12153 "parsing/parser.ml" + : 'label)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string * char option) in + Obj.repr( +# 2378 "parsing/parser.mly" + ( let (n, m) = _1 in Pconst_integer (n, m) ) +# 12160 "parsing/parser.ml" + : 'constant)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : char) in + Obj.repr( +# 2379 "parsing/parser.mly" + ( Pconst_char _1 ) +# 12167 "parsing/parser.ml" + : 'constant)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string * string option) in + Obj.repr( +# 2380 "parsing/parser.mly" + ( let (s, d) = _1 in Pconst_string (s, d) ) +# 12174 "parsing/parser.ml" + : 'constant)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string * char 
option) in + Obj.repr( +# 2381 "parsing/parser.mly" + ( let (f, m) = _1 in Pconst_float (f, m) ) +# 12181 "parsing/parser.ml" + : 'constant)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'constant) in + Obj.repr( +# 2384 "parsing/parser.mly" + ( _1 ) +# 12188 "parsing/parser.ml" + : 'signed_constant)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : string * char option) in + Obj.repr( +# 2385 "parsing/parser.mly" + ( let (n, m) = _2 in Pconst_integer("-" ^ n, m) ) +# 12195 "parsing/parser.ml" + : 'signed_constant)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : string * char option) in + Obj.repr( +# 2386 "parsing/parser.mly" + ( let (f, m) = _2 in Pconst_float("-" ^ f, m) ) +# 12202 "parsing/parser.ml" + : 'signed_constant)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : string * char option) in + Obj.repr( +# 2387 "parsing/parser.mly" + ( let (n, m) = _2 in Pconst_integer (n, m) ) +# 12209 "parsing/parser.ml" + : 'signed_constant)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : string * char option) in + Obj.repr( +# 2388 "parsing/parser.mly" + ( let (f, m) = _2 in Pconst_float(f, m) ) +# 12216 "parsing/parser.ml" + : 'signed_constant)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2394 "parsing/parser.mly" + ( _1 ) +# 12223 "parsing/parser.ml" + : 'ident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2395 "parsing/parser.mly" + ( _1 ) +# 12230 "parsing/parser.ml" + : 'ident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2398 "parsing/parser.mly" + ( _1 ) +# 12237 "parsing/parser.ml" + : 'val_ident)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'operator) in + Obj.repr( +# 2399 "parsing/parser.mly" + ( _2 ) +# 12244 "parsing/parser.ml" + : 'val_ident)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'operator) in + Obj.repr( +# 2400 "parsing/parser.mly" + ( unclosed "(" 1 ")" 3 ) +# 12251 "parsing/parser.ml" + : 'val_ident)) +; (fun __caml_parser_env -> + Obj.repr( +# 2401 "parsing/parser.mly" + ( expecting 2 "operator" ) +# 12257 "parsing/parser.ml" + : 'val_ident)) +; (fun __caml_parser_env -> + Obj.repr( +# 2402 "parsing/parser.mly" + ( expecting 3 "module-expr" ) +# 12263 "parsing/parser.ml" + : 'val_ident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2405 "parsing/parser.mly" + ( _1 ) +# 12270 "parsing/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2406 "parsing/parser.mly" + ( _1 ) +# 12277 "parsing/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2407 "parsing/parser.mly" + ( _1 ) +# 12284 "parsing/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2408 "parsing/parser.mly" + ( _1 ) +# 12291 "parsing/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2409 "parsing/parser.mly" + ( _1 ) +# 12298 "parsing/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val 
__caml_parser_env 0 : string) in + Obj.repr( +# 2410 "parsing/parser.mly" + ( _1 ) +# 12305 "parsing/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : string) in + Obj.repr( +# 2411 "parsing/parser.mly" + ( "."^ _1 ^"()" ) +# 12312 "parsing/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 3 : string) in + Obj.repr( +# 2412 "parsing/parser.mly" + ( "."^ _1 ^ "()<-" ) +# 12319 "parsing/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : string) in + Obj.repr( +# 2413 "parsing/parser.mly" + ( "."^ _1 ^"[]" ) +# 12326 "parsing/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 3 : string) in + Obj.repr( +# 2414 "parsing/parser.mly" + ( "."^ _1 ^ "[]<-" ) +# 12333 "parsing/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : string) in + Obj.repr( +# 2415 "parsing/parser.mly" + ( "."^ _1 ^"{}" ) +# 12340 "parsing/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 3 : string) in + Obj.repr( +# 2416 "parsing/parser.mly" + ( "."^ _1 ^ "{}<-" ) +# 12347 "parsing/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2417 "parsing/parser.mly" + ( _1 ) +# 12354 "parsing/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + Obj.repr( +# 2418 "parsing/parser.mly" + ( "!" ) +# 12360 "parsing/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + Obj.repr( +# 2419 "parsing/parser.mly" + ( "+" ) +# 12366 "parsing/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + Obj.repr( +# 2420 "parsing/parser.mly" + ( "+." ) +# 12372 "parsing/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + Obj.repr( +# 2421 "parsing/parser.mly" + ( "-" ) +# 12378 "parsing/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + Obj.repr( +# 2422 "parsing/parser.mly" + ( "-." 
) +# 12384 "parsing/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + Obj.repr( +# 2423 "parsing/parser.mly" + ( "*" ) +# 12390 "parsing/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + Obj.repr( +# 2424 "parsing/parser.mly" + ( "=" ) +# 12396 "parsing/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + Obj.repr( +# 2425 "parsing/parser.mly" + ( "<" ) +# 12402 "parsing/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + Obj.repr( +# 2426 "parsing/parser.mly" + ( ">" ) +# 12408 "parsing/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + Obj.repr( +# 2427 "parsing/parser.mly" + ( "or" ) +# 12414 "parsing/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + Obj.repr( +# 2428 "parsing/parser.mly" + ( "||" ) +# 12420 "parsing/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + Obj.repr( +# 2429 "parsing/parser.mly" + ( "&" ) +# 12426 "parsing/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + Obj.repr( +# 2430 "parsing/parser.mly" + ( "&&" ) +# 12432 "parsing/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + Obj.repr( +# 2431 "parsing/parser.mly" + ( ":=" ) +# 12438 "parsing/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + Obj.repr( +# 2432 "parsing/parser.mly" + ( "+=" ) +# 12444 "parsing/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + Obj.repr( +# 2433 "parsing/parser.mly" + ( "%" ) +# 12450 "parsing/parser.ml" + : 'operator)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2436 "parsing/parser.mly" + ( _1 ) +# 12457 "parsing/parser.ml" + : 'constr_ident)) +; (fun __caml_parser_env -> + Obj.repr( +# 2437 "parsing/parser.mly" + ( "[]" ) +# 12463 "parsing/parser.ml" + : 'constr_ident)) +; (fun __caml_parser_env -> + Obj.repr( +# 2438 "parsing/parser.mly" + ( "()" ) +# 12469 "parsing/parser.ml" + : 'constr_ident)) +; (fun __caml_parser_env -> + Obj.repr( +# 2439 "parsing/parser.mly" + ( "::" ) +# 12475 "parsing/parser.ml" + : 'constr_ident)) +; (fun __caml_parser_env -> + Obj.repr( +# 2440 "parsing/parser.mly" + ( "false" ) +# 12481 "parsing/parser.ml" + : 'constr_ident)) +; (fun __caml_parser_env -> + Obj.repr( +# 2441 "parsing/parser.mly" + ( "true" ) +# 12487 "parsing/parser.ml" + : 'constr_ident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'val_ident) in + Obj.repr( +# 2445 "parsing/parser.mly" + ( Lident _1 ) +# 12494 "parsing/parser.ml" + : 'val_longident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'mod_longident) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'val_ident) in + Obj.repr( +# 2446 "parsing/parser.mly" + ( Ldot(_1, _3) ) +# 12502 "parsing/parser.ml" + : 'val_longident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'mod_longident) in + Obj.repr( +# 2449 "parsing/parser.mly" + ( _1 ) +# 12509 "parsing/parser.ml" + : 'constr_longident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in + Obj.repr( +# 2450 "parsing/parser.mly" + ( Ldot(_1,"::") ) +# 12516 "parsing/parser.ml" + : 'constr_longident)) +; (fun __caml_parser_env -> + Obj.repr( +# 2451 "parsing/parser.mly" + ( Lident "[]" ) +# 12522 "parsing/parser.ml" + : 'constr_longident)) +; (fun __caml_parser_env -> + Obj.repr( +# 2452 "parsing/parser.mly" + ( Lident "()" ) +# 12528 "parsing/parser.ml" + : 'constr_longident)) +; (fun __caml_parser_env -> + Obj.repr( +# 2453 "parsing/parser.mly" + ( Lident "::" ) +# 12534 
"parsing/parser.ml" + : 'constr_longident)) +; (fun __caml_parser_env -> + Obj.repr( +# 2454 "parsing/parser.mly" + ( Lident "false" ) +# 12540 "parsing/parser.ml" + : 'constr_longident)) +; (fun __caml_parser_env -> + Obj.repr( +# 2455 "parsing/parser.mly" + ( Lident "true" ) +# 12546 "parsing/parser.ml" + : 'constr_longident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2458 "parsing/parser.mly" + ( Lident _1 ) +# 12553 "parsing/parser.ml" + : 'label_longident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'mod_longident) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2459 "parsing/parser.mly" + ( Ldot(_1, _3) ) +# 12561 "parsing/parser.ml" + : 'label_longident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2462 "parsing/parser.mly" + ( Lident _1 ) +# 12568 "parsing/parser.ml" + : 'type_longident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'mod_ext_longident) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2463 "parsing/parser.mly" + ( Ldot(_1, _3) ) +# 12576 "parsing/parser.ml" + : 'type_longident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2466 "parsing/parser.mly" + ( Lident _1 ) +# 12583 "parsing/parser.ml" + : 'mod_longident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'mod_longident) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2467 "parsing/parser.mly" + ( Ldot(_1, _3) ) +# 12591 "parsing/parser.ml" + : 'mod_longident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2470 "parsing/parser.mly" + ( Lident _1 ) +# 12598 "parsing/parser.ml" + : 'mod_ext_longident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'mod_ext_longident) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2471 "parsing/parser.mly" + ( Ldot(_1, _3) ) +# 12606 "parsing/parser.ml" + : 'mod_ext_longident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 3 : 'mod_ext_longident) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'mod_ext_longident) in + Obj.repr( +# 2472 "parsing/parser.mly" + ( lapply _1 _3 ) +# 12614 "parsing/parser.ml" + : 'mod_ext_longident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'ident) in + Obj.repr( +# 2475 "parsing/parser.mly" + ( Lident _1 ) +# 12621 "parsing/parser.ml" + : 'mty_longident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'mod_ext_longident) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'ident) in + Obj.repr( +# 2476 "parsing/parser.mly" + ( Ldot(_1, _3) ) +# 12629 "parsing/parser.ml" + : 'mty_longident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2479 "parsing/parser.mly" + ( Lident _1 ) +# 12636 "parsing/parser.ml" + : 'clty_longident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'mod_ext_longident) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2480 "parsing/parser.mly" + ( Ldot(_1, _3) ) +# 12644 "parsing/parser.ml" + : 'clty_longident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 
string) in + Obj.repr( +# 2483 "parsing/parser.mly" + ( Lident _1 ) +# 12651 "parsing/parser.ml" + : 'class_longident)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'mod_longident) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2484 "parsing/parser.mly" + ( Ldot(_1, _3) ) +# 12659 "parsing/parser.ml" + : 'class_longident)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'ident) in + Obj.repr( +# 2490 "parsing/parser.mly" + ( Ptop_dir(_2, Pdir_none) ) +# 12666 "parsing/parser.ml" + : 'toplevel_directive)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'ident) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : string * string option) in + Obj.repr( +# 2491 "parsing/parser.mly" + ( Ptop_dir(_2, Pdir_string (fst _3)) ) +# 12674 "parsing/parser.ml" + : 'toplevel_directive)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'ident) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : string * char option) in + Obj.repr( +# 2492 "parsing/parser.mly" + ( let (n, m) = _3 in + Ptop_dir(_2, Pdir_int (n ,m)) ) +# 12683 "parsing/parser.ml" + : 'toplevel_directive)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'ident) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'val_longident) in + Obj.repr( +# 2494 "parsing/parser.mly" + ( Ptop_dir(_2, Pdir_ident _3) ) +# 12691 "parsing/parser.ml" + : 'toplevel_directive)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'ident) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'mod_longident) in + Obj.repr( +# 2495 "parsing/parser.mly" + ( Ptop_dir(_2, Pdir_ident _3) ) +# 12699 "parsing/parser.ml" + : 'toplevel_directive)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'ident) in + Obj.repr( +# 2496 "parsing/parser.mly" + ( Ptop_dir(_2, Pdir_bool false) ) +# 12706 "parsing/parser.ml" + : 'toplevel_directive)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'ident) in + Obj.repr( +# 2497 "parsing/parser.mly" + ( Ptop_dir(_2, Pdir_bool true) ) +# 12713 "parsing/parser.ml" + : 'toplevel_directive)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'ident) in + Obj.repr( +# 2503 "parsing/parser.mly" + ( _2 ) +# 12720 "parsing/parser.ml" + : 'name_tag)) +; (fun __caml_parser_env -> + Obj.repr( +# 2506 "parsing/parser.mly" + ( Nonrecursive ) +# 12726 "parsing/parser.ml" + : 'rec_flag)) +; (fun __caml_parser_env -> + Obj.repr( +# 2507 "parsing/parser.mly" + ( Recursive ) +# 12732 "parsing/parser.ml" + : 'rec_flag)) +; (fun __caml_parser_env -> + Obj.repr( +# 2510 "parsing/parser.mly" + ( Recursive ) +# 12738 "parsing/parser.ml" + : 'nonrec_flag)) +; (fun __caml_parser_env -> + Obj.repr( +# 2511 "parsing/parser.mly" + ( Nonrecursive ) +# 12744 "parsing/parser.ml" + : 'nonrec_flag)) +; (fun __caml_parser_env -> + Obj.repr( +# 2514 "parsing/parser.mly" + ( Upto ) +# 12750 "parsing/parser.ml" + : 'direction_flag)) +; (fun __caml_parser_env -> + Obj.repr( +# 2515 "parsing/parser.mly" + ( Downto ) +# 12756 "parsing/parser.ml" + : 'direction_flag)) +; (fun __caml_parser_env -> + Obj.repr( +# 2518 "parsing/parser.mly" + ( Public ) +# 12762 "parsing/parser.ml" + : 'private_flag)) +; (fun __caml_parser_env -> + Obj.repr( +# 2519 "parsing/parser.mly" + ( Private ) +# 12768 "parsing/parser.ml" + : 'private_flag)) +; (fun __caml_parser_env -> + 
Obj.repr( +# 2522 "parsing/parser.mly" + ( Immutable ) +# 12774 "parsing/parser.ml" + : 'mutable_flag)) +; (fun __caml_parser_env -> + Obj.repr( +# 2523 "parsing/parser.mly" + ( Mutable ) +# 12780 "parsing/parser.ml" + : 'mutable_flag)) +; (fun __caml_parser_env -> + Obj.repr( +# 2526 "parsing/parser.mly" + ( Concrete ) +# 12786 "parsing/parser.ml" + : 'virtual_flag)) +; (fun __caml_parser_env -> + Obj.repr( +# 2527 "parsing/parser.mly" + ( Virtual ) +# 12792 "parsing/parser.ml" + : 'virtual_flag)) +; (fun __caml_parser_env -> + Obj.repr( +# 2530 "parsing/parser.mly" + ( Public, Concrete ) +# 12798 "parsing/parser.ml" + : 'private_virtual_flags)) +; (fun __caml_parser_env -> + Obj.repr( +# 2531 "parsing/parser.mly" + ( Private, Concrete ) +# 12804 "parsing/parser.ml" + : 'private_virtual_flags)) +; (fun __caml_parser_env -> + Obj.repr( +# 2532 "parsing/parser.mly" + ( Public, Virtual ) +# 12810 "parsing/parser.ml" + : 'private_virtual_flags)) +; (fun __caml_parser_env -> + Obj.repr( +# 2533 "parsing/parser.mly" + ( Private, Virtual ) +# 12816 "parsing/parser.ml" + : 'private_virtual_flags)) +; (fun __caml_parser_env -> + Obj.repr( +# 2534 "parsing/parser.mly" + ( Private, Virtual ) +# 12822 "parsing/parser.ml" + : 'private_virtual_flags)) +; (fun __caml_parser_env -> + Obj.repr( +# 2537 "parsing/parser.mly" + ( Fresh ) +# 12828 "parsing/parser.ml" + : 'override_flag)) +; (fun __caml_parser_env -> + Obj.repr( +# 2538 "parsing/parser.mly" + ( Override ) +# 12834 "parsing/parser.ml" + : 'override_flag)) +; (fun __caml_parser_env -> + Obj.repr( +# 2541 "parsing/parser.mly" + ( () ) +# 12840 "parsing/parser.ml" + : 'opt_bar)) +; (fun __caml_parser_env -> + Obj.repr( +# 2542 "parsing/parser.mly" + ( () ) +# 12846 "parsing/parser.ml" + : 'opt_bar)) +; (fun __caml_parser_env -> + Obj.repr( +# 2545 "parsing/parser.mly" + ( () ) +# 12852 "parsing/parser.ml" + : 'opt_semi)) +; (fun __caml_parser_env -> + Obj.repr( +# 2546 "parsing/parser.mly" + ( () ) +# 12858 "parsing/parser.ml" + : 'opt_semi)) +; (fun __caml_parser_env -> + Obj.repr( +# 2549 "parsing/parser.mly" + ( "-" ) +# 12864 "parsing/parser.ml" + : 'subtractive)) +; (fun __caml_parser_env -> + Obj.repr( +# 2550 "parsing/parser.mly" + ( "-." ) +# 12870 "parsing/parser.ml" + : 'subtractive)) +; (fun __caml_parser_env -> + Obj.repr( +# 2553 "parsing/parser.mly" + ( "+" ) +# 12876 "parsing/parser.ml" + : 'additive)) +; (fun __caml_parser_env -> + Obj.repr( +# 2554 "parsing/parser.mly" + ( "+." 
) +# 12882 "parsing/parser.ml" + : 'additive)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2560 "parsing/parser.mly" + ( _1 ) +# 12889 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in + Obj.repr( +# 2561 "parsing/parser.mly" + ( _1 ) +# 12896 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2562 "parsing/parser.mly" + ( "and" ) +# 12902 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2563 "parsing/parser.mly" + ( "as" ) +# 12908 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2564 "parsing/parser.mly" + ( "assert" ) +# 12914 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2565 "parsing/parser.mly" + ( "begin" ) +# 12920 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2566 "parsing/parser.mly" + ( "class" ) +# 12926 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2567 "parsing/parser.mly" + ( "constraint" ) +# 12932 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2568 "parsing/parser.mly" + ( "do" ) +# 12938 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2569 "parsing/parser.mly" + ( "done" ) +# 12944 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2570 "parsing/parser.mly" + ( "downto" ) +# 12950 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2571 "parsing/parser.mly" + ( "else" ) +# 12956 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2572 "parsing/parser.mly" + ( "end" ) +# 12962 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2573 "parsing/parser.mly" + ( "exception" ) +# 12968 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2574 "parsing/parser.mly" + ( "external" ) +# 12974 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2575 "parsing/parser.mly" + ( "false" ) +# 12980 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2576 "parsing/parser.mly" + ( "for" ) +# 12986 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2577 "parsing/parser.mly" + ( "fun" ) +# 12992 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2578 "parsing/parser.mly" + ( "function" ) +# 12998 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2579 "parsing/parser.mly" + ( "functor" ) +# 13004 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2580 "parsing/parser.mly" + ( "if" ) +# 13010 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2581 "parsing/parser.mly" + ( "in" ) +# 13016 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2582 "parsing/parser.mly" + ( "include" ) +# 13022 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2583 "parsing/parser.mly" + ( "inherit" ) +# 13028 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2584 "parsing/parser.mly" + ( "initializer" ) +# 13034 "parsing/parser.ml" + : 'single_attr_id)) +; (fun 
__caml_parser_env -> + Obj.repr( +# 2585 "parsing/parser.mly" + ( "lazy" ) +# 13040 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2586 "parsing/parser.mly" + ( "let" ) +# 13046 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2587 "parsing/parser.mly" + ( "match" ) +# 13052 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2588 "parsing/parser.mly" + ( "method" ) +# 13058 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2589 "parsing/parser.mly" + ( "module" ) +# 13064 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2590 "parsing/parser.mly" + ( "mutable" ) +# 13070 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2591 "parsing/parser.mly" + ( "new" ) +# 13076 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2592 "parsing/parser.mly" + ( "nonrec" ) +# 13082 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2593 "parsing/parser.mly" + ( "object" ) +# 13088 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2594 "parsing/parser.mly" + ( "of" ) +# 13094 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2595 "parsing/parser.mly" + ( "open" ) +# 13100 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2596 "parsing/parser.mly" + ( "or" ) +# 13106 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2597 "parsing/parser.mly" + ( "private" ) +# 13112 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2598 "parsing/parser.mly" + ( "rec" ) +# 13118 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2599 "parsing/parser.mly" + ( "sig" ) +# 13124 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2600 "parsing/parser.mly" + ( "struct" ) +# 13130 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2601 "parsing/parser.mly" + ( "then" ) +# 13136 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2602 "parsing/parser.mly" + ( "to" ) +# 13142 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2603 "parsing/parser.mly" + ( "true" ) +# 13148 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2604 "parsing/parser.mly" + ( "try" ) +# 13154 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2605 "parsing/parser.mly" + ( "type" ) +# 13160 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2606 "parsing/parser.mly" + ( "val" ) +# 13166 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2607 "parsing/parser.mly" + ( "virtual" ) +# 13172 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2608 "parsing/parser.mly" + ( "when" ) +# 13178 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2609 "parsing/parser.mly" + ( "while" ) +# 13184 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + Obj.repr( +# 2610 "parsing/parser.mly" + ( "with" ) +# 13190 "parsing/parser.ml" + : 'single_attr_id)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val 
__caml_parser_env 0 : 'single_attr_id) in + Obj.repr( +# 2615 "parsing/parser.mly" + ( mkloc _1 (symbol_rloc()) ) +# 13197 "parsing/parser.ml" + : 'attr_id)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 2 : 'single_attr_id) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'attr_id) in + Obj.repr( +# 2616 "parsing/parser.mly" + ( mkloc (_1 ^ "." ^ _3.txt) (symbol_rloc())) +# 13205 "parsing/parser.ml" + : 'attr_id)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attr_id) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'payload) in + Obj.repr( +# 2619 "parsing/parser.mly" + ( (_2, _3) ) +# 13213 "parsing/parser.ml" + : 'attribute)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attr_id) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'payload) in + Obj.repr( +# 2622 "parsing/parser.mly" + ( (_2, _3) ) +# 13221 "parsing/parser.ml" + : 'post_item_attribute)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attr_id) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'payload) in + Obj.repr( +# 2625 "parsing/parser.mly" + ( (_2, _3) ) +# 13229 "parsing/parser.ml" + : 'floating_attribute)) +; (fun __caml_parser_env -> + Obj.repr( +# 2628 "parsing/parser.mly" + ( [] ) +# 13235 "parsing/parser.ml" + : 'post_item_attributes)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'post_item_attribute) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in + Obj.repr( +# 2629 "parsing/parser.mly" + ( _1 :: _2 ) +# 13243 "parsing/parser.ml" + : 'post_item_attributes)) +; (fun __caml_parser_env -> + Obj.repr( +# 2632 "parsing/parser.mly" + ( [] ) +# 13249 "parsing/parser.ml" + : 'attributes)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'attribute) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in + Obj.repr( +# 2633 "parsing/parser.mly" + ( _1 :: _2 ) +# 13257 "parsing/parser.ml" + : 'attributes)) +; (fun __caml_parser_env -> + Obj.repr( +# 2636 "parsing/parser.mly" + ( None, [] ) +# 13263 "parsing/parser.ml" + : 'ext_attributes)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 1 : 'attribute) in + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in + Obj.repr( +# 2637 "parsing/parser.mly" + ( None, _1 :: _2 ) +# 13271 "parsing/parser.ml" + : 'ext_attributes)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 1 : 'attr_id) in + let _3 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in + Obj.repr( +# 2638 "parsing/parser.mly" + ( Some _2, _3 ) +# 13279 "parsing/parser.ml" + : 'ext_attributes)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attr_id) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'payload) in + Obj.repr( +# 2641 "parsing/parser.mly" + ( (_2, _3) ) +# 13287 "parsing/parser.ml" + : 'extension)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attr_id) in + let _3 = (Parsing.peek_val __caml_parser_env 1 : 'payload) in + Obj.repr( +# 2644 "parsing/parser.mly" + ( (_2, _3) ) +# 13295 "parsing/parser.ml" + : 'item_extension)) +; (fun __caml_parser_env -> + let _1 = (Parsing.peek_val __caml_parser_env 0 : 'structure) in + Obj.repr( +# 2647 "parsing/parser.mly" + ( PStr _1 ) +# 13302 "parsing/parser.ml" + : 'payload)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 
'signature) in + Obj.repr( +# 2648 "parsing/parser.mly" + ( PSig _2 ) +# 13309 "parsing/parser.ml" + : 'payload)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in + Obj.repr( +# 2649 "parsing/parser.mly" + ( PTyp _2 ) +# 13316 "parsing/parser.ml" + : 'payload)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in + Obj.repr( +# 2650 "parsing/parser.mly" + ( PPat (_2, None) ) +# 13323 "parsing/parser.ml" + : 'payload)) +; (fun __caml_parser_env -> + let _2 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in + let _4 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in + Obj.repr( +# 2651 "parsing/parser.mly" + ( PPat (_2, Some _4) ) +# 13331 "parsing/parser.ml" + : 'payload)) +(* Entry implementation *) +; (fun __caml_parser_env -> raise (Parsing.YYexit (Parsing.peek_val __caml_parser_env 0))) +(* Entry interface *) +; (fun __caml_parser_env -> raise (Parsing.YYexit (Parsing.peek_val __caml_parser_env 0))) +(* Entry toplevel_phrase *) +; (fun __caml_parser_env -> raise (Parsing.YYexit (Parsing.peek_val __caml_parser_env 0))) +(* Entry use_file *) +; (fun __caml_parser_env -> raise (Parsing.YYexit (Parsing.peek_val __caml_parser_env 0))) +(* Entry parse_core_type *) +; (fun __caml_parser_env -> raise (Parsing.YYexit (Parsing.peek_val __caml_parser_env 0))) +(* Entry parse_expression *) +; (fun __caml_parser_env -> raise (Parsing.YYexit (Parsing.peek_val __caml_parser_env 0))) +(* Entry parse_pattern *) +; (fun __caml_parser_env -> raise (Parsing.YYexit (Parsing.peek_val __caml_parser_env 0))) +|] +let yytables = + { Parsing.actions=yyact; + Parsing.transl_const=yytransl_const; + Parsing.transl_block=yytransl_block; + Parsing.lhs=yylhs; + Parsing.len=yylen; + Parsing.defred=yydefred; + Parsing.dgoto=yydgoto; + Parsing.sindex=yysindex; + Parsing.rindex=yyrindex; + Parsing.gindex=yygindex; + Parsing.tablesize=yytablesize; + Parsing.table=yytable; + Parsing.check=yycheck; + Parsing.error_function=parse_error; + Parsing.names_const=yynames_const; + Parsing.names_block=yynames_block } +let implementation (lexfun : Lexing.lexbuf -> token) (lexbuf : Lexing.lexbuf) = + (Parsing.yyparse yytables 1 lexfun lexbuf : Parsetree.structure) +let interface (lexfun : Lexing.lexbuf -> token) (lexbuf : Lexing.lexbuf) = + (Parsing.yyparse yytables 2 lexfun lexbuf : Parsetree.signature) +let toplevel_phrase (lexfun : Lexing.lexbuf -> token) (lexbuf : Lexing.lexbuf) = + (Parsing.yyparse yytables 3 lexfun lexbuf : Parsetree.toplevel_phrase) +let use_file (lexfun : Lexing.lexbuf -> token) (lexbuf : Lexing.lexbuf) = + (Parsing.yyparse yytables 4 lexfun lexbuf : Parsetree.toplevel_phrase list) +let parse_core_type (lexfun : Lexing.lexbuf -> token) (lexbuf : Lexing.lexbuf) = + (Parsing.yyparse yytables 5 lexfun lexbuf : Parsetree.core_type) +let parse_expression (lexfun : Lexing.lexbuf -> token) (lexbuf : Lexing.lexbuf) = + (Parsing.yyparse yytables 6 lexfun lexbuf : Parsetree.expression) +let parse_pattern (lexfun : Lexing.lexbuf -> token) (lexbuf : Lexing.lexbuf) = + (Parsing.yyparse yytables 7 lexfun lexbuf : Parsetree.pattern) +;; + +end +module Lexer : sig +#1 "lexer.mli" +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. 
This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* The lexical analyzer *) + +val init : unit -> unit +val token: Lexing.lexbuf -> Parser.token +val skip_hash_bang: Lexing.lexbuf -> unit + +type directive_type + +type error = + | Illegal_character of char + | Illegal_escape of string + | Unterminated_comment of Location.t + | Unterminated_string + | Unterminated_string_in_comment of Location.t * Location.t + | Keyword_as_label of string + | Invalid_literal of string + | Invalid_directive of string * string option + | Unterminated_paren_in_conditional + | Unterminated_if + | Unterminated_else + | Unexpected_token_in_conditional + | Expect_hash_then_in_conditional + | Illegal_semver of string + | Unexpected_directive + | Conditional_expr_expected_type of directive_type * directive_type +;; + +exception Error of error * Location.t + +open Format + +val report_error: formatter -> error -> unit + (* Deprecated. Use Location.{error_of_exn, report_error}. *) + +val in_comment : unit -> bool;; +val in_string : unit -> bool;; + + +val print_warnings : bool ref +val handle_docstrings: bool ref +val comments : unit -> (string * Location.t) list +val token_with_comments : Lexing.lexbuf -> Parser.token + +(* + [set_preprocessor init preprocessor] registers [init] as the function +to call to initialize the preprocessor when the lexer is initialized, +and [preprocessor] a function that is called when a new token is needed +by the parser, as [preprocessor lexer lexbuf] where [lexer] is the +lexing function. + +When a preprocessor is configured by calling [set_preprocessor], the lexer +changes its behavior to accept backslash-newline as a token-separating blank. 
+*) + +val set_preprocessor : + (unit -> unit) -> + ((Lexing.lexbuf -> Parser.token) -> Lexing.lexbuf -> Parser.token) -> + unit + +(** semantic version predicate *) +val semver : Location.t -> string -> string -> bool + +val filter_directive_from_lexbuf : Lexing.lexbuf -> (int * int) list + +val replace_directive_int : string -> int -> unit +val replace_directive_string : string -> string -> unit +val replace_directive_bool : string -> bool -> unit +val remove_directive_built_in_value : string -> unit + +(** @return false means failed to define *) +val define_key_value : string -> string -> bool +val list_variables : Format.formatter -> unit + +end = struct +#1 "lexer.ml" +# 18 "parsing/lexer.mll" + +open Lexing +open Misc +open Parser + +type directive_value = + | Dir_bool of bool + | Dir_float of float + | Dir_int of int + | Dir_string of string + | Dir_null + +type directive_type = + | Dir_type_bool + | Dir_type_float + | Dir_type_int + | Dir_type_string + | Dir_type_null + +let type_of_directive x = + match x with + | Dir_bool _ -> Dir_type_bool + | Dir_float _ -> Dir_type_float + | Dir_int _ -> Dir_type_int + | Dir_string _ -> Dir_type_string + | Dir_null -> Dir_type_null + +let string_of_type_directive x = + match x with + | Dir_type_bool -> "bool" + | Dir_type_float -> "float" + | Dir_type_int -> "int" + | Dir_type_string -> "string" + | Dir_type_null -> "null" + +type error = + | Illegal_character of char + | Illegal_escape of string + | Unterminated_comment of Location.t + | Unterminated_string + | Unterminated_string_in_comment of Location.t * Location.t + | Keyword_as_label of string + | Invalid_literal of string + | Invalid_directive of string * string option + | Unterminated_paren_in_conditional + | Unterminated_if + | Unterminated_else + | Unexpected_token_in_conditional + | Expect_hash_then_in_conditional + | Illegal_semver of string + | Unexpected_directive + | Conditional_expr_expected_type of directive_type * directive_type + +;; + +exception Error of error * Location.t;; + +let assert_same_type lexbuf x y = + let lhs = type_of_directive x in let rhs = type_of_directive y in + if lhs <> rhs then + raise (Error(Conditional_expr_expected_type(lhs,rhs), Location.curr lexbuf)) + else y + +let directive_built_in_values = + Hashtbl.create 51 + + +let replace_directive_built_in_value k v = + Hashtbl.replace directive_built_in_values k v + +let remove_directive_built_in_value k = + Hashtbl.replace directive_built_in_values k Dir_null + +let replace_directive_int k v = + Hashtbl.replace directive_built_in_values k (Dir_int v) + +let replace_directive_bool k v = + Hashtbl.replace directive_built_in_values k (Dir_bool v) + +let replace_directive_string k v = + Hashtbl.replace directive_built_in_values k (Dir_string v) + +let () = + (* Note we use {!Config} instead of {!Sys} becasue + we want to overwrite in some cases with the + same stdlib + *) + let version = + Config.version (* so that it can be overridden*) + in + replace_directive_built_in_value "OCAML_VERSION" + (Dir_string version); + replace_directive_built_in_value "OCAML_PATCH" + (Dir_string + (match String.rindex version '+' with + | exception Not_found -> "" + | i -> + String.sub version (i + 1) + (String.length version - i - 1))) + ; + replace_directive_built_in_value "OS_TYPE" + (Dir_string Sys.os_type); + replace_directive_built_in_value "BIG_ENDIAN" + (Dir_bool Sys.big_endian); + replace_directive_built_in_value "WORD_SIZE" + (Dir_int Sys.word_size) + +let find_directive_built_in_value k = + Hashtbl.find 
directive_built_in_values k + +let iter_directive_built_in_value f = Hashtbl.iter f directive_built_in_values + +(* + {[ + # semver 0 "12";; + - : int * int * int * string = (12, 0, 0, "");; + # semver 0 "12.3";; + - : int * int * int * string = (12, 3, 0, "");; + semver 0 "12.3.10";; + - : int * int * int * string = (12, 3, 10, "");; + # semver 0 "12.3.10+x";; + - : int * int * int * string = (12, 3, 10, "+x") + ]} +*) +let zero = Char.code '0' +let dot = Char.code '.' +let semantic_version_parse str start last_index = + let rec aux start acc last_index = + if start <= last_index then + let c = Char.code (String.unsafe_get str start) in + if c = dot then (acc, start + 1) (* consume [4.] instead of [4]*) + else + let v = c - zero in + if v >=0 && v <= 9 then + aux (start + 1) (acc * 10 + v) last_index + else (acc , start) + else (acc, start) + in + let major, major_end = aux start 0 last_index in + let minor, minor_end = aux major_end 0 last_index in + let patch, patch_end = aux minor_end 0 last_index in + let additional = String.sub str patch_end (last_index - patch_end +1) in + (major, minor, patch), additional + +(** + {[ + semver Location.none "1.2.3" "~1.3.0" = false;; + semver Location.none "1.2.3" "^1.3.0" = true ;; + semver Location.none "1.2.3" ">1.3.0" = false ;; + semver Location.none "1.2.3" ">=1.3.0" = false ;; + semver Location.none "1.2.3" "<1.3.0" = true ;; + semver Location.none "1.2.3" "<=1.3.0" = true ;; + ]} +*) +let semver loc lhs str = + let last_index = String.length str - 1 in + if last_index < 0 then raise (Error(Illegal_semver str, loc)) + else + let pred, ((major, minor, _patch) as version, _) = + let v = String.unsafe_get str 0 in + match v with + | '>' -> + if last_index = 0 then raise (Error(Illegal_semver str, loc)) else + if String.unsafe_get str 1 = '=' then + `Ge, semantic_version_parse str 2 last_index + else `Gt, semantic_version_parse str 1 last_index + | '<' + -> + if last_index = 0 then raise (Error(Illegal_semver str, loc)) else + if String.unsafe_get str 1 = '=' then + `Le, semantic_version_parse str 2 last_index + else `Lt, semantic_version_parse str 1 last_index + | '^' + -> `Compatible, semantic_version_parse str 1 last_index + | '~' -> `Approximate, semantic_version_parse str 1 last_index + | _ -> `Exact, semantic_version_parse str 0 last_index + in + let ((l_major, l_minor, _l_patch) as lversion,_) = + semantic_version_parse lhs 0 (String.length lhs - 1) in + match pred with + | `Ge -> lversion >= version + | `Gt -> lversion > version + | `Le -> lversion <= version + | `Lt -> lversion < version + | `Approximate -> major = l_major && minor = l_minor + | `Compatible -> major = l_major + | `Exact -> lversion = version + + +let pp_directive_value fmt (x : directive_value) = + match x with + | Dir_bool b -> Format.pp_print_bool fmt b + | Dir_int b -> Format.pp_print_int fmt b + | Dir_float b -> Format.pp_print_float fmt b + | Dir_string s -> Format.fprintf fmt "%S" s + | Dir_null -> Format.pp_print_string fmt "null" + +let list_variables fmt = + iter_directive_built_in_value + (fun s dir_value -> + Format.fprintf + fmt "@[%s@ %a@]@." 
+ s pp_directive_value dir_value + ) + +let defined str = + begin match find_directive_built_in_value str with + | Dir_null -> false + | _ -> true + | exception _ -> + try ignore @@ Sys.getenv str; true with _ -> false + end + +let query _loc str = + begin match find_directive_built_in_value str with + | Dir_null -> Dir_bool false + | v -> v + | exception Not_found -> + begin match Sys.getenv str with + | v -> + begin + try Dir_bool (bool_of_string v) with + _ -> + begin + try Dir_int (int_of_string v ) + with + _ -> + begin try (Dir_float (float_of_string v)) + with _ -> Dir_string v + end + end + end + | exception Not_found -> + Dir_bool false + end + end + + +let define_key_value key v = + if String.length key > 0 + && Char.uppercase_ascii (key.[0]) = key.[0] then + begin + replace_directive_built_in_value key + begin + (* NEED Sync up across {!lexer.mll} {!bspp.ml} and here, + TODO: put it in {!lexer.mll} + *) + try Dir_bool (bool_of_string v) with + _ -> + begin + try Dir_int (int_of_string v ) + with + _ -> + begin try (Dir_float (float_of_string v)) + with _ -> Dir_string v + end + end + end; + true + end + else false + +let cvt_int_literal s = + - int_of_string ("-" ^ s) + +let value_of_token loc (t : Parser.token) = + match t with + | INT (i,None) -> Dir_int (cvt_int_literal i) + | STRING (s,_) -> Dir_string s + | FLOAT (s,None) -> Dir_float (float_of_string s) + | TRUE -> Dir_bool true + | FALSE -> Dir_bool false + | UIDENT s -> query loc s + | _ -> raise (Error (Unexpected_token_in_conditional, loc)) + + +let directive_parse token_with_comments lexbuf = + let look_ahead = ref None in + let token () : Parser.token = + let v = !look_ahead in + match v with + | Some v -> + look_ahead := None ; + v + | None -> + let rec skip () = + match token_with_comments lexbuf with + | COMMENT _ + | DOCSTRING _ + | EOL -> skip () + | EOF -> raise (Error (Unterminated_if, Location.curr lexbuf)) + | t -> t + in skip () + in + let push e = + (* INVARIANT: only look at most one token *) + assert (!look_ahead = None); + look_ahead := Some e + in + let rec + token_op calc ~no lhs = + match token () with + | (LESS + | GREATER + | INFIXOP0 "<=" + | INFIXOP0 ">=" + | EQUAL + | INFIXOP0 "<>" as op) -> + let f = + match op with + | LESS -> (<) + | GREATER -> (>) + | INFIXOP0 "<=" -> (<=) + | EQUAL -> (=) + | INFIXOP0 "<>" -> (<>) + | _ -> assert false + in + let curr_loc = Location.curr lexbuf in + let rhs = value_of_token curr_loc (token ()) in + not calc || + f lhs (assert_same_type lexbuf lhs rhs) + | INFIXOP0 "=~" -> + not calc || + begin match lhs with + | Dir_string s -> + let curr_loc = Location.curr lexbuf in + let rhs = value_of_token curr_loc (token ()) in + begin match rhs with + | Dir_string rhs -> + semver curr_loc s rhs + | _ -> + raise + (Error + ( Conditional_expr_expected_type + (Dir_type_string, type_of_directive lhs), Location.curr lexbuf)) + end + | _ -> raise + (Error + ( Conditional_expr_expected_type + (Dir_type_string, type_of_directive lhs), Location.curr lexbuf)) + end + | e -> no e + and + parse_or calc : bool = + parse_or_aux calc (parse_and calc) + and (* a || (b || (c || d))*) + parse_or_aux calc v : bool = + (* let l = v in *) + match token () with + | BARBAR -> + let b = parse_or (calc && not v) in + v || b + | e -> push e ; v + and parse_and calc = + parse_and_aux calc (parse_relation calc) + and parse_and_aux calc v = (* a && (b && (c && d)) *) + (* let l = v in *) + match token () with + | AMPERAMPER -> + let b = parse_and (calc && v) in + v && b + | e -> push e ; v + and 
parse_relation (calc : bool) : bool = + let curr_token = token () in + let curr_loc = Location.curr lexbuf in + match curr_token with + | TRUE -> true + | FALSE -> false + | UIDENT v -> + let value_v = query curr_loc v in + token_op calc + ~no:(fun e -> push e ; + match value_v with + | Dir_bool b -> b + | _ -> + let ty = type_of_directive value_v in + raise + (Error(Conditional_expr_expected_type (Dir_type_bool, ty), + curr_loc))) + value_v + | INT (v,None) -> + let num_v = cvt_int_literal v in + token_op calc + ~no:(fun e -> + push e; + num_v <> 0 + ) + (Dir_int num_v) + | FLOAT (v,None) -> + token_op calc + ~no:(fun _e -> + raise (Error(Conditional_expr_expected_type(Dir_type_bool, Dir_type_float), + curr_loc))) + (Dir_float (float_of_string v)) + | STRING (v,_) -> + token_op calc + ~no:(fun _e -> + raise (Error + (Conditional_expr_expected_type(Dir_type_bool, Dir_type_string), + curr_loc))) + (Dir_string v) + | LIDENT ("defined" | "undefined" as r) -> + let t = token () in + let loc = Location.curr lexbuf in + begin match t with + | UIDENT s -> + not calc || + if r.[0] = 'u' then + not @@ defined s + else defined s + | _ -> raise (Error (Unexpected_token_in_conditional, loc)) + end + | LPAREN -> + let v = parse_or calc in + begin match token () with + | RPAREN -> v + | _ -> raise (Error(Unterminated_paren_in_conditional, Location.curr lexbuf)) + end + + | _ -> raise (Error (Unexpected_token_in_conditional, curr_loc)) + in + let v = parse_or true in + begin match token () with + | THEN -> v + | _ -> raise (Error (Expect_hash_then_in_conditional, Location.curr lexbuf)) + end + + +type dir_conditional = + | Dir_if_true + | Dir_if_false + | Dir_out + +(* let string_of_dir_conditional (x : dir_conditional) = *) +(* match x with *) +(* | Dir_if_true -> "Dir_if_true" *) +(* | Dir_if_false -> "Dir_if_false" *) +(* | Dir_out -> "Dir_out" *) + +let is_elif (i : Parser.token ) = + match i with + | LIDENT "elif" -> true + | _ -> false (* avoid polymorphic equal *) + + +(* The table of keywords *) + +let keyword_table = + create_hashtable 149 [ + "and", AND; + "as", AS; + "assert", ASSERT; + "begin", BEGIN; + "class", CLASS; + "constraint", CONSTRAINT; + "do", DO; + "done", DONE; + "downto", DOWNTO; + "else", ELSE; + "end", END; + "exception", EXCEPTION; + "external", EXTERNAL; + "false", FALSE; + "for", FOR; + "fun", FUN; + "function", FUNCTION; + "functor", FUNCTOR; + "if", IF; + "in", IN; + "include", INCLUDE; + "inherit", INHERIT; + "initializer", INITIALIZER; + "lazy", LAZY; + "let", LET; + "match", MATCH; + "method", METHOD; + "module", MODULE; + "mutable", MUTABLE; + "new", NEW; + "nonrec", NONREC; + "object", OBJECT; + "of", OF; + "open", OPEN; + "or", OR; +(* "parser", PARSER; *) + "private", PRIVATE; + "rec", REC; + "sig", SIG; + "struct", STRUCT; + "then", THEN; + "to", TO; + "true", TRUE; + "try", TRY; + "type", TYPE; + "val", VAL; + "virtual", VIRTUAL; + "when", WHEN; + "while", WHILE; + "with", WITH; + + "lor", INFIXOP3("lor"); (* Should be INFIXOP2 *) + "lxor", INFIXOP3("lxor"); (* Should be INFIXOP2 *) + "mod", INFIXOP3("mod"); + "land", INFIXOP3("land"); + "lsl", INFIXOP4("lsl"); + "lsr", INFIXOP4("lsr"); + "asr", INFIXOP4("asr") +] + +(* To buffer string literals *) + +let string_buffer = Buffer.create 256 +let reset_string_buffer () = Buffer.reset string_buffer +let get_stored_string () = Buffer.contents string_buffer + +let store_string_char c = Buffer.add_char string_buffer c +let store_string_utf_8_uchar u = Buffer.add_utf_8_uchar string_buffer u +let store_string s = 
Buffer.add_string string_buffer s +let store_lexeme lexbuf = store_string (Lexing.lexeme lexbuf) + +(* To store the position of the beginning of a string and comment *) +let string_start_loc = ref Location.none;; +let comment_start_loc = ref [];; +let in_comment () = !comment_start_loc <> [];; +let is_in_string = ref false +let in_string () = !is_in_string +let print_warnings = ref true +let if_then_else = ref Dir_out +let sharp_look_ahead = ref None +let update_if_then_else v = + (* Format.fprintf Format.err_formatter "@[update %s \n@]@." (string_of_dir_conditional v); *) + if_then_else := v + +(* Escaped chars are interpreted in strings unless they are in comments. *) +let store_escaped_char lexbuf c = + if in_comment () then store_lexeme lexbuf else store_string_char c + +let store_escaped_uchar lexbuf u = + if in_comment () then store_lexeme lexbuf else store_string_utf_8_uchar u + +let with_comment_buffer comment lexbuf = + let start_loc = Location.curr lexbuf in + comment_start_loc := [start_loc]; + reset_string_buffer (); + let end_loc = comment lexbuf in + let s = get_stored_string () in + reset_string_buffer (); + let loc = { start_loc with Location.loc_end = end_loc.Location.loc_end } in + s, loc + +(* To translate escape sequences *) + +let hex_digit_value d = (* assert (d in '0'..'9' 'a'..'f' 'A'..'F') *) + let d = Char.code d in + if d >= 97 then d - 87 else + if d >= 65 then d - 55 else + d - 48 + +let hex_num_value lexbuf ~first ~last = + let rec loop acc i = match i > last with + | true -> acc + | false -> + let value = hex_digit_value (Lexing.lexeme_char lexbuf i) in + loop (16 * acc + value) (i + 1) + in + loop 0 first + +let char_for_backslash = function + | 'n' -> '\010' + | 'r' -> '\013' + | 'b' -> '\008' + | 't' -> '\009' + | c -> c + +let char_for_decimal_code lexbuf i = + let c = 100 * (Char.code(Lexing.lexeme_char lexbuf i) - 48) + + 10 * (Char.code(Lexing.lexeme_char lexbuf (i+1)) - 48) + + (Char.code(Lexing.lexeme_char lexbuf (i+2)) - 48) in + if (c < 0 || c > 255) then + if in_comment () + then 'x' + else raise (Error(Illegal_escape (Lexing.lexeme lexbuf), + Location.curr lexbuf)) + else Char.chr c + +let char_for_octal_code lexbuf i = + let c = 64 * (Char.code(Lexing.lexeme_char lexbuf i) - 48) + + 8 * (Char.code(Lexing.lexeme_char lexbuf (i+1)) - 48) + + (Char.code(Lexing.lexeme_char lexbuf (i+2)) - 48) in + Char.chr c + +let char_for_hexadecimal_code lexbuf i = + let byte = hex_num_value lexbuf ~first:i ~last:(i+1) in + Char.chr byte + +let uchar_for_uchar_escape lexbuf = + let err e = + raise + (Error (Illegal_escape (Lexing.lexeme lexbuf ^ e), Location.curr lexbuf)) + in + let len = Lexing.lexeme_end lexbuf - Lexing.lexeme_start lexbuf in + let first = 3 (* skip opening \u{ *) in + let last = len - 2 (* skip closing } *) in + let digit_count = last - first + 1 in + match digit_count > 6 with + | true -> err ", too many digits, expected 1 to 6 hexadecimal digits" + | false -> + let cp = hex_num_value lexbuf ~first ~last in + if Uchar.is_valid cp then Uchar.unsafe_of_int cp else + err (", " ^ Printf.sprintf "%X" cp ^ " is not a Unicode scalar value") + +(* recover the name from a LABEL or OPTLABEL token *) + +let get_label_name lexbuf = + let s = Lexing.lexeme lexbuf in + let name = String.sub s 1 (String.length s - 2) in + if Hashtbl.mem keyword_table name then + raise (Error(Keyword_as_label name, Location.curr lexbuf)); + name +;; + +(* Update the current location with file name and line number. 
*) + +let update_loc lexbuf file line absolute chars = + let pos = lexbuf.lex_curr_p in + let new_file = match file with + | None -> pos.pos_fname + | Some s -> s + in + lexbuf.lex_curr_p <- { pos with + pos_fname = new_file; + pos_lnum = if absolute then line else pos.pos_lnum + line; + pos_bol = pos.pos_cnum - chars; + } +;; + +let preprocessor = ref None + +let escaped_newlines = ref false + +(* Warn about Latin-1 characters used in idents *) + +let warn_latin1 lexbuf = + Location.deprecated (Location.curr lexbuf)"ISO-Latin1 characters in identifiers" + +let handle_docstrings = ref true +let comment_list = ref [] + +let add_comment com = + comment_list := com :: !comment_list + +let add_docstring_comment ds = + let com = + ("*" ^ Docstrings.docstring_body ds, Docstrings.docstring_loc ds) + in + add_comment com + +let comments () = List.rev !comment_list + +(* Error report *) + +open Format + +let report_error ppf = function + | Illegal_character c -> + fprintf ppf "Illegal character (%s)" (Char.escaped c) + | Illegal_escape s -> + fprintf ppf "Illegal backslash escape in string or character (%s)" s + | Unterminated_comment _ -> + fprintf ppf "Comment not terminated" + | Unterminated_string -> + fprintf ppf "String literal not terminated" + | Unterminated_string_in_comment (_, loc) -> + fprintf ppf "This comment contains an unterminated string literal@.\ + %aString literal begins here" + Location.print_error loc + | Keyword_as_label kwd -> + fprintf ppf "`%s' is a keyword, it cannot be used as label name" kwd + | Invalid_literal s -> + fprintf ppf "Invalid literal %s" s + | Invalid_directive (dir, explanation) -> + fprintf ppf "Invalid lexer directive %S" dir; + begin match explanation with + | None -> () + | Some expl -> fprintf ppf ": %s" expl + end + | Unterminated_if -> + fprintf ppf "#if not terminated" + | Unterminated_else -> + fprintf ppf "#else not terminated" + | Unexpected_directive -> fprintf ppf "Unexpected directive" + | Unexpected_token_in_conditional -> + fprintf ppf "Unexpected token in conditional predicate" + | Unterminated_paren_in_conditional -> + fprintf ppf "Unterminated parens in conditional predicate" + | Expect_hash_then_in_conditional -> + fprintf ppf "Expect `then` after conditional predicate" + | Conditional_expr_expected_type (a,b) -> + fprintf ppf "Conditional expression type mismatch (%s,%s)" + (string_of_type_directive a ) + (string_of_type_directive b ) + | Illegal_semver s -> + fprintf ppf "Illegal semantic version string %s" s + +let () = + Location.register_error_of_exn + (function + | Error (err, loc) -> + Some (Location.error_of_printer loc report_error err) + | _ -> + None + ) + + +# 717 "parsing/lexer.ml" +let __ocaml_lex_tables = { + Lexing.lex_base = + "\000\000\162\255\163\255\224\000\003\001\038\001\073\001\108\001\ + \143\001\186\255\178\001\215\001\194\255\091\000\252\001\031\002\ + \068\000\071\000\065\002\100\002\212\255\214\255\217\255\135\002\ + \230\002\009\003\088\000\255\000\039\003\236\255\123\003\207\003\ + \035\004\243\004\195\005\147\006\114\007\206\007\158\008\122\000\ + \254\255\001\000\005\000\255\255\006\000\007\000\125\009\155\009\ + \107\010\250\255\249\255\059\011\011\012\247\255\246\255\219\012\ + \047\013\131\013\215\013\043\014\127\014\211\014\039\015\123\015\ + \207\015\035\016\087\000\119\016\203\016\031\017\115\017\199\017\ + \108\000\192\255\235\255\007\003\034\018\106\000\107\000\011\000\ + \234\255\233\255\228\255\152\002\099\000\118\000\113\000\232\255\ + 
\128\000\147\000\231\255\224\000\003\001\148\000\230\255\110\004\ + \149\000\229\255\148\000\224\255\217\000\223\255\222\000\034\018\ + \222\255\073\018\101\005\009\003\221\255\012\000\014\001\080\001\ + \115\001\024\001\221\255\013\000\119\018\158\018\193\018\231\018\ + \010\019\209\255\204\255\205\255\206\255\202\255\045\019\154\000\ + \183\000\195\255\196\255\197\255\217\000\182\255\180\255\189\255\ + \080\019\185\255\187\255\115\019\150\019\185\019\220\019\130\005\ + \243\255\244\255\017\000\245\255\174\001\223\005\253\255\248\000\ + \249\000\255\255\254\255\252\255\005\006\238\019\003\001\004\001\ + \018\000\251\255\250\255\249\255\222\006\026\003\005\001\248\255\ + \036\003\008\001\247\255\066\008\020\001\246\255\059\001\234\001\ + \245\255\246\255\247\255\060\001\055\020\255\255\248\255\193\000\ + \233\008\038\001\133\004\253\255\073\001\094\001\113\001\143\004\ + \252\255\192\002\027\004\251\255\230\009\250\255\182\010\089\020\ + \249\255\129\001\130\001\252\255\085\007\254\255\255\255\146\001\ + \147\001\253\255\177\007\033\001\044\001\148\001\151\001\045\001\ + \153\001\044\001\019\000\255\255"; + Lexing.lex_backtrk = + "\255\255\255\255\255\255\090\000\089\000\086\000\085\000\078\000\ + \076\000\255\255\067\000\064\000\255\255\057\000\056\000\054\000\ + \052\000\048\000\045\000\081\000\255\255\255\255\255\255\036\000\ + \035\000\042\000\040\000\039\000\062\000\255\255\014\000\014\000\ + \013\000\012\000\011\000\010\000\007\000\004\000\003\000\002\000\ + \255\255\093\000\093\000\255\255\255\255\255\255\084\000\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\018\000\ + \018\000\016\000\015\000\018\000\015\000\015\000\014\000\016\000\ + \015\000\016\000\255\255\017\000\017\000\014\000\014\000\016\000\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\027\000\027\000\027\000\027\000\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\028\000\255\255\029\000\255\255\030\000\088\000\ + \255\255\091\000\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\037\000\087\000\082\000\044\000\ + \047\000\255\255\255\255\255\255\255\255\255\255\055\000\074\000\ + \071\000\255\255\255\255\255\255\072\000\255\255\255\255\255\255\ + \065\000\255\255\255\255\083\000\077\000\080\000\079\000\255\255\ + \255\255\255\255\012\000\255\255\012\000\012\000\255\255\012\000\ + \012\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\010\000\010\000\255\255\255\255\007\000\ + \007\000\007\000\007\000\255\255\001\000\007\000\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\003\000\255\255\255\255\003\000\ + \255\255\255\255\255\255\002\000\255\255\255\255\001\000\255\255\ + \255\255\255\255\255\255\255\255"; + Lexing.lex_default = + "\001\000\000\000\000\000\255\255\255\255\255\255\255\255\255\255\ + \255\255\000\000\255\255\255\255\000\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\000\000\000\000\000\000\255\255\ + \255\255\255\255\255\255\077\000\255\255\000\000\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \000\000\255\255\255\255\000\000\255\255\255\255\255\255\255\255\ + \255\255\000\000\000\000\255\255\255\255\000\000\000\000\255\255\ + 
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\000\000\000\000\255\255\082\000\255\255\255\255\255\255\ + \000\000\000\000\000\000\255\255\255\255\255\255\255\255\000\000\ + \255\255\255\255\000\000\255\255\255\255\255\255\000\000\255\255\ + \255\255\000\000\255\255\000\000\255\255\000\000\255\255\255\255\ + \000\000\255\255\110\000\255\255\000\000\255\255\110\000\111\000\ + \110\000\113\000\000\000\255\255\255\255\255\255\255\255\255\255\ + \255\255\000\000\000\000\000\000\000\000\000\000\255\255\255\255\ + \255\255\000\000\000\000\000\000\255\255\000\000\000\000\000\000\ + \255\255\000\000\000\000\255\255\255\255\255\255\255\255\144\000\ + \000\000\000\000\255\255\000\000\158\000\255\255\000\000\255\255\ + \255\255\000\000\000\000\000\000\255\255\255\255\255\255\255\255\ + \255\255\000\000\000\000\000\000\255\255\255\255\255\255\000\000\ + \255\255\255\255\000\000\255\255\255\255\000\000\255\255\176\000\ + \000\000\000\000\000\000\255\255\182\000\000\000\000\000\255\255\ + \255\255\255\255\255\255\000\000\255\255\255\255\255\255\255\255\ + \000\000\255\255\255\255\000\000\255\255\000\000\255\255\255\255\ + \000\000\255\255\203\000\000\000\255\255\000\000\000\000\255\255\ + \255\255\000\000\255\255\255\255\255\255\213\000\216\000\255\255\ + \216\000\255\255\255\255\000\000"; + Lexing.lex_trans = + "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\039\000\040\000\040\000\039\000\041\000\045\000\043\000\ + \043\000\040\000\044\000\044\000\045\000\078\000\108\000\114\000\ + \079\000\109\000\115\000\145\000\159\000\219\000\174\000\160\000\ + \039\000\008\000\029\000\024\000\006\000\004\000\023\000\027\000\ + \026\000\021\000\025\000\007\000\020\000\019\000\018\000\003\000\ + \031\000\030\000\030\000\030\000\030\000\030\000\030\000\030\000\ + \030\000\030\000\017\000\016\000\015\000\014\000\010\000\036\000\ + \005\000\033\000\033\000\033\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\033\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\033\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\013\000\042\000\012\000\005\000\038\000\ + \022\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\028\000\011\000\009\000\037\000\125\000\ + \127\000\124\000\098\000\039\000\123\000\122\000\039\000\065\000\ + \065\000\065\000\065\000\065\000\065\000\065\000\065\000\065\000\ + \065\000\081\000\080\000\091\000\091\000\091\000\091\000\130\000\ + \087\000\129\000\039\000\128\000\071\000\071\000\071\000\071\000\ + \071\000\071\000\071\000\071\000\071\000\071\000\088\000\088\000\ + \088\000\088\000\088\000\088\000\088\000\088\000\088\000\088\000\ + \089\000\089\000\089\000\089\000\089\000\089\000\089\000\089\000\ + \089\000\089\000\090\000\094\000\097\000\099\000\100\000\134\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\131\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\132\000\ + 
\034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \002\000\003\000\101\000\102\000\003\000\003\000\003\000\101\000\ + \102\000\078\000\003\000\003\000\079\000\003\000\003\000\003\000\ + \092\000\092\000\092\000\092\000\092\000\092\000\092\000\092\000\ + \108\000\133\000\003\000\109\000\003\000\003\000\003\000\003\000\ + \003\000\154\000\114\000\153\000\003\000\115\000\255\255\003\000\ + \003\000\003\000\163\000\162\000\167\000\003\000\003\000\170\000\ + \003\000\003\000\003\000\093\000\093\000\093\000\093\000\093\000\ + \093\000\093\000\093\000\173\000\198\000\003\000\003\000\003\000\ + \003\000\003\000\003\000\003\000\212\000\145\000\178\000\005\000\ + \174\000\201\000\005\000\005\000\005\000\213\000\217\000\218\000\ + \005\000\005\000\188\000\005\000\005\000\005\000\193\000\193\000\ + \193\000\193\000\108\000\076\000\003\000\109\000\003\000\000\000\ + \005\000\003\000\005\000\005\000\005\000\005\000\005\000\000\000\ + \188\000\188\000\006\000\190\000\000\000\006\000\006\000\006\000\ + \000\000\000\000\113\000\006\000\006\000\000\000\006\000\006\000\ + \006\000\000\000\000\000\188\000\112\000\108\000\190\000\003\000\ + \109\000\003\000\000\000\006\000\005\000\006\000\006\000\006\000\ + \006\000\006\000\000\000\178\000\206\000\117\000\201\000\207\000\ + \117\000\117\000\117\000\112\000\000\000\111\000\117\000\117\000\ + \000\000\117\000\142\000\117\000\206\000\206\000\214\000\208\000\ + \208\000\215\000\005\000\215\000\005\000\000\000\117\000\006\000\ + \117\000\141\000\117\000\117\000\117\000\000\000\000\000\000\000\ + \139\000\000\000\000\000\139\000\139\000\139\000\000\000\000\000\ + \159\000\139\000\139\000\160\000\139\000\139\000\139\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\006\000\000\000\006\000\ + \000\000\139\000\117\000\139\000\140\000\139\000\139\000\139\000\ + \000\000\000\000\000\000\006\000\000\000\161\000\006\000\006\000\ + \006\000\000\000\000\000\000\000\006\000\006\000\000\000\006\000\ + \006\000\006\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \117\000\000\000\117\000\000\000\006\000\139\000\006\000\006\000\ + \006\000\006\000\006\000\000\000\178\000\000\000\000\000\179\000\ + \006\000\000\000\000\000\006\000\006\000\006\000\204\000\255\255\ + \000\000\006\000\006\000\000\000\006\000\006\000\006\000\000\000\ + \000\000\000\000\157\000\139\000\181\000\139\000\255\255\138\000\ + \006\000\006\000\000\000\006\000\006\000\006\000\006\000\006\000\ + \255\255\000\000\000\000\000\000\000\000\006\000\000\000\000\000\ + \006\000\006\000\006\000\000\000\000\000\000\000\006\000\006\000\ + \000\000\006\000\006\000\006\000\000\000\000\000\006\000\137\000\ + \006\000\000\000\000\000\000\000\135\000\006\000\006\000\000\000\ + \006\000\006\000\006\000\006\000\006\000\000\000\000\000\000\000\ + \006\000\000\000\000\000\006\000\006\000\006\000\180\000\000\000\ + \000\000\006\000\006\000\000\000\126\000\006\000\006\000\000\000\ + \255\255\000\000\000\000\136\000\000\000\006\000\000\000\000\000\ + \000\000\006\000\006\000\006\000\006\000\006\000\006\000\006\000\ + \000\000\000\000\120\000\000\000\000\000\120\000\120\000\120\000\ + \000\000\000\000\000\000\120\000\120\000\000\000\120\000\121\000\ + \120\000\000\000\000\000\255\255\000\000\000\000\000\000\000\000\ + \006\000\000\000\006\000\120\000\000\000\006\000\120\000\120\000\ + \120\000\120\000\205\000\000\000\000\000\117\000\000\000\000\000\ + \117\000\117\000\117\000\000\000\000\000\000\000\117\000\117\000\ + \000\000\117\000\118\000\117\000\255\255\000\000\000\000\255\255\ + 
\000\000\255\255\000\000\006\000\000\000\006\000\117\000\120\000\ + \117\000\117\000\119\000\117\000\117\000\000\000\000\000\000\000\ + \006\000\000\000\000\000\006\000\006\000\116\000\255\255\000\000\ + \000\000\006\000\006\000\000\000\006\000\006\000\006\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\120\000\000\000\120\000\ + \000\000\006\000\117\000\006\000\006\000\006\000\006\000\006\000\ + \095\000\095\000\095\000\095\000\095\000\095\000\095\000\095\000\ + \095\000\095\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\095\000\095\000\095\000\095\000\095\000\095\000\000\000\ + \117\000\000\000\117\000\000\000\000\000\006\000\000\000\000\000\ + \000\000\000\000\177\000\000\000\000\000\000\000\000\000\107\000\ + \194\000\194\000\194\000\194\000\194\000\194\000\194\000\194\000\ + \000\000\095\000\095\000\095\000\095\000\095\000\095\000\000\000\ + \000\000\000\000\000\000\006\000\000\000\006\000\107\000\105\000\ + \000\000\105\000\105\000\105\000\105\000\000\000\000\000\000\000\ + \105\000\105\000\107\000\105\000\105\000\105\000\106\000\106\000\ + \106\000\106\000\106\000\106\000\106\000\106\000\106\000\106\000\ + \105\000\000\000\105\000\105\000\105\000\105\000\105\000\000\000\ + \000\000\107\000\003\000\000\000\000\000\003\000\003\000\003\000\ + \000\000\000\000\104\000\103\000\003\000\000\000\003\000\003\000\ + \003\000\106\000\106\000\106\000\106\000\106\000\106\000\106\000\ + \106\000\106\000\106\000\003\000\105\000\003\000\003\000\003\000\ + \003\000\003\000\168\000\168\000\168\000\168\000\168\000\168\000\ + \168\000\168\000\168\000\168\000\169\000\169\000\169\000\169\000\ + \169\000\169\000\169\000\169\000\169\000\169\000\000\000\000\000\ + \000\000\000\000\105\000\073\000\105\000\000\000\075\000\003\000\ + \075\000\075\000\075\000\075\000\075\000\075\000\075\000\075\000\ + \075\000\075\000\075\000\075\000\075\000\075\000\075\000\075\000\ + \075\000\075\000\075\000\075\000\075\000\075\000\075\000\075\000\ + \075\000\075\000\000\000\074\000\000\000\003\000\075\000\003\000\ + \075\000\075\000\075\000\075\000\075\000\075\000\075\000\075\000\ + \075\000\075\000\075\000\075\000\075\000\075\000\075\000\075\000\ + \075\000\075\000\075\000\075\000\075\000\075\000\075\000\075\000\ + \075\000\075\000\055\000\074\000\000\000\000\000\000\000\000\000\ + \000\000\057\000\000\000\030\000\030\000\030\000\030\000\030\000\ + \030\000\030\000\030\000\030\000\030\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\055\000\055\000\055\000\055\000\ + \056\000\055\000\058\000\058\000\058\000\058\000\058\000\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\058\000\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\000\000\000\000\ + \000\000\000\000\030\000\000\000\055\000\055\000\055\000\055\000\ + \056\000\055\000\058\000\058\000\058\000\058\000\058\000\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\058\000\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\055\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\057\000\000\000\030\000\ + \030\000\030\000\030\000\030\000\030\000\030\000\030\000\030\000\ + \030\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \055\000\059\000\055\000\055\000\056\000\055\000\058\000\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\060\000\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\058\000\061\000\ + \058\000\058\000\000\000\000\000\000\000\000\000\030\000\000\000\ + \055\000\059\000\055\000\055\000\056\000\055\000\058\000\058\000\ + 
\058\000\058\000\058\000\058\000\058\000\058\000\060\000\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\058\000\061\000\ + \058\000\058\000\032\000\195\000\195\000\195\000\195\000\195\000\ + \195\000\195\000\195\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\000\000\000\000\ + \000\000\000\000\032\000\000\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\096\000\096\000\ + \096\000\096\000\096\000\096\000\096\000\096\000\096\000\096\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\096\000\ + \096\000\096\000\096\000\096\000\096\000\191\000\191\000\191\000\ + \191\000\191\000\191\000\191\000\191\000\191\000\191\000\192\000\ + \192\000\192\000\192\000\192\000\192\000\192\000\192\000\192\000\ + \192\000\000\000\000\000\000\000\000\000\000\000\000\000\096\000\ + \096\000\096\000\096\000\096\000\096\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\000\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\033\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\033\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\033\000\033\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\033\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\033\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\033\000\033\000\033\000\000\000\000\000\ + \000\000\000\000\033\000\000\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\033\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\033\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\033\000\033\000\033\000\112\000\108\000\ + \000\000\000\000\109\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\112\000\000\000\111\000\ + \000\000\000\000\000\000\000\000\145\000\000\000\000\000\146\000\ + \000\000\000\000\000\000\000\000\000\000\106\000\106\000\106\000\ + \106\000\106\000\106\000\106\000\106\000\106\000\106\000\000\000\ + \000\000\000\000\000\000\000\000\150\000\000\000\000\000\000\000\ + \000\000\148\000\152\000\000\000\151\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\000\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + 
\032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\034\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\149\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\000\000\000\000\ + \000\000\000\000\034\000\000\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\156\000\000\000\ + \156\000\156\000\156\000\156\000\156\000\156\000\156\000\156\000\ + \156\000\156\000\156\000\156\000\156\000\156\000\156\000\156\000\ + \156\000\156\000\156\000\156\000\156\000\156\000\156\000\156\000\ + \156\000\156\000\000\000\155\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\156\000\255\255\156\000\156\000\ + \156\000\156\000\156\000\156\000\156\000\156\000\156\000\156\000\ + \156\000\156\000\156\000\156\000\156\000\156\000\156\000\156\000\ + \156\000\156\000\156\000\156\000\156\000\156\000\156\000\156\000\ + \000\000\155\000\147\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\000\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\035\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\000\000\000\000\ + \000\000\000\000\035\000\000\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\171\000\171\000\ + \171\000\171\000\171\000\171\000\171\000\171\000\171\000\171\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\171\000\ + \171\000\171\000\171\000\171\000\171\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\171\000\ + \171\000\171\000\171\000\171\000\171\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\000\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + 
\034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\000\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\046\000\000\000\000\000\046\000\046\000\ + \046\000\000\000\000\000\000\000\046\000\046\000\000\000\046\000\ + \046\000\046\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\046\000\000\000\046\000\046\000\ + \046\000\046\000\046\000\000\000\210\000\000\000\210\000\210\000\ + \210\000\210\000\210\000\210\000\210\000\210\000\210\000\210\000\ + \210\000\210\000\210\000\210\000\210\000\210\000\210\000\210\000\ + \210\000\210\000\210\000\210\000\210\000\210\000\210\000\210\000\ + \046\000\052\000\209\000\052\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\052\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\052\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\052\000\052\000\000\000\046\000\046\000\ + \046\000\000\000\046\000\046\000\046\000\000\000\000\000\000\000\ + \046\000\046\000\000\000\046\000\046\000\046\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \046\000\000\000\046\000\046\000\046\000\046\000\046\000\000\000\ + \210\000\000\000\210\000\210\000\210\000\210\000\210\000\210\000\ + \210\000\210\000\210\000\210\000\210\000\210\000\210\000\210\000\ + \210\000\210\000\210\000\210\000\210\000\210\000\210\000\210\000\ + \210\000\210\000\210\000\210\000\046\000\048\000\209\000\048\000\ + \048\000\048\000\048\000\048\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\048\000\048\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\048\000\048\000\048\000\048\000\048\000\048\000\ + \048\000\000\000\046\000\000\000\046\000\000\000\000\000\000\000\ + \000\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\000\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\172\000\172\000\172\000\172\000\172\000\172\000\ + \172\000\172\000\172\000\172\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\172\000\172\000\172\000\172\000\172\000\ + \172\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\172\000\172\000\172\000\172\000\172\000\ + \172\000\000\000\000\000\000\000\000\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\035\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\000\000\000\000\000\000\000\000\035\000\000\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + 
\035\000\196\000\196\000\196\000\196\000\196\000\196\000\196\000\ + \196\000\196\000\196\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\196\000\196\000\196\000\196\000\196\000\196\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\196\000\196\000\196\000\196\000\196\000\196\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\000\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\000\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\046\000\000\000\ + \000\000\046\000\046\000\046\000\000\000\000\000\000\000\046\000\ + \046\000\000\000\046\000\046\000\046\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\046\000\ + \000\000\046\000\046\000\046\000\046\000\046\000\000\000\000\000\ + \000\000\000\000\047\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\050\000\000\000\000\000\ + \000\000\000\000\000\000\046\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\000\000\000\000\ + \000\000\046\000\047\000\046\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\197\000\197\000\ + \197\000\197\000\197\000\197\000\197\000\197\000\197\000\197\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\197\000\ + \197\000\197\000\197\000\197\000\197\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\197\000\ + \197\000\197\000\197\000\197\000\197\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\000\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\048\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\048\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\048\000\048\000\048\000\049\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\048\000\048\000\048\000\048\000\048\000\048\000\ + 
\048\000\048\000\048\000\048\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\048\000\048\000\048\000\048\000\000\000\000\000\ + \000\000\000\000\048\000\000\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\048\000\048\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\048\000\048\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\048\000\048\000\048\000\048\000\199\000\199\000\ + \199\000\199\000\199\000\199\000\199\000\199\000\199\000\199\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\199\000\ + \199\000\199\000\199\000\199\000\199\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\199\000\ + \199\000\199\000\199\000\199\000\199\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\000\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\051\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\054\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\000\000\000\000\ + \000\000\000\000\051\000\000\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\000\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\052\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\052\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\052\000\052\000\053\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\052\000\052\000\052\000\052\000\052\000\ + 
\052\000\052\000\052\000\052\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\052\000\052\000\052\000\000\000\000\000\ + \000\000\000\000\052\000\000\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\052\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\052\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\052\000\052\000\052\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\000\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\055\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\000\000\000\000\ + \000\000\000\000\055\000\000\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\000\000\ + \000\000\000\000\072\000\000\000\072\000\000\000\000\000\071\000\ + \071\000\071\000\071\000\071\000\071\000\071\000\071\000\071\000\ + \071\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\000\000\000\000\000\000\000\000\055\000\000\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\057\000\057\000\057\000\057\000\057\000\ + \057\000\057\000\057\000\057\000\057\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\055\000\055\000\055\000\055\000\ + \056\000\055\000\067\000\067\000\067\000\067\000\067\000\067\000\ + \067\000\067\000\067\000\067\000\067\000\067\000\067\000\067\000\ + \067\000\067\000\067\000\067\000\067\000\067\000\000\000\000\000\ + \000\000\000\000\057\000\000\000\055\000\055\000\055\000\055\000\ + \056\000\055\000\067\000\067\000\067\000\067\000\067\000\067\000\ + \067\000\067\000\067\000\067\000\067\000\067\000\067\000\067\000\ + 
\067\000\067\000\067\000\067\000\067\000\067\000\055\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\000\000\000\000\000\000\000\000\055\000\000\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\070\000\070\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\000\000\000\000\ + \000\000\000\000\055\000\000\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\069\000\ + \069\000\069\000\069\000\069\000\069\000\069\000\069\000\055\000\ + \055\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\000\000\000\000\000\000\000\000\055\000\000\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\062\000\062\000\062\000\062\000\062\000\ + \062\000\062\000\062\000\062\000\062\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\062\000\062\000\062\000\062\000\ + \062\000\062\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\000\000\000\000\ + \000\000\000\000\055\000\000\000\062\000\062\000\062\000\062\000\ + \062\000\062\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\063\000\000\000\062\000\ + \062\000\062\000\062\000\062\000\062\000\062\000\062\000\062\000\ + \062\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \062\000\062\000\062\000\062\000\062\000\062\000\058\000\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\058\000\064\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\058\000\058\000\ + \058\000\058\000\000\000\000\000\000\000\000\000\062\000\000\000\ + \062\000\062\000\062\000\062\000\062\000\062\000\058\000\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\058\000\064\000\ + 
\058\000\058\000\058\000\058\000\058\000\058\000\058\000\058\000\ + \058\000\058\000\055\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\063\000\063\000\063\000\063\000\063\000\ + \063\000\063\000\063\000\063\000\063\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\063\000\063\000\063\000\063\000\ + \063\000\063\000\067\000\067\000\067\000\067\000\067\000\067\000\ + \067\000\067\000\067\000\068\000\067\000\067\000\067\000\067\000\ + \067\000\067\000\067\000\067\000\067\000\067\000\000\000\000\000\ + \000\000\000\000\063\000\000\000\063\000\063\000\063\000\063\000\ + \063\000\063\000\067\000\067\000\067\000\067\000\067\000\067\000\ + \067\000\067\000\067\000\068\000\067\000\067\000\067\000\067\000\ + \067\000\067\000\067\000\067\000\067\000\067\000\055\000\000\000\ + \000\000\000\000\066\000\000\000\066\000\000\000\000\000\065\000\ + \065\000\065\000\065\000\065\000\065\000\065\000\065\000\065\000\ + \065\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\000\000\000\000\000\000\000\000\055\000\000\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\065\000\065\000\065\000\065\000\065\000\ + \065\000\065\000\065\000\065\000\065\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\067\000\067\000\067\000\067\000\067\000\067\000\ + \067\000\067\000\067\000\067\000\067\000\067\000\067\000\067\000\ + \067\000\067\000\067\000\067\000\067\000\067\000\000\000\000\000\ + \000\000\000\000\065\000\000\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\067\000\067\000\067\000\067\000\067\000\067\000\ + \067\000\067\000\067\000\067\000\067\000\067\000\067\000\067\000\ + \067\000\067\000\067\000\067\000\067\000\067\000\055\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\000\000\000\000\000\000\000\000\055\000\000\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\000\000\000\000\000\000\066\000\000\000\ + \066\000\000\000\000\000\065\000\065\000\065\000\065\000\065\000\ + \065\000\065\000\065\000\065\000\065\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\000\000\000\000\ + \000\000\000\000\055\000\000\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + 
\055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\069\000\ + \069\000\069\000\069\000\069\000\069\000\069\000\069\000\055\000\ + \055\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\058\000\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\058\000\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\058\000\058\000\ + \058\000\058\000\000\000\000\000\000\000\000\000\069\000\000\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\058\000\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\058\000\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\058\000\058\000\ + \058\000\058\000\055\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\070\000\070\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\058\000\058\000\058\000\058\000\058\000\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\058\000\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\000\000\000\000\ + \000\000\000\000\070\000\000\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\058\000\058\000\058\000\058\000\058\000\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\058\000\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\055\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\071\000\ + \071\000\071\000\071\000\071\000\071\000\071\000\071\000\071\000\ + \071\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\067\000\067\000\ + \067\000\067\000\067\000\067\000\067\000\067\000\067\000\067\000\ + \067\000\067\000\067\000\067\000\067\000\067\000\067\000\067\000\ + \067\000\067\000\000\000\000\000\000\000\000\000\071\000\000\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\067\000\067\000\ + \067\000\067\000\067\000\067\000\067\000\067\000\067\000\067\000\ + \067\000\067\000\067\000\067\000\067\000\067\000\067\000\067\000\ + \067\000\067\000\086\000\103\000\086\000\000\000\103\000\103\000\ + \103\000\086\000\000\000\000\000\103\000\103\000\000\000\103\000\ + \103\000\103\000\085\000\085\000\085\000\085\000\085\000\085\000\ + \085\000\085\000\085\000\085\000\103\000\000\000\103\000\103\000\ + \103\000\103\000\103\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\105\000\000\000\105\000\105\000\105\000\105\000\ + \000\000\000\000\000\000\105\000\105\000\000\000\105\000\105\000\ + \105\000\000\000\000\000\000\000\000\000\000\000\086\000\000\000\ + \103\000\000\000\000\000\105\000\086\000\105\000\105\000\105\000\ + \105\000\105\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \086\000\084\000\000\000\000\000\086\000\000\000\086\000\000\000\ + \006\000\000\000\083\000\006\000\006\000\006\000\103\000\000\000\ + \103\000\006\000\006\000\000\000\006\000\006\000\006\000\105\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\006\000\000\000\006\000\006\000\006\000\006\000\006\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\117\000\ + \000\000\000\000\117\000\117\000\117\000\105\000\000\000\105\000\ + \117\000\117\000\000\000\117\000\117\000\117\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\006\000\000\000\000\000\ + 
\117\000\000\000\117\000\117\000\117\000\117\000\117\000\000\000\ + \000\000\000\000\117\000\000\000\000\000\117\000\117\000\117\000\ + \000\000\000\000\000\000\117\000\117\000\000\000\117\000\117\000\ + \117\000\000\000\000\000\006\000\000\000\006\000\000\000\000\000\ + \000\000\000\000\000\000\117\000\117\000\117\000\117\000\117\000\ + \117\000\117\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \117\000\000\000\000\000\117\000\117\000\117\000\000\000\000\000\ + \000\000\117\000\117\000\000\000\117\000\117\000\117\000\000\000\ + \000\000\000\000\117\000\000\000\117\000\000\000\000\000\117\000\ + \000\000\117\000\255\255\117\000\117\000\117\000\117\000\117\000\ + \000\000\000\000\000\000\120\000\000\000\000\000\120\000\120\000\ + \120\000\000\000\000\000\000\000\120\000\120\000\000\000\120\000\ + \120\000\120\000\000\000\000\000\000\000\117\000\000\000\117\000\ + \000\000\000\000\000\000\000\000\120\000\117\000\120\000\120\000\ + \120\000\120\000\120\000\000\000\000\000\000\000\006\000\000\000\ + \000\000\006\000\006\000\006\000\000\000\000\000\000\000\006\000\ + \006\000\000\000\006\000\006\000\006\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\117\000\000\000\117\000\000\000\006\000\ + \120\000\006\000\006\000\006\000\006\000\006\000\000\000\000\000\ + \000\000\006\000\000\000\000\000\006\000\006\000\006\000\000\000\ + \000\000\000\000\006\000\006\000\000\000\006\000\006\000\006\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\120\000\000\000\ + \120\000\000\000\006\000\006\000\006\000\006\000\006\000\006\000\ + \006\000\000\000\000\000\000\000\139\000\000\000\000\000\139\000\ + \139\000\139\000\000\000\000\000\000\000\139\000\139\000\000\000\ + \139\000\139\000\139\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\006\000\000\000\006\000\000\000\139\000\006\000\139\000\ + \139\000\139\000\139\000\139\000\000\000\000\000\000\000\139\000\ + \000\000\000\000\139\000\139\000\139\000\000\000\000\000\000\000\ + \139\000\139\000\000\000\139\000\139\000\139\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\006\000\000\000\006\000\000\000\ + \139\000\139\000\139\000\139\000\139\000\139\000\139\000\000\000\ + \000\000\000\000\117\000\000\000\000\000\117\000\117\000\117\000\ + \000\000\000\000\000\000\117\000\117\000\000\000\117\000\117\000\ + \117\000\000\000\000\000\000\000\000\000\000\000\000\000\139\000\ + \000\000\139\000\000\000\117\000\139\000\117\000\117\000\117\000\ + \117\000\117\000\000\000\000\000\000\000\117\000\000\000\000\000\ + \117\000\117\000\117\000\000\000\000\000\000\000\117\000\117\000\ + \000\000\117\000\117\000\117\000\000\000\000\000\166\000\000\000\ + \166\000\000\000\139\000\000\000\139\000\166\000\117\000\117\000\ + \117\000\117\000\117\000\117\000\117\000\000\000\165\000\165\000\ + \165\000\165\000\165\000\165\000\165\000\165\000\165\000\165\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\117\000\000\000\117\000\ + \000\000\000\000\117\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\188\000\000\000\000\000\189\000\000\000\000\000\000\000\ + \000\000\000\000\166\000\000\000\000\000\000\000\000\000\000\000\ + \166\000\000\000\000\000\000\000\000\000\000\000\000\000\187\000\ + \117\000\187\000\117\000\000\000\166\000\000\000\187\000\000\000\ + \166\000\000\000\166\000\000\000\000\000\000\000\164\000\186\000\ + \186\000\186\000\186\000\186\000\186\000\186\000\186\000\186\000\ + \186\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + 
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\199\000\199\000\199\000\199\000\199\000\199\000\199\000\ + \199\000\199\000\199\000\187\000\000\000\000\000\000\000\000\000\ + \000\000\187\000\199\000\199\000\199\000\199\000\199\000\199\000\ + \000\000\000\000\000\000\000\000\000\000\187\000\185\000\000\000\ + \000\000\187\000\000\000\187\000\183\000\000\000\000\000\184\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\199\000\199\000\199\000\199\000\199\000\199\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\200\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\255\255\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000"; + Lexing.lex_check = + "\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\000\000\000\000\041\000\000\000\000\000\041\000\042\000\ + \044\000\045\000\042\000\044\000\045\000\079\000\109\000\115\000\ + \079\000\109\000\115\000\146\000\160\000\218\000\146\000\160\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\016\000\ + \013\000\017\000\026\000\039\000\017\000\017\000\039\000\066\000\ + \066\000\066\000\066\000\066\000\066\000\066\000\066\000\066\000\ + \066\000\077\000\078\000\084\000\084\000\084\000\084\000\013\000\ + \086\000\013\000\039\000\013\000\072\000\072\000\072\000\072\000\ + \072\000\072\000\072\000\072\000\072\000\072\000\085\000\085\000\ + \085\000\085\000\085\000\085\000\085\000\085\000\085\000\085\000\ + \088\000\088\000\088\000\088\000\088\000\088\000\088\000\088\000\ + 
\088\000\088\000\089\000\093\000\096\000\098\000\098\000\127\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\013\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\128\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\003\000\100\000\100\000\003\000\003\000\003\000\102\000\ + \102\000\027\000\003\000\003\000\027\000\003\000\003\000\003\000\ + \091\000\091\000\091\000\091\000\091\000\091\000\091\000\091\000\ + \110\000\132\000\003\000\110\000\003\000\003\000\003\000\003\000\ + \003\000\151\000\113\000\152\000\004\000\113\000\027\000\004\000\ + \004\000\004\000\158\000\159\000\166\000\004\000\004\000\169\000\ + \004\000\004\000\004\000\092\000\092\000\092\000\092\000\092\000\ + \092\000\092\000\092\000\172\000\183\000\004\000\003\000\004\000\ + \004\000\004\000\004\000\004\000\211\000\174\000\179\000\005\000\ + \174\000\179\000\005\000\005\000\005\000\212\000\215\000\217\000\ + \005\000\005\000\188\000\005\000\005\000\005\000\185\000\185\000\ + \185\000\185\000\111\000\027\000\003\000\111\000\003\000\255\255\ + \005\000\004\000\005\000\005\000\005\000\005\000\005\000\255\255\ + \189\000\188\000\006\000\189\000\255\255\006\000\006\000\006\000\ + \255\255\255\255\111\000\006\000\006\000\255\255\006\000\006\000\ + \006\000\255\255\255\255\190\000\112\000\112\000\190\000\004\000\ + \112\000\004\000\255\255\006\000\005\000\006\000\006\000\006\000\ + \006\000\006\000\255\255\201\000\202\000\007\000\201\000\202\000\ + \007\000\007\000\007\000\112\000\255\255\112\000\007\000\007\000\ + \255\255\007\000\007\000\007\000\207\000\208\000\213\000\207\000\ + \208\000\214\000\005\000\216\000\005\000\255\255\007\000\006\000\ + \007\000\007\000\007\000\007\000\007\000\255\255\255\255\255\255\ + \008\000\255\255\255\255\008\000\008\000\008\000\255\255\255\255\ + \148\000\008\000\008\000\148\000\008\000\008\000\008\000\255\255\ + \255\255\255\255\255\255\255\255\255\255\006\000\255\255\006\000\ + \255\255\008\000\007\000\008\000\008\000\008\000\008\000\008\000\ + \255\255\255\255\255\255\010\000\255\255\148\000\010\000\010\000\ + \010\000\255\255\255\255\255\255\010\000\010\000\255\255\010\000\ + \010\000\010\000\255\255\255\255\255\255\255\255\255\255\255\255\ + \007\000\255\255\007\000\255\255\010\000\008\000\010\000\010\000\ + \010\000\010\000\010\000\255\255\175\000\255\255\255\255\175\000\ + \011\000\255\255\255\255\011\000\011\000\011\000\202\000\027\000\ + \255\255\011\000\011\000\255\255\011\000\011\000\011\000\255\255\ + \255\255\255\255\148\000\008\000\175\000\008\000\110\000\010\000\ + \010\000\011\000\255\255\011\000\011\000\011\000\011\000\011\000\ + \113\000\255\255\255\255\255\255\255\255\014\000\255\255\255\255\ + \014\000\014\000\014\000\255\255\255\255\255\255\014\000\014\000\ + \255\255\014\000\014\000\014\000\255\255\255\255\010\000\010\000\ + \010\000\255\255\255\255\255\255\011\000\011\000\014\000\255\255\ + \014\000\014\000\014\000\014\000\014\000\255\255\255\255\255\255\ + \015\000\255\255\255\255\015\000\015\000\015\000\175\000\255\255\ + \255\255\015\000\015\000\255\255\015\000\015\000\015\000\255\255\ + \111\000\255\255\255\255\011\000\255\255\011\000\255\255\255\255\ + 
\255\255\015\000\014\000\015\000\015\000\015\000\015\000\015\000\ + \255\255\255\255\018\000\255\255\255\255\018\000\018\000\018\000\ + \255\255\255\255\255\255\018\000\018\000\255\255\018\000\018\000\ + \018\000\255\255\255\255\112\000\255\255\255\255\255\255\255\255\ + \014\000\255\255\014\000\018\000\255\255\015\000\018\000\018\000\ + \018\000\018\000\202\000\255\255\255\255\019\000\255\255\255\255\ + \019\000\019\000\019\000\255\255\255\255\255\255\019\000\019\000\ + \255\255\019\000\019\000\019\000\213\000\255\255\255\255\214\000\ + \255\255\216\000\255\255\015\000\255\255\015\000\019\000\018\000\ + \019\000\019\000\019\000\019\000\019\000\255\255\255\255\255\255\ + \023\000\255\255\255\255\023\000\023\000\023\000\148\000\255\255\ + \255\255\023\000\023\000\255\255\023\000\023\000\023\000\255\255\ + \255\255\255\255\255\255\255\255\255\255\018\000\255\255\018\000\ + \255\255\023\000\019\000\023\000\023\000\023\000\023\000\023\000\ + \083\000\083\000\083\000\083\000\083\000\083\000\083\000\083\000\ + \083\000\083\000\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\083\000\083\000\083\000\083\000\083\000\083\000\255\255\ + \019\000\255\255\019\000\255\255\255\255\023\000\255\255\255\255\ + \255\255\255\255\175\000\255\255\255\255\255\255\255\255\024\000\ + \193\000\193\000\193\000\193\000\193\000\193\000\193\000\193\000\ + \255\255\083\000\083\000\083\000\083\000\083\000\083\000\255\255\ + \255\255\255\255\255\255\023\000\255\255\023\000\024\000\024\000\ + \255\255\024\000\024\000\024\000\024\000\255\255\255\255\255\255\ + \024\000\024\000\107\000\024\000\024\000\024\000\024\000\024\000\ + \024\000\024\000\024\000\024\000\024\000\024\000\024\000\024\000\ + \024\000\255\255\024\000\024\000\024\000\024\000\024\000\255\255\ + \255\255\107\000\025\000\255\255\255\255\025\000\025\000\025\000\ + \255\255\255\255\025\000\025\000\025\000\255\255\025\000\025\000\ + \025\000\107\000\107\000\107\000\107\000\107\000\107\000\107\000\ + \107\000\107\000\107\000\025\000\024\000\025\000\025\000\025\000\ + \025\000\025\000\165\000\165\000\165\000\165\000\165\000\165\000\ + \165\000\165\000\165\000\165\000\168\000\168\000\168\000\168\000\ + \168\000\168\000\168\000\168\000\168\000\168\000\255\255\255\255\ + \255\255\255\255\024\000\028\000\024\000\255\255\075\000\025\000\ + \075\000\075\000\075\000\075\000\075\000\075\000\075\000\075\000\ + \075\000\075\000\075\000\075\000\075\000\075\000\075\000\075\000\ + \075\000\075\000\075\000\075\000\075\000\075\000\075\000\075\000\ + \075\000\075\000\255\255\075\000\255\255\025\000\028\000\025\000\ + \028\000\028\000\028\000\028\000\028\000\028\000\028\000\028\000\ + \028\000\028\000\028\000\028\000\028\000\028\000\028\000\028\000\ + \028\000\028\000\028\000\028\000\028\000\028\000\028\000\028\000\ + \028\000\028\000\030\000\028\000\255\255\255\255\255\255\255\255\ + \255\255\030\000\255\255\030\000\030\000\030\000\030\000\030\000\ + \030\000\030\000\030\000\030\000\030\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\030\000\030\000\030\000\030\000\ + \030\000\030\000\030\000\030\000\030\000\030\000\030\000\030\000\ + \030\000\030\000\030\000\030\000\030\000\030\000\030\000\030\000\ + \030\000\030\000\030\000\030\000\030\000\030\000\255\255\255\255\ + \255\255\255\255\030\000\255\255\030\000\030\000\030\000\030\000\ + \030\000\030\000\030\000\030\000\030\000\030\000\030\000\030\000\ + \030\000\030\000\030\000\030\000\030\000\030\000\030\000\030\000\ + \030\000\030\000\030\000\030\000\030\000\030\000\031\000\255\255\ + 
\255\255\255\255\255\255\255\255\255\255\031\000\255\255\031\000\ + \031\000\031\000\031\000\031\000\031\000\031\000\031\000\031\000\ + \031\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \031\000\031\000\031\000\031\000\031\000\031\000\031\000\031\000\ + \031\000\031\000\031\000\031\000\031\000\031\000\031\000\031\000\ + \031\000\031\000\031\000\031\000\031\000\031\000\031\000\031\000\ + \031\000\031\000\255\255\255\255\255\255\255\255\031\000\255\255\ + \031\000\031\000\031\000\031\000\031\000\031\000\031\000\031\000\ + \031\000\031\000\031\000\031\000\031\000\031\000\031\000\031\000\ + \031\000\031\000\031\000\031\000\031\000\031\000\031\000\031\000\ + \031\000\031\000\032\000\194\000\194\000\194\000\194\000\194\000\ + \194\000\194\000\194\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\255\255\255\255\ + \255\255\255\255\032\000\255\255\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\095\000\095\000\ + \095\000\095\000\095\000\095\000\095\000\095\000\095\000\095\000\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\095\000\ + \095\000\095\000\095\000\095\000\095\000\186\000\186\000\186\000\ + \186\000\186\000\186\000\186\000\186\000\186\000\186\000\191\000\ + \191\000\191\000\191\000\191\000\191\000\191\000\191\000\191\000\ + \191\000\255\255\255\255\255\255\255\255\255\255\255\255\095\000\ + \095\000\095\000\095\000\095\000\095\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\255\255\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\033\000\032\000\032\000\032\000\032\000\032\000\ + \032\000\032\000\032\000\033\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\033\000\033\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\033\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\033\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\033\000\033\000\033\000\255\255\255\255\ + \255\255\255\255\033\000\255\255\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\033\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\033\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\033\000\033\000\033\000\106\000\106\000\ + \255\255\255\255\106\000\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\106\000\255\255\106\000\ + \255\255\255\255\255\255\255\255\143\000\255\255\255\255\143\000\ + \255\255\255\255\255\255\255\255\255\255\106\000\106\000\106\000\ + 
\106\000\106\000\106\000\106\000\106\000\106\000\106\000\255\255\ + \255\255\255\255\255\255\255\255\143\000\255\255\255\255\255\255\ + \255\255\143\000\143\000\255\255\143\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\033\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\033\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\033\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\255\255\033\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\033\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\033\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\033\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\034\000\033\000\033\000\033\000\033\000\033\000\ + \033\000\033\000\033\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\143\000\255\255\255\255\ + \255\255\255\255\255\255\255\255\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\255\255\255\255\ + \255\255\255\255\034\000\255\255\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\149\000\255\255\ + \149\000\149\000\149\000\149\000\149\000\149\000\149\000\149\000\ + \149\000\149\000\149\000\149\000\149\000\149\000\149\000\149\000\ + \149\000\149\000\149\000\149\000\149\000\149\000\149\000\149\000\ + \149\000\149\000\255\255\149\000\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\156\000\106\000\156\000\156\000\ + \156\000\156\000\156\000\156\000\156\000\156\000\156\000\156\000\ + \156\000\156\000\156\000\156\000\156\000\156\000\156\000\156\000\ + \156\000\156\000\156\000\156\000\156\000\156\000\156\000\156\000\ + \255\255\156\000\143\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\255\255\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\035\000\034\000\034\000\034\000\034\000\034\000\ + \034\000\034\000\034\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\255\255\255\255\ + \255\255\255\255\035\000\255\255\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\164\000\164\000\ + \164\000\164\000\164\000\164\000\164\000\164\000\164\000\164\000\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\164\000\ + \164\000\164\000\164\000\164\000\164\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + 
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\164\000\ + \164\000\164\000\164\000\164\000\164\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\255\255\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\255\255\035\000\035\000\035\000\035\000\035\000\ + \035\000\035\000\035\000\036\000\255\255\255\255\036\000\036\000\ + \036\000\255\255\255\255\255\255\036\000\036\000\255\255\036\000\ + \036\000\036\000\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\036\000\255\255\036\000\036\000\ + \036\000\036\000\036\000\255\255\204\000\255\255\204\000\204\000\ + \204\000\204\000\204\000\204\000\204\000\204\000\204\000\204\000\ + \204\000\204\000\204\000\204\000\204\000\204\000\204\000\204\000\ + \204\000\204\000\204\000\204\000\204\000\204\000\204\000\204\000\ + \036\000\036\000\204\000\036\000\036\000\036\000\036\000\036\000\ + \036\000\036\000\036\000\036\000\036\000\036\000\036\000\036\000\ + \036\000\036\000\036\000\036\000\036\000\036\000\036\000\036\000\ + \036\000\036\000\036\000\036\000\036\000\255\255\036\000\037\000\ + \036\000\255\255\037\000\037\000\037\000\255\255\255\255\255\255\ + \037\000\037\000\255\255\037\000\037\000\037\000\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \037\000\255\255\037\000\037\000\037\000\037\000\037\000\255\255\ + \210\000\255\255\210\000\210\000\210\000\210\000\210\000\210\000\ + \210\000\210\000\210\000\210\000\210\000\210\000\210\000\210\000\ + \210\000\210\000\210\000\210\000\210\000\210\000\210\000\210\000\ + \210\000\210\000\210\000\210\000\037\000\037\000\210\000\037\000\ + \037\000\037\000\037\000\037\000\037\000\037\000\037\000\037\000\ + \037\000\037\000\037\000\037\000\037\000\037\000\037\000\037\000\ + \037\000\037\000\037\000\037\000\037\000\037\000\037\000\037\000\ + \037\000\255\255\037\000\255\255\037\000\255\255\255\255\255\255\ + \255\255\036\000\036\000\036\000\036\000\036\000\036\000\036\000\ + \036\000\036\000\036\000\036\000\036\000\036\000\036\000\036\000\ + \036\000\036\000\036\000\036\000\036\000\036\000\036\000\036\000\ + \036\000\255\255\036\000\036\000\036\000\036\000\036\000\036\000\ + \036\000\036\000\171\000\171\000\171\000\171\000\171\000\171\000\ + \171\000\171\000\171\000\171\000\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\171\000\171\000\171\000\171\000\171\000\ + \171\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\171\000\171\000\171\000\171\000\171\000\ + \171\000\255\255\255\255\255\255\255\255\037\000\037\000\037\000\ + \037\000\037\000\037\000\037\000\037\000\037\000\037\000\037\000\ + \037\000\037\000\037\000\037\000\037\000\037\000\037\000\037\000\ + \037\000\037\000\037\000\037\000\037\000\038\000\037\000\037\000\ + \037\000\037\000\037\000\037\000\037\000\037\000\038\000\038\000\ + \038\000\038\000\038\000\038\000\038\000\038\000\038\000\038\000\ + 
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\038\000\ + \038\000\038\000\038\000\038\000\038\000\038\000\038\000\038\000\ + \038\000\038\000\038\000\038\000\038\000\038\000\038\000\038\000\ + \038\000\038\000\038\000\038\000\038\000\038\000\038\000\038\000\ + \038\000\255\255\255\255\255\255\255\255\038\000\255\255\038\000\ + \038\000\038\000\038\000\038\000\038\000\038\000\038\000\038\000\ + \038\000\038\000\038\000\038\000\038\000\038\000\038\000\038\000\ + \038\000\038\000\038\000\038\000\038\000\038\000\038\000\038\000\ + \038\000\184\000\184\000\184\000\184\000\184\000\184\000\184\000\ + \184\000\184\000\184\000\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\184\000\184\000\184\000\184\000\184\000\184\000\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\184\000\184\000\184\000\184\000\184\000\184\000\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\038\000\038\000\ + \038\000\038\000\038\000\038\000\038\000\038\000\038\000\038\000\ + \038\000\038\000\038\000\038\000\038\000\038\000\038\000\038\000\ + \038\000\038\000\038\000\038\000\038\000\255\255\038\000\038\000\ + \038\000\038\000\038\000\038\000\038\000\038\000\038\000\038\000\ + \038\000\038\000\038\000\038\000\038\000\038\000\038\000\038\000\ + \038\000\038\000\038\000\038\000\038\000\038\000\038\000\038\000\ + \038\000\038\000\038\000\038\000\038\000\255\255\038\000\038\000\ + \038\000\038\000\038\000\038\000\038\000\038\000\046\000\255\255\ + \255\255\046\000\046\000\046\000\255\255\255\255\255\255\046\000\ + \046\000\255\255\046\000\046\000\046\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\046\000\ + \255\255\046\000\046\000\046\000\046\000\046\000\255\255\255\255\ + \255\255\255\255\047\000\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\255\255\255\255\ + \255\255\255\255\255\255\046\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\255\255\255\255\ + \255\255\046\000\047\000\046\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\196\000\196\000\ + \196\000\196\000\196\000\196\000\196\000\196\000\196\000\196\000\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\196\000\ + \196\000\196\000\196\000\196\000\196\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\196\000\ + \196\000\196\000\196\000\196\000\196\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\255\255\047\000\047\000\047\000\047\000\047\000\ + 
\047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\048\000\047\000\047\000\047\000\047\000\047\000\ + \047\000\047\000\047\000\048\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\048\000\048\000\048\000\048\000\255\255\255\255\ + \255\255\255\255\255\255\255\255\048\000\048\000\048\000\048\000\ + \048\000\048\000\048\000\048\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\048\000\048\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\048\000\048\000\048\000\048\000\255\255\255\255\ + \255\255\255\255\048\000\255\255\048\000\048\000\048\000\048\000\ + \048\000\048\000\048\000\048\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\048\000\048\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\048\000\048\000\048\000\048\000\198\000\198\000\ + \198\000\198\000\198\000\198\000\198\000\198\000\198\000\198\000\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\198\000\ + \198\000\198\000\198\000\198\000\198\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\198\000\ + \198\000\198\000\198\000\198\000\198\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\048\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\048\000\048\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\048\000\048\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\255\255\048\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\048\000\048\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\048\000\048\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\048\000\048\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\051\000\048\000\048\000\048\000\048\000\048\000\ + \048\000\048\000\048\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\255\255\255\255\ + \255\255\255\255\255\255\255\255\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\255\255\255\255\ + \255\255\255\255\051\000\255\255\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\255\255\051\000\051\000\051\000\051\000\051\000\ + 
\051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\052\000\051\000\051\000\051\000\051\000\051\000\ + \051\000\051\000\051\000\052\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\052\000\052\000\052\000\255\255\255\255\ + \255\255\255\255\255\255\255\255\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\052\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\052\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\052\000\052\000\052\000\255\255\255\255\ + \255\255\255\255\052\000\255\255\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\052\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\052\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\052\000\052\000\052\000\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\052\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\052\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\052\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\255\255\052\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\052\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\052\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\052\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\055\000\052\000\052\000\052\000\052\000\052\000\ + \052\000\052\000\052\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\255\255\255\255\ + \255\255\255\255\055\000\255\255\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ + \055\000\055\000\055\000\055\000\055\000\055\000\056\000\255\255\ + \255\255\255\255\056\000\255\255\056\000\255\255\255\255\056\000\ + \056\000\056\000\056\000\056\000\056\000\056\000\056\000\056\000\ + \056\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \056\000\056\000\056\000\056\000\056\000\056\000\056\000\056\000\ + \056\000\056\000\056\000\056\000\056\000\056\000\056\000\056\000\ + \056\000\056\000\056\000\056\000\056\000\056\000\056\000\056\000\ + \056\000\056\000\255\255\255\255\255\255\255\255\056\000\255\255\ + \056\000\056\000\056\000\056\000\056\000\056\000\056\000\056\000\ + \056\000\056\000\056\000\056\000\056\000\056\000\056\000\056\000\ + \056\000\056\000\056\000\056\000\056\000\056\000\056\000\056\000\ + \056\000\056\000\057\000\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\057\000\057\000\057\000\057\000\057\000\ + 
\057\000\057\000\057\000\057\000\057\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\057\000\057\000\057\000\057\000\ + \057\000\057\000\057\000\057\000\057\000\057\000\057\000\057\000\ + \057\000\057\000\057\000\057\000\057\000\057\000\057\000\057\000\ + \057\000\057\000\057\000\057\000\057\000\057\000\255\255\255\255\ + \255\255\255\255\057\000\255\255\057\000\057\000\057\000\057\000\ + \057\000\057\000\057\000\057\000\057\000\057\000\057\000\057\000\ + \057\000\057\000\057\000\057\000\057\000\057\000\057\000\057\000\ + \057\000\057\000\057\000\057\000\057\000\057\000\058\000\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\058\000\058\000\ + \058\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \058\000\058\000\058\000\058\000\058\000\058\000\058\000\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\058\000\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\058\000\058\000\ + \058\000\058\000\255\255\255\255\255\255\255\255\058\000\255\255\ + \058\000\058\000\058\000\058\000\058\000\058\000\058\000\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\058\000\058\000\ + \058\000\058\000\058\000\058\000\058\000\058\000\058\000\058\000\ + \058\000\058\000\059\000\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\059\000\059\000\059\000\059\000\059\000\ + \059\000\059\000\059\000\059\000\059\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\059\000\059\000\059\000\059\000\ + \059\000\059\000\059\000\059\000\059\000\059\000\059\000\059\000\ + \059\000\059\000\059\000\059\000\059\000\059\000\059\000\059\000\ + \059\000\059\000\059\000\059\000\059\000\059\000\255\255\255\255\ + \255\255\255\255\059\000\255\255\059\000\059\000\059\000\059\000\ + \059\000\059\000\059\000\059\000\059\000\059\000\059\000\059\000\ + \059\000\059\000\059\000\059\000\059\000\059\000\059\000\059\000\ + \059\000\059\000\059\000\059\000\059\000\059\000\060\000\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\060\000\ + \060\000\060\000\060\000\060\000\060\000\060\000\060\000\060\000\ + \060\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \060\000\060\000\060\000\060\000\060\000\060\000\060\000\060\000\ + \060\000\060\000\060\000\060\000\060\000\060\000\060\000\060\000\ + \060\000\060\000\060\000\060\000\060\000\060\000\060\000\060\000\ + \060\000\060\000\255\255\255\255\255\255\255\255\060\000\255\255\ + \060\000\060\000\060\000\060\000\060\000\060\000\060\000\060\000\ + \060\000\060\000\060\000\060\000\060\000\060\000\060\000\060\000\ + \060\000\060\000\060\000\060\000\060\000\060\000\060\000\060\000\ + \060\000\060\000\061\000\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\061\000\061\000\061\000\061\000\061\000\ + \061\000\061\000\061\000\061\000\061\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\061\000\061\000\061\000\061\000\ + \061\000\061\000\061\000\061\000\061\000\061\000\061\000\061\000\ + \061\000\061\000\061\000\061\000\061\000\061\000\061\000\061\000\ + \061\000\061\000\061\000\061\000\061\000\061\000\255\255\255\255\ + \255\255\255\255\061\000\255\255\061\000\061\000\061\000\061\000\ + \061\000\061\000\061\000\061\000\061\000\061\000\061\000\061\000\ + \061\000\061\000\061\000\061\000\061\000\061\000\061\000\061\000\ + \061\000\061\000\061\000\061\000\061\000\061\000\062\000\255\255\ + \255\255\255\255\255\255\255\255\255\255\062\000\255\255\062\000\ + 
\062\000\062\000\062\000\062\000\062\000\062\000\062\000\062\000\ + \062\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \062\000\062\000\062\000\062\000\062\000\062\000\062\000\062\000\ + \062\000\062\000\062\000\062\000\062\000\062\000\062\000\062\000\ + \062\000\062\000\062\000\062\000\062\000\062\000\062\000\062\000\ + \062\000\062\000\255\255\255\255\255\255\255\255\062\000\255\255\ + \062\000\062\000\062\000\062\000\062\000\062\000\062\000\062\000\ + \062\000\062\000\062\000\062\000\062\000\062\000\062\000\062\000\ + \062\000\062\000\062\000\062\000\062\000\062\000\062\000\062\000\ + \062\000\062\000\063\000\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\063\000\063\000\063\000\063\000\063\000\ + \063\000\063\000\063\000\063\000\063\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\063\000\063\000\063\000\063\000\ + \063\000\063\000\063\000\063\000\063\000\063\000\063\000\063\000\ + \063\000\063\000\063\000\063\000\063\000\063\000\063\000\063\000\ + \063\000\063\000\063\000\063\000\063\000\063\000\255\255\255\255\ + \255\255\255\255\063\000\255\255\063\000\063\000\063\000\063\000\ + \063\000\063\000\063\000\063\000\063\000\063\000\063\000\063\000\ + \063\000\063\000\063\000\063\000\063\000\063\000\063\000\063\000\ + \063\000\063\000\063\000\063\000\063\000\063\000\064\000\255\255\ + \255\255\255\255\064\000\255\255\064\000\255\255\255\255\064\000\ + \064\000\064\000\064\000\064\000\064\000\064\000\064\000\064\000\ + \064\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \064\000\064\000\064\000\064\000\064\000\064\000\064\000\064\000\ + \064\000\064\000\064\000\064\000\064\000\064\000\064\000\064\000\ + \064\000\064\000\064\000\064\000\064\000\064\000\064\000\064\000\ + \064\000\064\000\255\255\255\255\255\255\255\255\064\000\255\255\ + \064\000\064\000\064\000\064\000\064\000\064\000\064\000\064\000\ + \064\000\064\000\064\000\064\000\064\000\064\000\064\000\064\000\ + \064\000\064\000\064\000\064\000\064\000\064\000\064\000\064\000\ + \064\000\064\000\065\000\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\065\000\065\000\065\000\065\000\065\000\ + \065\000\065\000\065\000\065\000\065\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\065\000\065\000\065\000\065\000\ + \065\000\065\000\065\000\065\000\065\000\065\000\065\000\065\000\ + \065\000\065\000\065\000\065\000\065\000\065\000\065\000\065\000\ + \065\000\065\000\065\000\065\000\065\000\065\000\255\255\255\255\ + \255\255\255\255\065\000\255\255\065\000\065\000\065\000\065\000\ + \065\000\065\000\065\000\065\000\065\000\065\000\065\000\065\000\ + \065\000\065\000\065\000\065\000\065\000\065\000\065\000\065\000\ + \065\000\065\000\065\000\065\000\065\000\065\000\067\000\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\067\000\ + \067\000\067\000\067\000\067\000\067\000\067\000\067\000\067\000\ + \067\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \067\000\067\000\067\000\067\000\067\000\067\000\067\000\067\000\ + \067\000\067\000\067\000\067\000\067\000\067\000\067\000\067\000\ + \067\000\067\000\067\000\067\000\067\000\067\000\067\000\067\000\ + \067\000\067\000\255\255\255\255\255\255\255\255\067\000\255\255\ + \067\000\067\000\067\000\067\000\067\000\067\000\067\000\067\000\ + \067\000\067\000\067\000\067\000\067\000\067\000\067\000\067\000\ + \067\000\067\000\067\000\067\000\067\000\067\000\067\000\067\000\ + \067\000\067\000\068\000\255\255\255\255\255\255\068\000\255\255\ + 
\068\000\255\255\255\255\068\000\068\000\068\000\068\000\068\000\ + \068\000\068\000\068\000\068\000\068\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\068\000\068\000\068\000\068\000\ + \068\000\068\000\068\000\068\000\068\000\068\000\068\000\068\000\ + \068\000\068\000\068\000\068\000\068\000\068\000\068\000\068\000\ + \068\000\068\000\068\000\068\000\068\000\068\000\255\255\255\255\ + \255\255\255\255\068\000\255\255\068\000\068\000\068\000\068\000\ + \068\000\068\000\068\000\068\000\068\000\068\000\068\000\068\000\ + \068\000\068\000\068\000\068\000\068\000\068\000\068\000\068\000\ + \068\000\068\000\068\000\068\000\068\000\068\000\069\000\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\069\000\ + \069\000\069\000\069\000\069\000\069\000\069\000\069\000\069\000\ + \069\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \069\000\069\000\069\000\069\000\069\000\069\000\069\000\069\000\ + \069\000\069\000\069\000\069\000\069\000\069\000\069\000\069\000\ + \069\000\069\000\069\000\069\000\069\000\069\000\069\000\069\000\ + \069\000\069\000\255\255\255\255\255\255\255\255\069\000\255\255\ + \069\000\069\000\069\000\069\000\069\000\069\000\069\000\069\000\ + \069\000\069\000\069\000\069\000\069\000\069\000\069\000\069\000\ + \069\000\069\000\069\000\069\000\069\000\069\000\069\000\069\000\ + \069\000\069\000\070\000\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\070\000\070\000\070\000\070\000\070\000\ + \070\000\070\000\070\000\070\000\070\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\070\000\070\000\070\000\070\000\ + \070\000\070\000\070\000\070\000\070\000\070\000\070\000\070\000\ + \070\000\070\000\070\000\070\000\070\000\070\000\070\000\070\000\ + \070\000\070\000\070\000\070\000\070\000\070\000\255\255\255\255\ + \255\255\255\255\070\000\255\255\070\000\070\000\070\000\070\000\ + \070\000\070\000\070\000\070\000\070\000\070\000\070\000\070\000\ + \070\000\070\000\070\000\070\000\070\000\070\000\070\000\070\000\ + \070\000\070\000\070\000\070\000\070\000\070\000\071\000\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\071\000\ + \071\000\071\000\071\000\071\000\071\000\071\000\071\000\071\000\ + \071\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \071\000\071\000\071\000\071\000\071\000\071\000\071\000\071\000\ + \071\000\071\000\071\000\071\000\071\000\071\000\071\000\071\000\ + \071\000\071\000\071\000\071\000\071\000\071\000\071\000\071\000\ + \071\000\071\000\255\255\255\255\255\255\255\255\071\000\255\255\ + \071\000\071\000\071\000\071\000\071\000\071\000\071\000\071\000\ + \071\000\071\000\071\000\071\000\071\000\071\000\071\000\071\000\ + \071\000\071\000\071\000\071\000\071\000\071\000\071\000\071\000\ + \071\000\071\000\076\000\103\000\076\000\255\255\103\000\103\000\ + \103\000\076\000\255\255\255\255\103\000\103\000\255\255\103\000\ + \103\000\103\000\076\000\076\000\076\000\076\000\076\000\076\000\ + \076\000\076\000\076\000\076\000\103\000\255\255\103\000\103\000\ + \103\000\103\000\103\000\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\105\000\255\255\105\000\105\000\105\000\105\000\ + \255\255\255\255\255\255\105\000\105\000\255\255\105\000\105\000\ + \105\000\255\255\255\255\255\255\255\255\255\255\076\000\255\255\ + \103\000\255\255\255\255\105\000\076\000\105\000\105\000\105\000\ + \105\000\105\000\255\255\255\255\255\255\255\255\255\255\255\255\ + \076\000\076\000\255\255\255\255\076\000\255\255\076\000\255\255\ + 
\116\000\255\255\076\000\116\000\116\000\116\000\103\000\255\255\ + \103\000\116\000\116\000\255\255\116\000\116\000\116\000\105\000\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\116\000\255\255\116\000\116\000\116\000\116\000\116\000\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\117\000\ + \255\255\255\255\117\000\117\000\117\000\105\000\255\255\105\000\ + \117\000\117\000\255\255\117\000\117\000\117\000\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\116\000\255\255\255\255\ + \117\000\255\255\117\000\117\000\117\000\117\000\117\000\255\255\ + \255\255\255\255\118\000\255\255\255\255\118\000\118\000\118\000\ + \255\255\255\255\255\255\118\000\118\000\255\255\118\000\118\000\ + \118\000\255\255\255\255\116\000\255\255\116\000\255\255\255\255\ + \255\255\255\255\255\255\118\000\117\000\118\000\118\000\118\000\ + \118\000\118\000\255\255\255\255\255\255\255\255\255\255\255\255\ + \119\000\255\255\255\255\119\000\119\000\119\000\255\255\255\255\ + \255\255\119\000\119\000\255\255\119\000\119\000\119\000\255\255\ + \255\255\255\255\117\000\255\255\117\000\255\255\255\255\118\000\ + \255\255\119\000\076\000\119\000\119\000\119\000\119\000\119\000\ + \255\255\255\255\255\255\120\000\255\255\255\255\120\000\120\000\ + \120\000\255\255\255\255\255\255\120\000\120\000\255\255\120\000\ + \120\000\120\000\255\255\255\255\255\255\118\000\255\255\118\000\ + \255\255\255\255\255\255\255\255\120\000\119\000\120\000\120\000\ + \120\000\120\000\120\000\255\255\255\255\255\255\126\000\255\255\ + \255\255\126\000\126\000\126\000\255\255\255\255\255\255\126\000\ + \126\000\255\255\126\000\126\000\126\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\119\000\255\255\119\000\255\255\126\000\ + \120\000\126\000\126\000\126\000\126\000\126\000\255\255\255\255\ + \255\255\136\000\255\255\255\255\136\000\136\000\136\000\255\255\ + \255\255\255\255\136\000\136\000\255\255\136\000\136\000\136\000\ + \255\255\255\255\255\255\255\255\255\255\255\255\120\000\255\255\ + \120\000\255\255\136\000\126\000\136\000\136\000\136\000\136\000\ + \136\000\255\255\255\255\255\255\139\000\255\255\255\255\139\000\ + \139\000\139\000\255\255\255\255\255\255\139\000\139\000\255\255\ + \139\000\139\000\139\000\255\255\255\255\255\255\255\255\255\255\ + \255\255\126\000\255\255\126\000\255\255\139\000\136\000\139\000\ + \139\000\139\000\139\000\139\000\255\255\255\255\255\255\140\000\ + \255\255\255\255\140\000\140\000\140\000\255\255\255\255\255\255\ + \140\000\140\000\255\255\140\000\140\000\140\000\255\255\255\255\ + \255\255\255\255\255\255\255\255\136\000\255\255\136\000\255\255\ + \140\000\139\000\140\000\140\000\140\000\140\000\140\000\255\255\ + \255\255\255\255\141\000\255\255\255\255\141\000\141\000\141\000\ + \255\255\255\255\255\255\141\000\141\000\255\255\141\000\141\000\ + \141\000\255\255\255\255\255\255\255\255\255\255\255\255\139\000\ + \255\255\139\000\255\255\141\000\140\000\141\000\141\000\141\000\ + \141\000\141\000\255\255\255\255\255\255\142\000\255\255\255\255\ + \142\000\142\000\142\000\255\255\255\255\255\255\142\000\142\000\ + \255\255\142\000\142\000\142\000\255\255\255\255\157\000\255\255\ + \157\000\255\255\140\000\255\255\140\000\157\000\142\000\141\000\ + \142\000\142\000\142\000\142\000\142\000\255\255\157\000\157\000\ + \157\000\157\000\157\000\157\000\157\000\157\000\157\000\157\000\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\141\000\255\255\141\000\ + 
\255\255\255\255\142\000\255\255\255\255\255\255\255\255\255\255\ + \255\255\180\000\255\255\255\255\180\000\255\255\255\255\255\255\ + \255\255\255\255\157\000\255\255\255\255\255\255\255\255\255\255\ + \157\000\255\255\255\255\255\255\255\255\255\255\255\255\180\000\ + \142\000\180\000\142\000\255\255\157\000\255\255\180\000\255\255\ + \157\000\255\255\157\000\255\255\255\255\255\255\157\000\180\000\ + \180\000\180\000\180\000\180\000\180\000\180\000\180\000\180\000\ + \180\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\199\000\199\000\199\000\199\000\199\000\199\000\199\000\ + \199\000\199\000\199\000\180\000\255\255\255\255\255\255\255\255\ + \255\255\180\000\199\000\199\000\199\000\199\000\199\000\199\000\ + \255\255\255\255\255\255\255\255\255\255\180\000\180\000\255\255\ + \255\255\180\000\255\255\180\000\180\000\255\255\255\255\180\000\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\199\000\199\000\199\000\199\000\199\000\199\000\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\199\000\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\180\000\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255"; + Lexing.lex_base_code = + "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\010\000\036\000\022\000\000\000\000\000\000\000\ + \005\000\000\000\039\000\000\000\000\000\000\000\000\000\000\000\ + 
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\001\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\002\000\005\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000"; + Lexing.lex_backtrk_code = + "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\053\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000"; + Lexing.lex_default_code = + "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + 
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\031\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000"; + Lexing.lex_trans_code = + "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\001\000\000\000\050\000\050\000\000\000\009\000\050\000\ + \000\000\000\000\000\000\009\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \001\000\000\000\009\000\001\000\000\000\009\000\000\000\034\000\ + \000\000\000\000\009\000\000\000\012\000\001\000\000\000\000\000\ + \004\000\004\000\004\000\004\000\004\000\004\000\004\000\004\000\ + \004\000\004\000\017\000\017\000\017\000\017\000\017\000\017\000\ + \017\000\017\000\017\000\017\000\001\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\017\000\017\000\017\000\017\000\ + \017\000\017\000\017\000\017\000\017\000\017\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + 
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ + \000\000\000\000\000\000\000\000\000\000"; + Lexing.lex_check_code = + "\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\024\000\111\000\180\000\189\000\111\000\112\000\190\000\ + \255\255\255\255\255\255\106\000\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \024\000\255\255\111\000\000\000\255\255\112\000\255\255\112\000\ + \255\255\255\255\106\000\255\255\106\000\107\000\255\255\255\255\ + \024\000\024\000\024\000\024\000\024\000\024\000\024\000\024\000\ + \024\000\024\000\106\000\106\000\106\000\106\000\106\000\106\000\ + \106\000\106\000\106\000\106\000\107\000\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\107\000\107\000\107\000\107\000\ + \107\000\107\000\107\000\107\000\107\000\107\000\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \111\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ + \255\255\255\255\255\255\255\255\255\255"; + Lexing.lex_code = + "\255\005\255\255\007\255\006\255\255\007\255\255\009\255\008\255\ + \255\006\255\007\255\255\004\255\000\005\001\006\002\007\255\009\ + \255\255\008\255\009\255\255\000\005\001\006\004\008\003\009\002\ + \007\255\001\255\255\000\001\255"; +} + +let rec token lexbuf = + lexbuf.Lexing.lex_mem <- Array.make 10 (-1); __ocaml_lex_token_rec lexbuf 0 +and __ocaml_lex_token_rec lexbuf __ocaml_lex_state = + match Lexing.new_engine __ocaml_lex_tables __ocaml_lex_state lexbuf with + | 0 -> +# 770 "parsing/lexer.mll" + ( + if not !escaped_newlines then + raise (Error(Illegal_character (Lexing.lexeme_char lexbuf 0), + Location.curr lexbuf)); + update_loc lexbuf None 1 false 0; + token lexbuf ) +# 2358 "parsing/lexer.ml" + + | 1 -> +# 777 "parsing/lexer.mll" + ( update_loc lexbuf None 1 false 0; + EOL ) +# 2364 "parsing/lexer.ml" + + | 2 -> +# 780 "parsing/lexer.mll" + ( token lexbuf ) +# 2369 
"parsing/lexer.ml" + + | 3 -> +# 782 "parsing/lexer.mll" + ( UNDERSCORE ) +# 2374 "parsing/lexer.ml" + + | 4 -> +# 784 "parsing/lexer.mll" + ( TILDE ) +# 2379 "parsing/lexer.ml" + + | 5 -> +# 786 "parsing/lexer.mll" + ( LABEL (get_label_name lexbuf) ) +# 2384 "parsing/lexer.ml" + + | 6 -> +# 788 "parsing/lexer.mll" + ( warn_latin1 lexbuf; LABEL (get_label_name lexbuf) ) +# 2389 "parsing/lexer.ml" + + | 7 -> +# 790 "parsing/lexer.mll" + ( QUESTION ) +# 2394 "parsing/lexer.ml" + + | 8 -> +# 792 "parsing/lexer.mll" + ( OPTLABEL (get_label_name lexbuf) ) +# 2399 "parsing/lexer.ml" + + | 9 -> +# 794 "parsing/lexer.mll" + ( warn_latin1 lexbuf; OPTLABEL (get_label_name lexbuf) ) +# 2404 "parsing/lexer.ml" + + | 10 -> +# 796 "parsing/lexer.mll" + ( let s = Lexing.lexeme lexbuf in + try Hashtbl.find keyword_table s + with Not_found -> LIDENT s ) +# 2411 "parsing/lexer.ml" + + | 11 -> +# 800 "parsing/lexer.mll" + ( warn_latin1 lexbuf; LIDENT (Lexing.lexeme lexbuf) ) +# 2416 "parsing/lexer.ml" + + | 12 -> +# 802 "parsing/lexer.mll" + ( UIDENT(Lexing.lexeme lexbuf) ) +# 2421 "parsing/lexer.ml" + + | 13 -> +# 804 "parsing/lexer.mll" + ( warn_latin1 lexbuf; UIDENT(Lexing.lexeme lexbuf) ) +# 2426 "parsing/lexer.ml" + + | 14 -> +# 805 "parsing/lexer.mll" + ( INT (Lexing.lexeme lexbuf, None) ) +# 2431 "parsing/lexer.ml" + + | 15 -> +let +# 806 "parsing/lexer.mll" + lit +# 2437 "parsing/lexer.ml" += Lexing.sub_lexeme lexbuf lexbuf.Lexing.lex_start_pos (lexbuf.Lexing.lex_curr_pos + -1) +and +# 806 "parsing/lexer.mll" + modif +# 2442 "parsing/lexer.ml" += Lexing.sub_lexeme_char lexbuf (lexbuf.Lexing.lex_curr_pos + -1) in +# 807 "parsing/lexer.mll" + ( INT (lit, Some modif) ) +# 2446 "parsing/lexer.ml" + + | 16 -> +# 809 "parsing/lexer.mll" + ( FLOAT (Lexing.lexeme lexbuf, None) ) +# 2451 "parsing/lexer.ml" + + | 17 -> +let +# 810 "parsing/lexer.mll" + lit +# 2457 "parsing/lexer.ml" += Lexing.sub_lexeme lexbuf lexbuf.Lexing.lex_start_pos (lexbuf.Lexing.lex_curr_pos + -1) +and +# 810 "parsing/lexer.mll" + modif +# 2462 "parsing/lexer.ml" += Lexing.sub_lexeme_char lexbuf (lexbuf.Lexing.lex_curr_pos + -1) in +# 811 "parsing/lexer.mll" + ( FLOAT (lit, Some modif) ) +# 2466 "parsing/lexer.ml" + + | 18 -> +# 813 "parsing/lexer.mll" + ( raise (Error(Invalid_literal (Lexing.lexeme lexbuf), + Location.curr lexbuf)) ) +# 2472 "parsing/lexer.ml" + + | 19 -> +# 816 "parsing/lexer.mll" + ( reset_string_buffer(); + is_in_string := true; + let string_start = lexbuf.lex_start_p in + string_start_loc := Location.curr lexbuf; + string lexbuf; + is_in_string := false; + lexbuf.lex_start_p <- string_start; + STRING (get_stored_string(), None) ) +# 2484 "parsing/lexer.ml" + + | 20 -> +# 825 "parsing/lexer.mll" + ( reset_string_buffer(); + let delim = Lexing.lexeme lexbuf in + let delim = String.sub delim 1 (String.length delim - 2) in + is_in_string := true; + let string_start = lexbuf.lex_start_p in + string_start_loc := Location.curr lexbuf; + quoted_string delim lexbuf; + is_in_string := false; + lexbuf.lex_start_p <- string_start; + STRING (get_stored_string(), Some delim) ) +# 2498 "parsing/lexer.ml" + + | 21 -> +# 836 "parsing/lexer.mll" + ( update_loc lexbuf None 1 false 1; + CHAR (Lexing.lexeme_char lexbuf 1) ) +# 2504 "parsing/lexer.ml" + + | 22 -> +# 839 "parsing/lexer.mll" + ( CHAR(Lexing.lexeme_char lexbuf 1) ) +# 2509 "parsing/lexer.ml" + + | 23 -> +# 841 "parsing/lexer.mll" + ( CHAR(char_for_backslash (Lexing.lexeme_char lexbuf 2)) ) +# 2514 "parsing/lexer.ml" + + | 24 -> +# 843 "parsing/lexer.mll" + ( 
CHAR(char_for_decimal_code lexbuf 2) ) +# 2519 "parsing/lexer.ml" + + | 25 -> +# 845 "parsing/lexer.mll" + ( CHAR(char_for_octal_code lexbuf 3) ) +# 2524 "parsing/lexer.ml" + + | 26 -> +# 847 "parsing/lexer.mll" + ( CHAR(char_for_hexadecimal_code lexbuf 3) ) +# 2529 "parsing/lexer.ml" + + | 27 -> +# 849 "parsing/lexer.mll" + ( let l = Lexing.lexeme lexbuf in + let esc = String.sub l 1 (String.length l - 1) in + raise (Error(Illegal_escape esc, Location.curr lexbuf)) + ) +# 2537 "parsing/lexer.ml" + + | 28 -> +# 854 "parsing/lexer.mll" + ( let s, loc = with_comment_buffer comment lexbuf in + COMMENT (s, loc) ) +# 2543 "parsing/lexer.ml" + + | 29 -> +# 857 "parsing/lexer.mll" + ( let s, loc = with_comment_buffer comment lexbuf in + if !handle_docstrings then + DOCSTRING (Docstrings.docstring s loc) + else + COMMENT ("*" ^ s, loc) + ) +# 2553 "parsing/lexer.ml" + + | 30 -> +let +# 863 "parsing/lexer.mll" + stars +# 2559 "parsing/lexer.ml" += Lexing.sub_lexeme lexbuf (lexbuf.Lexing.lex_start_pos + 3) lexbuf.Lexing.lex_curr_pos in +# 864 "parsing/lexer.mll" + ( let s, loc = + with_comment_buffer + (fun lexbuf -> + store_string ("*" ^ stars); + comment lexbuf) + lexbuf + in + COMMENT (s, loc) ) +# 2570 "parsing/lexer.ml" + + | 31 -> +# 873 "parsing/lexer.mll" + ( if !print_warnings then + Location.prerr_warning (Location.curr lexbuf) Warnings.Comment_start; + let s, loc = with_comment_buffer comment lexbuf in + COMMENT (s, loc) ) +# 2578 "parsing/lexer.ml" + + | 32 -> +let +# 877 "parsing/lexer.mll" + stars +# 2584 "parsing/lexer.ml" += Lexing.sub_lexeme lexbuf (lexbuf.Lexing.lex_start_pos + 2) (lexbuf.Lexing.lex_curr_pos + -2) in +# 878 "parsing/lexer.mll" + ( if !handle_docstrings && stars="" then + (* (**) is an empty docstring *) + DOCSTRING(Docstrings.docstring "" (Location.curr lexbuf)) + else + COMMENT (stars, Location.curr lexbuf) ) +# 2592 "parsing/lexer.ml" + + | 33 -> +# 884 "parsing/lexer.mll" + ( let loc = Location.curr lexbuf in + Location.prerr_warning loc Warnings.Comment_not_end; + lexbuf.Lexing.lex_curr_pos <- lexbuf.Lexing.lex_curr_pos - 1; + let curpos = lexbuf.lex_curr_p in + lexbuf.lex_curr_p <- { curpos with pos_cnum = curpos.pos_cnum - 1 }; + STAR + ) +# 2603 "parsing/lexer.ml" + + | 34 -> +let +# 891 "parsing/lexer.mll" + num +# 2609 "parsing/lexer.ml" += Lexing.sub_lexeme lexbuf lexbuf.Lexing.lex_mem.(0) lexbuf.Lexing.lex_mem.(1) +and +# 892 "parsing/lexer.mll" + name +# 2614 "parsing/lexer.ml" += Lexing.sub_lexeme_opt lexbuf lexbuf.Lexing.lex_mem.(4) lexbuf.Lexing.lex_mem.(3) +and +# 892 "parsing/lexer.mll" + directive +# 2619 "parsing/lexer.ml" += Lexing.sub_lexeme lexbuf lexbuf.Lexing.lex_start_pos lexbuf.Lexing.lex_mem.(2) in +# 894 "parsing/lexer.mll" + ( + match int_of_string num with + | exception _ -> + (* PR#7165 *) + let loc = Location.curr lexbuf in + let explanation = "line number out of range" in + let error = Invalid_directive (directive, Some explanation) in + raise (Error (error, loc)) + | line_num -> + (* Documentation says that the line number should be + positive, but we have never guarded against this and it + might have useful hackish uses. 
*) + update_loc lexbuf name line_num true 0; + token lexbuf + ) +# 2637 "parsing/lexer.ml" + + | 35 -> +# 909 "parsing/lexer.mll" + ( HASH ) +# 2642 "parsing/lexer.ml" + + | 36 -> +# 910 "parsing/lexer.mll" + ( AMPERSAND ) +# 2647 "parsing/lexer.ml" + + | 37 -> +# 911 "parsing/lexer.mll" + ( AMPERAMPER ) +# 2652 "parsing/lexer.ml" + + | 38 -> +# 912 "parsing/lexer.mll" + ( BACKQUOTE ) +# 2657 "parsing/lexer.ml" + + | 39 -> +# 913 "parsing/lexer.mll" + ( QUOTE ) +# 2662 "parsing/lexer.ml" + + | 40 -> +# 914 "parsing/lexer.mll" + ( LPAREN ) +# 2667 "parsing/lexer.ml" + + | 41 -> +# 915 "parsing/lexer.mll" + ( RPAREN ) +# 2672 "parsing/lexer.ml" + + | 42 -> +# 916 "parsing/lexer.mll" + ( STAR ) +# 2677 "parsing/lexer.ml" + + | 43 -> +# 917 "parsing/lexer.mll" + ( COMMA ) +# 2682 "parsing/lexer.ml" + + | 44 -> +# 918 "parsing/lexer.mll" + ( MINUSGREATER ) +# 2687 "parsing/lexer.ml" + + | 45 -> +# 919 "parsing/lexer.mll" + ( DOT ) +# 2692 "parsing/lexer.ml" + + | 46 -> +# 920 "parsing/lexer.mll" + ( DOTDOT ) +# 2697 "parsing/lexer.ml" + + | 47 -> +let +# 921 "parsing/lexer.mll" + s +# 2703 "parsing/lexer.ml" += Lexing.sub_lexeme lexbuf (lexbuf.Lexing.lex_start_pos + 1) lexbuf.Lexing.lex_curr_pos in +# 921 "parsing/lexer.mll" + ( DOTOP s ) +# 2707 "parsing/lexer.ml" + + | 48 -> +# 922 "parsing/lexer.mll" + ( COLON ) +# 2712 "parsing/lexer.ml" + + | 49 -> +# 923 "parsing/lexer.mll" + ( COLONCOLON ) +# 2717 "parsing/lexer.ml" + + | 50 -> +# 924 "parsing/lexer.mll" + ( COLONEQUAL ) +# 2722 "parsing/lexer.ml" + + | 51 -> +# 925 "parsing/lexer.mll" + ( COLONGREATER ) +# 2727 "parsing/lexer.ml" + + | 52 -> +# 926 "parsing/lexer.mll" + ( SEMI ) +# 2732 "parsing/lexer.ml" + + | 53 -> +# 927 "parsing/lexer.mll" + ( SEMISEMI ) +# 2737 "parsing/lexer.ml" + + | 54 -> +# 928 "parsing/lexer.mll" + ( LESS ) +# 2742 "parsing/lexer.ml" + + | 55 -> +# 929 "parsing/lexer.mll" + ( LESSMINUS ) +# 2747 "parsing/lexer.ml" + + | 56 -> +# 930 "parsing/lexer.mll" + ( EQUAL ) +# 2752 "parsing/lexer.ml" + + | 57 -> +# 931 "parsing/lexer.mll" + ( LBRACKET ) +# 2757 "parsing/lexer.ml" + + | 58 -> +# 932 "parsing/lexer.mll" + ( LBRACKETBAR ) +# 2762 "parsing/lexer.ml" + + | 59 -> +# 933 "parsing/lexer.mll" + ( LBRACKETLESS ) +# 2767 "parsing/lexer.ml" + + | 60 -> +# 934 "parsing/lexer.mll" + ( LBRACKETGREATER ) +# 2772 "parsing/lexer.ml" + + | 61 -> +# 935 "parsing/lexer.mll" + ( RBRACKET ) +# 2777 "parsing/lexer.ml" + + | 62 -> +# 936 "parsing/lexer.mll" + ( LBRACE ) +# 2782 "parsing/lexer.ml" + + | 63 -> +# 937 "parsing/lexer.mll" + ( LBRACELESS ) +# 2787 "parsing/lexer.ml" + + | 64 -> +# 938 "parsing/lexer.mll" + ( BAR ) +# 2792 "parsing/lexer.ml" + + | 65 -> +# 939 "parsing/lexer.mll" + ( BARBAR ) +# 2797 "parsing/lexer.ml" + + | 66 -> +# 940 "parsing/lexer.mll" + ( BARRBRACKET ) +# 2802 "parsing/lexer.ml" + + | 67 -> +# 941 "parsing/lexer.mll" + ( GREATER ) +# 2807 "parsing/lexer.ml" + + | 68 -> +# 942 "parsing/lexer.mll" + ( GREATERRBRACKET ) +# 2812 "parsing/lexer.ml" + + | 69 -> +# 943 "parsing/lexer.mll" + ( RBRACE ) +# 2817 "parsing/lexer.ml" + + | 70 -> +# 944 "parsing/lexer.mll" + ( GREATERRBRACE ) +# 2822 "parsing/lexer.ml" + + | 71 -> +# 945 "parsing/lexer.mll" + ( LBRACKETAT ) +# 2827 "parsing/lexer.ml" + + | 72 -> +# 946 "parsing/lexer.mll" + ( LBRACKETATAT ) +# 2832 "parsing/lexer.ml" + + | 73 -> +# 947 "parsing/lexer.mll" + ( LBRACKETATATAT ) +# 2837 "parsing/lexer.ml" + + | 74 -> +# 948 "parsing/lexer.mll" + ( LBRACKETPERCENT ) +# 2842 "parsing/lexer.ml" + + | 75 -> +# 949 "parsing/lexer.mll" + ( 
LBRACKETPERCENTPERCENT ) +# 2847 "parsing/lexer.ml" + + | 76 -> +# 950 "parsing/lexer.mll" + ( BANG ) +# 2852 "parsing/lexer.ml" + + | 77 -> +# 951 "parsing/lexer.mll" + ( INFIXOP0 "!=" ) +# 2857 "parsing/lexer.ml" + + | 78 -> +# 952 "parsing/lexer.mll" + ( PLUS ) +# 2862 "parsing/lexer.ml" + + | 79 -> +# 953 "parsing/lexer.mll" + ( PLUSDOT ) +# 2867 "parsing/lexer.ml" + + | 80 -> +# 954 "parsing/lexer.mll" + ( PLUSEQ ) +# 2872 "parsing/lexer.ml" + + | 81 -> +# 955 "parsing/lexer.mll" + ( MINUS ) +# 2877 "parsing/lexer.ml" + + | 82 -> +# 956 "parsing/lexer.mll" + ( MINUSDOT ) +# 2882 "parsing/lexer.ml" + + | 83 -> +# 959 "parsing/lexer.mll" + ( PREFIXOP(Lexing.lexeme lexbuf) ) +# 2887 "parsing/lexer.ml" + + | 84 -> +# 961 "parsing/lexer.mll" + ( PREFIXOP(Lexing.lexeme lexbuf) ) +# 2892 "parsing/lexer.ml" + + | 85 -> +# 963 "parsing/lexer.mll" + ( INFIXOP0(Lexing.lexeme lexbuf) ) +# 2897 "parsing/lexer.ml" + + | 86 -> +# 965 "parsing/lexer.mll" + ( INFIXOP1(Lexing.lexeme lexbuf) ) +# 2902 "parsing/lexer.ml" + + | 87 -> +# 967 "parsing/lexer.mll" + ( INFIXOP2(Lexing.lexeme lexbuf) ) +# 2907 "parsing/lexer.ml" + + | 88 -> +# 969 "parsing/lexer.mll" + ( INFIXOP4(Lexing.lexeme lexbuf) ) +# 2912 "parsing/lexer.ml" + + | 89 -> +# 970 "parsing/lexer.mll" + ( PERCENT ) +# 2917 "parsing/lexer.ml" + + | 90 -> +# 972 "parsing/lexer.mll" + ( INFIXOP3(Lexing.lexeme lexbuf) ) +# 2922 "parsing/lexer.ml" + + | 91 -> +# 974 "parsing/lexer.mll" + ( HASHOP(Lexing.lexeme lexbuf) ) +# 2927 "parsing/lexer.ml" + + | 92 -> +# 975 "parsing/lexer.mll" + ( + if !if_then_else <> Dir_out then + if !if_then_else = Dir_if_true then + raise (Error (Unterminated_if, Location.curr lexbuf)) + else raise (Error(Unterminated_else, Location.curr lexbuf)) + else + EOF + + ) +# 2940 "parsing/lexer.ml" + + | 93 -> +# 985 "parsing/lexer.mll" + ( raise (Error(Illegal_character (Lexing.lexeme_char lexbuf 0), + Location.curr lexbuf)) + ) +# 2947 "parsing/lexer.ml" + + | __ocaml_lex_state -> lexbuf.Lexing.refill_buff lexbuf; + __ocaml_lex_token_rec lexbuf __ocaml_lex_state + +and comment lexbuf = + __ocaml_lex_comment_rec lexbuf 143 +and __ocaml_lex_comment_rec lexbuf __ocaml_lex_state = + match Lexing.engine __ocaml_lex_tables __ocaml_lex_state lexbuf with + | 0 -> +# 991 "parsing/lexer.mll" + ( comment_start_loc := (Location.curr lexbuf) :: !comment_start_loc; + store_lexeme lexbuf; + comment lexbuf + ) +# 2962 "parsing/lexer.ml" + + | 1 -> +# 996 "parsing/lexer.mll" + ( match !comment_start_loc with + | [] -> assert false + | [_] -> comment_start_loc := []; Location.curr lexbuf + | _ :: l -> comment_start_loc := l; + store_lexeme lexbuf; + comment lexbuf + ) +# 2973 "parsing/lexer.ml" + + | 2 -> +# 1004 "parsing/lexer.mll" + ( + string_start_loc := Location.curr lexbuf; + store_string_char '\"'; + is_in_string := true; + begin try string lexbuf + with Error (Unterminated_string, str_start) -> + match !comment_start_loc with + | [] -> assert false + | loc :: _ -> + let start = List.hd (List.rev !comment_start_loc) in + comment_start_loc := []; + raise (Error (Unterminated_string_in_comment (start, str_start), + loc)) + end; + is_in_string := false; + store_string_char '\"'; + comment lexbuf ) +# 2994 "parsing/lexer.ml" + + | 3 -> +# 1022 "parsing/lexer.mll" + ( + let delim = Lexing.lexeme lexbuf in + let delim = String.sub delim 1 (String.length delim - 2) in + string_start_loc := Location.curr lexbuf; + store_lexeme lexbuf; + is_in_string := true; + begin try quoted_string delim lexbuf + with Error (Unterminated_string, str_start) 
-> + match !comment_start_loc with + | [] -> assert false + | loc :: _ -> + let start = List.hd (List.rev !comment_start_loc) in + comment_start_loc := []; + raise (Error (Unterminated_string_in_comment (start, str_start), + loc)) + end; + is_in_string := false; + store_string_char '|'; + store_string delim; + store_string_char '}'; + comment lexbuf ) +# 3019 "parsing/lexer.ml" + + | 4 -> +# 1045 "parsing/lexer.mll" + ( store_lexeme lexbuf; comment lexbuf ) +# 3024 "parsing/lexer.ml" + + | 5 -> +# 1047 "parsing/lexer.mll" + ( update_loc lexbuf None 1 false 1; + store_lexeme lexbuf; + comment lexbuf + ) +# 3032 "parsing/lexer.ml" + + | 6 -> +# 1052 "parsing/lexer.mll" + ( store_lexeme lexbuf; comment lexbuf ) +# 3037 "parsing/lexer.ml" + + | 7 -> +# 1054 "parsing/lexer.mll" + ( store_lexeme lexbuf; comment lexbuf ) +# 3042 "parsing/lexer.ml" + + | 8 -> +# 1056 "parsing/lexer.mll" + ( store_lexeme lexbuf; comment lexbuf ) +# 3047 "parsing/lexer.ml" + + | 9 -> +# 1058 "parsing/lexer.mll" + ( store_lexeme lexbuf; comment lexbuf ) +# 3052 "parsing/lexer.ml" + + | 10 -> +# 1060 "parsing/lexer.mll" + ( match !comment_start_loc with + | [] -> assert false + | loc :: _ -> + let start = List.hd (List.rev !comment_start_loc) in + comment_start_loc := []; + raise (Error (Unterminated_comment start, loc)) + ) +# 3063 "parsing/lexer.ml" + + | 11 -> +# 1068 "parsing/lexer.mll" + ( update_loc lexbuf None 1 false 0; + store_lexeme lexbuf; + comment lexbuf + ) +# 3071 "parsing/lexer.ml" + + | 12 -> +# 1073 "parsing/lexer.mll" + ( store_lexeme lexbuf; comment lexbuf ) +# 3076 "parsing/lexer.ml" + + | __ocaml_lex_state -> lexbuf.Lexing.refill_buff lexbuf; + __ocaml_lex_comment_rec lexbuf __ocaml_lex_state + +and string lexbuf = + lexbuf.Lexing.lex_mem <- Array.make 2 (-1); __ocaml_lex_string_rec lexbuf 175 +and __ocaml_lex_string_rec lexbuf __ocaml_lex_state = + match Lexing.new_engine __ocaml_lex_tables __ocaml_lex_state lexbuf with + | 0 -> +# 1077 "parsing/lexer.mll" + ( () ) +# 3088 "parsing/lexer.ml" + + | 1 -> +let +# 1078 "parsing/lexer.mll" + space +# 3094 "parsing/lexer.ml" += Lexing.sub_lexeme lexbuf lexbuf.Lexing.lex_mem.(0) lexbuf.Lexing.lex_curr_pos in +# 1079 "parsing/lexer.mll" + ( update_loc lexbuf None 1 false (String.length space); + if in_comment () then store_lexeme lexbuf; + string lexbuf + ) +# 3101 "parsing/lexer.ml" + + | 2 -> +# 1084 "parsing/lexer.mll" + ( store_escaped_char lexbuf + (char_for_backslash(Lexing.lexeme_char lexbuf 1)); + string lexbuf ) +# 3108 "parsing/lexer.ml" + + | 3 -> +# 1088 "parsing/lexer.mll" + ( store_escaped_char lexbuf (char_for_decimal_code lexbuf 1); + string lexbuf ) +# 3114 "parsing/lexer.ml" + + | 4 -> +# 1091 "parsing/lexer.mll" + ( store_escaped_char lexbuf (char_for_octal_code lexbuf 2); + string lexbuf ) +# 3120 "parsing/lexer.ml" + + | 5 -> +# 1094 "parsing/lexer.mll" + ( store_escaped_char lexbuf (char_for_hexadecimal_code lexbuf 2); + string lexbuf ) +# 3126 "parsing/lexer.ml" + + | 6 -> +# 1097 "parsing/lexer.mll" + ( store_escaped_uchar lexbuf (uchar_for_uchar_escape lexbuf); + string lexbuf ) +# 3132 "parsing/lexer.ml" + + | 7 -> +# 1100 "parsing/lexer.mll" + ( if not (in_comment ()) then begin +(* Should be an error, but we are very lax. 
+ raise (Error (Illegal_escape (Lexing.lexeme lexbuf), + Location.curr lexbuf)) +*) + let loc = Location.curr lexbuf in + Location.prerr_warning loc Warnings.Illegal_backslash; + end; + store_lexeme lexbuf; + string lexbuf + ) +# 3147 "parsing/lexer.ml" + + | 8 -> +# 1112 "parsing/lexer.mll" + ( if not (in_comment ()) then + Location.prerr_warning (Location.curr lexbuf) Warnings.Eol_in_string; + update_loc lexbuf None 1 false 0; + store_lexeme lexbuf; + string lexbuf + ) +# 3157 "parsing/lexer.ml" + + | 9 -> +# 1119 "parsing/lexer.mll" + ( is_in_string := false; + raise (Error (Unterminated_string, !string_start_loc)) ) +# 3163 "parsing/lexer.ml" + + | 10 -> +# 1122 "parsing/lexer.mll" + ( store_string_char(Lexing.lexeme_char lexbuf 0); + string lexbuf ) +# 3169 "parsing/lexer.ml" + + | __ocaml_lex_state -> lexbuf.Lexing.refill_buff lexbuf; + __ocaml_lex_string_rec lexbuf __ocaml_lex_state + +and quoted_string delim lexbuf = + __ocaml_lex_quoted_string_rec delim lexbuf 202 +and __ocaml_lex_quoted_string_rec delim lexbuf __ocaml_lex_state = + match Lexing.engine __ocaml_lex_tables __ocaml_lex_state lexbuf with + | 0 -> +# 1127 "parsing/lexer.mll" + ( update_loc lexbuf None 1 false 0; + store_lexeme lexbuf; + quoted_string delim lexbuf + ) +# 3184 "parsing/lexer.ml" + + | 1 -> +# 1132 "parsing/lexer.mll" + ( is_in_string := false; + raise (Error (Unterminated_string, !string_start_loc)) ) +# 3190 "parsing/lexer.ml" + + | 2 -> +# 1135 "parsing/lexer.mll" + ( + let edelim = Lexing.lexeme lexbuf in + let edelim = String.sub edelim 1 (String.length edelim - 2) in + if delim = edelim then () + else (store_lexeme lexbuf; quoted_string delim lexbuf) + ) +# 3200 "parsing/lexer.ml" + + | 3 -> +# 1142 "parsing/lexer.mll" + ( store_string_char(Lexing.lexeme_char lexbuf 0); + quoted_string delim lexbuf ) +# 3206 "parsing/lexer.ml" + + | __ocaml_lex_state -> lexbuf.Lexing.refill_buff lexbuf; + __ocaml_lex_quoted_string_rec delim lexbuf __ocaml_lex_state + +and skip_hash_bang lexbuf = + __ocaml_lex_skip_hash_bang_rec lexbuf 211 +and __ocaml_lex_skip_hash_bang_rec lexbuf __ocaml_lex_state = + match Lexing.engine __ocaml_lex_tables __ocaml_lex_state lexbuf with + | 0 -> +# 1147 "parsing/lexer.mll" + ( update_loc lexbuf None 3 false 0 ) +# 3218 "parsing/lexer.ml" + + | 1 -> +# 1149 "parsing/lexer.mll" + ( update_loc lexbuf None 1 false 0 ) +# 3223 "parsing/lexer.ml" + + | 2 -> +# 1150 "parsing/lexer.mll" + ( () ) +# 3228 "parsing/lexer.ml" + + | __ocaml_lex_state -> lexbuf.Lexing.refill_buff lexbuf; + __ocaml_lex_skip_hash_bang_rec lexbuf __ocaml_lex_state + +;; + +# 1152 "parsing/lexer.mll" + + let at_bol lexbuf = + let pos = Lexing.lexeme_start_p lexbuf in + pos.pos_cnum = pos.pos_bol + + let token_with_comments lexbuf = + match !preprocessor with + | None -> token lexbuf + | Some (_init, preprocess) -> preprocess token lexbuf + + type newline_state = + | NoLine (* There have been no blank lines yet. *) + | NewLine + (* There have been no blank lines, and the previous + token was a newline. *) + | BlankLine (* There have been blank lines. 
*) + + type doc_state = + | Initial (* There have been no docstrings yet *) + | After of docstring list + (* There have been docstrings, none of which were + preceded by a blank line *) + | Before of docstring list * docstring list * docstring list + (* There have been docstrings, some of which were + preceded by a blank line *) + + and docstring = Docstrings.docstring + + let interpret_directive lexbuf cont look_ahead = + let if_then_else = !if_then_else in + begin match token_with_comments lexbuf, if_then_else with + | IF, Dir_out -> + let rec skip_from_if_false () = + let token = token_with_comments lexbuf in + if token = EOF then + raise (Error (Unterminated_if, Location.curr lexbuf)) else + if token = HASH && at_bol lexbuf then + begin + let token = token_with_comments lexbuf in + match token with + | END -> + begin + update_if_then_else Dir_out; + cont lexbuf + end + | ELSE -> + begin + update_if_then_else Dir_if_false; + cont lexbuf + end + | IF -> + raise (Error (Unexpected_directive, Location.curr lexbuf)) + | _ -> + if is_elif token && + directive_parse token_with_comments lexbuf then + begin + update_if_then_else Dir_if_true; + cont lexbuf + end + else skip_from_if_false () + end + else skip_from_if_false () in + if directive_parse token_with_comments lexbuf then + begin + update_if_then_else Dir_if_true (* Next state: ELSE *); + cont lexbuf + end + else + skip_from_if_false () + | IF, (Dir_if_false | Dir_if_true)-> + raise (Error(Unexpected_directive, Location.curr lexbuf)) + | LIDENT "elif", (Dir_if_false | Dir_out) + -> (* when the predicate is false, it will continue eating `elif` *) + raise (Error(Unexpected_directive, Location.curr lexbuf)) + | (LIDENT "elif" | ELSE as token), Dir_if_true -> + (* looking for #end, however, it can not see #if anymore *) + let rec skip_from_if_true else_seen = + let token = token_with_comments lexbuf in + if token = EOF then + raise (Error (Unterminated_else, Location.curr lexbuf)) else + if token = HASH && at_bol lexbuf then + begin + let token = token_with_comments lexbuf in + match token with + | END -> + begin + update_if_then_else Dir_out; + cont lexbuf + end + | IF -> + raise (Error (Unexpected_directive, Location.curr lexbuf)) + | ELSE -> + if else_seen then + raise (Error (Unexpected_directive, Location.curr lexbuf)) + else + skip_from_if_true true + | _ -> + if else_seen && is_elif token then + raise (Error (Unexpected_directive, Location.curr lexbuf)) + else + skip_from_if_true else_seen + end + else skip_from_if_true else_seen in + skip_from_if_true (token = ELSE) + | ELSE, Dir_if_false + | ELSE, Dir_out -> + raise (Error(Unexpected_directive, Location.curr lexbuf)) + | END, (Dir_if_false | Dir_if_true ) -> + update_if_then_else Dir_out; + cont lexbuf + | END, Dir_out -> + raise (Error(Unexpected_directive, Location.curr lexbuf)) + | token, (Dir_if_true | Dir_if_false | Dir_out) -> + look_ahead token + end + + let token lexbuf = + let post_pos = lexeme_end_p lexbuf in + let attach lines docs pre_pos = + let open Docstrings in + match docs, lines with + | Initial, _ -> () + | After a, (NoLine | NewLine) -> + set_post_docstrings post_pos (List.rev a); + set_pre_docstrings pre_pos a; + | After a, BlankLine -> + set_post_docstrings post_pos (List.rev a); + set_pre_extra_docstrings pre_pos (List.rev a) + | Before(a, f, b), (NoLine | NewLine) -> + set_post_docstrings post_pos (List.rev a); + set_post_extra_docstrings post_pos + (List.rev_append f (List.rev b)); + set_floating_docstrings pre_pos (List.rev f); + set_pre_extra_docstrings 
pre_pos (List.rev a); + set_pre_docstrings pre_pos b + | Before(a, f, b), BlankLine -> + set_post_docstrings post_pos (List.rev a); + set_post_extra_docstrings post_pos + (List.rev_append f (List.rev b)); + set_floating_docstrings pre_pos + (List.rev_append f (List.rev b)); + set_pre_extra_docstrings pre_pos (List.rev a) + in + let rec loop lines docs lexbuf = + match token_with_comments lexbuf with + | COMMENT (s, loc) -> + add_comment (s, loc); + let lines' = + match lines with + | NoLine -> NoLine + | NewLine -> NoLine + | BlankLine -> BlankLine + in + loop lines' docs lexbuf + | EOL -> + let lines' = + match lines with + | NoLine -> NewLine + | NewLine -> BlankLine + | BlankLine -> BlankLine + in + loop lines' docs lexbuf + | HASH when at_bol lexbuf -> + interpret_directive lexbuf + (fun lexbuf -> loop lines docs lexbuf) + (fun token -> sharp_look_ahead := Some token; HASH) + | DOCSTRING doc -> + Docstrings.register doc; + add_docstring_comment doc; + let docs' = + if Docstrings.docstring_body doc = "/*" then + match docs with + | Initial -> Before([], [doc], []) + | After a -> Before (a, [doc], []) + | Before(a, f, b) -> Before(a, doc :: b @ f, []) + else + match docs, lines with + | Initial, (NoLine | NewLine) -> After [doc] + | Initial, BlankLine -> Before([], [], [doc]) + | After a, (NoLine | NewLine) -> After (doc :: a) + | After a, BlankLine -> Before (a, [], [doc]) + | Before(a, f, b), (NoLine | NewLine) -> Before(a, f, doc :: b) + | Before(a, f, b), BlankLine -> Before(a, b @ f, [doc]) + in + loop NoLine docs' lexbuf + | tok -> + attach lines docs (lexeme_start_p lexbuf); + tok + in + match !sharp_look_ahead with + | None -> + loop NoLine Initial lexbuf + | Some token -> + sharp_look_ahead := None ; + token + + let init () = + sharp_look_ahead := None; + update_if_then_else Dir_out; + is_in_string := false; + comment_start_loc := []; + comment_list := []; + match !preprocessor with + | None -> () + | Some (init, _preprocess) -> init () + + let rec filter_directive pos acc lexbuf : (int * int ) list = + match token_with_comments lexbuf with + | HASH when at_bol lexbuf -> + (* ^[start_pos]#if ... #then^[end_pos] *) + let start_pos = Lexing.lexeme_start lexbuf in + interpret_directive lexbuf + (fun lexbuf -> + filter_directive + (Lexing.lexeme_end lexbuf) + ((pos, start_pos) :: acc) + lexbuf + + ) + (fun _token -> filter_directive pos acc lexbuf ) + | EOF -> (pos, Lexing.lexeme_end lexbuf) :: acc + | _ -> filter_directive pos acc lexbuf + + let filter_directive_from_lexbuf lexbuf = + List.rev (filter_directive 0 [] lexbuf ) + + let set_preprocessor init preprocess = + escaped_newlines := true; + preprocessor := Some (init, preprocess) + + +# 3467 "parsing/lexer.ml" + +end +module Parse : sig +#1 "parse.mli" +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. 
*) +(* *) +(**************************************************************************) + +(** Entry points in the parser *) + +val implementation : Lexing.lexbuf -> Parsetree.structure +val interface : Lexing.lexbuf -> Parsetree.signature +val toplevel_phrase : Lexing.lexbuf -> Parsetree.toplevel_phrase +val use_file : Lexing.lexbuf -> Parsetree.toplevel_phrase list +val core_type : Lexing.lexbuf -> Parsetree.core_type +val expression : Lexing.lexbuf -> Parsetree.expression +val pattern : Lexing.lexbuf -> Parsetree.pattern + +end = struct +#1 "parse.ml" +(**************************************************************************) +(* *) +(* OCaml *) +(* *) +(* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) +(* *) +(* Copyright 1996 Institut National de Recherche en Informatique et *) +(* en Automatique. *) +(* *) +(* All rights reserved. This file is distributed under the terms of *) +(* the GNU Lesser General Public License version 2.1, with the *) +(* special exception on linking described in the file LICENSE. *) +(* *) +(**************************************************************************) + +(* Entry points in the parser *) + +(* Skip tokens to the end of the phrase *) + +let rec skip_phrase lexbuf = + try + match Lexer.token lexbuf with + Parser.SEMISEMI | Parser.EOF -> () + | _ -> skip_phrase lexbuf + with + | Lexer.Error (Lexer.Unterminated_comment _, _) + | Lexer.Error (Lexer.Unterminated_string, _) + | Lexer.Error (Lexer.Unterminated_string_in_comment _, _) + | Lexer.Error (Lexer.Illegal_character _, _) -> skip_phrase lexbuf +;; + +let maybe_skip_phrase lexbuf = + if Parsing.is_current_lookahead Parser.SEMISEMI + || Parsing.is_current_lookahead Parser.EOF + then () + else skip_phrase lexbuf + +let wrap parsing_fun lexbuf = + try + Docstrings.init (); + Lexer.init (); + let ast = parsing_fun Lexer.token lexbuf in + Parsing.clear_parser(); + Docstrings.warn_bad_docstrings (); + ast + with + | Lexer.Error(Lexer.Illegal_character _, _) as err + when !Location.input_name = "//toplevel//"-> + skip_phrase lexbuf; + raise err + | Syntaxerr.Error _ as err + when !Location.input_name = "//toplevel//" -> + maybe_skip_phrase lexbuf; + raise err + | Parsing.Parse_error | Syntaxerr.Escape_error -> + let loc = Location.curr lexbuf in + if !Location.input_name = "//toplevel//" + then maybe_skip_phrase lexbuf; + raise(Syntaxerr.Error(Syntaxerr.Other loc)) + +let implementation = wrap Parser.implementation +and interface = wrap Parser.interface +and toplevel_phrase = wrap Parser.toplevel_phrase +and use_file = wrap Parser.use_file +and core_type = wrap Parser.parse_core_type +and expression = wrap Parser.parse_expression +and pattern = wrap Parser.parse_pattern + +end +module Bspack_main : sig +#1 "bspack_main.mli" +(* Copyright (C) 2015-2016 Bloomberg Finance L.P. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. 
+ * + * In addition to the permissions granted to you by the LGPL, you may combine + * or link a "work that uses the Library" with a publicly distributed version + * of this file to produce a combined library or application, then distribute + * that combined work under the terms of your choosing, with no requirement + * to comply with the obligations normally placed on you by section 4 of the + * LGPL version 3 (or the corresponding section of a later version of the LGPL + * should you choose to use a later version). + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) + + +val read_lines : string -> string -> string list +(* example + {[ + Line_process.read_lines "." "./tools/tools.mllib" + ]} + + FIXME: we can only concat (dir/file) not (dir/dir) + {[ + Filename.concat "/bb/x/" "/bb/x/";; + ]} +*) + + +end = struct +#1 "bspack_main.ml" +(* Copyright (C) 2015-2016 Bloomberg Finance L.P. + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * In addition to the permissions granted to you by the LGPL, you may combine + * or link a "work that uses the Library" with a publicly distributed version + * of this file to produce a combined library or application, then distribute + * that combined work under the terms of your choosing, with no requirement + * to comply with the obligations normally placed on you by section 4 of the + * LGPL version 3 (or the corresponding section of a later version of the LGPL + * should you choose to use a later version). + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*) + + +module L_string_set = Set.Make(String) +(* lexical order *) + +let (@>) (b, v) acc = + if b then + v :: acc + else + acc + +let preprocess_to_buffer fn (str : string) (oc : Buffer.t) : unit = + let lexbuf = Lexing.from_string str in + Lexer.init () ; + Location.init lexbuf fn; + let segments = + Lexer.filter_directive_from_lexbuf lexbuf in + Ext_list.iter segments + (fun (start, pos) -> + Buffer.add_substring oc str start (pos - start) + ) + +let verify_valid_ml (str : string) = + try + ignore @@ Parse.implementation (Lexing.from_string str); + true + with _ -> false + +(* same as {!preprocess_to_buffer} except writing to channel directly *) +let preprocess_string fn (str : string) oc = + let lexbuf = Lexing.from_string str in + Lexer.init () ; + Location.init lexbuf fn; + let segments = + Lexer.filter_directive_from_lexbuf lexbuf in + Ext_list.iter segments + (fun (start, pos) -> + output_substring oc str start (pos - start) + ) + +let (//) = Filename.concat + +let rec process_line cwd filedir line = + let line = Ext_string.trim line in + let len = String.length line in + if len = 0 then [] + else + match line.[0] with + | '#' -> [] + | _ -> + let segments = + Ext_string.split_by ~keep_empty:false (fun x -> x = ' ' || x = '\t' ) line + in + begin + match segments with + | ["include" ; path ] + -> + (* prerr_endline path; *) + read_lines cwd (filedir// path) + | [ x ] -> + let ml = filedir // x ^ ".ml" in + let mli = filedir // x ^ ".mli" in + let ml_exists, mli_exists = Sys.file_exists ml , Sys.file_exists mli in + if not ml_exists && not mli_exists then + begin + prerr_endline (filedir //x ^ " not exists"); + [] + end + else + (ml_exists, ml) @> (mli_exists , mli) @> [] + | _ + -> Ext_fmt.failwithf ~loc:__LOC__ "invalid line %s" line + end +and read_lines (cwd : string) (file : string) : string list = + Ext_list.fold_left (Ext_io.rev_lines_of_file file) [] (fun acc f -> + let filedir = Filename.dirname file in + let extras = process_line cwd filedir f in + Ext_list.append extras acc + ) +let implementation sourcefile = + let content = Ext_io.load_file sourcefile in + let ast = + let oldname = !Location.input_name in + Location.input_name := sourcefile ; + let lexbuf = Lexing.from_string content in + Location.init lexbuf sourcefile ; + match Parse.implementation lexbuf + with + | exception e -> + Location.input_name := oldname; + raise e + | ast -> + Location.input_name := oldname ; + ast + in + ast, content + +let interface sourcefile = + let content = Ext_io.load_file sourcefile in + let ast = + let oldname = !Location.input_name in + Location.input_name := sourcefile ; + let lexbuf = Lexing.from_string content in + Location.init lexbuf sourcefile; + match Parse.interface lexbuf with + | exception e -> + Location.input_name := oldname ; + raise e + | ast -> + Location.input_name := oldname ; + ast in + ast, content + +let emit_line_directive = ref true + +let emit out_chan name = + if !emit_line_directive then begin + output_string out_chan "#1 \""; + (*Note here we do this is mostly to avoid leaking user's + information, like private path, in the future, we can have + a flag + *) + output_string out_chan (Filename.basename name) ; + output_string out_chan "\"\n" + end +let decorate_module + ?(module_bound=true) + out_chan base mli_name ml_name mli_content ml_content = + if module_bound then begin + let base = Ext_string.capitalize_ascii base in + output_string out_chan "module "; + output_string out_chan base ; + output_string out_chan " : sig \n"; + emit out_chan mli_name ; + 
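(* A rough sketch of what this branch writes for a hypothetical pair
+       foo.mli/foo.ml (the module name [Foo] and file names are illustrative only):
+
+         module Foo : sig
+           #1 "foo.mli"
+           ...preprocessed foo.mli...
+         end = struct
+           #1 "foo.ml"
+           ...preprocessed foo.ml...
+         end
+
+       [preprocess_string] below copies only the segments returned by
+       [Lexer.filter_directive_from_lexbuf], so #if/#else/#end blocks are
+       evaluated at pack time instead of being copied through verbatim. *)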
preprocess_string mli_name mli_content out_chan; + output_string out_chan "\nend = struct\n"; + emit out_chan ml_name ; + preprocess_string ml_name ml_content out_chan; + output_string out_chan "\nend\n" + end + else + begin + output_string out_chan "include (struct\n"; + emit out_chan ml_name ; + preprocess_string ml_name ml_content out_chan; + output_string out_chan "\nend : sig \n"; + emit out_chan mli_name ; + preprocess_string mli_name mli_content out_chan; + output_string out_chan "\nend)"; + end + +let decorate_module_only + ?(check : unit option) + ?(module_bound=true) + out_chan base ml_name ml_content = + if module_bound then begin + let base = Ext_string.capitalize_ascii base in + output_string out_chan "module "; + output_string out_chan base ; + output_string out_chan "\n= struct\n" + end; + emit out_chan ml_name; + if check <> None then + let buf = Buffer.create 2000 in + preprocess_to_buffer ml_name ml_content buf; + let str = Buffer.contents buf in + if not @@ verify_valid_ml str then + failwith (ml_name ^ " can not be a valid ml module") + else + output_string out_chan str + else + preprocess_string ml_name ml_content out_chan ; + if module_bound then + output_string out_chan "\nend\n" + +(** recursive module is not good for performance, here module type only + has to be pure types otherwise it would not compile any way +*) +let decorate_interface_only out_chan base mli_name mli_content = + output_string out_chan "(** Interface as module *)\n"; + decorate_module_only out_chan base mli_name mli_content ~check:() + +(** set mllib *) +let mllib = ref None +let set_string s = mllib := Some s + +let batch_files = ref [] +let collect_file name = + batch_files := name :: !batch_files + +let output_file = ref None +let set_output file = output_file := Some file +let header_option = ref false + +type main_module = { modulename : string ; export : bool } + +(** set bs-main*) +let main_module : main_module option ref = ref None + +let set_main_module modulename = + main_module := Some {modulename; export = false } + +let set_main_export modulename = + main_module := Some {modulename; export = true } + +let set_mllib_file = ref false + +let prelude = ref None +let set_prelude f = + if Sys.file_exists f then + prelude := Some f + else raise (Arg.Bad ("file " ^ f ^ " don't exist ")) +let prelude_str = ref None +let set_prelude_str f = prelude_str := Some f + +(** + {[ + # process_include "ghsogh?a,b,c";; + - : [> `Dir of string | `Dir_with_excludes of string * string list ] = + `Dir_with_excludes ("ghsogh", ["a"; "b"; "c"]) + # process_include "ghsogh?a";; + - : [> `Dir of string | `Dir_with_excludes of string * string list ] = + `Dir_with_excludes ("ghsogh", ["a"]) + ]} +*) +(* type dir_spec = *) +(* [ `Dir of string | `Dir_with_excludes of string * string list ] *) + +let cwd = Sys.getcwd () + +let normalize s = + Ext_path.normalize_absolute_path (Ext_path.combine cwd s ) + +let process_include s : Ast_extract.dir_spec = + let i = Ext_string.rindex_neg s '?' 
in + if i < 0 then + { dir = normalize s; excludes = []} + else + let dir = String.sub s 0 i in + { dir = normalize dir; + excludes = Ext_string.split + (String.sub s (i + 1) (String.length s - i - 1) ) + ','} + +let deduplicate_dirs (xs : Ast_extract.dir_spec list) = + let set : Ast_extract.dir_spec Hash_string.t = Hash_string.create 64 in + List.filter (fun ({Ast_extract.dir ; excludes = new_excludes } as y) -> + match Hash_string.find_opt set dir with + | None -> + Hash_string.add set dir y; + true + | Some x -> x.excludes <- new_excludes @ x.excludes ; false + ) xs + +let includes : _ list ref = ref [] + +let add_include dir = + includes := process_include dir :: !includes + +let exclude_modules = ref [] +let add_exclude module_ = + exclude_modules := module_ :: !exclude_modules +let no_implicit_include = ref false + +let alias_map = Hash_string.create 0 +let alias_map_rev = Hash_string.create 0 + +(** + {[ + A -> B + A1 -> B + ]} + print + {[ + + module A = B + module A1 = B + ]} + We don't suppport + {[ + A -> B + A -> C + ]} +*) +let alias_module s = + match Ext_string.split s '=' with + | [a;b] -> + (* Error checking later*) + if Hash_string.mem alias_map a then + raise (Arg.Bad ("duplicated module alias " ^ a)) + else + begin + Hash_string.add alias_map_rev b a; + Hash_string.add alias_map a b + end + | _ -> raise (Arg.Bad "invalid module alias format like A=B") + +let undefine_symbol (s : string) = + Lexer.remove_directive_built_in_value s +let define_symbol (s : string) = + match Ext_string.split ~keep_empty:true s '=' with + | [key; v] -> + if not @@ Lexer.define_key_value key v then + raise (Arg.Bad ("illegal definition: " ^ s)) + | _ -> raise (Arg.Bad ("illegal definition: " ^ s)) + +let specs : (string * Arg.spec * string) list = + [ "-bs-loc", (Arg.Set emit_line_directive), + " Add # linum filename directive"; + "-bs-no-implicit-include", (Arg.Set no_implicit_include), + " Not including cwd as search path"; + "-prelude-str", (Arg.String set_prelude_str), + " Set a prelude string, (before -prelude) option" ; + "-module-alias", (Arg.String alias_module ), + " -module-alis A=B, whenever need A,replace it with B" ; + "-prelude", (Arg.String set_prelude), + " Set a prelude file, literally copy into the beginning"; + "-bs-mllib", (Arg.String set_string), + " Files collected from mllib"; + "-bs-MD", (Arg.Set set_mllib_file), + " Log files into mllib(only effective under -bs-main mode)"; + "-o", (Arg.String set_output), + " Set output file (default to stdout)" ; + "-with-header", (Arg.Set header_option), + " with header of time stamp" ; + "-bs-exclude-I", (Arg.String add_exclude), + " don't read and pack such modules from -I (in the future, we should detect conflicts in mllib or commandline) " + ; + "-bs-main", (Arg.String set_main_module), + " set the main entry module"; + "-main-export", (Arg.String set_main_export), + " Set the main module and respect its exports"; + "-I", (Arg.String add_include), + " add dir to search path"; + "-U", Arg.String undefine_symbol, + " Undefine a symbol when bspacking"; + "-D", Arg.String define_symbol, + " Define a symbol when bspacking" + ] + + +let anonymous filename = + collect_file filename + +let usage = "Usage: bspack \nOptions are:" +let () = + try + (Arg.parse specs anonymous usage; + let command_files = !batch_files in + let mllib = !mllib in + (* emit code now *) + let out_chan = + lazy (match !output_file with + | None -> stdout + | Some file -> open_out_bin file) in + let emit_header out_chan = + let local_time = Unix.(localtime 
(gettimeofday ())) in + (if !header_option + then + output_string out_chan + (Printf.sprintf "(** Generated by bspack %02d/%02d-%02d:%02d *)\n" + (local_time.tm_mon + 1) local_time.tm_mday + local_time.tm_hour local_time.tm_min)); + (match !prelude_str with + | None -> () + | Some s -> output_string out_chan s; output_string out_chan "\n" ); + match !prelude with + | None -> () + | Some f -> + begin + output_string out_chan (Ext_io.load_file f); + output_string out_chan "\n" + end + in + let close_out_chan out_chan = + (if out_chan != stdout then close_out out_chan) in + let files = + Ext_list.append (match mllib with + | Some s + -> read_lines (Sys.getcwd ()) s + | None -> []) command_files in + + match !main_module, files with + | Some _ , _ :: _ + -> + Ext_fmt.failwithf ~loc:__LOC__ + "-bs-main conflicts with other flags [ %s ]" + (String.concat ", " files) + | Some {modulename = main_module ; export }, [] + -> + let excludes = + match !exclude_modules with + | [] -> [] + | xs -> + Ext_list.flat_map xs (fun x -> [x ^ ".ml" ; x ^ ".mli"] ) in + let extra_dirs = + deduplicate_dirs @@ + if not !no_implicit_include then {Ast_extract.dir = cwd; excludes = []} :: !includes + else !includes + in + let ast_table, tasks = + Ast_extract.collect_from_main ~excludes ~extra_dirs ~alias_map + Format.err_formatter + (fun _ppf sourcefile -> lazy (implementation sourcefile)) + (fun _ppf sourcefile -> lazy (interface sourcefile)) + (fun (lazy (stru, _)) -> stru) + (fun (lazy (sigi, _)) -> sigi) + main_module + in + if Queue.is_empty tasks then + raise (Arg.Bad (main_module ^ " does not pull in any libs, maybe wrong input")) + ; + let out_chan = Lazy.force out_chan in + let collect_module_by_filenames = !set_mllib_file in + let collection_modules = Queue.create () in + let count = ref 0 in + let task_length = Queue.length tasks in + emit_header out_chan ; + begin + Ast_extract.handle_queue tasks ast_table + (fun base ml_name (lazy(_, ml_content)) -> + incr count ; + if collect_module_by_filenames then + Queue.add ml_name collection_modules; + let module_bound = not export || task_length > !count in + decorate_module_only ~module_bound out_chan base ml_name ml_content; + let aliased = Ext_string.capitalize_ascii base in + Hash_string.find_all alias_map_rev aliased + |> List.iter + (fun s -> output_string out_chan (Printf.sprintf "module %s = %s \n" s aliased)) + + ) + (fun base mli_name (lazy (_, mli_content)) -> + incr count ; + if collect_module_by_filenames then + Queue.add mli_name collection_modules; + + decorate_interface_only out_chan base mli_name mli_content; + let aliased = Ext_string.capitalize_ascii base in + Hash_string.find_all alias_map_rev aliased + |> List.iter + (fun s -> output_string out_chan (Printf.sprintf "module %s = %s \n" s aliased)) + + ) + (fun base mli_name ml_name (lazy (_, mli_content)) (lazy (_, ml_content)) + -> + incr count; + (*TODO: assume mli_name, ml_name are in the same dir, + Needs to be addressed + *) + if collect_module_by_filenames then + begin + Queue.add ml_name collection_modules; + Queue.add mli_name collection_modules + end; + (** if export + print it as + {[inclue (struct end : sig end)]} + *) + let module_bound = not export || task_length > !count in + decorate_module ~module_bound out_chan base mli_name ml_name mli_content ml_content; + let aliased = (Ext_string.capitalize_ascii base) in + Hash_string.find_all alias_map_rev aliased + |> List.iter + (fun s -> output_string out_chan (Printf.sprintf "module %s = %s \n" s aliased)) + + ) + end; + 
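(* All queued units have been emitted at this point.  Note the
+         [module_bound] flag computed above: with [-main-export] the last
+         unit in the queue (the main module itself) is the only one not
+         wrapped in its own [module Foo ...] binding; it is emitted as
+         [include (struct ... end : sig ... end)] (or as plain toplevel
+         items when it has no interface), so its exports become the top
+         level of the packed file. *)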
close_out_chan out_chan; + begin + if !set_mllib_file then + match !output_file with + | None -> () + | Some file -> + let output = file ^ ".d" in + let sorted_dep_queue = + Queue.fold + (fun acc collection_module -> + L_string_set.add + ( + (*FIXME: now we normalized path, + we need a beautiful output too for relative path + The relative path should be also be normalized.. + *) + Filename.concat + (Ext_path.rel_normalized_absolute_path + ~from:cwd + (Filename.dirname collection_module) + ) (Filename.basename collection_module) + + ) + (* collection_module *) + acc + ) L_string_set.empty collection_modules in + Ext_io.write_file + output + ( + L_string_set.fold + (fun dep acc -> + acc ^ + dep ^ + " " + ) sorted_dep_queue + (file ^ ": " ) + (* collection_modules *) + ) + end + | None, _ -> + let ast_table = + Ast_extract.collect_ast_map + Format.err_formatter files + (fun _ppf sourcefile -> implementation sourcefile + ) + (fun _ppf sourcefile -> interface sourcefile) in + let tasks = Ast_extract.sort fst fst ast_table in + let out_chan = (Lazy.force out_chan) in + emit_header out_chan ; + Ast_extract.handle_queue tasks ast_table + (fun base ml_name (_, ml_content) -> decorate_module_only out_chan base ml_name ml_content) + (fun base mli_name (_, mli_content) -> decorate_interface_only out_chan base mli_name mli_content ) + (fun base mli_name ml_name (_, mli_content) (_, ml_content) + -> decorate_module out_chan base mli_name ml_name mli_content ml_content); + close_out_chan out_chan + ) + with x -> + begin + Location.report_exception Format.err_formatter x ; + exit 2 + end + +end diff --git a/res_syntax/scripts/test.sh b/res_syntax/scripts/test.sh new file mode 100755 index 0000000000..54d1c07f45 --- /dev/null +++ b/res_syntax/scripts/test.sh @@ -0,0 +1,120 @@ +#!/bin/bash + +# Note: +# 1. This was converted from zsh to bash because zsh is not available on Linux and Windows Github action runners. +# 2. macOS still has bash 3 and therefore no globstar ("**") support. +# Therefore we need to use find + temp files for the file lists. + +function exp { + echo "$(dirname $1)/expected/$(basename $1).txt" +} + +taskCount=0 +function maybeWait { + let taskCount+=1 + # spawn in batch of 20 processes + [[ $((taskCount % 20)) = 0 ]] && wait +} + +rm -rf temp +mkdir temp + +# parsing +find tests/parsing/{errors,infiniteLoops,recovery} -name "*.res" -o -name "*.resi" >temp/files.txt +while read file; do + rescript -recover -print ml $file &> $(exp $file) & maybeWait +done temp/files.txt +while read file; do + rescript -print ml $file &> $(exp $file) & maybeWait +done temp/files.txt +while read file; do + rescript $file &> $(exp $file) & maybeWait +done temp/files.txt +while read file; do + rescript -jsx-version 4 -jsx-mode "automatic" $file &> $(exp $file) & maybeWait +done temp/diff.txt +diff=$(cat temp/diff.txt) +if [[ $diff = "" ]]; then + printf "${successGreen}✅ No unstaged tests difference.${reset}\n" +else + printf "${warningYellow}⚠️ There are unstaged differences in tests/! 
Did you break a test?\n${diff}\n${reset}" + rm -r temp/ + exit 1 +fi + +# roundtrip tests +if [[ $ROUNDTRIP_TEST = 1 ]]; then + echo "Running roundtrip tests…" + roundtripTestsResult="temp/result.txt" + touch $roundtripTestsResult + + find tests/{idempotency,printer} -name "*.res" -o -name "*.resi" -o -name "*.ml" -o -name "*.mli" >temp/files.txt + while read file; do { + mkdir -p temp/$(dirname $file) + sexpAst1=temp/$file.sexp + sexpAst2=temp/$file.2.sexp + rescript1=temp/$file.res + rescript2=temp/$file.2.res + + case $file in + *.ml ) class="ml" ; resIntf="" ;; + *.mli ) class="ml" ; resIntf=-interface ;; + *.res ) class="res"; resIntf="" ;; + *.resi ) class="res"; resIntf=-interface ;; + esac + + rescript $resIntf -parse $class -print sexp $file > $sexpAst1 + rescript $resIntf -parse $class -print res $file > $rescript1 + + rescript $resIntf -print sexp $rescript1 > $sexpAst2 + rescript $resIntf -print res $rescript1 > $rescript2 + + diff --unified $sexpAst1 $sexpAst2 + [[ "$?" = 1 ]] && echo 1 > $roundtripTestsResult + diff --unified $rescript1 $rescript2 + [[ "$?" = 1 ]] && echo 1 > $roundtripTestsResult + } & maybeWait + done Longident.unflatten with + | Some longident -> longident + | None -> l2 + +(* TODO: support nested open's ? *) +let rec rewritePpatOpen longidentOpen pat = + match pat.Parsetree.ppat_desc with + | Ppat_array (first :: rest) -> + (* Color.[Red, Blue, Green] -> [Color.Red, Blue, Green] *) + { + pat with + ppat_desc = Ppat_array (rewritePpatOpen longidentOpen first :: rest); + } + | Ppat_tuple (first :: rest) -> + (* Color.(Red, Blue, Green) -> (Color.Red, Blue, Green) *) + { + pat with + ppat_desc = Ppat_tuple (rewritePpatOpen longidentOpen first :: rest); + } + | Ppat_construct + ( ({txt = Longident.Lident "::"} as listConstructor), + Some ({ppat_desc = Ppat_tuple (pat :: rest)} as element) ) -> + (* Color.(list[Red, Blue, Green]) -> list[Color.Red, Blue, Green] *) + { + pat with + ppat_desc = + Ppat_construct + ( listConstructor, + Some + { + element with + ppat_desc = + Ppat_tuple (rewritePpatOpen longidentOpen pat :: rest); + } ); + } + | Ppat_construct (({txt = constructor} as longidentLoc), optPattern) -> + (* Foo.(Bar(a)) -> Foo.Bar(a) *) + { + pat with + ppat_desc = + Ppat_construct + ( {longidentLoc with txt = concatLongidents longidentOpen constructor}, + optPattern ); + } + | Ppat_record ((({txt = lbl} as longidentLoc), firstPat) :: rest, flag) -> + (* Foo.{x} -> {Foo.x: x} *) + let firstRow = + ({longidentLoc with txt = concatLongidents longidentOpen lbl}, firstPat) + in + {pat with ppat_desc = Ppat_record (firstRow :: rest, flag)} + | Ppat_or (pat1, pat2) -> + { + pat with + ppat_desc = + Ppat_or + ( rewritePpatOpen longidentOpen pat1, + rewritePpatOpen longidentOpen pat2 ); + } + | Ppat_constraint (pattern, typ) -> + { + pat with + ppat_desc = Ppat_constraint (rewritePpatOpen longidentOpen pattern, typ); + } + | Ppat_type ({txt = constructor} as longidentLoc) -> + { + pat with + ppat_desc = + Ppat_type + {longidentLoc with txt = concatLongidents longidentOpen constructor}; + } + | Ppat_lazy p -> + {pat with ppat_desc = Ppat_lazy (rewritePpatOpen longidentOpen p)} + | Ppat_exception p -> + {pat with ppat_desc = Ppat_exception (rewritePpatOpen longidentOpen p)} + | _ -> pat + +let rec rewriteReasonFastPipe expr = + let open Parsetree in + match expr.pexp_desc with + | Pexp_apply + ( { + pexp_desc = + Pexp_apply + ( ({pexp_desc = Pexp_ident {txt = Longident.Lident "|."}} as op), + [(Asttypes.Nolabel, lhs); (Nolabel, rhs)] ); + pexp_attributes = 
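(* attributes recorded on the inner [|.] application; they are merged onto the rewritten left-hand side below *)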
subAttrs; + }, + args ) -> + let rhsLoc = {rhs.pexp_loc with loc_end = expr.pexp_loc.loc_end} in + let newLhs = + let expr = rewriteReasonFastPipe lhs in + {expr with pexp_attributes = List.concat [lhs.pexp_attributes; subAttrs]} + in + let newRhs = + { + pexp_loc = rhsLoc; + pexp_attributes = []; + pexp_desc = Pexp_apply (rhs, args); + } + in + let allArgs = (Asttypes.Nolabel, newLhs) :: [(Asttypes.Nolabel, newRhs)] in + {expr with pexp_desc = Pexp_apply (op, allArgs)} + | _ -> expr + +let makeReasonArityMapper ~forPrinter = + let open Ast_mapper in + { + default_mapper with + expr = + (fun mapper expr -> + match expr with + (* Don't mind this case, Reason doesn't handle this. *) + (* | {pexp_desc = Pexp_variant (lbl, args); pexp_loc; pexp_attributes} -> *) + (* let newArgs = match args with *) + (* | (Some {pexp_desc = Pexp_tuple [{pexp_desc = Pexp_tuple _ } as sp]}) as args-> *) + (* if forPrinter then args else Some sp *) + (* | Some {pexp_desc = Pexp_tuple [sp]} -> Some sp *) + (* | _ -> args *) + (* in *) + (* default_mapper.expr mapper {pexp_desc=Pexp_variant(lbl, newArgs); pexp_loc; pexp_attributes} *) + | {pexp_desc = Pexp_construct (lid, args); pexp_loc; pexp_attributes} -> + let newArgs = + match args with + | Some {pexp_desc = Pexp_tuple [({pexp_desc = Pexp_tuple _} as sp)]} + as args -> + if forPrinter then args else Some sp + | Some {pexp_desc = Pexp_tuple [sp]} -> Some sp + | _ -> args + in + default_mapper.expr mapper + { + pexp_desc = Pexp_construct (lid, newArgs); + pexp_loc; + pexp_attributes; + } + | expr -> default_mapper.expr mapper (rewriteReasonFastPipe expr)); + pat = + (fun mapper pattern -> + match pattern with + (* Don't mind this case, Reason doesn't handle this. *) + (* | {ppat_desc = Ppat_variant (lbl, args); ppat_loc; ppat_attributes} -> *) + (* let newArgs = match args with *) + (* | (Some {ppat_desc = Ppat_tuple [{ppat_desc = Ppat_tuple _} as sp]}) as args -> *) + (* if forPrinter then args else Some sp *) + (* | Some {ppat_desc = Ppat_tuple [sp]} -> Some sp *) + (* | _ -> args *) + (* in *) + (* default_mapper.pat mapper {ppat_desc = Ppat_variant (lbl, newArgs); ppat_loc; ppat_attributes;} *) + | {ppat_desc = Ppat_construct (lid, args); ppat_loc; ppat_attributes} -> + let new_args = + match args with + | Some {ppat_desc = Ppat_tuple [({ppat_desc = Ppat_tuple _} as sp)]} + as args -> + if forPrinter then args else Some sp + | Some {ppat_desc = Ppat_tuple [sp]} -> Some sp + | _ -> args + in + default_mapper.pat mapper + { + ppat_desc = Ppat_construct (lid, new_args); + ppat_loc; + ppat_attributes; + } + | x -> default_mapper.pat mapper x); + } + +let escapeTemplateLiteral s = + let len = String.length s in + let b = Buffer.create len in + let i = ref 0 in + while !i < len do + let c = (String.get [@doesNotRaise]) s !i in + if c = '`' then ( + Buffer.add_char b '\\'; + Buffer.add_char b '`'; + incr i) + else if c = '$' then + if !i + 1 < len then ( + let c2 = (String.get [@doesNotRaise]) s (!i + 1) in + if c2 = '{' then ( + Buffer.add_char b '\\'; + Buffer.add_char b '$'; + Buffer.add_char b '{') + else ( + Buffer.add_char b c; + Buffer.add_char b c2); + i := !i + 2) + else ( + Buffer.add_char b c; + incr i) + else if c = '\\' then ( + Buffer.add_char b '\\'; + Buffer.add_char b '\\'; + incr i) + else ( + Buffer.add_char b c; + incr i) + done; + Buffer.contents b + +let escapeStringContents s = + let len = String.length s in + let b = Buffer.create len in + + let i = ref 0 in + + while !i < len do + let c = String.unsafe_get s !i in + if c = '\\' then ( + 
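(* copy the backslash and the character it escapes through unchanged *)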
incr i; + Buffer.add_char b c; + let c = String.unsafe_get s !i in + if !i < len then + let () = Buffer.add_char b c in + incr i + else ()) + else if c = '"' then ( + Buffer.add_char b '\\'; + Buffer.add_char b c; + incr i) + else ( + Buffer.add_char b c; + incr i) + done; + Buffer.contents b + +let looksLikeRecursiveTypeDeclaration typeDeclaration = + let open Parsetree in + let name = typeDeclaration.ptype_name.txt in + let rec checkKind kind = + match kind with + | Ptype_abstract | Ptype_open -> false + | Ptype_variant constructorDeclarations -> + List.exists checkConstructorDeclaration constructorDeclarations + | Ptype_record labelDeclarations -> + List.exists checkLabelDeclaration labelDeclarations + and checkConstructorDeclaration constrDecl = + checkConstructorArguments constrDecl.pcd_args + || + match constrDecl.pcd_res with + | Some typexpr -> checkTypExpr typexpr + | None -> false + and checkLabelDeclaration labelDeclaration = + checkTypExpr labelDeclaration.pld_type + and checkConstructorArguments constrArg = + match constrArg with + | Pcstr_tuple types -> List.exists checkTypExpr types + | Pcstr_record labelDeclarations -> + List.exists checkLabelDeclaration labelDeclarations + and checkTypExpr typ = + match typ.ptyp_desc with + | Ptyp_any -> false + | Ptyp_var _ -> false + | Ptyp_object (fields, _) -> List.exists checkObjectField fields + | Ptyp_class _ -> false + | Ptyp_package _ -> false + | Ptyp_extension _ -> false + | Ptyp_arrow (_lbl, typ1, typ2) -> checkTypExpr typ1 || checkTypExpr typ2 + | Ptyp_tuple types -> List.exists checkTypExpr types + | Ptyp_constr ({txt = longident}, types) -> + (match longident with + | Lident ident -> ident = name + | _ -> false) + || List.exists checkTypExpr types + | Ptyp_alias (typ, _) -> checkTypExpr typ + | Ptyp_variant (rowFields, _, _) -> List.exists checkRowFields rowFields + | Ptyp_poly (_, typ) -> checkTypExpr typ + and checkObjectField field = + match field with + | Otag (_label, _attrs, typ) -> checkTypExpr typ + | Oinherit typ -> checkTypExpr typ + and checkRowFields rowField = + match rowField with + | Rtag (_, _, _, types) -> List.exists checkTypExpr types + | Rinherit typexpr -> checkTypExpr typexpr + and checkManifest manifest = + match manifest with + | Some typ -> checkTypExpr typ + | None -> false + in + checkKind typeDeclaration.ptype_kind + || checkManifest typeDeclaration.ptype_manifest + +let filterReasonRawLiteral attrs = + List.filter + (fun attr -> + match attr with + | {Location.txt = "reason.raw_literal"}, _ -> false + | _ -> true) + attrs + +let stringLiteralMapper stringData = + let isSameLocation l1 l2 = + let open Location in + l1.loc_start.pos_cnum == l2.loc_start.pos_cnum + in + let remainingStringData = stringData in + let open Ast_mapper in + { + default_mapper with + expr = + (fun mapper expr -> + match expr.pexp_desc with + | Pexp_constant (Pconst_string (_txt, None)) -> ( + match + List.find_opt + (fun (_stringData, stringLoc) -> + isSameLocation stringLoc expr.pexp_loc) + remainingStringData + with + | Some (stringData, _) -> + let stringData = + let attr = + List.find_opt + (fun attr -> + match attr with + | {Location.txt = "reason.raw_literal"}, _ -> true + | _ -> false) + expr.pexp_attributes + in + match attr with + | Some + ( _, + PStr + [ + { + pstr_desc = + Pstr_eval + ( { + pexp_desc = + Pexp_constant (Pconst_string (raw, _)); + }, + _ ); + }; + ] ) -> + raw + | _ -> + (String.sub [@doesNotRaise]) stringData 1 + (String.length stringData - 2) + in + { + expr with + pexp_attributes = 
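+              (* the reason.raw_literal attribute is dropped once the original source text has been restored *)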
filterReasonRawLiteral expr.pexp_attributes; + pexp_desc = Pexp_constant (Pconst_string (stringData, None)); + } + | None -> default_mapper.expr mapper expr) + | _ -> default_mapper.expr mapper expr); + } + +let hasUncurriedAttribute attrs = + List.exists + (fun attr -> + match attr with + | {Asttypes.txt = "bs"}, Parsetree.PStr [] -> true + | _ -> false) + attrs + +let templateLiteralAttr = (Location.mknoloc "res.template", Parsetree.PStr []) + +let normalize = + let open Ast_mapper in + { + default_mapper with + extension = + (fun mapper ext -> + match ext with + | id, payload -> + ( {id with txt = Res_printer.convertBsExtension id.txt}, + default_mapper.payload mapper payload )); + attribute = + (fun mapper attr -> + match attr with + | id, payload -> + ( {id with txt = Res_printer.convertBsExternalAttribute id.txt}, + default_mapper.payload mapper payload )); + attributes = + (fun mapper attrs -> + attrs + |> List.filter (fun attr -> + match attr with + | ( { + Location.txt = + ( "reason.preserve_braces" | "explicit_arity" + | "implicity_arity" ); + }, + _ ) -> + false + | _ -> true) + |> default_mapper.attributes mapper); + pat = + (fun mapper p -> + match p.ppat_desc with + | Ppat_open ({txt = longidentOpen}, pattern) -> + let p = rewritePpatOpen longidentOpen pattern in + default_mapper.pat mapper p + | Ppat_constant (Pconst_string (txt, tag)) -> + let newTag = + match tag with + (* transform {|abc|} into {js|abc|js}, because `template string` is interpreted as {js||js} *) + | Some "" -> Some "js" + | tag -> tag + in + let s = Parsetree.Pconst_string (escapeTemplateLiteral txt, newTag) in + { + p with + ppat_attributes = + templateLiteralAttr :: mapper.attributes mapper p.ppat_attributes; + ppat_desc = Ppat_constant s; + } + | _ -> default_mapper.pat mapper p); + typ = + (fun mapper typ -> + match typ.ptyp_desc with + | Ptyp_constr + ({txt = Longident.Ldot (Longident.Lident "Js", "t")}, [arg]) -> + (* Js.t({"a": b}) -> {"a": b} + Since compiler >9.0.1 objects don't need Js.t wrapping anymore *) + mapper.typ mapper arg + | _ -> default_mapper.typ mapper typ); + expr = + (fun mapper expr -> + match expr.pexp_desc with + | Pexp_constant (Pconst_string (txt, None)) -> + let raw = escapeStringContents txt in + let s = Parsetree.Pconst_string (raw, None) in + {expr with pexp_desc = Pexp_constant s} + | Pexp_constant (Pconst_string (txt, tag)) -> + let newTag = + match tag with + (* transform {|abc|} into {js|abc|js}, we want to preserve unicode by default *) + | Some "" -> Some "js" + | tag -> tag + in + let s = Parsetree.Pconst_string (escapeTemplateLiteral txt, newTag) in + { + expr with + pexp_attributes = + templateLiteralAttr + :: mapper.attributes mapper expr.pexp_attributes; + pexp_desc = Pexp_constant s; + } + | Pexp_apply + ( callExpr, + [ + ( Nolabel, + ({ + pexp_desc = + Pexp_construct ({txt = Longident.Lident "()"}, None); + pexp_attributes = []; + } as unitExpr) ); + ] ) + when hasUncurriedAttribute expr.pexp_attributes -> + { + expr with + pexp_attributes = mapper.attributes mapper expr.pexp_attributes; + pexp_desc = + Pexp_apply + ( callExpr, + [ + ( Nolabel, + { + unitExpr with + pexp_loc = {unitExpr.pexp_loc with loc_ghost = true}; + } ); + ] ); + } + | Pexp_function cases -> + let loc = + match (cases, List.rev cases) with + | first :: _, last :: _ -> + { + first.pc_lhs.ppat_loc with + loc_end = last.pc_rhs.pexp_loc.loc_end; + } + | _ -> Location.none + in + let var = + { + Parsetree.ppat_loc = Location.none; + ppat_attributes = []; + ppat_desc = Ppat_var 
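+                (* synthetic parameter: [function | ...] is rewritten as [fun x -> match x with ...] below *)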
(Location.mknoloc "x"); + } + in + { + pexp_loc = loc; + pexp_attributes = []; + pexp_desc = + Pexp_fun + ( Asttypes.Nolabel, + None, + var, + { + pexp_loc = loc; + pexp_attributes = []; + pexp_desc = + Pexp_match + ( { + pexp_loc = Location.none; + pexp_attributes = []; + pexp_desc = + Pexp_ident + (Location.mknoloc (Longident.Lident "x")); + }, + mapper.cases mapper cases ); + } ); + } + | Pexp_apply + ( {pexp_desc = Pexp_ident {txt = Longident.Lident "!"}}, + [(Asttypes.Nolabel, operand)] ) -> + (* turn `!foo` into `foo.contents` *) + { + pexp_loc = expr.pexp_loc; + pexp_attributes = expr.pexp_attributes; + pexp_desc = + Pexp_field + ( mapper.expr mapper operand, + Location.mknoloc (Longident.Lident "contents") ); + } + | Pexp_apply + ( {pexp_desc = Pexp_ident {txt = Longident.Lident "##"}}, + [ + (Asttypes.Nolabel, lhs); + ( Nolabel, + { + pexp_desc = + ( Pexp_constant (Pconst_string (txt, None)) + | Pexp_ident {txt = Longident.Lident txt} ); + pexp_loc = labelLoc; + } ); + ] ) -> + let label = Location.mkloc txt labelLoc in + { + pexp_loc = expr.pexp_loc; + pexp_attributes = expr.pexp_attributes; + pexp_desc = Pexp_send (mapper.expr mapper lhs, label); + } + | Pexp_match + ( condition, + [ + { + pc_lhs = + { + ppat_desc = + Ppat_construct ({txt = Longident.Lident "true"}, None); + }; + pc_rhs = thenExpr; + }; + { + pc_lhs = + { + ppat_desc = + Ppat_construct ({txt = Longident.Lident "false"}, None); + }; + pc_rhs = elseExpr; + }; + ] ) -> + let ternaryMarker = + (Location.mknoloc "ns.ternary", Parsetree.PStr []) + in + { + Parsetree.pexp_loc = expr.pexp_loc; + pexp_desc = + Pexp_ifthenelse + ( mapper.expr mapper condition, + mapper.expr mapper thenExpr, + Some (mapper.expr mapper elseExpr) ); + pexp_attributes = ternaryMarker :: expr.pexp_attributes; + } + | _ -> default_mapper.expr mapper expr); + structure_item = + (fun mapper structureItem -> + match structureItem.pstr_desc with + (* heuristic: if we have multiple type declarations, mark them recursive *) + | Pstr_type ((Recursive as recFlag), typeDeclarations) -> + let flag = + match typeDeclarations with + | [td] -> + if looksLikeRecursiveTypeDeclaration td then Asttypes.Recursive + else Asttypes.Nonrecursive + | _ -> recFlag + in + { + structureItem with + pstr_desc = + Pstr_type + ( flag, + List.map + (fun typeDeclaration -> + default_mapper.type_declaration mapper typeDeclaration) + typeDeclarations ); + } + | _ -> default_mapper.structure_item mapper structureItem); + signature_item = + (fun mapper signatureItem -> + match signatureItem.psig_desc with + (* heuristic: if we have multiple type declarations, mark them recursive *) + | Psig_type ((Recursive as recFlag), typeDeclarations) -> + let flag = + match typeDeclarations with + | [td] -> + if looksLikeRecursiveTypeDeclaration td then Asttypes.Recursive + else Asttypes.Nonrecursive + | _ -> recFlag + in + { + signatureItem with + psig_desc = + Psig_type + ( flag, + List.map + (fun typeDeclaration -> + default_mapper.type_declaration mapper typeDeclaration) + typeDeclarations ); + } + | _ -> default_mapper.signature_item mapper signatureItem); + value_binding = + (fun mapper vb -> + match vb with + | { + pvb_pat = {ppat_desc = Ppat_var _} as pat; + pvb_expr = + {pexp_loc = expr_loc; pexp_desc = Pexp_constraint (expr, typ)}; + } + when expr_loc.loc_ghost -> + (* let t: t = (expr : t) -> let t: t = expr *) + let typ = default_mapper.typ mapper typ in + let pat = default_mapper.pat mapper pat in + let expr = mapper.expr mapper expr in + let newPattern = + { + Parsetree.ppat_loc 
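+            (* the constrained pattern spans from the original pattern up to the end of the type annotation *)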
= + {pat.ppat_loc with loc_end = typ.ptyp_loc.loc_end}; + ppat_attributes = []; + ppat_desc = Ppat_constraint (pat, typ); + } + in + { + vb with + pvb_pat = newPattern; + pvb_expr = expr; + pvb_attributes = default_mapper.attributes mapper vb.pvb_attributes; + } + | { + pvb_pat = + {ppat_desc = Ppat_constraint (pat, {ptyp_desc = Ptyp_poly ([], _)})}; + pvb_expr = + {pexp_loc = expr_loc; pexp_desc = Pexp_constraint (expr, typ)}; + } + when expr_loc.loc_ghost -> + (* let t: . t = (expr : t) -> let t: t = expr *) + let typ = default_mapper.typ mapper typ in + let pat = default_mapper.pat mapper pat in + let expr = mapper.expr mapper expr in + let newPattern = + { + Parsetree.ppat_loc = + {pat.ppat_loc with loc_end = typ.ptyp_loc.loc_end}; + ppat_attributes = []; + ppat_desc = Ppat_constraint (pat, typ); + } + in + { + vb with + pvb_pat = newPattern; + pvb_expr = expr; + pvb_attributes = default_mapper.attributes mapper vb.pvb_attributes; + } + | _ -> default_mapper.value_binding mapper vb); + } + +let normalizeReasonArityStructure ~forPrinter s = + let mapper = makeReasonArityMapper ~forPrinter in + mapper.Ast_mapper.structure mapper s + +let normalizeReasonAritySignature ~forPrinter s = + let mapper = makeReasonArityMapper ~forPrinter in + mapper.Ast_mapper.signature mapper s + +let structure s = normalize.Ast_mapper.structure normalize s +let signature s = normalize.Ast_mapper.signature normalize s + +let replaceStringLiteralStructure stringData structure = + let mapper = stringLiteralMapper stringData in + mapper.Ast_mapper.structure mapper structure + +let replaceStringLiteralSignature stringData signature = + let mapper = stringLiteralMapper stringData in + mapper.Ast_mapper.signature mapper signature diff --git a/res_syntax/src/res_ast_conversion.mli b/res_syntax/src/res_ast_conversion.mli new file mode 100644 index 0000000000..8c868f44b0 --- /dev/null +++ b/res_syntax/src/res_ast_conversion.mli @@ -0,0 +1,24 @@ +(* The purpose of this module is to convert a parsetree coming from the reason + * or ocaml parser, into something consumable by the rescript printer. *) + +(* Ocaml/Reason parser interprets string literals: i.e. escape sequences and unicode. + * For printing purposes you want to preserve the original string. + * Example: "😎" is interpreted as "\240\159\152\142" + * The purpose of this routine is to place the original string back in + * the parsetree for printing purposes. Unicode and escape sequences + * shouldn't be mangled when *) +val replaceStringLiteralStructure : + (string * Location.t) list -> Parsetree.structure -> Parsetree.structure +val replaceStringLiteralSignature : + (string * Location.t) list -> Parsetree.signature -> Parsetree.signature + +(* Get rid of the explicit/implicit arity attributes *) +val normalizeReasonArityStructure : + forPrinter:bool -> Parsetree.structure -> Parsetree.structure +val normalizeReasonAritySignature : + forPrinter:bool -> Parsetree.signature -> Parsetree.signature + +(* transform parts of the parsetree into a suitable parsetree suitable + * for printing. Example: convert reason ternaries into rescript ternaries *) +val structure : Parsetree.structure -> Parsetree.structure +val signature : Parsetree.signature -> Parsetree.signature diff --git a/res_syntax/src/res_ast_debugger.ml b/res_syntax/src/res_ast_debugger.ml new file mode 100644 index 0000000000..150ff78e35 --- /dev/null +++ b/res_syntax/src/res_ast_debugger.ml @@ -0,0 +1,949 @@ +module Doc = Res_doc +module CommentTable = Res_comments_table + +let printEngine = + Res_driver. 
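+  (* debug print engine: dumps the raw OCaml parsetree via Printast instead of printing ReScript *)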
+ { + printImplementation = + (fun ~width:_ ~filename:_ ~comments:_ structure -> + Printast.implementation Format.std_formatter structure); + printInterface = + (fun ~width:_ ~filename:_ ~comments:_ signature -> + Printast.interface Format.std_formatter signature); + } + +module Sexp : sig + type t + + val atom : string -> t + val list : t list -> t + val toString : t -> string +end = struct + type t = Atom of string | List of t list + + let atom s = Atom s + let list l = List l + + let rec toDoc t = + match t with + | Atom s -> Doc.text s + | List [] -> Doc.text "()" + | List [sexpr] -> Doc.concat [Doc.lparen; toDoc sexpr; Doc.rparen] + | List (hd :: tail) -> + Doc.group + (Doc.concat + [ + Doc.lparen; + toDoc hd; + Doc.indent + (Doc.concat + [Doc.line; Doc.join ~sep:Doc.line (List.map toDoc tail)]); + Doc.rparen; + ]) + + let toString sexpr = + let doc = toDoc sexpr in + Doc.toString ~width:80 doc +end + +module SexpAst = struct + open Parsetree + + let mapEmpty ~f items = + match items with + | [] -> [Sexp.list []] + | items -> List.map f items + + let string txt = Sexp.atom ("\"" ^ txt ^ "\"") + + let char c = Sexp.atom ("'" ^ Char.escaped c ^ "'") + + let optChar oc = + match oc with + | None -> Sexp.atom "None" + | Some c -> Sexp.list [Sexp.atom "Some"; char c] + + let longident l = + let rec loop l = + match l with + | Longident.Lident ident -> Sexp.list [Sexp.atom "Lident"; string ident] + | Longident.Ldot (lident, txt) -> + Sexp.list [Sexp.atom "Ldot"; loop lident; string txt] + | Longident.Lapply (l1, l2) -> + Sexp.list [Sexp.atom "Lapply"; loop l1; loop l2] + in + Sexp.list [Sexp.atom "longident"; loop l] + + let closedFlag flag = + match flag with + | Asttypes.Closed -> Sexp.atom "Closed" + | Open -> Sexp.atom "Open" + + let directionFlag flag = + match flag with + | Asttypes.Upto -> Sexp.atom "Upto" + | Downto -> Sexp.atom "Downto" + + let recFlag flag = + match flag with + | Asttypes.Recursive -> Sexp.atom "Recursive" + | Nonrecursive -> Sexp.atom "Nonrecursive" + + let overrideFlag flag = + match flag with + | Asttypes.Override -> Sexp.atom "Override" + | Fresh -> Sexp.atom "Fresh" + + let privateFlag flag = + match flag with + | Asttypes.Public -> Sexp.atom "Public" + | Private -> Sexp.atom "Private" + + let mutableFlag flag = + match flag with + | Asttypes.Immutable -> Sexp.atom "Immutable" + | Mutable -> Sexp.atom "Mutable" + + let variance v = + match v with + | Asttypes.Covariant -> Sexp.atom "Covariant" + | Contravariant -> Sexp.atom "Contravariant" + | Invariant -> Sexp.atom "Invariant" + + let argLabel lbl = + match lbl with + | Asttypes.Nolabel -> Sexp.atom "Nolabel" + | Labelled txt -> Sexp.list [Sexp.atom "Labelled"; string txt] + | Optional txt -> Sexp.list [Sexp.atom "Optional"; string txt] + + let constant c = + let sexpr = + match c with + | Pconst_integer (txt, tag) -> + Sexp.list [Sexp.atom "Pconst_integer"; string txt; optChar tag] + | Pconst_char _ -> Sexp.list [Sexp.atom "Pconst_char"] + | Pconst_string (_, Some "INTERNAL_RES_CHAR_CONTENTS") -> + Sexp.list [Sexp.atom "Pconst_char"] + | Pconst_string (txt, tag) -> + Sexp.list + [ + Sexp.atom "Pconst_string"; + string txt; + (match tag with + | Some txt -> Sexp.list [Sexp.atom "Some"; string txt] + | None -> Sexp.atom "None"); + ] + | Pconst_float (txt, tag) -> + Sexp.list [Sexp.atom "Pconst_float"; string txt; optChar tag] + in + Sexp.list [Sexp.atom "constant"; sexpr] + + let rec structure s = + Sexp.list (Sexp.atom "structure" :: List.map structureItem s) + + and structureItem si = + let desc = + match 
si.pstr_desc with + | Pstr_eval (expr, attrs) -> + Sexp.list [Sexp.atom "Pstr_eval"; expression expr; attributes attrs] + | Pstr_value (flag, vbs) -> + Sexp.list + [ + Sexp.atom "Pstr_value"; + recFlag flag; + Sexp.list (mapEmpty ~f:valueBinding vbs); + ] + | Pstr_primitive vd -> + Sexp.list [Sexp.atom "Pstr_primitive"; valueDescription vd] + | Pstr_type (flag, tds) -> + Sexp.list + [ + Sexp.atom "Pstr_type"; + recFlag flag; + Sexp.list (mapEmpty ~f:typeDeclaration tds); + ] + | Pstr_typext typext -> + Sexp.list [Sexp.atom "Pstr_type"; typeExtension typext] + | Pstr_exception ec -> + Sexp.list [Sexp.atom "Pstr_exception"; extensionConstructor ec] + | Pstr_module mb -> Sexp.list [Sexp.atom "Pstr_module"; moduleBinding mb] + | Pstr_recmodule mbs -> + Sexp.list + [ + Sexp.atom "Pstr_recmodule"; Sexp.list (mapEmpty ~f:moduleBinding mbs); + ] + | Pstr_modtype modTypDecl -> + Sexp.list [Sexp.atom "Pstr_modtype"; moduleTypeDeclaration modTypDecl] + | Pstr_open openDesc -> + Sexp.list [Sexp.atom "Pstr_open"; openDescription openDesc] + | Pstr_class _ -> Sexp.atom "Pstr_class" + | Pstr_class_type _ -> Sexp.atom "Pstr_class_type" + | Pstr_include id -> + Sexp.list [Sexp.atom "Pstr_include"; includeDeclaration id] + | Pstr_attribute attr -> + Sexp.list [Sexp.atom "Pstr_attribute"; attribute attr] + | Pstr_extension (ext, attrs) -> + Sexp.list [Sexp.atom "Pstr_extension"; extension ext; attributes attrs] + in + Sexp.list [Sexp.atom "structure_item"; desc] + + and includeDeclaration id = + Sexp.list + [ + Sexp.atom "include_declaration"; + moduleExpression id.pincl_mod; + attributes id.pincl_attributes; + ] + + and openDescription od = + Sexp.list + [ + Sexp.atom "open_description"; + longident od.popen_lid.Asttypes.txt; + attributes od.popen_attributes; + ] + + and moduleTypeDeclaration mtd = + Sexp.list + [ + Sexp.atom "module_type_declaration"; + string mtd.pmtd_name.Asttypes.txt; + (match mtd.pmtd_type with + | None -> Sexp.atom "None" + | Some modType -> Sexp.list [Sexp.atom "Some"; moduleType modType]); + attributes mtd.pmtd_attributes; + ] + + and moduleBinding mb = + Sexp.list + [ + Sexp.atom "module_binding"; + string mb.pmb_name.Asttypes.txt; + moduleExpression mb.pmb_expr; + attributes mb.pmb_attributes; + ] + + and moduleExpression me = + let desc = + match me.pmod_desc with + | Pmod_ident modName -> + Sexp.list [Sexp.atom "Pmod_ident"; longident modName.Asttypes.txt] + | Pmod_structure s -> Sexp.list [Sexp.atom "Pmod_structure"; structure s] + | Pmod_functor (lbl, optModType, modExpr) -> + Sexp.list + [ + Sexp.atom "Pmod_functor"; + string lbl.Asttypes.txt; + (match optModType with + | None -> Sexp.atom "None" + | Some modType -> Sexp.list [Sexp.atom "Some"; moduleType modType]); + moduleExpression modExpr; + ] + | Pmod_apply (callModExpr, modExprArg) -> + Sexp.list + [ + Sexp.atom "Pmod_apply"; + moduleExpression callModExpr; + moduleExpression modExprArg; + ] + | Pmod_constraint (modExpr, modType) -> + Sexp.list + [ + Sexp.atom "Pmod_constraint"; + moduleExpression modExpr; + moduleType modType; + ] + | Pmod_unpack expr -> Sexp.list [Sexp.atom "Pmod_unpack"; expression expr] + | Pmod_extension ext -> + Sexp.list [Sexp.atom "Pmod_extension"; extension ext] + in + Sexp.list [Sexp.atom "module_expr"; desc; attributes me.pmod_attributes] + + and moduleType mt = + let desc = + match mt.pmty_desc with + | Pmty_ident longidentLoc -> + Sexp.list [Sexp.atom "Pmty_ident"; longident longidentLoc.Asttypes.txt] + | Pmty_signature s -> Sexp.list [Sexp.atom "Pmty_signature"; signature s] + | 
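+      (* functor: argument name, optional argument module type, and the result module type *)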
Pmty_functor (lbl, optModType, modType) -> + Sexp.list + [ + Sexp.atom "Pmty_functor"; + string lbl.Asttypes.txt; + (match optModType with + | None -> Sexp.atom "None" + | Some modType -> Sexp.list [Sexp.atom "Some"; moduleType modType]); + moduleType modType; + ] + | Pmty_alias longidentLoc -> + Sexp.list [Sexp.atom "Pmty_alias"; longident longidentLoc.Asttypes.txt] + | Pmty_extension ext -> + Sexp.list [Sexp.atom "Pmty_extension"; extension ext] + | Pmty_typeof modExpr -> + Sexp.list [Sexp.atom "Pmty_typeof"; moduleExpression modExpr] + | Pmty_with (modType, withConstraints) -> + Sexp.list + [ + Sexp.atom "Pmty_with"; + moduleType modType; + Sexp.list (mapEmpty ~f:withConstraint withConstraints); + ] + in + Sexp.list [Sexp.atom "module_type"; desc; attributes mt.pmty_attributes] + + and withConstraint wc = + match wc with + | Pwith_type (longidentLoc, td) -> + Sexp.list + [ + Sexp.atom "Pmty_with"; + longident longidentLoc.Asttypes.txt; + typeDeclaration td; + ] + | Pwith_module (l1, l2) -> + Sexp.list + [ + Sexp.atom "Pwith_module"; + longident l1.Asttypes.txt; + longident l2.Asttypes.txt; + ] + | Pwith_typesubst (longidentLoc, td) -> + Sexp.list + [ + Sexp.atom "Pwith_typesubst"; + longident longidentLoc.Asttypes.txt; + typeDeclaration td; + ] + | Pwith_modsubst (l1, l2) -> + Sexp.list + [ + Sexp.atom "Pwith_modsubst"; + longident l1.Asttypes.txt; + longident l2.Asttypes.txt; + ] + + and signature s = Sexp.list (Sexp.atom "signature" :: List.map signatureItem s) + + and signatureItem si = + let descr = + match si.psig_desc with + | Psig_value vd -> Sexp.list [Sexp.atom "Psig_value"; valueDescription vd] + | Psig_type (flag, typeDeclarations) -> + Sexp.list + [ + Sexp.atom "Psig_type"; + recFlag flag; + Sexp.list (mapEmpty ~f:typeDeclaration typeDeclarations); + ] + | Psig_typext typExt -> + Sexp.list [Sexp.atom "Psig_typext"; typeExtension typExt] + | Psig_exception extConstr -> + Sexp.list [Sexp.atom "Psig_exception"; extensionConstructor extConstr] + | Psig_module modDecl -> + Sexp.list [Sexp.atom "Psig_module"; moduleDeclaration modDecl] + | Psig_recmodule modDecls -> + Sexp.list + [ + Sexp.atom "Psig_recmodule"; + Sexp.list (mapEmpty ~f:moduleDeclaration modDecls); + ] + | Psig_modtype modTypDecl -> + Sexp.list [Sexp.atom "Psig_modtype"; moduleTypeDeclaration modTypDecl] + | Psig_open openDesc -> + Sexp.list [Sexp.atom "Psig_open"; openDescription openDesc] + | Psig_include inclDecl -> + Sexp.list [Sexp.atom "Psig_include"; includeDescription inclDecl] + | Psig_class _ -> Sexp.list [Sexp.atom "Psig_class"] + | Psig_class_type _ -> Sexp.list [Sexp.atom "Psig_class_type"] + | Psig_attribute attr -> + Sexp.list [Sexp.atom "Psig_attribute"; attribute attr] + | Psig_extension (ext, attrs) -> + Sexp.list [Sexp.atom "Psig_extension"; extension ext; attributes attrs] + in + Sexp.list [Sexp.atom "signature_item"; descr] + + and includeDescription id = + Sexp.list + [ + Sexp.atom "include_description"; + moduleType id.pincl_mod; + attributes id.pincl_attributes; + ] + + and moduleDeclaration md = + Sexp.list + [ + Sexp.atom "module_declaration"; + string md.pmd_name.Asttypes.txt; + moduleType md.pmd_type; + attributes md.pmd_attributes; + ] + + and valueBinding vb = + Sexp.list + [ + Sexp.atom "value_binding"; + pattern vb.pvb_pat; + expression vb.pvb_expr; + attributes vb.pvb_attributes; + ] + + and valueDescription vd = + Sexp.list + [ + Sexp.atom "value_description"; + string vd.pval_name.Asttypes.txt; + coreType vd.pval_type; + Sexp.list (mapEmpty ~f:string vd.pval_prim); + attributes 
vd.pval_attributes; + ] + + and typeDeclaration td = + Sexp.list + [ + Sexp.atom "type_declaration"; + string td.ptype_name.Asttypes.txt; + Sexp.list + [ + Sexp.atom "ptype_params"; + Sexp.list + (mapEmpty + ~f:(fun (typexpr, var) -> + Sexp.list [coreType typexpr; variance var]) + td.ptype_params); + ]; + Sexp.list + [ + Sexp.atom "ptype_cstrs"; + Sexp.list + (mapEmpty + ~f:(fun (typ1, typ2, _loc) -> + Sexp.list [coreType typ1; coreType typ2]) + td.ptype_cstrs); + ]; + Sexp.list [Sexp.atom "ptype_kind"; typeKind td.ptype_kind]; + Sexp.list + [ + Sexp.atom "ptype_manifest"; + (match td.ptype_manifest with + | None -> Sexp.atom "None" + | Some typ -> Sexp.list [Sexp.atom "Some"; coreType typ]); + ]; + Sexp.list [Sexp.atom "ptype_private"; privateFlag td.ptype_private]; + attributes td.ptype_attributes; + ] + + and extensionConstructor ec = + Sexp.list + [ + Sexp.atom "extension_constructor"; + string ec.pext_name.Asttypes.txt; + extensionConstructorKind ec.pext_kind; + attributes ec.pext_attributes; + ] + + and extensionConstructorKind kind = + match kind with + | Pext_decl (args, optTypExpr) -> + Sexp.list + [ + Sexp.atom "Pext_decl"; + constructorArguments args; + (match optTypExpr with + | None -> Sexp.atom "None" + | Some typ -> Sexp.list [Sexp.atom "Some"; coreType typ]); + ] + | Pext_rebind longidentLoc -> + Sexp.list [Sexp.atom "Pext_rebind"; longident longidentLoc.Asttypes.txt] + + and typeExtension te = + Sexp.list + [ + Sexp.atom "type_extension"; + Sexp.list + [Sexp.atom "ptyext_path"; longident te.ptyext_path.Asttypes.txt]; + Sexp.list + [ + Sexp.atom "ptyext_parms"; + Sexp.list + (mapEmpty + ~f:(fun (typexpr, var) -> + Sexp.list [coreType typexpr; variance var]) + te.ptyext_params); + ]; + Sexp.list + [ + Sexp.atom "ptyext_constructors"; + Sexp.list (mapEmpty ~f:extensionConstructor te.ptyext_constructors); + ]; + Sexp.list [Sexp.atom "ptyext_private"; privateFlag te.ptyext_private]; + attributes te.ptyext_attributes; + ] + + and typeKind kind = + match kind with + | Ptype_abstract -> Sexp.atom "Ptype_abstract" + | Ptype_variant constrDecls -> + Sexp.list + [ + Sexp.atom "Ptype_variant"; + Sexp.list (mapEmpty ~f:constructorDeclaration constrDecls); + ] + | Ptype_record lblDecls -> + Sexp.list + [ + Sexp.atom "Ptype_record"; + Sexp.list (mapEmpty ~f:labelDeclaration lblDecls); + ] + | Ptype_open -> Sexp.atom "Ptype_open" + + and constructorDeclaration cd = + Sexp.list + [ + Sexp.atom "constructor_declaration"; + string cd.pcd_name.Asttypes.txt; + Sexp.list [Sexp.atom "pcd_args"; constructorArguments cd.pcd_args]; + Sexp.list + [ + Sexp.atom "pcd_res"; + (match cd.pcd_res with + | None -> Sexp.atom "None" + | Some typ -> Sexp.list [Sexp.atom "Some"; coreType typ]); + ]; + attributes cd.pcd_attributes; + ] + + and constructorArguments args = + match args with + | Pcstr_tuple types -> + Sexp.list + [Sexp.atom "Pcstr_tuple"; Sexp.list (mapEmpty ~f:coreType types)] + | Pcstr_record lds -> + Sexp.list + [Sexp.atom "Pcstr_record"; Sexp.list (mapEmpty ~f:labelDeclaration lds)] + + and labelDeclaration ld = + Sexp.list + [ + Sexp.atom "label_declaration"; + string ld.pld_name.Asttypes.txt; + mutableFlag ld.pld_mutable; + coreType ld.pld_type; + attributes ld.pld_attributes; + ] + + and expression expr = + let desc = + match expr.pexp_desc with + | Pexp_ident longidentLoc -> + Sexp.list [Sexp.atom "Pexp_ident"; longident longidentLoc.Asttypes.txt] + | Pexp_constant c -> Sexp.list [Sexp.atom "Pexp_constant"; constant c] + | Pexp_let (flag, vbs, expr) -> + Sexp.list + [ + Sexp.atom 
"Pexp_let"; + recFlag flag; + Sexp.list (mapEmpty ~f:valueBinding vbs); + expression expr; + ] + | Pexp_function cases -> + Sexp.list + [Sexp.atom "Pexp_function"; Sexp.list (mapEmpty ~f:case cases)] + | Pexp_fun (argLbl, exprOpt, pat, expr) -> + Sexp.list + [ + Sexp.atom "Pexp_fun"; + argLabel argLbl; + (match exprOpt with + | None -> Sexp.atom "None" + | Some expr -> Sexp.list [Sexp.atom "Some"; expression expr]); + pattern pat; + expression expr; + ] + | Pexp_apply (expr, args) -> + Sexp.list + [ + Sexp.atom "Pexp_apply"; + expression expr; + Sexp.list + (mapEmpty + ~f:(fun (argLbl, expr) -> + Sexp.list [argLabel argLbl; expression expr]) + args); + ] + | Pexp_match (expr, cases) -> + Sexp.list + [ + Sexp.atom "Pexp_match"; + expression expr; + Sexp.list (mapEmpty ~f:case cases); + ] + | Pexp_try (expr, cases) -> + Sexp.list + [ + Sexp.atom "Pexp_try"; + expression expr; + Sexp.list (mapEmpty ~f:case cases); + ] + | Pexp_tuple exprs -> + Sexp.list + [Sexp.atom "Pexp_tuple"; Sexp.list (mapEmpty ~f:expression exprs)] + | Pexp_construct (longidentLoc, exprOpt) -> + Sexp.list + [ + Sexp.atom "Pexp_construct"; + longident longidentLoc.Asttypes.txt; + (match exprOpt with + | None -> Sexp.atom "None" + | Some expr -> Sexp.list [Sexp.atom "Some"; expression expr]); + ] + | Pexp_variant (lbl, exprOpt) -> + Sexp.list + [ + Sexp.atom "Pexp_variant"; + string lbl; + (match exprOpt with + | None -> Sexp.atom "None" + | Some expr -> Sexp.list [Sexp.atom "Some"; expression expr]); + ] + | Pexp_record (rows, optExpr) -> + Sexp.list + [ + Sexp.atom "Pexp_record"; + Sexp.list + (mapEmpty + ~f:(fun (longidentLoc, expr) -> + Sexp.list + [longident longidentLoc.Asttypes.txt; expression expr]) + rows); + (match optExpr with + | None -> Sexp.atom "None" + | Some expr -> Sexp.list [Sexp.atom "Some"; expression expr]); + ] + | Pexp_field (expr, longidentLoc) -> + Sexp.list + [ + Sexp.atom "Pexp_field"; + expression expr; + longident longidentLoc.Asttypes.txt; + ] + | Pexp_setfield (expr1, longidentLoc, expr2) -> + Sexp.list + [ + Sexp.atom "Pexp_setfield"; + expression expr1; + longident longidentLoc.Asttypes.txt; + expression expr2; + ] + | Pexp_array exprs -> + Sexp.list + [Sexp.atom "Pexp_array"; Sexp.list (mapEmpty ~f:expression exprs)] + | Pexp_ifthenelse (expr1, expr2, optExpr) -> + Sexp.list + [ + Sexp.atom "Pexp_ifthenelse"; + expression expr1; + expression expr2; + (match optExpr with + | None -> Sexp.atom "None" + | Some expr -> Sexp.list [Sexp.atom "Some"; expression expr]); + ] + | Pexp_sequence (expr1, expr2) -> + Sexp.list + [Sexp.atom "Pexp_sequence"; expression expr1; expression expr2] + | Pexp_while (expr1, expr2) -> + Sexp.list [Sexp.atom "Pexp_while"; expression expr1; expression expr2] + | Pexp_for (pat, e1, e2, flag, e3) -> + Sexp.list + [ + Sexp.atom "Pexp_for"; + pattern pat; + expression e1; + expression e2; + directionFlag flag; + expression e3; + ] + | Pexp_constraint (expr, typexpr) -> + Sexp.list + [Sexp.atom "Pexp_constraint"; expression expr; coreType typexpr] + | Pexp_coerce (expr, optTyp, typexpr) -> + Sexp.list + [ + Sexp.atom "Pexp_coerce"; + expression expr; + (match optTyp with + | None -> Sexp.atom "None" + | Some typ -> Sexp.list [Sexp.atom "Some"; coreType typ]); + coreType typexpr; + ] + | Pexp_send _ -> Sexp.list [Sexp.atom "Pexp_send"] + | Pexp_new _ -> Sexp.list [Sexp.atom "Pexp_new"] + | Pexp_setinstvar _ -> Sexp.list [Sexp.atom "Pexp_setinstvar"] + | Pexp_override _ -> Sexp.list [Sexp.atom "Pexp_override"] + | Pexp_letmodule (modName, modExpr, expr) -> + Sexp.list + 
[ + Sexp.atom "Pexp_letmodule"; + string modName.Asttypes.txt; + moduleExpression modExpr; + expression expr; + ] + | Pexp_letexception (extConstr, expr) -> + Sexp.list + [ + Sexp.atom "Pexp_letexception"; + extensionConstructor extConstr; + expression expr; + ] + | Pexp_assert expr -> Sexp.list [Sexp.atom "Pexp_assert"; expression expr] + | Pexp_lazy expr -> Sexp.list [Sexp.atom "Pexp_lazy"; expression expr] + | Pexp_poly _ -> Sexp.list [Sexp.atom "Pexp_poly"] + | Pexp_object _ -> Sexp.list [Sexp.atom "Pexp_object"] + | Pexp_newtype (lbl, expr) -> + Sexp.list + [Sexp.atom "Pexp_newtype"; string lbl.Asttypes.txt; expression expr] + | Pexp_pack modExpr -> + Sexp.list [Sexp.atom "Pexp_pack"; moduleExpression modExpr] + | Pexp_open (flag, longidentLoc, expr) -> + Sexp.list + [ + Sexp.atom "Pexp_open"; + overrideFlag flag; + longident longidentLoc.Asttypes.txt; + expression expr; + ] + | Pexp_extension ext -> + Sexp.list [Sexp.atom "Pexp_extension"; extension ext] + | Pexp_unreachable -> Sexp.atom "Pexp_unreachable" + in + Sexp.list [Sexp.atom "expression"; desc] + + and case c = + Sexp.list + [ + Sexp.atom "case"; + Sexp.list [Sexp.atom "pc_lhs"; pattern c.pc_lhs]; + Sexp.list + [ + Sexp.atom "pc_guard"; + (match c.pc_guard with + | None -> Sexp.atom "None" + | Some expr -> Sexp.list [Sexp.atom "Some"; expression expr]); + ]; + Sexp.list [Sexp.atom "pc_rhs"; expression c.pc_rhs]; + ] + + and pattern p = + let descr = + match p.ppat_desc with + | Ppat_any -> Sexp.atom "Ppat_any" + | Ppat_var var -> + Sexp.list [Sexp.atom "Ppat_var"; string var.Location.txt] + | Ppat_alias (p, alias) -> + Sexp.list [Sexp.atom "Ppat_alias"; pattern p; string alias.txt] + | Ppat_constant c -> Sexp.list [Sexp.atom "Ppat_constant"; constant c] + | Ppat_interval (lo, hi) -> + Sexp.list [Sexp.atom "Ppat_interval"; constant lo; constant hi] + | Ppat_tuple patterns -> + Sexp.list + [Sexp.atom "Ppat_tuple"; Sexp.list (mapEmpty ~f:pattern patterns)] + | Ppat_construct (longidentLoc, optPattern) -> + Sexp.list + [ + Sexp.atom "Ppat_construct"; + longident longidentLoc.Location.txt; + (match optPattern with + | None -> Sexp.atom "None" + | Some p -> Sexp.list [Sexp.atom "some"; pattern p]); + ] + | Ppat_variant (lbl, optPattern) -> + Sexp.list + [ + Sexp.atom "Ppat_variant"; + string lbl; + (match optPattern with + | None -> Sexp.atom "None" + | Some p -> Sexp.list [Sexp.atom "Some"; pattern p]); + ] + | Ppat_record (rows, flag) -> + Sexp.list + [ + Sexp.atom "Ppat_record"; + closedFlag flag; + Sexp.list + (mapEmpty + ~f:(fun (longidentLoc, p) -> + Sexp.list [longident longidentLoc.Location.txt; pattern p]) + rows); + ] + | Ppat_array patterns -> + Sexp.list + [Sexp.atom "Ppat_array"; Sexp.list (mapEmpty ~f:pattern patterns)] + | Ppat_or (p1, p2) -> + Sexp.list [Sexp.atom "Ppat_or"; pattern p1; pattern p2] + | Ppat_constraint (p, typexpr) -> + Sexp.list [Sexp.atom "Ppat_constraint"; pattern p; coreType typexpr] + | Ppat_type longidentLoc -> + Sexp.list [Sexp.atom "Ppat_type"; longident longidentLoc.Location.txt] + | Ppat_lazy p -> Sexp.list [Sexp.atom "Ppat_lazy"; pattern p] + | Ppat_unpack stringLoc -> + Sexp.list [Sexp.atom "Ppat_unpack"; string stringLoc.Location.txt] + | Ppat_exception p -> Sexp.list [Sexp.atom "Ppat_exception"; pattern p] + | Ppat_extension ext -> + Sexp.list [Sexp.atom "Ppat_extension"; extension ext] + | Ppat_open (longidentLoc, p) -> + Sexp.list + [ + Sexp.atom "Ppat_open"; longident longidentLoc.Location.txt; pattern p; + ] + in + Sexp.list [Sexp.atom "pattern"; descr] + + and objectField field = 
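+    (* an object field is either a tagged field (Otag) or an inherited type (Oinherit) *)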
+ match field with + | Otag (lblLoc, attrs, typexpr) -> + Sexp.list + [ + Sexp.atom "Otag"; string lblLoc.txt; attributes attrs; coreType typexpr; + ] + | Oinherit typexpr -> Sexp.list [Sexp.atom "Oinherit"; coreType typexpr] + + and rowField field = + match field with + | Rtag (labelLoc, attrs, truth, types) -> + Sexp.list + [ + Sexp.atom "Rtag"; + string labelLoc.txt; + attributes attrs; + Sexp.atom (if truth then "true" else "false"); + Sexp.list (mapEmpty ~f:coreType types); + ] + | Rinherit typexpr -> Sexp.list [Sexp.atom "Rinherit"; coreType typexpr] + + and packageType (modNameLoc, packageConstraints) = + Sexp.list + [ + Sexp.atom "package_type"; + longident modNameLoc.Asttypes.txt; + Sexp.list + (mapEmpty + ~f:(fun (modNameLoc, typexpr) -> + Sexp.list [longident modNameLoc.Asttypes.txt; coreType typexpr]) + packageConstraints); + ] + + and coreType typexpr = + let desc = + match typexpr.ptyp_desc with + | Ptyp_any -> Sexp.atom "Ptyp_any" + | Ptyp_var var -> Sexp.list [Sexp.atom "Ptyp_var"; string var] + | Ptyp_arrow (argLbl, typ1, typ2) -> + Sexp.list + [ + Sexp.atom "Ptyp_arrow"; argLabel argLbl; coreType typ1; coreType typ2; + ] + | Ptyp_tuple types -> + Sexp.list + [Sexp.atom "Ptyp_tuple"; Sexp.list (mapEmpty ~f:coreType types)] + | Ptyp_constr (longidentLoc, types) -> + Sexp.list + [ + Sexp.atom "Ptyp_constr"; + longident longidentLoc.txt; + Sexp.list (mapEmpty ~f:coreType types); + ] + | Ptyp_alias (typexpr, alias) -> + Sexp.list [Sexp.atom "Ptyp_alias"; coreType typexpr; string alias] + | Ptyp_object (fields, flag) -> + Sexp.list + [ + Sexp.atom "Ptyp_object"; + closedFlag flag; + Sexp.list (mapEmpty ~f:objectField fields); + ] + | Ptyp_class (longidentLoc, types) -> + Sexp.list + [ + Sexp.atom "Ptyp_class"; + longident longidentLoc.Location.txt; + Sexp.list (mapEmpty ~f:coreType types); + ] + | Ptyp_variant (fields, flag, optLabels) -> + Sexp.list + [ + Sexp.atom "Ptyp_variant"; + Sexp.list (mapEmpty ~f:rowField fields); + closedFlag flag; + (match optLabels with + | None -> Sexp.atom "None" + | Some lbls -> Sexp.list (mapEmpty ~f:string lbls)); + ] + | Ptyp_poly (lbls, typexpr) -> + Sexp.list + [ + Sexp.atom "Ptyp_poly"; + Sexp.list (mapEmpty ~f:(fun lbl -> string lbl.Asttypes.txt) lbls); + coreType typexpr; + ] + | Ptyp_package package -> + Sexp.list [Sexp.atom "Ptyp_package"; packageType package] + | Ptyp_extension ext -> + Sexp.list [Sexp.atom "Ptyp_extension"; extension ext] + in + Sexp.list [Sexp.atom "core_type"; desc] + + and payload p = + match p with + | PStr s -> Sexp.list (Sexp.atom "PStr" :: mapEmpty ~f:structureItem s) + | PSig s -> Sexp.list [Sexp.atom "PSig"; signature s] + | PTyp ct -> Sexp.list [Sexp.atom "PTyp"; coreType ct] + | PPat (pat, optExpr) -> + Sexp.list + [ + Sexp.atom "PPat"; + pattern pat; + (match optExpr with + | Some expr -> Sexp.list [Sexp.atom "Some"; expression expr] + | None -> Sexp.atom "None"); + ] + + and attribute (stringLoc, p) = + Sexp.list + [Sexp.atom "attribute"; Sexp.atom stringLoc.Asttypes.txt; payload p] + + and extension (stringLoc, p) = + Sexp.list + [Sexp.atom "extension"; Sexp.atom stringLoc.Asttypes.txt; payload p] + + and attributes attrs = + let sexprs = mapEmpty ~f:attribute attrs in + Sexp.list (Sexp.atom "attributes" :: sexprs) + + let printEngine = + Res_driver. 
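+    (* S-expression print engine: serializes the parsetree with the printers defined above *)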
+ { + printImplementation = + (fun ~width:_ ~filename:_ ~comments:_ parsetree -> + parsetree |> structure |> Sexp.toString |> print_string); + printInterface = + (fun ~width:_ ~filename:_ ~comments:_ parsetree -> + parsetree |> signature |> Sexp.toString |> print_string); + } +end + +let sexpPrintEngine = SexpAst.printEngine + +let commentsPrintEngine = + { + Res_driver.printImplementation = + (fun ~width:_ ~filename:_ ~comments s -> + let cmtTbl = CommentTable.make () in + CommentTable.walkStructure s cmtTbl comments; + CommentTable.log cmtTbl); + printInterface = + (fun ~width:_ ~filename:_ ~comments s -> + let cmtTbl = CommentTable.make () in + CommentTable.walkSignature s cmtTbl comments; + CommentTable.log cmtTbl); + } diff --git a/res_syntax/src/res_ast_debugger.mli b/res_syntax/src/res_ast_debugger.mli new file mode 100644 index 0000000000..1b325b742f --- /dev/null +++ b/res_syntax/src/res_ast_debugger.mli @@ -0,0 +1,3 @@ +val printEngine : Res_driver.printEngine +val sexpPrintEngine : Res_driver.printEngine +val commentsPrintEngine : Res_driver.printEngine diff --git a/res_syntax/src/res_comment.ml b/res_syntax/src/res_comment.ml new file mode 100644 index 0000000000..23898f8bcb --- /dev/null +++ b/res_syntax/src/res_comment.ml @@ -0,0 +1,67 @@ +type style = SingleLine | MultiLine | DocComment | ModuleComment + +let styleToString s = + match s with + | SingleLine -> "SingleLine" + | MultiLine -> "MultiLine" + | DocComment -> "DocComment" + | ModuleComment -> "ModuleComment" + +type t = { + txt: string; + style: style; + loc: Location.t; + mutable prevTokEndPos: Lexing.position; +} + +let loc t = t.loc +let txt t = t.txt +let prevTokEndPos t = t.prevTokEndPos + +let setPrevTokEndPos t pos = t.prevTokEndPos <- pos + +let isSingleLineComment t = t.style = SingleLine + +let isDocComment t = t.style = DocComment + +let isModuleComment t = t.style = ModuleComment + +let toString t = + let {Location.loc_start; loc_end} = t.loc in + Format.sprintf "(txt: %s\nstyle: %s\nlocation: %d,%d-%d,%d)" t.txt + (styleToString t.style) loc_start.pos_lnum + (loc_start.pos_cnum - loc_start.pos_bol) + loc_end.pos_lnum + (loc_end.pos_cnum - loc_end.pos_bol) + +let makeSingleLineComment ~loc txt = + {txt; loc; style = SingleLine; prevTokEndPos = Lexing.dummy_pos} + +let makeMultiLineComment ~loc ~docComment ~standalone txt = + { + txt; + loc; + style = + (if docComment then if standalone then ModuleComment else DocComment + else MultiLine); + prevTokEndPos = Lexing.dummy_pos; + } + +let fromOcamlComment ~loc ~txt ~prevTokEndPos = + {txt; loc; style = MultiLine; prevTokEndPos} + +let trimSpaces s = + let len = String.length s in + if len = 0 then s + else if String.unsafe_get s 0 = ' ' || String.unsafe_get s (len - 1) = ' ' + then ( + let i = ref 0 in + while !i < len && String.unsafe_get s !i = ' ' do + incr i + done; + let j = ref (len - 1) in + while !j >= !i && String.unsafe_get s !j = ' ' do + decr j + done; + if !j >= !i then (String.sub [@doesNotRaise]) s !i (!j - !i + 1) else "") + else s diff --git a/res_syntax/src/res_comment.mli b/res_syntax/src/res_comment.mli new file mode 100644 index 0000000000..f1d5424d9c --- /dev/null +++ b/res_syntax/src/res_comment.mli @@ -0,0 +1,22 @@ +type t + +val toString : t -> string + +val loc : t -> Location.t +val txt : t -> string +val prevTokEndPos : t -> Lexing.position + +val setPrevTokEndPos : t -> Lexing.position -> unit + +val isDocComment : t -> bool + +val isModuleComment : t -> bool + +val isSingleLineComment : t -> bool + +val makeSingleLineComment : 
loc:Location.t -> string -> t +val makeMultiLineComment : + loc:Location.t -> docComment:bool -> standalone:bool -> string -> t +val fromOcamlComment : + loc:Location.t -> txt:string -> prevTokEndPos:Lexing.position -> t +val trimSpaces : string -> string diff --git a/res_syntax/src/res_comments_table.ml b/res_syntax/src/res_comments_table.ml new file mode 100644 index 0000000000..3078505ba3 --- /dev/null +++ b/res_syntax/src/res_comments_table.ml @@ -0,0 +1,1891 @@ +module Comment = Res_comment +module Doc = Res_doc +module ParsetreeViewer = Res_parsetree_viewer + +type t = { + leading: (Location.t, Comment.t list) Hashtbl.t; + inside: (Location.t, Comment.t list) Hashtbl.t; + trailing: (Location.t, Comment.t list) Hashtbl.t; +} + +let make () = + { + leading = Hashtbl.create 100; + inside = Hashtbl.create 100; + trailing = Hashtbl.create 100; + } + +let copy tbl = + { + leading = Hashtbl.copy tbl.leading; + inside = Hashtbl.copy tbl.inside; + trailing = Hashtbl.copy tbl.trailing; + } + +let empty = make () + +let printEntries tbl = + let open Location in + Hashtbl.fold + (fun (k : Location.t) (v : Comment.t list) acc -> + let loc = + Doc.concat + [ + Doc.lbracket; + Doc.text (string_of_int k.loc_start.pos_lnum); + Doc.text ":"; + Doc.text + (string_of_int (k.loc_start.pos_cnum - k.loc_start.pos_bol)); + Doc.text "-"; + Doc.text (string_of_int k.loc_end.pos_lnum); + Doc.text ":"; + Doc.text (string_of_int (k.loc_end.pos_cnum - k.loc_end.pos_bol)); + Doc.rbracket; + ] + in + let doc = + Doc.breakableGroup ~forceBreak:true + (Doc.concat + [ + loc; + Doc.indent + (Doc.concat + [ + Doc.line; + Doc.join + ~sep:(Doc.concat [Doc.comma; Doc.line]) + (List.map (fun c -> Doc.text (Comment.txt c)) v); + ]); + Doc.line; + ]) + in + doc :: acc) + tbl [] + +let log t = + let leadingStuff = printEntries t.leading in + let trailingStuff = printEntries t.trailing in + let stuffInside = printEntries t.inside in + Doc.breakableGroup ~forceBreak:true + (Doc.concat + [ + Doc.text "leading comments:"; + Doc.indent (Doc.concat [Doc.line; Doc.concat leadingStuff]); + Doc.line; + Doc.text "comments inside:"; + Doc.indent (Doc.concat [Doc.line; Doc.concat stuffInside]); + Doc.line; + Doc.text "trailing comments:"; + Doc.indent (Doc.concat [Doc.line; Doc.concat trailingStuff]); + Doc.line; + ]) + |> Doc.toString ~width:80 |> print_endline + +let attach tbl loc comments = + match comments with + | [] -> () + | comments -> Hashtbl.replace tbl loc comments + +let partitionByLoc comments loc = + let rec loop (leading, inside, trailing) comments = + let open Location in + match comments with + | comment :: rest -> + let cmtLoc = Comment.loc comment in + if cmtLoc.loc_end.pos_cnum <= loc.loc_start.pos_cnum then + loop (comment :: leading, inside, trailing) rest + else if cmtLoc.loc_start.pos_cnum >= loc.loc_end.pos_cnum then + loop (leading, inside, comment :: trailing) rest + else loop (leading, comment :: inside, trailing) rest + | [] -> (List.rev leading, List.rev inside, List.rev trailing) + in + loop ([], [], []) comments + +let partitionLeadingTrailing comments loc = + let rec loop (leading, trailing) comments = + let open Location in + match comments with + | comment :: rest -> + let cmtLoc = Comment.loc comment in + if cmtLoc.loc_end.pos_cnum <= loc.loc_start.pos_cnum then + loop (comment :: leading, trailing) rest + else loop (leading, comment :: trailing) rest + | [] -> (List.rev leading, List.rev trailing) + in + loop ([], []) comments + +let partitionByOnSameLine loc comments = + let rec loop (onSameLine, 
onOtherLine) comments = + let open Location in + match comments with + | [] -> (List.rev onSameLine, List.rev onOtherLine) + | comment :: rest -> + let cmtLoc = Comment.loc comment in + if cmtLoc.loc_start.pos_lnum == loc.loc_end.pos_lnum then + loop (comment :: onSameLine, onOtherLine) rest + else loop (onSameLine, comment :: onOtherLine) rest + in + loop ([], []) comments + +let partitionAdjacentTrailing loc1 comments = + let open Location in + let open Lexing in + let rec loop ~prevEndPos afterLoc1 comments = + match comments with + | [] -> (List.rev afterLoc1, []) + | comment :: rest as comments -> + let cmtPrevEndPos = Comment.prevTokEndPos comment in + if prevEndPos.Lexing.pos_cnum == cmtPrevEndPos.pos_cnum then + let commentEnd = (Comment.loc comment).loc_end in + loop ~prevEndPos:commentEnd (comment :: afterLoc1) rest + else (List.rev afterLoc1, comments) + in + loop ~prevEndPos:loc1.loc_end [] comments + +let rec collectListPatterns acc pattern = + let open Parsetree in + match pattern.ppat_desc with + | Ppat_construct + ({txt = Longident.Lident "::"}, Some {ppat_desc = Ppat_tuple [pat; rest]}) + -> + collectListPatterns (pat :: acc) rest + | Ppat_construct ({txt = Longident.Lident "[]"}, None) -> List.rev acc + | _ -> List.rev (pattern :: acc) + +let rec collectListExprs acc expr = + let open Parsetree in + match expr.pexp_desc with + | Pexp_construct + ({txt = Longident.Lident "::"}, Some {pexp_desc = Pexp_tuple [expr; rest]}) + -> + collectListExprs (expr :: acc) rest + | Pexp_construct ({txt = Longident.Lident "[]"}, _) -> List.rev acc + | _ -> List.rev (expr :: acc) + +(* TODO: use ParsetreeViewer *) +let arrowType ct = + let open Parsetree in + let rec process attrsBefore acc typ = + match typ with + | { + ptyp_desc = Ptyp_arrow ((Nolabel as lbl), typ1, typ2); + ptyp_attributes = []; + } -> + let arg = ([], lbl, typ1) in + process attrsBefore (arg :: acc) typ2 + | { + ptyp_desc = Ptyp_arrow ((Nolabel as lbl), typ1, typ2); + ptyp_attributes = [({txt = "bs"}, _)] as attrs; + } -> + let arg = (attrs, lbl, typ1) in + process attrsBefore (arg :: acc) typ2 + | {ptyp_desc = Ptyp_arrow (Nolabel, _typ1, _typ2); ptyp_attributes = _attrs} + as returnType -> + let args = List.rev acc in + (attrsBefore, args, returnType) + | { + ptyp_desc = Ptyp_arrow (((Labelled _ | Optional _) as lbl), typ1, typ2); + ptyp_attributes = attrs; + } -> + let arg = (attrs, lbl, typ1) in + process attrsBefore (arg :: acc) typ2 + | typ -> (attrsBefore, List.rev acc, typ) + in + match ct with + | {ptyp_desc = Ptyp_arrow (Nolabel, _typ1, _typ2); ptyp_attributes = attrs} as + typ -> + process attrs [] {typ with ptyp_attributes = []} + | typ -> process [] [] typ + +(* TODO: avoiding the dependency on ParsetreeViewer here, is this a good idea? *) +let modExprApply modExpr = + let rec loop acc modExpr = + match modExpr with + | {Parsetree.pmod_desc = Pmod_apply (next, arg)} -> loop (arg :: acc) next + | _ -> modExpr :: acc + in + loop [] modExpr + +(* TODO: avoiding the dependency on ParsetreeViewer here, is this a good idea? 
*) +let modExprFunctor modExpr = + let rec loop acc modExpr = + match modExpr with + | { + Parsetree.pmod_desc = Pmod_functor (lbl, modType, returnModExpr); + pmod_attributes = attrs; + } -> + let param = (attrs, lbl, modType) in + loop (param :: acc) returnModExpr + | returnModExpr -> (List.rev acc, returnModExpr) + in + loop [] modExpr + +let functorType modtype = + let rec process acc modtype = + match modtype with + | { + Parsetree.pmty_desc = Pmty_functor (lbl, argType, returnType); + pmty_attributes = attrs; + } -> + let arg = (attrs, lbl, argType) in + process (arg :: acc) returnType + | modType -> (List.rev acc, modType) + in + process [] modtype + +let funExpr expr = + let open Parsetree in + (* Turns (type t, type u, type z) into "type t u z" *) + let rec collectNewTypes acc returnExpr = + match returnExpr with + | {pexp_desc = Pexp_newtype (stringLoc, returnExpr); pexp_attributes = []} + -> + collectNewTypes (stringLoc :: acc) returnExpr + | returnExpr -> + let loc = + match (acc, List.rev acc) with + | _startLoc :: _, endLoc :: _ -> + {endLoc.loc with loc_end = endLoc.loc.loc_end} + | _ -> Location.none + in + let txt = + List.fold_right + (fun curr acc -> acc ^ " " ^ curr.Location.txt) + acc "type" + in + (Location.mkloc txt loc, returnExpr) + in + (* For simplicity reason Pexp_newtype gets converted to a Nolabel parameter, + * otherwise this function would need to return a variant: + * | NormalParamater(...) + * | NewType(...) + * This complicates printing with an extra variant/boxing/allocation for a code-path + * that is not often used. Lets just keep it simple for now *) + let rec collect attrsBefore acc expr = + match expr with + | { + pexp_desc = Pexp_fun (lbl, defaultExpr, pattern, returnExpr); + pexp_attributes = []; + } -> + let parameter = ([], lbl, defaultExpr, pattern) in + collect attrsBefore (parameter :: acc) returnExpr + | {pexp_desc = Pexp_newtype (stringLoc, rest); pexp_attributes = attrs} -> + let var, returnExpr = collectNewTypes [stringLoc] rest in + let parameter = + ( attrs, + Asttypes.Nolabel, + None, + Ast_helper.Pat.var ~loc:stringLoc.loc var ) + in + collect attrsBefore (parameter :: acc) returnExpr + | { + pexp_desc = Pexp_fun (lbl, defaultExpr, pattern, returnExpr); + pexp_attributes = [({txt = "bs"}, _)] as attrs; + } -> + let parameter = (attrs, lbl, defaultExpr, pattern) in + collect attrsBefore (parameter :: acc) returnExpr + | { + pexp_desc = + Pexp_fun + (((Labelled _ | Optional _) as lbl), defaultExpr, pattern, returnExpr); + pexp_attributes = attrs; + } -> + let parameter = (attrs, lbl, defaultExpr, pattern) in + collect attrsBefore (parameter :: acc) returnExpr + | expr -> (attrsBefore, List.rev acc, expr) + in + match expr with + | { + pexp_desc = Pexp_fun (Nolabel, _defaultExpr, _pattern, _returnExpr); + pexp_attributes = attrs; + } as expr -> + collect attrs [] {expr with pexp_attributes = []} + | expr -> collect [] [] expr + +let rec isBlockExpr expr = + let open Parsetree in + match expr.pexp_desc with + | Pexp_letmodule _ | Pexp_letexception _ | Pexp_let _ | Pexp_open _ + | Pexp_sequence _ -> + true + | Pexp_apply (callExpr, _) when isBlockExpr callExpr -> true + | Pexp_constraint (expr, _) when isBlockExpr expr -> true + | Pexp_field (expr, _) when isBlockExpr expr -> true + | Pexp_setfield (expr, _, _) when isBlockExpr expr -> true + | _ -> false + +let isIfThenElseExpr expr = + let open Parsetree in + match expr.pexp_desc with + | Pexp_ifthenelse _ -> true + | _ -> false + +type node = + | Case of Parsetree.case + | CoreType of 
Parsetree.core_type + | ExprArgument of Parsetree.expression + | Expression of Parsetree.expression + | ExprRecordRow of Longident.t Asttypes.loc * Parsetree.expression + | ExtensionConstructor of Parsetree.extension_constructor + | LabelDeclaration of Parsetree.label_declaration + | ModuleBinding of Parsetree.module_binding + | ModuleDeclaration of Parsetree.module_declaration + | ModuleExpr of Parsetree.module_expr + | ObjectField of Parsetree.object_field + | PackageConstraint of Longident.t Asttypes.loc * Parsetree.core_type + | Pattern of Parsetree.pattern + | PatternRecordRow of Longident.t Asttypes.loc * Parsetree.pattern + | RowField of Parsetree.row_field + | SignatureItem of Parsetree.signature_item + | StructureItem of Parsetree.structure_item + | TypeDeclaration of Parsetree.type_declaration + | ValueBinding of Parsetree.value_binding + +let getLoc node = + let open Parsetree in + match node with + | Case case -> + {case.pc_lhs.ppat_loc with loc_end = case.pc_rhs.pexp_loc.loc_end} + | CoreType ct -> ct.ptyp_loc + | ExprArgument expr -> ( + match expr.Parsetree.pexp_attributes with + | ({Location.txt = "ns.namedArgLoc"; loc}, _) :: _attrs -> + {loc with loc_end = expr.pexp_loc.loc_end} + | _ -> expr.pexp_loc) + | Expression e -> ( + match e.pexp_attributes with + | ({txt = "ns.braces"; loc}, _) :: _ -> loc + | _ -> e.pexp_loc) + | ExprRecordRow (li, e) -> {li.loc with loc_end = e.pexp_loc.loc_end} + | ExtensionConstructor ec -> ec.pext_loc + | LabelDeclaration ld -> ld.pld_loc + | ModuleBinding mb -> mb.pmb_loc + | ModuleDeclaration md -> md.pmd_loc + | ModuleExpr me -> me.pmod_loc + | ObjectField field -> ( + match field with + | Parsetree.Otag (lbl, _, typ) -> + {lbl.loc with loc_end = typ.ptyp_loc.loc_end} + | _ -> Location.none) + | PackageConstraint (li, te) -> {li.loc with loc_end = te.ptyp_loc.loc_end} + | Pattern p -> p.ppat_loc + | PatternRecordRow (li, p) -> {li.loc with loc_end = p.ppat_loc.loc_end} + | RowField rf -> ( + match rf with + | Parsetree.Rtag ({loc}, _, _, _) -> loc + | Rinherit {ptyp_loc} -> ptyp_loc) + | SignatureItem si -> si.psig_loc + | StructureItem si -> si.pstr_loc + | TypeDeclaration td -> td.ptype_loc + | ValueBinding vb -> vb.pvb_loc + +let rec walkStructure s t comments = + match s with + | _ when comments = [] -> () + | [] -> attach t.inside Location.none comments + | s -> walkList (s |> List.map (fun si -> StructureItem si)) t comments + +and walkStructureItem si t comments = + match si.Parsetree.pstr_desc with + | _ when comments = [] -> () + | Pstr_primitive valueDescription -> + walkValueDescription valueDescription t comments + | Pstr_open openDescription -> walkOpenDescription openDescription t comments + | Pstr_value (_, valueBindings) -> walkValueBindings valueBindings t comments + | Pstr_type (_, typeDeclarations) -> + walkTypeDeclarations typeDeclarations t comments + | Pstr_eval (expr, _) -> walkExpression expr t comments + | Pstr_module moduleBinding -> walkModuleBinding moduleBinding t comments + | Pstr_recmodule moduleBindings -> + walkList + (moduleBindings |> List.map (fun mb -> ModuleBinding mb)) + t comments + | Pstr_modtype modTypDecl -> walkModuleTypeDeclaration modTypDecl t comments + | Pstr_attribute attribute -> walkAttribute attribute t comments + | Pstr_extension (extension, _) -> walkExtension extension t comments + | Pstr_include includeDeclaration -> + walkIncludeDeclaration includeDeclaration t comments + | Pstr_exception extensionConstructor -> + walkExtensionConstructor extensionConstructor t comments + | 
Pstr_typext typeExtension -> walkTypeExtension typeExtension t comments + | Pstr_class_type _ | Pstr_class _ -> () + +and walkValueDescription vd t comments = + let leading, trailing = partitionLeadingTrailing comments vd.pval_name.loc in + attach t.leading vd.pval_name.loc leading; + let afterName, rest = partitionAdjacentTrailing vd.pval_name.loc trailing in + attach t.trailing vd.pval_name.loc afterName; + let before, inside, after = partitionByLoc rest vd.pval_type.ptyp_loc in + attach t.leading vd.pval_type.ptyp_loc before; + walkCoreType vd.pval_type t inside; + attach t.trailing vd.pval_type.ptyp_loc after + +and walkTypeExtension te t comments = + let leading, trailing = + partitionLeadingTrailing comments te.ptyext_path.loc + in + attach t.leading te.ptyext_path.loc leading; + let afterPath, rest = partitionAdjacentTrailing te.ptyext_path.loc trailing in + attach t.trailing te.ptyext_path.loc afterPath; + + (* type params *) + let rest = + match te.ptyext_params with + | [] -> rest + | typeParams -> + visitListButContinueWithRemainingComments + ~getLoc:(fun (typexpr, _variance) -> typexpr.Parsetree.ptyp_loc) + ~walkNode:walkTypeParam ~newlineDelimited:false typeParams t rest + in + walkList + (te.ptyext_constructors |> List.map (fun ec -> ExtensionConstructor ec)) + t rest + +and walkIncludeDeclaration inclDecl t comments = + let before, inside, after = + partitionByLoc comments inclDecl.pincl_mod.pmod_loc + in + attach t.leading inclDecl.pincl_mod.pmod_loc before; + walkModuleExpr inclDecl.pincl_mod t inside; + attach t.trailing inclDecl.pincl_mod.pmod_loc after + +and walkModuleTypeDeclaration mtd t comments = + let leading, trailing = partitionLeadingTrailing comments mtd.pmtd_name.loc in + attach t.leading mtd.pmtd_name.loc leading; + match mtd.pmtd_type with + | None -> attach t.trailing mtd.pmtd_name.loc trailing + | Some modType -> + let afterName, rest = + partitionAdjacentTrailing mtd.pmtd_name.loc trailing + in + attach t.trailing mtd.pmtd_name.loc afterName; + let before, inside, after = partitionByLoc rest modType.pmty_loc in + attach t.leading modType.pmty_loc before; + walkModType modType t inside; + attach t.trailing modType.pmty_loc after + +and walkModuleBinding mb t comments = + let leading, trailing = partitionLeadingTrailing comments mb.pmb_name.loc in + attach t.leading mb.pmb_name.loc leading; + let afterName, rest = partitionAdjacentTrailing mb.pmb_name.loc trailing in + attach t.trailing mb.pmb_name.loc afterName; + let leading, inside, trailing = partitionByLoc rest mb.pmb_expr.pmod_loc in + (match mb.pmb_expr.pmod_desc with + | Pmod_constraint _ -> + walkModuleExpr mb.pmb_expr t (List.concat [leading; inside]) + | _ -> + attach t.leading mb.pmb_expr.pmod_loc leading; + walkModuleExpr mb.pmb_expr t inside); + attach t.trailing mb.pmb_expr.pmod_loc trailing + +and walkSignature signature t comments = + match signature with + | _ when comments = [] -> () + | [] -> attach t.inside Location.none comments + | _s -> + walkList (signature |> List.map (fun si -> SignatureItem si)) t comments + +and walkSignatureItem (si : Parsetree.signature_item) t comments = + match si.psig_desc with + | _ when comments = [] -> () + | Psig_value valueDescription -> + walkValueDescription valueDescription t comments + | Psig_type (_, typeDeclarations) -> + walkTypeDeclarations typeDeclarations t comments + | Psig_typext typeExtension -> walkTypeExtension typeExtension t comments + | Psig_exception extensionConstructor -> + walkExtensionConstructor extensionConstructor t comments 
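+  (* the remaining signature items are walked much like their structure-item counterparts *)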
+ | Psig_module moduleDeclaration -> + walkModuleDeclaration moduleDeclaration t comments + | Psig_recmodule moduleDeclarations -> + walkList + (moduleDeclarations |> List.map (fun md -> ModuleDeclaration md)) + t comments + | Psig_modtype moduleTypeDeclaration -> + walkModuleTypeDeclaration moduleTypeDeclaration t comments + | Psig_open openDescription -> walkOpenDescription openDescription t comments + | Psig_include includeDescription -> + walkIncludeDescription includeDescription t comments + | Psig_attribute attribute -> walkAttribute attribute t comments + | Psig_extension (extension, _) -> walkExtension extension t comments + | Psig_class _ | Psig_class_type _ -> () + +and walkIncludeDescription id t comments = + let before, inside, after = partitionByLoc comments id.pincl_mod.pmty_loc in + attach t.leading id.pincl_mod.pmty_loc before; + walkModType id.pincl_mod t inside; + attach t.trailing id.pincl_mod.pmty_loc after + +and walkModuleDeclaration md t comments = + let leading, trailing = partitionLeadingTrailing comments md.pmd_name.loc in + attach t.leading md.pmd_name.loc leading; + let afterName, rest = partitionAdjacentTrailing md.pmd_name.loc trailing in + attach t.trailing md.pmd_name.loc afterName; + let leading, inside, trailing = partitionByLoc rest md.pmd_type.pmty_loc in + attach t.leading md.pmd_type.pmty_loc leading; + walkModType md.pmd_type t inside; + attach t.trailing md.pmd_type.pmty_loc trailing + +and walkNode node tbl comments = + match node with + | Case c -> walkCase c tbl comments + | CoreType ct -> walkCoreType ct tbl comments + | ExprArgument ea -> walkExprArgument ea tbl comments + | Expression e -> walkExpression e tbl comments + | ExprRecordRow (ri, e) -> walkExprRecordRow (ri, e) tbl comments + | ExtensionConstructor ec -> walkExtensionConstructor ec tbl comments + | LabelDeclaration ld -> walkLabelDeclaration ld tbl comments + | ModuleBinding mb -> walkModuleBinding mb tbl comments + | ModuleDeclaration md -> walkModuleDeclaration md tbl comments + | ModuleExpr me -> walkModuleExpr me tbl comments + | ObjectField f -> walkObjectField f tbl comments + | PackageConstraint (li, te) -> walkPackageConstraint (li, te) tbl comments + | Pattern p -> walkPattern p tbl comments + | PatternRecordRow (li, p) -> walkPatternRecordRow (li, p) tbl comments + | RowField rf -> walkRowField rf tbl comments + | SignatureItem si -> walkSignatureItem si tbl comments + | StructureItem si -> walkStructureItem si tbl comments + | TypeDeclaration td -> walkTypeDeclaration td tbl comments + | ValueBinding vb -> walkValueBinding vb tbl comments + +and walkList : ?prevLoc:Location.t -> node list -> t -> Comment.t list -> unit = + fun ?prevLoc l t comments -> + match l with + | _ when comments = [] -> () + | [] -> ( + match prevLoc with + | Some loc -> attach t.trailing loc comments + | None -> ()) + | node :: rest -> + let currLoc = getLoc node in + let leading, inside, trailing = partitionByLoc comments currLoc in + (match prevLoc with + | None -> + (* first node, all leading comments attach here *) + attach t.leading currLoc leading + | Some prevLoc -> + (* Same line *) + if prevLoc.loc_end.pos_lnum == currLoc.loc_start.pos_lnum then ( + let afterPrev, beforeCurr = partitionAdjacentTrailing prevLoc leading in + attach t.trailing prevLoc afterPrev; + attach t.leading currLoc beforeCurr) + else + let onSameLineAsPrev, afterPrev = + partitionByOnSameLine prevLoc leading + in + attach t.trailing prevLoc onSameLineAsPrev; + let leading, _inside, _trailing = partitionByLoc afterPrev 
currLoc in + attach t.leading currLoc leading); + walkNode node t inside; + walkList ~prevLoc:currLoc rest t trailing + +(* The parsetree doesn't always contain location info about the opening or + * closing token of a "list-of-things". This routine visits the whole list, + * but returns any remaining comments that likely fall after the whole list. *) +and visitListButContinueWithRemainingComments : + 'node. + ?prevLoc:Location.t -> + newlineDelimited:bool -> + getLoc:('node -> Location.t) -> + walkNode:('node -> t -> Comment.t list -> unit) -> + 'node list -> + t -> + Comment.t list -> + Comment.t list = + fun ?prevLoc ~newlineDelimited ~getLoc ~walkNode l t comments -> + let open Location in + match l with + | _ when comments = [] -> [] + | [] -> ( + match prevLoc with + | Some loc -> + let afterPrev, rest = + if newlineDelimited then partitionByOnSameLine loc comments + else partitionAdjacentTrailing loc comments + in + attach t.trailing loc afterPrev; + rest + | None -> comments) + | node :: rest -> + let currLoc = getLoc node in + let leading, inside, trailing = partitionByLoc comments currLoc in + let () = + match prevLoc with + | None -> + (* first node, all leading comments attach here *) + attach t.leading currLoc leading; + () + | Some prevLoc -> + (* Same line *) + if prevLoc.loc_end.pos_lnum == currLoc.loc_start.pos_lnum then + let afterPrev, beforeCurr = + partitionAdjacentTrailing prevLoc leading + in + let () = attach t.trailing prevLoc afterPrev in + let () = attach t.leading currLoc beforeCurr in + () + else + let onSameLineAsPrev, afterPrev = + partitionByOnSameLine prevLoc leading + in + let () = attach t.trailing prevLoc onSameLineAsPrev in + let leading, _inside, _trailing = partitionByLoc afterPrev currLoc in + let () = attach t.leading currLoc leading in + () + in + walkNode node t inside; + visitListButContinueWithRemainingComments ~prevLoc:currLoc ~getLoc ~walkNode + ~newlineDelimited rest t trailing + +and walkValueBindings vbs t comments = + walkList (vbs |> List.map (fun vb -> ValueBinding vb)) t comments + +and walkOpenDescription openDescription t comments = + let loc = openDescription.popen_lid.loc in + let leading, trailing = partitionLeadingTrailing comments loc in + attach t.leading loc leading; + attach t.trailing loc trailing + +and walkTypeDeclarations typeDeclarations t comments = + walkList + (typeDeclarations |> List.map (fun td -> TypeDeclaration td)) + t comments + +and walkTypeParam (typexpr, _variance) t comments = + walkCoreType typexpr t comments + +and walkTypeDeclaration (td : Parsetree.type_declaration) t comments = + let beforeName, rest = partitionLeadingTrailing comments td.ptype_name.loc in + attach t.leading td.ptype_name.loc beforeName; + + let afterName, rest = partitionAdjacentTrailing td.ptype_name.loc rest in + attach t.trailing td.ptype_name.loc afterName; + + (* type params *) + let rest = + match td.ptype_params with + | [] -> rest + | typeParams -> + visitListButContinueWithRemainingComments + ~getLoc:(fun (typexpr, _variance) -> typexpr.Parsetree.ptyp_loc) + ~walkNode:walkTypeParam ~newlineDelimited:false typeParams t rest + in + + (* manifest: = typexpr *) + let rest = + match td.ptype_manifest with + | Some typexpr -> + let beforeTyp, insideTyp, afterTyp = + partitionByLoc rest typexpr.ptyp_loc + in + attach t.leading typexpr.ptyp_loc beforeTyp; + walkCoreType typexpr t insideTyp; + let afterTyp, rest = + partitionAdjacentTrailing typexpr.Parsetree.ptyp_loc afterTyp + in + attach t.trailing typexpr.ptyp_loc afterTyp; + rest 
+ | None -> rest + in + + let rest = + match td.ptype_kind with + | Ptype_abstract | Ptype_open -> rest + | Ptype_record labelDeclarations -> + let () = + if labelDeclarations = [] then attach t.inside td.ptype_loc rest + else + walkList + (labelDeclarations |> List.map (fun ld -> LabelDeclaration ld)) + t rest + in + [] + | Ptype_variant constructorDeclarations -> + walkConstructorDeclarations constructorDeclarations t rest + in + attach t.trailing td.ptype_loc rest + +and walkLabelDeclarations lds t comments = + visitListButContinueWithRemainingComments + ~getLoc:(fun ld -> ld.Parsetree.pld_loc) + ~walkNode:walkLabelDeclaration ~newlineDelimited:false lds t comments + +and walkLabelDeclaration ld t comments = + let beforeName, rest = partitionLeadingTrailing comments ld.pld_name.loc in + attach t.leading ld.pld_name.loc beforeName; + let afterName, rest = partitionAdjacentTrailing ld.pld_name.loc rest in + attach t.trailing ld.pld_name.loc afterName; + let beforeTyp, insideTyp, afterTyp = + partitionByLoc rest ld.pld_type.ptyp_loc + in + attach t.leading ld.pld_type.ptyp_loc beforeTyp; + walkCoreType ld.pld_type t insideTyp; + attach t.trailing ld.pld_type.ptyp_loc afterTyp + +and walkConstructorDeclarations cds t comments = + visitListButContinueWithRemainingComments + ~getLoc:(fun cd -> cd.Parsetree.pcd_loc) + ~walkNode:walkConstructorDeclaration ~newlineDelimited:false cds t comments + +and walkConstructorDeclaration cd t comments = + let beforeName, rest = partitionLeadingTrailing comments cd.pcd_name.loc in + attach t.leading cd.pcd_name.loc beforeName; + let afterName, rest = partitionAdjacentTrailing cd.pcd_name.loc rest in + attach t.trailing cd.pcd_name.loc afterName; + let rest = walkConstructorArguments cd.pcd_args t rest in + + let rest = + match cd.pcd_res with + | Some typexpr -> + let beforeTyp, insideTyp, afterTyp = + partitionByLoc rest typexpr.ptyp_loc + in + attach t.leading typexpr.ptyp_loc beforeTyp; + walkCoreType typexpr t insideTyp; + let afterTyp, rest = + partitionAdjacentTrailing typexpr.Parsetree.ptyp_loc afterTyp + in + attach t.trailing typexpr.ptyp_loc afterTyp; + rest + | None -> rest + in + attach t.trailing cd.pcd_loc rest + +and walkConstructorArguments args t comments = + match args with + | Pcstr_tuple typexprs -> + visitListButContinueWithRemainingComments + ~getLoc:(fun n -> n.Parsetree.ptyp_loc) + ~walkNode:walkCoreType ~newlineDelimited:false typexprs t comments + | Pcstr_record labelDeclarations -> + walkLabelDeclarations labelDeclarations t comments + +and walkValueBinding vb t comments = + let open Location in + let vb = + let open Parsetree in + match (vb.pvb_pat, vb.pvb_expr) with + | ( {ppat_desc = Ppat_constraint (pat, {ptyp_desc = Ptyp_poly ([], t)})}, + {pexp_desc = Pexp_constraint (expr, _typ)} ) -> + { + vb with + pvb_pat = + Ast_helper.Pat.constraint_ + ~loc:{pat.ppat_loc with loc_end = t.Parsetree.ptyp_loc.loc_end} + pat t; + pvb_expr = expr; + } + | ( {ppat_desc = Ppat_constraint (pat, {ptyp_desc = Ptyp_poly (_ :: _, t)})}, + {pexp_desc = Pexp_fun _} ) -> + { + vb with + pvb_pat = + { + vb.pvb_pat with + ppat_loc = {pat.ppat_loc with loc_end = t.ptyp_loc.loc_end}; + }; + } + | ( ({ + ppat_desc = + Ppat_constraint (pat, ({ptyp_desc = Ptyp_poly (_ :: _, t)} as typ)); + } as constrainedPattern), + {pexp_desc = Pexp_newtype (_, {pexp_desc = Pexp_constraint (expr, _)})} + ) -> + (* + * The location of the Ptyp_poly on the pattern is the whole thing. + * let x: + * type t. 
(int, int) => int = + * (a, b) => { + * // comment + * a + b + * } + *) + { + vb with + pvb_pat = + { + constrainedPattern with + ppat_desc = Ppat_constraint (pat, typ); + ppat_loc = + {constrainedPattern.ppat_loc with loc_end = t.ptyp_loc.loc_end}; + }; + pvb_expr = expr; + } + | _ -> vb + in + let patternLoc = vb.Parsetree.pvb_pat.ppat_loc in + let exprLoc = vb.Parsetree.pvb_expr.pexp_loc in + let expr = vb.pvb_expr in + + let leading, inside, trailing = partitionByLoc comments patternLoc in + + (* everything before start of pattern can only be leading on the pattern: + * let |* before *| a = 1 *) + attach t.leading patternLoc leading; + walkPattern vb.Parsetree.pvb_pat t inside; + let afterPat, surroundingExpr = + partitionAdjacentTrailing patternLoc trailing + in + attach t.trailing patternLoc afterPat; + let beforeExpr, insideExpr, afterExpr = + partitionByLoc surroundingExpr exprLoc + in + if isBlockExpr expr then + walkExpression expr t (List.concat [beforeExpr; insideExpr; afterExpr]) + else ( + attach t.leading exprLoc beforeExpr; + walkExpression expr t insideExpr; + attach t.trailing exprLoc afterExpr) + +and walkExpression expr t comments = + let open Location in + match expr.Parsetree.pexp_desc with + | _ when comments = [] -> () + | Pexp_constant _ -> + let leading, trailing = partitionLeadingTrailing comments expr.pexp_loc in + attach t.leading expr.pexp_loc leading; + attach t.trailing expr.pexp_loc trailing + | Pexp_ident longident -> + let leading, trailing = partitionLeadingTrailing comments longident.loc in + attach t.leading longident.loc leading; + attach t.trailing longident.loc trailing + | Pexp_let + ( _recFlag, + valueBindings, + {pexp_desc = Pexp_construct ({txt = Longident.Lident "()"}, None)} ) -> + walkValueBindings valueBindings t comments + | Pexp_let (_recFlag, valueBindings, expr2) -> + let comments = + visitListButContinueWithRemainingComments + ~getLoc:(fun n -> + if n.Parsetree.pvb_pat.ppat_loc.loc_ghost then n.pvb_expr.pexp_loc + else n.Parsetree.pvb_loc) + ~walkNode:walkValueBinding ~newlineDelimited:true valueBindings t + comments + in + if isBlockExpr expr2 then walkExpression expr2 t comments + else + let leading, inside, trailing = partitionByLoc comments expr2.pexp_loc in + attach t.leading expr2.pexp_loc leading; + walkExpression expr2 t inside; + attach t.trailing expr2.pexp_loc trailing + | Pexp_sequence (expr1, expr2) -> + let leading, inside, trailing = partitionByLoc comments expr1.pexp_loc in + let comments = + if isBlockExpr expr1 then ( + let afterExpr, comments = + partitionByOnSameLine expr1.pexp_loc trailing + in + walkExpression expr1 t (List.concat [leading; inside; afterExpr]); + comments) + else ( + attach t.leading expr1.pexp_loc leading; + walkExpression expr1 t inside; + let afterExpr, comments = + partitionByOnSameLine expr1.pexp_loc trailing + in + attach t.trailing expr1.pexp_loc afterExpr; + comments) + in + if isBlockExpr expr2 then walkExpression expr2 t comments + else + let leading, inside, trailing = partitionByLoc comments expr2.pexp_loc in + attach t.leading expr2.pexp_loc leading; + walkExpression expr2 t inside; + attach t.trailing expr2.pexp_loc trailing + | Pexp_open (_override, longident, expr2) -> + let leading, comments = partitionLeadingTrailing comments expr.pexp_loc in + attach t.leading + {expr.pexp_loc with loc_end = longident.loc.loc_end} + leading; + let leading, trailing = partitionLeadingTrailing comments longident.loc in + attach t.leading longident.loc leading; + let afterLongident, rest = 
partitionByOnSameLine longident.loc trailing in + attach t.trailing longident.loc afterLongident; + if isBlockExpr expr2 then walkExpression expr2 t rest + else + let leading, inside, trailing = partitionByLoc rest expr2.pexp_loc in + attach t.leading expr2.pexp_loc leading; + walkExpression expr2 t inside; + attach t.trailing expr2.pexp_loc trailing + | Pexp_extension + ( {txt = "bs.obj" | "obj"}, + PStr [{pstr_desc = Pstr_eval ({pexp_desc = Pexp_record (rows, _)}, [])}] + ) -> + walkList + (rows |> List.map (fun (li, e) -> ExprRecordRow (li, e))) + t comments + | Pexp_extension extension -> walkExtension extension t comments + | Pexp_letexception (extensionConstructor, expr2) -> + let leading, comments = partitionLeadingTrailing comments expr.pexp_loc in + attach t.leading + {expr.pexp_loc with loc_end = extensionConstructor.pext_loc.loc_end} + leading; + let leading, inside, trailing = + partitionByLoc comments extensionConstructor.pext_loc + in + attach t.leading extensionConstructor.pext_loc leading; + walkExtensionConstructor extensionConstructor t inside; + let afterExtConstr, rest = + partitionByOnSameLine extensionConstructor.pext_loc trailing + in + attach t.trailing extensionConstructor.pext_loc afterExtConstr; + if isBlockExpr expr2 then walkExpression expr2 t rest + else + let leading, inside, trailing = partitionByLoc rest expr2.pexp_loc in + attach t.leading expr2.pexp_loc leading; + walkExpression expr2 t inside; + attach t.trailing expr2.pexp_loc trailing + | Pexp_letmodule (stringLoc, modExpr, expr2) -> + let leading, comments = partitionLeadingTrailing comments expr.pexp_loc in + attach t.leading + {expr.pexp_loc with loc_end = modExpr.pmod_loc.loc_end} + leading; + let leading, trailing = partitionLeadingTrailing comments stringLoc.loc in + attach t.leading stringLoc.loc leading; + let afterString, rest = partitionAdjacentTrailing stringLoc.loc trailing in + attach t.trailing stringLoc.loc afterString; + let beforeModExpr, insideModExpr, afterModExpr = + partitionByLoc rest modExpr.pmod_loc + in + attach t.leading modExpr.pmod_loc beforeModExpr; + walkModuleExpr modExpr t insideModExpr; + let afterModExpr, rest = + partitionByOnSameLine modExpr.pmod_loc afterModExpr + in + attach t.trailing modExpr.pmod_loc afterModExpr; + if isBlockExpr expr2 then walkExpression expr2 t rest + else + let leading, inside, trailing = partitionByLoc rest expr2.pexp_loc in + attach t.leading expr2.pexp_loc leading; + walkExpression expr2 t inside; + attach t.trailing expr2.pexp_loc trailing + | Pexp_assert expr | Pexp_lazy expr -> + if isBlockExpr expr then walkExpression expr t comments + else + let leading, inside, trailing = partitionByLoc comments expr.pexp_loc in + attach t.leading expr.pexp_loc leading; + walkExpression expr t inside; + attach t.trailing expr.pexp_loc trailing + | Pexp_coerce (expr, optTypexpr, typexpr) -> + let leading, inside, trailing = partitionByLoc comments expr.pexp_loc in + attach t.leading expr.pexp_loc leading; + walkExpression expr t inside; + let afterExpr, rest = partitionAdjacentTrailing expr.pexp_loc trailing in + attach t.trailing expr.pexp_loc afterExpr; + let rest = + match optTypexpr with + | Some typexpr -> + let leading, inside, trailing = + partitionByLoc comments typexpr.ptyp_loc + in + attach t.leading typexpr.ptyp_loc leading; + walkCoreType typexpr t inside; + let afterTyp, rest = + partitionAdjacentTrailing typexpr.ptyp_loc trailing + in + attach t.trailing typexpr.ptyp_loc afterTyp; + rest + | None -> rest + in + let leading, inside, 
trailing = partitionByLoc rest typexpr.ptyp_loc in + attach t.leading typexpr.ptyp_loc leading; + walkCoreType typexpr t inside; + attach t.trailing typexpr.ptyp_loc trailing + | Pexp_constraint (expr, typexpr) -> + let leading, inside, trailing = partitionByLoc comments expr.pexp_loc in + attach t.leading expr.pexp_loc leading; + walkExpression expr t inside; + let afterExpr, rest = partitionAdjacentTrailing expr.pexp_loc trailing in + attach t.trailing expr.pexp_loc afterExpr; + let leading, inside, trailing = partitionByLoc rest typexpr.ptyp_loc in + attach t.leading typexpr.ptyp_loc leading; + walkCoreType typexpr t inside; + attach t.trailing typexpr.ptyp_loc trailing + | Pexp_tuple [] + | Pexp_array [] + | Pexp_construct ({txt = Longident.Lident "[]"}, _) -> + attach t.inside expr.pexp_loc comments + | Pexp_construct ({txt = Longident.Lident "::"}, _) -> + walkList + (collectListExprs [] expr |> List.map (fun e -> Expression e)) + t comments + | Pexp_construct (longident, args) -> ( + let leading, trailing = partitionLeadingTrailing comments longident.loc in + attach t.leading longident.loc leading; + match args with + | Some expr -> + let afterLongident, rest = + partitionAdjacentTrailing longident.loc trailing + in + attach t.trailing longident.loc afterLongident; + walkExpression expr t rest + | None -> attach t.trailing longident.loc trailing) + | Pexp_variant (_label, None) -> () + | Pexp_variant (_label, Some expr) -> walkExpression expr t comments + | Pexp_array exprs | Pexp_tuple exprs -> + walkList (exprs |> List.map (fun e -> Expression e)) t comments + | Pexp_record (rows, spreadExpr) -> + if rows = [] then attach t.inside expr.pexp_loc comments + else + let comments = + match spreadExpr with + | None -> comments + | Some expr -> + let leading, inside, trailing = + partitionByLoc comments expr.pexp_loc + in + attach t.leading expr.pexp_loc leading; + walkExpression expr t inside; + let afterExpr, rest = + partitionAdjacentTrailing expr.pexp_loc trailing + in + attach t.trailing expr.pexp_loc afterExpr; + rest + in + walkList + (rows |> List.map (fun (li, e) -> ExprRecordRow (li, e))) + t comments + | Pexp_field (expr, longident) -> + let leading, inside, trailing = partitionByLoc comments expr.pexp_loc in + let trailing = + if isBlockExpr expr then ( + let afterExpr, rest = + partitionAdjacentTrailing expr.pexp_loc trailing + in + walkExpression expr t (List.concat [leading; inside; afterExpr]); + rest) + else ( + attach t.leading expr.pexp_loc leading; + walkExpression expr t inside; + trailing) + in + let afterExpr, rest = partitionAdjacentTrailing expr.pexp_loc trailing in + attach t.trailing expr.pexp_loc afterExpr; + let leading, trailing = partitionLeadingTrailing rest longident.loc in + attach t.leading longident.loc leading; + attach t.trailing longident.loc trailing + | Pexp_setfield (expr1, longident, expr2) -> + let leading, inside, trailing = partitionByLoc comments expr1.pexp_loc in + let rest = + if isBlockExpr expr1 then ( + let afterExpr, rest = + partitionAdjacentTrailing expr1.pexp_loc trailing + in + walkExpression expr1 t (List.concat [leading; inside; afterExpr]); + rest) + else + let afterExpr, rest = + partitionAdjacentTrailing expr1.pexp_loc trailing + in + attach t.leading expr1.pexp_loc leading; + walkExpression expr1 t inside; + attach t.trailing expr1.pexp_loc afterExpr; + rest + in + let beforeLongident, afterLongident = + partitionLeadingTrailing rest longident.loc + in + attach t.leading longident.loc beforeLongident; + let afterLongident, 
rest = + partitionAdjacentTrailing longident.loc afterLongident + in + attach t.trailing longident.loc afterLongident; + if isBlockExpr expr2 then walkExpression expr2 t rest + else + let leading, inside, trailing = partitionByLoc rest expr2.pexp_loc in + attach t.leading expr2.pexp_loc leading; + walkExpression expr2 t inside; + attach t.trailing expr2.pexp_loc trailing + | Pexp_ifthenelse (ifExpr, thenExpr, elseExpr) -> ( + let leading, rest = partitionLeadingTrailing comments expr.pexp_loc in + attach t.leading expr.pexp_loc leading; + let leading, inside, trailing = partitionByLoc rest ifExpr.pexp_loc in + let comments = + if isBlockExpr ifExpr then ( + let afterExpr, comments = + partitionAdjacentTrailing ifExpr.pexp_loc trailing + in + walkExpression ifExpr t (List.concat [leading; inside; afterExpr]); + comments) + else ( + attach t.leading ifExpr.pexp_loc leading; + walkExpression ifExpr t inside; + let afterExpr, comments = + partitionAdjacentTrailing ifExpr.pexp_loc trailing + in + attach t.trailing ifExpr.pexp_loc afterExpr; + comments) + in + let leading, inside, trailing = partitionByLoc comments thenExpr.pexp_loc in + let comments = + if isBlockExpr thenExpr then ( + let afterExpr, trailing = + partitionAdjacentTrailing thenExpr.pexp_loc trailing + in + walkExpression thenExpr t (List.concat [leading; inside; afterExpr]); + trailing) + else ( + attach t.leading thenExpr.pexp_loc leading; + walkExpression thenExpr t inside; + let afterExpr, comments = + partitionAdjacentTrailing thenExpr.pexp_loc trailing + in + attach t.trailing thenExpr.pexp_loc afterExpr; + comments) + in + match elseExpr with + | None -> () + | Some expr -> + if isBlockExpr expr || isIfThenElseExpr expr then + walkExpression expr t comments + else + let leading, inside, trailing = partitionByLoc comments expr.pexp_loc in + attach t.leading expr.pexp_loc leading; + walkExpression expr t inside; + attach t.trailing expr.pexp_loc trailing) + | Pexp_while (expr1, expr2) -> + let leading, inside, trailing = partitionByLoc comments expr1.pexp_loc in + let rest = + if isBlockExpr expr1 then ( + let afterExpr, rest = + partitionAdjacentTrailing expr1.pexp_loc trailing + in + walkExpression expr1 t (List.concat [leading; inside; afterExpr]); + rest) + else ( + attach t.leading expr1.pexp_loc leading; + walkExpression expr1 t inside; + let afterExpr, rest = + partitionAdjacentTrailing expr1.pexp_loc trailing + in + attach t.trailing expr1.pexp_loc afterExpr; + rest) + in + if isBlockExpr expr2 then walkExpression expr2 t rest + else + let leading, inside, trailing = partitionByLoc rest expr2.pexp_loc in + attach t.leading expr2.pexp_loc leading; + walkExpression expr2 t inside; + attach t.trailing expr2.pexp_loc trailing + | Pexp_for (pat, expr1, expr2, _, expr3) -> + let leading, inside, trailing = partitionByLoc comments pat.ppat_loc in + attach t.leading pat.ppat_loc leading; + walkPattern pat t inside; + let afterPat, rest = partitionAdjacentTrailing pat.ppat_loc trailing in + attach t.trailing pat.ppat_loc afterPat; + let leading, inside, trailing = partitionByLoc rest expr1.pexp_loc in + attach t.leading expr1.pexp_loc leading; + walkExpression expr1 t inside; + let afterExpr, rest = partitionAdjacentTrailing expr1.pexp_loc trailing in + attach t.trailing expr1.pexp_loc afterExpr; + let leading, inside, trailing = partitionByLoc rest expr2.pexp_loc in + attach t.leading expr2.pexp_loc leading; + walkExpression expr2 t inside; + let afterExpr, rest = partitionAdjacentTrailing expr2.pexp_loc trailing in + attach 
t.trailing expr2.pexp_loc afterExpr; + if isBlockExpr expr3 then walkExpression expr3 t rest + else + let leading, inside, trailing = partitionByLoc rest expr3.pexp_loc in + attach t.leading expr3.pexp_loc leading; + walkExpression expr3 t inside; + attach t.trailing expr3.pexp_loc trailing + | Pexp_pack modExpr -> + let before, inside, after = partitionByLoc comments modExpr.pmod_loc in + attach t.leading modExpr.pmod_loc before; + walkModuleExpr modExpr t inside; + attach t.trailing modExpr.pmod_loc after + | Pexp_match (expr1, [case; elseBranch]) + when Res_parsetree_viewer.hasIfLetAttribute expr.pexp_attributes -> + let before, inside, after = partitionByLoc comments case.pc_lhs.ppat_loc in + attach t.leading case.pc_lhs.ppat_loc before; + walkPattern case.pc_lhs t inside; + let afterPat, rest = partitionAdjacentTrailing case.pc_lhs.ppat_loc after in + attach t.trailing case.pc_lhs.ppat_loc afterPat; + let before, inside, after = partitionByLoc rest expr1.pexp_loc in + attach t.leading expr1.pexp_loc before; + walkExpression expr1 t inside; + let afterExpr, rest = partitionAdjacentTrailing expr1.pexp_loc after in + attach t.trailing expr1.pexp_loc afterExpr; + let before, inside, after = partitionByLoc rest case.pc_rhs.pexp_loc in + let after = + if isBlockExpr case.pc_rhs then ( + let afterExpr, rest = + partitionAdjacentTrailing case.pc_rhs.pexp_loc after + in + walkExpression case.pc_rhs t (List.concat [before; inside; afterExpr]); + rest) + else ( + attach t.leading case.pc_rhs.pexp_loc before; + walkExpression case.pc_rhs t inside; + after) + in + let afterExpr, rest = + partitionAdjacentTrailing case.pc_rhs.pexp_loc after + in + attach t.trailing case.pc_rhs.pexp_loc afterExpr; + let before, inside, after = + partitionByLoc rest elseBranch.pc_rhs.pexp_loc + in + let after = + if isBlockExpr elseBranch.pc_rhs then ( + let afterExpr, rest = + partitionAdjacentTrailing elseBranch.pc_rhs.pexp_loc after + in + walkExpression elseBranch.pc_rhs t + (List.concat [before; inside; afterExpr]); + rest) + else ( + attach t.leading elseBranch.pc_rhs.pexp_loc before; + walkExpression elseBranch.pc_rhs t inside; + after) + in + attach t.trailing elseBranch.pc_rhs.pexp_loc after + | Pexp_match (expr, cases) | Pexp_try (expr, cases) -> + let before, inside, after = partitionByLoc comments expr.pexp_loc in + let after = + if isBlockExpr expr then ( + let afterExpr, rest = partitionAdjacentTrailing expr.pexp_loc after in + walkExpression expr t (List.concat [before; inside; afterExpr]); + rest) + else ( + attach t.leading expr.pexp_loc before; + walkExpression expr t inside; + after) + in + let afterExpr, rest = partitionAdjacentTrailing expr.pexp_loc after in + attach t.trailing expr.pexp_loc afterExpr; + walkList (cases |> List.map (fun case -> Case case)) t rest + (* unary expression: todo use parsetreeviewer *) + | Pexp_apply + ( { + pexp_desc = + Pexp_ident + { + txt = + Longident.Lident ("~+" | "~+." | "~-" | "~-." | "not" | "!"); + }; + }, + [(Nolabel, argExpr)] ) -> + let before, inside, after = partitionByLoc comments argExpr.pexp_loc in + attach t.leading argExpr.pexp_loc before; + walkExpression argExpr t inside; + attach t.trailing argExpr.pexp_loc after + (* binary expression *) + | Pexp_apply + ( { + pexp_desc = + Pexp_ident + { + txt = + Longident.Lident + ( ":=" | "||" | "&&" | "=" | "==" | "<" | ">" | "!=" | "!==" + | "<=" | ">=" | "|>" | "+" | "+." | "-" | "-." | "++" | "^" + | "*" | "*." | "/" | "/." | "**" | "|." 
| "<>" ); + }; + }, + [(Nolabel, operand1); (Nolabel, operand2)] ) -> + let before, inside, after = partitionByLoc comments operand1.pexp_loc in + attach t.leading operand1.pexp_loc before; + walkExpression operand1 t inside; + let afterOperand1, rest = + partitionAdjacentTrailing operand1.pexp_loc after + in + attach t.trailing operand1.pexp_loc afterOperand1; + let before, inside, after = partitionByLoc rest operand2.pexp_loc in + attach t.leading operand2.pexp_loc before; + walkExpression operand2 t inside; + (* (List.concat [inside; after]); *) + attach t.trailing operand2.pexp_loc after + | Pexp_apply (callExpr, arguments) -> + let before, inside, after = partitionByLoc comments callExpr.pexp_loc in + let after = + if isBlockExpr callExpr then ( + let afterExpr, rest = + partitionAdjacentTrailing callExpr.pexp_loc after + in + walkExpression callExpr t (List.concat [before; inside; afterExpr]); + rest) + else ( + attach t.leading callExpr.pexp_loc before; + walkExpression callExpr t inside; + after) + in + if ParsetreeViewer.isJsxExpression expr then ( + let props = + arguments + |> List.filter (fun (label, _) -> + match label with + | Asttypes.Labelled "children" -> false + | Asttypes.Nolabel -> false + | _ -> true) + in + let maybeChildren = + arguments + |> List.find_opt (fun (label, _) -> + label = Asttypes.Labelled "children") + in + match maybeChildren with + (* There is no need to deal with this situation as the children cannot be NONE *) + | None -> () + | Some (_, children) -> + let leading, inside, _ = partitionByLoc after children.pexp_loc in + if props = [] then + (* All comments inside a tag are trailing comments of the tag if there are no props +
+ *) + let afterExpr, _ = + partitionAdjacentTrailing callExpr.pexp_loc after + in + attach t.trailing callExpr.pexp_loc afterExpr + else + walkList (props |> List.map (fun (_, e) -> ExprArgument e)) t leading; + walkExpression children t inside) + else + let afterExpr, rest = partitionAdjacentTrailing callExpr.pexp_loc after in + attach t.trailing callExpr.pexp_loc afterExpr; + walkList (arguments |> List.map (fun (_, e) -> ExprArgument e)) t rest + | Pexp_fun (_, _, _, _) | Pexp_newtype _ -> ( + let _, parameters, returnExpr = funExpr expr in + let comments = + visitListButContinueWithRemainingComments ~newlineDelimited:false + ~walkNode:walkExprPararameter + ~getLoc:(fun (_attrs, _argLbl, exprOpt, pattern) -> + let open Parsetree in + let startPos = + match pattern.ppat_attributes with + | ({Location.txt = "ns.namedArgLoc"; loc}, _) :: _attrs -> + loc.loc_start + | _ -> pattern.ppat_loc.loc_start + in + match exprOpt with + | None -> {pattern.ppat_loc with loc_start = startPos} + | Some expr -> + { + pattern.ppat_loc with + loc_start = startPos; + loc_end = expr.pexp_loc.loc_end; + }) + parameters t comments + in + match returnExpr.pexp_desc with + | Pexp_constraint (expr, typ) + when expr.pexp_loc.loc_start.pos_cnum >= typ.ptyp_loc.loc_end.pos_cnum -> + let leading, inside, trailing = partitionByLoc comments typ.ptyp_loc in + attach t.leading typ.ptyp_loc leading; + walkCoreType typ t inside; + let afterTyp, comments = + partitionAdjacentTrailing typ.ptyp_loc trailing + in + attach t.trailing typ.ptyp_loc afterTyp; + if isBlockExpr expr then walkExpression expr t comments + else + let leading, inside, trailing = partitionByLoc comments expr.pexp_loc in + attach t.leading expr.pexp_loc leading; + walkExpression expr t inside; + attach t.trailing expr.pexp_loc trailing + | _ -> + if isBlockExpr returnExpr then walkExpression returnExpr t comments + else + let leading, inside, trailing = + partitionByLoc comments returnExpr.pexp_loc + in + attach t.leading returnExpr.pexp_loc leading; + walkExpression returnExpr t inside; + attach t.trailing returnExpr.pexp_loc trailing) + | _ -> () + +and walkExprPararameter (_attrs, _argLbl, exprOpt, pattern) t comments = + let leading, inside, trailing = partitionByLoc comments pattern.ppat_loc in + attach t.leading pattern.ppat_loc leading; + walkPattern pattern t inside; + match exprOpt with + | Some expr -> + let _afterPat, rest = partitionAdjacentTrailing pattern.ppat_loc trailing in + attach t.trailing pattern.ppat_loc trailing; + if isBlockExpr expr then walkExpression expr t rest + else + let leading, inside, trailing = partitionByLoc rest expr.pexp_loc in + attach t.leading expr.pexp_loc leading; + walkExpression expr t inside; + attach t.trailing expr.pexp_loc trailing + | None -> attach t.trailing pattern.ppat_loc trailing + +and walkExprArgument expr t comments = + match expr.Parsetree.pexp_attributes with + | ({Location.txt = "ns.namedArgLoc"; loc}, _) :: _attrs -> + let leading, trailing = partitionLeadingTrailing comments loc in + attach t.leading loc leading; + let afterLabel, rest = partitionAdjacentTrailing loc trailing in + attach t.trailing loc afterLabel; + let before, inside, after = partitionByLoc rest expr.pexp_loc in + attach t.leading expr.pexp_loc before; + walkExpression expr t inside; + attach t.trailing expr.pexp_loc after + | _ -> + let before, inside, after = partitionByLoc comments expr.pexp_loc in + attach t.leading expr.pexp_loc before; + walkExpression expr t inside; + attach t.trailing expr.pexp_loc after + +and 
walkCase (case : Parsetree.case) t comments = + let before, inside, after = partitionByLoc comments case.pc_lhs.ppat_loc in + (* cases don't have a location on their own, leading comments should go + * after the bar on the pattern *) + walkPattern case.pc_lhs t (List.concat [before; inside]); + let afterPat, rest = partitionAdjacentTrailing case.pc_lhs.ppat_loc after in + attach t.trailing case.pc_lhs.ppat_loc afterPat; + let comments = + match case.pc_guard with + | Some expr -> + let before, inside, after = partitionByLoc rest expr.pexp_loc in + let afterExpr, rest = partitionAdjacentTrailing expr.pexp_loc after in + if isBlockExpr expr then + walkExpression expr t (List.concat [before; inside; afterExpr]) + else ( + attach t.leading expr.pexp_loc before; + walkExpression expr t inside; + attach t.trailing expr.pexp_loc afterExpr); + rest + | None -> rest + in + if isBlockExpr case.pc_rhs then walkExpression case.pc_rhs t comments + else + let before, inside, after = partitionByLoc comments case.pc_rhs.pexp_loc in + attach t.leading case.pc_rhs.pexp_loc before; + walkExpression case.pc_rhs t inside; + attach t.trailing case.pc_rhs.pexp_loc after + +and walkExprRecordRow (longident, expr) t comments = + let beforeLongident, afterLongident = + partitionLeadingTrailing comments longident.loc + in + attach t.leading longident.loc beforeLongident; + let afterLongident, rest = + partitionAdjacentTrailing longident.loc afterLongident + in + attach t.trailing longident.loc afterLongident; + let leading, inside, trailing = partitionByLoc rest expr.pexp_loc in + attach t.leading expr.pexp_loc leading; + walkExpression expr t inside; + attach t.trailing expr.pexp_loc trailing + +and walkExtensionConstructor extConstr t comments = + let leading, trailing = + partitionLeadingTrailing comments extConstr.pext_name.loc + in + attach t.leading extConstr.pext_name.loc leading; + let afterName, rest = + partitionAdjacentTrailing extConstr.pext_name.loc trailing + in + attach t.trailing extConstr.pext_name.loc afterName; + walkExtensionConstructorKind extConstr.pext_kind t rest + +and walkExtensionConstructorKind kind t comments = + match kind with + | Pext_rebind longident -> + let leading, trailing = partitionLeadingTrailing comments longident.loc in + attach t.leading longident.loc leading; + attach t.trailing longident.loc trailing + | Pext_decl (constructorArguments, maybeTypExpr) -> ( + let rest = walkConstructorArguments constructorArguments t comments in + match maybeTypExpr with + | None -> () + | Some typexpr -> + let before, inside, after = partitionByLoc rest typexpr.ptyp_loc in + attach t.leading typexpr.ptyp_loc before; + walkCoreType typexpr t inside; + attach t.trailing typexpr.ptyp_loc after) + +and walkModuleExpr modExpr t comments = + match modExpr.pmod_desc with + | Pmod_ident longident -> + let before, after = partitionLeadingTrailing comments longident.loc in + attach t.leading longident.loc before; + attach t.trailing longident.loc after + | Pmod_structure [] -> attach t.inside modExpr.pmod_loc comments + | Pmod_structure structure -> walkStructure structure t comments + | Pmod_extension extension -> walkExtension extension t comments + | Pmod_unpack expr -> + let before, inside, after = partitionByLoc comments expr.pexp_loc in + attach t.leading expr.pexp_loc before; + walkExpression expr t inside; + attach t.trailing expr.pexp_loc after + | Pmod_constraint (modexpr, modtype) -> + if modtype.pmty_loc.loc_start >= modexpr.pmod_loc.loc_end then ( + let before, inside, after = 
partitionByLoc comments modexpr.pmod_loc in + attach t.leading modexpr.pmod_loc before; + walkModuleExpr modexpr t inside; + let after, rest = partitionAdjacentTrailing modexpr.pmod_loc after in + attach t.trailing modexpr.pmod_loc after; + let before, inside, after = partitionByLoc rest modtype.pmty_loc in + attach t.leading modtype.pmty_loc before; + walkModType modtype t inside; + attach t.trailing modtype.pmty_loc after) + else + let before, inside, after = partitionByLoc comments modtype.pmty_loc in + attach t.leading modtype.pmty_loc before; + walkModType modtype t inside; + let after, rest = partitionAdjacentTrailing modtype.pmty_loc after in + attach t.trailing modtype.pmty_loc after; + let before, inside, after = partitionByLoc rest modexpr.pmod_loc in + attach t.leading modexpr.pmod_loc before; + walkModuleExpr modexpr t inside; + attach t.trailing modexpr.pmod_loc after + | Pmod_apply (_callModExpr, _argModExpr) -> + let modExprs = modExprApply modExpr in + walkList (modExprs |> List.map (fun me -> ModuleExpr me)) t comments + | Pmod_functor _ -> ( + let parameters, returnModExpr = modExprFunctor modExpr in + let comments = + visitListButContinueWithRemainingComments + ~getLoc:(fun (_, lbl, modTypeOption) -> + match modTypeOption with + | None -> lbl.Asttypes.loc + | Some modType -> + {lbl.loc with loc_end = modType.Parsetree.pmty_loc.loc_end}) + ~walkNode:walkModExprParameter ~newlineDelimited:false parameters t + comments + in + match returnModExpr.pmod_desc with + | Pmod_constraint (modExpr, modType) + when modType.pmty_loc.loc_end.pos_cnum + <= modExpr.pmod_loc.loc_start.pos_cnum -> + let before, inside, after = partitionByLoc comments modType.pmty_loc in + attach t.leading modType.pmty_loc before; + walkModType modType t inside; + let after, rest = partitionAdjacentTrailing modType.pmty_loc after in + attach t.trailing modType.pmty_loc after; + let before, inside, after = partitionByLoc rest modExpr.pmod_loc in + attach t.leading modExpr.pmod_loc before; + walkModuleExpr modExpr t inside; + attach t.trailing modExpr.pmod_loc after + | _ -> + let before, inside, after = + partitionByLoc comments returnModExpr.pmod_loc + in + attach t.leading returnModExpr.pmod_loc before; + walkModuleExpr returnModExpr t inside; + attach t.trailing returnModExpr.pmod_loc after) + +and walkModExprParameter parameter t comments = + let _attrs, lbl, modTypeOption = parameter in + let leading, trailing = partitionLeadingTrailing comments lbl.loc in + attach t.leading lbl.loc leading; + match modTypeOption with + | None -> attach t.trailing lbl.loc trailing + | Some modType -> + let afterLbl, rest = partitionAdjacentTrailing lbl.loc trailing in + attach t.trailing lbl.loc afterLbl; + let before, inside, after = partitionByLoc rest modType.pmty_loc in + attach t.leading modType.pmty_loc before; + walkModType modType t inside; + attach t.trailing modType.pmty_loc after + +and walkModType modType t comments = + match modType.pmty_desc with + | Pmty_ident longident | Pmty_alias longident -> + let leading, trailing = partitionLeadingTrailing comments longident.loc in + attach t.leading longident.loc leading; + attach t.trailing longident.loc trailing + | Pmty_signature [] -> attach t.inside modType.pmty_loc comments + | Pmty_signature signature -> walkSignature signature t comments + | Pmty_extension extension -> walkExtension extension t comments + | Pmty_typeof modExpr -> + let before, inside, after = partitionByLoc comments modExpr.pmod_loc in + attach t.leading modExpr.pmod_loc before; + 
walkModuleExpr modExpr t inside; + attach t.trailing modExpr.pmod_loc after + | Pmty_with (modType, _withConstraints) -> + let before, inside, after = partitionByLoc comments modType.pmty_loc in + attach t.leading modType.pmty_loc before; + walkModType modType t inside; + attach t.trailing modType.pmty_loc after + (* TODO: withConstraints*) + | Pmty_functor _ -> + let parameters, returnModType = functorType modType in + let comments = + visitListButContinueWithRemainingComments + ~getLoc:(fun (_, lbl, modTypeOption) -> + match modTypeOption with + | None -> lbl.Asttypes.loc + | Some modType -> + if lbl.txt = "_" then modType.Parsetree.pmty_loc + else {lbl.loc with loc_end = modType.Parsetree.pmty_loc.loc_end}) + ~walkNode:walkModTypeParameter ~newlineDelimited:false parameters t + comments + in + let before, inside, after = + partitionByLoc comments returnModType.pmty_loc + in + attach t.leading returnModType.pmty_loc before; + walkModType returnModType t inside; + attach t.trailing returnModType.pmty_loc after + +and walkModTypeParameter (_, lbl, modTypeOption) t comments = + let leading, trailing = partitionLeadingTrailing comments lbl.loc in + attach t.leading lbl.loc leading; + match modTypeOption with + | None -> attach t.trailing lbl.loc trailing + | Some modType -> + let afterLbl, rest = partitionAdjacentTrailing lbl.loc trailing in + attach t.trailing lbl.loc afterLbl; + let before, inside, after = partitionByLoc rest modType.pmty_loc in + attach t.leading modType.pmty_loc before; + walkModType modType t inside; + attach t.trailing modType.pmty_loc after + +and walkPattern pat t comments = + let open Location in + match pat.Parsetree.ppat_desc with + | _ when comments = [] -> () + | Ppat_alias (pat, alias) -> + let leading, inside, trailing = partitionByLoc comments pat.ppat_loc in + attach t.leading pat.ppat_loc leading; + walkPattern pat t inside; + let afterPat, rest = partitionAdjacentTrailing pat.ppat_loc trailing in + attach t.leading pat.ppat_loc leading; + attach t.trailing pat.ppat_loc afterPat; + let beforeAlias, afterAlias = partitionLeadingTrailing rest alias.loc in + attach t.leading alias.loc beforeAlias; + attach t.trailing alias.loc afterAlias + | Ppat_tuple [] + | Ppat_array [] + | Ppat_construct ({txt = Longident.Lident "()"}, _) + | Ppat_construct ({txt = Longident.Lident "[]"}, _) -> + attach t.inside pat.ppat_loc comments + | Ppat_array patterns -> + walkList (patterns |> List.map (fun p -> Pattern p)) t comments + | Ppat_tuple patterns -> + walkList (patterns |> List.map (fun p -> Pattern p)) t comments + | Ppat_construct ({txt = Longident.Lident "::"}, _) -> + walkList + (collectListPatterns [] pat |> List.map (fun p -> Pattern p)) + t comments + | Ppat_construct (constr, None) -> + let beforeConstr, afterConstr = + partitionLeadingTrailing comments constr.loc + in + attach t.leading constr.loc beforeConstr; + attach t.trailing constr.loc afterConstr + | Ppat_construct (constr, Some pat) -> + let leading, trailing = partitionLeadingTrailing comments constr.loc in + attach t.leading constr.loc leading; + let afterConstructor, rest = + partitionAdjacentTrailing constr.loc trailing + in + attach t.trailing constr.loc afterConstructor; + let leading, inside, trailing = partitionByLoc rest pat.ppat_loc in + attach t.leading pat.ppat_loc leading; + walkPattern pat t inside; + attach t.trailing pat.ppat_loc trailing + | Ppat_variant (_label, None) -> () + | Ppat_variant (_label, Some pat) -> walkPattern pat t comments + | Ppat_type _ -> () + | Ppat_record 
(recordRows, _) -> + walkList + (recordRows |> List.map (fun (li, p) -> PatternRecordRow (li, p))) + t comments + | Ppat_or _ -> + walkList + (Res_parsetree_viewer.collectOrPatternChain pat + |> List.map (fun pat -> Pattern pat)) + t comments + | Ppat_constraint (pattern, typ) -> + let beforePattern, insidePattern, afterPattern = + partitionByLoc comments pattern.ppat_loc + in + attach t.leading pattern.ppat_loc beforePattern; + walkPattern pattern t insidePattern; + let afterPattern, rest = + partitionAdjacentTrailing pattern.ppat_loc afterPattern + in + attach t.trailing pattern.ppat_loc afterPattern; + let beforeTyp, insideTyp, afterTyp = partitionByLoc rest typ.ptyp_loc in + attach t.leading typ.ptyp_loc beforeTyp; + walkCoreType typ t insideTyp; + attach t.trailing typ.ptyp_loc afterTyp + | Ppat_lazy pattern | Ppat_exception pattern -> + let leading, inside, trailing = partitionByLoc comments pattern.ppat_loc in + attach t.leading pattern.ppat_loc leading; + walkPattern pattern t inside; + attach t.trailing pattern.ppat_loc trailing + | Ppat_unpack stringLoc -> + let leading, trailing = partitionLeadingTrailing comments stringLoc.loc in + attach t.leading stringLoc.loc leading; + attach t.trailing stringLoc.loc trailing + | Ppat_extension extension -> walkExtension extension t comments + | _ -> () + +(* name: firstName *) +and walkPatternRecordRow row t comments = + match row with + (* punned {x}*) + | ( {Location.txt = Longident.Lident ident; loc = longidentLoc}, + {Parsetree.ppat_desc = Ppat_var {txt; _}} ) + when ident = txt -> + let beforeLbl, afterLbl = partitionLeadingTrailing comments longidentLoc in + attach t.leading longidentLoc beforeLbl; + attach t.trailing longidentLoc afterLbl + | longident, pattern -> + let beforeLbl, afterLbl = partitionLeadingTrailing comments longident.loc in + attach t.leading longident.loc beforeLbl; + let afterLbl, rest = partitionAdjacentTrailing longident.loc afterLbl in + attach t.trailing longident.loc afterLbl; + let leading, inside, trailing = partitionByLoc rest pattern.ppat_loc in + attach t.leading pattern.ppat_loc leading; + walkPattern pattern t inside; + attach t.trailing pattern.ppat_loc trailing + +and walkRowField (rowField : Parsetree.row_field) t comments = + match rowField with + | Parsetree.Rtag ({loc}, _, _, _) -> + let before, after = partitionLeadingTrailing comments loc in + attach t.leading loc before; + attach t.trailing loc after + | Rinherit _ -> () + +and walkCoreType typ t comments = + match typ.Parsetree.ptyp_desc with + | _ when comments = [] -> () + | Ptyp_tuple typexprs -> + walkList (typexprs |> List.map (fun ct -> CoreType ct)) t comments + | Ptyp_extension extension -> walkExtension extension t comments + | Ptyp_package packageType -> walkPackageType packageType t comments + | Ptyp_alias (typexpr, _alias) -> + let beforeTyp, insideTyp, afterTyp = + partitionByLoc comments typexpr.ptyp_loc + in + attach t.leading typexpr.ptyp_loc beforeTyp; + walkCoreType typexpr t insideTyp; + attach t.trailing typexpr.ptyp_loc afterTyp + | Ptyp_poly (strings, typexpr) -> + let comments = + visitListButContinueWithRemainingComments + ~getLoc:(fun n -> n.Asttypes.loc) + ~walkNode:(fun longident t comments -> + let beforeLongident, afterLongident = + partitionLeadingTrailing comments longident.loc + in + attach t.leading longident.loc beforeLongident; + attach t.trailing longident.loc afterLongident) + ~newlineDelimited:false strings t comments + in + let beforeTyp, insideTyp, afterTyp = + partitionByLoc comments typexpr.ptyp_loc 
+ in + attach t.leading typexpr.ptyp_loc beforeTyp; + walkCoreType typexpr t insideTyp; + attach t.trailing typexpr.ptyp_loc afterTyp + | Ptyp_variant (rowFields, _, _) -> + walkList (rowFields |> List.map (fun rf -> RowField rf)) t comments + | Ptyp_constr (longident, typexprs) -> + let beforeLongident, _afterLongident = + partitionLeadingTrailing comments longident.loc + in + let afterLongident, rest = + partitionAdjacentTrailing longident.loc comments + in + attach t.leading longident.loc beforeLongident; + attach t.trailing longident.loc afterLongident; + walkList (typexprs |> List.map (fun ct -> CoreType ct)) t rest + | Ptyp_arrow _ -> + let _, parameters, typexpr = arrowType typ in + let comments = walkTypeParameters parameters t comments in + let beforeTyp, insideTyp, afterTyp = + partitionByLoc comments typexpr.ptyp_loc + in + attach t.leading typexpr.ptyp_loc beforeTyp; + walkCoreType typexpr t insideTyp; + attach t.trailing typexpr.ptyp_loc afterTyp + | Ptyp_object (fields, _) -> walkTypObjectFields fields t comments + | _ -> () + +and walkTypObjectFields fields t comments = + walkList (fields |> List.map (fun f -> ObjectField f)) t comments + +and walkObjectField field t comments = + match field with + | Otag (lbl, _, typexpr) -> + let beforeLbl, afterLbl = partitionLeadingTrailing comments lbl.loc in + attach t.leading lbl.loc beforeLbl; + let afterLbl, rest = partitionAdjacentTrailing lbl.loc afterLbl in + attach t.trailing lbl.loc afterLbl; + let beforeTyp, insideTyp, afterTyp = partitionByLoc rest typexpr.ptyp_loc in + attach t.leading typexpr.ptyp_loc beforeTyp; + walkCoreType typexpr t insideTyp; + attach t.trailing typexpr.ptyp_loc afterTyp + | _ -> () + +and walkTypeParameters typeParameters t comments = + visitListButContinueWithRemainingComments + ~getLoc:(fun (_, _, typexpr) -> + match typexpr.Parsetree.ptyp_attributes with + | ({Location.txt = "ns.namedArgLoc"; loc}, _) :: _attrs -> + {loc with loc_end = typexpr.ptyp_loc.loc_end} + | _ -> typexpr.ptyp_loc) + ~walkNode:walkTypeParameter ~newlineDelimited:false typeParameters t + comments + +and walkTypeParameter (_attrs, _lbl, typexpr) t comments = + let beforeTyp, insideTyp, afterTyp = + partitionByLoc comments typexpr.ptyp_loc + in + attach t.leading typexpr.ptyp_loc beforeTyp; + walkCoreType typexpr t insideTyp; + attach t.trailing typexpr.ptyp_loc afterTyp + +and walkPackageType packageType t comments = + let longident, packageConstraints = packageType in + let beforeLongident, afterLongident = + partitionLeadingTrailing comments longident.loc + in + attach t.leading longident.loc beforeLongident; + let afterLongident, rest = + partitionAdjacentTrailing longident.loc afterLongident + in + attach t.trailing longident.loc afterLongident; + walkPackageConstraints packageConstraints t rest + +and walkPackageConstraints packageConstraints t comments = + walkList + (packageConstraints |> List.map (fun (li, te) -> PackageConstraint (li, te))) + t comments + +and walkPackageConstraint packageConstraint t comments = + let longident, typexpr = packageConstraint in + let beforeLongident, afterLongident = + partitionLeadingTrailing comments longident.loc + in + attach t.leading longident.loc beforeLongident; + let afterLongident, rest = + partitionAdjacentTrailing longident.loc afterLongident + in + attach t.trailing longident.loc afterLongident; + let beforeTyp, insideTyp, afterTyp = partitionByLoc rest typexpr.ptyp_loc in + attach t.leading typexpr.ptyp_loc beforeTyp; + walkCoreType typexpr t insideTyp; + attach t.trailing 
typexpr.ptyp_loc afterTyp + +and walkExtension extension t comments = + let id, payload = extension in + let beforeId, afterId = partitionLeadingTrailing comments id.loc in + attach t.leading id.loc beforeId; + let afterId, rest = partitionAdjacentTrailing id.loc afterId in + attach t.trailing id.loc afterId; + walkPayload payload t rest + +and walkAttribute (id, payload) t comments = + let beforeId, afterId = partitionLeadingTrailing comments id.loc in + attach t.leading id.loc beforeId; + let afterId, rest = partitionAdjacentTrailing id.loc afterId in + attach t.trailing id.loc afterId; + walkPayload payload t rest + +and walkPayload payload t comments = + match payload with + | PStr s -> walkStructure s t comments + | _ -> () diff --git a/res_syntax/src/res_core.ml b/res_syntax/src/res_core.ml new file mode 100644 index 0000000000..e4048594f2 --- /dev/null +++ b/res_syntax/src/res_core.ml @@ -0,0 +1,6320 @@ +module Doc = Res_doc +module Grammar = Res_grammar +module Token = Res_token +module Diagnostics = Res_diagnostics +module CommentTable = Res_comments_table +module ResPrinter = Res_printer +module Scanner = Res_scanner +module Parser = Res_parser + +let mkLoc startLoc endLoc = + Location.{loc_start = startLoc; loc_end = endLoc; loc_ghost = false} + +module Recover = struct + let defaultExpr () = + let id = Location.mknoloc "rescript.exprhole" in + Ast_helper.Exp.mk (Pexp_extension (id, PStr [])) + + let defaultType () = + let id = Location.mknoloc "rescript.typehole" in + Ast_helper.Typ.extension (id, PStr []) + + let defaultPattern () = + let id = Location.mknoloc "rescript.patternhole" in + Ast_helper.Pat.extension (id, PStr []) + + let defaultModuleExpr () = Ast_helper.Mod.structure [] + let defaultModuleType () = Ast_helper.Mty.signature [] + + let defaultSignatureItem = + let id = Location.mknoloc "rescript.sigitemhole" in + Ast_helper.Sig.extension (id, PStr []) + + let recoverEqualGreater p = + Parser.expect EqualGreater p; + match p.Parser.token with + | MinusGreater -> Parser.next p + | _ -> () + + let shouldAbortListParse p = + let rec check breadcrumbs = + match breadcrumbs with + | [] -> false + | (grammar, _) :: rest -> + if Grammar.isPartOfList grammar p.Parser.token then true else check rest + in + check p.breadcrumbs +end + +module ErrorMessages = struct + let listPatternSpread = + "List pattern matches only supports one `...` spread, at the end.\n\ + Explanation: a list spread at the tail is efficient, but a spread in the \ + middle would create new lists; out of performance concern, our pattern \ + matching currently guarantees to never create new intermediate data." + + let recordPatternSpread = + "Record's `...` spread is not supported in pattern matches.\n\ + Explanation: you can't collect a subset of a record's field into its own \ + record, since a record needs an explicit declaration and that subset \ + wouldn't have one.\n\ + Solution: you need to pull out each field you want explicitly." + (* let recordPatternUnderscore = "Record patterns only support one `_`, at the end." *) + [@@live] + + let arrayPatternSpread = + "Array's `...` spread is not supported in pattern matches.\n\ + Explanation: such spread would create a subarray; out of performance \ + concern, our pattern matching currently guarantees to never create new \ + intermediate data.\n\ + Solution: if it's to validate the first few elements, use a `when` clause \ + + Array size check + `get` checks on the current pattern. If it's to \ + obtain a subarray, use `Array.sub` or `Belt.Array.slice`." 
+ + let arrayExprSpread = + "Arrays can't use the `...` spread currently. Please use `concat` or other \ + Array helpers." + + let recordExprSpread = + "Records can only have one `...` spread, at the beginning.\n\ + Explanation: since records have a known, fixed shape, a spread like `{a, \ + ...b}` wouldn't make sense, as `b` would override every field of `a` \ + anyway." + + let variantIdent = + "A polymorphic variant (e.g. #id) must start with an alphabetical letter \ + or be a number (e.g. #742)" + + let experimentalIfLet expr = + let switchExpr = {expr with Parsetree.pexp_attributes = []} in + Doc.concat + [ + Doc.text "If-let is currently highly experimental."; + Doc.line; + Doc.text "Use a regular `switch` with pattern matching instead:"; + Doc.concat + [ + Doc.hardLine; + Doc.hardLine; + ResPrinter.printExpression switchExpr CommentTable.empty; + ]; + ] + |> Doc.toString ~width:80 + + let typeParam = + "A type param consists of a singlequote followed by a name like `'a` or \ + `'A`" + let typeVar = + "A type variable consists of a singlequote followed by a name like `'a` or \ + `'A`" + + let attributeWithoutNode (attr : Parsetree.attribute) = + let {Asttypes.txt = attrName}, _ = attr in + "Did you forget to attach `" ^ attrName + ^ "` to an item?\n Standalone attributes start with `@@` like: `@@" + ^ attrName ^ "`" + + let typeDeclarationNameLongident longident = + "A type declaration's name cannot contain a module access. Did you mean `" + ^ Longident.last longident ^ "`?" + + let tupleSingleElement = "A tuple needs at least two elements" + + let missingTildeLabeledParameter name = + if name = "" then "A labeled parameter starts with a `~`." + else "A labeled parameter starts with a `~`. Did you mean: `~" ^ name ^ "`?" + + let stringInterpolationInPattern = + "String interpolation is not supported in pattern matching." + + let spreadInRecordDeclaration = + "A record type declaration doesn't support the ... spread. Only an object \ + (with quoted field names) does." + + let objectQuotedFieldName name = + "An object type declaration needs quoted field names. Did you mean \"" + ^ name ^ "\"?" + + let forbiddenInlineRecordDeclaration = + "An inline record type declaration is only allowed in a variant \ + constructor's declaration" + + let sameTypeSpread = + "You're using a ... spread without extra fields. This is the same type." + + let polyVarIntWithSuffix number = + "A numeric polymorphic variant cannot be followed by a letter. Did you \ + mean `#" ^ number ^ "`?" 
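(These messages are plain strings, or a rendered `Doc` in the case of `experimentalIfLet`, and they are surfaced through the parser's diagnostics at the point where the offending syntax is detected. The sketch below is not part of the diff; it assumes that `Parser.err`, `Diagnostics.message` and `p.prevEndPos` have the shapes suggested by their use elsewhere in this parser, such as in the `Recover` module above.)

```ocaml
(* Sketch only: how one of the messages above might be reported at a parse
 * site. The hypothetical helper wraps the assumed reporting call. *)
let reportRecordPatternSpread p ~startPos =
  Parser.err ~startPos ~endPos:p.Parser.prevEndPos p
    (Diagnostics.message ErrorMessages.recordPatternSpread)
```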
+end + +let jsxAttr = (Location.mknoloc "JSX", Parsetree.PStr []) +let uncurryAttr = (Location.mknoloc "bs", Parsetree.PStr []) +let ternaryAttr = (Location.mknoloc "ns.ternary", Parsetree.PStr []) +let ifLetAttr = (Location.mknoloc "ns.iflet", Parsetree.PStr []) +let optionalAttr = (Location.mknoloc "ns.optional", Parsetree.PStr []) +let makeAwaitAttr loc = (Location.mkloc "res.await" loc, Parsetree.PStr []) +let makeAsyncAttr loc = (Location.mkloc "res.async" loc, Parsetree.PStr []) + +let makeExpressionOptional ~optional (e : Parsetree.expression) = + if optional then {e with pexp_attributes = optionalAttr :: e.pexp_attributes} + else e +let makePatternOptional ~optional (p : Parsetree.pattern) = + if optional then {p with ppat_attributes = optionalAttr :: p.ppat_attributes} + else p + +let suppressFragileMatchWarningAttr = + ( Location.mknoloc "warning", + Parsetree.PStr + [ + Ast_helper.Str.eval + (Ast_helper.Exp.constant (Pconst_string ("-4", None))); + ] ) +let makeBracesAttr loc = (Location.mkloc "ns.braces" loc, Parsetree.PStr []) +let templateLiteralAttr = (Location.mknoloc "res.template", Parsetree.PStr []) + +let spreadAttr = (Location.mknoloc "res.spread", Parsetree.PStr []) + +type typDefOrExt = + | TypeDef of { + recFlag: Asttypes.rec_flag; + types: Parsetree.type_declaration list; + } + | TypeExt of Parsetree.type_extension + +type labelledParameter = + | TermParameter of { + uncurried: bool; + attrs: Parsetree.attributes; + label: Asttypes.arg_label; + expr: Parsetree.expression option; + pat: Parsetree.pattern; + pos: Lexing.position; + } + | TypeParameter of { + uncurried: bool; + attrs: Parsetree.attributes; + locs: string Location.loc list; + pos: Lexing.position; + } + +type recordPatternItem = + | PatUnderscore + | PatField of (Ast_helper.lid * Parsetree.pattern) + +type context = OrdinaryExpr | TernaryTrueBranchExpr | WhenExpr + +let getClosingToken = function + | Token.Lparen -> Token.Rparen + | Lbrace -> Rbrace + | Lbracket -> Rbracket + | List -> Rbrace + | LessThan -> GreaterThan + | _ -> assert false + +let rec goToClosing closingToken state = + match (state.Parser.token, closingToken) with + | Rparen, Token.Rparen + | Rbrace, Rbrace + | Rbracket, Rbracket + | GreaterThan, GreaterThan -> + Parser.next state; + () + | ((Token.Lbracket | Lparen | Lbrace | List | LessThan) as t), _ -> + Parser.next state; + goToClosing (getClosingToken t) state; + goToClosing closingToken state + | (Rparen | Token.Rbrace | Rbracket | Eof), _ -> + () (* TODO: how do report errors here? 
*) + | _ -> + Parser.next state; + goToClosing closingToken state + +(* Madness *) +let isEs6ArrowExpression ~inTernary p = + Parser.lookahead p (fun state -> + let async = + match state.Parser.token with + | Lident "async" -> + Parser.next state; + true + | _ -> false + in + match state.Parser.token with + | Lident _ | Underscore -> ( + Parser.next state; + match state.Parser.token with + (* Don't think that this valid + * Imagine: let x = (a: int) + * This is a parenthesized expression with a type constraint, wait for + * the arrow *) + (* | Colon when not inTernary -> true *) + | EqualGreater -> true + | _ -> false) + | Lparen -> ( + let prevEndPos = state.prevEndPos in + Parser.next state; + match state.token with + (* arrived at `()` here *) + | Rparen -> ( + Parser.next state; + match state.Parser.token with + (* arrived at `() :` here *) + | Colon when not inTernary -> ( + Parser.next state; + match state.Parser.token with + (* arrived at `() :typ` here *) + | Lident _ -> ( + Parser.next state; + (match state.Parser.token with + (* arrived at `() :typ<` here *) + | LessThan -> + Parser.next state; + goToClosing GreaterThan state + | _ -> ()); + match state.Parser.token with + (* arrived at `() :typ =>` or `() :typ<'a,'b> =>` here *) + | EqualGreater -> true + | _ -> false) + | _ -> true) + | EqualGreater -> true + | _ -> false) + | Dot (* uncurried *) -> true + | Tilde when not async -> true + | Backtick -> + false + (* (` always indicates the start of an expr, can't be es6 parameter *) + | _ -> ( + goToClosing Rparen state; + match state.Parser.token with + | EqualGreater -> true + (* | Lbrace TODO: detect missing =>, is this possible? *) + | Colon when not inTernary -> true + | Rparen -> + (* imagine having something as : + * switch colour { + * | Red + * when l == l' + * || (&Clflags.classic && (l == Nolabel && !is_optional(l'))) => (t1, t2) + * We'll arrive at the outer rparen just before the =>. + * This is not an es6 arrow. 
+ * *) + false + | _ -> ( + Parser.nextUnsafe state; + (* error recovery, peek at the next token, + * (elements, providerId] => { + * in the example above, we have an unbalanced ] here + *) + match state.Parser.token with + | EqualGreater when state.startPos.pos_lnum == prevEndPos.pos_lnum + -> + true + | _ -> false))) + | _ -> false) + +let isEs6ArrowFunctor p = + Parser.lookahead p (fun state -> + match state.Parser.token with + (* | Uident _ | Underscore -> *) + (* Parser.next state; *) + (* begin match state.Parser.token with *) + (* | EqualGreater -> true *) + (* | _ -> false *) + (* end *) + | Lparen -> ( + Parser.next state; + match state.token with + | Rparen -> ( + Parser.next state; + match state.token with + | Colon | EqualGreater -> true + | _ -> false) + | _ -> ( + goToClosing Rparen state; + match state.Parser.token with + | EqualGreater | Lbrace -> true + | Colon -> true + | _ -> false)) + | _ -> false) + +let isEs6ArrowType p = + Parser.lookahead p (fun state -> + match state.Parser.token with + | Lparen -> ( + Parser.next state; + match state.Parser.token with + | Rparen -> ( + Parser.next state; + match state.Parser.token with + | EqualGreater -> true + | _ -> false) + | Tilde | Dot -> true + | _ -> ( + goToClosing Rparen state; + match state.Parser.token with + | EqualGreater -> true + | _ -> false)) + | Tilde -> true + | _ -> false) + +let buildLongident words = + match List.rev words with + | [] -> assert false + | hd :: tl -> List.fold_left (fun p s -> Longident.Ldot (p, s)) (Lident hd) tl + +let makeInfixOperator p token startPos endPos = + let stringifiedToken = + if token = Token.MinusGreater then "|." + else if token = Token.PlusPlus then "^" + else if token = Token.BangEqual then "<>" + else if token = Token.BangEqualEqual then "!=" + else if token = Token.Equal then ( + (* TODO: could have a totally different meaning like x->fooSet(y)*) + Parser.err ~startPos ~endPos p + (Diagnostics.message "Did you mean `==` here?"); + "=") + else if token = Token.EqualEqual then "=" + else if token = Token.EqualEqualEqual then "==" + else Token.toString token + in + let loc = mkLoc startPos endPos in + let operator = Location.mkloc (Longident.Lident stringifiedToken) loc in + Ast_helper.Exp.ident ~loc operator + +let negateString s = + if String.length s > 0 && (s.[0] [@doesNotRaise]) = '-' then + (String.sub [@doesNotRaise]) s 1 (String.length s - 1) + else "-" ^ s + +let makeUnaryExpr startPos tokenEnd token operand = + match (token, operand.Parsetree.pexp_desc) with + | (Token.Plus | PlusDot), Pexp_constant (Pconst_integer _ | Pconst_float _) -> + operand + | Minus, Pexp_constant (Pconst_integer (n, m)) -> + { + operand with + pexp_desc = Pexp_constant (Pconst_integer (negateString n, m)); + } + | (Minus | MinusDot), Pexp_constant (Pconst_float (n, m)) -> + {operand with pexp_desc = Pexp_constant (Pconst_float (negateString n, m))} + | (Token.Plus | PlusDot | Minus | MinusDot), _ -> + let tokenLoc = mkLoc startPos tokenEnd in + let operator = "~" ^ Token.toString token in + Ast_helper.Exp.apply + ~loc:(mkLoc startPos operand.Parsetree.pexp_loc.loc_end) + (Ast_helper.Exp.ident ~loc:tokenLoc + (Location.mkloc (Longident.Lident operator) tokenLoc)) + [(Nolabel, operand)] + | Token.Bang, _ -> + let tokenLoc = mkLoc startPos tokenEnd in + Ast_helper.Exp.apply + ~loc:(mkLoc startPos operand.Parsetree.pexp_loc.loc_end) + (Ast_helper.Exp.ident ~loc:tokenLoc + (Location.mkloc (Longident.Lident "not") tokenLoc)) + [(Nolabel, operand)] + | _ -> operand + +let makeListExpression loc 
seq extOpt = + let rec handleSeq = function + | [] -> ( + match extOpt with + | Some ext -> ext + | None -> + let loc = {loc with Location.loc_ghost = true} in + let nil = Location.mkloc (Longident.Lident "[]") loc in + Ast_helper.Exp.construct ~loc nil None) + | e1 :: el -> + let exp_el = handleSeq el in + let loc = + mkLoc e1.Parsetree.pexp_loc.Location.loc_start exp_el.pexp_loc.loc_end + in + let arg = Ast_helper.Exp.tuple ~loc [e1; exp_el] in + Ast_helper.Exp.construct ~loc + (Location.mkloc (Longident.Lident "::") loc) + (Some arg) + in + let expr = handleSeq seq in + {expr with pexp_loc = loc} + +let makeListPattern loc seq ext_opt = + let rec handle_seq = function + | [] -> + let base_case = + match ext_opt with + | Some ext -> ext + | None -> + let loc = {loc with Location.loc_ghost = true} in + let nil = {Location.txt = Longident.Lident "[]"; loc} in + Ast_helper.Pat.construct ~loc nil None + in + base_case + | p1 :: pl -> + let pat_pl = handle_seq pl in + let loc = mkLoc p1.Parsetree.ppat_loc.loc_start pat_pl.ppat_loc.loc_end in + let arg = Ast_helper.Pat.mk ~loc (Ppat_tuple [p1; pat_pl]) in + Ast_helper.Pat.mk ~loc + (Ppat_construct (Location.mkloc (Longident.Lident "::") loc, Some arg)) + in + handle_seq seq + +(* TODO: diagnostic reporting *) +let lidentOfPath longident = + match Longident.flatten longident |> List.rev with + | [] -> "" + | ident :: _ -> ident + +let makeNewtypes ~attrs ~loc newtypes exp = + let expr = + List.fold_right + (fun newtype exp -> Ast_helper.Exp.mk ~loc (Pexp_newtype (newtype, exp))) + newtypes exp + in + {expr with pexp_attributes = attrs} + +(* locally abstract types syntax sugar + * Transforms + * let f: type t u v. = (foo : list) => ... + * into + * let f = (type t u v. foo : list) => ... + *) +let wrapTypeAnnotation ~loc newtypes core_type body = + let exp = + makeNewtypes ~attrs:[] ~loc newtypes + (Ast_helper.Exp.constraint_ ~loc body core_type) + in + let typ = + Ast_helper.Typ.poly ~loc newtypes + (Ast_helper.Typ.varify_constructors newtypes core_type) + in + (exp, typ) + +(** + * process the occurrence of _ in the arguments of a function application + * replace _ with a new variable, currently __x, in the arguments + * return a wrapping function that wraps ((__x) => ...) around an expression + * e.g. foo(_, 3) becomes (__x) => foo(__x, 3) + *) +let processUnderscoreApplication args = + let exp_question = ref None in + let hidden_var = "__x" in + let check_arg ((lab, exp) as arg) = + match exp.Parsetree.pexp_desc with + | Pexp_ident ({txt = Lident "_"} as id) -> + let new_id = Location.mkloc (Longident.Lident hidden_var) id.loc in + let new_exp = Ast_helper.Exp.mk (Pexp_ident new_id) ~loc:exp.pexp_loc in + exp_question := Some new_exp; + (lab, new_exp) + | _ -> arg + in + let args = List.map check_arg args in + let wrap (exp_apply : Parsetree.expression) = + match !exp_question with + | Some {pexp_loc = loc} -> + let pattern = + Ast_helper.Pat.mk + (Ppat_var (Location.mkloc hidden_var loc)) + ~loc:Location.none + in + Ast_helper.Exp.mk (Pexp_fun (Nolabel, None, pattern, exp_apply)) ~loc + | None -> exp_apply + in + (args, wrap) + +(* Transform A.a into a. For use with punned record fields as in {A.a, b}. 
*) +let removeModuleNameFromPunnedFieldValue exp = + match exp.Parsetree.pexp_desc with + | Pexp_ident pathIdent -> + { + exp with + pexp_desc = + Pexp_ident {pathIdent with txt = Lident (Longident.last pathIdent.txt)}; + } + | _ -> exp + +let rec parseLident p = + let recoverLident p = + if + Token.isKeyword p.Parser.token + && p.Parser.prevEndPos.pos_lnum == p.startPos.pos_lnum + then ( + Parser.err p (Diagnostics.lident p.Parser.token); + Parser.next p; + None) + else + let rec loop p = + if (not (Recover.shouldAbortListParse p)) && p.token <> Eof then ( + Parser.next p; + loop p) + in + Parser.err p (Diagnostics.lident p.Parser.token); + Parser.next p; + loop p; + match p.Parser.token with + | Lident _ -> Some () + | _ -> None + in + let startPos = p.Parser.startPos in + match p.Parser.token with + | Lident ident -> + Parser.next p; + let loc = mkLoc startPos p.prevEndPos in + (ident, loc) + | Eof -> + Parser.err ~startPos p (Diagnostics.unexpected p.Parser.token p.breadcrumbs); + ("_", mkLoc startPos p.prevEndPos) + | _ -> ( + match recoverLident p with + | Some () -> parseLident p + | None -> ("_", mkLoc startPos p.prevEndPos)) + +let parseIdent ~msg ~startPos p = + match p.Parser.token with + | Lident ident | Uident ident -> + Parser.next p; + let loc = mkLoc startPos p.prevEndPos in + (ident, loc) + | token + when Token.isKeyword token && p.prevEndPos.pos_lnum == p.startPos.pos_lnum + -> + let tokenTxt = Token.toString token in + let msg = + "`" ^ tokenTxt + ^ "` is a reserved keyword. Keywords need to be escaped: \\\"" ^ tokenTxt + ^ "\"" + in + Parser.err ~startPos p (Diagnostics.message msg); + Parser.next p; + (tokenTxt, mkLoc startPos p.prevEndPos) + | _token -> + Parser.err ~startPos p (Diagnostics.message msg); + Parser.next p; + ("", mkLoc startPos p.prevEndPos) + +let parseHashIdent ~startPos p = + Parser.expect Hash p; + match p.token with + | String text -> + Parser.next p; + (text, mkLoc startPos p.prevEndPos) + | Int {i; suffix} -> + let () = + match suffix with + | Some _ -> + Parser.err p + (Diagnostics.message (ErrorMessages.polyVarIntWithSuffix i)) + | None -> () + in + Parser.next p; + (i, mkLoc startPos p.prevEndPos) + | Eof -> + Parser.err ~startPos p (Diagnostics.unexpected p.token p.breadcrumbs); + ("", mkLoc startPos p.prevEndPos) + | _ -> parseIdent ~startPos ~msg:ErrorMessages.variantIdent p + +(* Ldot (Ldot (Lident "Foo", "Bar"), "baz") *) +let parseValuePath p = + let startPos = p.Parser.startPos in + let rec aux p path = + let startPos = p.Parser.startPos in + let token = p.token in + + Parser.next p; + if p.Parser.token = Dot then ( + Parser.expect Dot p; + + match p.Parser.token with + | Lident ident -> Longident.Ldot (path, ident) + | Uident uident -> aux p (Ldot (path, uident)) + | token -> + Parser.err p (Diagnostics.unexpected token p.breadcrumbs); + Longident.Ldot (path, "_")) + else ( + Parser.err p ~startPos ~endPos:p.prevEndPos (Diagnostics.lident token); + path) + in + let ident = + match p.Parser.token with + | Lident ident -> + Parser.next p; + Longident.Lident ident + | Uident ident -> + let res = aux p (Lident ident) in + Parser.nextUnsafe p; + res + | token -> + Parser.err p (Diagnostics.unexpected token p.breadcrumbs); + Parser.nextUnsafe p; + Longident.Lident "_" + in + Location.mkloc ident (mkLoc startPos p.prevEndPos) + +let parseValuePathAfterDot p = + let startPos = p.Parser.startPos in + match p.Parser.token with + | Lident _ | Uident _ -> parseValuePath p + | token -> + Parser.err p (Diagnostics.unexpected token p.breadcrumbs); + 
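+    (* error recovery: fall back to a placeholder `_` path so parsing can continue *)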
Location.mkloc (Longident.Lident "_") (mkLoc startPos p.prevEndPos) + +let parseValuePathTail p startPos ident = + let rec loop p path = + match p.Parser.token with + | Lident ident -> + Parser.next p; + Location.mkloc + (Longident.Ldot (path, ident)) + (mkLoc startPos p.prevEndPos) + | Uident ident -> + Parser.next p; + Parser.expect Dot p; + loop p (Longident.Ldot (path, ident)) + | token -> + Parser.err p (Diagnostics.unexpected token p.breadcrumbs); + Location.mkloc (Longident.Ldot (path, "_")) (mkLoc startPos p.prevEndPos) + in + loop p ident + +let parseModuleLongIdentTail ~lowercase p startPos ident = + let rec loop p acc = + match p.Parser.token with + | Lident ident when lowercase -> + Parser.next p; + let lident = Longident.Ldot (acc, ident) in + Location.mkloc lident (mkLoc startPos p.prevEndPos) + | Uident ident -> ( + Parser.next p; + let endPos = p.prevEndPos in + let lident = Longident.Ldot (acc, ident) in + match p.Parser.token with + | Dot -> + Parser.next p; + loop p lident + | _ -> Location.mkloc lident (mkLoc startPos endPos)) + | t -> + Parser.err p (Diagnostics.uident t); + Location.mkloc (Longident.Ldot (acc, "_")) (mkLoc startPos p.prevEndPos) + in + loop p ident + +(* Parses module identifiers: + Foo + Foo.Bar *) +let parseModuleLongIdent ~lowercase p = + (* Parser.leaveBreadcrumb p Reporting.ModuleLongIdent; *) + let startPos = p.Parser.startPos in + let moduleIdent = + match p.Parser.token with + | Lident ident when lowercase -> + let loc = mkLoc startPos p.endPos in + let lident = Longident.Lident ident in + Parser.next p; + Location.mkloc lident loc + | Uident ident -> ( + let lident = Longident.Lident ident in + let endPos = p.endPos in + Parser.next p; + match p.Parser.token with + | Dot -> + Parser.next p; + parseModuleLongIdentTail ~lowercase p startPos lident + | _ -> Location.mkloc lident (mkLoc startPos endPos)) + | t -> + Parser.err p (Diagnostics.uident t); + Location.mkloc (Longident.Lident "_") (mkLoc startPos p.prevEndPos) + in + (* Parser.eatBreadcrumb p; *) + moduleIdent + +let verifyJsxOpeningClosingName p nameExpr = + let closing = + match p.Parser.token with + | Lident lident -> + Parser.next p; + Longident.Lident lident + | Uident _ -> (parseModuleLongIdent ~lowercase:true p).txt + | _ -> Longident.Lident "" + in + match nameExpr.Parsetree.pexp_desc with + | Pexp_ident openingIdent -> + let opening = + let withoutCreateElement = + Longident.flatten openingIdent.txt + |> List.filter (fun s -> s <> "createElement") + in + match Longident.unflatten withoutCreateElement with + | Some li -> li + | None -> Longident.Lident "" + in + opening = closing + | _ -> assert false + +let string_of_pexp_ident nameExpr = + match nameExpr.Parsetree.pexp_desc with + | Pexp_ident openingIdent -> + Longident.flatten openingIdent.txt + |> List.filter (fun s -> s <> "createElement") + |> String.concat "." + | _ -> "" + +(* open-def ::= + * | open module-path + * | open! 
module-path *) +let parseOpenDescription ~attrs p = + Parser.leaveBreadcrumb p Grammar.OpenDescription; + let startPos = p.Parser.startPos in + Parser.expect Open p; + let override = + if Parser.optional p Token.Bang then Asttypes.Override else Asttypes.Fresh + in + let modident = parseModuleLongIdent ~lowercase:false p in + let loc = mkLoc startPos p.prevEndPos in + Parser.eatBreadcrumb p; + Ast_helper.Opn.mk ~loc ~attrs ~override modident + +(* constant ::= integer-literal *) +(* ∣ float-literal *) +(* ∣ string-literal *) +let parseConstant p = + let isNegative = + match p.Parser.token with + | Token.Minus -> + Parser.next p; + true + | Plus -> + Parser.next p; + false + | _ -> false + in + let constant = + match p.Parser.token with + | Int {i; suffix} -> + let intTxt = if isNegative then "-" ^ i else i in + Parsetree.Pconst_integer (intTxt, suffix) + | Float {f; suffix} -> + let floatTxt = if isNegative then "-" ^ f else f in + Parsetree.Pconst_float (floatTxt, suffix) + | String s -> + Pconst_string (s, if p.mode = ParseForTypeChecker then Some "js" else None) + | Codepoint {c; original} -> + if p.mode = ParseForTypeChecker then Pconst_char c + else + (* Pconst_char char does not have enough information for formatting. + * When parsing for the printer, we encode the char contents as a string + * with a special prefix. *) + Pconst_string (original, Some "INTERNAL_RES_CHAR_CONTENTS") + | token -> + Parser.err p (Diagnostics.unexpected token p.breadcrumbs); + Pconst_string ("", None) + in + Parser.nextUnsafe p; + constant + +let parseTemplateConstant ~prefix (p : Parser.t) = + (* Arrived at the ` char *) + let startPos = p.startPos in + Parser.nextTemplateLiteralToken p; + match p.token with + | TemplateTail (txt, _) -> + Parser.next p; + Parsetree.Pconst_string (txt, prefix) + | _ -> + let rec skipTokens () = + if p.token <> Eof then ( + Parser.next p; + match p.token with + | Backtick -> + Parser.next p; + () + | _ -> skipTokens ()) + in + skipTokens (); + Parser.err ~startPos ~endPos:p.prevEndPos p + (Diagnostics.message ErrorMessages.stringInterpolationInPattern); + Pconst_string ("", None) + +let parseCommaDelimitedRegion p ~grammar ~closing ~f = + Parser.leaveBreadcrumb p grammar; + let rec loop nodes = + match f p with + | Some node -> ( + match p.Parser.token with + | Comma -> + Parser.next p; + loop (node :: nodes) + | token when token = closing || token = Eof -> List.rev (node :: nodes) + | _ when Grammar.isListElement grammar p.token -> + (* missing comma between nodes in the region and the current token + * looks like the start of something valid in the current region. + * Example: + * type student<'extraInfo> = { + * name: string, + * age: int + * otherInfo: 'extraInfo + * } + * There is a missing comma between `int` and `otherInfo`. + * `otherInfo` looks like a valid start of the record declaration. + * We report the error here and then continue parsing the region. 
+ *) + Parser.expect Comma p; + loop (node :: nodes) + | _ -> + if + not + (p.token = Eof || p.token = closing + || Recover.shouldAbortListParse p) + then Parser.expect Comma p; + if p.token = Semicolon then Parser.next p; + loop (node :: nodes)) + | None -> + if p.token = Eof || p.token = closing || Recover.shouldAbortListParse p + then List.rev nodes + else ( + Parser.err p (Diagnostics.unexpected p.token p.breadcrumbs); + Parser.next p; + loop nodes) + in + let nodes = loop [] in + Parser.eatBreadcrumb p; + nodes + +let parseCommaDelimitedReversedList p ~grammar ~closing ~f = + Parser.leaveBreadcrumb p grammar; + let rec loop nodes = + match f p with + | Some node -> ( + match p.Parser.token with + | Comma -> + Parser.next p; + loop (node :: nodes) + | token when token = closing || token = Eof -> node :: nodes + | _ when Grammar.isListElement grammar p.token -> + (* missing comma between nodes in the region and the current token + * looks like the start of something valid in the current region. + * Example: + * type student<'extraInfo> = { + * name: string, + * age: int + * otherInfo: 'extraInfo + * } + * There is a missing comma between `int` and `otherInfo`. + * `otherInfo` looks like a valid start of the record declaration. + * We report the error here and then continue parsing the region. + *) + Parser.expect Comma p; + loop (node :: nodes) + | _ -> + if + not + (p.token = Eof || p.token = closing + || Recover.shouldAbortListParse p) + then Parser.expect Comma p; + if p.token = Semicolon then Parser.next p; + loop (node :: nodes)) + | None -> + if p.token = Eof || p.token = closing || Recover.shouldAbortListParse p + then nodes + else ( + Parser.err p (Diagnostics.unexpected p.token p.breadcrumbs); + Parser.next p; + loop nodes) + in + let nodes = loop [] in + Parser.eatBreadcrumb p; + nodes + +let parseDelimitedRegion p ~grammar ~closing ~f = + Parser.leaveBreadcrumb p grammar; + let rec loop nodes = + match f p with + | Some node -> loop (node :: nodes) + | None -> + if + p.Parser.token = Token.Eof || p.token = closing + || Recover.shouldAbortListParse p + then List.rev nodes + else ( + Parser.err p (Diagnostics.unexpected p.token p.breadcrumbs); + Parser.next p; + loop nodes) + in + let nodes = loop [] in + Parser.eatBreadcrumb p; + nodes + +let parseRegion p ~grammar ~f = + Parser.leaveBreadcrumb p grammar; + let rec loop nodes = + match f p with + | Some node -> loop (node :: nodes) + | None -> + if p.Parser.token = Token.Eof || Recover.shouldAbortListParse p then + List.rev nodes + else ( + Parser.err p (Diagnostics.unexpected p.token p.breadcrumbs); + Parser.next p; + loop nodes) + in + let nodes = loop [] in + Parser.eatBreadcrumb p; + nodes + +(* let-binding ::= pattern = expr *) +(* ∣ value-name { parameter } [: typexpr] [:> typexpr] = expr *) +(* ∣ value-name : poly-typexpr = expr *) + +(* pattern ::= value-name *) +(* ∣ _ *) +(* ∣ constant *) +(* ∣ pattern as value-name *) +(* ∣ ( pattern ) *) +(* ∣ ( pattern : typexpr ) *) +(* ∣ pattern | pattern *) +(* ∣ constr pattern *) +(* ∣ #variant variant-pattern *) +(* ∣ #...type *) +(* ∣ / pattern { , pattern }+ / *) +(* ∣ { field [: typexpr] [= pattern] { ; field [: typexpr] [= pattern] } [; _ ] [ ; ] } *) +(* ∣ [ pattern { ; pattern } [ ; ] ] *) +(* ∣ pattern :: pattern *) +(* ∣ [| pattern { ; pattern } [ ; ] |] *) +(* ∣ char-literal .. 
char-literal *) +(* ∣ exception pattern *) +let rec parsePattern ?(alias = true) ?(or_ = true) p = + let startPos = p.Parser.startPos in + let attrs = parseAttributes p in + let pat = + match p.Parser.token with + | (True | False) as token -> + let endPos = p.endPos in + Parser.next p; + let loc = mkLoc startPos endPos in + Ast_helper.Pat.construct ~loc + (Location.mkloc (Longident.Lident (Token.toString token)) loc) + None + | Int _ | String _ | Float _ | Codepoint _ | Minus | Plus -> ( + let c = parseConstant p in + match p.token with + | DotDot -> + Parser.next p; + let c2 = parseConstant p in + Ast_helper.Pat.interval ~loc:(mkLoc startPos p.prevEndPos) c c2 + | _ -> Ast_helper.Pat.constant ~loc:(mkLoc startPos p.prevEndPos) c) + | Backtick -> + let constant = parseTemplateConstant ~prefix:(Some "js") p in + Ast_helper.Pat.constant ~attrs:[templateLiteralAttr] + ~loc:(mkLoc startPos p.prevEndPos) + constant + | Lparen -> ( + Parser.next p; + match p.token with + | Rparen -> + Parser.next p; + let loc = mkLoc startPos p.prevEndPos in + let lid = Location.mkloc (Longident.Lident "()") loc in + Ast_helper.Pat.construct ~loc lid None + | _ -> ( + let pat = parseConstrainedPattern p in + match p.token with + | Comma -> + Parser.next p; + parseTuplePattern ~attrs ~first:pat ~startPos p + | _ -> + Parser.expect Rparen p; + let loc = mkLoc startPos p.prevEndPos in + { + pat with + ppat_loc = loc; + ppat_attributes = attrs @ pat.Parsetree.ppat_attributes; + })) + | Lbracket -> parseArrayPattern ~attrs p + | Lbrace -> parseRecordPattern ~attrs p + | Underscore -> + let endPos = p.endPos in + let loc = mkLoc startPos endPos in + Parser.next p; + Ast_helper.Pat.any ~loc ~attrs () + | Lident ident -> ( + let endPos = p.endPos in + let loc = mkLoc startPos endPos in + Parser.next p; + match p.token with + | Backtick -> + let constant = parseTemplateConstant ~prefix:(Some ident) p in + Ast_helper.Pat.constant ~loc:(mkLoc startPos p.prevEndPos) constant + | _ -> Ast_helper.Pat.var ~loc ~attrs (Location.mkloc ident loc)) + | Uident _ -> ( + let constr = parseModuleLongIdent ~lowercase:false p in + match p.Parser.token with + | Lparen -> parseConstructorPatternArgs p constr startPos attrs + | _ -> Ast_helper.Pat.construct ~loc:constr.loc ~attrs constr None) + | Hash -> ( + Parser.next p; + if p.Parser.token == DotDotDot then ( + Parser.next p; + let ident = parseValuePath p in + let loc = mkLoc startPos ident.loc.loc_end in + Ast_helper.Pat.type_ ~loc ~attrs ident) + else + let ident, loc = + match p.token with + | String text -> + Parser.next p; + (text, mkLoc startPos p.prevEndPos) + | Int {i; suffix} -> + let () = + match suffix with + | Some _ -> + Parser.err p + (Diagnostics.message (ErrorMessages.polyVarIntWithSuffix i)) + | None -> () + in + Parser.next p; + (i, mkLoc startPos p.prevEndPos) + | Eof -> + Parser.err ~startPos p + (Diagnostics.unexpected p.token p.breadcrumbs); + ("", mkLoc startPos p.prevEndPos) + | _ -> parseIdent ~msg:ErrorMessages.variantIdent ~startPos p + in + match p.Parser.token with + | Lparen -> parseVariantPatternArgs p ident startPos attrs + | _ -> Ast_helper.Pat.variant ~loc ~attrs ident None) + | Exception -> + Parser.next p; + let pat = parsePattern ~alias:false ~or_:false p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Pat.exception_ ~loc ~attrs pat + | Lazy -> + Parser.next p; + let pat = parsePattern ~alias:false ~or_:false p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Pat.lazy_ ~loc ~attrs pat + | List -> + Parser.next p; + 
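+    (* the `list{` keyword token has been consumed; parse the elements up to
+       the closing `}` *)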
parseListPattern ~startPos ~attrs p + | Module -> parseModulePattern ~attrs p + | Percent -> + let extension = parseExtension p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Pat.extension ~loc ~attrs extension + | Eof -> + Parser.err p (Diagnostics.unexpected p.Parser.token p.breadcrumbs); + Recover.defaultPattern () + | token -> ( + Parser.err p (Diagnostics.unexpected token p.breadcrumbs); + match + skipTokensAndMaybeRetry p ~isStartOfGrammar:Grammar.isAtomicPatternStart + with + | None -> Recover.defaultPattern () + | Some () -> parsePattern p) + in + let pat = if alias then parseAliasPattern ~attrs pat p else pat in + if or_ then parseOrPattern pat p else pat + +and skipTokensAndMaybeRetry p ~isStartOfGrammar = + if + Token.isKeyword p.Parser.token + && p.Parser.prevEndPos.pos_lnum == p.startPos.pos_lnum + then ( + Parser.next p; + None) + else if Recover.shouldAbortListParse p then + if isStartOfGrammar p.Parser.token then ( + Parser.next p; + Some ()) + else None + else ( + Parser.next p; + let rec loop p = + if not (Recover.shouldAbortListParse p) then ( + Parser.next p; + loop p) + in + loop p; + if isStartOfGrammar p.Parser.token then Some () else None) + +(* alias ::= pattern as lident *) +and parseAliasPattern ~attrs pattern p = + match p.Parser.token with + | As -> + Parser.next p; + let name, loc = parseLident p in + let name = Location.mkloc name loc in + Ast_helper.Pat.alias + ~loc:{pattern.ppat_loc with loc_end = p.prevEndPos} + ~attrs pattern name + | _ -> pattern + +(* or ::= pattern | pattern + * precedence: Red | Blue | Green is interpreted as (Red | Blue) | Green *) +and parseOrPattern pattern1 p = + let rec loop pattern1 = + match p.Parser.token with + | Bar -> + Parser.next p; + let pattern2 = parsePattern ~or_:false p in + let loc = + {pattern1.Parsetree.ppat_loc with loc_end = pattern2.ppat_loc.loc_end} + in + loop (Ast_helper.Pat.or_ ~loc pattern1 pattern2) + | _ -> pattern1 + in + loop pattern1 + +and parseNonSpreadPattern ~msg p = + let () = + match p.Parser.token with + | DotDotDot -> + Parser.err p (Diagnostics.message msg); + Parser.next p + | _ -> () + in + match p.Parser.token with + | token when Grammar.isPatternStart token -> ( + let pat = parsePattern p in + match p.Parser.token with + | Colon -> + Parser.next p; + let typ = parseTypExpr p in + let loc = mkLoc pat.ppat_loc.loc_start typ.Parsetree.ptyp_loc.loc_end in + Some (Ast_helper.Pat.constraint_ ~loc pat typ) + | _ -> Some pat) + | _ -> None + +and parseConstrainedPattern p = + let pat = parsePattern p in + match p.Parser.token with + | Colon -> + Parser.next p; + let typ = parseTypExpr p in + let loc = mkLoc pat.ppat_loc.loc_start typ.Parsetree.ptyp_loc.loc_end in + Ast_helper.Pat.constraint_ ~loc pat typ + | _ -> pat + +and parseConstrainedPatternRegion p = + match p.Parser.token with + | token when Grammar.isPatternStart token -> Some (parseConstrainedPattern p) + | _ -> None + +and parseOptionalLabel p = + match p.Parser.token with + | Question -> + Parser.next p; + true + | _ -> false + +(* field ::= + * | longident + * | longident : pattern + * | longident as lident + * + * row ::= + * | field , + * | field , _ + * | field , _, + *) +and parseRecordPatternRowField ~attrs p = + let label = parseValuePath p in + let pattern = + match p.Parser.token with + | Colon -> + Parser.next p; + let optional = parseOptionalLabel p in + let pat = parsePattern p in + makePatternOptional ~optional pat + | _ -> + Ast_helper.Pat.var ~loc:label.loc ~attrs + (Location.mkloc (Longident.last label.txt) 
label.loc) + in + (label, pattern) + +(* TODO: there are better representations than PatField|Underscore ? *) +and parseRecordPatternRow p = + let attrs = parseAttributes p in + match p.Parser.token with + | DotDotDot -> + Parser.next p; + Some (true, PatField (parseRecordPatternRowField ~attrs p)) + | Uident _ | Lident _ -> + Some (false, PatField (parseRecordPatternRowField ~attrs p)) + | Question -> ( + Parser.next p; + match p.token with + | Uident _ | Lident _ -> + let lid, pat = parseRecordPatternRowField ~attrs p in + Some (false, PatField (lid, makePatternOptional ~optional:true pat)) + | _ -> None) + | Underscore -> + Parser.next p; + Some (false, PatUnderscore) + | _ -> None + +and parseRecordPattern ~attrs p = + let startPos = p.startPos in + Parser.expect Lbrace p; + let rawFields = + parseCommaDelimitedReversedList p ~grammar:PatternRecord ~closing:Rbrace + ~f:parseRecordPatternRow + in + Parser.expect Rbrace p; + let fields, closedFlag = + let rawFields, flag = + match rawFields with + | (_hasSpread, PatUnderscore) :: rest -> (rest, Asttypes.Open) + | rawFields -> (rawFields, Asttypes.Closed) + in + List.fold_left + (fun (fields, flag) curr -> + let hasSpread, field = curr in + match field with + | PatField field -> + (if hasSpread then + let _, pattern = field in + Parser.err ~startPos:pattern.Parsetree.ppat_loc.loc_start p + (Diagnostics.message ErrorMessages.recordPatternSpread)); + (field :: fields, flag) + | PatUnderscore -> (fields, flag)) + ([], flag) rawFields + in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Pat.record ~loc ~attrs fields closedFlag + +and parseTuplePattern ~attrs ~first ~startPos p = + let patterns = + first + :: parseCommaDelimitedRegion p ~grammar:Grammar.PatternList ~closing:Rparen + ~f:parseConstrainedPatternRegion + in + Parser.expect Rparen p; + let () = + match patterns with + | [_] -> + Parser.err ~startPos ~endPos:p.prevEndPos p + (Diagnostics.message ErrorMessages.tupleSingleElement) + | _ -> () + in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Pat.tuple ~loc ~attrs patterns + +and parsePatternRegion p = + match p.Parser.token with + | DotDotDot -> + Parser.next p; + Some (true, parseConstrainedPattern p) + | token when Grammar.isPatternStart token -> + Some (false, parseConstrainedPattern p) + | _ -> None + +and parseModulePattern ~attrs p = + let startPos = p.Parser.startPos in + Parser.expect Module p; + Parser.expect Lparen p; + let uident = + match p.token with + | Uident uident -> + let loc = mkLoc p.startPos p.endPos in + Parser.next p; + Location.mkloc uident loc + | _ -> + (* TODO: error recovery *) + Location.mknoloc "_" + in + match p.token with + | Colon -> + let colonStart = p.Parser.startPos in + Parser.next p; + let packageTypAttrs = parseAttributes p in + let packageType = + parsePackageType ~startPos:colonStart ~attrs:packageTypAttrs p + in + Parser.expect Rparen p; + let loc = mkLoc startPos p.prevEndPos in + let unpack = Ast_helper.Pat.unpack ~loc:uident.loc uident in + Ast_helper.Pat.constraint_ ~loc ~attrs unpack packageType + | _ -> + Parser.expect Rparen p; + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Pat.unpack ~loc ~attrs uident + +and parseListPattern ~startPos ~attrs p = + let listPatterns = + parseCommaDelimitedReversedList p ~grammar:Grammar.PatternOcamlList + ~closing:Rbrace ~f:parsePatternRegion + in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + let filterSpread (hasSpread, pattern) = + if hasSpread then ( + Parser.err 
~startPos:pattern.Parsetree.ppat_loc.loc_start p + (Diagnostics.message ErrorMessages.listPatternSpread); + pattern) + else pattern + in + match listPatterns with + | (true, pattern) :: patterns -> + let patterns = patterns |> List.map filterSpread |> List.rev in + let pat = makeListPattern loc patterns (Some pattern) in + {pat with ppat_loc = loc; ppat_attributes = attrs} + | patterns -> + let patterns = patterns |> List.map filterSpread |> List.rev in + let pat = makeListPattern loc patterns None in + {pat with ppat_loc = loc; ppat_attributes = attrs} + +and parseArrayPattern ~attrs p = + let startPos = p.startPos in + Parser.expect Lbracket p; + let patterns = + parseCommaDelimitedRegion p ~grammar:Grammar.PatternList ~closing:Rbracket + ~f:(parseNonSpreadPattern ~msg:ErrorMessages.arrayPatternSpread) + in + Parser.expect Rbracket p; + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Pat.array ~loc ~attrs patterns + +and parseConstructorPatternArgs p constr startPos attrs = + let lparen = p.startPos in + Parser.expect Lparen p; + let args = + parseCommaDelimitedRegion p ~grammar:Grammar.PatternList ~closing:Rparen + ~f:parseConstrainedPatternRegion + in + Parser.expect Rparen p; + let args = + match args with + | [] -> + let loc = mkLoc lparen p.prevEndPos in + Some + (Ast_helper.Pat.construct ~loc + (Location.mkloc (Longident.Lident "()") loc) + None) + | [({ppat_desc = Ppat_tuple _} as pat)] as patterns -> + if p.mode = ParseForTypeChecker then + (* Some(1, 2) for type-checker *) + Some pat + else + (* Some((1, 2)) for printer *) + Some (Ast_helper.Pat.tuple ~loc:(mkLoc lparen p.endPos) patterns) + | [pattern] -> Some pattern + | patterns -> + Some (Ast_helper.Pat.tuple ~loc:(mkLoc lparen p.endPos) patterns) + in + Ast_helper.Pat.construct ~loc:(mkLoc startPos p.prevEndPos) ~attrs constr args + +and parseVariantPatternArgs p ident startPos attrs = + let lparen = p.startPos in + Parser.expect Lparen p; + let patterns = + parseCommaDelimitedRegion p ~grammar:Grammar.PatternList ~closing:Rparen + ~f:parseConstrainedPatternRegion + in + let args = + match patterns with + | [] -> + let loc = mkLoc lparen p.prevEndPos in + Some + (Ast_helper.Pat.construct ~loc + (Location.mkloc (Longident.Lident "()") loc) + None) + | [({ppat_desc = Ppat_tuple _} as pat)] as patterns -> + if p.mode = ParseForTypeChecker then + (* #ident(1, 2) for type-checker *) + Some pat + else + (* #ident((1, 2)) for printer *) + Some (Ast_helper.Pat.tuple ~loc:(mkLoc lparen p.endPos) patterns) + | [pattern] -> Some pattern + | patterns -> + Some (Ast_helper.Pat.tuple ~loc:(mkLoc lparen p.endPos) patterns) + in + Parser.expect Rparen p; + Ast_helper.Pat.variant ~loc:(mkLoc startPos p.prevEndPos) ~attrs ident args + +and parseExpr ?(context = OrdinaryExpr) p = + let expr = parseOperandExpr ~context p in + let expr = parseBinaryExpr ~context ~a:expr p 1 in + parseTernaryExpr expr p + +(* expr ? 
expr : expr *) +and parseTernaryExpr leftOperand p = + match p.Parser.token with + | Question -> + Parser.leaveBreadcrumb p Grammar.Ternary; + Parser.next p; + let trueBranch = parseExpr ~context:TernaryTrueBranchExpr p in + Parser.expect Colon p; + let falseBranch = parseExpr p in + Parser.eatBreadcrumb p; + let loc = + { + leftOperand.Parsetree.pexp_loc with + loc_start = leftOperand.pexp_loc.loc_start; + loc_end = falseBranch.Parsetree.pexp_loc.loc_end; + } + in + Ast_helper.Exp.ifthenelse ~attrs:[ternaryAttr] ~loc leftOperand trueBranch + (Some falseBranch) + | _ -> leftOperand + +and parseEs6ArrowExpression ?context ?parameters p = + let startPos = p.Parser.startPos in + Parser.leaveBreadcrumb p Grammar.Es6ArrowExpr; + let parameters = + match parameters with + | Some params -> params + | None -> parseParameters p + in + let returnType = + match p.Parser.token with + | Colon -> + Parser.next p; + Some (parseTypExpr ~es6Arrow:false p) + | _ -> None + in + Parser.expect EqualGreater p; + let body = + let expr = parseExpr ?context p in + match returnType with + | Some typ -> + Ast_helper.Exp.constraint_ + ~loc:(mkLoc expr.pexp_loc.loc_start typ.Parsetree.ptyp_loc.loc_end) + expr typ + | None -> expr + in + Parser.eatBreadcrumb p; + let endPos = p.prevEndPos in + let arrowExpr = + List.fold_right + (fun parameter expr -> + match parameter with + | TermParameter + { + uncurried; + attrs; + label = lbl; + expr = defaultExpr; + pat; + pos = startPos; + } -> + let attrs = if uncurried then uncurryAttr :: attrs else attrs in + Ast_helper.Exp.fun_ ~loc:(mkLoc startPos endPos) ~attrs lbl + defaultExpr pat expr + | TypeParameter {uncurried; attrs; locs = newtypes; pos = startPos} -> + let attrs = if uncurried then uncurryAttr :: attrs else attrs in + makeNewtypes ~attrs ~loc:(mkLoc startPos endPos) newtypes expr) + parameters body + in + {arrowExpr with pexp_loc = {arrowExpr.pexp_loc with loc_start = startPos}} + +(* + * uncurried_parameter ::= + * | . parameter + * + * parameter ::= + * | pattern + * | pattern : type + * | ~ labelName + * | ~ labelName as pattern + * | ~ labelName as pattern : type + * | ~ labelName = expr + * | ~ labelName as pattern = expr + * | ~ labelName as pattern : type = expr + * | ~ labelName = ? + * | ~ labelName as pattern = ? + * | ~ labelName as pattern : type = ? + * + * labelName ::= lident + *) +and parseParameter p = + if + p.Parser.token = Token.Typ || p.token = Tilde || p.token = Dot + || Grammar.isPatternStart p.token + then + let startPos = p.Parser.startPos in + let uncurried = Parser.optional p Token.Dot in + (* two scenarios: + * attrs ~lbl ... 
+ * attrs pattern + * Attributes before a labelled arg, indicate that it's on the whole arrow expr + * Otherwise it's part of the pattern + * *) + let attrs = parseAttributes p in + if p.Parser.token = Typ then ( + Parser.next p; + let lidents = parseLidentList p in + Some (TypeParameter {uncurried; attrs; locs = lidents; pos = startPos})) + else + let attrs, lbl, pat = + match p.Parser.token with + | Tilde -> ( + Parser.next p; + let lblName, loc = parseLident p in + let propLocAttr = + (Location.mkloc "ns.namedArgLoc" loc, Parsetree.PStr []) + in + match p.Parser.token with + | Comma | Equal | Rparen -> + let loc = mkLoc startPos p.prevEndPos in + ( attrs, + Asttypes.Labelled lblName, + Ast_helper.Pat.var ~attrs:[propLocAttr] ~loc + (Location.mkloc lblName loc) ) + | Colon -> + let lblEnd = p.prevEndPos in + Parser.next p; + let typ = parseTypExpr p in + let loc = mkLoc startPos lblEnd in + let pat = + let pat = Ast_helper.Pat.var ~loc (Location.mkloc lblName loc) in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Pat.constraint_ ~attrs:[propLocAttr] ~loc pat typ + in + (attrs, Asttypes.Labelled lblName, pat) + | As -> + Parser.next p; + let pat = + let pat = parseConstrainedPattern p in + {pat with ppat_attributes = propLocAttr :: pat.ppat_attributes} + in + (attrs, Asttypes.Labelled lblName, pat) + | t -> + Parser.err p (Diagnostics.unexpected t p.breadcrumbs); + let loc = mkLoc startPos p.prevEndPos in + ( attrs, + Asttypes.Labelled lblName, + Ast_helper.Pat.var ~loc (Location.mkloc lblName loc) )) + | _ -> + let pattern = parseConstrainedPattern p in + let attrs = List.concat [attrs; pattern.ppat_attributes] in + ([], Asttypes.Nolabel, {pattern with ppat_attributes = attrs}) + in + match p.Parser.token with + | Equal -> ( + Parser.next p; + let lbl = + match lbl with + | Asttypes.Labelled lblName -> Asttypes.Optional lblName + | Asttypes.Nolabel -> + let lblName = + match pat.ppat_desc with + | Ppat_var var -> var.txt + | _ -> "" + in + Parser.err ~startPos ~endPos:p.prevEndPos p + (Diagnostics.message + (ErrorMessages.missingTildeLabeledParameter lblName)); + Asttypes.Optional lblName + | lbl -> lbl + in + match p.Parser.token with + | Question -> + Parser.next p; + Some + (TermParameter + {uncurried; attrs; label = lbl; expr = None; pat; pos = startPos}) + | _ -> + let expr = parseConstrainedOrCoercedExpr p in + Some + (TermParameter + { + uncurried; + attrs; + label = lbl; + expr = Some expr; + pat; + pos = startPos; + })) + | _ -> + Some + (TermParameter + {uncurried; attrs; label = lbl; expr = None; pat; pos = startPos}) + else None + +and parseParameterList p = + let parameters = + parseCommaDelimitedRegion ~grammar:Grammar.ParameterList ~f:parseParameter + ~closing:Rparen p + in + Parser.expect Rparen p; + parameters + +(* parameters ::= + * | _ + * | lident + * | () + * | (.) 
+ * | ( parameter {, parameter} [,] ) + *) +and parseParameters p = + let startPos = p.Parser.startPos in + match p.Parser.token with + | Lident ident -> + Parser.next p; + let loc = mkLoc startPos p.Parser.prevEndPos in + [ + TermParameter + { + uncurried = false; + attrs = []; + label = Asttypes.Nolabel; + expr = None; + pat = Ast_helper.Pat.var ~loc (Location.mkloc ident loc); + pos = startPos; + }; + ] + | Underscore -> + Parser.next p; + let loc = mkLoc startPos p.Parser.prevEndPos in + [ + TermParameter + { + uncurried = false; + attrs = []; + label = Asttypes.Nolabel; + expr = None; + pat = Ast_helper.Pat.any ~loc (); + pos = startPos; + }; + ] + | Lparen -> ( + Parser.next p; + match p.Parser.token with + | Rparen -> + Parser.next p; + let loc = mkLoc startPos p.Parser.prevEndPos in + let unitPattern = + Ast_helper.Pat.construct ~loc + (Location.mkloc (Longident.Lident "()") loc) + None + in + [ + TermParameter + { + uncurried = false; + attrs = []; + label = Asttypes.Nolabel; + expr = None; + pat = unitPattern; + pos = startPos; + }; + ] + | Dot -> ( + Parser.next p; + match p.token with + | Rparen -> + Parser.next p; + let loc = mkLoc startPos p.Parser.prevEndPos in + let unitPattern = + Ast_helper.Pat.construct ~loc + (Location.mkloc (Longident.Lident "()") loc) + None + in + [ + TermParameter + { + uncurried = true; + attrs = []; + label = Asttypes.Nolabel; + expr = None; + pat = unitPattern; + pos = startPos; + }; + ] + | _ -> ( + match parseParameterList p with + | TermParameter + { + attrs; + label = lbl; + expr = defaultExpr; + pat = pattern; + pos = startPos; + } + :: rest -> + TermParameter + { + uncurried = true; + attrs; + label = lbl; + expr = defaultExpr; + pat = pattern; + pos = startPos; + } + :: rest + | parameters -> parameters)) + | _ -> parseParameterList p) + | token -> + Parser.err p (Diagnostics.unexpected token p.breadcrumbs); + [] + +and parseCoercedExpr ~(expr : Parsetree.expression) p = + Parser.expect ColonGreaterThan p; + let typ = parseTypExpr p in + let loc = mkLoc expr.pexp_loc.loc_start p.prevEndPos in + Ast_helper.Exp.coerce ~loc expr None typ + +and parseConstrainedOrCoercedExpr p = + let expr = parseExpr p in + match p.Parser.token with + | ColonGreaterThan -> parseCoercedExpr ~expr p + | Colon -> ( + Parser.next p; + match p.token with + | _ -> ( + let typ = parseTypExpr p in + let loc = mkLoc expr.pexp_loc.loc_start typ.ptyp_loc.loc_end in + let expr = Ast_helper.Exp.constraint_ ~loc expr typ in + match p.token with + | ColonGreaterThan -> parseCoercedExpr ~expr p + | _ -> expr)) + | _ -> expr + +and parseConstrainedExprRegion p = + match p.Parser.token with + | token when Grammar.isExprStart token -> ( + let expr = parseExpr p in + match p.Parser.token with + | Colon -> + Parser.next p; + let typ = parseTypExpr p in + let loc = mkLoc expr.pexp_loc.loc_start typ.ptyp_loc.loc_end in + Some (Ast_helper.Exp.constraint_ ~loc expr typ) + | _ -> Some expr) + | _ -> None + +(* Atomic expressions represent unambiguous expressions. + * This means that regardless of the context, these expressions + * are always interpreted correctly. 
*) +and parseAtomicExpr p = + Parser.leaveBreadcrumb p Grammar.ExprOperand; + let startPos = p.Parser.startPos in + let expr = + match p.Parser.token with + | (True | False) as token -> + Parser.next p; + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.construct ~loc + (Location.mkloc (Longident.Lident (Token.toString token)) loc) + None + | Int _ | String _ | Float _ | Codepoint _ -> + let c = parseConstant p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.constant ~loc c + | Backtick -> + let expr = parseTemplateExpr p in + {expr with pexp_loc = mkLoc startPos p.prevEndPos} + | Uident _ | Lident _ -> parseValueOrConstructor p + | Hash -> parsePolyVariantExpr p + | Lparen -> ( + Parser.next p; + match p.Parser.token with + | Rparen -> + Parser.next p; + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.construct ~loc + (Location.mkloc (Longident.Lident "()") loc) + None + | _t -> ( + let expr = parseConstrainedOrCoercedExpr p in + match p.token with + | Comma -> + Parser.next p; + parseTupleExpr ~startPos ~first:expr p + | _ -> + Parser.expect Rparen p; + expr + (* {expr with pexp_loc = mkLoc startPos p.prevEndPos} + * What does this location mean here? It means that when there's + * a parenthesized we keep the location here for whitespace interleaving. + * Without the closing paren in the location there will always be an extra + * line. For now we don't include it, because it does weird things + * with for comments. *))) + | List -> + Parser.next p; + parseListExpr ~startPos p + | Module -> + Parser.next p; + parseFirstClassModuleExpr ~startPos p + | Lbracket -> parseArrayExp p + | Lbrace -> parseBracedOrRecordExpr p + | LessThan -> parseJsx p + | Percent -> + let extension = parseExtension p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.extension ~loc extension + | Underscore as token -> + (* This case is for error recovery. 
Not sure if it's the correct place *) + Parser.err p (Diagnostics.lident token); + Parser.next p; + Recover.defaultExpr () + | Eof -> + Parser.err ~startPos:p.prevEndPos p + (Diagnostics.unexpected p.Parser.token p.breadcrumbs); + Recover.defaultExpr () + | token -> ( + let errPos = p.prevEndPos in + Parser.err ~startPos:errPos p (Diagnostics.unexpected token p.breadcrumbs); + match + skipTokensAndMaybeRetry p ~isStartOfGrammar:Grammar.isAtomicExprStart + with + | None -> Recover.defaultExpr () + | Some () -> parseAtomicExpr p) + in + Parser.eatBreadcrumb p; + expr + +(* module(module-expr) + * module(module-expr : package-type) *) +and parseFirstClassModuleExpr ~startPos p = + Parser.expect Lparen p; + + let modExpr = parseModuleExpr p in + let modEndLoc = p.prevEndPos in + match p.Parser.token with + | Colon -> + let colonStart = p.Parser.startPos in + Parser.next p; + let attrs = parseAttributes p in + let packageType = parsePackageType ~startPos:colonStart ~attrs p in + Parser.expect Rparen p; + let loc = mkLoc startPos modEndLoc in + let firstClassModule = Ast_helper.Exp.pack ~loc modExpr in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.constraint_ ~loc firstClassModule packageType + | _ -> + Parser.expect Rparen p; + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.pack ~loc modExpr + +and parseBracketAccess p expr startPos = + Parser.leaveBreadcrumb p Grammar.ExprArrayAccess; + let lbracket = p.startPos in + Parser.expect Lbracket p; + let stringStart = p.startPos in + match p.Parser.token with + | String s -> ( + Parser.next p; + let stringEnd = p.prevEndPos in + Parser.expect Rbracket p; + Parser.eatBreadcrumb p; + let rbracket = p.prevEndPos in + let e = + let identLoc = mkLoc stringStart stringEnd in + let loc = mkLoc startPos rbracket in + Ast_helper.Exp.send ~loc expr (Location.mkloc s identLoc) + in + let e = parsePrimaryExpr ~operand:e p in + let equalStart = p.startPos in + match p.token with + | Equal -> + Parser.next p; + let equalEnd = p.prevEndPos in + let rhsExpr = parseExpr p in + let loc = mkLoc startPos rhsExpr.pexp_loc.loc_end in + let operatorLoc = mkLoc equalStart equalEnd in + Ast_helper.Exp.apply ~loc + (Ast_helper.Exp.ident ~loc:operatorLoc + (Location.mkloc (Longident.Lident "#=") operatorLoc)) + [(Nolabel, e); (Nolabel, rhsExpr)] + | _ -> e) + | _ -> ( + let accessExpr = parseConstrainedOrCoercedExpr p in + Parser.expect Rbracket p; + Parser.eatBreadcrumb p; + let rbracket = p.prevEndPos in + let arrayLoc = mkLoc lbracket rbracket in + match p.token with + | Equal -> + Parser.leaveBreadcrumb p ExprArrayMutation; + Parser.next p; + let rhsExpr = parseExpr p in + let arraySet = + Location.mkloc (Longident.Ldot (Lident "Array", "set")) arrayLoc + in + let endPos = p.prevEndPos in + let arraySet = + Ast_helper.Exp.apply ~loc:(mkLoc startPos endPos) + (Ast_helper.Exp.ident ~loc:arrayLoc arraySet) + [(Nolabel, expr); (Nolabel, accessExpr); (Nolabel, rhsExpr)] + in + Parser.eatBreadcrumb p; + arraySet + | _ -> + let endPos = p.prevEndPos in + let e = + Ast_helper.Exp.apply ~loc:(mkLoc startPos endPos) + (Ast_helper.Exp.ident ~loc:arrayLoc + (Location.mkloc (Longident.Ldot (Lident "Array", "get")) arrayLoc)) + [(Nolabel, expr); (Nolabel, accessExpr)] + in + parsePrimaryExpr ~operand:e p) + +(* * A primary expression represents + * - atomic-expr + * - john.age + * - array[0] + * - applyFunctionTo(arg1, arg2) + * + * The "operand" represents the expression that is operated on + *) +and parsePrimaryExpr ~operand ?(noCall = false) p = + let 
startPos = operand.pexp_loc.loc_start in + let rec loop p expr = + match p.Parser.token with + | Dot -> ( + Parser.next p; + let lident = parseValuePathAfterDot p in + match p.Parser.token with + | Equal when noCall = false -> + Parser.leaveBreadcrumb p Grammar.ExprSetField; + Parser.next p; + let targetExpr = parseExpr p in + let loc = mkLoc startPos p.prevEndPos in + let setfield = Ast_helper.Exp.setfield ~loc expr lident targetExpr in + Parser.eatBreadcrumb p; + setfield + | _ -> + let endPos = p.prevEndPos in + let loc = mkLoc startPos endPos in + loop p (Ast_helper.Exp.field ~loc expr lident)) + | Lbracket + when noCall = false && p.prevEndPos.pos_lnum == p.startPos.pos_lnum -> + parseBracketAccess p expr startPos + | Lparen when noCall = false && p.prevEndPos.pos_lnum == p.startPos.pos_lnum + -> + loop p (parseCallExpr p expr) + | Backtick + when noCall = false && p.prevEndPos.pos_lnum == p.startPos.pos_lnum -> ( + match expr.pexp_desc with + | Pexp_ident {txt = Longident.Lident ident} -> + parseTemplateExpr ~prefix:ident p + | _ -> + Parser.err ~startPos:expr.pexp_loc.loc_start + ~endPos:expr.pexp_loc.loc_end p + (Diagnostics.message + "Tagged template literals are currently restricted to names like: \ + json`null`."); + parseTemplateExpr p) + | _ -> expr + in + loop p operand + +(* a unary expression is an expression with only one operand and + * unary operator. Examples: + * -1 + * !condition + * -. 1.6 + *) +and parseUnaryExpr p = + let startPos = p.Parser.startPos in + match p.Parser.token with + | (Minus | MinusDot | Plus | PlusDot | Bang) as token -> + Parser.leaveBreadcrumb p Grammar.ExprUnary; + let tokenEnd = p.endPos in + Parser.next p; + let operand = parseUnaryExpr p in + let unaryExpr = makeUnaryExpr startPos tokenEnd token operand in + Parser.eatBreadcrumb p; + unaryExpr + | _ -> parsePrimaryExpr ~operand:(parseAtomicExpr p) p + +(* Represents an "operand" in a binary expression. + * If you have `a + b`, `a` and `b` both represent + * the operands of the binary expression with opeartor `+` *) +and parseOperandExpr ~context p = + let startPos = p.Parser.startPos in + let attrs = parseAttributes p in + let expr = + match p.Parser.token with + | Assert -> + Parser.next p; + let expr = parseUnaryExpr p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.assert_ ~loc expr + | Lident "async" + (* we need to be careful when we're in a ternary true branch: + `condition ? ternary-true-branch : false-branch` + Arrow expressions could be of the form: `async (): int => stuff()` + But if we're in a ternary, the `:` of the ternary takes precedence + *) + when isEs6ArrowExpression ~inTernary:(context = TernaryTrueBranchExpr) p + -> + parseAsyncArrowExpression p + | Await -> parseAwaitExpression p + | Lazy -> + Parser.next p; + let expr = parseUnaryExpr p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.lazy_ ~loc expr + | Try -> parseTryExpression p + | If -> parseIfOrIfLetExpression p + | For -> parseForExpression p + | While -> parseWhileExpression p + | Switch -> parseSwitchExpression p + | _ -> + if + context != WhenExpr + && isEs6ArrowExpression ~inTernary:(context = TernaryTrueBranchExpr) p + then parseEs6ArrowExpression ~context p + else parseUnaryExpr p + in + (* let endPos = p.Parser.prevEndPos in *) + { + expr with + pexp_attributes = List.concat [expr.Parsetree.pexp_attributes; attrs]; + (* pexp_loc = mkLoc startPos endPos *) + } + +(* a binary expression is an expression that combines two expressions with an + * operator. 
Examples: + * a + b + * f(x) |> g(y) + *) +and parseBinaryExpr ?(context = OrdinaryExpr) ?a p prec = + let a = + match a with + | Some e -> e + | None -> parseOperandExpr ~context p + in + let rec loop a = + let token = p.Parser.token in + let tokenPrec = + match token with + (* Can the minus be interpreted as a binary operator? Or is it a unary? + * let w = { + * x + * -10 + * } + * vs + * let w = { + * width + * - gap + * } + * + * First case is unary, second is a binary operator. + * See Scanner.isBinaryOp *) + | (Minus | MinusDot | LessThan) + when (not + (Scanner.isBinaryOp p.scanner.src p.startPos.pos_cnum + p.endPos.pos_cnum)) + && p.startPos.pos_lnum > p.prevEndPos.pos_lnum -> + -1 + | token -> Token.precedence token + in + if tokenPrec < prec then a + else ( + Parser.leaveBreadcrumb p (Grammar.ExprBinaryAfterOp token); + let startPos = p.startPos in + Parser.next p; + let endPos = p.prevEndPos in + let b = parseBinaryExpr ~context p (tokenPrec + 1) in + let loc = mkLoc a.Parsetree.pexp_loc.loc_start b.pexp_loc.loc_end in + let expr = + Ast_helper.Exp.apply ~loc + (makeInfixOperator p token startPos endPos) + [(Nolabel, a); (Nolabel, b)] + in + Parser.eatBreadcrumb p; + loop expr) + in + loop a + +(* If we even need this, determines if < might be the start of jsx. Not 100% complete *) +(* and isStartOfJsx p = *) +(* Parser.lookahead p (fun p -> *) +(* match p.Parser.token with *) +(* | LessThan -> *) +(* Parser.next p; *) +(* begin match p.token with *) +(* | GreaterThan (* <> *) -> true *) +(* | Lident _ | Uident _ | List -> *) +(* ignore (parseJsxName p); *) +(* begin match p.token with *) +(* | GreaterThan (*
*) -> true *) +(* | Question (* true *) +(* | Lident _ | List -> *) +(* Parser.next p; *) +(* begin match p.token with *) +(* | Equal (* true *) +(* | _ -> false (* TODO *) *) +(* end *) +(* | Forwardslash (* *) +(* Parser.next p; *) +(* begin match p.token with *) +(* | GreaterThan (* *) -> true *) +(* | _ -> false *) +(* end *) +(* | _ -> *) +(* false *) +(* end *) +(* | _ -> false *) +(* end *) +(* | _ -> false *) +(* ) *) + +and parseTemplateExpr ?(prefix = "js") p = + let hiddenOperator = + let op = Location.mknoloc (Longident.Lident "^") in + Ast_helper.Exp.ident op + in + let concat (e1 : Parsetree.expression) (e2 : Parsetree.expression) = + let loc = mkLoc e1.pexp_loc.loc_start e2.pexp_loc.loc_end in + Ast_helper.Exp.apply ~attrs:[templateLiteralAttr] ~loc hiddenOperator + [(Nolabel, e1); (Nolabel, e2)] + in + let rec parseParts (acc : Parsetree.expression) = + let startPos = p.Parser.startPos in + Parser.nextTemplateLiteralToken p; + match p.token with + | TemplateTail (txt, lastPos) -> + Parser.next p; + let loc = mkLoc startPos lastPos in + let str = + Ast_helper.Exp.constant ~attrs:[templateLiteralAttr] ~loc + (Pconst_string (txt, Some prefix)) + in + concat acc str + | TemplatePart (txt, lastPos) -> + Parser.next p; + let loc = mkLoc startPos lastPos in + let expr = parseExprBlock p in + let str = + Ast_helper.Exp.constant ~attrs:[templateLiteralAttr] ~loc + (Pconst_string (txt, Some prefix)) + in + let next = + let a = concat acc str in + concat a expr + in + parseParts next + | token -> + Parser.err p (Diagnostics.unexpected token p.breadcrumbs); + Ast_helper.Exp.constant (Pconst_string ("", None)) + in + let startPos = p.startPos in + Parser.nextTemplateLiteralToken p; + match p.token with + | TemplateTail (txt, lastPos) -> + Parser.next p; + Ast_helper.Exp.constant ~attrs:[templateLiteralAttr] + ~loc:(mkLoc startPos lastPos) + (Pconst_string (txt, Some prefix)) + | TemplatePart (txt, lastPos) -> + Parser.next p; + let constantLoc = mkLoc startPos lastPos in + let expr = parseExprBlock p in + let str = + Ast_helper.Exp.constant ~attrs:[templateLiteralAttr] ~loc:constantLoc + (Pconst_string (txt, Some prefix)) + in + let next = concat str expr in + parseParts next + | token -> + Parser.err p (Diagnostics.unexpected token p.breadcrumbs); + Ast_helper.Exp.constant (Pconst_string ("", None)) + +(* Overparse: let f = a : int => a + 1, is it (a : int) => or (a): int => + * Also overparse constraints: + * let x = { + * let a = 1 + * a + pi: int + * } + * + * We want to give a nice error message in these cases + * *) +and overParseConstrainedOrCoercedOrArrowExpression p expr = + match p.Parser.token with + | ColonGreaterThan -> parseCoercedExpr ~expr p + | Colon -> ( + Parser.next p; + let typ = parseTypExpr ~es6Arrow:false p in + match p.Parser.token with + | EqualGreater -> + Parser.next p; + let body = parseExpr p in + let pat = + match expr.pexp_desc with + | Pexp_ident longident -> + Ast_helper.Pat.var ~loc:expr.pexp_loc + (Location.mkloc + (Longident.flatten longident.txt |> String.concat ".") + longident.loc) + (* TODO: can we convert more expressions to patterns?*) + | _ -> + Ast_helper.Pat.var ~loc:expr.pexp_loc + (Location.mkloc "pattern" expr.pexp_loc) + in + let arrow1 = + Ast_helper.Exp.fun_ + ~loc:(mkLoc expr.pexp_loc.loc_start body.pexp_loc.loc_end) + Asttypes.Nolabel None pat + (Ast_helper.Exp.constraint_ body typ) + in + let arrow2 = + Ast_helper.Exp.fun_ + ~loc:(mkLoc expr.pexp_loc.loc_start body.pexp_loc.loc_end) + Asttypes.Nolabel None + 
(Ast_helper.Pat.constraint_ pat typ) + body + in + let msg = + Doc.breakableGroup ~forceBreak:true + (Doc.concat + [ + Doc.text + "Did you mean to annotate the parameter type or the return \ + type?"; + Doc.indent + (Doc.concat + [ + Doc.line; + Doc.text "1) "; + ResPrinter.printExpression arrow1 CommentTable.empty; + Doc.line; + Doc.text "2) "; + ResPrinter.printExpression arrow2 CommentTable.empty; + ]); + ]) + |> Doc.toString ~width:80 + in + Parser.err ~startPos:expr.pexp_loc.loc_start ~endPos:body.pexp_loc.loc_end + p (Diagnostics.message msg); + arrow1 + | _ -> + let loc = mkLoc expr.pexp_loc.loc_start typ.ptyp_loc.loc_end in + let expr = Ast_helper.Exp.constraint_ ~loc expr typ in + let () = + Parser.err ~startPos:expr.pexp_loc.loc_start + ~endPos:typ.ptyp_loc.loc_end p + (Diagnostics.message + (Doc.breakableGroup ~forceBreak:true + (Doc.concat + [ + Doc.text + "Expressions with type constraints need to be wrapped \ + in parens:"; + Doc.indent + (Doc.concat + [ + Doc.line; + ResPrinter.addParens + (ResPrinter.printExpression expr + CommentTable.empty); + ]); + ]) + |> Doc.toString ~width:80)) + in + expr) + | _ -> expr + +and parseLetBindingBody ~startPos ~attrs p = + Parser.beginRegion p; + Parser.leaveBreadcrumb p Grammar.LetBinding; + let pat, exp = + Parser.leaveBreadcrumb p Grammar.Pattern; + let pat = parsePattern p in + Parser.eatBreadcrumb p; + match p.Parser.token with + | Colon -> ( + Parser.next p; + match p.token with + | Typ -> + (* locally abstract types *) + Parser.next p; + let newtypes = parseLidentList p in + Parser.expect Dot p; + let typ = parseTypExpr p in + Parser.expect Equal p; + let expr = parseExpr p in + let loc = mkLoc startPos p.prevEndPos in + let exp, poly = wrapTypeAnnotation ~loc newtypes typ expr in + let pat = Ast_helper.Pat.constraint_ ~loc pat poly in + (pat, exp) + | _ -> + let polyType = parsePolyTypeExpr p in + let loc = + {pat.ppat_loc with loc_end = polyType.Parsetree.ptyp_loc.loc_end} + in + let pat = Ast_helper.Pat.constraint_ ~loc pat polyType in + Parser.expect Token.Equal p; + let exp = parseExpr p in + let exp = overParseConstrainedOrCoercedOrArrowExpression p exp in + (pat, exp)) + | _ -> + Parser.expect Token.Equal p; + let exp = + overParseConstrainedOrCoercedOrArrowExpression p (parseExpr p) + in + (pat, exp) + in + let loc = mkLoc startPos p.prevEndPos in + let vb = Ast_helper.Vb.mk ~loc ~attrs pat exp in + Parser.eatBreadcrumb p; + Parser.endRegion p; + vb + +(* TODO: find a better way? Is it possible? 
+ * let a = 1 + * @attr + * and b = 2 + * + * The problem is that without semi we need a lookahead to determine + * if the attr is on the letbinding or the start of a new thing + * + * let a = 1 + * @attr + * let b = 1 + * + * Here @attr should attach to something "new": `let b = 1` + * The parser state is forked, which is quite expensive… + *) +and parseAttributesAndBinding (p : Parser.t) = + let err = p.scanner.err in + let ch = p.scanner.ch in + let offset = p.scanner.offset in + let lineOffset = p.scanner.lineOffset in + let lnum = p.scanner.lnum in + let mode = p.scanner.mode in + let token = p.token in + let startPos = p.startPos in + let endPos = p.endPos in + let prevEndPos = p.prevEndPos in + let breadcrumbs = p.breadcrumbs in + let errors = p.errors in + let diagnostics = p.diagnostics in + let comments = p.comments in + + match p.Parser.token with + | At -> ( + let attrs = parseAttributes p in + match p.Parser.token with + | And -> attrs + | _ -> + p.scanner.err <- err; + p.scanner.ch <- ch; + p.scanner.offset <- offset; + p.scanner.lineOffset <- lineOffset; + p.scanner.lnum <- lnum; + p.scanner.mode <- mode; + p.token <- token; + p.startPos <- startPos; + p.endPos <- endPos; + p.prevEndPos <- prevEndPos; + p.breadcrumbs <- breadcrumbs; + p.errors <- errors; + p.diagnostics <- diagnostics; + p.comments <- comments; + []) + | _ -> [] + +(* definition ::= let [rec] let-binding { and let-binding } *) +and parseLetBindings ~attrs p = + let startPos = p.Parser.startPos in + Parser.optional p Let |> ignore; + let recFlag = + if Parser.optional p Token.Rec then Asttypes.Recursive + else Asttypes.Nonrecursive + in + let first = parseLetBindingBody ~startPos ~attrs p in + + let rec loop p bindings = + let startPos = p.Parser.startPos in + let attrs = parseAttributesAndBinding p in + match p.Parser.token with + | And -> + Parser.next p; + ignore (Parser.optional p Let); + (* overparse for fault tolerance *) + let letBinding = parseLetBindingBody ~startPos ~attrs p in + loop p (letBinding :: bindings) + | _ -> List.rev bindings + in + (recFlag, loop p [first]) + +(* + * div -> div + * Foo -> Foo.createElement + * Foo.Bar -> Foo.Bar.createElement + *) +and parseJsxName p = + let longident = + match p.Parser.token with + | Lident ident -> + let identStart = p.startPos in + let identEnd = p.endPos in + Parser.next p; + let loc = mkLoc identStart identEnd in + Location.mkloc (Longident.Lident ident) loc + | Uident _ -> + let longident = parseModuleLongIdent ~lowercase:true p in + Location.mkloc + (Longident.Ldot (longident.txt, "createElement")) + longident.loc + | _ -> + let msg = + "A jsx name must be a lowercase or uppercase name, like: div in
<div /> or Navbar in <Navbar />" in
+ Parser.err p (Diagnostics.message msg);
+ Location.mknoloc (Longident.Lident "_")
+ in
+ Ast_helper.Exp.ident ~loc:longident.loc longident
+
+and parseJsxOpeningOrSelfClosingElement ~startPos p =
+ let jsxStartPos = p.Parser.startPos in
+ let name = parseJsxName p in
+ let jsxProps = parseJsxProps p in
+ let children =
+ match p.Parser.token with
+ | Forwardslash ->
+ (* <foo a=b /> *)
+ let childrenStartPos = p.Parser.startPos in
+ Parser.next p;
+ let childrenEndPos = p.Parser.startPos in
+ Parser.expect GreaterThan p;
+ let loc = mkLoc childrenStartPos childrenEndPos in
+ makeListExpression loc [] None (* no children *)
+ | GreaterThan -> (
+ (* <foo a=b> bar </foo> *)
+ let childrenStartPos = p.Parser.startPos in
+ Scanner.setJsxMode p.scanner;
+ Parser.next p;
+ let spread, children = parseJsxChildren p in
+ let childrenEndPos = p.Parser.startPos in
+ let () =
+ match p.token with
+ | LessThanSlash -> Parser.next p
+ | LessThan ->
+ Parser.next p;
+ Parser.expect Forwardslash p
+ | token when Grammar.isStructureItemStart token -> ()
+ | _ -> Parser.expect LessThanSlash p
+ in
+ match p.Parser.token with
+ | (Lident _ | Uident _) when verifyJsxOpeningClosingName p name -> (
+ Parser.expect GreaterThan p;
+ let loc = mkLoc childrenStartPos childrenEndPos in
+ match (spread, children) with
+ | true, child :: _ -> child
+ | _ -> makeListExpression loc children None)
+ | token -> (
+ let () =
+ if Grammar.isStructureItemStart token then
+ let closing = "</" ^ string_of_pexp_ident name ^ ">" in
+ let msg = Diagnostics.message ("Missing " ^ closing) in
+ Parser.err ~startPos ~endPos:p.prevEndPos p msg
+ else
+ let opening = "</" ^ string_of_pexp_ident name ^ ">" in
+ let msg =
+ "Closing jsx name should be the same as the opening name. Did \
+ you mean " ^ opening ^ " ?"
+ in
+ Parser.err ~startPos ~endPos:p.prevEndPos p
+ (Diagnostics.message msg);
+ Parser.expect GreaterThan p
+ in
+ let loc = mkLoc childrenStartPos childrenEndPos in
+ match (spread, children) with
+ | true, child :: _ -> child
+ | _ -> makeListExpression loc children None))
+ | token ->
+ Parser.err p (Diagnostics.unexpected token p.breadcrumbs);
+ makeListExpression Location.none [] None
+ in
+ let jsxEndPos = p.prevEndPos in
+ let loc = mkLoc jsxStartPos jsxEndPos in
+ Ast_helper.Exp.apply ~loc name
+ (List.concat
+ [
+ jsxProps;
+ [
+ (Asttypes.Labelled "children", children);
+ ( Asttypes.Nolabel,
+ Ast_helper.Exp.construct
+ (Location.mknoloc (Longident.Lident "()"))
+ None );
+ ];
+ ])
+
+(*
+ * jsx ::=
+ * | <> jsx-children </>
+ * | <element-name key=value />
+ * | <element-name key=value> jsx-children </element-name>
+ *
+ * jsx-children ::= primary-expr* * => 0 or more
+ *)
+and parseJsx p =
+ Parser.leaveBreadcrumb p Grammar.Jsx;
+ let startPos = p.Parser.startPos in
+ Parser.expect LessThan p;
+ let jsxExpr =
+ match p.Parser.token with
+ | Lident _ | Uident _ -> parseJsxOpeningOrSelfClosingElement ~startPos p
+ | GreaterThan ->
+ (* fragment: <> foo </> *)
+ parseJsxFragment p
+ | _ -> parseJsxName p
+ in
+ Parser.eatBreadcrumb p;
+ {jsxExpr with pexp_attributes = [jsxAttr]}
+
+(*
+ * jsx-fragment ::=
+ * | <> </>
+ * | <> jsx-children </>
+ *)
+and parseJsxFragment p =
+ let childrenStartPos = p.Parser.startPos in
+ Scanner.setJsxMode p.scanner;
+ Parser.expect GreaterThan p;
+ let _spread, children = parseJsxChildren p in
+ let childrenEndPos = p.Parser.startPos in
+ Parser.expect LessThanSlash p;
+ Parser.expect GreaterThan p;
+ let loc = mkLoc childrenStartPos childrenEndPos in
+ makeListExpression loc children None
+
+(*
+ * jsx-prop ::=
+ * | lident
+ * | ?lident
+ * | lident = jsx_expr
+ * | lident = ?jsx_expr
+ * | {...jsx_expr}
+ *)
+and parseJsxProp p =
+
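+ (* Illustrative sketch (made-up names): in <Button active onClick=?handler {...rest} />,
+ the punned prop becomes ~active=active, the ?-form yields an optional labelled
+ argument, and the spread is passed under the _spreadProps label below. *)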
match p.Parser.token with + | Question | Lident _ -> ( + let optional = Parser.optional p Question in + let name, loc = parseLident p in + let propLocAttr = + (Location.mkloc "ns.namedArgLoc" loc, Parsetree.PStr []) + in + (* optional punning: *) + if optional then + Some + ( Asttypes.Optional name, + Ast_helper.Exp.ident ~attrs:[propLocAttr] ~loc + (Location.mkloc (Longident.Lident name) loc) ) + else + match p.Parser.token with + | Equal -> + Parser.next p; + (* no punning *) + let optional = Parser.optional p Question in + let attrExpr = + let e = parsePrimaryExpr ~operand:(parseAtomicExpr p) p in + {e with pexp_attributes = propLocAttr :: e.pexp_attributes} + in + let label = + if optional then Asttypes.Optional name else Asttypes.Labelled name + in + Some (label, attrExpr) + | _ -> + let attrExpr = + Ast_helper.Exp.ident ~loc ~attrs:[propLocAttr] + (Location.mkloc (Longident.Lident name) loc) + in + let label = + if optional then Asttypes.Optional name else Asttypes.Labelled name + in + Some (label, attrExpr)) + (* {...props} *) + | Lbrace -> ( + Parser.next p; + match p.Parser.token with + | DotDotDot -> ( + Parser.next p; + let loc = mkLoc p.Parser.startPos p.prevEndPos in + let propLocAttr = + (Location.mkloc "ns.namedArgLoc" loc, Parsetree.PStr []) + in + let attrExpr = + let e = parsePrimaryExpr ~operand:(parseAtomicExpr p) p in + {e with pexp_attributes = propLocAttr :: e.pexp_attributes} + in + (* using label "spreadProps" to distinguish from others *) + let label = Asttypes.Labelled "_spreadProps" in + match p.Parser.token with + | Rbrace -> + Parser.next p; + Some (label, attrExpr) + | _ -> None) + | _ -> None) + | _ -> None + +and parseJsxProps p = + parseRegion ~grammar:Grammar.JsxAttribute ~f:parseJsxProp p + +and parseJsxChildren p = + let rec loop p children = + match p.Parser.token with + | Token.Eof | LessThanSlash -> + Scanner.popMode p.scanner Jsx; + List.rev children + | LessThan -> + (* Imagine:
<div> <
+ * is `<` the start of a jsx-child? <div />
+ * reconsiderLessThan peeks at the next token and + * determines the correct token to disambiguate *) + let token = Scanner.reconsiderLessThan p.scanner in + if token = LessThan then + let child = + parsePrimaryExpr ~operand:(parseAtomicExpr p) ~noCall:true p + in + loop p (child :: children) + else + (* LessThanSlash *) + let () = p.token <- token in + let () = Scanner.popMode p.scanner Jsx in + List.rev children + | token when Grammar.isJsxChildStart token -> + let () = Scanner.popMode p.scanner Jsx in + let child = + parsePrimaryExpr ~operand:(parseAtomicExpr p) ~noCall:true p + in + loop p (child :: children) + | _ -> + Scanner.popMode p.scanner Jsx; + List.rev children + in + match p.Parser.token with + | DotDotDot -> + Parser.next p; + (true, [parsePrimaryExpr ~operand:(parseAtomicExpr p) ~noCall:true p]) + | _ -> (false, loop p []) + +and parseBracedOrRecordExpr p = + let startPos = p.Parser.startPos in + Parser.expect Lbrace p; + match p.Parser.token with + | Rbrace -> + Parser.next p; + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.record ~loc [] None + | DotDotDot -> + (* beginning of record spread, parse record *) + Parser.next p; + let spreadExpr = parseConstrainedOrCoercedExpr p in + Parser.expect Comma p; + let expr = parseRecordExpr ~startPos ~spread:(Some spreadExpr) [] p in + Parser.expect Rbrace p; + expr + | String s -> ( + let field = + let loc = mkLoc p.startPos p.endPos in + Parser.next p; + Location.mkloc (Longident.Lident s) loc + in + match p.Parser.token with + | Colon -> + Parser.next p; + let fieldExpr = parseExpr p in + Parser.optional p Comma |> ignore; + let expr = parseRecordExprWithStringKeys ~startPos (field, fieldExpr) p in + Parser.expect Rbrace p; + expr + | _ -> ( + let tag = if p.mode = ParseForTypeChecker then Some "js" else None in + let constant = + Ast_helper.Exp.constant ~loc:field.loc + (Parsetree.Pconst_string (s, tag)) + in + let a = parsePrimaryExpr ~operand:constant p in + let e = parseBinaryExpr ~a p 1 in + let e = parseTernaryExpr e p in + match p.Parser.token with + | Semicolon -> + let expr = parseExprBlock ~first:e p in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + let braces = makeBracesAttr loc in + { + expr with + Parsetree.pexp_attributes = braces :: expr.Parsetree.pexp_attributes; + } + | Rbrace -> + Parser.next p; + let loc = mkLoc startPos p.prevEndPos in + let braces = makeBracesAttr loc in + {e with pexp_attributes = braces :: e.pexp_attributes} + | _ -> + let expr = parseExprBlock ~first:e p in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + let braces = makeBracesAttr loc in + {expr with pexp_attributes = braces :: expr.pexp_attributes})) + | Question -> + let expr = parseRecordExpr ~startPos [] p in + Parser.expect Rbrace p; + expr + (* + The branch below takes care of the "braced" expression {async}. + The big reason that we need all these branches is that {x} isn't a record with a punned field x, but a braced expression… There's lots of "ambiguity" between a record with a single punned field and a braced expression… + What is {x}? 
+ 1) record {x: x} + 2) expression x which happens to wrapped in braces + Due to historical reasons, we always follow 2 + *) + | Lident "async" when isEs6ArrowExpression ~inTernary:false p -> + let expr = parseAsyncArrowExpression p in + let expr = parseExprBlock ~first:expr p in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + let braces = makeBracesAttr loc in + {expr with pexp_attributes = braces :: expr.pexp_attributes} + | Uident _ | Lident _ -> ( + let startToken = p.token in + let valueOrConstructor = parseValueOrConstructor p in + match valueOrConstructor.pexp_desc with + | Pexp_ident pathIdent -> ( + let identEndPos = p.prevEndPos in + match p.Parser.token with + | Comma -> + Parser.next p; + let valueOrConstructor = + match startToken with + | Uident _ -> removeModuleNameFromPunnedFieldValue valueOrConstructor + | _ -> valueOrConstructor + in + let expr = + parseRecordExpr ~startPos [(pathIdent, valueOrConstructor)] p + in + Parser.expect Rbrace p; + expr + | Colon -> ( + Parser.next p; + let optional = parseOptionalLabel p in + let fieldExpr = parseExpr p in + let fieldExpr = makeExpressionOptional ~optional fieldExpr in + match p.token with + | Rbrace -> + Parser.next p; + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.record ~loc [(pathIdent, fieldExpr)] None + | _ -> + Parser.expect Comma p; + let expr = parseRecordExpr ~startPos [(pathIdent, fieldExpr)] p in + Parser.expect Rbrace p; + expr) + (* error case *) + | Lident _ -> + if p.prevEndPos.pos_lnum < p.startPos.pos_lnum then ( + Parser.expect Comma p; + let expr = + parseRecordExpr ~startPos [(pathIdent, valueOrConstructor)] p + in + Parser.expect Rbrace p; + expr) + else ( + Parser.expect Colon p; + let expr = + parseRecordExpr ~startPos [(pathIdent, valueOrConstructor)] p + in + Parser.expect Rbrace p; + expr) + | Semicolon -> + let expr = parseExprBlock ~first:(Ast_helper.Exp.ident pathIdent) p in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + let braces = makeBracesAttr loc in + {expr with pexp_attributes = braces :: expr.pexp_attributes} + | Rbrace -> + Parser.next p; + let expr = Ast_helper.Exp.ident ~loc:pathIdent.loc pathIdent in + let loc = mkLoc startPos p.prevEndPos in + let braces = makeBracesAttr loc in + {expr with pexp_attributes = braces :: expr.pexp_attributes} + | EqualGreater -> ( + let loc = mkLoc startPos identEndPos in + let ident = Location.mkloc (Longident.last pathIdent.txt) loc in + let a = + parseEs6ArrowExpression + ~parameters: + [ + TermParameter + { + uncurried = false; + attrs = []; + label = Asttypes.Nolabel; + expr = None; + pat = Ast_helper.Pat.var ident; + pos = startPos; + }; + ] + p + in + let e = parseBinaryExpr ~a p 1 in + let e = parseTernaryExpr e p in + match p.Parser.token with + | Semicolon -> + let expr = parseExprBlock ~first:e p in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + let braces = makeBracesAttr loc in + {expr with pexp_attributes = braces :: expr.pexp_attributes} + | Rbrace -> + Parser.next p; + let loc = mkLoc startPos p.prevEndPos in + let braces = makeBracesAttr loc in + {e with pexp_attributes = braces :: e.pexp_attributes} + | _ -> + let expr = parseExprBlock ~first:e p in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + let braces = makeBracesAttr loc in + {expr with pexp_attributes = braces :: expr.pexp_attributes}) + | _ -> ( + Parser.leaveBreadcrumb p Grammar.ExprBlock; + let a = + parsePrimaryExpr + ~operand:(Ast_helper.Exp.ident ~loc:pathIdent.loc 
pathIdent) + p + in + let e = parseBinaryExpr ~a p 1 in + let e = parseTernaryExpr e p in + Parser.eatBreadcrumb p; + match p.Parser.token with + | Semicolon -> + let expr = parseExprBlock ~first:e p in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + let braces = makeBracesAttr loc in + {expr with pexp_attributes = braces :: expr.pexp_attributes} + | Rbrace -> + Parser.next p; + let loc = mkLoc startPos p.prevEndPos in + let braces = makeBracesAttr loc in + {e with pexp_attributes = braces :: e.pexp_attributes} + | _ -> + let expr = parseExprBlock ~first:e p in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + let braces = makeBracesAttr loc in + {expr with pexp_attributes = braces :: expr.pexp_attributes})) + | _ -> ( + Parser.leaveBreadcrumb p Grammar.ExprBlock; + let a = parsePrimaryExpr ~operand:valueOrConstructor p in + let e = parseBinaryExpr ~a p 1 in + let e = parseTernaryExpr e p in + Parser.eatBreadcrumb p; + match p.Parser.token with + | Semicolon -> + let expr = parseExprBlock ~first:e p in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + let braces = makeBracesAttr loc in + {expr with pexp_attributes = braces :: expr.pexp_attributes} + | Rbrace -> + Parser.next p; + let loc = mkLoc startPos p.prevEndPos in + let braces = makeBracesAttr loc in + {e with pexp_attributes = braces :: e.pexp_attributes} + | _ -> + let expr = parseExprBlock ~first:e p in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + let braces = makeBracesAttr loc in + {expr with pexp_attributes = braces :: expr.pexp_attributes})) + | _ -> + let expr = parseExprBlock p in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + let braces = makeBracesAttr loc in + {expr with pexp_attributes = braces :: expr.pexp_attributes} + +and parseRecordExprRowWithStringKey p = + match p.Parser.token with + | String s -> ( + let loc = mkLoc p.startPos p.endPos in + Parser.next p; + let field = Location.mkloc (Longident.Lident s) loc in + match p.Parser.token with + | Colon -> + Parser.next p; + let fieldExpr = parseExpr p in + Some (field, fieldExpr) + | _ -> Some (field, Ast_helper.Exp.ident ~loc:field.loc field)) + | _ -> None + +and parseRecordExprRow p = + let attrs = parseAttributes p in + let () = + match p.Parser.token with + | Token.DotDotDot -> + Parser.err p (Diagnostics.message ErrorMessages.recordExprSpread); + Parser.next p + | _ -> () + in + match p.Parser.token with + | Lident _ | Uident _ -> ( + let startToken = p.token in + let field = parseValuePath p in + match p.Parser.token with + | Colon -> + Parser.next p; + let optional = parseOptionalLabel p in + let fieldExpr = parseExpr p in + let fieldExpr = makeExpressionOptional ~optional fieldExpr in + Some (field, fieldExpr) + | _ -> + let value = Ast_helper.Exp.ident ~loc:field.loc ~attrs field in + let value = + match startToken with + | Uident _ -> removeModuleNameFromPunnedFieldValue value + | _ -> value + in + Some (field, value)) + | Question -> ( + Parser.next p; + match p.Parser.token with + | Lident _ | Uident _ -> + let startToken = p.token in + let field = parseValuePath p in + let value = Ast_helper.Exp.ident ~loc:field.loc ~attrs field in + let value = + match startToken with + | Uident _ -> removeModuleNameFromPunnedFieldValue value + | _ -> value + in + Some (field, makeExpressionOptional ~optional:true value) + | _ -> None) + | _ -> None + +and parseRecordExprWithStringKeys ~startPos firstRow p = + let rows = + firstRow + :: 
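+ (* Added sketch: a string-keyed literal such as {"name": x, "age": y} collects
+ its remaining rows here and is re-emitted below as an [%obj ...] extension
+ around a record expression; field names are illustrative. *)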
parseCommaDelimitedRegion ~grammar:Grammar.RecordRowsStringKey + ~closing:Rbrace ~f:parseRecordExprRowWithStringKey p + in + let loc = mkLoc startPos p.endPos in + let recordStrExpr = + Ast_helper.Str.eval ~loc (Ast_helper.Exp.record ~loc rows None) + in + Ast_helper.Exp.extension ~loc + (Location.mkloc "obj" loc, Parsetree.PStr [recordStrExpr]) + +and parseRecordExpr ~startPos ?(spread = None) rows p = + let exprs = + parseCommaDelimitedRegion ~grammar:Grammar.RecordRows ~closing:Rbrace + ~f:parseRecordExprRow p + in + let rows = List.concat [rows; exprs] in + let () = + match rows with + | [] -> + let msg = "Record spread needs at least one field that's updated" in + Parser.err p (Diagnostics.message msg) + | _rows -> () + in + let loc = mkLoc startPos p.endPos in + Ast_helper.Exp.record ~loc rows spread + +and parseNewlineOrSemicolonExprBlock p = + match p.Parser.token with + | Semicolon -> Parser.next p + | token when Grammar.isBlockExprStart token -> + if p.prevEndPos.pos_lnum < p.startPos.pos_lnum then () + else + Parser.err ~startPos:p.prevEndPos ~endPos:p.endPos p + (Diagnostics.message + "consecutive expressions on a line must be separated by ';' or a \ + newline") + | _ -> () + +and parseExprBlockItem p = + let startPos = p.Parser.startPos in + let attrs = parseAttributes p in + match p.Parser.token with + | Module -> ( + Parser.next p; + match p.token with + | Lparen -> + let expr = parseFirstClassModuleExpr ~startPos p in + let a = parsePrimaryExpr ~operand:expr p in + let expr = parseBinaryExpr ~a p 1 in + parseTernaryExpr expr p + | _ -> + let name = + match p.Parser.token with + | Uident ident -> + let loc = mkLoc p.startPos p.endPos in + Parser.next p; + Location.mkloc ident loc + | t -> + Parser.err p (Diagnostics.uident t); + Location.mknoloc "_" + in + let body = parseModuleBindingBody p in + parseNewlineOrSemicolonExprBlock p; + let expr = parseExprBlock p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.letmodule ~loc name body expr) + | Exception -> + let extensionConstructor = parseExceptionDef ~attrs p in + parseNewlineOrSemicolonExprBlock p; + let blockExpr = parseExprBlock p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.letexception ~loc extensionConstructor blockExpr + | Open -> + let od = parseOpenDescription ~attrs p in + parseNewlineOrSemicolonExprBlock p; + let blockExpr = parseExprBlock p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.open_ ~loc od.popen_override od.popen_lid blockExpr + | Let -> + let recFlag, letBindings = parseLetBindings ~attrs p in + parseNewlineOrSemicolonExprBlock p; + let next = + if Grammar.isBlockExprStart p.Parser.token then parseExprBlock p + else + let loc = mkLoc p.startPos p.endPos in + Ast_helper.Exp.construct ~loc + (Location.mkloc (Longident.Lident "()") loc) + None + in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.let_ ~loc recFlag letBindings next + | _ -> + let e1 = + let expr = parseExpr p in + {expr with pexp_attributes = List.concat [attrs; expr.pexp_attributes]} + in + parseNewlineOrSemicolonExprBlock p; + if Grammar.isBlockExprStart p.Parser.token then + let e2 = parseExprBlock p in + let loc = {e1.pexp_loc with loc_end = e2.pexp_loc.loc_end} in + Ast_helper.Exp.sequence ~loc e1 e2 + else e1 + +(* blockExpr ::= expr + * | expr ; + * | expr ; blockExpr + * | module ... ; blockExpr + * | open ... ; blockExpr + * | exception ... ; blockExpr + * | let ... + * | let ... ; + * | let ... 
; blockExpr + * + * note: semi should be made optional + * a block of expression is always + *) +and parseExprBlock ?first p = + Parser.leaveBreadcrumb p Grammar.ExprBlock; + let item = + match first with + | Some e -> e + | None -> parseExprBlockItem p + in + parseNewlineOrSemicolonExprBlock p; + let blockExpr = + if Grammar.isBlockExprStart p.Parser.token then + let next = parseExprBlockItem p in + let loc = {item.pexp_loc with loc_end = next.pexp_loc.loc_end} in + Ast_helper.Exp.sequence ~loc item next + else item + in + Parser.eatBreadcrumb p; + overParseConstrainedOrCoercedOrArrowExpression p blockExpr + +and parseAsyncArrowExpression p = + let startPos = p.Parser.startPos in + Parser.expect (Lident "async") p; + let asyncAttr = makeAsyncAttr (mkLoc startPos p.prevEndPos) in + let expr = parseEs6ArrowExpression p in + { + expr with + pexp_attributes = asyncAttr :: expr.pexp_attributes; + pexp_loc = {expr.pexp_loc with loc_start = startPos}; + } + +and parseAwaitExpression p = + let awaitLoc = mkLoc p.Parser.startPos p.endPos in + let awaitAttr = makeAwaitAttr awaitLoc in + Parser.expect Await p; + let tokenPrec = Token.precedence MinusGreater in + let expr = parseBinaryExpr ~context:OrdinaryExpr p tokenPrec in + { + expr with + pexp_attributes = awaitAttr :: expr.pexp_attributes; + pexp_loc = {expr.pexp_loc with loc_start = awaitLoc.loc_start}; + } + +and parseTryExpression p = + let startPos = p.Parser.startPos in + Parser.expect Try p; + let expr = parseExpr ~context:WhenExpr p in + Parser.expect Res_token.catch p; + Parser.expect Lbrace p; + let cases = parsePatternMatching p in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.try_ ~loc expr cases + +and parseIfCondition p = + Parser.leaveBreadcrumb p Grammar.IfCondition; + (* doesn't make sense to try es6 arrow here? 
*) + let conditionExpr = parseExpr ~context:WhenExpr p in + Parser.eatBreadcrumb p; + conditionExpr + +and parseThenBranch p = + Parser.leaveBreadcrumb p IfBranch; + Parser.expect Lbrace p; + let thenExpr = parseExprBlock p in + Parser.expect Rbrace p; + Parser.eatBreadcrumb p; + thenExpr + +and parseElseBranch p = + Parser.expect Lbrace p; + let blockExpr = parseExprBlock p in + Parser.expect Rbrace p; + blockExpr + +and parseIfExpr startPos p = + let conditionExpr = parseIfCondition p in + let thenExpr = parseThenBranch p in + let elseExpr = + match p.Parser.token with + | Else -> + Parser.endRegion p; + Parser.leaveBreadcrumb p Grammar.ElseBranch; + Parser.next p; + Parser.beginRegion p; + let elseExpr = + match p.token with + | If -> parseIfOrIfLetExpression p + | _ -> parseElseBranch p + in + Parser.eatBreadcrumb p; + Parser.endRegion p; + Some elseExpr + | _ -> + Parser.endRegion p; + None + in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.ifthenelse ~loc conditionExpr thenExpr elseExpr + +and parseIfLetExpr startPos p = + let pattern = parsePattern p in + Parser.expect Equal p; + let conditionExpr = parseIfCondition p in + let thenExpr = parseThenBranch p in + let elseExpr = + match p.Parser.token with + | Else -> + Parser.endRegion p; + Parser.leaveBreadcrumb p Grammar.ElseBranch; + Parser.next p; + Parser.beginRegion p; + let elseExpr = + match p.token with + | If -> parseIfOrIfLetExpression p + | _ -> parseElseBranch p + in + Parser.eatBreadcrumb p; + Parser.endRegion p; + elseExpr + | _ -> + Parser.endRegion p; + let startPos = p.Parser.startPos in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.construct ~loc + (Location.mkloc (Longident.Lident "()") loc) + None + in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.match_ + ~attrs:[ifLetAttr; suppressFragileMatchWarningAttr] + ~loc conditionExpr + [ + Ast_helper.Exp.case pattern thenExpr; + Ast_helper.Exp.case (Ast_helper.Pat.any ()) elseExpr; + ] + +and parseIfOrIfLetExpression p = + Parser.beginRegion p; + Parser.leaveBreadcrumb p Grammar.ExprIf; + let startPos = p.Parser.startPos in + Parser.expect If p; + let expr = + match p.Parser.token with + | Let -> + Parser.next p; + let ifLetExpr = parseIfLetExpr startPos p in + Parser.err ~startPos:ifLetExpr.pexp_loc.loc_start + ~endPos:ifLetExpr.pexp_loc.loc_end p + (Diagnostics.message (ErrorMessages.experimentalIfLet ifLetExpr)); + ifLetExpr + | _ -> parseIfExpr startPos p + in + Parser.eatBreadcrumb p; + expr + +and parseForRest hasOpeningParen pattern startPos p = + Parser.expect In p; + let e1 = parseExpr p in + let direction = + match p.Parser.token with + | Lident "to" -> Asttypes.Upto + | Lident "downto" -> Asttypes.Downto + | token -> + Parser.err p (Diagnostics.unexpected token p.breadcrumbs); + Asttypes.Upto + in + if p.Parser.token = Eof then + Parser.err ~startPos:p.startPos p + (Diagnostics.unexpected p.Parser.token p.breadcrumbs) + else Parser.next p; + let e2 = parseExpr ~context:WhenExpr p in + if hasOpeningParen then Parser.expect Rparen p; + Parser.expect Lbrace p; + let bodyExpr = parseExprBlock p in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.for_ ~loc pattern e1 e2 direction bodyExpr + +and parseForExpression p = + let startPos = p.Parser.startPos in + Parser.leaveBreadcrumb p Grammar.ExprFor; + Parser.expect For p; + Parser.beginRegion p; + let forExpr = + match p.token with + | Lparen -> ( + let lparen = p.startPos in + Parser.next p; + match p.token with + | Rparen -> + 
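+ (* Sketch: empty parens after for, e.g. for () in 0 to 3 { () }, are accepted
+ by the parser as a unit pattern before handing off to parseForRest. *)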
Parser.next p; + let unitPattern = + let loc = mkLoc lparen p.prevEndPos in + let lid = Location.mkloc (Longident.Lident "()") loc in + Ast_helper.Pat.construct lid None + in + parseForRest false + (parseAliasPattern ~attrs:[] unitPattern p) + startPos p + | _ -> ( + Parser.leaveBreadcrumb p Grammar.Pattern; + let pat = parsePattern p in + Parser.eatBreadcrumb p; + match p.token with + | Comma -> + Parser.next p; + let tuplePattern = + parseTuplePattern ~attrs:[] ~startPos:lparen ~first:pat p + in + let pattern = parseAliasPattern ~attrs:[] tuplePattern p in + parseForRest false pattern startPos p + | _ -> parseForRest true pat startPos p)) + | _ -> + Parser.leaveBreadcrumb p Grammar.Pattern; + let pat = parsePattern p in + Parser.eatBreadcrumb p; + parseForRest false pat startPos p + in + Parser.eatBreadcrumb p; + Parser.endRegion p; + forExpr + +and parseWhileExpression p = + let startPos = p.Parser.startPos in + Parser.expect While p; + let expr1 = parseExpr ~context:WhenExpr p in + Parser.expect Lbrace p; + let expr2 = parseExprBlock p in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.while_ ~loc expr1 expr2 + +and parsePatternGuard p = + match p.Parser.token with + | When | If -> + Parser.next p; + Some (parseExpr ~context:WhenExpr p) + | _ -> None + +and parsePatternMatchCase p = + Parser.beginRegion p; + Parser.leaveBreadcrumb p Grammar.PatternMatchCase; + match p.Parser.token with + | Token.Bar -> + Parser.next p; + Parser.leaveBreadcrumb p Grammar.Pattern; + let lhs = parsePattern p in + Parser.eatBreadcrumb p; + let guard = parsePatternGuard p in + let () = + match p.token with + | EqualGreater -> Parser.next p + | _ -> Recover.recoverEqualGreater p + in + let rhs = parseExprBlock p in + Parser.endRegion p; + Parser.eatBreadcrumb p; + Some (Ast_helper.Exp.case lhs ?guard rhs) + | _ -> + Parser.endRegion p; + Parser.eatBreadcrumb p; + None + +and parsePatternMatching p = + let cases = + parseDelimitedRegion ~grammar:Grammar.PatternMatching ~closing:Rbrace + ~f:parsePatternMatchCase p + in + let () = + match cases with + | [] -> + Parser.err ~startPos:p.prevEndPos p + (Diagnostics.message "Pattern matching needs at least one case") + | _ -> () + in + cases + +and parseSwitchExpression p = + let startPos = p.Parser.startPos in + Parser.expect Switch p; + let switchExpr = parseExpr ~context:WhenExpr p in + Parser.expect Lbrace p; + let cases = parsePatternMatching p in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.match_ ~loc switchExpr cases + +(* + * argument ::= + * | _ (* syntax sugar *) + * | expr + * | expr : type + * | ~ label-name + * | ~ label-name + * | ~ label-name ? + * | ~ label-name = expr + * | ~ label-name = _ (* syntax sugar *) + * | ~ label-name = expr : type + * | ~ label-name = ? expr + * | ~ label-name = ? _ (* syntax sugar *) + * | ~ label-name = ? expr : type + * + * uncurried_argument ::= + * | . argument + *) +and parseArgument p = + if + p.Parser.token = Token.Tilde + || p.token = Dot || p.token = Underscore + || Grammar.isExprStart p.token + then + match p.Parser.token with + | Dot -> ( + let uncurried = true in + Parser.next p; + match p.token with + (* apply(.) 
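+ Added note: i.e. a zero-argument uncurried call such as f(.), with f a
+ made-up name; the Rparen case below supplies a unit expression as the
+ single argument.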
*) + | Rparen -> + let unitExpr = + Ast_helper.Exp.construct + (Location.mknoloc (Longident.Lident "()")) + None + in + Some (uncurried, Asttypes.Nolabel, unitExpr) + | _ -> parseArgument2 p ~uncurried) + | _ -> parseArgument2 p ~uncurried:false + else None + +and parseArgument2 p ~uncurried = + match p.Parser.token with + (* foo(_), do not confuse with foo(_ => x), TODO: performance *) + | Underscore when not (isEs6ArrowExpression ~inTernary:false p) -> + let loc = mkLoc p.startPos p.endPos in + Parser.next p; + let exp = + Ast_helper.Exp.ident ~loc (Location.mkloc (Longident.Lident "_") loc) + in + Some (uncurried, Asttypes.Nolabel, exp) + | Tilde -> ( + Parser.next p; + (* TODO: nesting of pattern matches not intuitive for error recovery *) + match p.Parser.token with + | Lident ident -> ( + let startPos = p.startPos in + Parser.next p; + let endPos = p.prevEndPos in + let loc = mkLoc startPos endPos in + let propLocAttr = + (Location.mkloc "ns.namedArgLoc" loc, Parsetree.PStr []) + in + let identExpr = + Ast_helper.Exp.ident ~attrs:[propLocAttr] ~loc + (Location.mkloc (Longident.Lident ident) loc) + in + match p.Parser.token with + | Question -> + Parser.next p; + Some (uncurried, Asttypes.Optional ident, identExpr) + | Equal -> + Parser.next p; + let label = + match p.Parser.token with + | Question -> + Parser.next p; + Asttypes.Optional ident + | _ -> Labelled ident + in + let expr = + match p.Parser.token with + | Underscore when not (isEs6ArrowExpression ~inTernary:false p) -> + let loc = mkLoc p.startPos p.endPos in + Parser.next p; + Ast_helper.Exp.ident ~loc + (Location.mkloc (Longident.Lident "_") loc) + | _ -> + let expr = parseConstrainedOrCoercedExpr p in + {expr with pexp_attributes = propLocAttr :: expr.pexp_attributes} + in + Some (uncurried, label, expr) + | Colon -> + Parser.next p; + let typ = parseTypExpr p in + let loc = mkLoc startPos p.prevEndPos in + let expr = + Ast_helper.Exp.constraint_ ~attrs:[propLocAttr] ~loc identExpr typ + in + Some (uncurried, Labelled ident, expr) + | _ -> Some (uncurried, Labelled ident, identExpr)) + | t -> + Parser.err p (Diagnostics.lident t); + Some (uncurried, Nolabel, Recover.defaultExpr ())) + | _ -> Some (uncurried, Nolabel, parseConstrainedOrCoercedExpr p) + +and parseCallExpr p funExpr = + Parser.expect Lparen p; + let startPos = p.Parser.startPos in + Parser.leaveBreadcrumb p Grammar.ExprCall; + let args = + parseCommaDelimitedRegion ~grammar:Grammar.ArgumentList ~closing:Rparen + ~f:parseArgument p + in + Parser.expect Rparen p; + let args = + match args with + | [] -> + let loc = mkLoc startPos p.prevEndPos in + (* No args -> unit sugar: `foo()` *) + [ + ( false, + Asttypes.Nolabel, + Ast_helper.Exp.construct ~loc + (Location.mkloc (Longident.Lident "()") loc) + None ); + ] + | [ + ( true, + Asttypes.Nolabel, + ({ + pexp_desc = Pexp_construct ({txt = Longident.Lident "()"}, None); + pexp_loc = loc; + pexp_attributes = []; + } as expr) ); + ] + when (not loc.loc_ghost) && p.mode = ParseForTypeChecker -> + (* Since there is no syntax space for arity zero vs arity one, + * we expand + * `fn(. ())` into + * `fn(. {let __res_unit = (); __res_unit})` + * when the parsetree is intended for type checking + * + * Note: + * `fn(.)` is treated as zero arity application. 
+ * The invisible unit expression here has loc_ghost === true + * + * Related: https://github.com/rescript-lang/syntax/issues/138 + *) + [ + ( true, + Asttypes.Nolabel, + Ast_helper.Exp.let_ Asttypes.Nonrecursive + [ + Ast_helper.Vb.mk + (Ast_helper.Pat.var (Location.mknoloc "__res_unit")) + expr; + ] + (Ast_helper.Exp.ident + (Location.mknoloc (Longident.Lident "__res_unit"))) ); + ] + | args -> args + in + let loc = {funExpr.pexp_loc with loc_end = p.prevEndPos} in + let args = + match args with + | (u, lbl, expr) :: args -> + let group (grp, acc) (uncurried, lbl, expr) = + let _u, grp = grp in + if uncurried == true then + ((true, [(lbl, expr)]), (_u, List.rev grp) :: acc) + else ((_u, (lbl, expr) :: grp), acc) + in + let (_u, grp), acc = List.fold_left group ((u, [(lbl, expr)]), []) args in + List.rev ((_u, List.rev grp) :: acc) + | [] -> [] + in + let apply = + List.fold_left + (fun callBody group -> + let uncurried, args = group in + let args, wrap = processUnderscoreApplication args in + let exp = + if uncurried then + let attrs = [uncurryAttr] in + Ast_helper.Exp.apply ~loc ~attrs callBody args + else Ast_helper.Exp.apply ~loc callBody args + in + wrap exp) + funExpr args + in + Parser.eatBreadcrumb p; + apply + +and parseValueOrConstructor p = + let startPos = p.Parser.startPos in + let rec aux p acc = + match p.Parser.token with + | Uident ident -> ( + let endPosLident = p.endPos in + Parser.next p; + match p.Parser.token with + | Dot -> + Parser.next p; + aux p (ident :: acc) + | Lparen when p.prevEndPos.pos_lnum == p.startPos.pos_lnum -> + let lparen = p.startPos in + let args = parseConstructorArgs p in + let rparen = p.prevEndPos in + let lident = buildLongident (ident :: acc) in + let tail = + match args with + | [] -> None + | [({Parsetree.pexp_desc = Pexp_tuple _} as arg)] as args -> + let loc = mkLoc lparen rparen in + if p.mode = ParseForTypeChecker then + (* Some(1, 2) for type-checker *) + Some arg + else + (* Some((1, 2)) for printer *) + Some (Ast_helper.Exp.tuple ~loc args) + | [arg] -> Some arg + | args -> + let loc = mkLoc lparen rparen in + Some (Ast_helper.Exp.tuple ~loc args) + in + let loc = mkLoc startPos p.prevEndPos in + let identLoc = mkLoc startPos endPosLident in + Ast_helper.Exp.construct ~loc (Location.mkloc lident identLoc) tail + | _ -> + let loc = mkLoc startPos p.prevEndPos in + let lident = buildLongident (ident :: acc) in + Ast_helper.Exp.construct ~loc (Location.mkloc lident loc) None) + | Lident ident -> + Parser.next p; + let loc = mkLoc startPos p.prevEndPos in + let lident = buildLongident (ident :: acc) in + Ast_helper.Exp.ident ~loc (Location.mkloc lident loc) + | token -> + if acc = [] then ( + Parser.nextUnsafe p; + Parser.err p (Diagnostics.unexpected token p.breadcrumbs); + Recover.defaultExpr ()) + else + let loc = mkLoc startPos p.prevEndPos in + Parser.err p (Diagnostics.unexpected token p.breadcrumbs); + let lident = buildLongident ("_" :: acc) in + Ast_helper.Exp.ident ~loc (Location.mkloc lident loc) + in + aux p [] + +and parsePolyVariantExpr p = + let startPos = p.startPos in + let ident, _loc = parseHashIdent ~startPos p in + match p.Parser.token with + | Lparen when p.prevEndPos.pos_lnum == p.startPos.pos_lnum -> + let lparen = p.startPos in + let args = parseConstructorArgs p in + let rparen = p.prevEndPos in + let loc_paren = mkLoc lparen rparen in + let tail = + match args with + | [] -> None + | [({Parsetree.pexp_desc = Pexp_tuple _} as expr)] as args -> + if p.mode = ParseForTypeChecker then + (* #a(1, 2) for 
type-checker *) + Some expr + else + (* #a((1, 2)) for type-checker *) + Some (Ast_helper.Exp.tuple ~loc:loc_paren args) + | [arg] -> Some arg + | args -> + (* #a((1, 2)) for printer *) + Some (Ast_helper.Exp.tuple ~loc:loc_paren args) + in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.variant ~loc ident tail + | _ -> + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.variant ~loc ident None + +and parseConstructorArgs p = + let lparen = p.Parser.startPos in + Parser.expect Lparen p; + let args = + parseCommaDelimitedRegion ~grammar:Grammar.ExprList + ~f:parseConstrainedExprRegion ~closing:Rparen p + in + Parser.expect Rparen p; + match args with + | [] -> + let loc = mkLoc lparen p.prevEndPos in + [ + Ast_helper.Exp.construct ~loc + (Location.mkloc (Longident.Lident "()") loc) + None; + ] + | args -> args + +and parseTupleExpr ~first ~startPos p = + let exprs = + first + :: parseCommaDelimitedRegion p ~grammar:Grammar.ExprList ~closing:Rparen + ~f:parseConstrainedExprRegion + in + Parser.expect Rparen p; + let () = + match exprs with + | [_] -> + Parser.err ~startPos ~endPos:p.prevEndPos p + (Diagnostics.message ErrorMessages.tupleSingleElement) + | _ -> () + in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Exp.tuple ~loc exprs + +and parseSpreadExprRegionWithLoc p = + let startPos = p.Parser.prevEndPos in + match p.Parser.token with + | DotDotDot -> + Parser.next p; + let expr = parseConstrainedOrCoercedExpr p in + Some (true, expr, startPos, p.prevEndPos) + | token when Grammar.isExprStart token -> + Some (false, parseConstrainedOrCoercedExpr p, startPos, p.prevEndPos) + | _ -> None + +and parseListExpr ~startPos p = + let split_by_spread exprs = + List.fold_left + (fun acc curr -> + match (curr, acc) with + | (true, expr, startPos, endPos), _ -> + (* find a spread expression, prepend a new sublist *) + ([], Some expr, startPos, endPos) :: acc + | ( (false, expr, startPos, _endPos), + (no_spreads, spread, _accStartPos, accEndPos) :: acc ) -> + (* find a non-spread expression, and the accumulated is not empty, + * prepend to the first sublist, and update the loc of the first sublist *) + (expr :: no_spreads, spread, startPos, accEndPos) :: acc + | (false, expr, startPos, endPos), [] -> + (* find a non-spread expression, and the accumulated is empty *) + [([expr], None, startPos, endPos)]) + [] exprs + in + let make_sub_expr = function + | exprs, Some spread, startPos, endPos -> + makeListExpression (mkLoc startPos endPos) exprs (Some spread) + | exprs, None, startPos, endPos -> + makeListExpression (mkLoc startPos endPos) exprs None + in + let listExprsRev = + parseCommaDelimitedReversedList p ~grammar:Grammar.ListExpr ~closing:Rbrace + ~f:parseSpreadExprRegionWithLoc + in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + match split_by_spread listExprsRev with + | [] -> makeListExpression loc [] None + | [(exprs, Some spread, _, _)] -> makeListExpression loc exprs (Some spread) + | [(exprs, None, _, _)] -> makeListExpression loc exprs None + | exprs -> + let listExprs = List.map make_sub_expr exprs in + Ast_helper.Exp.apply ~loc + (Ast_helper.Exp.ident ~loc ~attrs:[spreadAttr] + (Location.mkloc + (Longident.Ldot + (Longident.Ldot (Longident.Lident "Belt", "List"), "concatMany")) + loc)) + [(Asttypes.Nolabel, Ast_helper.Exp.array ~loc listExprs)] + +(* Overparse ... 
and give a nice error message *) +and parseNonSpreadExp ~msg p = + let () = + match p.Parser.token with + | DotDotDot -> + Parser.err p (Diagnostics.message msg); + Parser.next p + | _ -> () + in + match p.Parser.token with + | token when Grammar.isExprStart token -> ( + let expr = parseExpr p in + match p.Parser.token with + | Colon -> + Parser.next p; + let typ = parseTypExpr p in + let loc = mkLoc expr.pexp_loc.loc_start typ.ptyp_loc.loc_end in + Some (Ast_helper.Exp.constraint_ ~loc expr typ) + | _ -> Some expr) + | _ -> None + +and parseArrayExp p = + let startPos = p.Parser.startPos in + Parser.expect Lbracket p; + let exprs = + parseCommaDelimitedRegion p ~grammar:Grammar.ExprList ~closing:Rbracket + ~f:(parseNonSpreadExp ~msg:ErrorMessages.arrayExprSpread) + in + Parser.expect Rbracket p; + Ast_helper.Exp.array ~loc:(mkLoc startPos p.prevEndPos) exprs + +(* TODO: check attributes in the case of poly type vars, + * might be context dependend: parseFieldDeclaration (see ocaml) *) +and parsePolyTypeExpr p = + let startPos = p.Parser.startPos in + match p.Parser.token with + | SingleQuote -> ( + let vars = parseTypeVarList p in + match vars with + | _v1 :: _v2 :: _ -> + Parser.expect Dot p; + let typ = parseTypExpr p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Typ.poly ~loc vars typ + | [var] -> ( + match p.Parser.token with + | Dot -> + Parser.next p; + let typ = parseTypExpr p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Typ.poly ~loc vars typ + | EqualGreater -> + Parser.next p; + let typ = Ast_helper.Typ.var ~loc:var.loc var.txt in + let returnType = parseTypExpr ~alias:false p in + let loc = mkLoc typ.Parsetree.ptyp_loc.loc_start p.prevEndPos in + Ast_helper.Typ.arrow ~loc Asttypes.Nolabel typ returnType + | _ -> Ast_helper.Typ.var ~loc:var.loc var.txt) + | _ -> assert false) + | _ -> parseTypExpr p + +(* 'a 'b 'c *) +and parseTypeVarList p = + let rec loop p vars = + match p.Parser.token with + | SingleQuote -> + Parser.next p; + let lident, loc = parseLident p in + let var = Location.mkloc lident loc in + loop p (var :: vars) + | _ -> List.rev vars + in + loop p [] + +and parseLidentList p = + let rec loop p ls = + match p.Parser.token with + | Lident lident -> + let loc = mkLoc p.startPos p.endPos in + Parser.next p; + loop p (Location.mkloc lident loc :: ls) + | _ -> List.rev ls + in + loop p [] + +and parseAtomicTypExpr ~attrs p = + Parser.leaveBreadcrumb p Grammar.AtomicTypExpr; + let startPos = p.Parser.startPos in + let typ = + match p.Parser.token with + | SingleQuote -> + Parser.next p; + let ident, loc = + if p.Parser.token = Eof then ( + Parser.err ~startPos:p.startPos p + (Diagnostics.unexpected p.Parser.token p.breadcrumbs); + ("", mkLoc p.startPos p.prevEndPos)) + else parseIdent ~msg:ErrorMessages.typeVar ~startPos:p.startPos p + in + Ast_helper.Typ.var ~loc ~attrs ident + | Underscore -> + let endPos = p.endPos in + Parser.next p; + Ast_helper.Typ.any ~loc:(mkLoc startPos endPos) ~attrs () + | Lparen -> ( + Parser.next p; + match p.Parser.token with + | Rparen -> + Parser.next p; + let loc = mkLoc startPos p.prevEndPos in + let unitConstr = Location.mkloc (Longident.Lident "unit") loc in + Ast_helper.Typ.constr ~attrs unitConstr [] + | _ -> ( + let t = parseTypExpr p in + match p.token with + | Comma -> + Parser.next p; + parseTupleType ~attrs ~first:t ~startPos p + | _ -> + Parser.expect Rparen p; + { + t with + ptyp_loc = mkLoc startPos p.prevEndPos; + ptyp_attributes = List.concat [attrs; t.ptyp_attributes]; + })) + | Lbracket -> 
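+ (* e.g. a polymorphic variant type such as [#on | #off]; constructors are illustrative *)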
parsePolymorphicVariantType ~attrs p + | Uident _ | Lident _ -> + let constr = parseValuePath p in + let args = parseTypeConstructorArgs ~constrName:constr p in + Ast_helper.Typ.constr + ~loc:(mkLoc startPos p.prevEndPos) + ~attrs constr args + | Module -> + Parser.next p; + Parser.expect Lparen p; + let packageType = parsePackageType ~startPos ~attrs p in + Parser.expect Rparen p; + {packageType with ptyp_loc = mkLoc startPos p.prevEndPos} + | Percent -> + let extension = parseExtension p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Typ.extension ~attrs ~loc extension + | Lbrace -> parseRecordOrObjectType ~attrs p + | Eof -> + Parser.err p (Diagnostics.unexpected p.Parser.token p.breadcrumbs); + Recover.defaultType () + | token -> ( + Parser.err p (Diagnostics.unexpected token p.breadcrumbs); + match + skipTokensAndMaybeRetry p ~isStartOfGrammar:Grammar.isAtomicTypExprStart + with + | Some () -> parseAtomicTypExpr ~attrs p + | None -> + Parser.err ~startPos:p.prevEndPos p + (Diagnostics.unexpected token p.breadcrumbs); + Recover.defaultType ()) + in + Parser.eatBreadcrumb p; + typ + +(* package-type ::= + | modtype-path + ∣ modtype-path with package-constraint { and package-constraint } +*) +and parsePackageType ~startPos ~attrs p = + let modTypePath = parseModuleLongIdent ~lowercase:true p in + match p.Parser.token with + | Lident "with" -> + Parser.next p; + let constraints = parsePackageConstraints p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Typ.package ~loc ~attrs modTypePath constraints + | _ -> + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Typ.package ~loc ~attrs modTypePath [] + +(* package-constraint { and package-constraint } *) +and parsePackageConstraints p = + let first = + Parser.expect Typ p; + let typeConstr = parseValuePath p in + Parser.expect Equal p; + let typ = parseTypExpr p in + (typeConstr, typ) + in + let rest = + parseRegion ~grammar:Grammar.PackageConstraint ~f:parsePackageConstraint p + in + first :: rest + +(* and type typeconstr = typexpr *) +and parsePackageConstraint p = + match p.Parser.token with + | And -> + Parser.next p; + Parser.expect Typ p; + let typeConstr = parseValuePath p in + Parser.expect Equal p; + let typ = parseTypExpr p in + Some (typeConstr, typ) + | _ -> None + +and parseRecordOrObjectType ~attrs p = + (* for inline record in constructor *) + let startPos = p.Parser.startPos in + Parser.expect Lbrace p; + let closedFlag = + match p.token with + | DotDot -> + Parser.next p; + Asttypes.Open + | Dot -> + Parser.next p; + Asttypes.Closed + | _ -> Asttypes.Closed + in + let () = + match p.token with + | Lident _ -> + Parser.err p + (Diagnostics.message ErrorMessages.forbiddenInlineRecordDeclaration) + | _ -> () + in + let startFirstField = p.startPos in + let fields = + parseCommaDelimitedRegion ~grammar:Grammar.StringFieldDeclarations + ~closing:Rbrace ~f:parseStringFieldDeclaration p + in + let () = + match fields with + | [Parsetree.Oinherit {ptyp_loc}] -> + (* {...x}, spread without extra fields *) + Parser.err p ~startPos:startFirstField ~endPos:ptyp_loc.loc_end + (Diagnostics.message ErrorMessages.sameTypeSpread) + | _ -> () + in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Typ.object_ ~loc ~attrs fields closedFlag + +(* TODO: check associativity in combination with attributes *) +and parseTypeAlias p typ = + match p.Parser.token with + | As -> + Parser.next p; + Parser.expect SingleQuote p; + let ident, _loc = parseLident p in + (* TODO: how do we parse 
attributes here? *) + Ast_helper.Typ.alias + ~loc:(mkLoc typ.Parsetree.ptyp_loc.loc_start p.prevEndPos) + typ ident + | _ -> typ + +(* type_parameter ::= + * | type_expr + * | ~ident: type_expr + * | ~ident: type_expr=? + * + * note: + * | attrs ~ident: type_expr -> attrs are on the arrow + * | attrs type_expr -> attrs are here part of the type_expr + * + * uncurried_type_parameter ::= + * | . type_parameter +*) +and parseTypeParameter p = + if + p.Parser.token = Token.Tilde + || p.token = Dot + || Grammar.isTypExprStart p.token + then + let startPos = p.Parser.startPos in + let uncurried = Parser.optional p Dot in + let attrs = parseAttributes p in + match p.Parser.token with + | Tilde -> ( + Parser.next p; + let name, loc = parseLident p in + let lblLocAttr = + (Location.mkloc "ns.namedArgLoc" loc, Parsetree.PStr []) + in + Parser.expect ~grammar:Grammar.TypeExpression Colon p; + let typ = + let typ = parseTypExpr p in + {typ with ptyp_attributes = lblLocAttr :: typ.ptyp_attributes} + in + match p.Parser.token with + | Equal -> + Parser.next p; + Parser.expect Question p; + Some (uncurried, attrs, Asttypes.Optional name, typ, startPos) + | _ -> Some (uncurried, attrs, Asttypes.Labelled name, typ, startPos)) + | Lident _ -> ( + let name, loc = parseLident p in + match p.token with + | Colon -> ( + let () = + let error = + Diagnostics.message + (ErrorMessages.missingTildeLabeledParameter name) + in + Parser.err ~startPos:loc.loc_start ~endPos:loc.loc_end p error + in + Parser.next p; + let typ = parseTypExpr p in + match p.Parser.token with + | Equal -> + Parser.next p; + Parser.expect Question p; + Some (uncurried, attrs, Asttypes.Optional name, typ, startPos) + | _ -> Some (uncurried, attrs, Asttypes.Labelled name, typ, startPos)) + | _ -> + let constr = Location.mkloc (Longident.Lident name) loc in + let args = parseTypeConstructorArgs ~constrName:constr p in + let typ = + Ast_helper.Typ.constr + ~loc:(mkLoc startPos p.prevEndPos) + ~attrs constr args + in + + let typ = parseArrowTypeRest ~es6Arrow:true ~startPos typ p in + let typ = parseTypeAlias p typ in + Some (uncurried, [], Asttypes.Nolabel, typ, startPos)) + | _ -> + let typ = parseTypExpr p in + let typWithAttributes = + {typ with ptyp_attributes = List.concat [attrs; typ.ptyp_attributes]} + in + Some (uncurried, [], Asttypes.Nolabel, typWithAttributes, startPos) + else None + +(* (int, ~x:string, float) *) +and parseTypeParameters p = + let startPos = p.Parser.startPos in + Parser.expect Lparen p; + match p.Parser.token with + | Rparen -> + Parser.next p; + let loc = mkLoc startPos p.prevEndPos in + let unitConstr = Location.mkloc (Longident.Lident "unit") loc in + let typ = Ast_helper.Typ.constr unitConstr [] in + [(false, [], Asttypes.Nolabel, typ, startPos)] + | _ -> + let params = + parseCommaDelimitedRegion ~grammar:Grammar.TypeParameters ~closing:Rparen + ~f:parseTypeParameter p + in + Parser.expect Rparen p; + params + +and parseEs6ArrowType ~attrs p = + let startPos = p.Parser.startPos in + match p.Parser.token with + | Tilde -> + Parser.next p; + let name, loc = parseLident p in + let lblLocAttr = (Location.mkloc "ns.namedArgLoc" loc, Parsetree.PStr []) in + Parser.expect ~grammar:Grammar.TypeExpression Colon p; + let typ = + let typ = parseTypExpr ~alias:false ~es6Arrow:false p in + {typ with ptyp_attributes = lblLocAttr :: typ.ptyp_attributes} + in + let arg = + match p.Parser.token with + | Equal -> + Parser.next p; + Parser.expect Question p; + Asttypes.Optional name + | _ -> Asttypes.Labelled name + in + 
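+ (* Sketch: this branch covers a single unparenthesised labelled parameter,
+ roughly ~radius: int => int, with =? marking the label as optional;
+ the example name is made up. *)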
Parser.expect EqualGreater p; + let returnType = parseTypExpr ~alias:false p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Typ.arrow ~loc ~attrs arg typ returnType + | _ -> + let parameters = parseTypeParameters p in + Parser.expect EqualGreater p; + let returnType = parseTypExpr ~alias:false p in + let endPos = p.prevEndPos in + let typ = + List.fold_right + (fun (uncurried, attrs, argLbl, typ, startPos) t -> + let attrs = if uncurried then uncurryAttr :: attrs else attrs in + Ast_helper.Typ.arrow ~loc:(mkLoc startPos endPos) ~attrs argLbl typ t) + parameters returnType + in + { + typ with + ptyp_attributes = List.concat [typ.ptyp_attributes; attrs]; + ptyp_loc = mkLoc startPos p.prevEndPos; + } + +(* + * typexpr ::= + * | 'ident + * | _ + * | (typexpr) + * | typexpr => typexpr --> es6 arrow + * | (typexpr, typexpr) => typexpr --> es6 arrow + * | /typexpr, typexpr, typexpr/ --> tuple + * | typeconstr + * | typeconstr + * | typeconstr + * | typexpr as 'ident + * | %attr-id --> extension + * | %attr-id(payload) --> extension + * + * typeconstr ::= + * | lident + * | uident.lident + * | uident.uident.lident --> long module path + *) +and parseTypExpr ?attrs ?(es6Arrow = true) ?(alias = true) p = + (* Parser.leaveBreadcrumb p Grammar.TypeExpression; *) + let startPos = p.Parser.startPos in + let attrs = + match attrs with + | Some attrs -> attrs + | None -> parseAttributes p + in + let typ = + if es6Arrow && isEs6ArrowType p then parseEs6ArrowType ~attrs p + else + let typ = parseAtomicTypExpr ~attrs p in + parseArrowTypeRest ~es6Arrow ~startPos typ p + in + let typ = if alias then parseTypeAlias p typ else typ in + (* Parser.eatBreadcrumb p; *) + typ + +and parseArrowTypeRest ~es6Arrow ~startPos typ p = + match p.Parser.token with + | (EqualGreater | MinusGreater) as token when es6Arrow == true -> + (* error recovery *) + if token = MinusGreater then Parser.expect EqualGreater p; + Parser.next p; + let returnType = parseTypExpr ~alias:false p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Typ.arrow ~loc Asttypes.Nolabel typ returnType + | _ -> typ + +and parseTypExprRegion p = + if Grammar.isTypExprStart p.Parser.token then Some (parseTypExpr p) else None + +and parseTupleType ~attrs ~first ~startPos p = + let typexprs = + first + :: parseCommaDelimitedRegion ~grammar:Grammar.TypExprList ~closing:Rparen + ~f:parseTypExprRegion p + in + Parser.expect Rparen p; + let () = + match typexprs with + | [_] -> + Parser.err ~startPos ~endPos:p.prevEndPos p + (Diagnostics.message ErrorMessages.tupleSingleElement) + | _ -> () + in + let tupleLoc = mkLoc startPos p.prevEndPos in + Ast_helper.Typ.tuple ~attrs ~loc:tupleLoc typexprs + +and parseTypeConstructorArgRegion p = + if Grammar.isTypExprStart p.Parser.token then Some (parseTypExpr p) + else if p.token = LessThan then ( + Parser.next p; + parseTypeConstructorArgRegion p) + else None + +(* Js.Nullable.value<'a> *) +and parseTypeConstructorArgs ~constrName p = + let opening = p.Parser.token in + let openingStartPos = p.startPos in + match opening with + | LessThan | Lparen -> + Scanner.setDiamondMode p.scanner; + Parser.next p; + let typeArgs = + (* TODO: change Grammar.TypExprList to TypArgList!!! Why did I wrote this? 
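+ Added note: the diamond mode set above lets > close the type arguments, e.g.
+ option<int> or array<string>; a call-style spelling like option(int) is caught
+ below and reported with the angle-bracket hint.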
*) + parseCommaDelimitedRegion ~grammar:Grammar.TypExprList + ~closing:GreaterThan ~f:parseTypeConstructorArgRegion p + in + let () = + match p.token with + | Rparen when opening = Token.Lparen -> + let typ = Ast_helper.Typ.constr constrName typeArgs in + let msg = + Doc.breakableGroup ~forceBreak:true + (Doc.concat + [ + Doc.text "Type parameters require angle brackets:"; + Doc.indent + (Doc.concat + [Doc.line; ResPrinter.printTypExpr typ CommentTable.empty]); + ]) + |> Doc.toString ~width:80 + in + Parser.err ~startPos:openingStartPos p (Diagnostics.message msg); + Parser.next p + | _ -> Parser.expect GreaterThan p + in + Scanner.popMode p.scanner Diamond; + typeArgs + | _ -> [] + +(* string-field-decl ::= + * | string: poly-typexpr + * | attributes string-field-decl *) +and parseStringFieldDeclaration p = + let attrs = parseAttributes p in + match p.Parser.token with + | String name -> + let nameStartPos = p.startPos in + let nameEndPos = p.endPos in + Parser.next p; + let fieldName = Location.mkloc name (mkLoc nameStartPos nameEndPos) in + Parser.expect ~grammar:Grammar.TypeExpression Colon p; + let typ = parsePolyTypeExpr p in + Some (Parsetree.Otag (fieldName, attrs, typ)) + | DotDotDot -> + Parser.next p; + let typ = parseTypExpr p in + Some (Parsetree.Oinherit typ) + | Lident name -> + let nameLoc = mkLoc p.startPos p.endPos in + Parser.err p + (Diagnostics.message (ErrorMessages.objectQuotedFieldName name)); + Parser.next p; + let fieldName = Location.mkloc name nameLoc in + Parser.expect ~grammar:Grammar.TypeExpression Colon p; + let typ = parsePolyTypeExpr p in + Some (Parsetree.Otag (fieldName, attrs, typ)) + | _token -> None + +(* field-decl ::= + * | [mutable] field-name : poly-typexpr + * | attributes field-decl *) +and parseFieldDeclaration p = + let startPos = p.Parser.startPos in + let attrs = parseAttributes p in + let mut = + if Parser.optional p Token.Mutable then Asttypes.Mutable + else Asttypes.Immutable + in + let lident, loc = + match p.token with + | _ -> parseLident p + in + let optional = parseOptionalLabel p in + let name = Location.mkloc lident loc in + let typ = + match p.Parser.token with + | Colon -> + Parser.next p; + parsePolyTypeExpr p + | _ -> + Ast_helper.Typ.constr ~loc:name.loc {name with txt = Lident name.txt} [] + in + let loc = mkLoc startPos typ.ptyp_loc.loc_end in + (optional, Ast_helper.Type.field ~attrs ~loc ~mut name typ) + +and parseFieldDeclarationRegion p = + let startPos = p.Parser.startPos in + let attrs = parseAttributes p in + let mut = + if Parser.optional p Token.Mutable then Asttypes.Mutable + else Asttypes.Immutable + in + match p.token with + | Lident _ -> + let lident, loc = parseLident p in + let name = Location.mkloc lident loc in + let optional = parseOptionalLabel p in + let typ = + match p.Parser.token with + | Colon -> + Parser.next p; + parsePolyTypeExpr p + | _ -> + Ast_helper.Typ.constr ~loc:name.loc ~attrs + {name with txt = Lident name.txt} + [] + in + let loc = mkLoc startPos typ.ptyp_loc.loc_end in + let attrs = if optional then optionalAttr :: attrs else attrs in + Some (Ast_helper.Type.field ~attrs ~loc ~mut name typ) + | _ -> None + +(* record-decl ::= + * | { field-decl } + * | { field-decl, field-decl } + * | { field-decl, field-decl, field-decl, } + *) +and parseRecordDeclaration p = + Parser.leaveBreadcrumb p Grammar.RecordDecl; + Parser.expect Lbrace p; + let rows = + parseCommaDelimitedRegion ~grammar:Grammar.RecordDecl ~closing:Rbrace + ~f:parseFieldDeclarationRegion p + in + Parser.expect Rbrace p; + 
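+ (* Roughly, `type user = {name: string, mutable age: int, nickname?: string}`
+    yields three label declarations here; the optional `nickname?` field gets
+    optionalAttr attached in parseFieldDeclarationRegion above. *)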
Parser.eatBreadcrumb p; + rows + +(* constr-args ::= + * | (typexpr) + * | (typexpr, typexpr) + * | (typexpr, typexpr, typexpr,) + * | (record-decl) + * + * TODO: should we overparse inline-records in every position? + * Give a good error message afterwards? + *) +and parseConstrDeclArgs p = + let constrArgs = + match p.Parser.token with + | Lparen -> ( + Parser.next p; + (* TODO: this could use some cleanup/stratification *) + match p.Parser.token with + | Lbrace -> ( + let lbrace = p.startPos in + Parser.next p; + let startPos = p.Parser.startPos in + match p.Parser.token with + | DotDot | Dot -> + let closedFlag = + match p.token with + | DotDot -> + Parser.next p; + Asttypes.Open + | Dot -> + Parser.next p; + Asttypes.Closed + | _ -> Asttypes.Closed + in + let fields = + parseCommaDelimitedRegion ~grammar:Grammar.StringFieldDeclarations + ~closing:Rbrace ~f:parseStringFieldDeclaration p + in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + let typ = Ast_helper.Typ.object_ ~loc ~attrs:[] fields closedFlag in + Parser.optional p Comma |> ignore; + let moreArgs = + parseCommaDelimitedRegion ~grammar:Grammar.TypExprList + ~closing:Rparen ~f:parseTypExprRegion p + in + Parser.expect Rparen p; + Parsetree.Pcstr_tuple (typ :: moreArgs) + | DotDotDot -> + let dotdotdotStart = p.startPos in + let dotdotdotEnd = p.endPos in + (* start of object type spreading, e.g. `User({...a, "u": int})` *) + Parser.next p; + let typ = parseTypExpr p in + let () = + match p.token with + | Rbrace -> + (* {...x}, spread without extra fields *) + Parser.err ~startPos:dotdotdotStart ~endPos:dotdotdotEnd p + (Diagnostics.message ErrorMessages.sameTypeSpread); + Parser.next p + | _ -> Parser.expect Comma p + in + let () = + match p.token with + | Lident _ -> + Parser.err ~startPos:dotdotdotStart ~endPos:dotdotdotEnd p + (Diagnostics.message ErrorMessages.spreadInRecordDeclaration) + | _ -> () + in + let fields = + Parsetree.Oinherit typ + :: parseCommaDelimitedRegion + ~grammar:Grammar.StringFieldDeclarations ~closing:Rbrace + ~f:parseStringFieldDeclaration p + in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + let typ = + Ast_helper.Typ.object_ ~loc fields Asttypes.Closed + |> parseTypeAlias p + in + let typ = parseArrowTypeRest ~es6Arrow:true ~startPos typ p in + Parser.optional p Comma |> ignore; + let moreArgs = + parseCommaDelimitedRegion ~grammar:Grammar.TypExprList + ~closing:Rparen ~f:parseTypExprRegion p + in + Parser.expect Rparen p; + Parsetree.Pcstr_tuple (typ :: moreArgs) + | _ -> ( + let attrs = parseAttributes p in + match p.Parser.token with + | String _ -> + let closedFlag = Asttypes.Closed in + let fields = + match attrs with + | [] -> + parseCommaDelimitedRegion + ~grammar:Grammar.StringFieldDeclarations ~closing:Rbrace + ~f:parseStringFieldDeclaration p + | attrs -> + let first = + Parser.leaveBreadcrumb p Grammar.StringFieldDeclarations; + let field = + match parseStringFieldDeclaration p with + | Some field -> field + | None -> assert false + in + (* parse comma after first *) + let () = + match p.Parser.token with + | Rbrace | Eof -> () + | Comma -> Parser.next p + | _ -> Parser.expect Comma p + in + Parser.eatBreadcrumb p; + match field with + | Parsetree.Otag (label, _, ct) -> + Parsetree.Otag (label, attrs, ct) + | Oinherit ct -> Oinherit ct + in + first + :: parseCommaDelimitedRegion + ~grammar:Grammar.StringFieldDeclarations ~closing:Rbrace + ~f:parseStringFieldDeclaration p + in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos 
in + let typ = + Ast_helper.Typ.object_ ~loc ~attrs:[] fields closedFlag + |> parseTypeAlias p + in + let typ = parseArrowTypeRest ~es6Arrow:true ~startPos typ p in + Parser.optional p Comma |> ignore; + let moreArgs = + parseCommaDelimitedRegion ~grammar:Grammar.TypExprList + ~closing:Rparen ~f:parseTypExprRegion p + in + Parser.expect Rparen p; + Parsetree.Pcstr_tuple (typ :: moreArgs) + | _ -> + let fields = + match attrs with + | [] -> + parseCommaDelimitedRegion ~grammar:Grammar.FieldDeclarations + ~closing:Rbrace ~f:parseFieldDeclarationRegion p + | attrs -> + let first = + let optional, field = parseFieldDeclaration p in + let attrs = + if optional then optionalAttr :: attrs else attrs + in + Parser.expect Comma p; + {field with Parsetree.pld_attributes = attrs} + in + first + :: parseCommaDelimitedRegion ~grammar:Grammar.FieldDeclarations + ~closing:Rbrace ~f:parseFieldDeclarationRegion p + in + let () = + match fields with + | [] -> + Parser.err ~startPos:lbrace p + (Diagnostics.message + "An inline record declaration needs at least one field") + | _ -> () + in + Parser.expect Rbrace p; + Parser.optional p Comma |> ignore; + Parser.expect Rparen p; + Parsetree.Pcstr_record fields)) + | _ -> + let args = + parseCommaDelimitedRegion ~grammar:Grammar.TypExprList ~closing:Rparen + ~f:parseTypExprRegion p + in + Parser.expect Rparen p; + Parsetree.Pcstr_tuple args) + | _ -> Pcstr_tuple [] + in + let res = + match p.Parser.token with + | Colon -> + Parser.next p; + Some (parseTypExpr p) + | _ -> None + in + (constrArgs, res) + +(* constr-decl ::= + * | constr-name + * | attrs constr-name + * | constr-name const-args + * | attrs constr-name const-args *) +and parseTypeConstructorDeclarationWithBar p = + match p.Parser.token with + | Bar -> + let startPos = p.Parser.startPos in + Parser.next p; + Some (parseTypeConstructorDeclaration ~startPos p) + | _ -> None + +and parseTypeConstructorDeclaration ~startPos p = + Parser.leaveBreadcrumb p Grammar.ConstructorDeclaration; + let attrs = parseAttributes p in + match p.Parser.token with + | Uident uident -> + let uidentLoc = mkLoc p.startPos p.endPos in + Parser.next p; + let args, res = parseConstrDeclArgs p in + Parser.eatBreadcrumb p; + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Type.constructor ~loc ~attrs ?res ~args + (Location.mkloc uident uidentLoc) + | t -> + Parser.err p (Diagnostics.uident t); + Ast_helper.Type.constructor (Location.mknoloc "_") + +(* [|] constr-decl { | constr-decl } *) +and parseTypeConstructorDeclarations ?first p = + let firstConstrDecl = + match first with + | None -> + let startPos = p.Parser.startPos in + ignore (Parser.optional p Token.Bar); + parseTypeConstructorDeclaration ~startPos p + | Some firstConstrDecl -> firstConstrDecl + in + firstConstrDecl + :: parseRegion ~grammar:Grammar.ConstructorDeclaration + ~f:parseTypeConstructorDeclarationWithBar p + +(* + * type-representation ::= + * ∣ = [ | ] constr-decl { | constr-decl } + * ∣ = private [ | ] constr-decl { | constr-decl } + * | = | + * ∣ = private | + * ∣ = record-decl + * ∣ = private record-decl + * | = .. 
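+ * e.g. `= | Red | Green(int)`, `= private {x: int}`, or `= ..` for an
+ *      extensible variant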
+ *) +and parseTypeRepresentation p = + Parser.leaveBreadcrumb p Grammar.TypeRepresentation; + (* = consumed *) + let privateFlag = + if Parser.optional p Token.Private then Asttypes.Private + else Asttypes.Public + in + let kind = + match p.Parser.token with + | Bar | Uident _ -> + Parsetree.Ptype_variant (parseTypeConstructorDeclarations p) + | Lbrace -> Parsetree.Ptype_record (parseRecordDeclaration p) + | DotDot -> + Parser.next p; + Ptype_open + | token -> + Parser.err p (Diagnostics.unexpected token p.breadcrumbs); + (* TODO: I have no idea if this is even remotely a good idea *) + Parsetree.Ptype_variant [] + in + Parser.eatBreadcrumb p; + (privateFlag, kind) + +(* type-param ::= + * | variance 'lident + * | variance 'uident + * | variance _ + * + * variance ::= + * | + + * | - + * | (* empty *) + *) +and parseTypeParam p = + let variance = + match p.Parser.token with + | Plus -> + Parser.next p; + Asttypes.Covariant + | Minus -> + Parser.next p; + Contravariant + | _ -> Invariant + in + match p.Parser.token with + | SingleQuote -> + Parser.next p; + let ident, loc = + if p.Parser.token = Eof then ( + Parser.err ~startPos:p.startPos p + (Diagnostics.unexpected p.Parser.token p.breadcrumbs); + ("", mkLoc p.startPos p.prevEndPos)) + else parseIdent ~msg:ErrorMessages.typeParam ~startPos:p.startPos p + in + Some (Ast_helper.Typ.var ~loc ident, variance) + | Underscore -> + let loc = mkLoc p.startPos p.endPos in + Parser.next p; + Some (Ast_helper.Typ.any ~loc (), variance) + | (Uident _ | Lident _) as token -> + Parser.err p + (Diagnostics.message + ("Type params start with a singlequote: '" ^ Token.toString token)); + let ident, loc = + parseIdent ~msg:ErrorMessages.typeParam ~startPos:p.startPos p + in + Some (Ast_helper.Typ.var ~loc ident, variance) + | _token -> None + +(* type-params ::= + * | + * ∣ + * ∣ + * ∣ + * + * TODO: when we have pretty-printer show an error + * with the actual code corrected. 
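+ * e.g. `<'a>`, `<'a, 'b>`, `<+'a, -'b>` or `<_>`; the variance prefix is
+ * optional and `_` stands for an anonymous type parameter.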
*) +and parseTypeParams ~parent p = + let opening = p.Parser.token in + match opening with + | (LessThan | Lparen) when p.startPos.pos_lnum == p.prevEndPos.pos_lnum -> + Scanner.setDiamondMode p.scanner; + let openingStartPos = p.startPos in + Parser.leaveBreadcrumb p Grammar.TypeParams; + Parser.next p; + let params = + parseCommaDelimitedRegion ~grammar:Grammar.TypeParams ~closing:GreaterThan + ~f:parseTypeParam p + in + let () = + match p.token with + | Rparen when opening = Token.Lparen -> + let msg = + Doc.breakableGroup ~forceBreak:true + (Doc.concat + [ + Doc.text "Type parameters require angle brackets:"; + Doc.indent + (Doc.concat + [ + Doc.line; + Doc.concat + [ + ResPrinter.printLongident parent.Location.txt; + ResPrinter.printTypeParams params CommentTable.empty; + ]; + ]); + ]) + |> Doc.toString ~width:80 + in + Parser.err ~startPos:openingStartPos p (Diagnostics.message msg); + Parser.next p + | _ -> Parser.expect GreaterThan p + in + Scanner.popMode p.scanner Diamond; + Parser.eatBreadcrumb p; + params + | _ -> [] + +(* type-constraint ::= constraint ' ident = typexpr *) +and parseTypeConstraint p = + let startPos = p.Parser.startPos in + match p.Parser.token with + | Token.Constraint -> ( + Parser.next p; + Parser.expect SingleQuote p; + match p.Parser.token with + | Lident ident -> + let identLoc = mkLoc startPos p.endPos in + Parser.next p; + Parser.expect Equal p; + let typ = parseTypExpr p in + let loc = mkLoc startPos p.prevEndPos in + Some (Ast_helper.Typ.var ~loc:identLoc ident, typ, loc) + | t -> + Parser.err p (Diagnostics.lident t); + let loc = mkLoc startPos p.prevEndPos in + Some (Ast_helper.Typ.any (), parseTypExpr p, loc)) + | _ -> None + +(* type-constraints ::= + * | (* empty *) + * | type-constraint + * | type-constraint type-constraint + * | type-constraint type-constraint type-constraint (* 0 or more *) + *) +and parseTypeConstraints p = + parseRegion ~grammar:Grammar.TypeConstraint ~f:parseTypeConstraint p + +and parseTypeEquationOrConstrDecl p = + let uidentStartPos = p.Parser.startPos in + match p.Parser.token with + | Uident uident -> ( + Parser.next p; + match p.Parser.token with + | Dot -> ( + Parser.next p; + let typeConstr = + parseValuePathTail p uidentStartPos (Longident.Lident uident) + in + let loc = mkLoc uidentStartPos p.prevEndPos in + let typ = + parseTypeAlias p + (Ast_helper.Typ.constr ~loc typeConstr + (parseTypeConstructorArgs ~constrName:typeConstr p)) + in + match p.token with + | Equal -> + Parser.next p; + let priv, kind = parseTypeRepresentation p in + (Some typ, priv, kind) + | EqualGreater -> + Parser.next p; + let returnType = parseTypExpr ~alias:false p in + let loc = mkLoc uidentStartPos p.prevEndPos in + let arrowType = + Ast_helper.Typ.arrow ~loc Asttypes.Nolabel typ returnType + in + let typ = parseTypeAlias p arrowType in + (Some typ, Asttypes.Public, Parsetree.Ptype_abstract) + | _ -> (Some typ, Asttypes.Public, Parsetree.Ptype_abstract)) + | _ -> + let uidentEndPos = p.prevEndPos in + let args, res = parseConstrDeclArgs p in + let first = + Some + (let uidentLoc = mkLoc uidentStartPos uidentEndPos in + Ast_helper.Type.constructor + ~loc:(mkLoc uidentStartPos p.prevEndPos) + ?res ~args + (Location.mkloc uident uidentLoc)) + in + ( None, + Asttypes.Public, + Parsetree.Ptype_variant (parseTypeConstructorDeclarations p ?first) )) + | t -> + Parser.err p (Diagnostics.uident t); + (* TODO: is this a good idea? 
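+    For now we recover with an abstract public type so parsing can continue.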
*) + (None, Asttypes.Public, Parsetree.Ptype_abstract) + +and parseRecordOrObjectDecl p = + let startPos = p.Parser.startPos in + Parser.expect Lbrace p; + match p.Parser.token with + | DotDot | Dot -> + let closedFlag = + match p.token with + | DotDot -> + Parser.next p; + Asttypes.Open + | Dot -> + Parser.next p; + Asttypes.Closed + | _ -> Asttypes.Closed + in + let fields = + parseCommaDelimitedRegion ~grammar:Grammar.StringFieldDeclarations + ~closing:Rbrace ~f:parseStringFieldDeclaration p + in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + let typ = + Ast_helper.Typ.object_ ~loc ~attrs:[] fields closedFlag + |> parseTypeAlias p + in + let typ = parseArrowTypeRest ~es6Arrow:true ~startPos typ p in + (Some typ, Asttypes.Public, Parsetree.Ptype_abstract) + | DotDotDot -> + let dotdotdotStart = p.startPos in + let dotdotdotEnd = p.endPos in + (* start of object type spreading, e.g. `type u = {...a, "u": int}` *) + Parser.next p; + let typ = parseTypExpr p in + let () = + match p.token with + | Rbrace -> + (* {...x}, spread without extra fields *) + Parser.err ~startPos:dotdotdotStart ~endPos:dotdotdotEnd p + (Diagnostics.message ErrorMessages.sameTypeSpread); + Parser.next p + | _ -> Parser.expect Comma p + in + let () = + match p.token with + | Lident _ -> + Parser.err ~startPos:dotdotdotStart ~endPos:dotdotdotEnd p + (Diagnostics.message ErrorMessages.spreadInRecordDeclaration) + | _ -> () + in + let fields = + Parsetree.Oinherit typ + :: parseCommaDelimitedRegion ~grammar:Grammar.StringFieldDeclarations + ~closing:Rbrace ~f:parseStringFieldDeclaration p + in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + let typ = + Ast_helper.Typ.object_ ~loc fields Asttypes.Closed |> parseTypeAlias p + in + let typ = parseArrowTypeRest ~es6Arrow:true ~startPos typ p in + (Some typ, Asttypes.Public, Parsetree.Ptype_abstract) + | _ -> ( + let attrs = parseAttributes p in + match p.Parser.token with + | String _ -> + let closedFlag = Asttypes.Closed in + let fields = + match attrs with + | [] -> + parseCommaDelimitedRegion ~grammar:Grammar.StringFieldDeclarations + ~closing:Rbrace ~f:parseStringFieldDeclaration p + | attrs -> + let first = + Parser.leaveBreadcrumb p Grammar.StringFieldDeclarations; + let field = + match parseStringFieldDeclaration p with + | Some field -> field + | None -> assert false + in + (* parse comma after first *) + let () = + match p.Parser.token with + | Rbrace | Eof -> () + | Comma -> Parser.next p + | _ -> Parser.expect Comma p + in + Parser.eatBreadcrumb p; + match field with + | Parsetree.Otag (label, _, ct) -> Parsetree.Otag (label, attrs, ct) + | Oinherit ct -> Oinherit ct + in + first + :: parseCommaDelimitedRegion ~grammar:Grammar.StringFieldDeclarations + ~closing:Rbrace ~f:parseStringFieldDeclaration p + in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + let typ = + Ast_helper.Typ.object_ ~loc ~attrs:[] fields closedFlag + |> parseTypeAlias p + in + let typ = parseArrowTypeRest ~es6Arrow:true ~startPos typ p in + (Some typ, Asttypes.Public, Parsetree.Ptype_abstract) + | _ -> + Parser.leaveBreadcrumb p Grammar.RecordDecl; + let fields = + (* XXX *) + match attrs with + | [] -> + parseCommaDelimitedRegion ~grammar:Grammar.FieldDeclarations + ~closing:Rbrace ~f:parseFieldDeclarationRegion p + | attr :: _ as attrs -> + let first = + let optional, field = parseFieldDeclaration p in + let attrs = if optional then optionalAttr :: attrs else attrs in + Parser.optional p Comma |> ignore; + { + field with 
+ Parsetree.pld_attributes = attrs; + pld_loc = + { + field.Parsetree.pld_loc with + loc_start = (attr |> fst).loc.loc_start; + }; + } + in + first + :: parseCommaDelimitedRegion ~grammar:Grammar.FieldDeclarations + ~closing:Rbrace ~f:parseFieldDeclarationRegion p + in + Parser.expect Rbrace p; + Parser.eatBreadcrumb p; + (None, Asttypes.Public, Parsetree.Ptype_record fields)) + +and parsePrivateEqOrRepr p = + Parser.expect Private p; + match p.Parser.token with + | Lbrace -> + let manifest, _, kind = parseRecordOrObjectDecl p in + (manifest, Asttypes.Private, kind) + | Uident _ -> + let manifest, _, kind = parseTypeEquationOrConstrDecl p in + (manifest, Asttypes.Private, kind) + | Bar | DotDot -> + let _, kind = parseTypeRepresentation p in + (None, Asttypes.Private, kind) + | t when Grammar.isTypExprStart t -> + (Some (parseTypExpr p), Asttypes.Private, Parsetree.Ptype_abstract) + | _ -> + let _, kind = parseTypeRepresentation p in + (None, Asttypes.Private, kind) + +(* + polymorphic-variant-type ::= + | [ tag-spec-first { | tag-spec } ] + | [> [ tag-spec ] { | tag-spec } ] + | [< [|] tag-spec-full { | tag-spec-full } [ > { `tag-name }+ ] ] + + tag-spec-first ::= `tag-name [ of typexpr ] + | [ typexpr ] | tag-spec + + tag-spec ::= `tag-name [ of typexpr ] + | typexpr + + tag-spec-full ::= `tag-name [ of [&] typexpr { & typexpr } ] + | typexpr +*) +and parsePolymorphicVariantType ~attrs p = + let startPos = p.Parser.startPos in + Parser.expect Lbracket p; + match p.token with + | GreaterThan -> + Parser.next p; + let rowFields = + match p.token with + | Rbracket -> [] + | Bar -> parseTagSpecs p + | _ -> + let rowField = parseTagSpec p in + rowField :: parseTagSpecs p + in + let variant = + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Typ.variant ~attrs ~loc rowFields Open None + in + Parser.expect Rbracket p; + variant + | LessThan -> + Parser.next p; + Parser.optional p Bar |> ignore; + let rowField = parseTagSpecFull p in + let rowFields = parseTagSpecFulls p in + let tagNames = parseTagNames p in + let variant = + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Typ.variant ~attrs ~loc (rowField :: rowFields) Closed + (Some tagNames) + in + Parser.expect Rbracket p; + variant + | _ -> + let rowFields1 = parseTagSpecFirst p in + let rowFields2 = parseTagSpecs p in + let variant = + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Typ.variant ~attrs ~loc (rowFields1 @ rowFields2) Closed None + in + Parser.expect Rbracket p; + variant + +and parseTagName p = + match p.Parser.token with + | Hash -> + let ident, _loc = parseHashIdent ~startPos:p.startPos p in + Some ident + | _ -> None + +and parseTagNames p = + if p.Parser.token == GreaterThan then ( + Parser.next p; + parseRegion p ~grammar:Grammar.TagNames ~f:parseTagName) + else [] + +and parseTagSpecFulls p = + match p.Parser.token with + | Rbracket -> [] + | GreaterThan -> [] + | Bar -> + Parser.next p; + let rowField = parseTagSpecFull p in + rowField :: parseTagSpecFulls p + | _ -> [] + +and parseTagSpecFull p = + let attrs = parseAttributes p in + match p.Parser.token with + | Hash -> parsePolymorphicVariantTypeSpecHash ~attrs ~full:true p + | _ -> + let typ = parseTypExpr ~attrs p in + Parsetree.Rinherit typ + +and parseTagSpecs p = + match p.Parser.token with + | Bar -> + Parser.next p; + let rowField = parseTagSpec p in + rowField :: parseTagSpecs p + | _ -> [] + +and parseTagSpec p = + let attrs = parseAttributes p in + match p.Parser.token with + | Hash -> parsePolymorphicVariantTypeSpecHash ~attrs 
~full:false p + | _ -> + let typ = parseTypExpr ~attrs p in + Parsetree.Rinherit typ + +and parseTagSpecFirst p = + let attrs = parseAttributes p in + match p.Parser.token with + | Bar -> + Parser.next p; + [parseTagSpec p] + | Hash -> [parsePolymorphicVariantTypeSpecHash ~attrs ~full:false p] + | _ -> ( + let typ = parseTypExpr ~attrs p in + match p.token with + | Rbracket -> + (* example: [ListStyleType.t] *) + [Parsetree.Rinherit typ] + | _ -> + Parser.expect Bar p; + [Parsetree.Rinherit typ; parseTagSpec p]) + +and parsePolymorphicVariantTypeSpecHash ~attrs ~full p : Parsetree.row_field = + let startPos = p.Parser.startPos in + let ident, loc = parseHashIdent ~startPos p in + let rec loop p = + match p.Parser.token with + | Band when full -> + Parser.next p; + let rowField = parsePolymorphicVariantTypeArgs p in + rowField :: loop p + | _ -> [] + in + let firstTuple, tagContainsAConstantEmptyConstructor = + match p.Parser.token with + | Band when full -> + Parser.next p; + ([parsePolymorphicVariantTypeArgs p], true) + | Lparen -> ([parsePolymorphicVariantTypeArgs p], false) + | _ -> ([], true) + in + let tuples = firstTuple @ loop p in + Parsetree.Rtag + ( Location.mkloc ident loc, + attrs, + tagContainsAConstantEmptyConstructor, + tuples ) + +and parsePolymorphicVariantTypeArgs p = + let startPos = p.Parser.startPos in + Parser.expect Lparen p; + let args = + parseCommaDelimitedRegion ~grammar:Grammar.TypExprList ~closing:Rparen + ~f:parseTypExprRegion p + in + Parser.expect Rparen p; + let attrs = [] in + let loc = mkLoc startPos p.prevEndPos in + match args with + | [({ptyp_desc = Ptyp_tuple _} as typ)] as types -> + if p.mode = ParseForTypeChecker then typ + else Ast_helper.Typ.tuple ~loc ~attrs types + | [typ] -> typ + | types -> Ast_helper.Typ.tuple ~loc ~attrs types + +and parseTypeEquationAndRepresentation p = + match p.Parser.token with + | (Equal | Bar) as token -> ( + if token = Bar then Parser.expect Equal p; + Parser.next p; + match p.Parser.token with + | Uident _ -> parseTypeEquationOrConstrDecl p + | Lbrace -> parseRecordOrObjectDecl p + | Private -> parsePrivateEqOrRepr p + | Bar | DotDot -> + let priv, kind = parseTypeRepresentation p in + (None, priv, kind) + | _ -> ( + let manifest = Some (parseTypExpr p) in + match p.Parser.token with + | Equal -> + Parser.next p; + let priv, kind = parseTypeRepresentation p in + (manifest, priv, kind) + | _ -> (manifest, Public, Parsetree.Ptype_abstract))) + | _ -> (None, Public, Parsetree.Ptype_abstract) + +(* type-definition ::= type [rec] typedef { and typedef } + * typedef ::= typeconstr-name [type-params] type-information + * type-information ::= [type-equation] [type-representation] { type-constraint } + * type-equation ::= = typexpr *) +and parseTypeDef ~attrs ~startPos p = + Parser.leaveBreadcrumb p Grammar.TypeDef; + (* let attrs = match attrs with | Some attrs -> attrs | None -> parseAttributes p in *) + Parser.leaveBreadcrumb p Grammar.TypeConstrName; + let name, loc = parseLident p in + let typeConstrName = Location.mkloc name loc in + Parser.eatBreadcrumb p; + let params = + let constrName = Location.mkloc (Longident.Lident name) loc in + parseTypeParams ~parent:constrName p + in + let typeDef = + let manifest, priv, kind = parseTypeEquationAndRepresentation p in + let cstrs = parseTypeConstraints p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Type.mk ~loc ~attrs ~priv ~kind ~params ~cstrs ?manifest + typeConstrName + in + Parser.eatBreadcrumb p; + typeDef + +and parseTypeExtension ~params ~attrs ~name p 
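+ (* Handles a type extension such as `type x += Blue`: `+=` is consumed just
+    below, optionally followed by `private` and one or more constructor
+    definitions separated by `|`. *)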
= + Parser.expect PlusEqual p; + let priv = + if Parser.optional p Token.Private then Asttypes.Private + else Asttypes.Public + in + let constrStart = p.Parser.startPos in + Parser.optional p Bar |> ignore; + let first = + let attrs, name, kind = + match p.Parser.token with + | Bar -> + Parser.next p; + parseConstrDef ~parseAttrs:true p + | _ -> parseConstrDef ~parseAttrs:true p + in + let loc = mkLoc constrStart p.prevEndPos in + Ast_helper.Te.constructor ~loc ~attrs name kind + in + let rec loop p cs = + match p.Parser.token with + | Bar -> + let startPos = p.Parser.startPos in + Parser.next p; + let attrs, name, kind = parseConstrDef ~parseAttrs:true p in + let extConstr = + Ast_helper.Te.constructor ~attrs + ~loc:(mkLoc startPos p.prevEndPos) + name kind + in + loop p (extConstr :: cs) + | _ -> List.rev cs + in + let constructors = loop p [first] in + Ast_helper.Te.mk ~attrs ~params ~priv name constructors + +and parseTypeDefinitions ~attrs ~name ~params ~startPos p = + let typeDef = + let manifest, priv, kind = parseTypeEquationAndRepresentation p in + let cstrs = parseTypeConstraints p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Type.mk ~loc ~attrs ~priv ~kind ~params ~cstrs ?manifest + {name with txt = lidentOfPath name.Location.txt} + in + let rec loop p defs = + let startPos = p.Parser.startPos in + let attrs = parseAttributesAndBinding p in + match p.Parser.token with + | And -> + Parser.next p; + let typeDef = parseTypeDef ~attrs ~startPos p in + loop p (typeDef :: defs) + | _ -> List.rev defs + in + loop p [typeDef] + +(* TODO: decide if we really want type extensions (eg. type x += Blue) + * It adds quite a bit of complexity that can be avoided, + * implemented for now. Needed to get a feel for the complexities of + * this territory of the grammar *) +and parseTypeDefinitionOrExtension ~attrs p = + let startPos = p.Parser.startPos in + Parser.expect Token.Typ p; + let recFlag = + match p.token with + | Rec -> + Parser.next p; + Asttypes.Recursive + | Lident "nonrec" -> + Parser.next p; + Asttypes.Nonrecursive + | _ -> Asttypes.Nonrecursive + in + let name = parseValuePath p in + let params = parseTypeParams ~parent:name p in + match p.Parser.token with + | PlusEqual -> TypeExt (parseTypeExtension ~params ~attrs ~name p) + | _ -> + (* shape of type name should be Lident, i.e. `t` is accepted. 
`User.t` not *) + let () = + match name.Location.txt with + | Lident _ -> () + | longident -> + Parser.err ~startPos:name.loc.loc_start ~endPos:name.loc.loc_end p + (longident |> ErrorMessages.typeDeclarationNameLongident + |> Diagnostics.message) + in + let typeDefs = parseTypeDefinitions ~attrs ~name ~params ~startPos p in + TypeDef {recFlag; types = typeDefs} + +(* external value-name : typexp = external-declaration *) +and parseExternalDef ~attrs ~startPos p = + Parser.leaveBreadcrumb p Grammar.External; + Parser.expect Token.External p; + let name, loc = parseLident p in + let name = Location.mkloc name loc in + Parser.expect ~grammar:Grammar.TypeExpression Colon p; + let typExpr = parseTypExpr p in + let equalStart = p.startPos in + let equalEnd = p.endPos in + Parser.expect Equal p; + let prim = + match p.token with + | String s -> + Parser.next p; + [s] + | _ -> + Parser.err ~startPos:equalStart ~endPos:equalEnd p + (Diagnostics.message + ("An external requires the name of the JS value you're referring \ + to, like \"" ^ name.txt ^ "\".")); + [] + in + let loc = mkLoc startPos p.prevEndPos in + let vb = Ast_helper.Val.mk ~loc ~attrs ~prim name typExpr in + Parser.eatBreadcrumb p; + vb + +(* constr-def ::= + * | constr-decl + * | constr-name = constr + * + * constr-decl ::= constr-name constr-args + * constr-name ::= uident + * constr ::= path-uident *) +and parseConstrDef ~parseAttrs p = + let attrs = if parseAttrs then parseAttributes p else [] in + let name = + match p.Parser.token with + | Uident name -> + let loc = mkLoc p.startPos p.endPos in + Parser.next p; + Location.mkloc name loc + | t -> + Parser.err p (Diagnostics.uident t); + Location.mknoloc "_" + in + let kind = + match p.Parser.token with + | Lparen -> + let args, res = parseConstrDeclArgs p in + Parsetree.Pext_decl (args, res) + | Equal -> + Parser.next p; + let longident = parseModuleLongIdent ~lowercase:false p in + Parsetree.Pext_rebind longident + | Colon -> + Parser.next p; + let typ = parseTypExpr p in + Parsetree.Pext_decl (Pcstr_tuple [], Some typ) + | _ -> Parsetree.Pext_decl (Pcstr_tuple [], None) + in + (attrs, name, kind) + +(* + * exception-definition ::= + * | exception constr-decl + * ∣ exception constr-name = constr + * + * constr-name ::= uident + * constr ::= long_uident *) +and parseExceptionDef ~attrs p = + let startPos = p.Parser.startPos in + Parser.expect Token.Exception p; + let _, name, kind = parseConstrDef ~parseAttrs:false p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Te.constructor ~loc ~attrs name kind + +and parseNewlineOrSemicolonStructure p = + match p.Parser.token with + | Semicolon -> Parser.next p + | token when Grammar.isStructureItemStart token -> + if p.prevEndPos.pos_lnum < p.startPos.pos_lnum then () + else + Parser.err ~startPos:p.prevEndPos ~endPos:p.endPos p + (Diagnostics.message + "consecutive statements on a line must be separated by ';' or a \ + newline") + | _ -> () + +and parseStructureItemRegion p = + let startPos = p.Parser.startPos in + let attrs = parseAttributes p in + match p.Parser.token with + | Open -> + let openDescription = parseOpenDescription ~attrs p in + parseNewlineOrSemicolonStructure p; + let loc = mkLoc startPos p.prevEndPos in + Some (Ast_helper.Str.open_ ~loc openDescription) + | Let -> + let recFlag, letBindings = parseLetBindings ~attrs p in + parseNewlineOrSemicolonStructure p; + let loc = mkLoc startPos p.prevEndPos in + Some (Ast_helper.Str.value ~loc recFlag letBindings) + | Typ -> ( + Parser.beginRegion p; + match 
parseTypeDefinitionOrExtension ~attrs p with + | TypeDef {recFlag; types} -> + parseNewlineOrSemicolonStructure p; + let loc = mkLoc startPos p.prevEndPos in + Parser.endRegion p; + Some (Ast_helper.Str.type_ ~loc recFlag types) + | TypeExt ext -> + parseNewlineOrSemicolonStructure p; + let loc = mkLoc startPos p.prevEndPos in + Parser.endRegion p; + Some (Ast_helper.Str.type_extension ~loc ext)) + | External -> + let externalDef = parseExternalDef ~attrs ~startPos p in + parseNewlineOrSemicolonStructure p; + let loc = mkLoc startPos p.prevEndPos in + Some (Ast_helper.Str.primitive ~loc externalDef) + | Exception -> + let exceptionDef = parseExceptionDef ~attrs p in + parseNewlineOrSemicolonStructure p; + let loc = mkLoc startPos p.prevEndPos in + Some (Ast_helper.Str.exception_ ~loc exceptionDef) + | Include -> + let includeStatement = parseIncludeStatement ~attrs p in + parseNewlineOrSemicolonStructure p; + let loc = mkLoc startPos p.prevEndPos in + Some (Ast_helper.Str.include_ ~loc includeStatement) + | Module -> + Parser.beginRegion p; + let structureItem = parseModuleOrModuleTypeImplOrPackExpr ~attrs p in + parseNewlineOrSemicolonStructure p; + let loc = mkLoc startPos p.prevEndPos in + Parser.endRegion p; + Some {structureItem with pstr_loc = loc} + | ModuleComment (loc, s) -> + Parser.next p; + Some + (Ast_helper.Str.attribute ~loc + ( {txt = "ns.doc"; loc}, + PStr + [ + Ast_helper.Str.eval ~loc + (Ast_helper.Exp.constant ~loc (Pconst_string (s, None))); + ] )) + | AtAt -> + let attr = parseStandaloneAttribute p in + parseNewlineOrSemicolonStructure p; + let loc = mkLoc startPos p.prevEndPos in + Some (Ast_helper.Str.attribute ~loc attr) + | PercentPercent -> + let extension = parseExtension ~moduleLanguage:true p in + parseNewlineOrSemicolonStructure p; + let loc = mkLoc startPos p.prevEndPos in + Some (Ast_helper.Str.extension ~attrs ~loc extension) + | token when Grammar.isExprStart token -> + let prevEndPos = p.Parser.endPos in + let exp = parseExpr p in + parseNewlineOrSemicolonStructure p; + let loc = mkLoc startPos p.prevEndPos in + Parser.checkProgress ~prevEndPos + ~result:(Ast_helper.Str.eval ~loc ~attrs exp) + p + | _ -> ( + match attrs with + | (({Asttypes.loc = attrLoc}, _) as attr) :: _ -> + Parser.err ~startPos:attrLoc.loc_start ~endPos:attrLoc.loc_end p + (Diagnostics.message (ErrorMessages.attributeWithoutNode attr)); + let expr = parseExpr p in + Some + (Ast_helper.Str.eval ~loc:(mkLoc p.startPos p.prevEndPos) ~attrs expr) + | _ -> None) + [@@progress Parser.next, Parser.expect] + +(* include-statement ::= include module-expr *) +and parseIncludeStatement ~attrs p = + let startPos = p.Parser.startPos in + Parser.expect Token.Include p; + let modExpr = parseModuleExpr p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Incl.mk ~loc ~attrs modExpr + +and parseAtomicModuleExpr p = + let startPos = p.Parser.startPos in + match p.Parser.token with + | Uident _ident -> + let longident = parseModuleLongIdent ~lowercase:false p in + Ast_helper.Mod.ident ~loc:longident.loc longident + | Lbrace -> + Parser.next p; + let structure = + Ast_helper.Mod.structure + (parseDelimitedRegion ~grammar:Grammar.Structure ~closing:Rbrace + ~f:parseStructureItemRegion p) + in + Parser.expect Rbrace p; + let endPos = p.prevEndPos in + {structure with pmod_loc = mkLoc startPos endPos} + | Lparen -> + Parser.next p; + let modExpr = + match p.token with + | Rparen -> Ast_helper.Mod.structure ~loc:(mkLoc startPos p.prevEndPos) [] + | _ -> parseConstrainedModExpr p + in + Parser.expect 
Rparen p; + modExpr + | Lident "unpack" -> ( + (* TODO: should this be made a keyword?? *) + Parser.next p; + Parser.expect Lparen p; + let expr = parseExpr p in + match p.Parser.token with + | Colon -> + let colonStart = p.Parser.startPos in + Parser.next p; + let attrs = parseAttributes p in + let packageType = parsePackageType ~startPos:colonStart ~attrs p in + Parser.expect Rparen p; + let loc = mkLoc startPos p.prevEndPos in + let constraintExpr = Ast_helper.Exp.constraint_ ~loc expr packageType in + Ast_helper.Mod.unpack ~loc constraintExpr + | _ -> + Parser.expect Rparen p; + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Mod.unpack ~loc expr) + | Percent -> + let extension = parseExtension p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Mod.extension ~loc extension + | token -> + Parser.err p (Diagnostics.unexpected token p.breadcrumbs); + Recover.defaultModuleExpr () + +and parsePrimaryModExpr p = + let startPos = p.Parser.startPos in + let modExpr = parseAtomicModuleExpr p in + let rec loop p modExpr = + match p.Parser.token with + | Lparen when p.prevEndPos.pos_lnum == p.startPos.pos_lnum -> + loop p (parseModuleApplication p modExpr) + | _ -> modExpr + in + let modExpr = loop p modExpr in + {modExpr with pmod_loc = mkLoc startPos p.prevEndPos} + +(* + * functor-arg ::= + * | uident : modtype + * | _ : modtype + * | modtype --> "punning" for _ : modtype + * | attributes functor-arg + *) +and parseFunctorArg p = + let startPos = p.Parser.startPos in + let attrs = parseAttributes p in + match p.Parser.token with + | Uident ident -> ( + Parser.next p; + let uidentEndPos = p.prevEndPos in + match p.Parser.token with + | Colon -> + Parser.next p; + let moduleType = parseModuleType p in + let loc = mkLoc startPos uidentEndPos in + let argName = Location.mkloc ident loc in + Some (attrs, argName, Some moduleType, startPos) + | Dot -> + Parser.next p; + let moduleType = + let moduleLongIdent = + parseModuleLongIdentTail ~lowercase:false p startPos + (Longident.Lident ident) + in + Ast_helper.Mty.ident ~loc:moduleLongIdent.loc moduleLongIdent + in + let argName = Location.mknoloc "_" in + Some (attrs, argName, Some moduleType, startPos) + | _ -> + let loc = mkLoc startPos uidentEndPos in + let modIdent = Location.mkloc (Longident.Lident ident) loc in + let moduleType = Ast_helper.Mty.ident ~loc modIdent in + let argName = Location.mknoloc "_" in + Some (attrs, argName, Some moduleType, startPos)) + | Underscore -> + Parser.next p; + let argName = Location.mkloc "_" (mkLoc startPos p.prevEndPos) in + Parser.expect Colon p; + let moduleType = parseModuleType p in + Some (attrs, argName, Some moduleType, startPos) + | Lparen -> + Parser.next p; + Parser.expect Rparen p; + let argName = Location.mkloc "*" (mkLoc startPos p.prevEndPos) in + Some (attrs, argName, None, startPos) + | _ -> None + +and parseFunctorArgs p = + let startPos = p.Parser.startPos in + Parser.expect Lparen p; + let args = + parseCommaDelimitedRegion ~grammar:Grammar.FunctorArgs ~closing:Rparen + ~f:parseFunctorArg p + in + Parser.expect Rparen p; + match args with + | [] -> + [([], Location.mkloc "*" (mkLoc startPos p.prevEndPos), None, startPos)] + | args -> args + +and parseFunctorModuleExpr p = + let startPos = p.Parser.startPos in + let args = parseFunctorArgs p in + let returnType = + match p.Parser.token with + | Colon -> + Parser.next p; + Some (parseModuleType ~es6Arrow:false p) + | _ -> None + in + Parser.expect EqualGreater p; + let rhsModuleExpr = + let modExpr = parseModuleExpr p in + 
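+ (* e.g. for a functor written roughly as `module Make = (Cmp: Comparable) => {...}`,
+    each argument collected in `args` is folded over this right-hand side below
+    into nested Mod.functor_ nodes. *)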
match returnType with + | Some modType -> + Ast_helper.Mod.constraint_ + ~loc: + (mkLoc modExpr.pmod_loc.loc_start modType.Parsetree.pmty_loc.loc_end) + modExpr modType + | None -> modExpr + in + let endPos = p.prevEndPos in + let modExpr = + List.fold_right + (fun (attrs, name, moduleType, startPos) acc -> + Ast_helper.Mod.functor_ ~loc:(mkLoc startPos endPos) ~attrs name + moduleType acc) + args rhsModuleExpr + in + {modExpr with pmod_loc = mkLoc startPos endPos} + +(* module-expr ::= + * | module-path + * ∣ { structure-items } + * ∣ functorArgs => module-expr + * ∣ module-expr(module-expr) + * ∣ ( module-expr ) + * ∣ ( module-expr : module-type ) + * | extension + * | attributes module-expr *) +and parseModuleExpr p = + let attrs = parseAttributes p in + let modExpr = + if isEs6ArrowFunctor p then parseFunctorModuleExpr p + else parsePrimaryModExpr p + in + {modExpr with pmod_attributes = List.concat [modExpr.pmod_attributes; attrs]} + +and parseConstrainedModExpr p = + let modExpr = parseModuleExpr p in + match p.Parser.token with + | Colon -> + Parser.next p; + let modType = parseModuleType p in + let loc = mkLoc modExpr.pmod_loc.loc_start modType.pmty_loc.loc_end in + Ast_helper.Mod.constraint_ ~loc modExpr modType + | _ -> modExpr + +and parseConstrainedModExprRegion p = + if Grammar.isModExprStart p.Parser.token then Some (parseConstrainedModExpr p) + else None + +and parseModuleApplication p modExpr = + let startPos = p.Parser.startPos in + Parser.expect Lparen p; + let args = + parseCommaDelimitedRegion ~grammar:Grammar.ModExprList ~closing:Rparen + ~f:parseConstrainedModExprRegion p + in + Parser.expect Rparen p; + let args = + match args with + | [] -> + let loc = mkLoc startPos p.prevEndPos in + [Ast_helper.Mod.structure ~loc []] + | args -> args + in + List.fold_left + (fun modExpr arg -> + Ast_helper.Mod.apply + ~loc: + (mkLoc modExpr.Parsetree.pmod_loc.loc_start + arg.Parsetree.pmod_loc.loc_end) + modExpr arg) + modExpr args + +and parseModuleOrModuleTypeImplOrPackExpr ~attrs p = + let startPos = p.Parser.startPos in + Parser.expect Module p; + match p.Parser.token with + | Typ -> parseModuleTypeImpl ~attrs startPos p + | Lparen -> + let expr = parseFirstClassModuleExpr ~startPos p in + let a = parsePrimaryExpr ~operand:expr p in + let expr = parseBinaryExpr ~a p 1 in + let expr = parseTernaryExpr expr p in + Ast_helper.Str.eval ~attrs expr + | _ -> parseMaybeRecModuleBinding ~attrs ~startPos p + +and parseModuleTypeImpl ~attrs startPos p = + Parser.expect Typ p; + let nameStart = p.Parser.startPos in + let name = + match p.Parser.token with + | Lident ident -> + Parser.next p; + let loc = mkLoc nameStart p.prevEndPos in + Location.mkloc ident loc + | Uident ident -> + Parser.next p; + let loc = mkLoc nameStart p.prevEndPos in + Location.mkloc ident loc + | t -> + Parser.err p (Diagnostics.uident t); + Location.mknoloc "_" + in + Parser.expect Equal p; + let moduleType = parseModuleType p in + let moduleTypeDeclaration = + Ast_helper.Mtd.mk ~attrs + ~loc:(mkLoc nameStart p.prevEndPos) + ~typ:moduleType name + in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Str.modtype ~loc moduleTypeDeclaration + +(* definition ::= + ∣ module rec module-name : module-type = module-expr { and module-name + : module-type = module-expr } *) +and parseMaybeRecModuleBinding ~attrs ~startPos p = + match p.Parser.token with + | Token.Rec -> + Parser.next p; + Ast_helper.Str.rec_module (parseModuleBindings ~startPos ~attrs p) + | _ -> + Ast_helper.Str.module_ + (parseModuleBinding ~attrs 
~startPos:p.Parser.startPos p) + +and parseModuleBinding ~attrs ~startPos p = + let name = + match p.Parser.token with + | Uident ident -> + let startPos = p.Parser.startPos in + Parser.next p; + let loc = mkLoc startPos p.prevEndPos in + Location.mkloc ident loc + | t -> + Parser.err p (Diagnostics.uident t); + Location.mknoloc "_" + in + let body = parseModuleBindingBody p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Mb.mk ~attrs ~loc name body + +and parseModuleBindingBody p = + (* TODO: make required with good error message when rec module binding *) + let returnModType = + match p.Parser.token with + | Colon -> + Parser.next p; + Some (parseModuleType p) + | _ -> None + in + Parser.expect Equal p; + let modExpr = parseModuleExpr p in + match returnModType with + | Some modType -> + Ast_helper.Mod.constraint_ + ~loc:(mkLoc modType.pmty_loc.loc_start modExpr.pmod_loc.loc_end) + modExpr modType + | None -> modExpr + +(* module-name : module-type = module-expr + * { and module-name : module-type = module-expr } *) +and parseModuleBindings ~attrs ~startPos p = + let rec loop p acc = + let startPos = p.Parser.startPos in + let attrs = parseAttributesAndBinding p in + match p.Parser.token with + | And -> + Parser.next p; + ignore (Parser.optional p Module); + (* over-parse for fault-tolerance *) + let modBinding = parseModuleBinding ~attrs ~startPos p in + loop p (modBinding :: acc) + | _ -> List.rev acc + in + let first = parseModuleBinding ~attrs ~startPos p in + loop p [first] + +and parseAtomicModuleType p = + let startPos = p.Parser.startPos in + let moduleType = + match p.Parser.token with + | Uident _ | Lident _ -> + (* Ocaml allows module types to end with lowercase: module Foo : bar = { ... } + * lets go with uppercase terminal for now *) + let moduleLongIdent = parseModuleLongIdent ~lowercase:true p in + Ast_helper.Mty.ident ~loc:moduleLongIdent.loc moduleLongIdent + | Lparen -> + Parser.next p; + let mty = parseModuleType p in + Parser.expect Rparen p; + {mty with pmty_loc = mkLoc startPos p.prevEndPos} + | Lbrace -> + Parser.next p; + let spec = + parseDelimitedRegion ~grammar:Grammar.Signature ~closing:Rbrace + ~f:parseSignatureItemRegion p + in + Parser.expect Rbrace p; + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Mty.signature ~loc spec + | Module -> + (* TODO: check if this is still atomic when implementing first class modules*) + parseModuleTypeOf p + | Percent -> + let extension = parseExtension p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Mty.extension ~loc extension + | token -> + Parser.err p (Diagnostics.unexpected token p.breadcrumbs); + Recover.defaultModuleType () + in + let moduleTypeLoc = mkLoc startPos p.prevEndPos in + {moduleType with pmty_loc = moduleTypeLoc} + +and parseFunctorModuleType p = + let startPos = p.Parser.startPos in + let args = parseFunctorArgs p in + Parser.expect EqualGreater p; + let rhs = parseModuleType p in + let endPos = p.prevEndPos in + let modType = + List.fold_right + (fun (attrs, name, moduleType, startPos) acc -> + Ast_helper.Mty.functor_ ~loc:(mkLoc startPos endPos) ~attrs name + moduleType acc) + args rhs + in + {modType with pmty_loc = mkLoc startPos endPos} + +(* Module types are the module-level equivalent of type expressions: they + * specify the general shape and type properties of modules. 
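+ * For example, roughly `module type Comparable = {type t; let compare: (t, t) => int}`
+ * uses the `{ signature }` form listed below.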
+ * + * module-type ::= + * | modtype-path + * | { signature } + * | ( module-type ) --> parenthesized module-type + * | functor-args => module-type --> functor + * | module-type => module-type --> functor + * | module type of module-expr + * | attributes module-type + * | module-type with-mod-constraints + * | extension + *) +and parseModuleType ?(es6Arrow = true) ?(with_ = true) p = + let attrs = parseAttributes p in + let modty = + if es6Arrow && isEs6ArrowFunctor p then parseFunctorModuleType p + else + let modty = parseAtomicModuleType p in + match p.Parser.token with + | EqualGreater when es6Arrow == true -> + Parser.next p; + let rhs = parseModuleType ~with_:false p in + let str = Location.mknoloc "_" in + let loc = mkLoc modty.pmty_loc.loc_start p.prevEndPos in + Ast_helper.Mty.functor_ ~loc str (Some modty) rhs + | _ -> modty + in + let moduleType = + {modty with pmty_attributes = List.concat [modty.pmty_attributes; attrs]} + in + if with_ then parseWithConstraints moduleType p else moduleType + +and parseWithConstraints moduleType p = + match p.Parser.token with + | Lident "with" -> + Parser.next p; + let first = parseWithConstraint p in + let rec loop p acc = + match p.Parser.token with + | And -> + Parser.next p; + loop p (parseWithConstraint p :: acc) + | _ -> List.rev acc + in + let constraints = loop p [first] in + let loc = mkLoc moduleType.pmty_loc.loc_start p.prevEndPos in + Ast_helper.Mty.with_ ~loc moduleType constraints + | _ -> moduleType + +(* mod-constraint ::= + * | type typeconstr type-equation type-constraints? + * ∣ type typeconstr-name := typexpr + * ∣ module module-path = extended-module-path + * ∣ module module-path := extended-module-path + * + * TODO: split this up into multiple functions, better errors *) +and parseWithConstraint p = + match p.Parser.token with + | Module -> ( + Parser.next p; + let modulePath = parseModuleLongIdent ~lowercase:false p in + match p.Parser.token with + | ColonEqual -> + Parser.next p; + let lident = parseModuleLongIdent ~lowercase:false p in + Parsetree.Pwith_modsubst (modulePath, lident) + | Equal -> + Parser.next p; + let lident = parseModuleLongIdent ~lowercase:false p in + Parsetree.Pwith_module (modulePath, lident) + | token -> + (* TODO: revisit *) + Parser.err p (Diagnostics.unexpected token p.breadcrumbs); + let lident = parseModuleLongIdent ~lowercase:false p in + Parsetree.Pwith_modsubst (modulePath, lident)) + | Typ -> ( + Parser.next p; + let typeConstr = parseValuePath p in + let params = parseTypeParams ~parent:typeConstr p in + match p.Parser.token with + | ColonEqual -> + Parser.next p; + let typExpr = parseTypExpr p in + Parsetree.Pwith_typesubst + ( typeConstr, + Ast_helper.Type.mk ~loc:typeConstr.loc ~params ~manifest:typExpr + (Location.mkloc (Longident.last typeConstr.txt) typeConstr.loc) ) + | Equal -> + Parser.next p; + let typExpr = parseTypExpr p in + let typeConstraints = parseTypeConstraints p in + Parsetree.Pwith_type + ( typeConstr, + Ast_helper.Type.mk ~loc:typeConstr.loc ~params ~manifest:typExpr + ~cstrs:typeConstraints + (Location.mkloc (Longident.last typeConstr.txt) typeConstr.loc) ) + | token -> + (* TODO: revisit *) + Parser.err p (Diagnostics.unexpected token p.breadcrumbs); + let typExpr = parseTypExpr p in + let typeConstraints = parseTypeConstraints p in + Parsetree.Pwith_type + ( typeConstr, + Ast_helper.Type.mk ~loc:typeConstr.loc ~params ~manifest:typExpr + ~cstrs:typeConstraints + (Location.mkloc (Longident.last typeConstr.txt) typeConstr.loc) )) + | token -> + (* TODO: implement 
recovery strategy *) + Parser.err p (Diagnostics.unexpected token p.breadcrumbs); + Parsetree.Pwith_type + ( Location.mknoloc (Longident.Lident ""), + Ast_helper.Type.mk ~params:[] ~manifest:(Recover.defaultType ()) + ~cstrs:[] (Location.mknoloc "") ) + +and parseModuleTypeOf p = + let startPos = p.Parser.startPos in + Parser.expect Module p; + Parser.expect Typ p; + Parser.expect Of p; + let moduleExpr = parseModuleExpr p in + Ast_helper.Mty.typeof_ ~loc:(mkLoc startPos p.prevEndPos) moduleExpr + +and parseNewlineOrSemicolonSignature p = + match p.Parser.token with + | Semicolon -> Parser.next p + | token when Grammar.isSignatureItemStart token -> + if p.prevEndPos.pos_lnum < p.startPos.pos_lnum then () + else + Parser.err ~startPos:p.prevEndPos ~endPos:p.endPos p + (Diagnostics.message + "consecutive specifications on a line must be separated by ';' or a \ + newline") + | _ -> () + +and parseSignatureItemRegion p = + let startPos = p.Parser.startPos in + let attrs = parseAttributes p in + match p.Parser.token with + | Let -> + Parser.beginRegion p; + let valueDesc = parseSignLetDesc ~attrs p in + parseNewlineOrSemicolonSignature p; + let loc = mkLoc startPos p.prevEndPos in + Parser.endRegion p; + Some (Ast_helper.Sig.value ~loc valueDesc) + | Typ -> ( + Parser.beginRegion p; + match parseTypeDefinitionOrExtension ~attrs p with + | TypeDef {recFlag; types} -> + parseNewlineOrSemicolonSignature p; + let loc = mkLoc startPos p.prevEndPos in + Parser.endRegion p; + Some (Ast_helper.Sig.type_ ~loc recFlag types) + | TypeExt ext -> + parseNewlineOrSemicolonSignature p; + let loc = mkLoc startPos p.prevEndPos in + Parser.endRegion p; + Some (Ast_helper.Sig.type_extension ~loc ext)) + | External -> + let externalDef = parseExternalDef ~attrs ~startPos p in + parseNewlineOrSemicolonSignature p; + let loc = mkLoc startPos p.prevEndPos in + Some (Ast_helper.Sig.value ~loc externalDef) + | Exception -> + let exceptionDef = parseExceptionDef ~attrs p in + parseNewlineOrSemicolonSignature p; + let loc = mkLoc startPos p.prevEndPos in + Some (Ast_helper.Sig.exception_ ~loc exceptionDef) + | Open -> + let openDescription = parseOpenDescription ~attrs p in + parseNewlineOrSemicolonSignature p; + let loc = mkLoc startPos p.prevEndPos in + Some (Ast_helper.Sig.open_ ~loc openDescription) + | Include -> + Parser.next p; + let moduleType = parseModuleType p in + let includeDescription = + Ast_helper.Incl.mk ~loc:(mkLoc startPos p.prevEndPos) ~attrs moduleType + in + parseNewlineOrSemicolonSignature p; + let loc = mkLoc startPos p.prevEndPos in + Some (Ast_helper.Sig.include_ ~loc includeDescription) + | Module -> ( + Parser.beginRegion p; + Parser.next p; + match p.Parser.token with + | Uident _ -> + let modDecl = parseModuleDeclarationOrAlias ~attrs p in + parseNewlineOrSemicolonSignature p; + let loc = mkLoc startPos p.prevEndPos in + Parser.endRegion p; + Some (Ast_helper.Sig.module_ ~loc modDecl) + | Rec -> + let recModule = parseRecModuleSpec ~attrs ~startPos p in + parseNewlineOrSemicolonSignature p; + let loc = mkLoc startPos p.prevEndPos in + Parser.endRegion p; + Some (Ast_helper.Sig.rec_module ~loc recModule) + | Typ -> + let modTypeDecl = parseModuleTypeDeclaration ~attrs ~startPos p in + Parser.endRegion p; + Some modTypeDecl + | _t -> + let modDecl = parseModuleDeclarationOrAlias ~attrs p in + parseNewlineOrSemicolonSignature p; + let loc = mkLoc startPos p.prevEndPos in + Parser.endRegion p; + Some (Ast_helper.Sig.module_ ~loc modDecl)) + | AtAt -> + let attr = parseStandaloneAttribute p in + 
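+ (* a standalone attribute in a signature, for instance something like
+    `@@warning("-27")` *)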
parseNewlineOrSemicolonSignature p; + let loc = mkLoc startPos p.prevEndPos in + Some (Ast_helper.Sig.attribute ~loc attr) + | ModuleComment (loc, s) -> + Parser.next p; + Some + (Ast_helper.Sig.attribute ~loc + ( {txt = "ns.doc"; loc}, + PStr + [ + Ast_helper.Str.eval ~loc + (Ast_helper.Exp.constant ~loc (Pconst_string (s, None))); + ] )) + | PercentPercent -> + let extension = parseExtension ~moduleLanguage:true p in + parseNewlineOrSemicolonSignature p; + let loc = mkLoc startPos p.prevEndPos in + Some (Ast_helper.Sig.extension ~attrs ~loc extension) + | _ -> ( + match attrs with + | (({Asttypes.loc = attrLoc}, _) as attr) :: _ -> + Parser.err ~startPos:attrLoc.loc_start ~endPos:attrLoc.loc_end p + (Diagnostics.message (ErrorMessages.attributeWithoutNode attr)); + Some Recover.defaultSignatureItem + | _ -> None) + [@@progress Parser.next, Parser.expect] + +(* module rec module-name : module-type { and module-name: module-type } *) +and parseRecModuleSpec ~attrs ~startPos p = + Parser.expect Rec p; + let rec loop p spec = + let startPos = p.Parser.startPos in + let attrs = parseAttributesAndBinding p in + match p.Parser.token with + | And -> + (* TODO: give a good error message when with constraint, no parens + * and ASet: (Set.S with type elt = A.t) + * and BTree: (Btree.S with type elt = A.t) + * Without parens, the `and` signals the start of another + * `with-constraint` + *) + Parser.expect And p; + let decl = parseRecModuleDeclaration ~attrs ~startPos p in + loop p (decl :: spec) + | _ -> List.rev spec + in + let first = parseRecModuleDeclaration ~attrs ~startPos p in + loop p [first] + +(* module-name : module-type *) +and parseRecModuleDeclaration ~attrs ~startPos p = + let name = + match p.Parser.token with + | Uident modName -> + let loc = mkLoc p.startPos p.endPos in + Parser.next p; + Location.mkloc modName loc + | t -> + Parser.err p (Diagnostics.uident t); + Location.mknoloc "_" + in + Parser.expect Colon p; + let modType = parseModuleType p in + Ast_helper.Md.mk ~loc:(mkLoc startPos p.prevEndPos) ~attrs name modType + +and parseModuleDeclarationOrAlias ~attrs p = + let startPos = p.Parser.startPos in + let moduleName = + match p.Parser.token with + | Uident ident -> + let loc = mkLoc p.Parser.startPos p.endPos in + Parser.next p; + Location.mkloc ident loc + | t -> + Parser.err p (Diagnostics.uident t); + Location.mknoloc "_" + in + let body = + match p.Parser.token with + | Colon -> + Parser.next p; + parseModuleType p + | Equal -> + Parser.next p; + let lident = parseModuleLongIdent ~lowercase:false p in + Ast_helper.Mty.alias lident + | token -> + Parser.err p (Diagnostics.unexpected token p.breadcrumbs); + Recover.defaultModuleType () + in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Md.mk ~loc ~attrs moduleName body + +and parseModuleTypeDeclaration ~attrs ~startPos p = + Parser.expect Typ p; + let moduleName = + match p.Parser.token with + | Uident ident -> + let loc = mkLoc p.startPos p.endPos in + Parser.next p; + Location.mkloc ident loc + | Lident ident -> + let loc = mkLoc p.startPos p.endPos in + Parser.next p; + Location.mkloc ident loc + | t -> + Parser.err p (Diagnostics.uident t); + Location.mknoloc "_" + in + let typ = + match p.Parser.token with + | Equal -> + Parser.next p; + Some (parseModuleType p) + | _ -> None + in + let moduleDecl = Ast_helper.Mtd.mk ~attrs ?typ moduleName in + Ast_helper.Sig.modtype ~loc:(mkLoc startPos p.prevEndPos) moduleDecl + +and parseSignLetDesc ~attrs p = + let startPos = p.Parser.startPos in + Parser.optional p 
Let |> ignore; + let name, loc = parseLident p in + let name = Location.mkloc name loc in + Parser.expect Colon p; + let typExpr = parsePolyTypeExpr p in + let loc = mkLoc startPos p.prevEndPos in + Ast_helper.Val.mk ~loc ~attrs name typExpr + +(* attr-id ::= lowercase-ident + ∣ capitalized-ident + ∣ attr-id . attr-id *) +and parseAttributeId ~startPos p = + let rec loop p acc = + match p.Parser.token with + | Lident ident | Uident ident -> ( + Parser.next p; + let id = acc ^ ident in + match p.Parser.token with + | Dot -> + Parser.next p; + loop p (id ^ ".") + | _ -> id) + | token when Token.isKeyword token -> ( + Parser.next p; + let id = acc ^ Token.toString token in + match p.Parser.token with + | Dot -> + Parser.next p; + loop p (id ^ ".") + | _ -> id) + | token -> + Parser.err p (Diagnostics.unexpected token p.breadcrumbs); + acc + in + let id = loop p "" in + let endPos = p.prevEndPos in + Location.mkloc id (mkLoc startPos endPos) + +(* + * payload ::= empty + * | ( structure-item ) + * + * TODO: what about multiple structure items? + * @attr({let x = 1; let x = 2}) + * + * Also what about type-expressions and specifications? + * @attr(:myType) ??? + *) +and parsePayload p = + match p.Parser.token with + | Lparen when p.startPos.pos_cnum = p.prevEndPos.pos_cnum -> ( + Parser.leaveBreadcrumb p Grammar.AttributePayload; + Parser.next p; + match p.token with + | Colon -> + Parser.next p; + let payload = + if Grammar.isSignatureItemStart p.token then + Parsetree.PSig + (parseDelimitedRegion ~grammar:Grammar.Signature ~closing:Rparen + ~f:parseSignatureItemRegion p) + else Parsetree.PTyp (parseTypExpr p) + in + Parser.expect Rparen p; + Parser.eatBreadcrumb p; + payload + | Question -> + Parser.next p; + let pattern = parsePattern p in + let expr = + match p.token with + | When | If -> + Parser.next p; + Some (parseExpr p) + | _ -> None + in + Parser.expect Rparen p; + Parser.eatBreadcrumb p; + Parsetree.PPat (pattern, expr) + | _ -> + let items = + parseDelimitedRegion ~grammar:Grammar.Structure ~closing:Rparen + ~f:parseStructureItemRegion p + in + Parser.expect Rparen p; + Parser.eatBreadcrumb p; + Parsetree.PStr items) + | _ -> Parsetree.PStr [] + +(* type attribute = string loc * payload *) +and parseAttribute p = + match p.Parser.token with + | At -> + let startPos = p.startPos in + Parser.next p; + let attrId = parseAttributeId ~startPos p in + let payload = parsePayload p in + Some (attrId, payload) + | DocComment (loc, s) -> + Parser.next p; + Some + ( {txt = "ns.doc"; loc}, + PStr + [ + Ast_helper.Str.eval ~loc + (Ast_helper.Exp.constant ~loc (Pconst_string (s, None))); + ] ) + | _ -> None + +and parseAttributes p = + parseRegion p ~grammar:Grammar.Attribute ~f:parseAttribute + +(* + * standalone-attribute ::= + * | @@ atribute-id + * | @@ attribute-id ( structure-item ) + *) +and parseStandaloneAttribute p = + let startPos = p.startPos in + (* XX *) + Parser.expect AtAt p; + let attrId = parseAttributeId ~startPos p in + let payload = parsePayload p in + (attrId, payload) + +(* extension ::= % attr-id attr-payload + * | %% attr-id( + * expr ::= ... + * ∣ extension + * + * typexpr ::= ... + * ∣ extension + * + * pattern ::= ... + * ∣ extension + * + * module-expr ::= ... + * ∣ extension + * + * module-type ::= ... + * ∣ extension + * + * class-expr ::= ... + * ∣ extension + * + * class-type ::= ... 
+ * ∣ extension + * + * + * item extension nodes usable in structures and signature + * + * item-extension ::= %% attr-id + * | %% attr-id(structure-item) + * + * attr-payload ::= structure-item + * + * ~moduleLanguage represents whether we're on the module level or not + *) +and parseExtension ?(moduleLanguage = false) p = + let startPos = p.Parser.startPos in + if moduleLanguage then Parser.expect PercentPercent p + else Parser.expect Percent p; + let attrId = parseAttributeId ~startPos p in + let payload = parsePayload p in + (attrId, payload) + +(* module signature on the file level *) +let parseSpecification p : Parsetree.signature = + parseRegion p ~grammar:Grammar.Specification ~f:parseSignatureItemRegion + +(* module structure on the file level *) +let parseImplementation p : Parsetree.structure = + parseRegion p ~grammar:Grammar.Implementation ~f:parseStructureItemRegion diff --git a/res_syntax/src/res_core.mli b/res_syntax/src/res_core.mli new file mode 100644 index 0000000000..e77ca30bb1 --- /dev/null +++ b/res_syntax/src/res_core.mli @@ -0,0 +1,2 @@ +val parseImplementation : Res_parser.t -> Parsetree.structure +val parseSpecification : Res_parser.t -> Parsetree.signature diff --git a/res_syntax/src/res_diagnostics.ml b/res_syntax/src/res_diagnostics.ml new file mode 100644 index 0000000000..dc6ea559ca --- /dev/null +++ b/res_syntax/src/res_diagnostics.ml @@ -0,0 +1,173 @@ +module Grammar = Res_grammar +module Token = Res_token + +type category = + | Unexpected of {token: Token.t; context: (Grammar.t * Lexing.position) list} + | Expected of { + context: Grammar.t option; + pos: Lexing.position; (* prev token end*) + token: Token.t; + } + | Message of string + | Uident of Token.t + | Lident of Token.t + | UnclosedString + | UnclosedTemplate + | UnclosedComment + | UnknownUchar of Char.t + +type t = { + startPos: Lexing.position; + endPos: Lexing.position; + category: category; +} + +type report = t list + +let getStartPos t = t.startPos +let getEndPos t = t.endPos + +let defaultUnexpected token = + "I'm not sure what to parse here when looking at \"" ^ Token.toString token + ^ "\"." + +let reservedKeyword token = + let tokenTxt = Token.toString token in + "`" ^ tokenTxt ^ "` is a reserved keyword. Keywords need to be escaped: \\\"" + ^ tokenTxt ^ "\"" + +let explain t = + match t.category with + | Uident currentToken -> ( + match currentToken with + | Lident lident -> + let guess = String.capitalize_ascii lident in + "Did you mean `" ^ guess ^ "` instead of `" ^ lident ^ "`?" + | t when Token.isKeyword t -> + let token = Token.toString t in + "`" ^ token ^ "` is a reserved keyword." + | _ -> + "At this point, I'm looking for an uppercased name like `Belt` or `Array`" + ) + | Lident currentToken -> ( + match currentToken with + | Uident uident -> + let guess = String.uncapitalize_ascii uident in + "Did you mean `" ^ guess ^ "` instead of `" ^ uident ^ "`?" + | t when Token.isKeyword t -> + let token = Token.toString t in + "`" ^ token ^ "` is a reserved keyword. Keywords need to be escaped: \\\"" + ^ token ^ "\"" + | Underscore -> "`_` isn't a valid name." + | _ -> "I'm expecting a lowercase name like `user or `age`") + | Message txt -> txt + | UnclosedString -> "This string is missing a double quote at the end" + | UnclosedTemplate -> + "Did you forget to close this template expression with a backtick?" 
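The two entry points declared in `res_core.mli` above are all a client needs to turn source text into an AST. Below is a minimal sketch of driving them — not part of this patch — assuming `Res_parser.make` and the parser's `diagnostics` field are exposed the way `res_driver.ml` uses them later in this diff; `"example.res"` is only a display name:

```ocaml
let () =
  let source = "let add = (x, y) => x + y" in
  (* ParseForTypeChecker keeps the AST shape the compiler expects;
     Res_parser.Default is what the printer uses *)
  let p =
    Res_parser.make ~mode:Res_parser.ParseForTypeChecker source "example.res"
  in
  let structure = Res_core.parseImplementation p in
  match p.Res_parser.diagnostics with
  | [] -> Printf.printf "parsed %d structure item(s)\n" (List.length structure)
  | ds -> Res_diagnostics.printReport ds source
```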
+ | UnclosedComment -> "This comment seems to be missing a closing `*/`" + | UnknownUchar uchar -> ( + match uchar with + | '^' -> + "Not sure what to do with this character.\n" + ^ " If you're trying to dereference a mutable value, use \ + `myValue.contents` instead.\n" + ^ " To concatenate strings, use `\"a\" ++ \"b\"` instead." + | _ -> "Not sure what to do with this character.") + | Expected {context; token = t} -> + let hint = + match context with + | Some grammar -> " It signals the start of " ^ Grammar.toString grammar + | None -> "" + in + "Did you forget a `" ^ Token.toString t ^ "` here?" ^ hint + | Unexpected {token = t; context = breadcrumbs} -> ( + let name = Token.toString t in + match breadcrumbs with + | (AtomicTypExpr, _) :: breadcrumbs -> ( + match (breadcrumbs, t) with + | ( ((StringFieldDeclarations | FieldDeclarations), _) :: _, + (String _ | At | Rbrace | Comma | Eof) ) -> + "I'm missing a type here" + | _, t when Grammar.isStructureItemStart t || t = Eof -> + "Missing a type here" + | _ -> defaultUnexpected t) + | (ExprOperand, _) :: breadcrumbs -> ( + match (breadcrumbs, t) with + | (ExprBlock, _) :: _, Rbrace -> + "It seems that this expression block is empty" + | (ExprBlock, _) :: _, Bar -> + (* Pattern matching *) + "Looks like there might be an expression missing here" + | (ExprSetField, _) :: _, _ -> + "It seems that this record field mutation misses an expression" + | (ExprArrayMutation, _) :: _, _ -> + "Seems that an expression is missing, with what do I mutate the array?" + | ((ExprBinaryAfterOp _ | ExprUnary), _) :: _, _ -> + "Did you forget to write an expression here?" + | (Grammar.LetBinding, _) :: _, _ -> + "This let-binding misses an expression" + | _ :: _, (Rbracket | Rbrace | Eof) -> "Missing expression" + | _ -> "I'm not sure what to parse here when looking at \"" ^ name ^ "\"." + ) + | (TypeParam, _) :: _ -> ( + match t with + | Lident ident -> + "Did you mean '" ^ ident ^ "? A Type parameter starts with a quote." + | _ -> "I'm not sure what to parse here when looking at \"" ^ name ^ "\"." + ) + | (Pattern, _) :: breadcrumbs -> ( + match (t, breadcrumbs) with + | Equal, (LetBinding, _) :: _ -> + "I was expecting a name for this let-binding. Example: `let message = \ + \"hello\"`" + | In, (ExprFor, _) :: _ -> + "A for-loop has the following form: `for i in 0 to 10`. Did you forget \ + to supply a name before `in`?" + | EqualGreater, (PatternMatchCase, _) :: _ -> + "I was expecting a pattern to match on before the `=>`" + | token, _ when Token.isKeyword t -> reservedKeyword token + | token, _ -> defaultUnexpected token) + | _ -> + (* TODO: match on circumstance to verify Lident needed ? *) + if Token.isKeyword t then + "`" ^ name + ^ "` is a reserved keyword. Keywords need to be escaped: \\\"" + ^ Token.toString t ^ "\"" + else "I'm not sure what to parse here when looking at \"" ^ name ^ "\".") + +let make ~startPos ~endPos category = {startPos; endPos; category} + +let printReport diagnostics src = + let rec print diagnostics src = + match diagnostics with + | [] -> () + | d :: rest -> + Res_diagnostics_printing_utils.Super_location.super_error_reporter + Format.err_formatter src + Location. 
+ { + loc = {loc_start = d.startPos; loc_end = d.endPos; loc_ghost = false}; + msg = explain d; + sub = []; + if_highlight = ""; + }; + (match rest with + | [] -> () + | _ -> Format.fprintf Format.err_formatter "@."); + print rest src + in + Format.fprintf Format.err_formatter "@["; + print (List.rev diagnostics) src; + Format.fprintf Format.err_formatter "@]@." + +let unexpected token context = Unexpected {token; context} + +let expected ?grammar pos token = Expected {context = grammar; pos; token} + +let uident currentToken = Uident currentToken +let lident currentToken = Lident currentToken +let unclosedString = UnclosedString +let unclosedComment = UnclosedComment +let unclosedTemplate = UnclosedTemplate +let unknownUchar code = UnknownUchar code +let message txt = Message txt diff --git a/res_syntax/src/res_diagnostics.mli b/res_syntax/src/res_diagnostics.mli new file mode 100644 index 0000000000..0ae74cec23 --- /dev/null +++ b/res_syntax/src/res_diagnostics.mli @@ -0,0 +1,25 @@ +module Token = Res_token +module Grammar = Res_grammar + +type t +type category +type report + +val getStartPos : t -> Lexing.position [@@live] (* for playground *) +val getEndPos : t -> Lexing.position [@@live] (* for playground *) + +val explain : t -> string [@@live] (* for playground *) + +val unexpected : Token.t -> (Grammar.t * Lexing.position) list -> category +val expected : ?grammar:Grammar.t -> Lexing.position -> Token.t -> category +val uident : Token.t -> category +val lident : Token.t -> category +val unclosedString : category +val unclosedTemplate : category +val unclosedComment : category +val unknownUchar : Char.t -> category +val message : string -> category + +val make : startPos:Lexing.position -> endPos:Lexing.position -> category -> t + +val printReport : t list -> string -> unit diff --git a/res_syntax/src/res_diagnostics_printing_utils.ml b/res_syntax/src/res_diagnostics_printing_utils.ml new file mode 100644 index 0000000000..74d23e4049 --- /dev/null +++ b/res_syntax/src/res_diagnostics_printing_utils.ml @@ -0,0 +1,389 @@ +(* + This file is taken from ReScript's super_code_frame.ml and super_location.ml + We're copying the look of ReScript's terminal error reporting. + See https://github.com/rescript-lang/syntax/pull/77 for the rationale. + A few lines have been commented out and swapped for their tweaked version. 
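The diagnostics interface above is deliberately small: callers build a `category`, wrap it with `make`, and hand the resulting list to `printReport` together with the original source. A hedged sketch — the positions and the `"example.res"` file name are invented for illustration:

```ocaml
let () =
  let source = "let x = " in
  (* build a Lexing.position on line 1 at the given character offset *)
  let pos cnum =
    {Lexing.pos_fname = "example.res"; pos_lnum = 1; pos_bol = 0; pos_cnum = cnum}
  in
  let d =
    Res_diagnostics.make ~startPos:(pos 8) ~endPos:(pos 8)
      (Res_diagnostics.message "Did you forget to write an expression here?")
  in
  (* renders the "Syntax error!" report with a code frame on stderr *)
  Res_diagnostics.printReport [d] source
```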
+*) + +(* ===== super_code_frame.ml *) + +module Super_code_frame = struct + let digits_count n = + let rec loop n base count = + if n >= base then loop n (base * 10) (count + 1) else count + in + loop (abs n) 1 0 + + let seek_2_lines_before src pos = + let open Lexing in + let original_line = pos.pos_lnum in + let rec loop current_line current_char = + if current_line + 2 >= original_line then (current_char, current_line) + else + loop + (if (src.[current_char] [@doesNotRaise]) = '\n' then current_line + 1 + else current_line) + (current_char + 1) + in + loop 1 0 + + let seek_2_lines_after src pos = + let open Lexing in + let original_line = pos.pos_lnum in + let rec loop current_line current_char = + if current_char = String.length src then (current_char, current_line) + else + match src.[current_char] [@doesNotRaise] with + | '\n' when current_line = original_line + 2 -> + (current_char, current_line) + | '\n' -> loop (current_line + 1) (current_char + 1) + | _ -> loop current_line (current_char + 1) + in + loop original_line pos.pos_cnum + + let leading_space_count str = + let rec loop i count = + if i = String.length str then count + else if (str.[i] [@doesNotRaise]) != ' ' then count + else loop (i + 1) (count + 1) + in + loop 0 0 + + let break_long_line max_width line = + let rec loop pos accum = + if pos = String.length line then accum + else + let chunk_length = min max_width (String.length line - pos) in + let chunk = (String.sub [@doesNotRaise]) line pos chunk_length in + loop (pos + chunk_length) (chunk :: accum) + in + loop 0 [] |> List.rev + + let filter_mapi f l = + let rec loop f l i accum = + match l with + | [] -> accum + | head :: rest -> + let accum = + match f i head with + | None -> accum + | Some result -> result :: accum + in + loop f rest (i + 1) accum + in + loop f l 0 [] |> List.rev + + (* Spiritual equivalent of + https://github.com/ocaml/ocaml/blob/414bdec9ae387129b8102cc6bf3c0b6ae173eeb9/utils/misc.ml#L601 + *) + module Color = struct + type color = + | Dim + (* | Filename *) + | Err + | Warn + | NoColor + + let dim = "\x1b[2m" + + (* let filename = "\x1b[46m" *) + let err = "\x1b[1;31m" + let warn = "\x1b[1;33m" + let reset = "\x1b[0m" + + external isatty : out_channel -> bool = "caml_sys_isatty" + + (* reasonable heuristic on whether colors should be enabled *) + let should_enable_color () = + let term = try Sys.getenv "TERM" with Not_found -> "" in + term <> "dumb" && term <> "" && isatty stderr + + let color_enabled = ref true + + let setup = + let first = ref true in + (* initialize only once *) + fun o -> + if !first then ( + first := false; + color_enabled := + match o with + | Some Misc.Color.Always -> true + | Some Auto -> should_enable_color () + | Some Never -> false + | None -> should_enable_color ()); + () + end + + let setup = Color.setup + + type gutter = Number of int | Elided + type highlighted_string = {s: string; start: int; end_: int} + type line = {gutter: gutter; content: highlighted_string list} + + (* + Features: + - display a line gutter + - break long line into multiple for terminal display + - peek 2 lines before & after for context + - center snippet when it's heavily indented + - ellide intermediate lines when the reported range is huge +*) + let print ~is_warning ~src ~startPos ~endPos = + let open Lexing in + let indent = 2 in + let highlight_line_start_line = startPos.pos_lnum in + let highlight_line_end_line = endPos.pos_lnum in + let start_line_line_offset, first_shown_line = + seek_2_lines_before src startPos + in + let 
end_line_line_end_offset, last_shown_line = + seek_2_lines_after src endPos + in + + let more_than_5_highlighted_lines = + highlight_line_end_line - highlight_line_start_line + 1 > 5 + in + let max_line_digits_count = digits_count last_shown_line in + (* TODO: change this back to a fixed 100? *) + (* 3 for separator + the 2 spaces around it *) + let line_width = 78 - max_line_digits_count - indent - 3 in + let lines = + (String.sub [@doesNotRaise]) src start_line_line_offset + (end_line_line_end_offset - start_line_line_offset) + |> String.split_on_char '\n' + |> filter_mapi (fun i line -> + let line_number = i + first_shown_line in + if more_than_5_highlighted_lines then + if line_number = highlight_line_start_line + 2 then + Some (Elided, line) + else if + line_number > highlight_line_start_line + 2 + && line_number < highlight_line_end_line - 1 + then None + else Some (Number line_number, line) + else Some (Number line_number, line)) + in + let leading_space_to_cut = + lines + |> List.fold_left + (fun current_max (_, line) -> + let leading_spaces = leading_space_count line in + if String.length line = leading_spaces then + (* the line's nothing but spaces. Doesn't count *) + current_max + else min leading_spaces current_max) + 99999 + in + let separator = if leading_space_to_cut = 0 then "│" else "┆" in + let stripped_lines = + lines + |> List.map (fun (gutter, line) -> + let new_content = + if String.length line <= leading_space_to_cut then + [{s = ""; start = 0; end_ = 0}] + else + (String.sub [@doesNotRaise]) line leading_space_to_cut + (String.length line - leading_space_to_cut) + |> break_long_line line_width + |> List.mapi (fun i line -> + match gutter with + | Elided -> {s = line; start = 0; end_ = 0} + | Number line_number -> + let highlight_line_start_offset = + startPos.pos_cnum - startPos.pos_bol + in + let highlight_line_end_offset = + endPos.pos_cnum - endPos.pos_bol + in + let start = + if i = 0 && line_number = highlight_line_start_line + then + highlight_line_start_offset - leading_space_to_cut + else 0 + in + let end_ = + if line_number < highlight_line_start_line then 0 + else if + line_number = highlight_line_start_line + && line_number = highlight_line_end_line + then + highlight_line_end_offset - leading_space_to_cut + else if line_number = highlight_line_start_line then + String.length line + else if + line_number > highlight_line_start_line + && line_number < highlight_line_end_line + then String.length line + else if line_number = highlight_line_end_line then + highlight_line_end_offset - leading_space_to_cut + else 0 + in + {s = line; start; end_}) + in + {gutter; content = new_content}) + in + let buf = Buffer.create 100 in + let open Color in + let add_ch = + let last_color = ref NoColor in + fun color ch -> + if (not !Color.color_enabled) || !last_color = color then + Buffer.add_char buf ch + else + let ansi = + match (!last_color, color) with + | NoColor, Dim -> dim + (* | NoColor, Filename -> filename *) + | NoColor, Err -> err + | NoColor, Warn -> warn + | _, NoColor -> reset + | _, Dim -> reset ^ dim + (* | _, Filename -> reset ^ filename *) + | _, Err -> reset ^ err + | _, Warn -> reset ^ warn + in + Buffer.add_string buf ansi; + Buffer.add_char buf ch; + last_color := color + in + let draw_gutter color s = + for _i = 1 to max_line_digits_count + indent - String.length s do + add_ch NoColor ' ' + done; + s |> String.iter (add_ch color); + add_ch NoColor ' '; + separator |> String.iter (add_ch Dim); + add_ch NoColor ' ' + in + stripped_lines + |> 
List.iter (fun {gutter; content} -> + match gutter with + | Elided -> + draw_gutter Dim "."; + add_ch Dim '.'; + add_ch Dim '.'; + add_ch Dim '.'; + add_ch NoColor '\n' + | Number line_number -> + content + |> List.iteri (fun i line -> + let gutter_content = + if i = 0 then string_of_int line_number else "" + in + let gutter_color = + if + i = 0 + && line_number >= highlight_line_start_line + && line_number <= highlight_line_end_line + then if is_warning then Warn else Err + else NoColor + in + draw_gutter gutter_color gutter_content; + + line.s + |> String.iteri (fun ii ch -> + let c = + if ii >= line.start && ii < line.end_ then + if is_warning then Warn else Err + else NoColor + in + add_ch c ch); + add_ch NoColor '\n')); + Buffer.contents buf +end + +(* ===== super_location.ml *) +module Super_location = struct + let fprintf = Format.fprintf + + let setup_colors () = + Misc.Color.setup !Clflags.color; + Super_code_frame.setup !Clflags.color + + let print_filename = Location.print_filename + + let print_loc ~normalizedRange ppf (loc : Location.t) = + setup_colors (); + let dim_loc ppf = function + | None -> () + | Some ((start_line, start_line_start_char), (end_line, end_line_end_char)) + -> + if start_line = end_line then + if start_line_start_char = end_line_end_char then + fprintf ppf ":@{%i:%i@}" start_line start_line_start_char + else + fprintf ppf ":@{%i:%i-%i@}" start_line start_line_start_char + end_line_end_char + else + fprintf ppf ":@{%i:%i-%i:%i@}" start_line start_line_start_char + end_line end_line_end_char + in + fprintf ppf "@{%a@}%a" print_filename loc.loc_start.pos_fname + dim_loc normalizedRange + + (* let print ~message_kind intro ppf (loc : Location.t) = *) + let print ~message_kind intro src ppf (loc : Location.t) = + (match message_kind with + | `warning -> fprintf ppf "@[@{%s@}@]@," intro + | `warning_as_error -> + fprintf ppf "@[@{%s@} (configured as error) @]@," intro + | `error -> fprintf ppf "@[@{%s@}@]@," intro); + (* ocaml's reported line/col numbering is horrible and super error-prone + when being handled programmatically (or humanly for that matter. If you're + an ocaml contributor reading this: who the heck reads the character count + starting from the first erroring character?) *) + (* let (file, start_line, start_char) = Location.get_pos_info loc.loc_start in *) + let _file, start_line, start_char = Location.get_pos_info loc.loc_start in + let _, end_line, end_char = Location.get_pos_info loc.loc_end in + (* line is 1-indexed, column is 0-indexed. We convert all of them to 1-indexed to avoid confusion *) + (* start_char is inclusive, end_char is exclusive *) + let normalizedRange = + (* TODO: lots of the handlings here aren't needed anymore because the new + rescript syntax has much stronger invariants regarding positions, e.g. + no -1 *) + if start_char == -1 || end_char == -1 then + (* happens sometimes. Syntax error for example *) + None + else if start_line = end_line && start_char >= end_char then + (* in some errors, starting char and ending char can be the same. 
But + since ending char was supposed to be exclusive, here it might end up + smaller than the starting char if we naively did start_char + 1 to + just the starting char and forget ending char *) + let same_char = start_char + 1 in + Some ((start_line, same_char), (end_line, same_char)) + else + (* again: end_char is exclusive, so +1-1=0 *) + Some ((start_line, start_char + 1), (end_line, end_char)) + in + fprintf ppf " @[%a@]@," (print_loc ~normalizedRange) loc; + match normalizedRange with + | None -> () + | Some _ -> ( + try + (* let src = Ext_io.load_file file in *) + (* we're putting the line break `@,` here rather than above, because this + branch might not be reached (aka no inline file content display) so + we don't wanna end up with two line breaks in the the consequent *) + fprintf ppf "@,%s" + (Super_code_frame.print ~is_warning:(message_kind = `warning) ~src + ~startPos:loc.loc_start ~endPos:loc.loc_end) + with + (* this might happen if the file is e.g. "", "_none_" or any of the fake file name placeholders. + we've already printed the location above, so nothing more to do here. *) + | Sys_error _ -> + ()) + + (* taken from https://github.com/rescript-lang/ocaml/blob/d4144647d1bf9bc7dc3aadc24c25a7efa3a67915/parsing/location.ml#L380 *) + (* This is the error report entry point. We'll replace the default reporter with this one. *) + (* let rec super_error_reporter ppf ({loc; msg; sub} : Location.error) = *) + let super_error_reporter ppf src ({loc; msg} : Location.error) = + setup_colors (); + (* open a vertical box. Everything in our message is indented 2 spaces *) + (* Format.fprintf ppf "@[@, %a@, %s@,@]" (print ~message_kind:`error "We've found a bug for you!") src loc msg; *) + Format.fprintf ppf "@[@, %a@, %s@,@]" + (print ~message_kind:`error "Syntax error!" src) + loc msg + (* List.iter (Format.fprintf ppf "@,@[%a@]" super_error_reporter) sub *) + (* no need to flush here; location's report_exception (which uses this ultimately) flushes *) +end diff --git a/res_syntax/src/res_doc.ml b/res_syntax/src/res_doc.ml new file mode 100644 index 0000000000..125ac77254 --- /dev/null +++ b/res_syntax/src/res_doc.ml @@ -0,0 +1,350 @@ +module MiniBuffer = Res_minibuffer + +type mode = Break | Flat + +type lineStyle = + | Classic (* fits? -> replace with space *) + | Soft (* fits? -> replaced with nothing *) + | Hard + (* always included, forces breaks in parents *) + (* always included, forces breaks in parents, but doesn't increase indentation + use case: template literals, multiline string content *) + | Literal + +type t = + | Nil + | Text of string + | Concat of t list + | Indent of t + | IfBreaks of {yes: t; no: t; mutable broken: bool} + (* when broken is true, treat as the yes branch *) + | LineSuffix of t + | LineBreak of lineStyle + | Group of {mutable shouldBreak: bool; doc: t} + | CustomLayout of t list + | BreakParent + +let nil = Nil +let line = LineBreak Classic +let hardLine = LineBreak Hard +let softLine = LineBreak Soft +let literalLine = LineBreak Literal +let text s = Text s + +(* Optimization. 
We eagerly collapse and reduce whatever allocation we can *) +let rec _concat acc l = + match l with + | Text s1 :: Text s2 :: rest -> Text (s1 ^ s2) :: _concat acc rest + | Nil :: rest -> _concat acc rest + | Concat l2 :: rest -> + _concat (_concat acc rest) l2 (* notice the order here *) + | x :: rest -> + let rest1 = _concat acc rest in + if rest1 == rest then l else x :: rest1 + | [] -> acc + +let concat l = Concat (_concat [] l) + +let indent d = Indent d +let ifBreaks t f = IfBreaks {yes = t; no = f; broken = false} +let lineSuffix d = LineSuffix d +let group d = Group {shouldBreak = false; doc = d} +let breakableGroup ~forceBreak d = Group {shouldBreak = forceBreak; doc = d} +let customLayout gs = CustomLayout gs +let breakParent = BreakParent + +let space = Text " " +let comma = Text "," +let dot = Text "." +let dotdot = Text ".." +let dotdotdot = Text "..." +let lessThan = Text "<" +let greaterThan = Text ">" +let lbrace = Text "{" +let rbrace = Text "}" +let lparen = Text "(" +let rparen = Text ")" +let lbracket = Text "[" +let rbracket = Text "]" +let question = Text "?" +let tilde = Text "~" +let equal = Text "=" +let trailingComma = ifBreaks comma nil +let doubleQuote = Text "\"" + +let propagateForcedBreaks doc = + let rec walk doc = + match doc with + | Text _ | Nil | LineSuffix _ -> false + | BreakParent -> true + | LineBreak (Hard | Literal) -> true + | LineBreak (Classic | Soft) -> false + | Indent children -> + let childForcesBreak = walk children in + childForcesBreak + | IfBreaks ({yes = trueDoc; no = falseDoc} as ib) -> + let falseForceBreak = walk falseDoc in + if falseForceBreak then ( + let _ = walk trueDoc in + ib.broken <- true; + true) + else + let forceBreak = walk trueDoc in + forceBreak + | Group ({shouldBreak = forceBreak; doc = children} as gr) -> + let childForcesBreak = walk children in + let shouldBreak = forceBreak || childForcesBreak in + gr.shouldBreak <- shouldBreak; + shouldBreak + | Concat children -> + List.fold_left + (fun forceBreak child -> + let childForcesBreak = walk child in + forceBreak || childForcesBreak) + false children + | CustomLayout children -> + (* When using CustomLayout, we don't want to propagate forced breaks + * from the children up. By definition it picks the first layout that fits + * otherwise it takes the last of the list. + * However we do want to propagate forced breaks in the sublayouts. They + * might need to be broken. 
We just don't propagate them any higher here *) + let _ = walk (Concat children) in + false + in + let _ = walk doc in + () + +(* See documentation in interface file *) +let rec willBreak doc = + match doc with + | LineBreak (Hard | Literal) | BreakParent | Group {shouldBreak = true} -> + true + | Group {doc} | Indent doc | CustomLayout (doc :: _) -> willBreak doc + | Concat docs -> List.exists willBreak docs + | IfBreaks {yes; no} -> willBreak yes || willBreak no + | _ -> false + +let join ~sep docs = + let rec loop acc sep docs = + match docs with + | [] -> List.rev acc + | [x] -> List.rev (x :: acc) + | x :: xs -> loop (sep :: x :: acc) sep xs + in + concat (loop [] sep docs) + +let joinWithSep docsWithSep = + let rec loop acc docs = + match docs with + | [] -> List.rev acc + | [(x, _sep)] -> List.rev (x :: acc) + | (x, sep) :: xs -> loop (sep :: x :: acc) xs + in + concat (loop [] docsWithSep) + +let fits w stack = + let width = ref w in + let result = ref None in + + let rec calculate indent mode doc = + match (mode, doc) with + | _ when result.contents != None -> () + | _ when width.contents < 0 -> result := Some false + | _, Nil | _, LineSuffix _ | _, BreakParent -> () + | _, Text txt -> width := width.contents - String.length txt + | _, Indent doc -> calculate (indent + 2) mode doc + | Flat, LineBreak Hard | Flat, LineBreak Literal -> result := Some true + | Flat, LineBreak Classic -> width := width.contents - 1 + | Flat, LineBreak Soft -> () + | Break, LineBreak _ -> result := Some true + | _, Group {shouldBreak = true; doc} -> calculate indent Break doc + | _, Group {doc} -> calculate indent mode doc + | _, IfBreaks {yes = breakDoc; broken = true} -> + calculate indent mode breakDoc + | Break, IfBreaks {yes = breakDoc} -> calculate indent mode breakDoc + | Flat, IfBreaks {no = flatDoc} -> calculate indent mode flatDoc + | _, Concat docs -> calculateConcat indent mode docs + | _, CustomLayout (hd :: _) -> + (* TODO: if we have nested custom layouts, what we should do here? 
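To make the `CustomLayout` behaviour concrete: at render time the first alternative that fits the remaining width wins, otherwise the last one is used. A small illustration (not part of the patch), using `toString`, which is defined just below:

```ocaml
let () =
  let open Res_doc in
  let oneLine = text "f(a, b)" in
  let broken =
    concat
      [text "f("; hardLine; text "  a,"; hardLine; text "  b,"; hardLine; text ")"]
  in
  (* plenty of room: the flat alternative fits, prints "f(a, b)" *)
  print_endline (toString ~width:80 (customLayout [oneLine; broken]));
  (* too narrow: falls back to the last (broken) alternative *)
  print_endline (toString ~width:4 (customLayout [oneLine; broken]))
```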
*) + calculate indent mode hd + | _, CustomLayout [] -> () + and calculateConcat indent mode docs = + if result.contents == None then + match docs with + | [] -> () + | doc :: rest -> + calculate indent mode doc; + calculateConcat indent mode rest + in + let rec calculateAll stack = + match (result.contents, stack) with + | Some r, _ -> r + | None, [] -> !width >= 0 + | None, (indent, mode, doc) :: rest -> + calculate indent mode doc; + calculateAll rest + in + calculateAll stack + +let toString ~width doc = + propagateForcedBreaks doc; + let buffer = MiniBuffer.create 1000 in + + let rec process ~pos lineSuffices stack = + match stack with + | ((ind, mode, doc) as cmd) :: rest -> ( + match doc with + | Nil | BreakParent -> process ~pos lineSuffices rest + | Text txt -> + MiniBuffer.add_string buffer txt; + process ~pos:(String.length txt + pos) lineSuffices rest + | LineSuffix doc -> process ~pos ((ind, mode, doc) :: lineSuffices) rest + | Concat docs -> + let ops = List.map (fun doc -> (ind, mode, doc)) docs in + process ~pos lineSuffices (List.append ops rest) + | Indent doc -> process ~pos lineSuffices ((ind + 2, mode, doc) :: rest) + | IfBreaks {yes = breakDoc; broken = true} -> + process ~pos lineSuffices ((ind, mode, breakDoc) :: rest) + | IfBreaks {yes = breakDoc; no = flatDoc} -> + if mode = Break then + process ~pos lineSuffices ((ind, mode, breakDoc) :: rest) + else process ~pos lineSuffices ((ind, mode, flatDoc) :: rest) + | LineBreak lineStyle -> + if mode = Break then + match lineSuffices with + | [] -> + if lineStyle = Literal then ( + MiniBuffer.add_char buffer '\n'; + process ~pos:0 [] rest) + else ( + MiniBuffer.flush_newline buffer; + MiniBuffer.add_string buffer (String.make ind ' ' [@doesNotRaise]); + process ~pos:ind [] rest) + | _docs -> + process ~pos:ind [] + (List.concat [List.rev lineSuffices; cmd :: rest]) + else + (* mode = Flat *) + let pos = + match lineStyle with + | Classic -> + MiniBuffer.add_string buffer " "; + pos + 1 + | Hard -> + MiniBuffer.flush_newline buffer; + 0 + | Literal -> + MiniBuffer.add_char buffer '\n'; + 0 + | Soft -> pos + in + process ~pos lineSuffices rest + | Group {shouldBreak; doc} -> + if shouldBreak || not (fits (width - pos) ((ind, Flat, doc) :: rest)) + then process ~pos lineSuffices ((ind, Break, doc) :: rest) + else process ~pos lineSuffices ((ind, Flat, doc) :: rest) + | CustomLayout docs -> + let rec findGroupThatFits groups = + match groups with + | [] -> Nil + | [lastGroup] -> lastGroup + | doc :: docs -> + if fits (width - pos) ((ind, Flat, doc) :: rest) then doc + else findGroupThatFits docs + in + let doc = findGroupThatFits docs in + process ~pos lineSuffices ((ind, Flat, doc) :: rest)) + | [] -> ( + match lineSuffices with + | [] -> () + | suffices -> process ~pos:0 [] (List.rev suffices)) + in + process ~pos:0 [] [(0, Flat, doc)]; + MiniBuffer.contents buffer + +let debug t = + let rec toDoc = function + | Nil -> text "nil" + | BreakParent -> text "breakparent" + | Text txt -> text ("text(\"" ^ txt ^ "\")") + | LineSuffix doc -> + group + (concat + [ + text "linesuffix("; + indent (concat [line; toDoc doc]); + line; + text ")"; + ]) + | Concat [] -> text "concat()" + | Concat docs -> + group + (concat + [ + text "concat("; + indent + (concat + [ + line; + join ~sep:(concat [text ","; line]) (List.map toDoc docs); + ]); + line; + text ")"; + ]) + | CustomLayout docs -> + group + (concat + [ + text "customLayout("; + indent + (concat + [ + line; + join ~sep:(concat [text ","; line]) (List.map toDoc docs); + ]); + line; 
+ text ")"; + ]) + | Indent doc -> + concat [text "indent("; softLine; toDoc doc; softLine; text ")"] + | IfBreaks {yes = trueDoc; broken = true} -> toDoc trueDoc + | IfBreaks {yes = trueDoc; no = falseDoc} -> + group + (concat + [ + text "ifBreaks("; + indent + (concat + [line; toDoc trueDoc; concat [text ","; line]; toDoc falseDoc]); + line; + text ")"; + ]) + | LineBreak break -> + let breakTxt = + match break with + | Classic -> "Classic" + | Soft -> "Soft" + | Hard -> "Hard" + | Literal -> "Liteal" + in + text ("LineBreak(" ^ breakTxt ^ ")") + | Group {shouldBreak; doc} -> + group + (concat + [ + text "Group("; + indent + (concat + [ + line; + text ("{shouldBreak: " ^ string_of_bool shouldBreak ^ "}"); + concat [text ","; line]; + toDoc doc; + ]); + line; + text ")"; + ]) + in + let doc = toDoc t in + toString ~width:10 doc |> print_endline + [@@live] diff --git a/res_syntax/src/res_doc.mli b/res_syntax/src/res_doc.mli new file mode 100644 index 0000000000..f1a0c6ea6a --- /dev/null +++ b/res_syntax/src/res_doc.mli @@ -0,0 +1,67 @@ +type t + +val nil : t +val line : t +val hardLine : t +val softLine : t +val literalLine : t +val text : string -> t +val concat : t list -> t +val indent : t -> t +val ifBreaks : t -> t -> t +val lineSuffix : t -> t +val group : t -> t +val breakableGroup : forceBreak:bool -> t -> t + +(* `customLayout docs` will pick the layout that fits from `docs`. + * This is a very expensive computation as every layout from the list + * will be checked until one fits. *) +val customLayout : t list -> t +val breakParent : t +val join : sep:t -> t list -> t + +(* [(doc1, sep1); (doc2,sep2)] joins as doc1 sep1 doc2 *) +val joinWithSep : (t * t) list -> t + +val space : t +val comma : t +val dot : t +val dotdot : t +val dotdotdot : t +val lessThan : t +val greaterThan : t +val lbrace : t +val rbrace : t +val lparen : t +val rparen : t +val lbracket : t +val rbracket : t +val question : t +val tilde : t +val equal : t +val trailingComma : t +val doubleQuote : t [@@live] + +(* + * `willBreak doc` checks whether `doc` contains forced line breaks. + * This is more or less a "workaround" to make the parent of a `customLayout` break. + * Forced breaks are not propagated through `customLayout`; otherwise we would always + * get the last layout the algorithm tries… + * This might result into some weird layouts: + * [fn(x => { + * let _ = x + * }), fn(y => { + * let _ = y + * }), fn(z => { + * let _ = z + * })] + * The `[` and `]` would be a lot better broken out. + * Although the layout of `fn(x => {...})` is correct, we need to break its parent (the array). + * `willBreak` can be used in this scenario to check if the `fn…` contains any forced breaks. + * The consumer can then manually insert a `breakParent` doc, to manually propagate the + * force breaks from bottom to top. 
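A minimal sketch (not part of the patch) of composing the primitives declared above; the same document renders flat or broken depending on the width handed to `toString`:

```ocaml
let doc =
  let open Res_doc in
  group
    (concat
       [text "let add = (x, y) =>"; indent (concat [line; text "x + y"])])

let () =
  (* fits on one line: prints "let add = (x, y) => x + y" *)
  print_endline (Res_doc.toString ~width:80 doc);
  (* does not fit: breaks after "=>" and indents the body by two spaces *)
  print_endline (Res_doc.toString ~width:10 doc)
```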
+ *) +val willBreak : t -> bool + +val toString : width:int -> t -> string +val debug : t -> unit [@@live] diff --git a/res_syntax/src/res_driver.ml b/res_syntax/src/res_driver.ml new file mode 100644 index 0000000000..c7c722d9c7 --- /dev/null +++ b/res_syntax/src/res_driver.ml @@ -0,0 +1,154 @@ +module IO = Res_io + +type ('ast, 'diagnostics) parseResult = { + filename: string; [@live] + source: string; + parsetree: 'ast; + diagnostics: 'diagnostics; + invalid: bool; + comments: Res_comment.t list; +} + +type 'diagnostics parsingEngine = { + parseImplementation: + forPrinter:bool -> + filename:string -> + (Parsetree.structure, 'diagnostics) parseResult; + parseInterface: + forPrinter:bool -> + filename:string -> + (Parsetree.signature, 'diagnostics) parseResult; + stringOfDiagnostics: source:string -> filename:string -> 'diagnostics -> unit; +} + +type printEngine = { + printImplementation: + width:int -> + filename:string -> + comments:Res_comment.t list -> + Parsetree.structure -> + unit; + printInterface: + width:int -> + filename:string -> + comments:Res_comment.t list -> + Parsetree.signature -> + unit; +} + +let setup ~filename ~forPrinter () = + let src = IO.readFile ~filename in + let mode = if forPrinter then Res_parser.Default else ParseForTypeChecker in + Res_parser.make ~mode src filename + +let setupFromSource ~displayFilename ~source ~forPrinter () = + let mode = if forPrinter then Res_parser.Default else ParseForTypeChecker in + Res_parser.make ~mode source displayFilename + +let parsingEngine = + { + parseImplementation = + (fun ~forPrinter ~filename -> + let engine = setup ~filename ~forPrinter () in + let structure = Res_core.parseImplementation engine in + let invalid, diagnostics = + match engine.diagnostics with + | [] as diagnostics -> (false, diagnostics) + | _ as diagnostics -> (true, diagnostics) + in + { + filename = engine.scanner.filename; + source = engine.scanner.src; + parsetree = structure; + diagnostics; + invalid; + comments = List.rev engine.comments; + }); + parseInterface = + (fun ~forPrinter ~filename -> + let engine = setup ~filename ~forPrinter () in + let signature = Res_core.parseSpecification engine in + let invalid, diagnostics = + match engine.diagnostics with + | [] as diagnostics -> (false, diagnostics) + | _ as diagnostics -> (true, diagnostics) + in + { + filename = engine.scanner.filename; + source = engine.scanner.src; + parsetree = signature; + diagnostics; + invalid; + comments = List.rev engine.comments; + }); + stringOfDiagnostics = + (fun ~source ~filename:_ diagnostics -> + Res_diagnostics.printReport diagnostics source); + } + +let parseImplementationFromSource ~forPrinter ~displayFilename ~source = + let engine = setupFromSource ~displayFilename ~source ~forPrinter () in + let structure = Res_core.parseImplementation engine in + let invalid, diagnostics = + match engine.diagnostics with + | [] as diagnostics -> (false, diagnostics) + | _ as diagnostics -> (true, diagnostics) + in + { + filename = engine.scanner.filename; + source = engine.scanner.src; + parsetree = structure; + diagnostics; + invalid; + comments = List.rev engine.comments; + } + +let parseInterfaceFromSource ~forPrinter ~displayFilename ~source = + let engine = setupFromSource ~displayFilename ~source ~forPrinter () in + let signature = Res_core.parseSpecification engine in + let invalid, diagnostics = + match engine.diagnostics with + | [] as diagnostics -> (false, diagnostics) + | _ as diagnostics -> (true, diagnostics) + in + { + filename = 
engine.scanner.filename; + source = engine.scanner.src; + parsetree = signature; + diagnostics; + invalid; + comments = List.rev engine.comments; + } + +let printEngine = + { + printImplementation = + (fun ~width ~filename:_ ~comments structure -> + print_string + (Res_printer.printImplementation ~width structure ~comments)); + printInterface = + (fun ~width ~filename:_ ~comments signature -> + print_string (Res_printer.printInterface ~width signature ~comments)); + } + +let parse_implementation sourcefile = + Location.input_name := sourcefile; + let parseResult = + parsingEngine.parseImplementation ~forPrinter:false ~filename:sourcefile + in + if parseResult.invalid then ( + Res_diagnostics.printReport parseResult.diagnostics parseResult.source; + exit 1); + parseResult.parsetree + [@@raises exit] + +let parse_interface sourcefile = + Location.input_name := sourcefile; + let parseResult = + parsingEngine.parseInterface ~forPrinter:false ~filename:sourcefile + in + if parseResult.invalid then ( + Res_diagnostics.printReport parseResult.diagnostics parseResult.source; + exit 1); + parseResult.parsetree + [@@raises exit] diff --git a/res_syntax/src/res_driver.mli b/res_syntax/src/res_driver.mli new file mode 100644 index 0000000000..fe44722a62 --- /dev/null +++ b/res_syntax/src/res_driver.mli @@ -0,0 +1,61 @@ +type ('ast, 'diagnostics) parseResult = { + filename: string; [@live] + source: string; + parsetree: 'ast; + diagnostics: 'diagnostics; + invalid: bool; + comments: Res_comment.t list; +} + +type 'diagnostics parsingEngine = { + parseImplementation: + forPrinter:bool -> + filename:string -> + (Parsetree.structure, 'diagnostics) parseResult; + parseInterface: + forPrinter:bool -> + filename:string -> + (Parsetree.signature, 'diagnostics) parseResult; + stringOfDiagnostics: source:string -> filename:string -> 'diagnostics -> unit; +} + +val parseImplementationFromSource : + forPrinter:bool -> + displayFilename:string -> + source:string -> + (Parsetree.structure, Res_diagnostics.t list) parseResult + [@@live] + +val parseInterfaceFromSource : + forPrinter:bool -> + displayFilename:string -> + source:string -> + (Parsetree.signature, Res_diagnostics.t list) parseResult + [@@live] + +type printEngine = { + printImplementation: + width:int -> + filename:string -> + comments:Res_comment.t list -> + Parsetree.structure -> + unit; + printInterface: + width:int -> + filename:string -> + comments:Res_comment.t list -> + Parsetree.signature -> + unit; +} + +val parsingEngine : Res_diagnostics.t list parsingEngine + +val printEngine : printEngine + +(* ReScript implementation parsing compatible with ocaml pparse driver. Used by the compiler. *) +val parse_implementation : string -> Parsetree.structure + [@@live] [@@raises Location.Error] + +(* ReScript interface parsing compatible with ocaml pparse driver. Used by the compiler *) +val parse_interface : string -> Parsetree.signature + [@@live] [@@raises Location.Error] diff --git a/res_syntax/src/res_driver_binary.ml b/res_syntax/src/res_driver_binary.ml new file mode 100644 index 0000000000..58a8153630 --- /dev/null +++ b/res_syntax/src/res_driver_binary.ml @@ -0,0 +1,14 @@ +let printEngine = + Res_driver. 
+ { + printImplementation = + (fun ~width:_ ~filename ~comments:_ structure -> + output_string stdout Config.ast_impl_magic_number; + output_value stdout filename; + output_value stdout structure); + printInterface = + (fun ~width:_ ~filename ~comments:_ signature -> + output_string stdout Config.ast_intf_magic_number; + output_value stdout filename; + output_value stdout signature); + } diff --git a/res_syntax/src/res_driver_binary.mli b/res_syntax/src/res_driver_binary.mli new file mode 100644 index 0000000000..7991ba8db3 --- /dev/null +++ b/res_syntax/src/res_driver_binary.mli @@ -0,0 +1 @@ +val printEngine : Res_driver.printEngine diff --git a/res_syntax/src/res_driver_ml_parser.ml b/res_syntax/src/res_driver_ml_parser.ml new file mode 100644 index 0000000000..0d6a99e9ae --- /dev/null +++ b/res_syntax/src/res_driver_ml_parser.ml @@ -0,0 +1,100 @@ +module OcamlParser = Parser +module IO = Res_io + +let setup ~filename = + if String.length filename > 0 then ( + Location.input_name := filename; + IO.readFile ~filename |> Lexing.from_string) + else Lexing.from_channel stdin + +let extractOcamlConcreteSyntax filename = + let lexbuf = + if String.length filename > 0 then + IO.readFile ~filename |> Lexing.from_string + else Lexing.from_channel stdin + in + let stringLocs = ref [] in + let commentData = ref [] in + let rec next (prevTokEndPos : Lexing.position) () = + let token = Lexer.token_with_comments lexbuf in + match token with + | OcamlParser.COMMENT (txt, loc) -> + let comment = Res_comment.fromOcamlComment ~loc ~prevTokEndPos ~txt in + commentData := comment :: !commentData; + next loc.Location.loc_end () + | OcamlParser.STRING (_txt, None) -> + let open Location in + let loc = + { + loc_start = lexbuf.lex_start_p; + loc_end = lexbuf.Lexing.lex_curr_p; + loc_ghost = false; + } + in + let len = loc.loc_end.pos_cnum - loc.loc_start.pos_cnum in + let txt = + Bytes.to_string + ((Bytes.sub [@doesNotRaise]) lexbuf.Lexing.lex_buffer + loc.loc_start.pos_cnum len) + in + stringLocs := (txt, loc) :: !stringLocs; + next lexbuf.Lexing.lex_curr_p () + | OcamlParser.EOF -> () + | _ -> next lexbuf.Lexing.lex_curr_p () + in + next lexbuf.Lexing.lex_start_p (); + (List.rev !stringLocs, List.rev !commentData) + +let parsingEngine = + { + Res_driver.parseImplementation = + (fun ~forPrinter:_ ~filename -> + let lexbuf = setup ~filename in + let stringData, comments = + extractOcamlConcreteSyntax !Location.input_name + in + let structure = + Parse.implementation lexbuf + |> Res_ast_conversion.replaceStringLiteralStructure stringData + |> Res_ast_conversion.structure + in + { + filename = !Location.input_name; + source = Bytes.to_string lexbuf.lex_buffer; + parsetree = structure; + diagnostics = (); + invalid = false; + comments; + }); + parseInterface = + (fun ~forPrinter:_ ~filename -> + let lexbuf = setup ~filename in + let stringData, comments = + extractOcamlConcreteSyntax !Location.input_name + in + let signature = + Parse.interface lexbuf + |> Res_ast_conversion.replaceStringLiteralSignature stringData + |> Res_ast_conversion.signature + in + { + filename = !Location.input_name; + source = Bytes.to_string lexbuf.lex_buffer; + parsetree = signature; + diagnostics = (); + invalid = false; + comments; + }); + stringOfDiagnostics = (fun ~source:_ ~filename:_ _diagnostics -> ()); + } + +let printEngine = + Res_driver. 
+ { + printImplementation = + (fun ~width:_ ~filename:_ ~comments:_ structure -> + Pprintast.structure Format.std_formatter structure); + printInterface = + (fun ~width:_ ~filename:_ ~comments:_ signature -> + Pprintast.signature Format.std_formatter signature); + } diff --git a/res_syntax/src/res_driver_ml_parser.mli b/res_syntax/src/res_driver_ml_parser.mli new file mode 100644 index 0000000000..63ea8f81f4 --- /dev/null +++ b/res_syntax/src/res_driver_ml_parser.mli @@ -0,0 +1,10 @@ +(* This module represents a general interface to parse marshalled reason ast *) + +(* extracts comments and the original string data from an ocaml file *) +val extractOcamlConcreteSyntax : + string -> (string * Location.t) list * Res_comment.t list + [@@live] + +val parsingEngine : unit Res_driver.parsingEngine + +val printEngine : Res_driver.printEngine diff --git a/res_syntax/src/res_driver_reason_binary.ml b/res_syntax/src/res_driver_reason_binary.ml new file mode 100644 index 0000000000..0882a39bc1 --- /dev/null +++ b/res_syntax/src/res_driver_reason_binary.ml @@ -0,0 +1,119 @@ +module IO = Res_io + +let isReasonDocComment (comment : Res_comment.t) = + let content = Res_comment.txt comment in + let len = String.length content in + if len = 0 then true + else if + len >= 2 + && String.unsafe_get content 0 = '*' + && String.unsafe_get content 1 = '*' + then false + else if len >= 1 && String.unsafe_get content 0 = '*' then true + else false + +let extractConcreteSyntax filename = + let commentData = ref [] in + let stringData = ref [] in + let src = IO.readFile ~filename in + let scanner = Res_scanner.make src ~filename in + + let rec next prevEndPos scanner = + let startPos, endPos, token = Res_scanner.scan scanner in + match token with + | Eof -> () + | Comment c -> + Res_comment.setPrevTokEndPos c prevEndPos; + commentData := c :: !commentData; + next endPos scanner + | String _ -> + let loc = + {Location.loc_start = startPos; loc_end = endPos; loc_ghost = false} + in + let len = endPos.pos_cnum - startPos.pos_cnum in + let txt = (String.sub [@doesNotRaise]) src startPos.pos_cnum len in + stringData := (txt, loc) :: !stringData; + next endPos scanner + | Lbrace -> + (* handle {| |} or {sql||sql} quoted strings. We don't care about its contents. + Why? 
// abcdef inside the quoted string would otherwise be picked up as an extra comment *) + Res_scanner.tryAdvanceQuotedString scanner; + next endPos scanner + | _ -> next endPos scanner + in + next Lexing.dummy_pos scanner; + let comments = + !commentData + |> List.filter (fun c -> not (isReasonDocComment c)) + |> List.rev + in + (comments, !stringData) + +let parsingEngine = + { + Res_driver.parseImplementation = + (fun ~forPrinter:_ ~filename -> + let chan, close = + if String.length filename == 0 then (stdin, fun _ -> ()) + else + let file_chan = open_in_bin filename in + let () = seek_in file_chan 0 in + (file_chan, close_in_noerr) + in + let magic = Config.ast_impl_magic_number in + ignore + ((really_input_string [@doesNotRaise]) chan (String.length magic)); + let filename = input_value chan in + let comments, stringData = + if filename <> "" then extractConcreteSyntax filename else ([], []) + in + let ast = input_value chan in + close chan; + let structure = + ast + |> Res_ast_conversion.replaceStringLiteralStructure stringData + |> Res_ast_conversion.normalizeReasonArityStructure ~forPrinter:true + |> Res_ast_conversion.structure + in + { + Res_driver.filename; + source = ""; + parsetree = structure; + diagnostics = (); + invalid = false; + comments; + }); + parseInterface = + (fun ~forPrinter:_ ~filename -> + let chan, close = + if String.length filename == 0 then (stdin, fun _ -> ()) + else + let file_chan = open_in_bin filename in + let () = seek_in file_chan 0 in + (file_chan, close_in_noerr) + in + let magic = Config.ast_intf_magic_number in + ignore + ((really_input_string [@doesNotRaise]) chan (String.length magic)); + let filename = input_value chan in + let comments, stringData = + if filename <> "" then extractConcreteSyntax filename else ([], []) + in + let ast = input_value chan in + close chan; + let signature = + ast + |> Res_ast_conversion.replaceStringLiteralSignature stringData + |> Res_ast_conversion.normalizeReasonAritySignature ~forPrinter:true + |> Res_ast_conversion.signature + in + { + Res_driver.filename; + source = ""; + parsetree = signature; + diagnostics = (); + invalid = false; + comments; + }); + stringOfDiagnostics = (fun ~source:_ ~filename:_ _diagnostics -> ()); + } diff --git a/res_syntax/src/res_driver_reason_binary.mli b/res_syntax/src/res_driver_reason_binary.mli new file mode 100644 index 0000000000..bccfc4c195 --- /dev/null +++ b/res_syntax/src/res_driver_reason_binary.mli @@ -0,0 +1,3 @@ +(* This module represents a general interface to parse marshalled reason ast *) + +val parsingEngine : unit Res_driver.parsingEngine diff --git a/res_syntax/src/res_grammar.ml b/res_syntax/src/res_grammar.ml new file mode 100644 index 0000000000..cba9b4bdee --- /dev/null +++ b/res_syntax/src/res_grammar.ml @@ -0,0 +1,325 @@ +module Token = Res_token + +type t = + | OpenDescription (* open Belt *) + | ModuleLongIdent (* Foo or Foo.Bar *) [@live] + | Ternary (* condExpr ? 
trueExpr : falseExpr *) + | Es6ArrowExpr + | Jsx + | JsxAttribute + | JsxChild [@live] + | ExprOperand + | ExprUnary + | ExprSetField + | ExprBinaryAfterOp of Token.t + | ExprBlock + | ExprCall + | ExprList + | ExprArrayAccess + | ExprArrayMutation + | ExprIf + | ExprFor + | IfCondition + | IfBranch + | ElseBranch + | TypeExpression + | External + | PatternMatching + | PatternMatchCase + | LetBinding + | PatternList + | PatternOcamlList + | PatternRecord + | TypeDef + | TypeConstrName + | TypeParams + | TypeParam [@live] + | PackageConstraint + | TypeRepresentation + | RecordDecl + | ConstructorDeclaration + | ParameterList + | StringFieldDeclarations + | FieldDeclarations + | TypExprList + | FunctorArgs + | ModExprList + | TypeParameters + | RecordRows + | RecordRowsStringKey + | ArgumentList + | Signature + | Specification + | Structure + | Implementation + | Attribute + | TypeConstraint + | AtomicTypExpr + | ListExpr + | Pattern + | AttributePayload + | TagNames + +let toString = function + | OpenDescription -> "an open description" + | ModuleLongIdent -> "a module path" + | Ternary -> "a ternary expression" + | Es6ArrowExpr -> "an es6 arrow function" + | Jsx -> "a jsx expression" + | JsxAttribute -> "a jsx attribute" + | ExprOperand -> "a basic expression" + | ExprUnary -> "a unary expression" + | ExprBinaryAfterOp op -> + "an expression after the operator \"" ^ Token.toString op ^ "\"" + | ExprIf -> "an if expression" + | IfCondition -> "the condition of an if expression" + | IfBranch -> "the true-branch of an if expression" + | ElseBranch -> "the else-branch of an if expression" + | TypeExpression -> "a type" + | External -> "an external" + | PatternMatching -> "the cases of a pattern match" + | ExprBlock -> "a block with expressions" + | ExprSetField -> "a record field mutation" + | ExprCall -> "a function application" + | ExprArrayAccess -> "an array access expression" + | ExprArrayMutation -> "an array mutation" + | LetBinding -> "a let binding" + | TypeDef -> "a type definition" + | TypeParams -> "type parameters" + | TypeParam -> "a type parameter" + | TypeConstrName -> "a type-constructor name" + | TypeRepresentation -> "a type representation" + | RecordDecl -> "a record declaration" + | PatternMatchCase -> "a pattern match case" + | ConstructorDeclaration -> "a constructor declaration" + | ExprList -> "multiple expressions" + | PatternList -> "multiple patterns" + | PatternOcamlList -> "a list pattern" + | PatternRecord -> "a record pattern" + | ParameterList -> "parameters" + | StringFieldDeclarations -> "string field declarations" + | FieldDeclarations -> "field declarations" + | TypExprList -> "list of types" + | FunctorArgs -> "functor arguments" + | ModExprList -> "list of module expressions" + | TypeParameters -> "list of type parameters" + | RecordRows -> "rows of a record" + | RecordRowsStringKey -> "rows of a record with string keys" + | ArgumentList -> "arguments" + | Signature -> "signature" + | Specification -> "specification" + | Structure -> "structure" + | Implementation -> "implementation" + | Attribute -> "an attribute" + | TypeConstraint -> "constraints on a type" + | AtomicTypExpr -> "a type" + | ListExpr -> "an ocaml list expr" + | PackageConstraint -> "a package constraint" + | JsxChild -> "jsx child" + | Pattern -> "pattern" + | ExprFor -> "a for expression" + | AttributePayload -> "an attribute payload" + | TagNames -> "tag names" + +let isSignatureItemStart = function + | Token.At | Let | Typ | External | Exception | Open | Include | Module | AtAt + | 
PercentPercent -> + true + | _ -> false + +let isAtomicPatternStart = function + | Token.Int _ | String _ | Codepoint _ | Backtick | Lparen | Lbracket | Lbrace + | Underscore | Lident _ | Uident _ | List | Exception | Lazy | Percent -> + true + | _ -> false + +let isAtomicExprStart = function + | Token.True | False | Int _ | String _ | Float _ | Codepoint _ | Backtick + | Uident _ | Lident _ | Hash | Lparen | List | Lbracket | Lbrace | LessThan + | Module | Percent -> + true + | _ -> false + +let isAtomicTypExprStart = function + | Token.SingleQuote | Underscore | Lparen | Lbrace | Uident _ | Lident _ + | Percent -> + true + | _ -> false + +let isExprStart = function + | Token.Assert | At | Await | Backtick | Bang | Codepoint _ | False | Float _ + | For | Hash | If | Int _ | Lazy | Lbrace | Lbracket | LessThan | Lident _ + | List | Lparen | Minus | MinusDot | Module | Percent | Plus | PlusDot + | String _ | Switch | True | Try | Uident _ | Underscore (* _ => doThings() *) + | While -> + true + | _ -> false + +let isJsxAttributeStart = function + | Token.Lident _ | Question | Lbrace -> true + | _ -> false + +let isStructureItemStart = function + | Token.Open | Let | Typ | External | Exception | Include | Module | AtAt + | PercentPercent | At -> + true + | t when isExprStart t -> true + | _ -> false + +let isPatternStart = function + | Token.Int _ | Float _ | String _ | Codepoint _ | Backtick | True | False + | Minus | Plus | Lparen | Lbracket | Lbrace | List | Underscore | Lident _ + | Uident _ | Hash | Exception | Lazy | Percent | Module | At -> + true + | _ -> false + +let isParameterStart = function + | Token.Typ | Tilde | Dot -> true + | token when isPatternStart token -> true + | _ -> false + +(* TODO: overparse Uident ? *) +let isStringFieldDeclStart = function + | Token.String _ | Lident _ | At | DotDotDot -> true + | _ -> false + +(* TODO: overparse Uident ? 
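These predicates are what the recovery machinery in `res_core.ml` consults when deciding whether a lookahead token can start the construct currently being parsed. A sketch, assuming `res_grammar.mli` (not shown in this part of the diff) exposes them:

```ocaml
let () =
  let open Res_grammar in
  assert (isStructureItemStart Res_token.Open);
  assert (isExprStart (Res_token.Lident "x"));
  assert (not (isExprStart Res_token.Eof));
  print_endline (toString LetBinding) (* prints "a let binding" *)
```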
*) +let isFieldDeclStart = function + | Token.At | Mutable | Lident _ -> true + (* recovery, TODO: this is not ideal… *) + | Uident _ -> true + | t when Token.isKeyword t -> true + | _ -> false + +let isRecordDeclStart = function + | Token.At | Mutable | Lident _ -> true + | _ -> false + +let isTypExprStart = function + | Token.At | SingleQuote | Underscore | Lparen | Lbracket | Uident _ + | Lident _ | Module | Percent | Lbrace -> + true + | _ -> false + +let isTypeParameterStart = function + | Token.Tilde | Dot -> true + | token when isTypExprStart token -> true + | _ -> false + +let isTypeParamStart = function + | Token.Plus | Minus | SingleQuote | Underscore -> true + | _ -> false + +let isFunctorArgStart = function + | Token.At | Uident _ | Underscore | Percent | Lbrace | Lparen -> true + | _ -> false + +let isModExprStart = function + | Token.At | Percent | Uident _ | Lbrace | Lparen | Lident "unpack" -> true + | _ -> false + +let isRecordRowStart = function + | Token.DotDotDot -> true + | Token.Uident _ | Lident _ -> true + (* TODO *) + | t when Token.isKeyword t -> true + | _ -> false + +let isRecordRowStringKeyStart = function + | Token.String _ -> true + | _ -> false + +let isArgumentStart = function + | Token.Tilde | Dot | Underscore -> true + | t when isExprStart t -> true + | _ -> false + +let isPatternMatchStart = function + | Token.Bar -> true + | t when isPatternStart t -> true + | _ -> false + +let isPatternOcamlListStart = function + | Token.DotDotDot -> true + | t when isPatternStart t -> true + | _ -> false + +let isPatternRecordItemStart = function + | Token.DotDotDot | Uident _ | Lident _ | Underscore -> true + | _ -> false + +let isAttributeStart = function + | Token.At -> true + | _ -> false + +let isJsxChildStart = isAtomicExprStart + +let isBlockExprStart = function + | Token.Assert | At | Await | Backtick | Bang | Codepoint _ | Exception + | False | Float _ | For | Forwardslash | Hash | If | Int _ | Lazy | Lbrace + | Lbracket | LessThan | Let | Lident _ | List | Lparen | Minus | MinusDot + | Module | Open | Percent | Plus | PlusDot | String _ | Switch | True | Try + | Uident _ | Underscore | While -> + true + | _ -> false + +let isListElement grammar token = + match grammar with + | ExprList -> token = Token.DotDotDot || isExprStart token + | ListExpr -> token = DotDotDot || isExprStart token + | PatternList -> token = DotDotDot || isPatternStart token + | ParameterList -> isParameterStart token + | StringFieldDeclarations -> isStringFieldDeclStart token + | FieldDeclarations -> isFieldDeclStart token + | RecordDecl -> isRecordDeclStart token + | TypExprList -> isTypExprStart token || token = Token.LessThan + | TypeParams -> isTypeParamStart token + | FunctorArgs -> isFunctorArgStart token + | ModExprList -> isModExprStart token + | TypeParameters -> isTypeParameterStart token + | RecordRows -> isRecordRowStart token + | RecordRowsStringKey -> isRecordRowStringKeyStart token + | ArgumentList -> isArgumentStart token + | Signature | Specification -> isSignatureItemStart token + | Structure | Implementation -> isStructureItemStart token + | PatternMatching -> isPatternMatchStart token + | PatternOcamlList -> isPatternOcamlListStart token + | PatternRecord -> isPatternRecordItemStart token + | Attribute -> isAttributeStart token + | TypeConstraint -> token = Constraint + | PackageConstraint -> token = And + | ConstructorDeclaration -> token = Bar + | JsxAttribute -> isJsxAttributeStart token + | AttributePayload -> token = Lparen + | TagNames -> token = Hash + | _ -> 
false + +let isListTerminator grammar token = + match (grammar, token) with + | _, Token.Eof + | ExprList, (Rparen | Forwardslash | Rbracket) + | ListExpr, Rparen + | ArgumentList, Rparen + | TypExprList, (Rparen | Forwardslash | GreaterThan | Equal) + | ModExprList, Rparen + | ( (PatternList | PatternOcamlList | PatternRecord), + ( Forwardslash | Rbracket | Rparen | EqualGreater (* pattern matching => *) + | In (* for expressions *) + | Equal (* let {x} = foo *) ) ) + | ExprBlock, Rbrace + | (Structure | Signature), Rbrace + | TypeParams, Rparen + | ParameterList, (EqualGreater | Lbrace) + | JsxAttribute, (Forwardslash | GreaterThan) + | StringFieldDeclarations, Rbrace -> + true + | Attribute, token when token <> At -> true + | TypeConstraint, token when token <> Constraint -> true + | PackageConstraint, token when token <> And -> true + | ConstructorDeclaration, token when token <> Bar -> true + | AttributePayload, Rparen -> true + | TagNames, Rbracket -> true + | _ -> false + +let isPartOfList grammar token = + isListElement grammar token || isListTerminator grammar token diff --git a/res_syntax/src/res_io.ml b/res_syntax/src/res_io.ml new file mode 100644 index 0000000000..ef29399bad --- /dev/null +++ b/res_syntax/src/res_io.ml @@ -0,0 +1,14 @@ +let readFile ~filename = + let chan = open_in_bin filename in + let content = + try really_input_string chan (in_channel_length chan) + with End_of_file -> "" + in + close_in_noerr chan; + content + +let writeFile ~filename ~contents:txt = + let chan = open_out_bin filename in + output_string chan txt; + close_out chan + [@@raises Sys_error] diff --git a/res_syntax/src/res_io.mli b/res_syntax/src/res_io.mli new file mode 100644 index 0000000000..dcc6e14253 --- /dev/null +++ b/res_syntax/src/res_io.mli @@ -0,0 +1,7 @@ +(* utilities to read and write to/from files or stdin *) + +(* reads the contents of "filename" into a string *) +val readFile : filename:string -> string + +(* writes "content" into file with name "filename" *) +val writeFile : filename:string -> contents:string -> unit diff --git a/res_syntax/src/res_minibuffer.ml b/res_syntax/src/res_minibuffer.ml new file mode 100644 index 0000000000..d36a1679ad --- /dev/null +++ b/res_syntax/src/res_minibuffer.ml @@ -0,0 +1,47 @@ +type t = {mutable buffer: bytes; mutable position: int; mutable length: int} + +let create n = + let n = if n < 1 then 1 else n in + let s = (Bytes.create [@doesNotRaise]) n in + {buffer = s; position = 0; length = n} + +let contents b = (Bytes.sub_string [@doesNotRaise]) b.buffer 0 b.position + +(* Can't be called directly, don't add to the interface *) +let resize_internal b more = + let len = b.length in + let new_len = ref len in + while b.position + more > !new_len do + new_len := 2 * !new_len + done; + if !new_len > Sys.max_string_length then + if b.position + more <= Sys.max_string_length then + new_len := Sys.max_string_length; + let new_buffer = (Bytes.create [@doesNotRaise]) !new_len in + (* PR#6148: let's keep using [blit] rather than [unsafe_blit] in + this tricky function that is slow anyway. 
*) + Bytes.blit b.buffer 0 new_buffer 0 b.position [@doesNotRaise]; + b.buffer <- new_buffer; + b.length <- !new_len + +let add_char b c = + let pos = b.position in + if pos >= b.length then resize_internal b 1; + Bytes.unsafe_set b.buffer pos c; + b.position <- pos + 1 + +let add_string b s = + let len = String.length s in + let new_position = b.position + len in + if new_position > b.length then resize_internal b len; + Bytes.blit_string s 0 b.buffer b.position len [@doesNotRaise]; + b.position <- new_position + +(* adds newline and trims all preceding whitespace *) +let flush_newline b = + let position = ref b.position in + while Bytes.unsafe_get b.buffer (!position - 1) = ' ' && !position >= 0 do + position := !position - 1 + done; + b.position <- !position; + add_char b '\n' diff --git a/res_syntax/src/res_minibuffer.mli b/res_syntax/src/res_minibuffer.mli new file mode 100644 index 0000000000..0a2bffa538 --- /dev/null +++ b/res_syntax/src/res_minibuffer.mli @@ -0,0 +1,6 @@ +type t +val add_char : t -> char -> unit +val add_string : t -> string -> unit +val contents : t -> string +val create : int -> t +val flush_newline : t -> unit diff --git a/res_syntax/src/res_multi_printer.ml b/res_syntax/src/res_multi_printer.ml new file mode 100644 index 0000000000..a9d65cb834 --- /dev/null +++ b/res_syntax/src/res_multi_printer.ml @@ -0,0 +1,53 @@ +let defaultPrintWidth = 100 + +(* print res files to res syntax *) +let printRes ~isInterface ~filename = + if isInterface then + let parseResult = + Res_driver.parsingEngine.parseInterface ~forPrinter:true ~filename + in + if parseResult.invalid then ( + Res_diagnostics.printReport parseResult.diagnostics parseResult.source; + exit 1) + else + Res_printer.printInterface ~width:defaultPrintWidth + ~comments:parseResult.comments parseResult.parsetree + else + let parseResult = + Res_driver.parsingEngine.parseImplementation ~forPrinter:true ~filename + in + if parseResult.invalid then ( + Res_diagnostics.printReport parseResult.diagnostics parseResult.source; + exit 1) + else + Res_printer.printImplementation ~width:defaultPrintWidth + ~comments:parseResult.comments parseResult.parsetree + [@@raises exit] + +(* print ocaml files to res syntax *) +let printMl ~isInterface ~filename = + if isInterface then + let parseResult = + Res_driver_ml_parser.parsingEngine.parseInterface ~forPrinter:true + ~filename + in + Res_printer.printInterface ~width:defaultPrintWidth + ~comments:parseResult.comments parseResult.parsetree + else + let parseResult = + Res_driver_ml_parser.parsingEngine.parseImplementation ~forPrinter:true + ~filename + in + Res_printer.printImplementation ~width:defaultPrintWidth + ~comments:parseResult.comments parseResult.parsetree + +(* print the given file named input to from "language" to res, general interface exposed by the compiler *) +let print language ~input = + let isInterface = + let len = String.length input in + len > 0 && String.unsafe_get input (len - 1) = 'i' + in + match language with + | `res -> printRes ~isInterface ~filename:input + | `ml -> printMl ~isInterface ~filename:input + [@@raises exit] diff --git a/res_syntax/src/res_multi_printer.mli b/res_syntax/src/res_multi_printer.mli new file mode 100644 index 0000000000..f18043fbdb --- /dev/null +++ b/res_syntax/src/res_multi_printer.mli @@ -0,0 +1,3 @@ +(* Interface to print source code from different languages to res. 
+ * Takes a filename called "input" and returns the corresponding formatted res syntax *) +val print : [`ml | `res] -> input:string -> string diff --git a/res_syntax/src/res_outcome_printer.ml b/res_syntax/src/res_outcome_printer.ml new file mode 100644 index 0000000000..97560bf22a --- /dev/null +++ b/res_syntax/src/res_outcome_printer.ml @@ -0,0 +1,1162 @@ +(* For the curious: the outcome printer is a printer to print data + * from the outcometree.mli file in the ocaml compiler. + * The outcome tree is used by: + * - ocaml's toplevel/repl, print results/errors + * - super errors, print nice errors + * - editor tooling, e.g. show type on hover + * + * In general it represent messages to show results or errors to the user. *) + +module Doc = Res_doc +module Token = Res_token + +let rec unsafe_for_all_range s ~start ~finish p = + start > finish + || p (String.unsafe_get s start) + && unsafe_for_all_range s ~start:(start + 1) ~finish p + +let for_all_from s start p = + let len = String.length s in + unsafe_for_all_range s ~start ~finish:(len - 1) p + +(* See https://github.com/rescript-lang/rescript-compiler/blob/726cfa534314b586e5b5734471bc2023ad99ebd9/jscomp/ext/ext_string.ml#L510 *) +let isValidNumericPolyvarNumber (x : string) = + let len = String.length x in + len > 0 + && + let a = Char.code (String.unsafe_get x 0) in + a <= 57 + && + if len > 1 then + a > 48 + && for_all_from x 1 (function + | '0' .. '9' -> true + | _ -> false) + else a >= 48 + +(* checks if ident contains "arity", like in "arity1", "arity2", "arity3" etc. *) +let isArityIdent ident = + if String.length ident >= 6 then + (String.sub [@doesNotRaise]) ident 0 5 = "arity" + else false + +type identifierStyle = ExoticIdent | NormalIdent + +let classifyIdentContent ~allowUident txt = + let len = String.length txt in + let rec go i = + if i == len then NormalIdent + else + let c = String.unsafe_get txt i in + if + i == 0 + && not + ((allowUident && c >= 'A' && c <= 'Z') + || (c >= 'a' && c <= 'z') + || c = '_') + then ExoticIdent + else if + not + ((c >= 'a' && c <= 'z') + || (c >= 'A' && c <= 'Z') + || c = '\'' || c = '_' + || (c >= '0' && c <= '9')) + then ExoticIdent + else go (i + 1) + in + if Token.isKeywordTxt txt then ExoticIdent else go 0 + +let printIdentLike ~allowUident txt = + match classifyIdentContent ~allowUident txt with + | ExoticIdent -> Doc.concat [Doc.text "\\\""; Doc.text txt; Doc.text "\""] + | NormalIdent -> Doc.text txt + +let printPolyVarIdent txt = + (* numeric poly-vars don't need quotes: #644 *) + if isValidNumericPolyvarNumber txt then Doc.text txt + else + match classifyIdentContent ~allowUident:true txt with + | ExoticIdent -> Doc.concat [Doc.text "\""; Doc.text txt; Doc.text "\""] + | NormalIdent -> Doc.text txt + +(* ReScript doesn't have parenthesized identifiers. + * We don't support custom operators. 
*) +let parenthesized_ident _name = true + +(* TODO: better allocation strategy for the buffer *) +let escapeStringContents s = + let len = String.length s in + let b = Buffer.create len in + for i = 0 to len - 1 do + let c = (String.get [@doesNotRaise]) s i in + if c = '\008' then ( + Buffer.add_char b '\\'; + Buffer.add_char b 'b') + else if c = '\009' then ( + Buffer.add_char b '\\'; + Buffer.add_char b 't') + else if c = '\010' then ( + Buffer.add_char b '\\'; + Buffer.add_char b 'n') + else if c = '\013' then ( + Buffer.add_char b '\\'; + Buffer.add_char b 'r') + else if c = '\034' then ( + Buffer.add_char b '\\'; + Buffer.add_char b '"') + else if c = '\092' then ( + Buffer.add_char b '\\'; + Buffer.add_char b '\\') + else Buffer.add_char b c + done; + Buffer.contents b + +(* let rec print_ident fmt ident = match ident with + | Outcometree.Oide_ident s -> Format.pp_print_string fmt s + | Oide_dot (id, s) -> + print_ident fmt id; + Format.pp_print_char fmt '.'; + Format.pp_print_string fmt s + | Oide_apply (id1, id2) -> + print_ident fmt id1; + Format.pp_print_char fmt '('; + print_ident fmt id2; + Format.pp_print_char fmt ')' *) + +let rec printOutIdentDoc ?(allowUident = true) (ident : Outcometree.out_ident) = + match ident with + | Oide_ident s -> printIdentLike ~allowUident s + | Oide_dot (ident, s) -> + Doc.concat [printOutIdentDoc ident; Doc.dot; Doc.text s] + | Oide_apply (call, arg) -> + Doc.concat + [printOutIdentDoc call; Doc.lparen; printOutIdentDoc arg; Doc.rparen] + +let printOutAttributeDoc (outAttribute : Outcometree.out_attribute) = + Doc.concat [Doc.text "@"; Doc.text outAttribute.oattr_name] + +let printOutAttributesDoc (attrs : Outcometree.out_attribute list) = + match attrs with + | [] -> Doc.nil + | attrs -> + Doc.concat + [ + Doc.group (Doc.join ~sep:Doc.line (List.map printOutAttributeDoc attrs)); + Doc.line; + ] + +let rec collectArrowArgs (outType : Outcometree.out_type) args = + match outType with + | Otyp_arrow (label, argType, returnType) -> + let arg = (label, argType) in + collectArrowArgs returnType (arg :: args) + | _ as returnType -> (List.rev args, returnType) + +let rec collectFunctorArgs (outModuleType : Outcometree.out_module_type) args = + match outModuleType with + | Omty_functor (lbl, optModType, returnModType) -> + let arg = (lbl, optModType) in + collectFunctorArgs returnModType (arg :: args) + | _ -> (List.rev args, outModuleType) + +let rec printOutTypeDoc (outType : Outcometree.out_type) = + match outType with + | Otyp_abstract | Otyp_open -> Doc.nil + | Otyp_variant (nonGen, outVariant, closed, labels) -> + (* bool * out_variant * bool * (string list) option *) + let opening = + match (closed, labels) with + | true, None -> (* [#A | #B] *) Doc.softLine + | false, None -> + (* [> #A | #B] *) + Doc.concat [Doc.greaterThan; Doc.line] + | true, Some [] -> + (* [< #A | #B] *) + Doc.concat [Doc.lessThan; Doc.line] + | true, Some _ -> + (* [< #A | #B > #X #Y ] *) + Doc.concat [Doc.lessThan; Doc.line] + | false, Some _ -> + (* impossible!? 
ocaml seems to print ?, see oprint.ml in 4.06 *) + Doc.concat [Doc.text "?"; Doc.line] + in + Doc.group + (Doc.concat + [ + (if nonGen then Doc.text "_" else Doc.nil); + Doc.lbracket; + Doc.indent (Doc.concat [opening; printOutVariant outVariant]); + (match labels with + | None | Some [] -> Doc.nil + | Some tags -> + Doc.group + (Doc.concat + [ + Doc.space; + Doc.join ~sep:Doc.space + (List.map + (fun lbl -> printIdentLike ~allowUident:true lbl) + tags); + ])); + Doc.softLine; + Doc.rbracket; + ]) + | Otyp_alias (typ, aliasTxt) -> + Doc.concat + [ + Doc.lparen; + printOutTypeDoc typ; + Doc.text " as '"; + Doc.text aliasTxt; + Doc.rparen; + ] + | Otyp_constr + ( Oide_dot (Oide_dot (Oide_ident "Js", "Fn"), "arity0"), + (* Js.Fn.arity0 *) + [typ] ) -> + (* Js.Fn.arity0 -> (.) => t *) + Doc.concat [Doc.text "(. ()) => "; printOutTypeDoc typ] + | Otyp_constr + ( Oide_dot (Oide_dot (Oide_ident "Js", "Fn"), ident), + (* Js.Fn.arity2 *) + [(Otyp_arrow _ as arrowType)] (* (int, int) => int *) ) + when isArityIdent ident -> + (* Js.Fn.arity2<(int, int) => int> -> (. int, int) => int*) + printOutArrowType ~uncurried:true arrowType + | Otyp_constr (outIdent, []) -> printOutIdentDoc ~allowUident:false outIdent + | Otyp_manifest (typ1, typ2) -> + Doc.concat [printOutTypeDoc typ1; Doc.text " = "; printOutTypeDoc typ2] + | Otyp_record record -> printRecordDeclarationDoc ~inline:true record + | Otyp_stuff txt -> Doc.text txt + | Otyp_var (ng, s) -> + Doc.concat [Doc.text ("'" ^ if ng then "_" else ""); Doc.text s] + | Otyp_object (fields, rest) -> printObjectFields fields rest + | Otyp_class _ -> Doc.nil + | Otyp_attribute (typ, attribute) -> + Doc.group + (Doc.concat + [printOutAttributeDoc attribute; Doc.line; printOutTypeDoc typ]) + (* example: Red | Blue | Green | CustomColour(float, float, float) *) + | Otyp_sum constructors -> printOutConstructorsDoc constructors + (* example: {"name": string, "age": int} *) + | Otyp_constr (Oide_dot (Oide_ident "Js", "t"), [Otyp_object (fields, rest)]) + -> + printObjectFields fields rest + (* example: node *) + | Otyp_constr (outIdent, args) -> + let argsDoc = + match args with + | [] -> Doc.nil + | args -> + Doc.concat + [ + Doc.lessThan; + Doc.indent + (Doc.concat + [ + Doc.softLine; + Doc.join + ~sep:(Doc.concat [Doc.comma; Doc.line]) + (List.map printOutTypeDoc args); + ]); + Doc.trailingComma; + Doc.softLine; + Doc.greaterThan; + ] + in + Doc.group (Doc.concat [printOutIdentDoc outIdent; argsDoc]) + | Otyp_tuple tupleArgs -> + Doc.group + (Doc.concat + [ + Doc.lparen; + Doc.indent + (Doc.concat + [ + Doc.softLine; + Doc.join + ~sep:(Doc.concat [Doc.comma; Doc.line]) + (List.map printOutTypeDoc tupleArgs); + ]); + Doc.trailingComma; + Doc.softLine; + Doc.rparen; + ]) + | Otyp_poly (vars, outType) -> + Doc.group + (Doc.concat + [ + Doc.join ~sep:Doc.space + (List.map (fun var -> Doc.text ("'" ^ var)) vars); + Doc.dot; + Doc.space; + printOutTypeDoc outType; + ]) + | Otyp_arrow _ as typ -> printOutArrowType ~uncurried:false typ + | Otyp_module (modName, stringList, outTypes) -> + let packageTypeDoc = + match (stringList, outTypes) with + | [], [] -> Doc.nil + | labels, types -> + let i = ref 0 in + let package = + Doc.join ~sep:Doc.line + ((List.map2 [@doesNotRaise]) + (fun lbl typ -> + Doc.concat + [ + Doc.text + (if i.contents > 0 then "and type " else "with type "); + Doc.text lbl; + Doc.text " = "; + printOutTypeDoc typ; + ]) + labels types) + in + Doc.indent (Doc.concat [Doc.line; package]) + in + Doc.concat + [ + Doc.text "module"; + Doc.lparen; + Doc.text 
modName; + packageTypeDoc; + Doc.rparen; + ] + +and printOutArrowType ~uncurried typ = + let typArgs, typ = collectArrowArgs typ [] in + let args = + Doc.join + ~sep:(Doc.concat [Doc.comma; Doc.line]) + (List.map + (fun (lbl, typ) -> + let lblLen = String.length lbl in + if lblLen = 0 then printOutTypeDoc typ + else + let lbl, optionalIndicator = + (* the ocaml compiler hardcodes the optional label inside the string of the label in printtyp.ml *) + match String.unsafe_get lbl 0 with + | '?' -> + ((String.sub [@doesNotRaise]) lbl 1 (lblLen - 1), Doc.text "=?") + | _ -> (lbl, Doc.nil) + in + Doc.group + (Doc.concat + [ + Doc.text ("~" ^ lbl ^ ": "); + printOutTypeDoc typ; + optionalIndicator; + ])) + typArgs) + in + let argsDoc = + let needsParens = + match typArgs with + | _ when uncurried -> true + | [(_, (Otyp_tuple _ | Otyp_arrow _))] -> true + (* single argument should not be wrapped *) + | [("", _)] -> false + | _ -> true + in + if needsParens then + Doc.group + (Doc.concat + [ + (if uncurried then Doc.text "(. " else Doc.lparen); + Doc.indent (Doc.concat [Doc.softLine; args]); + Doc.trailingComma; + Doc.softLine; + Doc.rparen; + ]) + else args + in + Doc.concat [argsDoc; Doc.text " => "; printOutTypeDoc typ] + +and printOutVariant variant = + match variant with + | Ovar_fields fields -> + (* (string * bool * out_type list) list *) + Doc.join ~sep:Doc.line + ((* + * [< | #T([< u2]) & ([< u2]) & ([< u1])] --> no ampersand + * [< | #S & ([< u2]) & ([< u2]) & ([< u1])] --> ampersand + *) + List.mapi + (fun i (name, ampersand, types) -> + let needsParens = + match types with + | [Outcometree.Otyp_tuple _] -> false + | _ -> true + in + Doc.concat + [ + (if i > 0 then Doc.text "| " + else Doc.ifBreaks (Doc.text "| ") Doc.nil); + Doc.group + (Doc.concat + [ + Doc.text "#"; + printPolyVarIdent name; + (match types with + | [] -> Doc.nil + | types -> + Doc.concat + [ + (if ampersand then Doc.text " & " else Doc.nil); + Doc.indent + (Doc.concat + [ + Doc.join + ~sep:(Doc.concat [Doc.text " &"; Doc.line]) + (List.map + (fun typ -> + let outTypeDoc = + printOutTypeDoc typ + in + if needsParens then + Doc.concat + [ + Doc.lparen; + outTypeDoc; + Doc.rparen; + ] + else outTypeDoc) + types); + ]); + ]); + ]); + ]) + fields) + | Ovar_typ typ -> printOutTypeDoc typ + +and printObjectFields fields rest = + let dots = + match rest with + | Some non_gen -> Doc.text ((if non_gen then "_" else "") ^ "..") + | None -> if fields = [] then Doc.dot else Doc.nil + in + Doc.group + (Doc.concat + [ + Doc.lbrace; + dots; + Doc.indent + (Doc.concat + [ + Doc.softLine; + Doc.join + ~sep:(Doc.concat [Doc.comma; Doc.line]) + (List.map + (fun (lbl, outType) -> + Doc.group + (Doc.concat + [ + Doc.text ("\"" ^ lbl ^ "\": "); + printOutTypeDoc outType; + ])) + fields); + ]); + Doc.trailingComma; + Doc.softLine; + Doc.rbrace; + ]) + +and printOutConstructorsDoc constructors = + Doc.group + (Doc.indent + (Doc.concat + [ + Doc.softLine; + Doc.join ~sep:Doc.line + (List.mapi + (fun i constructor -> + Doc.concat + [ + (if i > 0 then Doc.text "| " + else Doc.ifBreaks (Doc.text "| ") Doc.nil); + printOutConstructorDoc constructor; + ]) + constructors); + ])) + +and printOutConstructorDoc (name, args, gadt) = + let gadtDoc = + match gadt with + | Some outType -> Doc.concat [Doc.text ": "; printOutTypeDoc outType] + | None -> Doc.nil + in + let argsDoc = + match args with + | [] -> Doc.nil + | [Otyp_record record] -> + (* inline records + * | Root({ + * mutable value: 'value, + * mutable updatedTime: float, + * }) + *) + Doc.concat 
+ [ + Doc.lparen; + Doc.indent (printRecordDeclarationDoc ~inline:true record); + Doc.rparen; + ] + | _types -> + Doc.indent + (Doc.concat + [ + Doc.lparen; + Doc.indent + (Doc.concat + [ + Doc.softLine; + Doc.join + ~sep:(Doc.concat [Doc.comma; Doc.line]) + (List.map printOutTypeDoc args); + ]); + Doc.trailingComma; + Doc.softLine; + Doc.rparen; + ]) + in + Doc.group (Doc.concat [Doc.text name; argsDoc; gadtDoc]) + +and printRecordDeclRowDoc (name, mut, opt, arg) = + Doc.group + (Doc.concat + [ + (if mut then Doc.text "mutable " else Doc.nil); + printIdentLike ~allowUident:false name; + (if opt then Doc.text "?" else Doc.nil); + Doc.text ": "; + printOutTypeDoc arg; + ]) + +and printRecordDeclarationDoc ~inline rows = + let content = + Doc.concat + [ + Doc.lbrace; + Doc.indent + (Doc.concat + [ + Doc.softLine; + Doc.join + ~sep:(Doc.concat [Doc.comma; Doc.line]) + (List.map printRecordDeclRowDoc rows); + ]); + Doc.trailingComma; + Doc.softLine; + Doc.rbrace; + ] + in + if not inline then Doc.group content else content + +let printOutType fmt outType = + Format.pp_print_string fmt (Doc.toString ~width:80 (printOutTypeDoc outType)) + +let printTypeParameterDoc (typ, (co, cn)) = + Doc.concat + [ + (if not cn then Doc.text "+" else if not co then Doc.text "-" else Doc.nil); + (if typ = "_" then Doc.text "_" else Doc.text ("'" ^ typ)); + ] + +let rec printOutSigItemDoc ?(printNameAsIs = false) + (outSigItem : Outcometree.out_sig_item) = + match outSigItem with + | Osig_class _ | Osig_class_type _ -> Doc.nil + | Osig_ellipsis -> Doc.dotdotdot + | Osig_value valueDecl -> + Doc.group + (Doc.concat + [ + printOutAttributesDoc valueDecl.oval_attributes; + Doc.text + (match valueDecl.oval_prims with + | [] -> "let " + | _ -> "external "); + Doc.text valueDecl.oval_name; + Doc.text ":"; + Doc.space; + printOutTypeDoc valueDecl.oval_type; + (match valueDecl.oval_prims with + | [] -> Doc.nil + | primitives -> + Doc.indent + (Doc.concat + [ + Doc.text " ="; + Doc.line; + Doc.group + (Doc.join ~sep:Doc.line + (List.map + (fun prim -> + let prim = + if + prim <> "" + && (prim.[0] [@doesNotRaise]) = '\132' + then "#rescript-external" + else prim + in + (* not display those garbage '\132' is a magic number for marshal *) + Doc.text ("\"" ^ prim ^ "\"")) + primitives)); + ])); + ]) + | Osig_typext (outExtensionConstructor, _outExtStatus) -> + printOutExtensionConstructorDoc outExtensionConstructor + | Osig_modtype (modName, Omty_signature []) -> + Doc.concat [Doc.text "module type "; Doc.text modName] + | Osig_modtype (modName, outModuleType) -> + Doc.group + (Doc.concat + [ + Doc.text "module type "; + Doc.text modName; + Doc.text " = "; + printOutModuleTypeDoc outModuleType; + ]) + | Osig_module (modName, Omty_alias ident, _) -> + Doc.group + (Doc.concat + [ + Doc.text "module "; + Doc.text modName; + Doc.text " ="; + Doc.line; + printOutIdentDoc ident; + ]) + | Osig_module (modName, outModType, outRecStatus) -> + Doc.group + (Doc.concat + [ + Doc.text + (match outRecStatus with + | Orec_not -> "module " + | Orec_first -> "module rec " + | Orec_next -> "and "); + Doc.text modName; + Doc.text ": "; + printOutModuleTypeDoc outModType; + ]) + | Osig_type (outTypeDecl, outRecStatus) -> + (* TODO: manifest ? 
*) + let attrs = + match (outTypeDecl.otype_immediate, outTypeDecl.otype_unboxed) with + | false, false -> Doc.nil + | true, false -> Doc.concat [Doc.text "@immediate"; Doc.line] + | false, true -> Doc.concat [Doc.text "@unboxed"; Doc.line] + | true, true -> Doc.concat [Doc.text "@immediate @unboxed"; Doc.line] + in + let kw = + Doc.text + (match outRecStatus with + | Orec_not -> "type " + | Orec_first -> "type rec " + | Orec_next -> "and ") + in + let typeParams = + match outTypeDecl.otype_params with + | [] -> Doc.nil + | _params -> + Doc.group + (Doc.concat + [ + Doc.lessThan; + Doc.indent + (Doc.concat + [ + Doc.softLine; + Doc.join + ~sep:(Doc.concat [Doc.comma; Doc.line]) + (List.map printTypeParameterDoc outTypeDecl.otype_params); + ]); + Doc.trailingComma; + Doc.softLine; + Doc.greaterThan; + ]) + in + let privateDoc = + match outTypeDecl.otype_private with + | Asttypes.Private -> Doc.text "private " + | Public -> Doc.nil + in + let kind = + match outTypeDecl.otype_type with + | Otyp_open -> Doc.concat [Doc.text " = "; privateDoc; Doc.text ".."] + | Otyp_abstract -> Doc.nil + | Otyp_record record -> + Doc.concat + [ + Doc.text " = "; + privateDoc; + printRecordDeclarationDoc ~inline:false record; + ] + | typ -> Doc.concat [Doc.text " = "; printOutTypeDoc typ] + in + let constraints = + match outTypeDecl.otype_cstrs with + | [] -> Doc.nil + | _ -> + Doc.group + (Doc.indent + (Doc.concat + [ + Doc.hardLine; + Doc.join ~sep:Doc.line + (List.map + (fun (typ1, typ2) -> + Doc.group + (Doc.concat + [ + Doc.text "constraint "; + printOutTypeDoc typ1; + Doc.text " ="; + Doc.space; + printOutTypeDoc typ2; + ])) + outTypeDecl.otype_cstrs); + ])) + in + Doc.group + (Doc.concat + [ + attrs; + Doc.group + (Doc.concat + [ + attrs; + kw; + (if printNameAsIs then Doc.text outTypeDecl.otype_name + else printIdentLike ~allowUident:false outTypeDecl.otype_name); + typeParams; + kind; + ]); + constraints; + ]) + +and printOutModuleTypeDoc (outModType : Outcometree.out_module_type) = + match outModType with + | Omty_abstract -> Doc.nil + | Omty_ident ident -> printOutIdentDoc ident + (* example: module Increment = (M: X_int) => X_int *) + | Omty_functor _ -> + let args, returnModType = collectFunctorArgs outModType [] in + let argsDoc = + match args with + | [(_, None)] -> Doc.text "()" + | args -> + Doc.group + (Doc.concat + [ + Doc.lparen; + Doc.indent + (Doc.concat + [ + Doc.softLine; + Doc.join + ~sep:(Doc.concat [Doc.comma; Doc.line]) + (List.map + (fun (lbl, optModType) -> + Doc.group + (Doc.concat + [ + Doc.text lbl; + (match optModType with + | None -> Doc.nil + | Some modType -> + Doc.concat + [ + Doc.text ": "; + printOutModuleTypeDoc modType; + ]); + ])) + args); + ]); + Doc.trailingComma; + Doc.softLine; + Doc.rparen; + ]) + in + Doc.group + (Doc.concat + [argsDoc; Doc.text " => "; printOutModuleTypeDoc returnModType]) + | Omty_signature [] -> Doc.nil + | Omty_signature signature -> + Doc.breakableGroup ~forceBreak:true + (Doc.concat + [ + Doc.lbrace; + Doc.indent (Doc.concat [Doc.line; printOutSignatureDoc signature]); + Doc.softLine; + Doc.rbrace; + ]) + | Omty_alias _ident -> Doc.nil + +and printOutSignatureDoc (signature : Outcometree.out_sig_item list) = + let rec loop signature acc = + match signature with + | [] -> List.rev acc + | Outcometree.Osig_typext (ext, Oext_first) :: items -> + (* Gather together the extension constructors *) + let rec gather_extensions acc items = + match items with + | Outcometree.Osig_typext (ext, Oext_next) :: items -> + gather_extensions + 
((ext.oext_name, ext.oext_args, ext.oext_ret_type) :: acc) + items + | _ -> (List.rev acc, items) + in + let exts, items = + gather_extensions + [(ext.oext_name, ext.oext_args, ext.oext_ret_type)] + items + in + let te = + { + Outcometree.otyext_name = ext.oext_type_name; + otyext_params = ext.oext_type_params; + otyext_constructors = exts; + otyext_private = ext.oext_private; + } + in + let doc = printOutTypeExtensionDoc te in + loop items (doc :: acc) + | item :: items -> + let doc = printOutSigItemDoc ~printNameAsIs:false item in + loop items (doc :: acc) + in + match loop signature [] with + | [doc] -> doc + | docs -> Doc.breakableGroup ~forceBreak:true (Doc.join ~sep:Doc.line docs) + +and printOutExtensionConstructorDoc + (outExt : Outcometree.out_extension_constructor) = + let typeParams = + match outExt.oext_type_params with + | [] -> Doc.nil + | params -> + Doc.group + (Doc.concat + [ + Doc.lessThan; + Doc.indent + (Doc.concat + [ + Doc.softLine; + Doc.join + ~sep:(Doc.concat [Doc.comma; Doc.line]) + (List.map + (fun ty -> + Doc.text (if ty = "_" then ty else "'" ^ ty)) + params); + ]); + Doc.softLine; + Doc.greaterThan; + ]) + in + + Doc.group + (Doc.concat + [ + Doc.text "type "; + printIdentLike ~allowUident:false outExt.oext_type_name; + typeParams; + Doc.text " += "; + Doc.line; + (if outExt.oext_private = Asttypes.Private then Doc.text "private " + else Doc.nil); + printOutConstructorDoc + (outExt.oext_name, outExt.oext_args, outExt.oext_ret_type); + ]) + +and printOutTypeExtensionDoc (typeExtension : Outcometree.out_type_extension) = + let typeParams = + match typeExtension.otyext_params with + | [] -> Doc.nil + | params -> + Doc.group + (Doc.concat + [ + Doc.lessThan; + Doc.indent + (Doc.concat + [ + Doc.softLine; + Doc.join + ~sep:(Doc.concat [Doc.comma; Doc.line]) + (List.map + (fun ty -> + Doc.text (if ty = "_" then ty else "'" ^ ty)) + params); + ]); + Doc.softLine; + Doc.greaterThan; + ]) + in + + Doc.group + (Doc.concat + [ + Doc.text "type "; + printIdentLike ~allowUident:false typeExtension.otyext_name; + typeParams; + Doc.text " += "; + (if typeExtension.otyext_private = Asttypes.Private then + Doc.text "private " + else Doc.nil); + printOutConstructorsDoc typeExtension.otyext_constructors; + ]) + +let printOutSigItem fmt outSigItem = + Format.pp_print_string fmt + (Doc.toString ~width:80 (printOutSigItemDoc outSigItem)) + +let printOutSignature fmt signature = + Format.pp_print_string fmt + (Doc.toString ~width:80 (printOutSignatureDoc signature)) + +let validFloatLexeme s = + let l = String.length s in + let rec loop i = + if i >= l then s ^ "." + else + match s.[i] [@doesNotRaise] with + | '0' .. 
'9' | '-' -> loop (i + 1) + | _ -> s + in + loop 0 + +let floatRepres f = + match classify_float f with + | FP_nan -> "nan" + | FP_infinite -> if f < 0.0 then "neg_infinity" else "infinity" + | _ -> + let float_val = + let s1 = Printf.sprintf "%.12g" f in + if f = (float_of_string [@doesNotRaise]) s1 then s1 + else + let s2 = Printf.sprintf "%.15g" f in + if f = (float_of_string [@doesNotRaise]) s2 then s2 + else Printf.sprintf "%.18g" f + in + validFloatLexeme float_val + +let rec printOutValueDoc (outValue : Outcometree.out_value) = + match outValue with + | Oval_array outValues -> + Doc.group + (Doc.concat + [ + Doc.lbracket; + Doc.indent + (Doc.concat + [ + Doc.softLine; + Doc.join + ~sep:(Doc.concat [Doc.comma; Doc.line]) + (List.map printOutValueDoc outValues); + ]); + Doc.trailingComma; + Doc.softLine; + Doc.rbracket; + ]) + | Oval_char c -> Doc.text ("'" ^ Char.escaped c ^ "'") + | Oval_constr (outIdent, outValues) -> + Doc.group + (Doc.concat + [ + printOutIdentDoc outIdent; + Doc.lparen; + Doc.indent + (Doc.concat + [ + Doc.softLine; + Doc.join + ~sep:(Doc.concat [Doc.comma; Doc.line]) + (List.map printOutValueDoc outValues); + ]); + Doc.trailingComma; + Doc.softLine; + Doc.rparen; + ]) + | Oval_ellipsis -> Doc.text "..." + | Oval_int i -> Doc.text (Format.sprintf "%i" i) + | Oval_int32 i -> Doc.text (Format.sprintf "%lil" i) + | Oval_int64 i -> Doc.text (Format.sprintf "%LiL" i) + | Oval_nativeint i -> Doc.text (Format.sprintf "%nin" i) + | Oval_float f -> Doc.text (floatRepres f) + | Oval_list outValues -> + Doc.group + (Doc.concat + [ + Doc.text "list["; + Doc.indent + (Doc.concat + [ + Doc.softLine; + Doc.join + ~sep:(Doc.concat [Doc.comma; Doc.line]) + (List.map printOutValueDoc outValues); + ]); + Doc.trailingComma; + Doc.softLine; + Doc.rbracket; + ]) + | Oval_printer fn -> + let fmt = Format.str_formatter in + fn fmt; + let str = Format.flush_str_formatter () in + Doc.text str + | Oval_record rows -> + Doc.group + (Doc.concat + [ + Doc.lparen; + Doc.indent + (Doc.concat + [ + Doc.softLine; + Doc.join + ~sep:(Doc.concat [Doc.comma; Doc.line]) + (List.map + (fun (outIdent, outValue) -> + Doc.group + (Doc.concat + [ + printOutIdentDoc outIdent; + Doc.text ": "; + printOutValueDoc outValue; + ])) + rows); + ]); + Doc.trailingComma; + Doc.softLine; + Doc.rparen; + ]) + | Oval_string (txt, _sizeToPrint, _kind) -> + Doc.text (escapeStringContents txt) + | Oval_stuff txt -> Doc.text txt + | Oval_tuple outValues -> + Doc.group + (Doc.concat + [ + Doc.lparen; + Doc.indent + (Doc.concat + [ + Doc.softLine; + Doc.join + ~sep:(Doc.concat [Doc.comma; Doc.line]) + (List.map printOutValueDoc outValues); + ]); + Doc.trailingComma; + Doc.softLine; + Doc.rparen; + ]) + (* Not supported by ReScript *) + | Oval_variant _ -> Doc.nil + +let printOutExceptionDoc exc outValue = + match exc with + | Sys.Break -> Doc.text "Interrupted." + | Out_of_memory -> Doc.text "Out of memory during evaluation." + | Stack_overflow -> + Doc.text "Stack overflow during evaluation (looping recursion?)." 
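+  (* Any other exception falls through to the generic report below:
+     an "Exception:" header followed by the printed out_value. *)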
+ | _ -> + Doc.group + (Doc.indent + (Doc.concat + [Doc.text "Exception:"; Doc.line; printOutValueDoc outValue])) + +let printOutPhraseSignature signature = + let rec loop signature acc = + match signature with + | [] -> List.rev acc + | (Outcometree.Osig_typext (ext, Oext_first), None) :: signature -> + (* Gather together extension constructors *) + let rec gather_extensions acc items = + match items with + | (Outcometree.Osig_typext (ext, Oext_next), None) :: items -> + gather_extensions + ((ext.oext_name, ext.oext_args, ext.oext_ret_type) :: acc) + items + | _ -> (List.rev acc, items) + in + let exts, signature = + gather_extensions + [(ext.oext_name, ext.oext_args, ext.oext_ret_type)] + signature + in + let te = + { + Outcometree.otyext_name = ext.oext_type_name; + otyext_params = ext.oext_type_params; + otyext_constructors = exts; + otyext_private = ext.oext_private; + } + in + let doc = printOutTypeExtensionDoc te in + loop signature (doc :: acc) + | (sigItem, optOutValue) :: signature -> + let doc = + match optOutValue with + | None -> printOutSigItemDoc sigItem + | Some outValue -> + Doc.group + (Doc.concat + [ + printOutSigItemDoc sigItem; + Doc.text " = "; + printOutValueDoc outValue; + ]) + in + loop signature (doc :: acc) + in + Doc.breakableGroup ~forceBreak:true + (Doc.join ~sep:Doc.line (loop signature [])) + +let printOutPhraseDoc (outPhrase : Outcometree.out_phrase) = + match outPhrase with + | Ophr_eval (outValue, outType) -> + Doc.group + (Doc.concat + [ + Doc.text "- : "; + printOutTypeDoc outType; + Doc.text " ="; + Doc.indent (Doc.concat [Doc.line; printOutValueDoc outValue]); + ]) + | Ophr_signature [] -> Doc.nil + | Ophr_signature signature -> printOutPhraseSignature signature + | Ophr_exception (exc, outValue) -> printOutExceptionDoc exc outValue + +let printOutPhrase fmt outPhrase = + Format.pp_print_string fmt + (Doc.toString ~width:80 (printOutPhraseDoc outPhrase)) + +let printOutModuleType fmt outModuleType = + Format.pp_print_string fmt + (Doc.toString ~width:80 (printOutModuleTypeDoc outModuleType)) + +let printOutTypeExtension fmt typeExtension = + Format.pp_print_string fmt + (Doc.toString ~width:80 (printOutTypeExtensionDoc typeExtension)) + +let printOutValue fmt outValue = + Format.pp_print_string fmt + (Doc.toString ~width:80 (printOutValueDoc outValue)) + +(* Not supported in ReScript *) +(* Oprint.out_class_type *) +let setup = + lazy + (Oprint.out_value := printOutValue; + Oprint.out_type := printOutType; + Oprint.out_module_type := printOutModuleType; + Oprint.out_sig_item := printOutSigItem; + Oprint.out_signature := printOutSignature; + Oprint.out_type_extension := printOutTypeExtension; + Oprint.out_phrase := printOutPhrase) diff --git a/res_syntax/src/res_outcome_printer.mli b/res_syntax/src/res_outcome_printer.mli new file mode 100644 index 0000000000..d3ee60aa43 --- /dev/null +++ b/res_syntax/src/res_outcome_printer.mli @@ -0,0 +1,18 @@ +(* For the curious: the outcome printer is a printer to print data + * from the outcometree.mli file in the ocaml compiler. + * The outcome tree is used by: + * - ocaml's toplevel/repl, print results/errors + * - super errors, print nice errors + * - editor tooling, e.g. show type on hover + * + * In general it represent messages to show results or errors to the user. *) + +val parenthesized_ident : string -> bool [@@live] + +val setup : unit lazy_t [@@live] + +(* Needed for e.g. 
the playground to print typedtree data *) +val printOutTypeDoc : Outcometree.out_type -> Res_doc.t [@@live] +val printOutSigItemDoc : + ?printNameAsIs:bool -> Outcometree.out_sig_item -> Res_doc.t + [@@live] diff --git a/res_syntax/src/res_parens.ml b/res_syntax/src/res_parens.ml new file mode 100644 index 0000000000..ad034b59bb --- /dev/null +++ b/res_syntax/src/res_parens.ml @@ -0,0 +1,451 @@ +module ParsetreeViewer = Res_parsetree_viewer +type kind = Parenthesized | Braced of Location.t | Nothing + +let expr expr = + let optBraces, _ = ParsetreeViewer.processBracesAttr expr in + match optBraces with + | Some ({Location.loc = bracesLoc}, _) -> Braced bracesLoc + | _ -> ( + match expr with + | { + Parsetree.pexp_desc = + Pexp_constraint ({pexp_desc = Pexp_pack _}, {ptyp_desc = Ptyp_package _}); + } -> + Nothing + | {pexp_desc = Pexp_constraint _} -> Parenthesized + | _ -> Nothing) + +let exprRecordRowRhs e = + let kind = expr e in + match kind with + | Nothing when Res_parsetree_viewer.hasOptionalAttribute e.pexp_attributes + -> ( + match e.pexp_desc with + | Pexp_ifthenelse _ | Pexp_fun _ -> Parenthesized + | _ -> kind) + | _ -> kind + +let callExpr expr = + let optBraces, _ = ParsetreeViewer.processBracesAttr expr in + match optBraces with + | Some ({Location.loc = bracesLoc}, _) -> Braced bracesLoc + | _ -> ( + match expr with + | {Parsetree.pexp_attributes = attrs} + when match ParsetreeViewer.filterParsingAttrs attrs with + | _ :: _ -> true + | [] -> false -> + Parenthesized + | _ + when ParsetreeViewer.isUnaryExpression expr + || ParsetreeViewer.isBinaryExpression expr -> + Parenthesized + | { + Parsetree.pexp_desc = + Pexp_constraint ({pexp_desc = Pexp_pack _}, {ptyp_desc = Ptyp_package _}); + } -> + Nothing + | {pexp_desc = Pexp_fun _} when ParsetreeViewer.isUnderscoreApplySugar expr + -> + Nothing + | { + pexp_desc = + ( Pexp_lazy _ | Pexp_assert _ | Pexp_fun _ | Pexp_newtype _ + | Pexp_function _ | Pexp_constraint _ | Pexp_setfield _ | Pexp_match _ + | Pexp_try _ | Pexp_while _ | Pexp_for _ | Pexp_ifthenelse _ ); + } -> + Parenthesized + | _ when ParsetreeViewer.hasAwaitAttribute expr.pexp_attributes -> + Parenthesized + | _ -> Nothing) + +let structureExpr expr = + let optBraces, _ = ParsetreeViewer.processBracesAttr expr in + match optBraces with + | Some ({Location.loc = bracesLoc}, _) -> Braced bracesLoc + | None -> ( + match expr with + | _ + when ParsetreeViewer.hasAttributes expr.pexp_attributes + && not (ParsetreeViewer.isJsxExpression expr) -> + Parenthesized + | { + Parsetree.pexp_desc = + Pexp_constraint ({pexp_desc = Pexp_pack _}, {ptyp_desc = Ptyp_package _}); + } -> + Nothing + | {pexp_desc = Pexp_constraint _} -> Parenthesized + | _ -> Nothing) + +let unaryExprOperand expr = + let optBraces, _ = ParsetreeViewer.processBracesAttr expr in + match optBraces with + | Some ({Location.loc = bracesLoc}, _) -> Braced bracesLoc + | None -> ( + match expr with + | {Parsetree.pexp_attributes = attrs} + when match ParsetreeViewer.filterParsingAttrs attrs with + | _ :: _ -> true + | [] -> false -> + Parenthesized + | expr + when ParsetreeViewer.isUnaryExpression expr + || ParsetreeViewer.isBinaryExpression expr -> + Parenthesized + | { + pexp_desc = + Pexp_constraint ({pexp_desc = Pexp_pack _}, {ptyp_desc = Ptyp_package _}); + } -> + Nothing + | {pexp_desc = Pexp_fun _} when ParsetreeViewer.isUnderscoreApplySugar expr + -> + Nothing + | { + pexp_desc = + ( Pexp_lazy _ | Pexp_assert _ | Pexp_fun _ | Pexp_newtype _ + | Pexp_function _ | Pexp_constraint _ | Pexp_setfield _ + | 
Pexp_extension _ (* readability? maybe remove *) | Pexp_match _ + | Pexp_try _ | Pexp_while _ | Pexp_for _ | Pexp_ifthenelse _ ); + } -> + Parenthesized + | _ when ParsetreeViewer.hasAwaitAttribute expr.pexp_attributes -> + Parenthesized + | _ -> Nothing) + +let binaryExprOperand ~isLhs expr = + let optBraces, _ = ParsetreeViewer.processBracesAttr expr in + match optBraces with + | Some ({Location.loc = bracesLoc}, _) -> Braced bracesLoc + | None -> ( + match expr with + | { + Parsetree.pexp_desc = + Pexp_constraint ({pexp_desc = Pexp_pack _}, {ptyp_desc = Ptyp_package _}); + } -> + Nothing + | {pexp_desc = Pexp_fun _} when ParsetreeViewer.isUnderscoreApplySugar expr + -> + Nothing + | { + pexp_desc = + Pexp_constraint _ | Pexp_fun _ | Pexp_function _ | Pexp_newtype _; + } -> + Parenthesized + | expr when ParsetreeViewer.isBinaryExpression expr -> Parenthesized + | expr when ParsetreeViewer.isTernaryExpr expr -> Parenthesized + | {pexp_desc = Pexp_lazy _ | Pexp_assert _} when isLhs -> Parenthesized + | _ when ParsetreeViewer.hasAwaitAttribute expr.pexp_attributes -> + Parenthesized + | {Parsetree.pexp_attributes = attrs} -> + if ParsetreeViewer.hasPrintableAttributes attrs then Parenthesized + else Nothing) + +let subBinaryExprOperand parentOperator childOperator = + let precParent = ParsetreeViewer.operatorPrecedence parentOperator in + let precChild = ParsetreeViewer.operatorPrecedence childOperator in + precParent > precChild + || precParent == precChild + && not (ParsetreeViewer.flattenableOperators parentOperator childOperator) + || (* a && b || c, add parens to (a && b) for readability, who knows the difference by heart… *) + (parentOperator = "||" && childOperator = "&&") + +let rhsBinaryExprOperand parentOperator rhs = + match rhs.Parsetree.pexp_desc with + | Parsetree.Pexp_apply + ( { + pexp_attributes = []; + pexp_desc = + Pexp_ident {txt = Longident.Lident operator; loc = operatorLoc}; + }, + [(_, _left); (_, _right)] ) + when ParsetreeViewer.isBinaryOperator operator + && not (operatorLoc.loc_ghost && operator = "^") -> + let precParent = ParsetreeViewer.operatorPrecedence parentOperator in + let precChild = ParsetreeViewer.operatorPrecedence operator in + precParent == precChild + | _ -> false + +let flattenOperandRhs parentOperator rhs = + match rhs.Parsetree.pexp_desc with + | Parsetree.Pexp_apply + ( { + pexp_desc = + Pexp_ident {txt = Longident.Lident operator; loc = operatorLoc}; + }, + [(_, _left); (_, _right)] ) + when ParsetreeViewer.isBinaryOperator operator + && not (operatorLoc.loc_ghost && operator = "^") -> + let precParent = ParsetreeViewer.operatorPrecedence parentOperator in + let precChild = ParsetreeViewer.operatorPrecedence operator in + precParent >= precChild || rhs.pexp_attributes <> [] + | Pexp_constraint ({pexp_desc = Pexp_pack _}, {ptyp_desc = Ptyp_package _}) -> + false + | Pexp_fun _ when ParsetreeViewer.isUnderscoreApplySugar rhs -> false + | Pexp_fun _ | Pexp_newtype _ | Pexp_setfield _ | Pexp_constraint _ -> true + | _ when ParsetreeViewer.isTernaryExpr rhs -> true + | _ -> false + +let binaryOperatorInsideAwaitNeedsParens operator = + ParsetreeViewer.operatorPrecedence operator + < ParsetreeViewer.operatorPrecedence "|." 
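+(* Consequence of the check above for [lazyOrAssertOrAwaitExprRhs] below: an
+ * operand whose operator binds looser than `|.` (for example `x + y`) keeps
+ * its parentheses under `await` and is printed as `await (x + y)`, while
+ * operators binding at least as tightly as `|.` need no parentheses there. *)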
+ +let lazyOrAssertOrAwaitExprRhs ?(inAwait = false) expr = + let optBraces, _ = ParsetreeViewer.processBracesAttr expr in + match optBraces with + | Some ({Location.loc = bracesLoc}, _) -> Braced bracesLoc + | None -> ( + match expr with + | {Parsetree.pexp_attributes = attrs} + when match ParsetreeViewer.filterParsingAttrs attrs with + | _ :: _ -> true + | [] -> false -> + Parenthesized + | { + pexp_desc = + Pexp_apply ({pexp_desc = Pexp_ident {txt = Longident.Lident operator}}, _); + } + when ParsetreeViewer.isBinaryExpression expr -> + if inAwait && not (binaryOperatorInsideAwaitNeedsParens operator) then + Nothing + else Parenthesized + | { + pexp_desc = + Pexp_constraint ({pexp_desc = Pexp_pack _}, {ptyp_desc = Ptyp_package _}); + } -> + Nothing + | {pexp_desc = Pexp_fun _} when ParsetreeViewer.isUnderscoreApplySugar expr + -> + Nothing + | { + pexp_desc = + ( Pexp_lazy _ | Pexp_assert _ | Pexp_fun _ | Pexp_newtype _ + | Pexp_function _ | Pexp_constraint _ | Pexp_setfield _ | Pexp_match _ + | Pexp_try _ | Pexp_while _ | Pexp_for _ | Pexp_ifthenelse _ ); + } -> + Parenthesized + | _ + when (not inAwait) + && ParsetreeViewer.hasAwaitAttribute expr.pexp_attributes -> + Parenthesized + | _ -> Nothing) + +let isNegativeConstant constant = + let isNeg txt = + let len = String.length txt in + len > 0 && (String.get [@doesNotRaise]) txt 0 = '-' + in + match constant with + | (Parsetree.Pconst_integer (i, _) | Pconst_float (i, _)) when isNeg i -> true + | _ -> false + +let fieldExpr expr = + let optBraces, _ = ParsetreeViewer.processBracesAttr expr in + match optBraces with + | Some ({Location.loc = bracesLoc}, _) -> Braced bracesLoc + | None -> ( + match expr with + | {Parsetree.pexp_attributes = attrs} + when match ParsetreeViewer.filterParsingAttrs attrs with + | _ :: _ -> true + | [] -> false -> + Parenthesized + | expr + when ParsetreeViewer.isBinaryExpression expr + || ParsetreeViewer.isUnaryExpression expr -> + Parenthesized + | { + pexp_desc = + Pexp_constraint ({pexp_desc = Pexp_pack _}, {ptyp_desc = Ptyp_package _}); + } -> + Nothing + | {pexp_desc = Pexp_constant c} when isNegativeConstant c -> Parenthesized + | {pexp_desc = Pexp_fun _} when ParsetreeViewer.isUnderscoreApplySugar expr + -> + Nothing + | { + pexp_desc = + ( Pexp_lazy _ | Pexp_assert _ + | Pexp_extension _ (* %extension.x vs (%extension).x *) | Pexp_fun _ + | Pexp_newtype _ | Pexp_function _ | Pexp_constraint _ | Pexp_setfield _ + | Pexp_match _ | Pexp_try _ | Pexp_while _ | Pexp_for _ + | Pexp_ifthenelse _ ); + } -> + Parenthesized + | _ when ParsetreeViewer.hasAwaitAttribute expr.pexp_attributes -> + Parenthesized + | _ -> Nothing) + +let setFieldExprRhs expr = + let optBraces, _ = ParsetreeViewer.processBracesAttr expr in + match optBraces with + | Some ({Location.loc = bracesLoc}, _) -> Braced bracesLoc + | None -> ( + match expr with + | { + Parsetree.pexp_desc = + Pexp_constraint ({pexp_desc = Pexp_pack _}, {ptyp_desc = Ptyp_package _}); + } -> + Nothing + | {pexp_desc = Pexp_constraint _} -> Parenthesized + | _ -> Nothing) + +let ternaryOperand expr = + let optBraces, _ = ParsetreeViewer.processBracesAttr expr in + match optBraces with + | Some ({Location.loc = bracesLoc}, _) -> Braced bracesLoc + | None -> ( + match expr with + | { + Parsetree.pexp_desc = + Pexp_constraint ({pexp_desc = Pexp_pack _}, {ptyp_desc = Ptyp_package _}); + } -> + Nothing + | {pexp_desc = Pexp_constraint _} -> Parenthesized + | {pexp_desc = Pexp_fun _ | Pexp_newtype _} -> ( + let _attrsOnArrow, _parameters, returnExpr = + 
ParsetreeViewer.funExpr expr + in + match returnExpr.pexp_desc with + | Pexp_constraint _ -> Parenthesized + | _ -> Nothing) + | _ -> Nothing) + +let startsWithMinus txt = + let len = String.length txt in + if len == 0 then false + else + let s = (String.get [@doesNotRaise]) txt 0 in + s = '-' + +let jsxPropExpr expr = + match expr.Parsetree.pexp_desc with + | Parsetree.Pexp_let _ | Pexp_sequence _ | Pexp_letexception _ + | Pexp_letmodule _ | Pexp_open _ -> + Nothing + | _ -> ( + let optBraces, _ = ParsetreeViewer.processBracesAttr expr in + match optBraces with + | Some ({Location.loc = bracesLoc}, _) -> Braced bracesLoc + | None -> ( + match expr with + | { + Parsetree.pexp_desc = + Pexp_constant (Pconst_integer (x, _) | Pconst_float (x, _)); + pexp_attributes = []; + } + when startsWithMinus x -> + Parenthesized + | _ when ParsetreeViewer.hasAwaitAttribute expr.pexp_attributes -> + Parenthesized + | { + Parsetree.pexp_desc = + ( Pexp_ident _ | Pexp_constant _ | Pexp_field _ | Pexp_construct _ + | Pexp_variant _ | Pexp_array _ | Pexp_pack _ | Pexp_record _ + | Pexp_extension _ | Pexp_letmodule _ | Pexp_letexception _ + | Pexp_open _ | Pexp_sequence _ | Pexp_let _ | Pexp_tuple _ ); + pexp_attributes = []; + } -> + Nothing + | { + Parsetree.pexp_desc = + Pexp_constraint + ({pexp_desc = Pexp_pack _}, {ptyp_desc = Ptyp_package _}); + pexp_attributes = []; + } -> + Nothing + | _ -> Parenthesized)) + +let jsxChildExpr expr = + match expr.Parsetree.pexp_desc with + | Parsetree.Pexp_let _ | Pexp_sequence _ | Pexp_letexception _ + | Pexp_letmodule _ | Pexp_open _ -> + Nothing + | _ -> ( + let optBraces, _ = ParsetreeViewer.processBracesAttr expr in + match optBraces with + | Some ({Location.loc = bracesLoc}, _) -> Braced bracesLoc + | _ -> ( + match expr with + | { + Parsetree.pexp_desc = + Pexp_constant (Pconst_integer (x, _) | Pconst_float (x, _)); + pexp_attributes = []; + } + when startsWithMinus x -> + Parenthesized + | _ when ParsetreeViewer.hasAwaitAttribute expr.pexp_attributes -> + Parenthesized + | { + Parsetree.pexp_desc = + ( Pexp_ident _ | Pexp_constant _ | Pexp_field _ | Pexp_construct _ + | Pexp_variant _ | Pexp_array _ | Pexp_pack _ | Pexp_record _ + | Pexp_extension _ | Pexp_letmodule _ | Pexp_letexception _ + | Pexp_open _ | Pexp_sequence _ | Pexp_let _ ); + pexp_attributes = []; + } -> + Nothing + | { + Parsetree.pexp_desc = + Pexp_constraint + ({pexp_desc = Pexp_pack _}, {ptyp_desc = Ptyp_package _}); + pexp_attributes = []; + } -> + Nothing + | expr when ParsetreeViewer.isJsxExpression expr -> Nothing + | _ -> Parenthesized)) + +let binaryExpr expr = + let optBraces, _ = ParsetreeViewer.processBracesAttr expr in + match optBraces with + | Some ({Location.loc = bracesLoc}, _) -> Braced bracesLoc + | None -> ( + match expr with + | {Parsetree.pexp_attributes = _ :: _} as expr + when ParsetreeViewer.isBinaryExpression expr -> + Parenthesized + | _ -> Nothing) + +let modTypeFunctorReturn modType = + match modType with + | {Parsetree.pmty_desc = Pmty_with _} -> true + | _ -> false + +(* Add parens for readability: + module type Functor = SetLike => Set with type t = A.t + This is actually: + module type Functor = (SetLike => Set) with type t = A.t +*) +let modTypeWithOperand modType = + match modType with + | {Parsetree.pmty_desc = Pmty_functor _ | Pmty_with _} -> true + | _ -> false + +let modExprFunctorConstraint modType = + match modType with + | {Parsetree.pmty_desc = Pmty_functor _ | Pmty_with _} -> true + | _ -> false + +let bracedExpr expr = + match expr.Parsetree.pexp_desc 
with + | Pexp_constraint ({pexp_desc = Pexp_pack _}, {ptyp_desc = Ptyp_package _}) -> + false + | Pexp_constraint _ -> true + | _ -> false + +let includeModExpr modExpr = + match modExpr.Parsetree.pmod_desc with + | Parsetree.Pmod_constraint _ -> true + | _ -> false + +let arrowReturnTypExpr typExpr = + match typExpr.Parsetree.ptyp_desc with + | Parsetree.Ptyp_arrow _ -> true + | _ -> false + +let patternRecordRowRhs (pattern : Parsetree.pattern) = + match pattern.ppat_desc with + | Ppat_constraint ({ppat_desc = Ppat_unpack _}, {ptyp_desc = Ptyp_package _}) + -> + false + | Ppat_constraint _ -> true + | _ -> false diff --git a/res_syntax/src/res_parens.mli b/res_syntax/src/res_parens.mli new file mode 100644 index 0000000000..9b60b815f1 --- /dev/null +++ b/res_syntax/src/res_parens.mli @@ -0,0 +1,39 @@ +type kind = Parenthesized | Braced of Location.t | Nothing + +val expr : Parsetree.expression -> kind +val structureExpr : Parsetree.expression -> kind + +val unaryExprOperand : Parsetree.expression -> kind + +val binaryExprOperand : isLhs:bool -> Parsetree.expression -> kind +val subBinaryExprOperand : string -> string -> bool +val rhsBinaryExprOperand : string -> Parsetree.expression -> bool +val flattenOperandRhs : string -> Parsetree.expression -> bool + +val binaryOperatorInsideAwaitNeedsParens : string -> bool +val lazyOrAssertOrAwaitExprRhs : ?inAwait:bool -> Parsetree.expression -> kind + +val fieldExpr : Parsetree.expression -> kind + +val setFieldExprRhs : Parsetree.expression -> kind + +val ternaryOperand : Parsetree.expression -> kind + +val jsxPropExpr : Parsetree.expression -> kind +val jsxChildExpr : Parsetree.expression -> kind + +val binaryExpr : Parsetree.expression -> kind +val modTypeFunctorReturn : Parsetree.module_type -> bool +val modTypeWithOperand : Parsetree.module_type -> bool +val modExprFunctorConstraint : Parsetree.module_type -> bool + +val bracedExpr : Parsetree.expression -> bool +val callExpr : Parsetree.expression -> kind + +val includeModExpr : Parsetree.module_expr -> bool + +val arrowReturnTypExpr : Parsetree.core_type -> bool + +val patternRecordRowRhs : Parsetree.pattern -> bool + +val exprRecordRowRhs : Parsetree.expression -> kind diff --git a/res_syntax/src/res_parser.ml b/res_syntax/src/res_parser.ml new file mode 100644 index 0000000000..9fcdc3c5c4 --- /dev/null +++ b/res_syntax/src/res_parser.ml @@ -0,0 +1,189 @@ +module Scanner = Res_scanner +module Diagnostics = Res_diagnostics +module Token = Res_token +module Grammar = Res_grammar +module Reporting = Res_reporting + +module Comment = Res_comment + +type mode = ParseForTypeChecker | Default + +type regionStatus = Report | Silent + +type t = { + mode: mode; + mutable scanner: Scanner.t; + mutable token: Token.t; + mutable startPos: Lexing.position; + mutable endPos: Lexing.position; + mutable prevEndPos: Lexing.position; + mutable breadcrumbs: (Grammar.t * Lexing.position) list; + mutable errors: Reporting.parseError list; + mutable diagnostics: Diagnostics.t list; + mutable comments: Comment.t list; + mutable regions: regionStatus ref list; +} + +let err ?startPos ?endPos p error = + match p.regions with + | ({contents = Report} as region) :: _ -> + let d = + Diagnostics.make + ~startPos: + (match startPos with + | Some pos -> pos + | None -> p.startPos) + ~endPos: + (match endPos with + | Some pos -> pos + | None -> p.endPos) + error + in + p.diagnostics <- d :: p.diagnostics; + region := Silent + | _ -> () + +let beginRegion p = p.regions <- ref Report :: p.regions +let endRegion p = + 
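+  (* Pop the innermost region opened by [beginRegion]; [err] above records at
+     most one diagnostic per region before switching that region to Silent. *)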
match p.regions with + | [] -> () + | _ :: rest -> p.regions <- rest + +let docCommentToAttributeToken comment = + let txt = Comment.txt comment in + let loc = Comment.loc comment in + Token.DocComment (loc, txt) + +let moduleCommentToAttributeToken comment = + let txt = Comment.txt comment in + let loc = Comment.loc comment in + Token.ModuleComment (loc, txt) + +(* Advance to the next non-comment token and store any encountered comment + * in the parser's state. Every comment contains the end position of its + * previous token to facilite comment interleaving *) +let rec next ?prevEndPos p = + if p.token = Eof then assert false; + let prevEndPos = + match prevEndPos with + | Some pos -> pos + | None -> p.endPos + in + let startPos, endPos, token = Scanner.scan p.scanner in + match token with + | Comment c -> + if Comment.isDocComment c then ( + p.token <- docCommentToAttributeToken c; + p.prevEndPos <- prevEndPos; + p.startPos <- startPos; + p.endPos <- endPos) + else if Comment.isModuleComment c then ( + p.token <- moduleCommentToAttributeToken c; + p.prevEndPos <- prevEndPos; + p.startPos <- startPos; + p.endPos <- endPos) + else ( + Comment.setPrevTokEndPos c p.endPos; + p.comments <- c :: p.comments; + p.prevEndPos <- p.endPos; + p.endPos <- endPos; + next ~prevEndPos p) + | _ -> + p.token <- token; + p.prevEndPos <- prevEndPos; + p.startPos <- startPos; + p.endPos <- endPos + +let nextUnsafe p = if p.token <> Eof then next p + +let nextTemplateLiteralToken p = + let startPos, endPos, token = Scanner.scanTemplateLiteralToken p.scanner in + p.token <- token; + p.prevEndPos <- p.endPos; + p.startPos <- startPos; + p.endPos <- endPos + +let checkProgress ~prevEndPos ~result p = + if p.endPos == prevEndPos then None else Some result + +let make ?(mode = ParseForTypeChecker) src filename = + let scanner = Scanner.make ~filename src in + let parserState = + { + mode; + scanner; + token = Token.Semicolon; + startPos = Lexing.dummy_pos; + prevEndPos = Lexing.dummy_pos; + endPos = Lexing.dummy_pos; + breadcrumbs = []; + errors = []; + diagnostics = []; + comments = []; + regions = [ref Report]; + } + in + parserState.scanner.err <- + (fun ~startPos ~endPos error -> + let diagnostic = Diagnostics.make ~startPos ~endPos error in + parserState.diagnostics <- diagnostic :: parserState.diagnostics); + next parserState; + parserState + +let leaveBreadcrumb p circumstance = + let crumb = (circumstance, p.startPos) in + p.breadcrumbs <- crumb :: p.breadcrumbs + +let eatBreadcrumb p = + match p.breadcrumbs with + | [] -> () + | _ :: crumbs -> p.breadcrumbs <- crumbs + +let optional p token = + if p.token = token then + let () = next p in + true + else false + +let expect ?grammar token p = + if p.token = token then next p + else + let error = Diagnostics.expected ?grammar p.prevEndPos token in + err ~startPos:p.prevEndPos p error + +(* Don't use immutable copies here, it trashes certain heuristics + * in the ocaml compiler, resulting in massive slowdowns of the parser *) +let lookahead p callback = + let err = p.scanner.err in + let ch = p.scanner.ch in + let offset = p.scanner.offset in + let lineOffset = p.scanner.lineOffset in + let lnum = p.scanner.lnum in + let mode = p.scanner.mode in + let token = p.token in + let startPos = p.startPos in + let endPos = p.endPos in + let prevEndPos = p.prevEndPos in + let breadcrumbs = p.breadcrumbs in + let errors = p.errors in + let diagnostics = p.diagnostics in + let comments = p.comments in + + let res = callback p in + + p.scanner.err <- err; + 
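+  (* ... and restore the remaining scanner and parser fields, so the
+     speculative parse run by [callback] leaves no observable state behind. *)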
p.scanner.ch <- ch; + p.scanner.offset <- offset; + p.scanner.lineOffset <- lineOffset; + p.scanner.lnum <- lnum; + p.scanner.mode <- mode; + p.token <- token; + p.startPos <- startPos; + p.endPos <- endPos; + p.prevEndPos <- prevEndPos; + p.breadcrumbs <- breadcrumbs; + p.errors <- errors; + p.diagnostics <- diagnostics; + p.comments <- comments; + + res diff --git a/res_syntax/src/res_parser.mli b/res_syntax/src/res_parser.mli new file mode 100644 index 0000000000..09b0b455f7 --- /dev/null +++ b/res_syntax/src/res_parser.mli @@ -0,0 +1,47 @@ +module Scanner = Res_scanner +module Token = Res_token +module Grammar = Res_grammar +module Reporting = Res_reporting +module Diagnostics = Res_diagnostics +module Comment = Res_comment + +type mode = ParseForTypeChecker | Default + +type regionStatus = Report | Silent + +type t = { + mode: mode; + mutable scanner: Scanner.t; + mutable token: Token.t; + mutable startPos: Lexing.position; + mutable endPos: Lexing.position; + mutable prevEndPos: Lexing.position; + mutable breadcrumbs: (Grammar.t * Lexing.position) list; + mutable errors: Reporting.parseError list; + mutable diagnostics: Diagnostics.t list; + mutable comments: Comment.t list; + mutable regions: regionStatus ref list; +} + +val make : ?mode:mode -> string -> string -> t + +val expect : ?grammar:Grammar.t -> Token.t -> t -> unit +val optional : t -> Token.t -> bool +val next : ?prevEndPos:Lexing.position -> t -> unit +val nextUnsafe : t -> unit (* Does not assert on Eof, makes no progress *) +val nextTemplateLiteralToken : t -> unit +val lookahead : t -> (t -> 'a) -> 'a +val err : + ?startPos:Lexing.position -> + ?endPos:Lexing.position -> + t -> + Diagnostics.category -> + unit + +val leaveBreadcrumb : t -> Grammar.t -> unit +val eatBreadcrumb : t -> unit + +val beginRegion : t -> unit +val endRegion : t -> unit + +val checkProgress : prevEndPos:Lexing.position -> result:'a -> t -> 'a option diff --git a/res_syntax/src/res_parsetree_viewer.ml b/res_syntax/src/res_parsetree_viewer.ml new file mode 100644 index 0000000000..7ab2a37303 --- /dev/null +++ b/res_syntax/src/res_parsetree_viewer.ml @@ -0,0 +1,691 @@ +open Parsetree + +let arrowType ct = + let rec process attrsBefore acc typ = + match typ with + | { + ptyp_desc = Ptyp_arrow ((Nolabel as lbl), typ1, typ2); + ptyp_attributes = []; + } -> + let arg = ([], lbl, typ1) in + process attrsBefore (arg :: acc) typ2 + | { + ptyp_desc = Ptyp_arrow (Nolabel, _typ1, _typ2); + ptyp_attributes = [({txt = "bs"}, _)]; + } -> + (* stop here, the uncurried attribute always indicates the beginning of an arrow function + * e.g. `(. int) => (. int)` instead of `(. int, . 
int)` *) + (attrsBefore, List.rev acc, typ) + | {ptyp_desc = Ptyp_arrow (Nolabel, _typ1, _typ2); ptyp_attributes = _attrs} + as returnType -> + let args = List.rev acc in + (attrsBefore, args, returnType) + | { + ptyp_desc = Ptyp_arrow (((Labelled _ | Optional _) as lbl), typ1, typ2); + ptyp_attributes = attrs; + } -> + let arg = (attrs, lbl, typ1) in + process attrsBefore (arg :: acc) typ2 + | typ -> (attrsBefore, List.rev acc, typ) + in + match ct with + | {ptyp_desc = Ptyp_arrow (Nolabel, _typ1, _typ2); ptyp_attributes = attrs} as + typ -> + process attrs [] {typ with ptyp_attributes = []} + | typ -> process [] [] typ + +let functorType modtype = + let rec process acc modtype = + match modtype with + | { + pmty_desc = Pmty_functor (lbl, argType, returnType); + pmty_attributes = attrs; + } -> + let arg = (attrs, lbl, argType) in + process (arg :: acc) returnType + | modType -> (List.rev acc, modType) + in + process [] modtype + +let processUncurriedAttribute attrs = + let rec process uncurriedSpotted acc attrs = + match attrs with + | [] -> (uncurriedSpotted, List.rev acc) + | ({Location.txt = "bs"}, _) :: rest -> process true acc rest + | attr :: rest -> process uncurriedSpotted (attr :: acc) rest + in + process false [] attrs + +type functionAttributesInfo = { + async: bool; + uncurried: bool; + attributes: Parsetree.attributes; +} + +let processFunctionAttributes attrs = + let rec process async uncurried acc attrs = + match attrs with + | [] -> {async; uncurried; attributes = List.rev acc} + | ({Location.txt = "bs"}, _) :: rest -> process async true acc rest + | ({Location.txt = "res.async"}, _) :: rest -> + process true uncurried acc rest + | attr :: rest -> process async uncurried (attr :: acc) rest + in + process false false [] attrs + +let hasAwaitAttribute attrs = + List.exists + (function + | {Location.txt = "res.await"}, _ -> true + | _ -> false) + attrs + +let collectListExpressions expr = + let rec collect acc expr = + match expr.pexp_desc with + | Pexp_construct ({txt = Longident.Lident "[]"}, _) -> (List.rev acc, None) + | Pexp_construct + ( {txt = Longident.Lident "::"}, + Some {pexp_desc = Pexp_tuple (hd :: [tail])} ) -> + collect (hd :: acc) tail + | _ -> (List.rev acc, Some expr) + in + collect [] expr + +(* (__x) => f(a, __x, c) -----> f(a, _, c) *) +let rewriteUnderscoreApply expr = + match expr.pexp_desc with + | Pexp_fun + ( Nolabel, + None, + {ppat_desc = Ppat_var {txt = "__x"}}, + ({pexp_desc = Pexp_apply (callExpr, args)} as e) ) -> + let newArgs = + List.map + (fun arg -> + match arg with + | ( lbl, + ({pexp_desc = Pexp_ident ({txt = Longident.Lident "__x"} as lid)} + as argExpr) ) -> + ( lbl, + { + argExpr with + pexp_desc = Pexp_ident {lid with txt = Longident.Lident "_"}; + } ) + | arg -> arg) + args + in + {e with pexp_desc = Pexp_apply (callExpr, newArgs)} + | _ -> expr + +type funParamKind = + | Parameter of { + attrs: Parsetree.attributes; + lbl: Asttypes.arg_label; + defaultExpr: Parsetree.expression option; + pat: Parsetree.pattern; + } + | NewTypes of {attrs: Parsetree.attributes; locs: string Asttypes.loc list} + +let funExpr expr = + (* Turns (type t, type u, type z) into "type t u z" *) + let rec collectNewTypes acc returnExpr = + match returnExpr with + | {pexp_desc = Pexp_newtype (stringLoc, returnExpr); pexp_attributes = []} + -> + collectNewTypes (stringLoc :: acc) returnExpr + | returnExpr -> (List.rev acc, returnExpr) + in + let rec collect n attrsBefore acc expr = + match expr with + | { + pexp_desc = + Pexp_fun + ( Nolabel, + None, + 
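+          (* This arm recognizes the underscore-apply sugar: the parser encodes
+           * `foo(a, _, c)` as `(__x) => foo(a, __x, c)`, and
+           * rewriteUnderscoreApply above restores the original `_` argument
+           * when printing. *)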
{ppat_desc = Ppat_var {txt = "__x"}}, + {pexp_desc = Pexp_apply _} ); + } -> + (attrsBefore, List.rev acc, rewriteUnderscoreApply expr) + | { + pexp_desc = Pexp_fun (lbl, defaultExpr, pattern, returnExpr); + pexp_attributes = []; + } -> + let parameter = Parameter {attrs = []; lbl; defaultExpr; pat = pattern} in + collect (n + 1) attrsBefore (parameter :: acc) returnExpr + | {pexp_desc = Pexp_newtype (stringLoc, rest); pexp_attributes = attrs} -> + let stringLocs, returnExpr = collectNewTypes [stringLoc] rest in + let param = NewTypes {attrs; locs = stringLocs} in + collect (n + 1) attrsBefore (param :: acc) returnExpr + | {pexp_desc = Pexp_fun _; pexp_attributes} + when pexp_attributes + |> List.exists (fun ({Location.txt}, _) -> + txt = "bs" || txt = "res.async") + && n > 0 -> + (* stop here, the uncurried or async attribute always indicates the beginning of an arrow function + * e.g. `(. a) => (. b)` instead of `(. a, . b)` *) + (attrsBefore, List.rev acc, expr) + | { + pexp_desc = + Pexp_fun + (((Labelled _ | Optional _) as lbl), defaultExpr, pattern, returnExpr); + pexp_attributes = attrs; + } -> + (* Normally attributes are attached to the labelled argument, e.g. (@foo ~x) => ... + In the case of `@res.async`, pass the attribute to the outside *) + let attrs_async, attrs_other = + attrs |> List.partition (fun ({Location.txt}, _) -> txt = "res.async") + in + let parameter = + Parameter {attrs = attrs_other; lbl; defaultExpr; pat = pattern} + in + collect (n + 1) (attrs_async @ attrsBefore) (parameter :: acc) returnExpr + | expr -> (attrsBefore, List.rev acc, expr) + in + match expr with + | { + pexp_desc = Pexp_fun (Nolabel, _defaultExpr, _pattern, _returnExpr); + pexp_attributes = attrs; + } as expr -> + collect 0 attrs [] {expr with pexp_attributes = []} + | expr -> collect 0 [] [] expr + +let processBracesAttr expr = + match expr.pexp_attributes with + | (({txt = "ns.braces"}, _) as attr) :: attrs -> + (Some attr, {expr with pexp_attributes = attrs}) + | _ -> (None, expr) + +let filterParsingAttrs attrs = + List.filter + (fun attr -> + match attr with + | ( { + Location.txt = + ( "bs" | "ns.braces" | "ns.iflet" | "ns.namedArgLoc" + | "ns.optional" | "ns.ternary" | "res.async" | "res.await" + | "res.template" ); + }, + _ ) -> + false + | _ -> true) + attrs + +let isBlockExpr expr = + match expr.pexp_desc with + | Pexp_letmodule _ | Pexp_letexception _ | Pexp_let _ | Pexp_open _ + | Pexp_sequence _ -> + true + | _ -> false + +let isBracedExpr expr = + match processBracesAttr expr with + | Some _, _ -> true + | _ -> false + +let isMultilineText txt = + let len = String.length txt in + let rec check i = + if i >= len then false + else + let c = String.unsafe_get txt i in + match c with + | '\010' | '\013' -> true + | '\\' -> if i + 2 = len then false else check (i + 2) + | _ -> check (i + 1) + in + check 0 + +let isHuggableExpression expr = + match expr.pexp_desc with + | Pexp_array _ | Pexp_tuple _ + | Pexp_constant (Pconst_string (_, Some _)) + | Pexp_construct ({txt = Longident.Lident ("::" | "[]")}, _) + | Pexp_extension ({txt = "bs.obj" | "obj"}, _) + | Pexp_record _ -> + true + | _ when isBlockExpr expr -> true + | _ when isBracedExpr expr -> true + | Pexp_constant (Pconst_string (txt, None)) when isMultilineText txt -> true + | _ -> false + +let isHuggableRhs expr = + match expr.pexp_desc with + | Pexp_array _ | Pexp_tuple _ + | Pexp_extension ({txt = "bs.obj" | "obj"}, _) + | Pexp_record _ -> + true + | _ when isBracedExpr expr -> true + | _ -> false + +let isHuggablePattern 
pattern = + match pattern.ppat_desc with + | Ppat_array _ | Ppat_tuple _ | Ppat_record _ | Ppat_variant _ + | Ppat_construct _ -> + true + | _ -> false + +let operatorPrecedence operator = + match operator with + | ":=" -> 1 + | "||" -> 2 + | "&&" -> 3 + | "=" | "==" | "<" | ">" | "!=" | "<>" | "!==" | "<=" | ">=" | "|>" -> 4 + | "+" | "+." | "-" | "-." | "^" -> 5 + | "*" | "*." | "/" | "/." -> 6 + | "**" -> 7 + | "#" | "##" | "|." -> 8 + | _ -> 0 + +let isUnaryOperator operator = + match operator with + | "~+" | "~+." | "~-" | "~-." | "not" -> true + | _ -> false + +let isUnaryExpression expr = + match expr.pexp_desc with + | Pexp_apply + ( {pexp_desc = Pexp_ident {txt = Longident.Lident operator}}, + [(Nolabel, _arg)] ) + when isUnaryOperator operator -> + true + | _ -> false + +(* TODO: tweak this to check for ghost ^ as template literal *) +let isBinaryOperator operator = + match operator with + | ":=" | "||" | "&&" | "=" | "==" | "<" | ">" | "!=" | "!==" | "<=" | ">=" + | "|>" | "+" | "+." | "-" | "-." | "^" | "*" | "*." | "/" | "/." | "**" | "|." + | "<>" -> + true + | _ -> false + +let isBinaryExpression expr = + match expr.pexp_desc with + | Pexp_apply + ( { + pexp_desc = + Pexp_ident {txt = Longident.Lident operator; loc = operatorLoc}; + }, + [(Nolabel, _operand1); (Nolabel, _operand2)] ) + when isBinaryOperator operator + && not (operatorLoc.loc_ghost && operator = "^") + (* template literal *) -> + true + | _ -> false + +let isEqualityOperator operator = + match operator with + | "=" | "==" | "<>" | "!=" -> true + | _ -> false + +let flattenableOperators parentOperator childOperator = + let precParent = operatorPrecedence parentOperator in + let precChild = operatorPrecedence childOperator in + if precParent == precChild then + not (isEqualityOperator parentOperator && isEqualityOperator childOperator) + else false + +let rec hasIfLetAttribute attrs = + match attrs with + | [] -> false + | ({Location.txt = "ns.iflet"}, _) :: _ -> true + | _ :: attrs -> hasIfLetAttribute attrs + +let isIfLetExpr expr = + match expr with + | {pexp_attributes = attrs; pexp_desc = Pexp_match _} + when hasIfLetAttribute attrs -> + true + | _ -> false + +let rec hasOptionalAttribute attrs = + match attrs with + | [] -> false + | ({Location.txt = "ns.optional"}, _) :: _ -> true + | _ :: attrs -> hasOptionalAttribute attrs + +let hasAttributes attrs = + List.exists + (fun attr -> + match attr with + | ( { + Location.txt = + ( "bs" | "ns.braces" | "ns.iflet" | "ns.ternary" | "res.async" + | "res.await" | "res.template" ); + }, + _ ) -> + false + (* Remove the fragile pattern warning for iflet expressions *) + | ( {Location.txt = "warning"}, + PStr + [ + { + pstr_desc = + Pstr_eval + ({pexp_desc = Pexp_constant (Pconst_string ("-4", None))}, _); + }; + ] ) -> + not (hasIfLetAttribute attrs) + | _ -> true) + attrs + +let isArrayAccess expr = + match expr.pexp_desc with + | Pexp_apply + ( {pexp_desc = Pexp_ident {txt = Longident.Ldot (Lident "Array", "get")}}, + [(Nolabel, _parentExpr); (Nolabel, _memberExpr)] ) -> + true + | _ -> false + +type ifConditionKind = + | If of Parsetree.expression + | IfLet of Parsetree.pattern * Parsetree.expression + +let collectIfExpressions expr = + let rec collect acc expr = + let exprLoc = expr.pexp_loc in + match expr.pexp_desc with + | Pexp_ifthenelse (ifExpr, thenExpr, Some elseExpr) -> + collect ((exprLoc, If ifExpr, thenExpr) :: acc) elseExpr + | Pexp_ifthenelse (ifExpr, thenExpr, (None as elseExpr)) -> + let ifs = List.rev ((exprLoc, If ifExpr, thenExpr) :: acc) in 
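+      (* No final else branch: return the flattened if / else-if chain
+       * together with None so the printer emits no trailing else block. *)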
+ (ifs, elseExpr) + | Pexp_match + ( condition, + [ + {pc_lhs = pattern; pc_guard = None; pc_rhs = thenExpr}; + { + pc_rhs = + {pexp_desc = Pexp_construct ({txt = Longident.Lident "()"}, _)}; + }; + ] ) + when isIfLetExpr expr -> + let ifs = + List.rev ((exprLoc, IfLet (pattern, condition), thenExpr) :: acc) + in + (ifs, None) + | Pexp_match + ( condition, + [ + {pc_lhs = pattern; pc_guard = None; pc_rhs = thenExpr}; + {pc_rhs = elseExpr}; + ] ) + when isIfLetExpr expr -> + collect ((exprLoc, IfLet (pattern, condition), thenExpr) :: acc) elseExpr + | _ -> (List.rev acc, Some expr) + in + collect [] expr + +let rec hasTernaryAttribute attrs = + match attrs with + | [] -> false + | ({Location.txt = "ns.ternary"}, _) :: _ -> true + | _ :: attrs -> hasTernaryAttribute attrs + +let isTernaryExpr expr = + match expr with + | {pexp_attributes = attrs; pexp_desc = Pexp_ifthenelse _} + when hasTernaryAttribute attrs -> + true + | _ -> false + +let collectTernaryParts expr = + let rec collect acc expr = + match expr with + | { + pexp_attributes = attrs; + pexp_desc = Pexp_ifthenelse (condition, consequent, Some alternate); + } + when hasTernaryAttribute attrs -> + collect ((condition, consequent) :: acc) alternate + | alternate -> (List.rev acc, alternate) + in + collect [] expr + +let parametersShouldHug parameters = + match parameters with + | [Parameter {attrs = []; lbl = Asttypes.Nolabel; defaultExpr = None; pat}] + when isHuggablePattern pat -> + true + | _ -> false + +let filterTernaryAttributes attrs = + List.filter + (fun attr -> + match attr with + | {Location.txt = "ns.ternary"}, _ -> false + | _ -> true) + attrs + +let filterFragileMatchAttributes attrs = + List.filter + (fun attr -> + match attr with + | ( {Location.txt = "warning"}, + PStr + [ + { + pstr_desc = + Pstr_eval + ({pexp_desc = Pexp_constant (Pconst_string ("-4", _))}, _); + }; + ] ) -> + false + | _ -> true) + attrs + +let isJsxExpression expr = + let rec loop attrs = + match attrs with + | [] -> false + | ({Location.txt = "JSX"}, _) :: _ -> true + | _ :: attrs -> loop attrs + in + match expr.pexp_desc with + | Pexp_apply _ -> loop expr.Parsetree.pexp_attributes + | _ -> false + +let hasJsxAttribute attributes = + let rec loop attrs = + match attrs with + | [] -> false + | ({Location.txt = "JSX"}, _) :: _ -> true + | _ :: attrs -> loop attrs + in + loop attributes + +let shouldIndentBinaryExpr expr = + let samePrecedenceSubExpression operator subExpression = + match subExpression with + | { + pexp_desc = + Pexp_apply + ( {pexp_desc = Pexp_ident {txt = Longident.Lident subOperator}}, + [(Nolabel, _lhs); (Nolabel, _rhs)] ); + } + when isBinaryOperator subOperator -> + flattenableOperators operator subOperator + | _ -> true + in + match expr with + | { + pexp_desc = + Pexp_apply + ( {pexp_desc = Pexp_ident {txt = Longident.Lident operator}}, + [(Nolabel, lhs); (Nolabel, _rhs)] ); + } + when isBinaryOperator operator -> + isEqualityOperator operator + || (not (samePrecedenceSubExpression operator lhs)) + || operator = ":=" + | _ -> false + +let shouldInlineRhsBinaryExpr rhs = + match rhs.pexp_desc with + | Parsetree.Pexp_constant _ | Pexp_let _ | Pexp_letmodule _ + | Pexp_letexception _ | Pexp_sequence _ | Pexp_open _ | Pexp_ifthenelse _ + | Pexp_for _ | Pexp_while _ | Pexp_try _ | Pexp_array _ | Pexp_record _ -> + true + | _ -> false + +let isPrintableAttribute attr = + match attr with + | ( { + Location.txt = + ( "bs" | "ns.iflet" | "ns.braces" | "JSX" | "res.async" | "res.await" + | "res.template" | "ns.ternary" ); + }, + _ ) 
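+      (* attributes that only carry parsing/printing bookkeeping are not
+         printed back as attributes *)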
-> + false + | _ -> true + +let hasPrintableAttributes attrs = List.exists isPrintableAttribute attrs + +let filterPrintableAttributes attrs = List.filter isPrintableAttribute attrs + +let partitionPrintableAttributes attrs = + List.partition isPrintableAttribute attrs + +let requiresSpecialCallbackPrintingLastArg args = + let rec loop args = + match args with + | [] -> false + | [(_, {pexp_desc = Pexp_fun _ | Pexp_newtype _})] -> true + | (_, {pexp_desc = Pexp_fun _ | Pexp_newtype _}) :: _ -> false + | _ :: rest -> loop rest + in + loop args + +let requiresSpecialCallbackPrintingFirstArg args = + let rec loop args = + match args with + | [] -> true + | (_, {pexp_desc = Pexp_fun _ | Pexp_newtype _}) :: _ -> false + | _ :: rest -> loop rest + in + match args with + | [(_, {pexp_desc = Pexp_fun _ | Pexp_newtype _})] -> false + | (_, {pexp_desc = Pexp_fun _ | Pexp_newtype _}) :: rest -> loop rest + | _ -> false + +let modExprApply modExpr = + let rec loop acc modExpr = + match modExpr with + | {pmod_desc = Pmod_apply (next, arg)} -> loop (arg :: acc) next + | _ -> (acc, modExpr) + in + loop [] modExpr + +let modExprFunctor modExpr = + let rec loop acc modExpr = + match modExpr with + | { + pmod_desc = Pmod_functor (lbl, modType, returnModExpr); + pmod_attributes = attrs; + } -> + let param = (attrs, lbl, modType) in + loop (param :: acc) returnModExpr + | returnModExpr -> (List.rev acc, returnModExpr) + in + loop [] modExpr + +let rec collectPatternsFromListConstruct acc pattern = + let open Parsetree in + match pattern.ppat_desc with + | Ppat_construct + ({txt = Longident.Lident "::"}, Some {ppat_desc = Ppat_tuple [pat; rest]}) + -> + collectPatternsFromListConstruct (pat :: acc) rest + | _ -> (List.rev acc, pattern) + +let hasTemplateLiteralAttr attrs = + List.exists + (fun attr -> + match attr with + | {Location.txt = "res.template"}, _ -> true + | _ -> false) + attrs + +let isTemplateLiteral expr = + match expr.pexp_desc with + | Pexp_apply + ( {pexp_desc = Pexp_ident {txt = Longident.Lident "^"}}, + [(Nolabel, _); (Nolabel, _)] ) + when hasTemplateLiteralAttr expr.pexp_attributes -> + true + | Pexp_constant (Pconst_string (_, Some "")) -> true + | Pexp_constant _ when hasTemplateLiteralAttr expr.pexp_attributes -> true + | _ -> false + +let hasSpreadAttr attrs = + List.exists + (fun attr -> + match attr with + | {Location.txt = "res.spread"}, _ -> true + | _ -> false) + attrs + +let isSpreadBeltListConcat expr = + match expr.pexp_desc with + | Pexp_ident + { + txt = + Longident.Ldot + (Longident.Ldot (Longident.Lident "Belt", "List"), "concatMany"); + } -> + hasSpreadAttr expr.pexp_attributes + | _ -> false + +(* Blue | Red | Green -> [Blue; Red; Green] *) +let collectOrPatternChain pat = + let rec loop pattern chain = + match pattern.ppat_desc with + | Ppat_or (left, right) -> loop left (right :: chain) + | _ -> pattern :: chain + in + loop pat [] + +let isSinglePipeExpr expr = + (* handles: + * x + * ->Js.Dict.get("wm-property") + * ->Option.flatMap(Js.Json.decodeString) + * ->Option.flatMap(x => + * switch x { + * | "like-of" => Some(#like) + * | "repost-of" => Some(#repost) + * | _ => None + * } + * ) + *) + let isPipeExpr expr = + match expr.pexp_desc with + | Pexp_apply + ( {pexp_desc = Pexp_ident {txt = Longident.Lident ("|." | "|>")}}, + [(Nolabel, _operand1); (Nolabel, _operand2)] ) -> + true + | _ -> false + in + match expr.pexp_desc with + | Pexp_apply + ( {pexp_desc = Pexp_ident {txt = Longident.Lident ("|." 
| "|>")}}, + [(Nolabel, operand1); (Nolabel, _operand2)] ) + when not (isPipeExpr operand1) -> + true + | _ -> false + +let isUnderscoreApplySugar expr = + match expr.pexp_desc with + | Pexp_fun + ( Nolabel, + None, + {ppat_desc = Ppat_var {txt = "__x"}}, + {pexp_desc = Pexp_apply _} ) -> + true + | _ -> false + +let isRewrittenUnderscoreApplySugar expr = + match expr.pexp_desc with + | Pexp_ident {txt = Longident.Lident "_"} -> true + | _ -> false diff --git a/res_syntax/src/res_parsetree_viewer.mli b/res_syntax/src/res_parsetree_viewer.mli new file mode 100644 index 0000000000..abed6a3105 --- /dev/null +++ b/res_syntax/src/res_parsetree_viewer.mli @@ -0,0 +1,156 @@ +(* Restructures a nested tree of arrow types into its args & returnType + * The parsetree contains: a => b => c => d, for printing purposes + * we restructure the tree into (a, b, c) and its returnType d *) +val arrowType : + Parsetree.core_type -> + Parsetree.attributes + * (Parsetree.attributes * Asttypes.arg_label * Parsetree.core_type) list + * Parsetree.core_type + +val functorType : + Parsetree.module_type -> + (Parsetree.attributes * string Asttypes.loc * Parsetree.module_type option) + list + * Parsetree.module_type + +(* filters @bs out of the provided attributes *) +val processUncurriedAttribute : + Parsetree.attributes -> bool * Parsetree.attributes + +type functionAttributesInfo = { + async: bool; + uncurried: bool; + attributes: Parsetree.attributes; +} + +(* determines whether a function is async and/or uncurried based on the given attributes *) +val processFunctionAttributes : Parsetree.attributes -> functionAttributesInfo + +val hasAwaitAttribute : Parsetree.attributes -> bool + +type ifConditionKind = + | If of Parsetree.expression + | IfLet of Parsetree.pattern * Parsetree.expression + +(* if ... else if ... else ... is represented as nested expressions: if ... else { if ... } + * The purpose of this function is to flatten nested ifs into one sequence. 
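+ * e.g. (illustrative) `if a {1} else if b {2} else {3}` is collected as the
+ * branch list for a and b together with `Some` of the final else body.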
+ * Basically compute: ([if, else if, else if, else if], else) *) +val collectIfExpressions : + Parsetree.expression -> + (Location.t * ifConditionKind * Parsetree.expression) list + * Parsetree.expression option + +val collectListExpressions : + Parsetree.expression -> + Parsetree.expression list * Parsetree.expression option + +type funParamKind = + | Parameter of { + attrs: Parsetree.attributes; + lbl: Asttypes.arg_label; + defaultExpr: Parsetree.expression option; + pat: Parsetree.pattern; + } + | NewTypes of {attrs: Parsetree.attributes; locs: string Asttypes.loc list} + +val funExpr : + Parsetree.expression -> + Parsetree.attributes * funParamKind list * Parsetree.expression + +(* example: + * `makeCoordinate({ + * x: 1, + * y: 2, + * })` + * Notice howe `({` and `})` "hug" or stick to each other *) +val isHuggableExpression : Parsetree.expression -> bool + +val isHuggablePattern : Parsetree.pattern -> bool + +val isHuggableRhs : Parsetree.expression -> bool + +val operatorPrecedence : string -> int + +val isUnaryExpression : Parsetree.expression -> bool +val isBinaryOperator : string -> bool +val isBinaryExpression : Parsetree.expression -> bool + +val flattenableOperators : string -> string -> bool + +val hasAttributes : Parsetree.attributes -> bool + +val isArrayAccess : Parsetree.expression -> bool +val isTernaryExpr : Parsetree.expression -> bool +val isIfLetExpr : Parsetree.expression -> bool + +val collectTernaryParts : + Parsetree.expression -> + (Parsetree.expression * Parsetree.expression) list * Parsetree.expression + +val parametersShouldHug : funParamKind list -> bool + +val filterTernaryAttributes : Parsetree.attributes -> Parsetree.attributes +val filterFragileMatchAttributes : Parsetree.attributes -> Parsetree.attributes + +val isJsxExpression : Parsetree.expression -> bool +val hasJsxAttribute : Parsetree.attributes -> bool +val hasOptionalAttribute : Parsetree.attributes -> bool + +val shouldIndentBinaryExpr : Parsetree.expression -> bool +val shouldInlineRhsBinaryExpr : Parsetree.expression -> bool +val hasPrintableAttributes : Parsetree.attributes -> bool +val filterPrintableAttributes : Parsetree.attributes -> Parsetree.attributes +val partitionPrintableAttributes : + Parsetree.attributes -> Parsetree.attributes * Parsetree.attributes + +val requiresSpecialCallbackPrintingLastArg : + (Asttypes.arg_label * Parsetree.expression) list -> bool +val requiresSpecialCallbackPrintingFirstArg : + (Asttypes.arg_label * Parsetree.expression) list -> bool + +val modExprApply : + Parsetree.module_expr -> Parsetree.module_expr list * Parsetree.module_expr + +(* Collection of utilities to view the ast in a more a convenient form, + * allowing for easier processing. + * Example: given a ptyp_arrow type, what are its arguments and what is the + * returnType? 
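+ * (Illustrative: the curried type `a => b => c` is viewed as the argument
+ * types a and b plus the return type c; see arrowType above.)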
*) + +val modExprFunctor : + Parsetree.module_expr -> + (Parsetree.attributes * string Asttypes.loc * Parsetree.module_type option) + list + * Parsetree.module_expr + +val collectPatternsFromListConstruct : + Parsetree.pattern list -> + Parsetree.pattern -> + Parsetree.pattern list * Parsetree.pattern + +val isBlockExpr : Parsetree.expression -> bool + +val isTemplateLiteral : Parsetree.expression -> bool +val hasTemplateLiteralAttr : Parsetree.attributes -> bool + +val isSpreadBeltListConcat : Parsetree.expression -> bool + +val collectOrPatternChain : Parsetree.pattern -> Parsetree.pattern list + +val processBracesAttr : + Parsetree.expression -> Parsetree.attribute option * Parsetree.expression + +val filterParsingAttrs : Parsetree.attributes -> Parsetree.attributes + +val isBracedExpr : Parsetree.expression -> bool + +val isSinglePipeExpr : Parsetree.expression -> bool + +(* (__x) => f(a, __x, c) -----> f(a, _, c) *) +val rewriteUnderscoreApply : Parsetree.expression -> Parsetree.expression + +(* (__x) => f(a, __x, c) -----> f(a, _, c) *) +val isUnderscoreApplySugar : Parsetree.expression -> bool + +val hasIfLetAttribute : Parsetree.attributes -> bool + +val isRewrittenUnderscoreApplySugar : Parsetree.expression -> bool diff --git a/res_syntax/src/res_printer.ml b/res_syntax/src/res_printer.ml new file mode 100644 index 0000000000..7c4efa2349 --- /dev/null +++ b/res_syntax/src/res_printer.ml @@ -0,0 +1,5661 @@ +module Doc = Res_doc +module CommentTable = Res_comments_table +module Comment = Res_comment +module Token = Res_token +module Parens = Res_parens +module ParsetreeViewer = Res_parsetree_viewer + +type callbackStyle = + (* regular arrow function, example: `let f = x => x + 1` *) + | NoCallback + (* `Thing.map(foo, (arg1, arg2) => MyModuleBlah.toList(argument))` *) + | FitsOnOneLine + (* Thing.map(longArgumet, veryLooooongArgument, (arg1, arg2) => + * MyModuleBlah.toList(argument) + * ) + *) + | ArgumentsFitOnOneLine + +(* Since compiler version 8.3, the bs. prefix is no longer needed *) +(* Synced from + https://github.com/rescript-lang/rescript-compiler/blob/29174de1a5fde3b16cf05d10f5ac109cfac5c4ca/jscomp/frontend/ast_external_process.ml#L291-L367 *) +let convertBsExternalAttribute = function + | "bs.as" -> "as" + | "bs.deriving" -> "deriving" + | "bs.get" -> "get" + | "bs.get_index" -> "get_index" + | "bs.ignore" -> "ignore" + | "bs.inline" -> "inline" + | "bs.int" -> "int" + | "bs.meth" -> "meth" + | "bs.module" -> "module" + | "bs.new" -> "new" + | "bs.obj" -> "obj" + | "bs.optional" -> "optional" + | "bs.return" -> "return" + | "bs.send" -> "send" + | "bs.scope" -> "scope" + | "bs.set" -> "set" + | "bs.set_index" -> "set_index" + | "bs.splice" | "bs.variadic" -> "variadic" + | "bs.string" -> "string" + | "bs.this" -> "this" + | "bs.uncurry" -> "uncurry" + | "bs.unwrap" -> "unwrap" + | "bs.val" -> "val" + (* bs.send.pipe shouldn't be transformed *) + | txt -> txt + +(* These haven't been needed for a long time now *) +(* Synced from + https://github.com/rescript-lang/rescript-compiler/blob/29174de1a5fde3b16cf05d10f5ac109cfac5c4ca/jscomp/frontend/ast_exp_extension.ml *) +let convertBsExtension = function + | "bs.debugger" -> "debugger" + | "bs.external" -> "raw" + (* We should never see this one since we use the sugared object form, but still *) + | "bs.obj" -> "obj" + | "bs.raw" -> "raw" + | "bs.re" -> "re" + (* TODO: what about bs.time and bs.node? 
*) + | txt -> txt + +let addParens doc = + Doc.group + (Doc.concat + [ + Doc.lparen; + Doc.indent (Doc.concat [Doc.softLine; doc]); + Doc.softLine; + Doc.rparen; + ]) + +let addBraces doc = + Doc.group + (Doc.concat + [ + Doc.lbrace; + Doc.indent (Doc.concat [Doc.softLine; doc]); + Doc.softLine; + Doc.rbrace; + ]) + +let addAsync doc = Doc.concat [Doc.text "async "; doc] + +let getFirstLeadingComment tbl loc = + match Hashtbl.find tbl.CommentTable.leading loc with + | comment :: _ -> Some comment + | [] -> None + | exception Not_found -> None + +(* Checks if `loc` has a leading line comment, i.e. `// comment above`*) +let hasLeadingLineComment tbl loc = + match getFirstLeadingComment tbl loc with + | Some comment -> Comment.isSingleLineComment comment + | None -> false + +let hasCommentBelow tbl loc = + match Hashtbl.find tbl.CommentTable.trailing loc with + | comment :: _ -> + let commentLoc = Comment.loc comment in + commentLoc.Location.loc_start.pos_lnum > loc.Location.loc_end.pos_lnum + | [] -> false + | exception Not_found -> false + +let hasNestedJsxOrMoreThanOneChild expr = + let rec loop inRecursion expr = + match expr.Parsetree.pexp_desc with + | Pexp_construct + ({txt = Longident.Lident "::"}, Some {pexp_desc = Pexp_tuple [hd; tail]}) + -> + if inRecursion || ParsetreeViewer.isJsxExpression hd then true + else loop true tail + | _ -> false + in + loop false expr + +let hasCommentsInside tbl loc = + match Hashtbl.find_opt tbl.CommentTable.inside loc with + | None -> false + | _ -> true + +let hasTrailingComments tbl loc = + match Hashtbl.find_opt tbl.CommentTable.trailing loc with + | None -> false + | _ -> true + +let printMultilineCommentContent txt = + (* Turns + * |* first line + * * second line + * * third line *| + * Into + * |* first line + * * second line + * * third line *| + * + * What makes a comment suitable for this kind of indentation? 
+ * -> multiple lines + every line starts with a star + *) + let rec indentStars lines acc = + match lines with + | [] -> Doc.nil + | [lastLine] -> + let line = String.trim lastLine in + let doc = Doc.text (" " ^ line) in + let trailingSpace = if line = "" then Doc.nil else Doc.space in + List.rev (trailingSpace :: doc :: acc) |> Doc.concat + | line :: lines -> + let line = String.trim line in + if line != "" && String.unsafe_get line 0 == '*' then + let doc = Doc.text (" " ^ line) in + indentStars lines (Doc.hardLine :: doc :: acc) + else + let trailingSpace = + let len = String.length txt in + if len > 0 && String.unsafe_get txt (len - 1) = ' ' then Doc.space + else Doc.nil + in + let content = Comment.trimSpaces txt in + Doc.concat [Doc.text content; trailingSpace] + in + let lines = String.split_on_char '\n' txt in + match lines with + | [] -> Doc.text "/* */" + | [line] -> + Doc.concat + [Doc.text "/* "; Doc.text (Comment.trimSpaces line); Doc.text " */"] + | first :: rest -> + let firstLine = Comment.trimSpaces first in + Doc.concat + [ + Doc.text "/*"; + (match firstLine with + | "" | "*" -> Doc.nil + | _ -> Doc.space); + indentStars rest [Doc.hardLine; Doc.text firstLine]; + Doc.text "*/"; + ] + +let printTrailingComment (prevLoc : Location.t) (nodeLoc : Location.t) comment = + let singleLine = Comment.isSingleLineComment comment in + let content = + let txt = Comment.txt comment in + if singleLine then Doc.text ("//" ^ txt) + else printMultilineCommentContent txt + in + let diff = + let cmtStart = (Comment.loc comment).loc_start in + cmtStart.pos_lnum - prevLoc.loc_end.pos_lnum + in + let isBelow = + (Comment.loc comment).loc_start.pos_lnum > nodeLoc.loc_end.pos_lnum + in + if diff > 0 || isBelow then + Doc.concat + [ + Doc.breakParent; + Doc.lineSuffix + (Doc.concat + [ + Doc.hardLine; + (if diff > 1 then Doc.hardLine else Doc.nil); + content; + ]); + ] + else if not singleLine then Doc.concat [Doc.space; content] + else Doc.lineSuffix (Doc.concat [Doc.space; content]) + +let printLeadingComment ?nextComment comment = + let singleLine = Comment.isSingleLineComment comment in + let content = + let txt = Comment.txt comment in + if singleLine then Doc.text ("//" ^ txt) + else printMultilineCommentContent txt + in + let separator = + Doc.concat + [ + (if singleLine then Doc.concat [Doc.hardLine; Doc.breakParent] + else Doc.nil); + (match nextComment with + | Some next -> + let nextLoc = Comment.loc next in + let currLoc = Comment.loc comment in + let diff = + nextLoc.Location.loc_start.pos_lnum + - currLoc.Location.loc_end.pos_lnum + in + let nextSingleLine = Comment.isSingleLineComment next in + if singleLine && nextSingleLine then + if diff > 1 then Doc.hardLine else Doc.nil + else if singleLine && not nextSingleLine then + if diff > 1 then Doc.hardLine else Doc.nil + else if diff > 1 then Doc.concat [Doc.hardLine; Doc.hardLine] + else if diff == 1 then Doc.hardLine + else Doc.space + | None -> Doc.nil); + ] + in + Doc.concat [content; separator] + +(* This function is used for printing comments inside an empty block *) +let printCommentsInside cmtTbl loc = + let printComment comment = + let singleLine = Comment.isSingleLineComment comment in + let txt = Comment.txt comment in + if singleLine then Doc.text ("//" ^ txt) + else printMultilineCommentContent txt + in + let forceBreak = + loc.Location.loc_start.pos_lnum <> loc.Location.loc_end.pos_lnum + in + let rec loop acc comments = + match comments with + | [] -> Doc.nil + | [comment] -> + let cmtDoc = printComment comment in + 
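+      (* Last comment reached: lay the collected comments out between the
+       * enclosing delimiters, indenting them when the group breaks. This is
+       * how comments written inside an otherwise empty block, e.g. an empty
+       * signature or record declaration, survive reprinting. *)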
let cmtsDoc = Doc.concat (Doc.softLine :: List.rev (cmtDoc :: acc)) in + let doc = + Doc.breakableGroup ~forceBreak + (Doc.concat [Doc.ifBreaks (Doc.indent cmtsDoc) cmtsDoc; Doc.softLine]) + in + doc + | comment :: rest -> + let cmtDoc = Doc.concat [printComment comment; Doc.line] in + loop (cmtDoc :: acc) rest + in + match Hashtbl.find cmtTbl.CommentTable.inside loc with + | exception Not_found -> Doc.nil + | comments -> + Hashtbl.remove cmtTbl.inside loc; + loop [] comments + +(* This function is used for printing comments inside an empty file *) +let printCommentsInsideFile cmtTbl = + let rec loop acc comments = + match comments with + | [] -> Doc.nil + | [comment] -> + let cmtDoc = printLeadingComment comment in + let doc = + Doc.group (Doc.concat [Doc.concat (List.rev (cmtDoc :: acc))]) + in + doc + | comment :: (nextComment :: _comments as rest) -> + let cmtDoc = printLeadingComment ~nextComment comment in + loop (cmtDoc :: acc) rest + in + match Hashtbl.find cmtTbl.CommentTable.inside Location.none with + | exception Not_found -> Doc.nil + | comments -> + Hashtbl.remove cmtTbl.inside Location.none; + Doc.group (loop [] comments) + +let printLeadingComments node tbl loc = + let rec loop acc comments = + match comments with + | [] -> node + | [comment] -> + let cmtDoc = printLeadingComment comment in + let diff = + loc.Location.loc_start.pos_lnum + - (Comment.loc comment).Location.loc_end.pos_lnum + in + let separator = + if Comment.isSingleLineComment comment then + if diff > 1 then Doc.hardLine else Doc.nil + else if diff == 0 then Doc.space + else if diff > 1 then Doc.concat [Doc.hardLine; Doc.hardLine] + else Doc.hardLine + in + let doc = + Doc.group + (Doc.concat [Doc.concat (List.rev (cmtDoc :: acc)); separator; node]) + in + doc + | comment :: (nextComment :: _comments as rest) -> + let cmtDoc = printLeadingComment ~nextComment comment in + loop (cmtDoc :: acc) rest + in + match Hashtbl.find tbl loc with + | exception Not_found -> node + | comments -> + (* Remove comments from tbl: Some ast nodes have the same location. + * We only want to print comments once *) + Hashtbl.remove tbl loc; + loop [] comments + +let printTrailingComments node tbl loc = + let rec loop prev acc comments = + match comments with + | [] -> Doc.concat (List.rev acc) + | comment :: comments -> + let cmtDoc = printTrailingComment prev loc comment in + loop (Comment.loc comment) (cmtDoc :: acc) comments + in + match Hashtbl.find tbl loc with + | exception Not_found -> node + | [] -> node + | _first :: _ as comments -> + (* Remove comments from tbl: Some ast nodes have the same location. 
+ * We only want to print comments once *) + Hashtbl.remove tbl loc; + let cmtsDoc = loop loc [] comments in + Doc.concat [node; cmtsDoc] + +let printComments doc (tbl : CommentTable.t) loc = + let docWithLeadingComments = printLeadingComments doc tbl.leading loc in + printTrailingComments docWithLeadingComments tbl.trailing loc + +let printList ~getLoc ~nodes ~print ?(forceBreak = false) t = + let rec loop (prevLoc : Location.t) acc nodes = + match nodes with + | [] -> (prevLoc, Doc.concat (List.rev acc)) + | node :: nodes -> + let loc = getLoc node in + let startPos = + match getFirstLeadingComment t loc with + | None -> loc.loc_start + | Some comment -> (Comment.loc comment).loc_start + in + let sep = + if startPos.pos_lnum - prevLoc.loc_end.pos_lnum > 1 then + Doc.concat [Doc.hardLine; Doc.hardLine] + else Doc.hardLine + in + let doc = printComments (print node t) t loc in + loop loc (doc :: sep :: acc) nodes + in + match nodes with + | [] -> Doc.nil + | node :: nodes -> + let firstLoc = getLoc node in + let doc = printComments (print node t) t firstLoc in + let lastLoc, docs = loop firstLoc [doc] nodes in + let forceBreak = + forceBreak || firstLoc.loc_start.pos_lnum != lastLoc.loc_end.pos_lnum + in + Doc.breakableGroup ~forceBreak docs + +let printListi ~getLoc ~nodes ~print ?(forceBreak = false) t = + let rec loop i (prevLoc : Location.t) acc nodes = + match nodes with + | [] -> (prevLoc, Doc.concat (List.rev acc)) + | node :: nodes -> + let loc = getLoc node in + let startPos = + match getFirstLeadingComment t loc with + | None -> loc.loc_start + | Some comment -> (Comment.loc comment).loc_start + in + let sep = + if startPos.pos_lnum - prevLoc.loc_end.pos_lnum > 1 then + Doc.concat [Doc.hardLine; Doc.hardLine] + else Doc.line + in + let doc = printComments (print node t i) t loc in + loop (i + 1) loc (doc :: sep :: acc) nodes + in + match nodes with + | [] -> Doc.nil + | node :: nodes -> + let firstLoc = getLoc node in + let doc = printComments (print node t 0) t firstLoc in + let lastLoc, docs = loop 1 firstLoc [doc] nodes in + let forceBreak = + forceBreak || firstLoc.loc_start.pos_lnum != lastLoc.loc_end.pos_lnum + in + Doc.breakableGroup ~forceBreak docs + +let rec printLongidentAux accu = function + | Longident.Lident s -> Doc.text s :: accu + | Ldot (lid, s) -> printLongidentAux (Doc.text s :: accu) lid + | Lapply (lid1, lid2) -> + let d1 = Doc.join ~sep:Doc.dot (printLongidentAux [] lid1) in + let d2 = Doc.join ~sep:Doc.dot (printLongidentAux [] lid2) in + Doc.concat [d1; Doc.lparen; d2; Doc.rparen] :: accu + +let printLongident = function + | Longident.Lident txt -> Doc.text txt + | lid -> Doc.join ~sep:Doc.dot (printLongidentAux [] lid) + +type identifierStyle = ExoticIdent | NormalIdent + +let classifyIdentContent ?(allowUident = false) txt = + if Token.isKeywordTxt txt then ExoticIdent + else + let len = String.length txt in + let rec loop i = + if i == len then NormalIdent + else if i == 0 then + match String.unsafe_get txt i with + | 'A' .. 'Z' when allowUident -> loop (i + 1) + | 'a' .. 'z' | '_' -> loop (i + 1) + | _ -> ExoticIdent + else + match String.unsafe_get txt i with + | 'A' .. 'Z' | 'a' .. 'z' | '0' .. 
'9' | '\'' | '_' -> loop (i + 1) + | _ -> ExoticIdent + in + loop 0 + +let printIdentLike ?allowUident txt = + match classifyIdentContent ?allowUident txt with + | ExoticIdent -> Doc.concat [Doc.text "\\\""; Doc.text txt; Doc.text "\""] + | NormalIdent -> Doc.text txt + +let rec unsafe_for_all_range s ~start ~finish p = + start > finish + || p (String.unsafe_get s start) + && unsafe_for_all_range s ~start:(start + 1) ~finish p + +let for_all_from s start p = + let len = String.length s in + unsafe_for_all_range s ~start ~finish:(len - 1) p + +(* See https://github.com/rescript-lang/rescript-compiler/blob/726cfa534314b586e5b5734471bc2023ad99ebd9/jscomp/ext/ext_string.ml#L510 *) +let isValidNumericPolyvarNumber (x : string) = + let len = String.length x in + len > 0 + && + let a = Char.code (String.unsafe_get x 0) in + a <= 57 + && + if len > 1 then + a > 48 + && for_all_from x 1 (function + | '0' .. '9' -> true + | _ -> false) + else a >= 48 + +(* Exotic identifiers in poly-vars have a "lighter" syntax: #"ease-in" *) +let printPolyVarIdent txt = + (* numeric poly-vars don't need quotes: #644 *) + if isValidNumericPolyvarNumber txt then Doc.text txt + else + match classifyIdentContent ~allowUident:true txt with + | ExoticIdent -> Doc.concat [Doc.text "\""; Doc.text txt; Doc.text "\""] + | NormalIdent -> ( + match txt with + | "" -> Doc.concat [Doc.text "\""; Doc.text txt; Doc.text "\""] + | _ -> Doc.text txt) + +let printLident l = + let flatLidOpt lid = + let rec flat accu = function + | Longident.Lident s -> Some (s :: accu) + | Ldot (lid, s) -> flat (s :: accu) lid + | Lapply (_, _) -> None + in + flat [] lid + in + match l with + | Longident.Lident txt -> printIdentLike txt + | Longident.Ldot (path, txt) -> + let doc = + match flatLidOpt path with + | Some txts -> + Doc.concat + [ + Doc.join ~sep:Doc.dot (List.map Doc.text txts); + Doc.dot; + printIdentLike txt; + ] + | None -> Doc.text "printLident: Longident.Lapply is not supported" + in + doc + | Lapply (_, _) -> Doc.text "printLident: Longident.Lapply is not supported" + +let printLongidentLocation l cmtTbl = + let doc = printLongident l.Location.txt in + printComments doc cmtTbl l.loc + +(* Module.SubModule.x *) +let printLidentPath path cmtTbl = + let doc = printLident path.Location.txt in + printComments doc cmtTbl path.loc + +(* Module.SubModule.x or Module.SubModule.X *) +let printIdentPath path cmtTbl = + let doc = printLident path.Location.txt in + printComments doc cmtTbl path.loc + +let printStringLoc sloc cmtTbl = + let doc = printIdentLike sloc.Location.txt in + printComments doc cmtTbl sloc.loc + +let printStringContents txt = + let lines = String.split_on_char '\n' txt in + Doc.join ~sep:Doc.literalLine (List.map Doc.text lines) + +let printConstant ?(templateLiteral = false) c = + match c with + | Parsetree.Pconst_integer (s, suffix) -> ( + match suffix with + | Some c -> Doc.text (s ^ Char.escaped c) + | None -> Doc.text s) + | Pconst_string (txt, None) -> + Doc.concat [Doc.text "\""; printStringContents txt; Doc.text "\""] + | Pconst_string (txt, Some prefix) -> + if prefix = "INTERNAL_RES_CHAR_CONTENTS" then + Doc.concat [Doc.text "'"; Doc.text txt; Doc.text "'"] + else + let lquote, rquote = + if templateLiteral then ("`", "`") else ("\"", "\"") + in + Doc.concat + [ + (if prefix = "js" then Doc.nil else Doc.text prefix); + Doc.text lquote; + printStringContents txt; + Doc.text rquote; + ] + | Pconst_float (s, _) -> Doc.text s + | Pconst_char c -> + let str = + match Char.unsafe_chr c with + | '\'' -> "\\'" + | '\\' 
-> "\\\\" + | '\n' -> "\\n" + | '\t' -> "\\t" + | '\r' -> "\\r" + | '\b' -> "\\b" + | ' ' .. '~' as c -> + let s = (Bytes.create [@doesNotRaise]) 1 in + Bytes.unsafe_set s 0 c; + Bytes.unsafe_to_string s + | _ -> Res_utf8.encodeCodePoint c + in + Doc.text ("'" ^ str ^ "'") + +let printOptionalLabel attrs = + if Res_parsetree_viewer.hasOptionalAttribute attrs then Doc.text "?" + else Doc.nil + +let customLayoutThreshold = 2 + +let rec printStructure ~customLayout (s : Parsetree.structure) t = + match s with + | [] -> printCommentsInsideFile t + | structure -> + printList + ~getLoc:(fun s -> s.Parsetree.pstr_loc) + ~nodes:structure + ~print:(printStructureItem ~customLayout) + t + +and printStructureItem ~customLayout (si : Parsetree.structure_item) cmtTbl = + match si.pstr_desc with + | Pstr_value (rec_flag, valueBindings) -> + let recFlag = + match rec_flag with + | Asttypes.Nonrecursive -> Doc.nil + | Asttypes.Recursive -> Doc.text "rec " + in + printValueBindings ~customLayout ~recFlag valueBindings cmtTbl + | Pstr_type (recFlag, typeDeclarations) -> + let recFlag = + match recFlag with + | Asttypes.Nonrecursive -> Doc.nil + | Asttypes.Recursive -> Doc.text "rec " + in + printTypeDeclarations ~customLayout ~recFlag typeDeclarations cmtTbl + | Pstr_primitive valueDescription -> + printValueDescription ~customLayout valueDescription cmtTbl + | Pstr_eval (expr, attrs) -> + let exprDoc = + let doc = printExpressionWithComments ~customLayout expr cmtTbl in + match Parens.structureExpr expr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr braces + | Nothing -> doc + in + Doc.concat [printAttributes ~customLayout attrs cmtTbl; exprDoc] + | Pstr_attribute attr -> + fst (printAttribute ~customLayout ~standalone:true attr cmtTbl) + | Pstr_extension (extension, attrs) -> + Doc.concat + [ + printAttributes ~customLayout attrs cmtTbl; + Doc.concat + [printExtension ~customLayout ~atModuleLvl:true extension cmtTbl]; + ] + | Pstr_include includeDeclaration -> + printIncludeDeclaration ~customLayout includeDeclaration cmtTbl + | Pstr_open openDescription -> + printOpenDescription ~customLayout openDescription cmtTbl + | Pstr_modtype modTypeDecl -> + printModuleTypeDeclaration ~customLayout modTypeDecl cmtTbl + | Pstr_module moduleBinding -> + printModuleBinding ~customLayout ~isRec:false moduleBinding cmtTbl 0 + | Pstr_recmodule moduleBindings -> + printListi + ~getLoc:(fun mb -> mb.Parsetree.pmb_loc) + ~nodes:moduleBindings + ~print:(printModuleBinding ~customLayout ~isRec:true) + cmtTbl + | Pstr_exception extensionConstructor -> + printExceptionDef ~customLayout extensionConstructor cmtTbl + | Pstr_typext typeExtension -> + printTypeExtension ~customLayout typeExtension cmtTbl + | Pstr_class _ | Pstr_class_type _ -> Doc.nil + +and printTypeExtension ~customLayout (te : Parsetree.type_extension) cmtTbl = + let prefix = Doc.text "type " in + let name = printLidentPath te.ptyext_path cmtTbl in + let typeParams = printTypeParams ~customLayout te.ptyext_params cmtTbl in + let extensionConstructors = + let ecs = te.ptyext_constructors in + let forceBreak = + match (ecs, List.rev ecs) with + | first :: _, last :: _ -> + first.pext_loc.loc_start.pos_lnum > te.ptyext_path.loc.loc_end.pos_lnum + || first.pext_loc.loc_start.pos_lnum < last.pext_loc.loc_end.pos_lnum + | _ -> false + in + let privateFlag = + match te.ptyext_private with + | Asttypes.Private -> Doc.concat [Doc.text "private"; Doc.line] + | Public -> Doc.nil + in + let rows = + printListi + ~getLoc:(fun n -> 
n.Parsetree.pext_loc) + ~print:(printExtensionConstructor ~customLayout) + ~nodes:ecs ~forceBreak cmtTbl + in + Doc.breakableGroup ~forceBreak + (Doc.indent + (Doc.concat + [ + Doc.line; + privateFlag; + rows; + (* Doc.join ~sep:Doc.line ( *) + (* List.mapi printExtensionConstructor ecs *) + (* ) *) + ])) + in + Doc.group + (Doc.concat + [ + printAttributes ~customLayout ~loc:te.ptyext_path.loc + te.ptyext_attributes cmtTbl; + prefix; + name; + typeParams; + Doc.text " +="; + extensionConstructors; + ]) + +and printModuleBinding ~customLayout ~isRec moduleBinding cmtTbl i = + let prefix = + if i = 0 then + Doc.concat + [Doc.text "module "; (if isRec then Doc.text "rec " else Doc.nil)] + else Doc.text "and " + in + let modExprDoc, modConstraintDoc = + match moduleBinding.pmb_expr with + | {pmod_desc = Pmod_constraint (modExpr, modType)} -> + ( printModExpr ~customLayout modExpr cmtTbl, + Doc.concat [Doc.text ": "; printModType ~customLayout modType cmtTbl] ) + | modExpr -> (printModExpr ~customLayout modExpr cmtTbl, Doc.nil) + in + let modName = + let doc = Doc.text moduleBinding.pmb_name.Location.txt in + printComments doc cmtTbl moduleBinding.pmb_name.loc + in + let doc = + Doc.concat + [ + printAttributes ~customLayout ~loc:moduleBinding.pmb_name.loc + moduleBinding.pmb_attributes cmtTbl; + prefix; + modName; + modConstraintDoc; + Doc.text " = "; + modExprDoc; + ] + in + printComments doc cmtTbl moduleBinding.pmb_loc + +and printModuleTypeDeclaration ~customLayout + (modTypeDecl : Parsetree.module_type_declaration) cmtTbl = + let modName = + let doc = Doc.text modTypeDecl.pmtd_name.txt in + printComments doc cmtTbl modTypeDecl.pmtd_name.loc + in + Doc.concat + [ + printAttributes ~customLayout modTypeDecl.pmtd_attributes cmtTbl; + Doc.text "module type "; + modName; + (match modTypeDecl.pmtd_type with + | None -> Doc.nil + | Some modType -> + Doc.concat [Doc.text " = "; printModType ~customLayout modType cmtTbl]); + ] + +and printModType ~customLayout modType cmtTbl = + let modTypeDoc = + match modType.pmty_desc with + | Parsetree.Pmty_ident longident -> + Doc.concat + [ + printAttributes ~customLayout ~loc:longident.loc + modType.pmty_attributes cmtTbl; + printLongidentLocation longident cmtTbl; + ] + | Pmty_signature [] -> + if hasCommentsInside cmtTbl modType.pmty_loc then + let doc = printCommentsInside cmtTbl modType.pmty_loc in + Doc.concat [Doc.lbrace; doc; Doc.rbrace] + else + let shouldBreak = + modType.pmty_loc.loc_start.pos_lnum + < modType.pmty_loc.loc_end.pos_lnum + in + Doc.breakableGroup ~forceBreak:shouldBreak + (Doc.concat [Doc.lbrace; Doc.softLine; Doc.softLine; Doc.rbrace]) + | Pmty_signature signature -> + let signatureDoc = + Doc.breakableGroup ~forceBreak:true + (Doc.concat + [ + Doc.lbrace; + Doc.indent + (Doc.concat + [Doc.line; printSignature ~customLayout signature cmtTbl]); + Doc.line; + Doc.rbrace; + ]) + in + Doc.concat + [ + printAttributes ~customLayout modType.pmty_attributes cmtTbl; + signatureDoc; + ] + | Pmty_functor _ -> + let parameters, returnType = ParsetreeViewer.functorType modType in + let parametersDoc = + match parameters with + | [] -> Doc.nil + | [(attrs, {Location.txt = "_"; loc}, Some modType)] -> + let cmtLoc = + {loc with loc_end = modType.Parsetree.pmty_loc.loc_end} + in + let attrs = printAttributes ~customLayout attrs cmtTbl in + let doc = + Doc.concat [attrs; printModType ~customLayout modType cmtTbl] + in + printComments doc cmtTbl cmtLoc + | params -> + Doc.group + (Doc.concat + [ + Doc.lparen; + Doc.indent + (Doc.concat + [ + 
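+                   (* Lay the functor parameters out as a parenthesized,
+                    * comma-separated group that may break over several lines,
+                    * e.g. the `(X: S, Y: T)` part of a functor type. *)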
Doc.softLine; + Doc.join + ~sep:(Doc.concat [Doc.comma; Doc.line]) + (List.map + (fun (attrs, lbl, modType) -> + let cmtLoc = + match modType with + | None -> lbl.Asttypes.loc + | Some modType -> + { + lbl.Asttypes.loc with + loc_end = + modType.Parsetree.pmty_loc.loc_end; + } + in + let attrs = + printAttributes ~customLayout attrs cmtTbl + in + let lblDoc = + if lbl.Location.txt = "_" || lbl.txt = "*" then + Doc.nil + else + let doc = Doc.text lbl.txt in + printComments doc cmtTbl lbl.loc + in + let doc = + Doc.concat + [ + attrs; + lblDoc; + (match modType with + | None -> Doc.nil + | Some modType -> + Doc.concat + [ + (if lbl.txt = "_" then Doc.nil + else Doc.text ": "); + printModType ~customLayout modType + cmtTbl; + ]); + ] + in + printComments doc cmtTbl cmtLoc) + params); + ]); + Doc.trailingComma; + Doc.softLine; + Doc.rparen; + ]) + in + let returnDoc = + let doc = printModType ~customLayout returnType cmtTbl in + if Parens.modTypeFunctorReturn returnType then addParens doc else doc + in + Doc.group + (Doc.concat + [ + parametersDoc; + Doc.group (Doc.concat [Doc.text " =>"; Doc.line; returnDoc]); + ]) + | Pmty_typeof modExpr -> + Doc.concat + [Doc.text "module type of "; printModExpr ~customLayout modExpr cmtTbl] + | Pmty_extension extension -> + printExtension ~customLayout ~atModuleLvl:false extension cmtTbl + | Pmty_alias longident -> + Doc.concat [Doc.text "module "; printLongidentLocation longident cmtTbl] + | Pmty_with (modType, withConstraints) -> + let operand = + let doc = printModType ~customLayout modType cmtTbl in + if Parens.modTypeWithOperand modType then addParens doc else doc + in + Doc.group + (Doc.concat + [ + operand; + Doc.indent + (Doc.concat + [ + Doc.line; + printWithConstraints ~customLayout withConstraints cmtTbl; + ]); + ]) + in + let attrsAlreadyPrinted = + match modType.pmty_desc with + | Pmty_functor _ | Pmty_signature _ | Pmty_ident _ -> true + | _ -> false + in + let doc = + Doc.concat + [ + (if attrsAlreadyPrinted then Doc.nil + else printAttributes ~customLayout modType.pmty_attributes cmtTbl); + modTypeDoc; + ] + in + printComments doc cmtTbl modType.pmty_loc + +and printWithConstraints ~customLayout withConstraints cmtTbl = + let rows = + List.mapi + (fun i withConstraint -> + Doc.group + (Doc.concat + [ + (if i == 0 then Doc.text "with " else Doc.text "and "); + printWithConstraint ~customLayout withConstraint cmtTbl; + ])) + withConstraints + in + Doc.join ~sep:Doc.line rows + +and printWithConstraint ~customLayout + (withConstraint : Parsetree.with_constraint) cmtTbl = + match withConstraint with + (* with type X.t = ... 
*) + | Pwith_type (longident, typeDeclaration) -> + Doc.group + (printTypeDeclaration ~customLayout + ~name:(printLidentPath longident cmtTbl) + ~equalSign:"=" ~recFlag:Doc.nil 0 typeDeclaration CommentTable.empty) + (* with module X.Y = Z *) + | Pwith_module ({txt = longident1}, {txt = longident2}) -> + Doc.concat + [ + Doc.text "module "; + printLongident longident1; + Doc.text " ="; + Doc.indent (Doc.concat [Doc.line; printLongident longident2]); + ] + (* with type X.t := ..., same format as [Pwith_type] *) + | Pwith_typesubst (longident, typeDeclaration) -> + Doc.group + (printTypeDeclaration ~customLayout + ~name:(printLidentPath longident cmtTbl) + ~equalSign:":=" ~recFlag:Doc.nil 0 typeDeclaration CommentTable.empty) + | Pwith_modsubst ({txt = longident1}, {txt = longident2}) -> + Doc.concat + [ + Doc.text "module "; + printLongident longident1; + Doc.text " :="; + Doc.indent (Doc.concat [Doc.line; printLongident longident2]); + ] + +and printSignature ~customLayout signature cmtTbl = + match signature with + | [] -> printCommentsInsideFile cmtTbl + | signature -> + printList + ~getLoc:(fun s -> s.Parsetree.psig_loc) + ~nodes:signature + ~print:(printSignatureItem ~customLayout) + cmtTbl + +and printSignatureItem ~customLayout (si : Parsetree.signature_item) cmtTbl = + match si.psig_desc with + | Parsetree.Psig_value valueDescription -> + printValueDescription ~customLayout valueDescription cmtTbl + | Psig_type (recFlag, typeDeclarations) -> + let recFlag = + match recFlag with + | Asttypes.Nonrecursive -> Doc.nil + | Asttypes.Recursive -> Doc.text "rec " + in + printTypeDeclarations ~customLayout ~recFlag typeDeclarations cmtTbl + | Psig_typext typeExtension -> + printTypeExtension ~customLayout typeExtension cmtTbl + | Psig_exception extensionConstructor -> + printExceptionDef ~customLayout extensionConstructor cmtTbl + | Psig_module moduleDeclaration -> + printModuleDeclaration ~customLayout moduleDeclaration cmtTbl + | Psig_recmodule moduleDeclarations -> + printRecModuleDeclarations ~customLayout moduleDeclarations cmtTbl + | Psig_modtype modTypeDecl -> + printModuleTypeDeclaration ~customLayout modTypeDecl cmtTbl + | Psig_open openDescription -> + printOpenDescription ~customLayout openDescription cmtTbl + | Psig_include includeDescription -> + printIncludeDescription ~customLayout includeDescription cmtTbl + | Psig_attribute attr -> + fst (printAttribute ~customLayout ~standalone:true attr cmtTbl) + | Psig_extension (extension, attrs) -> + Doc.concat + [ + printAttributes ~customLayout attrs cmtTbl; + Doc.concat + [printExtension ~customLayout ~atModuleLvl:true extension cmtTbl]; + ] + | Psig_class _ | Psig_class_type _ -> Doc.nil + +and printRecModuleDeclarations ~customLayout moduleDeclarations cmtTbl = + printListi + ~getLoc:(fun n -> n.Parsetree.pmd_loc) + ~nodes:moduleDeclarations + ~print:(printRecModuleDeclaration ~customLayout) + cmtTbl + +and printRecModuleDeclaration ~customLayout md cmtTbl i = + let body = + match md.pmd_type.pmty_desc with + | Parsetree.Pmty_alias longident -> + Doc.concat [Doc.text " = "; printLongidentLocation longident cmtTbl] + | _ -> + let needsParens = + match md.pmd_type.pmty_desc with + | Pmty_with _ -> true + | _ -> false + in + let modTypeDoc = + let doc = printModType ~customLayout md.pmd_type cmtTbl in + if needsParens then addParens doc else doc + in + Doc.concat [Doc.text ": "; modTypeDoc] + in + let prefix = if i < 1 then "module rec " else "and " in + Doc.concat + [ + printAttributes ~customLayout ~loc:md.pmd_name.loc 
md.pmd_attributes cmtTbl; + Doc.text prefix; + printComments (Doc.text md.pmd_name.txt) cmtTbl md.pmd_name.loc; + body; + ] + +and printModuleDeclaration ~customLayout (md : Parsetree.module_declaration) + cmtTbl = + let body = + match md.pmd_type.pmty_desc with + | Parsetree.Pmty_alias longident -> + Doc.concat [Doc.text " = "; printLongidentLocation longident cmtTbl] + | _ -> + Doc.concat [Doc.text ": "; printModType ~customLayout md.pmd_type cmtTbl] + in + Doc.concat + [ + printAttributes ~customLayout ~loc:md.pmd_name.loc md.pmd_attributes cmtTbl; + Doc.text "module "; + printComments (Doc.text md.pmd_name.txt) cmtTbl md.pmd_name.loc; + body; + ] + +and printOpenDescription ~customLayout + (openDescription : Parsetree.open_description) cmtTbl = + Doc.concat + [ + printAttributes ~customLayout openDescription.popen_attributes cmtTbl; + Doc.text "open"; + (match openDescription.popen_override with + | Asttypes.Fresh -> Doc.space + | Asttypes.Override -> Doc.text "! "); + printLongidentLocation openDescription.popen_lid cmtTbl; + ] + +and printIncludeDescription ~customLayout + (includeDescription : Parsetree.include_description) cmtTbl = + Doc.concat + [ + printAttributes ~customLayout includeDescription.pincl_attributes cmtTbl; + Doc.text "include "; + printModType ~customLayout includeDescription.pincl_mod cmtTbl; + ] + +and printIncludeDeclaration ~customLayout + (includeDeclaration : Parsetree.include_declaration) cmtTbl = + Doc.concat + [ + printAttributes ~customLayout includeDeclaration.pincl_attributes cmtTbl; + Doc.text "include "; + (let includeDoc = + printModExpr ~customLayout includeDeclaration.pincl_mod cmtTbl + in + if Parens.includeModExpr includeDeclaration.pincl_mod then + addParens includeDoc + else includeDoc); + ] + +and printValueBindings ~customLayout ~recFlag + (vbs : Parsetree.value_binding list) cmtTbl = + printListi + ~getLoc:(fun vb -> vb.Parsetree.pvb_loc) + ~nodes:vbs + ~print:(printValueBinding ~customLayout ~recFlag) + cmtTbl + +and printValueDescription ~customLayout valueDescription cmtTbl = + let isExternal = + match valueDescription.pval_prim with + | [] -> false + | _ -> true + in + let attrs = + printAttributes ~customLayout ~loc:valueDescription.pval_name.loc + valueDescription.pval_attributes cmtTbl + in + let header = if isExternal then "external " else "let " in + Doc.group + (Doc.concat + [ + attrs; + Doc.text header; + printComments + (printIdentLike valueDescription.pval_name.txt) + cmtTbl valueDescription.pval_name.loc; + Doc.text ": "; + printTypExpr ~customLayout valueDescription.pval_type cmtTbl; + (if isExternal then + Doc.group + (Doc.concat + [ + Doc.text " ="; + Doc.indent + (Doc.concat + [ + Doc.line; + Doc.join ~sep:Doc.line + (List.map + (fun s -> + Doc.concat + [Doc.text "\""; Doc.text s; Doc.text "\""]) + valueDescription.pval_prim); + ]); + ]) + else Doc.nil); + ]) + +and printTypeDeclarations ~customLayout ~recFlag typeDeclarations cmtTbl = + printListi + ~getLoc:(fun n -> n.Parsetree.ptype_loc) + ~nodes:typeDeclarations + ~print:(printTypeDeclaration2 ~customLayout ~recFlag) + cmtTbl + +(* + * type_declaration = { + * ptype_name: string loc; + * ptype_params: (core_type * variance) list; + * (* ('a1,...'an) t; None represents _*) + * ptype_cstrs: (core_type * core_type * Location.t) list; + * (* ... constraint T1=T1' ... constraint Tn=Tn' *) + * ptype_kind: type_kind; + * ptype_private: private_flag; (* = private ... *) + * ptype_manifest: core_type option; (* = T *) + * ptype_attributes: attributes; (* ... 
[@@id1] [@@id2] *) + * ptype_loc: Location.t; + * } + * + * + * type t (abstract, no manifest) + * type t = T0 (abstract, manifest=T0) + * type t = C of T | ... (variant, no manifest) + * type t = T0 = C of T | ... (variant, manifest=T0) + * type t = {l: T; ...} (record, no manifest) + * type t = T0 = {l : T; ...} (record, manifest=T0) + * type t = .. (open, no manifest) + * + * + * and type_kind = + * | Ptype_abstract + * | Ptype_variant of constructor_declaration list + * (* Invariant: non-empty list *) + * | Ptype_record of label_declaration list + * (* Invariant: non-empty list *) + * | Ptype_open + *) +and printTypeDeclaration ~customLayout ~name ~equalSign ~recFlag i + (td : Parsetree.type_declaration) cmtTbl = + let attrs = + printAttributes ~customLayout ~loc:td.ptype_loc td.ptype_attributes cmtTbl + in + let prefix = + if i > 0 then Doc.text "and " else Doc.concat [Doc.text "type "; recFlag] + in + let typeName = name in + let typeParams = printTypeParams ~customLayout td.ptype_params cmtTbl in + let manifestAndKind = + match td.ptype_kind with + | Ptype_abstract -> ( + match td.ptype_manifest with + | None -> Doc.nil + | Some typ -> + Doc.concat + [ + Doc.concat [Doc.space; Doc.text equalSign; Doc.space]; + printPrivateFlag td.ptype_private; + printTypExpr ~customLayout typ cmtTbl; + ]) + | Ptype_open -> + Doc.concat + [ + Doc.concat [Doc.space; Doc.text equalSign; Doc.space]; + printPrivateFlag td.ptype_private; + Doc.text ".."; + ] + | Ptype_record lds -> + let manifest = + match td.ptype_manifest with + | None -> Doc.nil + | Some typ -> + Doc.concat + [ + Doc.concat [Doc.space; Doc.text equalSign; Doc.space]; + printTypExpr ~customLayout typ cmtTbl; + ] + in + Doc.concat + [ + manifest; + Doc.concat [Doc.space; Doc.text equalSign; Doc.space]; + printPrivateFlag td.ptype_private; + printRecordDeclaration ~customLayout lds cmtTbl; + ] + | Ptype_variant cds -> + let manifest = + match td.ptype_manifest with + | None -> Doc.nil + | Some typ -> + Doc.concat + [ + Doc.concat [Doc.space; Doc.text equalSign; Doc.space]; + printTypExpr ~customLayout typ cmtTbl; + ] + in + Doc.concat + [ + manifest; + Doc.concat [Doc.space; Doc.text equalSign]; + printConstructorDeclarations ~customLayout + ~privateFlag:td.ptype_private cds cmtTbl; + ] + in + let constraints = + printTypeDefinitionConstraints ~customLayout td.ptype_cstrs + in + Doc.group + (Doc.concat + [attrs; prefix; typeName; typeParams; manifestAndKind; constraints]) + +and printTypeDeclaration2 ~customLayout ~recFlag + (td : Parsetree.type_declaration) cmtTbl i = + let name = + let doc = printIdentLike td.Parsetree.ptype_name.txt in + printComments doc cmtTbl td.ptype_name.loc + in + let equalSign = "=" in + let attrs = + printAttributes ~customLayout ~loc:td.ptype_loc td.ptype_attributes cmtTbl + in + let prefix = + if i > 0 then Doc.text "and " else Doc.concat [Doc.text "type "; recFlag] + in + let typeName = name in + let typeParams = printTypeParams ~customLayout td.ptype_params cmtTbl in + let manifestAndKind = + match td.ptype_kind with + | Ptype_abstract -> ( + match td.ptype_manifest with + | None -> Doc.nil + | Some typ -> + Doc.concat + [ + Doc.concat [Doc.space; Doc.text equalSign; Doc.space]; + printPrivateFlag td.ptype_private; + printTypExpr ~customLayout typ cmtTbl; + ]) + | Ptype_open -> + Doc.concat + [ + Doc.concat [Doc.space; Doc.text equalSign; Doc.space]; + printPrivateFlag td.ptype_private; + Doc.text ".."; + ] + | Ptype_record lds -> + if lds = [] then + Doc.concat + [ + Doc.space; + Doc.text equalSign; + 
Doc.space; + Doc.lbrace; + printCommentsInside cmtTbl td.ptype_loc; + Doc.rbrace; + ] + else + let manifest = + match td.ptype_manifest with + | None -> Doc.nil + | Some typ -> + Doc.concat + [ + Doc.concat [Doc.space; Doc.text equalSign; Doc.space]; + printTypExpr ~customLayout typ cmtTbl; + ] + in + Doc.concat + [ + manifest; + Doc.concat [Doc.space; Doc.text equalSign; Doc.space]; + printPrivateFlag td.ptype_private; + printRecordDeclaration ~customLayout lds cmtTbl; + ] + | Ptype_variant cds -> + let manifest = + match td.ptype_manifest with + | None -> Doc.nil + | Some typ -> + Doc.concat + [ + Doc.concat [Doc.space; Doc.text equalSign; Doc.space]; + printTypExpr ~customLayout typ cmtTbl; + ] + in + Doc.concat + [ + manifest; + Doc.concat [Doc.space; Doc.text equalSign]; + printConstructorDeclarations ~customLayout + ~privateFlag:td.ptype_private cds cmtTbl; + ] + in + let constraints = + printTypeDefinitionConstraints ~customLayout td.ptype_cstrs + in + Doc.group + (Doc.concat + [attrs; prefix; typeName; typeParams; manifestAndKind; constraints]) + +and printTypeDefinitionConstraints ~customLayout cstrs = + match cstrs with + | [] -> Doc.nil + | cstrs -> + Doc.indent + (Doc.group + (Doc.concat + [ + Doc.line; + Doc.group + (Doc.join ~sep:Doc.line + (List.map + (printTypeDefinitionConstraint ~customLayout) + cstrs)); + ])) + +and printTypeDefinitionConstraint ~customLayout + ((typ1, typ2, _loc) : + Parsetree.core_type * Parsetree.core_type * Location.t) = + Doc.concat + [ + Doc.text "constraint "; + printTypExpr ~customLayout typ1 CommentTable.empty; + Doc.text " = "; + printTypExpr ~customLayout typ2 CommentTable.empty; + ] + +and printPrivateFlag (flag : Asttypes.private_flag) = + match flag with + | Private -> Doc.text "private " + | Public -> Doc.nil + +and printTypeParams ~customLayout typeParams cmtTbl = + match typeParams with + | [] -> Doc.nil + | typeParams -> + Doc.group + (Doc.concat + [ + Doc.lessThan; + Doc.indent + (Doc.concat + [ + Doc.softLine; + Doc.join + ~sep:(Doc.concat [Doc.comma; Doc.line]) + (List.map + (fun typeParam -> + let doc = + printTypeParam ~customLayout typeParam cmtTbl + in + printComments doc cmtTbl + (fst typeParam).Parsetree.ptyp_loc) + typeParams); + ]); + Doc.trailingComma; + Doc.softLine; + Doc.greaterThan; + ]) + +and printTypeParam ~customLayout + (param : Parsetree.core_type * Asttypes.variance) cmtTbl = + let typ, variance = param in + let printedVariance = + match variance with + | Covariant -> Doc.text "+" + | Contravariant -> Doc.text "-" + | Invariant -> Doc.nil + in + Doc.concat [printedVariance; printTypExpr ~customLayout typ cmtTbl] + +and printRecordDeclaration ~customLayout + (lds : Parsetree.label_declaration list) cmtTbl = + let forceBreak = + match (lds, List.rev lds) with + | first :: _, last :: _ -> + first.pld_loc.loc_start.pos_lnum < last.pld_loc.loc_end.pos_lnum + | _ -> false + in + Doc.breakableGroup ~forceBreak + (Doc.concat + [ + Doc.lbrace; + Doc.indent + (Doc.concat + [ + Doc.softLine; + Doc.join + ~sep:(Doc.concat [Doc.comma; Doc.line]) + (List.map + (fun ld -> + let doc = + printLabelDeclaration ~customLayout ld cmtTbl + in + printComments doc cmtTbl ld.Parsetree.pld_loc) + lds); + ]); + Doc.trailingComma; + Doc.softLine; + Doc.rbrace; + ]) + +and printConstructorDeclarations ~customLayout ~privateFlag + (cds : Parsetree.constructor_declaration list) cmtTbl = + let forceBreak = + match (cds, List.rev cds) with + | first :: _, last :: _ -> + first.pcd_loc.loc_start.pos_lnum < last.pcd_loc.loc_end.pos_lnum + | _ -> 
false + in + let privateFlag = + match privateFlag with + | Asttypes.Private -> Doc.concat [Doc.text "private"; Doc.line] + | Public -> Doc.nil + in + let rows = + printListi + ~getLoc:(fun cd -> cd.Parsetree.pcd_loc) + ~nodes:cds + ~print:(fun cd cmtTbl i -> + let doc = printConstructorDeclaration2 ~customLayout i cd cmtTbl in + printComments doc cmtTbl cd.Parsetree.pcd_loc) + ~forceBreak cmtTbl + in + Doc.breakableGroup ~forceBreak + (Doc.indent (Doc.concat [Doc.line; privateFlag; rows])) + +and printConstructorDeclaration2 ~customLayout i + (cd : Parsetree.constructor_declaration) cmtTbl = + let attrs = printAttributes ~customLayout cd.pcd_attributes cmtTbl in + let bar = + if i > 0 || cd.pcd_attributes <> [] then Doc.text "| " + else Doc.ifBreaks (Doc.text "| ") Doc.nil + in + let constrName = + let doc = Doc.text cd.pcd_name.txt in + printComments doc cmtTbl cd.pcd_name.loc + in + let constrArgs = + printConstructorArguments ~customLayout ~indent:true cd.pcd_args cmtTbl + in + let gadt = + match cd.pcd_res with + | None -> Doc.nil + | Some typ -> + Doc.indent + (Doc.concat [Doc.text ": "; printTypExpr ~customLayout typ cmtTbl]) + in + Doc.concat + [ + bar; + Doc.group + (Doc.concat + [ + attrs; + (* TODO: fix parsing of attributes, so when can print them above the bar? *) + constrName; + constrArgs; + gadt; + ]); + ] + +and printConstructorArguments ~customLayout ~indent + (cdArgs : Parsetree.constructor_arguments) cmtTbl = + match cdArgs with + | Pcstr_tuple [] -> Doc.nil + | Pcstr_tuple types -> + let args = + Doc.concat + [ + Doc.lparen; + Doc.indent + (Doc.concat + [ + Doc.softLine; + Doc.join + ~sep:(Doc.concat [Doc.comma; Doc.line]) + (List.map + (fun typexpr -> printTypExpr ~customLayout typexpr cmtTbl) + types); + ]); + Doc.trailingComma; + Doc.softLine; + Doc.rparen; + ] + in + Doc.group (if indent then Doc.indent args else args) + | Pcstr_record lds -> + let args = + Doc.concat + [ + Doc.lparen; + (* manually inline the printRecordDeclaration, gives better layout *) + Doc.lbrace; + Doc.indent + (Doc.concat + [ + Doc.softLine; + Doc.join + ~sep:(Doc.concat [Doc.comma; Doc.line]) + (List.map + (fun ld -> + let doc = + printLabelDeclaration ~customLayout ld cmtTbl + in + printComments doc cmtTbl ld.Parsetree.pld_loc) + lds); + ]); + Doc.trailingComma; + Doc.softLine; + Doc.rbrace; + Doc.rparen; + ] + in + if indent then Doc.indent args else args + +and printLabelDeclaration ~customLayout (ld : Parsetree.label_declaration) + cmtTbl = + let attrs = + printAttributes ~customLayout ~loc:ld.pld_name.loc ld.pld_attributes cmtTbl + in + let mutableFlag = + match ld.pld_mutable with + | Mutable -> Doc.text "mutable " + | Immutable -> Doc.nil + in + let name = + let doc = printIdentLike ld.pld_name.txt in + printComments doc cmtTbl ld.pld_name.loc + in + let optional = printOptionalLabel ld.pld_attributes in + Doc.group + (Doc.concat + [ + attrs; + mutableFlag; + name; + optional; + Doc.text ": "; + printTypExpr ~customLayout ld.pld_type cmtTbl; + ]) + +and printTypExpr ~customLayout (typExpr : Parsetree.core_type) cmtTbl = + let renderedType = + match typExpr.ptyp_desc with + | Ptyp_any -> Doc.text "_" + | Ptyp_var var -> + Doc.concat [Doc.text "'"; printIdentLike ~allowUident:true var] + | Ptyp_extension extension -> + printExtension ~customLayout ~atModuleLvl:false extension cmtTbl + | Ptyp_alias (typ, alias) -> + let typ = + (* Technically type t = (string, float) => unit as 'x, doesn't require + * parens around the arrow expression. This is very confusing though. 
+ * Is the "as" part of "unit" or "(string, float) => unit". By printing + * parens we guide the user towards its meaning.*) + let needsParens = + match typ.ptyp_desc with + | Ptyp_arrow _ -> true + | _ -> false + in + let doc = printTypExpr ~customLayout typ cmtTbl in + if needsParens then Doc.concat [Doc.lparen; doc; Doc.rparen] else doc + in + Doc.concat + [typ; Doc.text " as "; Doc.concat [Doc.text "'"; printIdentLike alias]] + (* object printings *) + | Ptyp_object (fields, openFlag) -> + printObject ~customLayout ~inline:false fields openFlag cmtTbl + | Ptyp_constr (longidentLoc, [{ptyp_desc = Ptyp_object (fields, openFlag)}]) + -> + (* for foo<{"a": b}>, when the object is long and needs a line break, we + want the <{ and }> to stay hugged together *) + let constrName = printLidentPath longidentLoc cmtTbl in + Doc.concat + [ + constrName; + Doc.lessThan; + printObject ~customLayout ~inline:true fields openFlag cmtTbl; + Doc.greaterThan; + ] + | Ptyp_constr (longidentLoc, [{ptyp_desc = Parsetree.Ptyp_tuple tuple}]) -> + let constrName = printLidentPath longidentLoc cmtTbl in + Doc.group + (Doc.concat + [ + constrName; + Doc.lessThan; + printTupleType ~customLayout ~inline:true tuple cmtTbl; + Doc.greaterThan; + ]) + | Ptyp_constr (longidentLoc, constrArgs) -> ( + let constrName = printLidentPath longidentLoc cmtTbl in + match constrArgs with + | [] -> constrName + | _args -> + Doc.group + (Doc.concat + [ + constrName; + Doc.lessThan; + Doc.indent + (Doc.concat + [ + Doc.softLine; + Doc.join + ~sep:(Doc.concat [Doc.comma; Doc.line]) + (List.map + (fun typexpr -> + printTypExpr ~customLayout typexpr cmtTbl) + constrArgs); + ]); + Doc.trailingComma; + Doc.softLine; + Doc.greaterThan; + ])) + | Ptyp_arrow _ -> ( + let attrsBefore, args, returnType = ParsetreeViewer.arrowType typExpr in + let returnTypeNeedsParens = + match returnType.ptyp_desc with + | Ptyp_alias _ -> true + | _ -> false + in + let returnDoc = + let doc = printTypExpr ~customLayout returnType cmtTbl in + if returnTypeNeedsParens then Doc.concat [Doc.lparen; doc; Doc.rparen] + else doc + in + let isUncurried, attrs = + ParsetreeViewer.processUncurriedAttribute attrsBefore + in + match args with + | [] -> Doc.nil + | [([], Nolabel, n)] when not isUncurried -> + let hasAttrsBefore = not (attrs = []) in + let attrs = + if hasAttrsBefore then + printAttributes ~customLayout ~inline:true attrsBefore cmtTbl + else Doc.nil + in + let typDoc = + let doc = printTypExpr ~customLayout n cmtTbl in + match n.ptyp_desc with + | Ptyp_arrow _ | Ptyp_tuple _ | Ptyp_alias _ -> addParens doc + | _ -> doc + in + Doc.group + (Doc.concat + [ + Doc.group attrs; + Doc.group + (if hasAttrsBefore then + Doc.concat + [ + Doc.lparen; + Doc.indent + (Doc.concat + [Doc.softLine; typDoc; Doc.text " => "; returnDoc]); + Doc.softLine; + Doc.rparen; + ] + else Doc.concat [typDoc; Doc.text " => "; returnDoc]); + ]) + | args -> + let attrs = printAttributes ~customLayout ~inline:true attrs cmtTbl in + let renderedArgs = + Doc.concat + [ + attrs; + Doc.text "("; + Doc.indent + (Doc.concat + [ + Doc.softLine; + (if isUncurried then Doc.concat [Doc.dot; Doc.space] + else Doc.nil); + Doc.join + ~sep:(Doc.concat [Doc.comma; Doc.line]) + (List.map + (fun tp -> printTypeParameter ~customLayout tp cmtTbl) + args); + ]); + Doc.trailingComma; + Doc.softLine; + Doc.text ")"; + ] + in + Doc.group (Doc.concat [renderedArgs; Doc.text " => "; returnDoc])) + | Ptyp_tuple types -> + printTupleType ~customLayout ~inline:false types cmtTbl + | Ptyp_poly ([], typ) -> 
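+    (* no explicitly quantified type variables: print the type itself *)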
printTypExpr ~customLayout typ cmtTbl + | Ptyp_poly (stringLocs, typ) -> + Doc.concat + [ + Doc.join ~sep:Doc.space + (List.map + (fun {Location.txt; loc} -> + let doc = Doc.concat [Doc.text "'"; Doc.text txt] in + printComments doc cmtTbl loc) + stringLocs); + Doc.dot; + Doc.space; + printTypExpr ~customLayout typ cmtTbl; + ] + | Ptyp_package packageType -> + printPackageType ~customLayout ~printModuleKeywordAndParens:true + packageType cmtTbl + | Ptyp_class _ -> Doc.text "classes are not supported in types" + | Ptyp_variant (rowFields, closedFlag, labelsOpt) -> + let forceBreak = + typExpr.ptyp_loc.Location.loc_start.pos_lnum + < typExpr.ptyp_loc.loc_end.pos_lnum + in + let printRowField = function + | Parsetree.Rtag ({txt; loc}, attrs, true, []) -> + let doc = + Doc.group + (Doc.concat + [ + printAttributes ~customLayout attrs cmtTbl; + Doc.concat [Doc.text "#"; printPolyVarIdent txt]; + ]) + in + printComments doc cmtTbl loc + | Rtag ({txt}, attrs, truth, types) -> + let doType t = + match t.Parsetree.ptyp_desc with + | Ptyp_tuple _ -> printTypExpr ~customLayout t cmtTbl + | _ -> + Doc.concat + [Doc.lparen; printTypExpr ~customLayout t cmtTbl; Doc.rparen] + in + let printedTypes = List.map doType types in + let cases = + Doc.join ~sep:(Doc.concat [Doc.line; Doc.text "& "]) printedTypes + in + let cases = + if truth then Doc.concat [Doc.line; Doc.text "& "; cases] else cases + in + Doc.group + (Doc.concat + [ + printAttributes ~customLayout attrs cmtTbl; + Doc.concat [Doc.text "#"; printPolyVarIdent txt]; + cases; + ]) + | Rinherit coreType -> printTypExpr ~customLayout coreType cmtTbl + in + let docs = List.map printRowField rowFields in + let cases = Doc.join ~sep:(Doc.concat [Doc.line; Doc.text "| "]) docs in + let cases = + if docs = [] then cases + else Doc.concat [Doc.ifBreaks (Doc.text "| ") Doc.nil; cases] + in + let openingSymbol = + if closedFlag = Open then Doc.concat [Doc.greaterThan; Doc.line] + else if labelsOpt = None then Doc.softLine + else Doc.concat [Doc.lessThan; Doc.line] + in + let labels = + match labelsOpt with + | None | Some [] -> Doc.nil + | Some labels -> + Doc.concat + (List.map + (fun label -> + Doc.concat [Doc.line; Doc.text "#"; printPolyVarIdent label]) + labels) + in + let closingSymbol = + match labelsOpt with + | None | Some [] -> Doc.nil + | _ -> Doc.text " >" + in + Doc.breakableGroup ~forceBreak + (Doc.concat + [ + Doc.lbracket; + Doc.indent + (Doc.concat [openingSymbol; cases; closingSymbol; labels]); + Doc.softLine; + Doc.rbracket; + ]) + in + let shouldPrintItsOwnAttributes = + match typExpr.ptyp_desc with + | Ptyp_arrow _ (* es6 arrow types print their own attributes *) -> true + | _ -> false + in + let doc = + match typExpr.ptyp_attributes with + | _ :: _ as attrs when not shouldPrintItsOwnAttributes -> + Doc.group + (Doc.concat [printAttributes ~customLayout attrs cmtTbl; renderedType]) + | _ -> renderedType + in + printComments doc cmtTbl typExpr.ptyp_loc + +and printObject ~customLayout ~inline fields openFlag cmtTbl = + let doc = + match fields with + | [] -> + Doc.concat + [ + Doc.lbrace; + (match openFlag with + | Asttypes.Closed -> Doc.dot + | Open -> Doc.dotdot); + Doc.rbrace; + ] + | fields -> + Doc.concat + [ + Doc.lbrace; + (match openFlag with + | Asttypes.Closed -> Doc.nil + | Open -> ( + match fields with + (* handle `type t = {.. ...objType, "x": int}` + * .. and ... should have a space in between *) + | Oinherit _ :: _ -> Doc.text ".. 
" + | _ -> Doc.dotdot)); + Doc.indent + (Doc.concat + [ + Doc.softLine; + Doc.join + ~sep:(Doc.concat [Doc.comma; Doc.line]) + (List.map + (fun field -> printObjectField ~customLayout field cmtTbl) + fields); + ]); + Doc.trailingComma; + Doc.softLine; + Doc.rbrace; + ] + in + if inline then doc else Doc.group doc + +and printTupleType ~customLayout ~inline (types : Parsetree.core_type list) + cmtTbl = + let tuple = + Doc.concat + [ + Doc.lparen; + Doc.indent + (Doc.concat + [ + Doc.softLine; + Doc.join + ~sep:(Doc.concat [Doc.comma; Doc.line]) + (List.map + (fun typexpr -> printTypExpr ~customLayout typexpr cmtTbl) + types); + ]); + Doc.trailingComma; + Doc.softLine; + Doc.rparen; + ] + in + if inline == false then Doc.group tuple else tuple + +and printObjectField ~customLayout (field : Parsetree.object_field) cmtTbl = + match field with + | Otag (labelLoc, attrs, typ) -> + let lbl = + let doc = Doc.text ("\"" ^ labelLoc.txt ^ "\"") in + printComments doc cmtTbl labelLoc.loc + in + let doc = + Doc.concat + [ + printAttributes ~customLayout ~loc:labelLoc.loc attrs cmtTbl; + lbl; + Doc.text ": "; + printTypExpr ~customLayout typ cmtTbl; + ] + in + let cmtLoc = {labelLoc.loc with loc_end = typ.ptyp_loc.loc_end} in + printComments doc cmtTbl cmtLoc + | Oinherit typexpr -> + Doc.concat [Doc.dotdotdot; printTypExpr ~customLayout typexpr cmtTbl] + +(* es6 arrow type arg + * type t = (~foo: string, ~bar: float=?, unit) => unit + * i.e. ~foo: string, ~bar: float *) +and printTypeParameter ~customLayout (attrs, lbl, typ) cmtTbl = + let isUncurried, attrs = ParsetreeViewer.processUncurriedAttribute attrs in + let uncurried = + if isUncurried then Doc.concat [Doc.dot; Doc.space] else Doc.nil + in + let attrs = printAttributes ~customLayout attrs cmtTbl in + let label = + match lbl with + | Asttypes.Nolabel -> Doc.nil + | Labelled lbl -> + Doc.concat [Doc.text "~"; printIdentLike lbl; Doc.text ": "] + | Optional lbl -> + Doc.concat [Doc.text "~"; printIdentLike lbl; Doc.text ": "] + in + let optionalIndicator = + match lbl with + | Asttypes.Nolabel | Labelled _ -> Doc.nil + | Optional _lbl -> Doc.text "=?" 
+ in + let loc, typ = + match typ.ptyp_attributes with + | ({Location.txt = "ns.namedArgLoc"; loc}, _) :: attrs -> + ( {loc with loc_end = typ.ptyp_loc.loc_end}, + {typ with ptyp_attributes = attrs} ) + | _ -> (typ.ptyp_loc, typ) + in + let doc = + Doc.group + (Doc.concat + [ + uncurried; + attrs; + label; + printTypExpr ~customLayout typ cmtTbl; + optionalIndicator; + ]) + in + printComments doc cmtTbl loc + +and printValueBinding ~customLayout ~recFlag (vb : Parsetree.value_binding) + cmtTbl i = + let attrs = + printAttributes ~customLayout ~loc:vb.pvb_pat.ppat_loc vb.pvb_attributes + cmtTbl + in + let header = + if i == 0 then Doc.concat [Doc.text "let "; recFlag] else Doc.text "and " + in + match vb with + | { + pvb_pat = + { + ppat_desc = + Ppat_constraint (pattern, ({ptyp_desc = Ptyp_poly _} as patTyp)); + }; + pvb_expr = {pexp_desc = Pexp_newtype _} as expr; + } -> ( + let _attrs, parameters, returnExpr = ParsetreeViewer.funExpr expr in + let abstractType = + match parameters with + | [NewTypes {locs = vars}] -> + Doc.concat + [ + Doc.text "type "; + Doc.join ~sep:Doc.space + (List.map (fun var -> Doc.text var.Asttypes.txt) vars); + Doc.dot; + ] + | _ -> Doc.nil + in + match returnExpr.pexp_desc with + | Pexp_constraint (expr, typ) -> + Doc.group + (Doc.concat + [ + attrs; + header; + printPattern ~customLayout pattern cmtTbl; + Doc.text ":"; + Doc.indent + (Doc.concat + [ + Doc.line; + abstractType; + Doc.space; + printTypExpr ~customLayout typ cmtTbl; + Doc.text " ="; + Doc.concat + [ + Doc.line; + printExpressionWithComments ~customLayout expr cmtTbl; + ]; + ]); + ]) + | _ -> + (* Example: + * let cancel_and_collect_callbacks: + * 'a 'u 'c. (list, promise<'a, 'u, 'c>) => list = * (type x, callbacks_accumulator, p: promise<_, _, c>) + *) + Doc.group + (Doc.concat + [ + attrs; + header; + printPattern ~customLayout pattern cmtTbl; + Doc.text ":"; + Doc.indent + (Doc.concat + [ + Doc.line; + abstractType; + Doc.space; + printTypExpr ~customLayout patTyp cmtTbl; + Doc.text " ="; + Doc.concat + [ + Doc.line; + printExpressionWithComments ~customLayout expr cmtTbl; + ]; + ]); + ])) + | _ -> + let optBraces, expr = ParsetreeViewer.processBracesAttr vb.pvb_expr in + let printedExpr = + let doc = printExpressionWithComments ~customLayout vb.pvb_expr cmtTbl in + match Parens.expr vb.pvb_expr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr braces + | Nothing -> doc + in + let patternDoc = printPattern ~customLayout vb.pvb_pat cmtTbl in + (* + * we want to optimize the layout of one pipe: + * let tbl = data->Js.Array2.reduce((map, curr) => { + * ... + * }) + * important is that we don't do this for multiple pipes: + * let decoratorTags = + * items + * ->Js.Array2.filter(items => {items.category === Decorators}) + * ->Belt.Array.map(...) + * Multiple pipes chained together lend themselves more towards the last layout. 
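+     * Both candidates are handed to Doc.customLayout below: first everything on one line, then the variant with the right-hand side indented on the next line.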
+ *) + if ParsetreeViewer.isSinglePipeExpr vb.pvb_expr then + Doc.customLayout + [ + Doc.group + (Doc.concat + [ + attrs; header; patternDoc; Doc.text " ="; Doc.space; printedExpr; + ]); + Doc.group + (Doc.concat + [ + attrs; + header; + patternDoc; + Doc.text " ="; + Doc.indent (Doc.concat [Doc.line; printedExpr]); + ]); + ] + else + let shouldIndent = + match optBraces with + | Some _ -> false + | _ -> ( + ParsetreeViewer.isBinaryExpression expr + || + match vb.pvb_expr with + | { + pexp_attributes = [({Location.txt = "ns.ternary"}, _)]; + pexp_desc = Pexp_ifthenelse (ifExpr, _, _); + } -> + ParsetreeViewer.isBinaryExpression ifExpr + || ParsetreeViewer.hasAttributes ifExpr.pexp_attributes + | {pexp_desc = Pexp_newtype _} -> false + | e -> + ParsetreeViewer.hasAttributes e.pexp_attributes + || ParsetreeViewer.isArrayAccess e) + in + Doc.group + (Doc.concat + [ + attrs; + header; + patternDoc; + Doc.text " ="; + (if shouldIndent then + Doc.indent (Doc.concat [Doc.line; printedExpr]) + else Doc.concat [Doc.space; printedExpr]); + ]) + +and printPackageType ~customLayout ~printModuleKeywordAndParens + (packageType : Parsetree.package_type) cmtTbl = + let doc = + match packageType with + | longidentLoc, [] -> + Doc.group (Doc.concat [printLongidentLocation longidentLoc cmtTbl]) + | longidentLoc, packageConstraints -> + Doc.group + (Doc.concat + [ + printLongidentLocation longidentLoc cmtTbl; + printPackageConstraints ~customLayout packageConstraints cmtTbl; + Doc.softLine; + ]) + in + if printModuleKeywordAndParens then + Doc.concat [Doc.text "module("; doc; Doc.rparen] + else doc + +and printPackageConstraints ~customLayout packageConstraints cmtTbl = + Doc.concat + [ + Doc.text " with"; + Doc.indent + (Doc.concat + [ + Doc.line; + Doc.join ~sep:Doc.line + (List.mapi + (fun i pc -> + let longident, typexpr = pc in + let cmtLoc = + { + longident.Asttypes.loc with + loc_end = typexpr.Parsetree.ptyp_loc.loc_end; + } + in + let doc = + printPackageConstraint ~customLayout i cmtTbl pc + in + printComments doc cmtTbl cmtLoc) + packageConstraints); + ]); + ] + +and printPackageConstraint ~customLayout i cmtTbl (longidentLoc, typ) = + let prefix = if i == 0 then Doc.text "type " else Doc.text "and type " in + Doc.concat + [ + prefix; + printLongidentLocation longidentLoc cmtTbl; + Doc.text " = "; + printTypExpr ~customLayout typ cmtTbl; + ] + +and printExtension ~customLayout ~atModuleLvl (stringLoc, payload) cmtTbl = + let txt = convertBsExtension stringLoc.Location.txt in + let extName = + let doc = + Doc.concat + [ + Doc.text "%"; + (if atModuleLvl then Doc.text "%" else Doc.nil); + Doc.text txt; + ] + in + printComments doc cmtTbl stringLoc.Location.loc + in + Doc.group (Doc.concat [extName; printPayload ~customLayout payload cmtTbl]) + +and printPattern ~customLayout (p : Parsetree.pattern) cmtTbl = + let patternWithoutAttributes = + match p.ppat_desc with + | Ppat_any -> Doc.text "_" + | Ppat_var var -> printIdentLike var.txt + | Ppat_constant c -> + let templateLiteral = + ParsetreeViewer.hasTemplateLiteralAttr p.ppat_attributes + in + printConstant ~templateLiteral c + | Ppat_tuple patterns -> + Doc.group + (Doc.concat + [ + Doc.lparen; + Doc.indent + (Doc.concat + [ + Doc.softLine; + Doc.join + ~sep:(Doc.concat [Doc.text ","; Doc.line]) + (List.map + (fun pat -> printPattern ~customLayout pat cmtTbl) + patterns); + ]); + Doc.trailingComma; + Doc.softLine; + Doc.rparen; + ]) + | Ppat_array [] -> + Doc.concat + [Doc.lbracket; printCommentsInside cmtTbl p.ppat_loc; Doc.rbracket] + | 
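+    (* non-empty array pattern, e.g. [1, 2, 3] *)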
Ppat_array patterns -> + Doc.group + (Doc.concat + [ + Doc.text "["; + Doc.indent + (Doc.concat + [ + Doc.softLine; + Doc.join + ~sep:(Doc.concat [Doc.text ","; Doc.line]) + (List.map + (fun pat -> printPattern ~customLayout pat cmtTbl) + patterns); + ]); + Doc.trailingComma; + Doc.softLine; + Doc.text "]"; + ]) + | Ppat_construct ({txt = Longident.Lident "()"}, _) -> + Doc.concat [Doc.lparen; printCommentsInside cmtTbl p.ppat_loc; Doc.rparen] + | Ppat_construct ({txt = Longident.Lident "[]"}, _) -> + Doc.concat + [Doc.text "list{"; printCommentsInside cmtTbl p.ppat_loc; Doc.rbrace] + | Ppat_construct ({txt = Longident.Lident "::"}, _) -> + let patterns, tail = + ParsetreeViewer.collectPatternsFromListConstruct [] p + in + let shouldHug = + match (patterns, tail) with + | [pat], {ppat_desc = Ppat_construct ({txt = Longident.Lident "[]"}, _)} + when ParsetreeViewer.isHuggablePattern pat -> + true + | _ -> false + in + let children = + Doc.concat + [ + (if shouldHug then Doc.nil else Doc.softLine); + Doc.join + ~sep:(Doc.concat [Doc.text ","; Doc.line]) + (List.map + (fun pat -> printPattern ~customLayout pat cmtTbl) + patterns); + (match tail.Parsetree.ppat_desc with + | Ppat_construct ({txt = Longident.Lident "[]"}, _) -> Doc.nil + | _ -> + let doc = + Doc.concat + [Doc.text "..."; printPattern ~customLayout tail cmtTbl] + in + let tail = printComments doc cmtTbl tail.ppat_loc in + Doc.concat [Doc.text ","; Doc.line; tail]); + ] + in + Doc.group + (Doc.concat + [ + Doc.text "list{"; + (if shouldHug then children + else + Doc.concat + [ + Doc.indent children; + Doc.ifBreaks (Doc.text ",") Doc.nil; + Doc.softLine; + ]); + Doc.rbrace; + ]) + | Ppat_construct (constrName, constructorArgs) -> + let constrName = printLongidentLocation constrName cmtTbl in + let argsDoc = + match constructorArgs with + | None -> Doc.nil + | Some + { + ppat_loc; + ppat_desc = Ppat_construct ({txt = Longident.Lident "()"}, _); + } -> + Doc.concat + [Doc.lparen; printCommentsInside cmtTbl ppat_loc; Doc.rparen] + | Some {ppat_desc = Ppat_tuple []; ppat_loc = loc} -> + Doc.concat [Doc.lparen; printCommentsInside cmtTbl loc; Doc.rparen] + (* Some((1, 2) *) + | Some {ppat_desc = Ppat_tuple [({ppat_desc = Ppat_tuple _} as arg)]} -> + Doc.concat + [Doc.lparen; printPattern ~customLayout arg cmtTbl; Doc.rparen] + | Some {ppat_desc = Ppat_tuple patterns} -> + Doc.concat + [ + Doc.lparen; + Doc.indent + (Doc.concat + [ + Doc.softLine; + Doc.join + ~sep:(Doc.concat [Doc.comma; Doc.line]) + (List.map + (fun pat -> printPattern ~customLayout pat cmtTbl) + patterns); + ]); + Doc.trailingComma; + Doc.softLine; + Doc.rparen; + ] + | Some arg -> + let argDoc = printPattern ~customLayout arg cmtTbl in + let shouldHug = ParsetreeViewer.isHuggablePattern arg in + Doc.concat + [ + Doc.lparen; + (if shouldHug then argDoc + else + Doc.concat + [ + Doc.indent (Doc.concat [Doc.softLine; argDoc]); + Doc.trailingComma; + Doc.softLine; + ]); + Doc.rparen; + ] + in + Doc.group (Doc.concat [constrName; argsDoc]) + | Ppat_variant (label, None) -> + Doc.concat [Doc.text "#"; printPolyVarIdent label] + | Ppat_variant (label, variantArgs) -> + let variantName = Doc.concat [Doc.text "#"; printPolyVarIdent label] in + let argsDoc = + match variantArgs with + | None -> Doc.nil + | Some {ppat_desc = Ppat_construct ({txt = Longident.Lident "()"}, _)} + -> + Doc.text "()" + | Some {ppat_desc = Ppat_tuple []; ppat_loc = loc} -> + Doc.concat [Doc.lparen; printCommentsInside cmtTbl loc; Doc.rparen] + (* Some((1, 2) *) + | Some {ppat_desc = Ppat_tuple 
[({ppat_desc = Ppat_tuple _} as arg)]} -> + Doc.concat + [Doc.lparen; printPattern ~customLayout arg cmtTbl; Doc.rparen] + | Some {ppat_desc = Ppat_tuple patterns} -> + Doc.concat + [ + Doc.lparen; + Doc.indent + (Doc.concat + [ + Doc.softLine; + Doc.join + ~sep:(Doc.concat [Doc.comma; Doc.line]) + (List.map + (fun pat -> printPattern ~customLayout pat cmtTbl) + patterns); + ]); + Doc.trailingComma; + Doc.softLine; + Doc.rparen; + ] + | Some arg -> + let argDoc = printPattern ~customLayout arg cmtTbl in + let shouldHug = ParsetreeViewer.isHuggablePattern arg in + Doc.concat + [ + Doc.lparen; + (if shouldHug then argDoc + else + Doc.concat + [ + Doc.indent (Doc.concat [Doc.softLine; argDoc]); + Doc.trailingComma; + Doc.softLine; + ]); + Doc.rparen; + ] + in + Doc.group (Doc.concat [variantName; argsDoc]) + | Ppat_type ident -> + Doc.concat [Doc.text "#..."; printIdentPath ident cmtTbl] + | Ppat_record (rows, openFlag) -> + Doc.group + (Doc.concat + [ + Doc.lbrace; + Doc.indent + (Doc.concat + [ + Doc.softLine; + Doc.join + ~sep:(Doc.concat [Doc.text ","; Doc.line]) + (List.map + (fun row -> + printPatternRecordRow ~customLayout row cmtTbl) + rows); + (match openFlag with + | Open -> Doc.concat [Doc.text ","; Doc.line; Doc.text "_"] + | Closed -> Doc.nil); + ]); + Doc.ifBreaks (Doc.text ",") Doc.nil; + Doc.softLine; + Doc.rbrace; + ]) + | Ppat_exception p -> + let needsParens = + match p.ppat_desc with + | Ppat_or (_, _) | Ppat_alias (_, _) -> true + | _ -> false + in + let pat = + let p = printPattern ~customLayout p cmtTbl in + if needsParens then Doc.concat [Doc.text "("; p; Doc.text ")"] else p + in + Doc.group (Doc.concat [Doc.text "exception"; Doc.line; pat]) + | Ppat_or _ -> + (* Blue | Red | Green -> [Blue; Red; Green] *) + let orChain = ParsetreeViewer.collectOrPatternChain p in + let docs = + List.mapi + (fun i pat -> + let patternDoc = printPattern ~customLayout pat cmtTbl in + Doc.concat + [ + (if i == 0 then Doc.nil + else Doc.concat [Doc.line; Doc.text "| "]); + (match pat.ppat_desc with + (* (Blue | Red) | (Green | Black) | White *) + | Ppat_or _ -> addParens patternDoc + | _ -> patternDoc); + ]) + orChain + in + let isSpreadOverMultipleLines = + match (orChain, List.rev orChain) with + | first :: _, last :: _ -> + first.ppat_loc.loc_start.pos_lnum < last.ppat_loc.loc_end.pos_lnum + | _ -> false + in + Doc.breakableGroup ~forceBreak:isSpreadOverMultipleLines (Doc.concat docs) + | Ppat_extension ext -> + printExtension ~customLayout ~atModuleLvl:false ext cmtTbl + | Ppat_lazy p -> + let needsParens = + match p.ppat_desc with + | Ppat_or (_, _) | Ppat_alias (_, _) -> true + | _ -> false + in + let pat = + let p = printPattern ~customLayout p cmtTbl in + if needsParens then Doc.concat [Doc.text "("; p; Doc.text ")"] else p + in + Doc.concat [Doc.text "lazy "; pat] + | Ppat_alias (p, aliasLoc) -> + let needsParens = + match p.ppat_desc with + | Ppat_or (_, _) | Ppat_alias (_, _) -> true + | _ -> false + in + let renderedPattern = + let p = printPattern ~customLayout p cmtTbl in + if needsParens then Doc.concat [Doc.text "("; p; Doc.text ")"] else p + in + Doc.concat + [renderedPattern; Doc.text " as "; printStringLoc aliasLoc cmtTbl] + (* Note: module(P : S) is represented as *) + (* Ppat_constraint(Ppat_unpack, Ptyp_package) *) + | Ppat_constraint + ( {ppat_desc = Ppat_unpack stringLoc}, + {ptyp_desc = Ptyp_package packageType; ptyp_loc} ) -> + Doc.concat + [ + Doc.text "module("; + printComments (Doc.text stringLoc.txt) cmtTbl stringLoc.loc; + Doc.text ": "; + printComments + 
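+        (* the "module(" and closing ")" are emitted right here, so the package type itself is printed without the module keyword and parens *)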
(printPackageType ~customLayout ~printModuleKeywordAndParens:false + packageType cmtTbl) + cmtTbl ptyp_loc; + Doc.rparen; + ] + | Ppat_constraint (pattern, typ) -> + Doc.concat + [ + printPattern ~customLayout pattern cmtTbl; + Doc.text ": "; + printTypExpr ~customLayout typ cmtTbl; + ] + (* Note: module(P : S) is represented as *) + (* Ppat_constraint(Ppat_unpack, Ptyp_package) *) + | Ppat_unpack stringLoc -> + Doc.concat + [ + Doc.text "module("; + printComments (Doc.text stringLoc.txt) cmtTbl stringLoc.loc; + Doc.rparen; + ] + | Ppat_interval (a, b) -> + Doc.concat [printConstant a; Doc.text " .. "; printConstant b] + | Ppat_open _ -> Doc.nil + in + let doc = + match p.ppat_attributes with + | [] -> patternWithoutAttributes + | attrs -> + Doc.group + (Doc.concat + [ + printAttributes ~customLayout attrs cmtTbl; patternWithoutAttributes; + ]) + in + printComments doc cmtTbl p.ppat_loc + +and printPatternRecordRow ~customLayout row cmtTbl = + match row with + (* punned {x}*) + | ( ({Location.txt = Longident.Lident ident} as longident), + {Parsetree.ppat_desc = Ppat_var {txt; _}; ppat_attributes} ) + when ident = txt -> + Doc.concat + [ + printOptionalLabel ppat_attributes; + printAttributes ~customLayout ppat_attributes cmtTbl; + printLidentPath longident cmtTbl; + ] + | longident, pattern -> + let locForComments = + {longident.loc with loc_end = pattern.Parsetree.ppat_loc.loc_end} + in + let rhsDoc = + let doc = printPattern ~customLayout pattern cmtTbl in + let doc = + if Parens.patternRecordRowRhs pattern then addParens doc else doc + in + Doc.concat [printOptionalLabel pattern.ppat_attributes; doc] + in + let doc = + Doc.group + (Doc.concat + [ + printLidentPath longident cmtTbl; + Doc.text ":"; + (if ParsetreeViewer.isHuggablePattern pattern then + Doc.concat [Doc.space; rhsDoc] + else Doc.indent (Doc.concat [Doc.line; rhsDoc])); + ]) + in + printComments doc cmtTbl locForComments + +and printExpressionWithComments ~customLayout expr cmtTbl : Doc.t = + let doc = printExpression ~customLayout expr cmtTbl in + printComments doc cmtTbl expr.Parsetree.pexp_loc + +and printIfChain ~customLayout pexp_attributes ifs elseExpr cmtTbl = + let ifDocs = + Doc.join ~sep:Doc.space + (List.mapi + (fun i (outerLoc, ifExpr, thenExpr) -> + let ifTxt = if i > 0 then Doc.text "else if " else Doc.text "if " in + let doc = + match ifExpr with + | ParsetreeViewer.If ifExpr -> + let condition = + if ParsetreeViewer.isBlockExpr ifExpr then + printExpressionBlock ~customLayout ~braces:true ifExpr cmtTbl + else + let doc = + printExpressionWithComments ~customLayout ifExpr cmtTbl + in + match Parens.expr ifExpr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc ifExpr braces + | Nothing -> Doc.ifBreaks (addParens doc) doc + in + Doc.concat + [ + ifTxt; + Doc.group condition; + Doc.space; + (let thenExpr = + match ParsetreeViewer.processBracesAttr thenExpr with + (* This case only happens when coming from Reason, we strip braces *) + | Some _, expr -> expr + | _ -> thenExpr + in + printExpressionBlock ~customLayout ~braces:true thenExpr + cmtTbl); + ] + | IfLet (pattern, conditionExpr) -> + let conditionDoc = + let doc = + printExpressionWithComments ~customLayout conditionExpr + cmtTbl + in + match Parens.expr conditionExpr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc conditionExpr braces + | Nothing -> doc + in + Doc.concat + [ + ifTxt; + Doc.text "let "; + printPattern ~customLayout pattern cmtTbl; + Doc.text " = "; + conditionDoc; + 
Doc.space; + printExpressionBlock ~customLayout ~braces:true thenExpr + cmtTbl; + ] + in + printLeadingComments doc cmtTbl.leading outerLoc) + ifs) + in + let elseDoc = + match elseExpr with + | None -> Doc.nil + | Some expr -> + Doc.concat + [ + Doc.text " else "; + printExpressionBlock ~customLayout ~braces:true expr cmtTbl; + ] + in + let attrs = ParsetreeViewer.filterFragileMatchAttributes pexp_attributes in + Doc.concat [printAttributes ~customLayout attrs cmtTbl; ifDocs; elseDoc] + +and printExpression ~customLayout (e : Parsetree.expression) cmtTbl = + let printedExpression = + match e.pexp_desc with + | Parsetree.Pexp_constant c -> + printConstant ~templateLiteral:(ParsetreeViewer.isTemplateLiteral e) c + | Pexp_construct _ when ParsetreeViewer.hasJsxAttribute e.pexp_attributes -> + printJsxFragment ~customLayout e cmtTbl + | Pexp_construct ({txt = Longident.Lident "()"}, _) -> Doc.text "()" + | Pexp_construct ({txt = Longident.Lident "[]"}, _) -> + Doc.concat + [Doc.text "list{"; printCommentsInside cmtTbl e.pexp_loc; Doc.rbrace] + | Pexp_construct ({txt = Longident.Lident "::"}, _) -> + let expressions, spread = ParsetreeViewer.collectListExpressions e in + let spreadDoc = + match spread with + | Some expr -> + Doc.concat + [ + Doc.text ","; + Doc.line; + Doc.dotdotdot; + (let doc = + printExpressionWithComments ~customLayout expr cmtTbl + in + match Parens.expr expr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr braces + | Nothing -> doc); + ] + | None -> Doc.nil + in + Doc.group + (Doc.concat + [ + Doc.text "list{"; + Doc.indent + (Doc.concat + [ + Doc.softLine; + Doc.join + ~sep:(Doc.concat [Doc.text ","; Doc.line]) + (List.map + (fun expr -> + let doc = + printExpressionWithComments ~customLayout expr + cmtTbl + in + match Parens.expr expr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr braces + | Nothing -> doc) + expressions); + spreadDoc; + ]); + Doc.trailingComma; + Doc.softLine; + Doc.rbrace; + ]) + | Pexp_construct (longidentLoc, args) -> + let constr = printLongidentLocation longidentLoc cmtTbl in + let args = + match args with + | None -> Doc.nil + | Some {pexp_desc = Pexp_construct ({txt = Longident.Lident "()"}, _)} + -> + Doc.text "()" + (* Some((1, 2)) *) + | Some {pexp_desc = Pexp_tuple [({pexp_desc = Pexp_tuple _} as arg)]} -> + Doc.concat + [ + Doc.lparen; + (let doc = printExpressionWithComments ~customLayout arg cmtTbl in + match Parens.expr arg with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc arg braces + | Nothing -> doc); + Doc.rparen; + ] + | Some {pexp_desc = Pexp_tuple args} -> + Doc.concat + [ + Doc.lparen; + Doc.indent + (Doc.concat + [ + Doc.softLine; + Doc.join + ~sep:(Doc.concat [Doc.comma; Doc.line]) + (List.map + (fun expr -> + let doc = + printExpressionWithComments ~customLayout expr + cmtTbl + in + match Parens.expr expr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr braces + | Nothing -> doc) + args); + ]); + Doc.trailingComma; + Doc.softLine; + Doc.rparen; + ] + | Some arg -> + let argDoc = + let doc = printExpressionWithComments ~customLayout arg cmtTbl in + match Parens.expr arg with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc arg braces + | Nothing -> doc + in + let shouldHug = ParsetreeViewer.isHuggableExpression arg in + Doc.concat + [ + Doc.lparen; + (if shouldHug then argDoc + else + Doc.concat + [ + Doc.indent (Doc.concat [Doc.softLine; 
argDoc]); + Doc.trailingComma; + Doc.softLine; + ]); + Doc.rparen; + ] + in + Doc.group (Doc.concat [constr; args]) + | Pexp_ident path -> printLidentPath path cmtTbl + | Pexp_tuple exprs -> + Doc.group + (Doc.concat + [ + Doc.lparen; + Doc.indent + (Doc.concat + [ + Doc.softLine; + Doc.join + ~sep:(Doc.concat [Doc.text ","; Doc.line]) + (List.map + (fun expr -> + let doc = + printExpressionWithComments ~customLayout expr + cmtTbl + in + match Parens.expr expr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr braces + | Nothing -> doc) + exprs); + ]); + Doc.ifBreaks (Doc.text ",") Doc.nil; + Doc.softLine; + Doc.rparen; + ]) + | Pexp_array [] -> + Doc.concat + [Doc.lbracket; printCommentsInside cmtTbl e.pexp_loc; Doc.rbracket] + | Pexp_array exprs -> + Doc.group + (Doc.concat + [ + Doc.lbracket; + Doc.indent + (Doc.concat + [ + Doc.softLine; + Doc.join + ~sep:(Doc.concat [Doc.text ","; Doc.line]) + (List.map + (fun expr -> + let doc = + printExpressionWithComments ~customLayout expr + cmtTbl + in + match Parens.expr expr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr braces + | Nothing -> doc) + exprs); + ]); + Doc.trailingComma; + Doc.softLine; + Doc.rbracket; + ]) + | Pexp_variant (label, args) -> + let variantName = Doc.concat [Doc.text "#"; printPolyVarIdent label] in + let args = + match args with + | None -> Doc.nil + | Some {pexp_desc = Pexp_construct ({txt = Longident.Lident "()"}, _)} + -> + Doc.text "()" + (* #poly((1, 2) *) + | Some {pexp_desc = Pexp_tuple [({pexp_desc = Pexp_tuple _} as arg)]} -> + Doc.concat + [ + Doc.lparen; + (let doc = printExpressionWithComments ~customLayout arg cmtTbl in + match Parens.expr arg with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc arg braces + | Nothing -> doc); + Doc.rparen; + ] + | Some {pexp_desc = Pexp_tuple args} -> + Doc.concat + [ + Doc.lparen; + Doc.indent + (Doc.concat + [ + Doc.softLine; + Doc.join + ~sep:(Doc.concat [Doc.comma; Doc.line]) + (List.map + (fun expr -> + let doc = + printExpressionWithComments ~customLayout expr + cmtTbl + in + match Parens.expr expr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr braces + | Nothing -> doc) + args); + ]); + Doc.trailingComma; + Doc.softLine; + Doc.rparen; + ] + | Some arg -> + let argDoc = + let doc = printExpressionWithComments ~customLayout arg cmtTbl in + match Parens.expr arg with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc arg braces + | Nothing -> doc + in + let shouldHug = ParsetreeViewer.isHuggableExpression arg in + Doc.concat + [ + Doc.lparen; + (if shouldHug then argDoc + else + Doc.concat + [ + Doc.indent (Doc.concat [Doc.softLine; argDoc]); + Doc.trailingComma; + Doc.softLine; + ]); + Doc.rparen; + ] + in + Doc.group (Doc.concat [variantName; args]) + | Pexp_record (rows, spreadExpr) -> + if rows = [] then + Doc.concat + [Doc.lbrace; printCommentsInside cmtTbl e.pexp_loc; Doc.rbrace] + else + let spread = + match spreadExpr with + | None -> Doc.nil + | Some expr -> + Doc.concat + [ + Doc.dotdotdot; + (let doc = + printExpressionWithComments ~customLayout expr cmtTbl + in + match Parens.expr expr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr braces + | Nothing -> doc); + Doc.comma; + Doc.line; + ] + in + (* If the record is written over multiple lines, break automatically + * `let x = {a: 1, b: 3}` -> same line, break when line-width exceeded + 
* `let x = { + * a: 1, + * b: 2, + * }` -> record is written on multiple lines, break the group *) + let forceBreak = + e.pexp_loc.loc_start.pos_lnum < e.pexp_loc.loc_end.pos_lnum + in + let punningAllowed = + match (spreadExpr, rows) with + | None, [_] -> false (* disallow punning for single-element records *) + | _ -> true + in + Doc.breakableGroup ~forceBreak + (Doc.concat + [ + Doc.lbrace; + Doc.indent + (Doc.concat + [ + Doc.softLine; + spread; + Doc.join + ~sep:(Doc.concat [Doc.text ","; Doc.line]) + (List.map + (fun row -> + printExpressionRecordRow ~customLayout row cmtTbl + punningAllowed) + rows); + ]); + Doc.trailingComma; + Doc.softLine; + Doc.rbrace; + ]) + | Pexp_extension extension -> ( + match extension with + | ( {txt = "bs.obj" | "obj"}, + PStr + [ + { + pstr_loc = loc; + pstr_desc = Pstr_eval ({pexp_desc = Pexp_record (rows, _)}, []); + }; + ] ) -> + (* If the object is written over multiple lines, break automatically + * `let x = {"a": 1, "b": 3}` -> same line, break when line-width exceeded + * `let x = { + * "a": 1, + * "b": 2, + * }` -> object is written on multiple lines, break the group *) + let forceBreak = loc.loc_start.pos_lnum < loc.loc_end.pos_lnum in + Doc.breakableGroup ~forceBreak + (Doc.concat + [ + Doc.lbrace; + Doc.indent + (Doc.concat + [ + Doc.softLine; + Doc.join + ~sep:(Doc.concat [Doc.text ","; Doc.line]) + (List.map + (fun row -> + printBsObjectRow ~customLayout row cmtTbl) + rows); + ]); + Doc.trailingComma; + Doc.softLine; + Doc.rbrace; + ]) + | extension -> + printExtension ~customLayout ~atModuleLvl:false extension cmtTbl) + | Pexp_apply (e, [(Nolabel, {pexp_desc = Pexp_array subLists})]) + when ParsetreeViewer.isSpreadBeltListConcat e -> + printBeltListConcatApply ~customLayout subLists cmtTbl + | Pexp_apply _ -> + if ParsetreeViewer.isUnaryExpression e then + printUnaryExpression ~customLayout e cmtTbl + else if ParsetreeViewer.isTemplateLiteral e then + printTemplateLiteral ~customLayout e cmtTbl + else if ParsetreeViewer.isBinaryExpression e then + printBinaryExpression ~customLayout e cmtTbl + else printPexpApply ~customLayout e cmtTbl + | Pexp_unreachable -> Doc.dot + | Pexp_field (expr, longidentLoc) -> + let lhs = + let doc = printExpressionWithComments ~customLayout expr cmtTbl in + match Parens.fieldExpr expr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr braces + | Nothing -> doc + in + Doc.concat [lhs; Doc.dot; printLidentPath longidentLoc cmtTbl] + | Pexp_setfield (expr1, longidentLoc, expr2) -> + printSetFieldExpr ~customLayout e.pexp_attributes expr1 longidentLoc expr2 + e.pexp_loc cmtTbl + | Pexp_ifthenelse (_ifExpr, _thenExpr, _elseExpr) + when ParsetreeViewer.isTernaryExpr e -> + let parts, alternate = ParsetreeViewer.collectTernaryParts e in + let ternaryDoc = + match parts with + | (condition1, consequent1) :: rest -> + Doc.group + (Doc.concat + [ + printTernaryOperand ~customLayout condition1 cmtTbl; + Doc.indent + (Doc.concat + [ + Doc.line; + Doc.indent + (Doc.concat + [ + Doc.text "? "; + printTernaryOperand ~customLayout consequent1 + cmtTbl; + ]); + Doc.concat + (List.map + (fun (condition, consequent) -> + Doc.concat + [ + Doc.line; + Doc.text ": "; + printTernaryOperand ~customLayout condition + cmtTbl; + Doc.line; + Doc.text "? 
"; + printTernaryOperand ~customLayout consequent + cmtTbl; + ]) + rest); + Doc.line; + Doc.text ": "; + Doc.indent + (printTernaryOperand ~customLayout alternate cmtTbl); + ]); + ]) + | _ -> Doc.nil + in + let attrs = ParsetreeViewer.filterTernaryAttributes e.pexp_attributes in + let needsParens = + match ParsetreeViewer.filterParsingAttrs attrs with + | [] -> false + | _ -> true + in + Doc.concat + [ + printAttributes ~customLayout attrs cmtTbl; + (if needsParens then addParens ternaryDoc else ternaryDoc); + ] + | Pexp_ifthenelse (_ifExpr, _thenExpr, _elseExpr) -> + let ifs, elseExpr = ParsetreeViewer.collectIfExpressions e in + printIfChain ~customLayout e.pexp_attributes ifs elseExpr cmtTbl + | Pexp_while (expr1, expr2) -> + let condition = + let doc = printExpressionWithComments ~customLayout expr1 cmtTbl in + match Parens.expr expr1 with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr1 braces + | Nothing -> doc + in + Doc.breakableGroup ~forceBreak:true + (Doc.concat + [ + Doc.text "while "; + (if ParsetreeViewer.isBlockExpr expr1 then condition + else Doc.group (Doc.ifBreaks (addParens condition) condition)); + Doc.space; + printExpressionBlock ~customLayout ~braces:true expr2 cmtTbl; + ]) + | Pexp_for (pattern, fromExpr, toExpr, directionFlag, body) -> + Doc.breakableGroup ~forceBreak:true + (Doc.concat + [ + Doc.text "for "; + printPattern ~customLayout pattern cmtTbl; + Doc.text " in "; + (let doc = + printExpressionWithComments ~customLayout fromExpr cmtTbl + in + match Parens.expr fromExpr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc fromExpr braces + | Nothing -> doc); + printDirectionFlag directionFlag; + (let doc = + printExpressionWithComments ~customLayout toExpr cmtTbl + in + match Parens.expr toExpr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc toExpr braces + | Nothing -> doc); + Doc.space; + printExpressionBlock ~customLayout ~braces:true body cmtTbl; + ]) + | Pexp_constraint + ( {pexp_desc = Pexp_pack modExpr}, + {ptyp_desc = Ptyp_package packageType; ptyp_loc} ) -> + Doc.group + (Doc.concat + [ + Doc.text "module("; + Doc.indent + (Doc.concat + [ + Doc.softLine; + printModExpr ~customLayout modExpr cmtTbl; + Doc.text ": "; + printComments + (printPackageType ~customLayout + ~printModuleKeywordAndParens:false packageType cmtTbl) + cmtTbl ptyp_loc; + ]); + Doc.softLine; + Doc.rparen; + ]) + | Pexp_constraint (expr, typ) -> + let exprDoc = + let doc = printExpressionWithComments ~customLayout expr cmtTbl in + match Parens.expr expr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr braces + | Nothing -> doc + in + Doc.concat [exprDoc; Doc.text ": "; printTypExpr ~customLayout typ cmtTbl] + | Pexp_letmodule ({txt = _modName}, _modExpr, _expr) -> + printExpressionBlock ~customLayout ~braces:true e cmtTbl + | Pexp_letexception (_extensionConstructor, _expr) -> + printExpressionBlock ~customLayout ~braces:true e cmtTbl + | Pexp_assert expr -> + let rhs = + let doc = printExpressionWithComments ~customLayout expr cmtTbl in + match Parens.lazyOrAssertOrAwaitExprRhs expr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr braces + | Nothing -> doc + in + Doc.concat [Doc.text "assert "; rhs] + | Pexp_lazy expr -> + let rhs = + let doc = printExpressionWithComments ~customLayout expr cmtTbl in + match Parens.lazyOrAssertOrAwaitExprRhs expr with + | Parens.Parenthesized -> addParens doc + | 
Braced braces -> printBraces doc expr braces + | Nothing -> doc + in + Doc.group (Doc.concat [Doc.text "lazy "; rhs]) + | Pexp_open (_overrideFlag, _longidentLoc, _expr) -> + printExpressionBlock ~customLayout ~braces:true e cmtTbl + | Pexp_pack modExpr -> + Doc.group + (Doc.concat + [ + Doc.text "module("; + Doc.indent + (Doc.concat + [Doc.softLine; printModExpr ~customLayout modExpr cmtTbl]); + Doc.softLine; + Doc.rparen; + ]) + | Pexp_sequence _ -> + printExpressionBlock ~customLayout ~braces:true e cmtTbl + | Pexp_let _ -> printExpressionBlock ~customLayout ~braces:true e cmtTbl + | Pexp_fun + ( Nolabel, + None, + {ppat_desc = Ppat_var {txt = "__x"}}, + {pexp_desc = Pexp_apply _} ) -> + (* (__x) => f(a, __x, c) -----> f(a, _, c) *) + printExpressionWithComments ~customLayout + (ParsetreeViewer.rewriteUnderscoreApply e) + cmtTbl + | Pexp_fun _ | Pexp_newtype _ -> + let attrsOnArrow, parameters, returnExpr = ParsetreeViewer.funExpr e in + let ParsetreeViewer.{async; uncurried; attributes = attrs} = + ParsetreeViewer.processFunctionAttributes attrsOnArrow + in + let returnExpr, typConstraint = + match returnExpr.pexp_desc with + | Pexp_constraint (expr, typ) -> + ( { + expr with + pexp_attributes = + List.concat [expr.pexp_attributes; returnExpr.pexp_attributes]; + }, + Some typ ) + | _ -> (returnExpr, None) + in + let hasConstraint = + match typConstraint with + | Some _ -> true + | None -> false + in + let parametersDoc = + printExprFunParameters ~customLayout ~inCallback:NoCallback ~uncurried + ~async ~hasConstraint parameters cmtTbl + in + let returnExprDoc = + let optBraces, _ = ParsetreeViewer.processBracesAttr returnExpr in + let shouldInline = + match (returnExpr.pexp_desc, optBraces) with + | _, Some _ -> true + | ( ( Pexp_array _ | Pexp_tuple _ + | Pexp_construct (_, Some _) + | Pexp_record _ ), + _ ) -> + true + | _ -> false + in + let shouldIndent = + match returnExpr.pexp_desc with + | Pexp_sequence _ | Pexp_let _ | Pexp_letmodule _ + | Pexp_letexception _ | Pexp_open _ -> + false + | _ -> true + in + let returnDoc = + let doc = + printExpressionWithComments ~customLayout returnExpr cmtTbl + in + match Parens.expr returnExpr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc returnExpr braces + | Nothing -> doc + in + if shouldInline then Doc.concat [Doc.space; returnDoc] + else + Doc.group + (if shouldIndent then Doc.indent (Doc.concat [Doc.line; returnDoc]) + else Doc.concat [Doc.space; returnDoc]) + in + let typConstraintDoc = + match typConstraint with + | Some typ -> + let typDoc = + let doc = printTypExpr ~customLayout typ cmtTbl in + if Parens.arrowReturnTypExpr typ then addParens doc else doc + in + Doc.concat [Doc.text ": "; typDoc] + | _ -> Doc.nil + in + let attrs = printAttributes ~customLayout attrs cmtTbl in + Doc.group + (Doc.concat + [ + attrs; + parametersDoc; + typConstraintDoc; + Doc.text " =>"; + returnExprDoc; + ]) + | Pexp_try (expr, cases) -> + let exprDoc = + let doc = printExpressionWithComments ~customLayout expr cmtTbl in + match Parens.expr expr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr braces + | Nothing -> doc + in + Doc.concat + [ + Doc.text "try "; + exprDoc; + Doc.text " catch "; + printCases ~customLayout cases cmtTbl; + ] + | Pexp_match (_, [_; _]) when ParsetreeViewer.isIfLetExpr e -> + let ifs, elseExpr = ParsetreeViewer.collectIfExpressions e in + printIfChain ~customLayout e.pexp_attributes ifs elseExpr cmtTbl + | Pexp_match (expr, cases) -> + let exprDoc = + 
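+      (* the scrutinee of the switch, parenthesized or braced when Parens.expr requires it *)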
let doc = printExpressionWithComments ~customLayout expr cmtTbl in + match Parens.expr expr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr braces + | Nothing -> doc + in + Doc.concat + [ + Doc.text "switch "; + exprDoc; + Doc.space; + printCases ~customLayout cases cmtTbl; + ] + | Pexp_function cases -> + Doc.concat + [Doc.text "x => switch x "; printCases ~customLayout cases cmtTbl] + | Pexp_coerce (expr, typOpt, typ) -> + let docExpr = printExpressionWithComments ~customLayout expr cmtTbl in + let docTyp = printTypExpr ~customLayout typ cmtTbl in + let ofType = + match typOpt with + | None -> Doc.nil + | Some typ1 -> + Doc.concat [Doc.text ": "; printTypExpr ~customLayout typ1 cmtTbl] + in + Doc.concat + [Doc.lparen; docExpr; ofType; Doc.text " :> "; docTyp; Doc.rparen] + | Pexp_send (parentExpr, label) -> + let parentDoc = + let doc = printExpressionWithComments ~customLayout parentExpr cmtTbl in + match Parens.unaryExprOperand parentExpr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc parentExpr braces + | Nothing -> doc + in + let member = + let memberDoc = printComments (Doc.text label.txt) cmtTbl label.loc in + Doc.concat [Doc.text "\""; memberDoc; Doc.text "\""] + in + Doc.group (Doc.concat [parentDoc; Doc.lbracket; member; Doc.rbracket]) + | Pexp_new _ -> Doc.text "Pexp_new not impemented in printer" + | Pexp_setinstvar _ -> Doc.text "Pexp_setinstvar not impemented in printer" + | Pexp_override _ -> Doc.text "Pexp_override not impemented in printer" + | Pexp_poly _ -> Doc.text "Pexp_poly not impemented in printer" + | Pexp_object _ -> Doc.text "Pexp_object not impemented in printer" + in + let exprWithAwait = + if ParsetreeViewer.hasAwaitAttribute e.pexp_attributes then + let rhs = + match + Parens.lazyOrAssertOrAwaitExprRhs ~inAwait:true + { + e with + pexp_attributes = + List.filter + (function + | {Location.txt = "ns.braces"}, _ -> false + | _ -> true) + e.pexp_attributes; + } + with + | Parens.Parenthesized -> addParens printedExpression + | Braced braces -> printBraces printedExpression e braces + | Nothing -> printedExpression + in + Doc.concat [Doc.text "await "; rhs] + else printedExpression + in + let shouldPrintItsOwnAttributes = + match e.pexp_desc with + | Pexp_apply _ | Pexp_fun _ | Pexp_newtype _ | Pexp_setfield _ + | Pexp_ifthenelse _ -> + true + | Pexp_match _ when ParsetreeViewer.isIfLetExpr e -> true + | Pexp_construct _ when ParsetreeViewer.hasJsxAttribute e.pexp_attributes -> + true + | _ -> false + in + match e.pexp_attributes with + | [] -> exprWithAwait + | attrs when not shouldPrintItsOwnAttributes -> + Doc.group + (Doc.concat [printAttributes ~customLayout attrs cmtTbl; exprWithAwait]) + | _ -> exprWithAwait + +and printPexpFun ~customLayout ~inCallback e cmtTbl = + let attrsOnArrow, parameters, returnExpr = ParsetreeViewer.funExpr e in + let ParsetreeViewer.{async; uncurried; attributes = attrs} = + ParsetreeViewer.processFunctionAttributes attrsOnArrow + in + let returnExpr, typConstraint = + match returnExpr.pexp_desc with + | Pexp_constraint (expr, typ) -> + ( { + expr with + pexp_attributes = + List.concat [expr.pexp_attributes; returnExpr.pexp_attributes]; + }, + Some typ ) + | _ -> (returnExpr, None) + in + let parametersDoc = + printExprFunParameters ~customLayout ~inCallback ~async ~uncurried + ~hasConstraint: + (match typConstraint with + | Some _ -> true + | None -> false) + parameters cmtTbl + in + let returnShouldIndent = + match returnExpr.pexp_desc with + | 
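+    (* these are printed as a braced block already; don't indent the body further *)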
Pexp_sequence _ | Pexp_let _ | Pexp_letmodule _ | Pexp_letexception _ + | Pexp_open _ -> + false + | _ -> true + in + let returnExprDoc = + let optBraces, _ = ParsetreeViewer.processBracesAttr returnExpr in + let shouldInline = + match (returnExpr.pexp_desc, optBraces) with + | _, Some _ -> true + | ( ( Pexp_array _ | Pexp_tuple _ + | Pexp_construct (_, Some _) + | Pexp_record _ ), + _ ) -> + true + | _ -> false + in + let returnDoc = + let doc = printExpressionWithComments ~customLayout returnExpr cmtTbl in + match Parens.expr returnExpr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc returnExpr braces + | Nothing -> doc + in + if shouldInline then Doc.concat [Doc.space; returnDoc] + else + Doc.group + (if returnShouldIndent then + Doc.concat + [ + Doc.indent (Doc.concat [Doc.line; returnDoc]); + (match inCallback with + | FitsOnOneLine | ArgumentsFitOnOneLine -> Doc.softLine + | _ -> Doc.nil); + ] + else Doc.concat [Doc.space; returnDoc]) + in + let typConstraintDoc = + match typConstraint with + | Some typ -> + Doc.concat [Doc.text ": "; printTypExpr ~customLayout typ cmtTbl] + | _ -> Doc.nil + in + Doc.concat + [ + printAttributes ~customLayout attrs cmtTbl; + parametersDoc; + typConstraintDoc; + Doc.text " =>"; + returnExprDoc; + ] + +and printTernaryOperand ~customLayout expr cmtTbl = + let doc = printExpressionWithComments ~customLayout expr cmtTbl in + match Parens.ternaryOperand expr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr braces + | Nothing -> doc + +and printSetFieldExpr ~customLayout attrs lhs longidentLoc rhs loc cmtTbl = + let rhsDoc = + let doc = printExpressionWithComments ~customLayout rhs cmtTbl in + match Parens.setFieldExprRhs rhs with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc rhs braces + | Nothing -> doc + in + let lhsDoc = + let doc = printExpressionWithComments ~customLayout lhs cmtTbl in + match Parens.fieldExpr lhs with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc lhs braces + | Nothing -> doc + in + let shouldIndent = ParsetreeViewer.isBinaryExpression rhs in + let doc = + Doc.group + (Doc.concat + [ + lhsDoc; + Doc.dot; + printLidentPath longidentLoc cmtTbl; + Doc.text " ="; + (if shouldIndent then + Doc.group (Doc.indent (Doc.concat [Doc.line; rhsDoc])) + else Doc.concat [Doc.space; rhsDoc]); + ]) + in + let doc = + match attrs with + | [] -> doc + | attrs -> + Doc.group (Doc.concat [printAttributes ~customLayout attrs cmtTbl; doc]) + in + printComments doc cmtTbl loc + +and printTemplateLiteral ~customLayout expr cmtTbl = + let tag = ref "js" in + let rec walkExpr expr = + let open Parsetree in + match expr.pexp_desc with + | Pexp_apply + ( {pexp_desc = Pexp_ident {txt = Longident.Lident "^"}}, + [(Nolabel, arg1); (Nolabel, arg2)] ) -> + let lhs = walkExpr arg1 in + let rhs = walkExpr arg2 in + Doc.concat [lhs; rhs] + | Pexp_constant (Pconst_string (txt, Some prefix)) -> + tag := prefix; + printStringContents txt + | _ -> + let doc = printExpressionWithComments ~customLayout expr cmtTbl in + Doc.group (Doc.concat [Doc.text "${"; Doc.indent doc; Doc.rbrace]) + in + let content = walkExpr expr in + Doc.concat + [ + (if !tag = "js" then Doc.nil else Doc.text !tag); + Doc.text "`"; + content; + Doc.text "`"; + ] + +and printUnaryExpression ~customLayout expr cmtTbl = + let printUnaryOperator op = + Doc.text + (match op with + | "~+" -> "+" + | "~+." -> "+." + | "~-" -> "-" + | "~-." -> "-." + | "not" -> "!" 
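(* Editor annotation, not part of the original patch: the table above maps
   the internal unary primitives back to surface syntax, so ~-x prints as
   -x and not(cond) prints as !cond. printUnaryExpression is only called
   on these operators, which is why the catch-all case below is an
   assert false. *)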
+ | _ -> assert false) + in + match expr.pexp_desc with + | Pexp_apply + ( {pexp_desc = Pexp_ident {txt = Longident.Lident operator}}, + [(Nolabel, operand)] ) -> + let printedOperand = + let doc = printExpressionWithComments ~customLayout operand cmtTbl in + match Parens.unaryExprOperand operand with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc operand braces + | Nothing -> doc + in + let doc = Doc.concat [printUnaryOperator operator; printedOperand] in + printComments doc cmtTbl expr.pexp_loc + | _ -> assert false + +and printBinaryExpression ~customLayout (expr : Parsetree.expression) cmtTbl = + let printBinaryOperator ~inlineRhs operator = + let operatorTxt = + match operator with + | "|." -> "->" + | "^" -> "++" + | "=" -> "==" + | "==" -> "===" + | "<>" -> "!=" + | "!=" -> "!==" + | txt -> txt + in + let spacingBeforeOperator = + if operator = "|." then Doc.softLine + else if operator = "|>" then Doc.line + else Doc.space + in + let spacingAfterOperator = + if operator = "|." then Doc.nil + else if operator = "|>" then Doc.space + else if inlineRhs then Doc.space + else Doc.line + in + Doc.concat + [spacingBeforeOperator; Doc.text operatorTxt; spacingAfterOperator] + in + let printOperand ~isLhs expr parentOperator = + let rec flatten ~isLhs expr parentOperator = + if ParsetreeViewer.isBinaryExpression expr then + match expr with + | { + pexp_desc = + Pexp_apply + ( {pexp_desc = Pexp_ident {txt = Longident.Lident operator}}, + [(_, left); (_, right)] ); + } -> + if + ParsetreeViewer.flattenableOperators parentOperator operator + && not (ParsetreeViewer.hasAttributes expr.pexp_attributes) + then + let leftPrinted = flatten ~isLhs:true left operator in + let rightPrinted = + let rightPrinteableAttrs, rightInternalAttrs = + ParsetreeViewer.partitionPrintableAttributes + right.pexp_attributes + in + let doc = + printExpressionWithComments ~customLayout + {right with pexp_attributes = rightInternalAttrs} + cmtTbl + in + let doc = + if Parens.flattenOperandRhs parentOperator right then + Doc.concat [Doc.lparen; doc; Doc.rparen] + else doc + in + let doc = + Doc.concat + [ + printAttributes ~customLayout rightPrinteableAttrs cmtTbl; + doc; + ] + in + match rightPrinteableAttrs with + | [] -> doc + | _ -> addParens doc + in + let isAwait = + ParsetreeViewer.hasAwaitAttribute expr.pexp_attributes + in + let doc = + if isAwait then + let parens = + Res_parens.binaryOperatorInsideAwaitNeedsParens operator + in + Doc.concat + [ + Doc.lparen; + Doc.text "await "; + (if parens then Doc.lparen else Doc.nil); + leftPrinted; + printBinaryOperator ~inlineRhs:false operator; + rightPrinted; + (if parens then Doc.rparen else Doc.nil); + Doc.rparen; + ] + else + Doc.concat + [ + leftPrinted; + printBinaryOperator ~inlineRhs:false operator; + rightPrinted; + ] + in + + let doc = + if (not isLhs) && Parens.rhsBinaryExprOperand operator expr then + Doc.concat [Doc.lparen; doc; Doc.rparen] + else doc + in + printComments doc cmtTbl expr.pexp_loc + else + let printeableAttrs, internalAttrs = + ParsetreeViewer.partitionPrintableAttributes expr.pexp_attributes + in + let doc = + printExpressionWithComments ~customLayout + {expr with pexp_attributes = internalAttrs} + cmtTbl + in + let doc = + if + Parens.subBinaryExprOperand parentOperator operator + || printeableAttrs <> [] + && (ParsetreeViewer.isBinaryExpression expr + || ParsetreeViewer.isTernaryExpr expr) + then Doc.concat [Doc.lparen; doc; Doc.rparen] + else doc + in + Doc.concat + [printAttributes ~customLayout 
printeableAttrs cmtTbl; doc] + | _ -> assert false + else + match expr.pexp_desc with + | Pexp_apply + ( {pexp_desc = Pexp_ident {txt = Longident.Lident "^"; loc}}, + [(Nolabel, _); (Nolabel, _)] ) + when loc.loc_ghost -> + let doc = printTemplateLiteral ~customLayout expr cmtTbl in + printComments doc cmtTbl expr.Parsetree.pexp_loc + | Pexp_setfield (lhs, field, rhs) -> + let doc = + printSetFieldExpr ~customLayout expr.pexp_attributes lhs field rhs + expr.pexp_loc cmtTbl + in + if isLhs then addParens doc else doc + | Pexp_apply + ( {pexp_desc = Pexp_ident {txt = Longident.Lident "#="}}, + [(Nolabel, lhs); (Nolabel, rhs)] ) -> + let rhsDoc = printExpressionWithComments ~customLayout rhs cmtTbl in + let lhsDoc = printExpressionWithComments ~customLayout lhs cmtTbl in + (* TODO: unify indentation of "=" *) + let shouldIndent = ParsetreeViewer.isBinaryExpression rhs in + let doc = + Doc.group + (Doc.concat + [ + lhsDoc; + Doc.text " ="; + (if shouldIndent then + Doc.group (Doc.indent (Doc.concat [Doc.line; rhsDoc])) + else Doc.concat [Doc.space; rhsDoc]); + ]) + in + let doc = + match expr.pexp_attributes with + | [] -> doc + | attrs -> + Doc.group + (Doc.concat [printAttributes ~customLayout attrs cmtTbl; doc]) + in + if isLhs then addParens doc else doc + | _ -> ( + let doc = printExpressionWithComments ~customLayout expr cmtTbl in + match Parens.binaryExprOperand ~isLhs expr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr braces + | Nothing -> doc) + in + flatten ~isLhs expr parentOperator + in + match expr.pexp_desc with + | Pexp_apply + ( {pexp_desc = Pexp_ident {txt = Longident.Lident (("|." | "|>") as op)}}, + [(Nolabel, lhs); (Nolabel, rhs)] ) + when not + (ParsetreeViewer.isBinaryExpression lhs + || ParsetreeViewer.isBinaryExpression rhs + || printAttributes ~customLayout expr.pexp_attributes cmtTbl + <> Doc.nil) -> + let lhsHasCommentBelow = hasCommentBelow cmtTbl lhs.pexp_loc in + let lhsDoc = printOperand ~isLhs:true lhs op in + let rhsDoc = printOperand ~isLhs:false rhs op in + Doc.group + (Doc.concat + [ + printAttributes ~customLayout expr.pexp_attributes cmtTbl; + lhsDoc; + (match (lhsHasCommentBelow, op) with + | true, "|." -> Doc.concat [Doc.softLine; Doc.text "->"] + | false, "|." 
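(* Editor annotation, not part of the original patch: printBinaryOperator
   rewrites the internal operators to their ReScript spellings (|. becomes
   ->, ^ becomes ++, = becomes ==, == becomes ===, <> becomes != and !=
   becomes !==), and the flatten helper above folds chains of compatible
   operators into one group so they print without redundant parentheses.
   The match around this point only decides pipe placement: -> hugs the
   left operand unless a comment sits below it, while |> keeps spaces on
   both sides. *)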
-> Doc.text "->" + | true, "|>" -> Doc.concat [Doc.line; Doc.text "|> "] + | false, "|>" -> Doc.text " |> " + | _ -> Doc.nil); + rhsDoc; + ]) + | Pexp_apply + ( {pexp_desc = Pexp_ident {txt = Longident.Lident operator}}, + [(Nolabel, lhs); (Nolabel, rhs)] ) -> + let right = + let operatorWithRhs = + let rhsDoc = printOperand ~isLhs:false rhs operator in + Doc.concat + [ + printBinaryOperator + ~inlineRhs:(ParsetreeViewer.shouldInlineRhsBinaryExpr rhs) + operator; + rhsDoc; + ] + in + if ParsetreeViewer.shouldIndentBinaryExpr expr then + Doc.group (Doc.indent operatorWithRhs) + else operatorWithRhs + in + let doc = + Doc.group (Doc.concat [printOperand ~isLhs:true lhs operator; right]) + in + Doc.group + (Doc.concat + [ + printAttributes ~customLayout expr.pexp_attributes cmtTbl; + (match + Parens.binaryExpr + { + expr with + pexp_attributes = + ParsetreeViewer.filterPrintableAttributes + expr.pexp_attributes; + } + with + | Braced bracesLoc -> printBraces doc expr bracesLoc + | Parenthesized -> addParens doc + | Nothing -> doc); + ]) + | _ -> Doc.nil + +and printBeltListConcatApply ~customLayout subLists cmtTbl = + let makeSpreadDoc commaBeforeSpread = function + | Some expr -> + Doc.concat + [ + commaBeforeSpread; + Doc.dotdotdot; + (let doc = printExpressionWithComments ~customLayout expr cmtTbl in + match Parens.expr expr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr braces + | Nothing -> doc); + ] + | None -> Doc.nil + in + let makeSubListDoc (expressions, spread) = + let commaBeforeSpread = + match expressions with + | [] -> Doc.nil + | _ -> Doc.concat [Doc.text ","; Doc.line] + in + let spreadDoc = makeSpreadDoc commaBeforeSpread spread in + Doc.concat + [ + Doc.join + ~sep:(Doc.concat [Doc.text ","; Doc.line]) + (List.map + (fun expr -> + let doc = + printExpressionWithComments ~customLayout expr cmtTbl + in + match Parens.expr expr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr braces + | Nothing -> doc) + expressions); + spreadDoc; + ] + in + Doc.group + (Doc.concat + [ + Doc.text "list{"; + Doc.indent + (Doc.concat + [ + Doc.softLine; + Doc.join + ~sep:(Doc.concat [Doc.text ","; Doc.line]) + (List.map makeSubListDoc + (List.map ParsetreeViewer.collectListExpressions subLists)); + ]); + Doc.trailingComma; + Doc.softLine; + Doc.rbrace; + ]) + +(* callExpr(arg1, arg2) *) +and printPexpApply ~customLayout expr cmtTbl = + match expr.pexp_desc with + | Pexp_apply + ( {pexp_desc = Pexp_ident {txt = Longident.Lident "##"}}, + [(Nolabel, parentExpr); (Nolabel, memberExpr)] ) -> + let parentDoc = + let doc = printExpressionWithComments ~customLayout parentExpr cmtTbl in + match Parens.unaryExprOperand parentExpr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc parentExpr braces + | Nothing -> doc + in + let member = + let memberDoc = + match memberExpr.pexp_desc with + | Pexp_ident lident -> + printComments (printLongident lident.txt) cmtTbl memberExpr.pexp_loc + | _ -> printExpressionWithComments ~customLayout memberExpr cmtTbl + in + Doc.concat [Doc.text "\""; memberDoc; Doc.text "\""] + in + Doc.group + (Doc.concat + [ + printAttributes ~customLayout expr.pexp_attributes cmtTbl; + parentDoc; + Doc.lbracket; + member; + Doc.rbracket; + ]) + | Pexp_apply + ( {pexp_desc = Pexp_ident {txt = Longident.Lident "#="}}, + [(Nolabel, lhs); (Nolabel, rhs)] ) -> ( + let rhsDoc = + let doc = printExpressionWithComments ~customLayout rhs cmtTbl in + match Parens.expr rhs with + | 
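(* Editor annotation, not part of the original patch: these first cases of
   printPexpApply restore the JS object sugar: obj##field prints as
   bracket access with a quoted key, obj["field"], and #= prints as a
   plain assignment lhs = rhs, pushing the right-hand side onto an
   indented new line only when it is an unbraced binary expression. *)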
Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc rhs braces + | Nothing -> doc + in + (* TODO: unify indentation of "=" *) + let shouldIndent = + (not (ParsetreeViewer.isBracedExpr rhs)) + && ParsetreeViewer.isBinaryExpression rhs + in + let doc = + Doc.group + (Doc.concat + [ + printExpressionWithComments ~customLayout lhs cmtTbl; + Doc.text " ="; + (if shouldIndent then + Doc.group (Doc.indent (Doc.concat [Doc.line; rhsDoc])) + else Doc.concat [Doc.space; rhsDoc]); + ]) + in + match expr.pexp_attributes with + | [] -> doc + | attrs -> + Doc.group (Doc.concat [printAttributes ~customLayout attrs cmtTbl; doc])) + | Pexp_apply + ( {pexp_desc = Pexp_ident {txt = Longident.Ldot (Lident "Array", "get")}}, + [(Nolabel, parentExpr); (Nolabel, memberExpr)] ) + when not (ParsetreeViewer.isRewrittenUnderscoreApplySugar parentExpr) -> + (* Don't print the Array.get(_, 0) sugar a.k.a. (__x) => Array.get(__x, 0) as _[0] *) + let member = + let memberDoc = + let doc = printExpressionWithComments ~customLayout memberExpr cmtTbl in + match Parens.expr memberExpr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc memberExpr braces + | Nothing -> doc + in + let shouldInline = + match memberExpr.pexp_desc with + | Pexp_constant _ | Pexp_ident _ -> true + | _ -> false + in + if shouldInline then memberDoc + else + Doc.concat + [Doc.indent (Doc.concat [Doc.softLine; memberDoc]); Doc.softLine] + in + let parentDoc = + let doc = printExpressionWithComments ~customLayout parentExpr cmtTbl in + match Parens.unaryExprOperand parentExpr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc parentExpr braces + | Nothing -> doc + in + Doc.group + (Doc.concat + [ + printAttributes ~customLayout expr.pexp_attributes cmtTbl; + parentDoc; + Doc.lbracket; + member; + Doc.rbracket; + ]) + | Pexp_apply + ( {pexp_desc = Pexp_ident {txt = Longident.Ldot (Lident "Array", "set")}}, + [(Nolabel, parentExpr); (Nolabel, memberExpr); (Nolabel, targetExpr)] ) + -> + let member = + let memberDoc = + let doc = printExpressionWithComments ~customLayout memberExpr cmtTbl in + match Parens.expr memberExpr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc memberExpr braces + | Nothing -> doc + in + let shouldInline = + match memberExpr.pexp_desc with + | Pexp_constant _ | Pexp_ident _ -> true + | _ -> false + in + if shouldInline then memberDoc + else + Doc.concat + [Doc.indent (Doc.concat [Doc.softLine; memberDoc]); Doc.softLine] + in + let shouldIndentTargetExpr = + if ParsetreeViewer.isBracedExpr targetExpr then false + else + ParsetreeViewer.isBinaryExpression targetExpr + || + match targetExpr with + | { + pexp_attributes = [({Location.txt = "ns.ternary"}, _)]; + pexp_desc = Pexp_ifthenelse (ifExpr, _, _); + } -> + ParsetreeViewer.isBinaryExpression ifExpr + || ParsetreeViewer.hasAttributes ifExpr.pexp_attributes + | {pexp_desc = Pexp_newtype _} -> false + | e -> + ParsetreeViewer.hasAttributes e.pexp_attributes + || ParsetreeViewer.isArrayAccess e + in + let targetExpr = + let doc = printExpressionWithComments ~customLayout targetExpr cmtTbl in + match Parens.expr targetExpr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc targetExpr braces + | Nothing -> doc + in + let parentDoc = + let doc = printExpressionWithComments ~customLayout parentExpr cmtTbl in + match Parens.unaryExprOperand parentExpr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces 
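(* Editor annotation, not part of the original patch: the surrounding
   cases restore the array sugar: Array.get(arr, i) prints as arr[i]
   (unless it is the rewritten underscore-apply form), and
   Array.set(arr, i, x) prints as arr[i] = x, indenting the assigned
   expression when it is a binary expression or otherwise needs the extra
   room because of attributes or nested access. *)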
doc parentExpr braces + | Nothing -> doc + in + Doc.group + (Doc.concat + [ + printAttributes ~customLayout expr.pexp_attributes cmtTbl; + parentDoc; + Doc.lbracket; + member; + Doc.rbracket; + Doc.text " ="; + (if shouldIndentTargetExpr then + Doc.indent (Doc.concat [Doc.line; targetExpr]) + else Doc.concat [Doc.space; targetExpr]); + ]) + (* TODO: cleanup, are those branches even remotely performant? *) + | Pexp_apply ({pexp_desc = Pexp_ident lident}, args) + when ParsetreeViewer.isJsxExpression expr -> + printJsxExpression ~customLayout lident args cmtTbl + | Pexp_apply (callExpr, args) -> + let args = + List.map + (fun (lbl, arg) -> (lbl, ParsetreeViewer.rewriteUnderscoreApply arg)) + args + in + let uncurried, attrs = + ParsetreeViewer.processUncurriedAttribute expr.pexp_attributes + in + let callExprDoc = + let doc = printExpressionWithComments ~customLayout callExpr cmtTbl in + match Parens.callExpr callExpr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc callExpr braces + | Nothing -> doc + in + if ParsetreeViewer.requiresSpecialCallbackPrintingFirstArg args then + let argsDoc = + printArgumentsWithCallbackInFirstPosition ~uncurried ~customLayout args + cmtTbl + in + Doc.concat + [printAttributes ~customLayout attrs cmtTbl; callExprDoc; argsDoc] + else if ParsetreeViewer.requiresSpecialCallbackPrintingLastArg args then + let argsDoc = + printArgumentsWithCallbackInLastPosition ~customLayout ~uncurried args + cmtTbl + in + (* + * Fixes the following layout (the `[` and `]` should break): + * [fn(x => { + * let _ = x + * }), fn(y => { + * let _ = y + * }), fn(z => { + * let _ = z + * })] + * See `Doc.willBreak documentation in interface file for more context. + * Context: + * https://github.com/rescript-lang/syntax/issues/111 + * https://github.com/rescript-lang/syntax/issues/166 + *) + let maybeBreakParent = + if Doc.willBreak argsDoc then Doc.breakParent else Doc.nil + in + Doc.concat + [ + maybeBreakParent; + printAttributes ~customLayout attrs cmtTbl; + callExprDoc; + argsDoc; + ] + else + let argsDoc = printArguments ~customLayout ~uncurried args cmtTbl in + Doc.concat + [printAttributes ~customLayout attrs cmtTbl; callExprDoc; argsDoc] + | _ -> assert false + +and printJsxExpression ~customLayout lident args cmtTbl = + let name = printJsxName lident in + let formattedProps, children = printJsxProps ~customLayout args cmtTbl in + (*
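   (editor annotation, not part of the original patch) printJsxExpression
   prints an element such as <div className="msg" />: the element stays
   self-closing when its children are an empty list carrying no comments,
   and is otherwise printed as an open/close pair with the children
   indented between the tags.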
*) + let hasChildren = + match children with + | Some + { + Parsetree.pexp_desc = + Pexp_construct ({txt = Longident.Lident "[]"}, None); + } -> + false + | None -> false + | _ -> true + in + let isSelfClosing = + match children with + | Some + { + Parsetree.pexp_desc = + Pexp_construct ({txt = Longident.Lident "[]"}, None); + pexp_loc = loc; + } -> + not (hasCommentsInside cmtTbl loc) + | _ -> false + in + let printChildren children = + let lineSep = + match children with + | Some expr -> + if hasNestedJsxOrMoreThanOneChild expr then Doc.hardLine else Doc.line + | None -> Doc.line + in + Doc.concat + [ + Doc.indent + (Doc.concat + [ + Doc.line; + (match children with + | Some childrenExpression -> + printJsxChildren ~customLayout childrenExpression ~sep:lineSep + cmtTbl + | None -> Doc.nil); + ]); + lineSep; + ] + in + Doc.group + (Doc.concat + [ + Doc.group + (Doc.concat + [ + printComments + (Doc.concat [Doc.lessThan; name]) + cmtTbl lident.Asttypes.loc; + formattedProps; + (match children with + | Some + { + Parsetree.pexp_desc = + Pexp_construct ({txt = Longident.Lident "[]"}, None); + } + when isSelfClosing -> + Doc.text "/>" + | _ -> + (* if tag A has trailing comments then put > on the next line + + + *) + if hasTrailingComments cmtTbl lident.Asttypes.loc then + Doc.concat [Doc.softLine; Doc.greaterThan] + else Doc.greaterThan); + ]); + (if isSelfClosing then Doc.nil + else + Doc.concat + [ + (if hasChildren then printChildren children + else + match children with + | Some + { + Parsetree.pexp_desc = + Pexp_construct ({txt = Longident.Lident "[]"}, None); + pexp_loc = loc; + } -> + printCommentsInside cmtTbl loc + | _ -> Doc.nil); + Doc.text "" in + let closing = Doc.text "" in + let lineSep = + if hasNestedJsxOrMoreThanOneChild expr then Doc.hardLine else Doc.line + in + Doc.group + (Doc.concat + [ + opening; + (match expr.pexp_desc with + | Pexp_construct ({txt = Longident.Lident "[]"}, None) -> Doc.nil + | _ -> + Doc.indent + (Doc.concat + [ + Doc.line; + printJsxChildren ~customLayout expr ~sep:lineSep cmtTbl; + ])); + lineSep; + closing; + ]) + +and printJsxChildren ~customLayout (childrenExpr : Parsetree.expression) ~sep + cmtTbl = + match childrenExpr.pexp_desc with + | Pexp_construct ({txt = Longident.Lident "::"}, _) -> + let children, _ = ParsetreeViewer.collectListExpressions childrenExpr in + Doc.group + (Doc.join ~sep + (List.map + (fun (expr : Parsetree.expression) -> + let leadingLineCommentPresent = + hasLeadingLineComment cmtTbl expr.pexp_loc + in + let exprDoc = + printExpressionWithComments ~customLayout expr cmtTbl + in + let addParensOrBraces exprDoc = + (* {(20: int)} make sure that we also protect the expression inside *) + let innerDoc = + if Parens.bracedExpr expr then addParens exprDoc else exprDoc + in + if leadingLineCommentPresent then addBraces innerDoc + else Doc.concat [Doc.lbrace; innerDoc; Doc.rbrace] + in + match Parens.jsxChildExpr expr with + | Nothing -> exprDoc + | Parenthesized -> addParensOrBraces exprDoc + | Braced bracesLoc -> + printComments (addParensOrBraces exprDoc) cmtTbl bracesLoc) + children)) + | _ -> + let leadingLineCommentPresent = + hasLeadingLineComment cmtTbl childrenExpr.pexp_loc + in + let exprDoc = + printExpressionWithComments ~customLayout childrenExpr cmtTbl + in + Doc.concat + [ + Doc.dotdotdot; + (match Parens.jsxChildExpr childrenExpr with + | Parenthesized | Braced _ -> + let innerDoc = + if Parens.bracedExpr childrenExpr then addParens exprDoc + else exprDoc + in + if leadingLineCommentPresent then addBraces 
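(* Editor annotation, not part of the original patch: printJsxChildren
   joins the children with the separator passed in, a hard line when there
   is nested JSX or more than one child; a child that needs protection is
   wrapped in braces, and children given as a single expression rather
   than a list are printed with a leading spread, e.g.
   <div> {...children} </div>. *)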
innerDoc + else Doc.concat [Doc.lbrace; innerDoc; Doc.rbrace] + | Nothing -> exprDoc); + ] + +and printJsxProps ~customLayout args cmtTbl : + Doc.t * Parsetree.expression option = + (* This function was introduced because we have different formatting behavior for self-closing tags and other tags + we always put /> on a new line for self-closing tag when it breaks + + + + + + we should remove this function once the format is unified + *) + let isSelfClosing children = + match children with + | { + Parsetree.pexp_desc = Pexp_construct ({txt = Longident.Lident "[]"}, None); + pexp_loc = loc; + } -> + not (hasCommentsInside cmtTbl loc) + | _ -> false + in + let rec loop props args = + match args with + | [] -> (Doc.nil, None) + | [ + (Asttypes.Labelled "children", children); + ( Asttypes.Nolabel, + { + Parsetree.pexp_desc = + Pexp_construct ({txt = Longident.Lident "()"}, None); + } ); + ] -> + let doc = if isSelfClosing children then Doc.line else Doc.nil in + (doc, Some children) + | ((_, expr) as lastProp) + :: [ + (Asttypes.Labelled "children", children); + ( Asttypes.Nolabel, + { + Parsetree.pexp_desc = + Pexp_construct ({txt = Longident.Lident "()"}, None); + } ); + ] -> + let loc = + match expr.Parsetree.pexp_attributes with + | ({Location.txt = "ns.namedArgLoc"; loc}, _) :: _attrs -> + {loc with loc_end = expr.pexp_loc.loc_end} + | _ -> expr.pexp_loc + in + let trailingCommentsPresent = hasTrailingComments cmtTbl loc in + let propDoc = printJsxProp ~customLayout lastProp cmtTbl in + let formattedProps = + Doc.concat + [ + Doc.indent + (Doc.concat + [ + Doc.line; + Doc.group + (Doc.join ~sep:Doc.line (propDoc :: props |> List.rev)); + ]); + (* print > on new line if the last prop has trailing comments *) + (match (isSelfClosing children, trailingCommentsPresent) with + (* we always put /> on a new line when a self-closing tag breaks *) + | true, _ -> Doc.line + | false, true -> Doc.softLine + | false, false -> Doc.nil); + ] + in + (formattedProps, Some children) + | arg :: args -> + let propDoc = printJsxProp ~customLayout arg cmtTbl in + loop (propDoc :: props) args + in + loop [] args + +and printJsxProp ~customLayout arg cmtTbl = + match arg with + | ( ((Asttypes.Labelled lblTxt | Optional lblTxt) as lbl), + { + Parsetree.pexp_attributes = + [({Location.txt = "ns.namedArgLoc"; loc = argLoc}, _)]; + pexp_desc = Pexp_ident {txt = Longident.Lident ident}; + } ) + when lblTxt = ident (* jsx punning *) -> ( + match lbl with + | Nolabel -> Doc.nil + | Labelled _lbl -> printComments (printIdentLike ident) cmtTbl argLoc + | Optional _lbl -> + let doc = Doc.concat [Doc.question; printIdentLike ident] in + printComments doc cmtTbl argLoc) + | ( ((Asttypes.Labelled lblTxt | Optional lblTxt) as lbl), + { + Parsetree.pexp_attributes = []; + pexp_desc = Pexp_ident {txt = Longident.Lident ident}; + } ) + when lblTxt = ident (* jsx punning when printing from Reason *) -> ( + match lbl with + | Nolabel -> Doc.nil + | Labelled _lbl -> printIdentLike ident + | Optional _lbl -> Doc.concat [Doc.question; printIdentLike ident]) + | Asttypes.Labelled "_spreadProps", expr -> + let doc = printExpressionWithComments ~customLayout expr cmtTbl in + Doc.concat [Doc.lbrace; Doc.dotdotdot; Doc.softLine; doc; Doc.rbrace] + | lbl, expr -> + let argLoc, expr = + match expr.pexp_attributes with + | ({Location.txt = "ns.namedArgLoc"; loc}, _) :: attrs -> + (loc, {expr with pexp_attributes = attrs}) + | _ -> (Location.none, expr) + in + let lblDoc = + match lbl with + | Asttypes.Labelled lbl -> + let lbl = printComments 
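(* Editor annotation, not part of the original patch: printJsxProp prints
   punned props as just the name (name instead of name=name, ?name for an
   optional label), prints the special _spreadProps argument as {...expr},
   and prints every other prop as name=value, bracing the value whenever
   Parens.jsxPropExpr requires protection. *)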
(printIdentLike lbl) cmtTbl argLoc in + Doc.concat [lbl; Doc.equal] + | Asttypes.Optional lbl -> + let lbl = printComments (printIdentLike lbl) cmtTbl argLoc in + Doc.concat [lbl; Doc.equal; Doc.question] + | Nolabel -> Doc.nil + in + let exprDoc = + let leadingLineCommentPresent = + hasLeadingLineComment cmtTbl expr.pexp_loc + in + let doc = printExpressionWithComments ~customLayout expr cmtTbl in + match Parens.jsxPropExpr expr with + | Parenthesized | Braced _ -> + (* {(20: int)} make sure that we also protect the expression inside *) + let innerDoc = if Parens.bracedExpr expr then addParens doc else doc in + if leadingLineCommentPresent then addBraces innerDoc + else Doc.concat [Doc.lbrace; innerDoc; Doc.rbrace] + | _ -> doc + in + let fullLoc = {argLoc with loc_end = expr.pexp_loc.loc_end} in + printComments (Doc.concat [lblDoc; exprDoc]) cmtTbl fullLoc + +(* div -> div. + * Navabar.createElement -> Navbar + * Staff.Users.createElement -> Staff.Users *) +and printJsxName {txt = lident} = + let rec flatten acc lident = + match lident with + | Longident.Lident txt -> txt :: acc + | Ldot (lident, txt) -> + let acc = if txt = "createElement" then acc else txt :: acc in + flatten acc lident + | _ -> acc + in + match lident with + | Longident.Lident txt -> Doc.text txt + | _ as lident -> + let segments = flatten [] lident in + Doc.join ~sep:Doc.dot (List.map Doc.text segments) + +and printArgumentsWithCallbackInFirstPosition ~uncurried ~customLayout args + cmtTbl = + (* Because the same subtree gets printed twice, we need to copy the cmtTbl. + * consumed comments need to be marked not-consumed and reprinted… + * Cheng's different comment algorithm will solve this. *) + let customLayout = customLayout + 1 in + let cmtTblCopy = CommentTable.copy cmtTbl in + let callback, printedArgs = + match args with + | (lbl, expr) :: args -> + let lblDoc = + match lbl with + | Asttypes.Nolabel -> Doc.nil + | Asttypes.Labelled txt -> + Doc.concat [Doc.tilde; printIdentLike txt; Doc.equal] + | Asttypes.Optional txt -> + Doc.concat [Doc.tilde; printIdentLike txt; Doc.equal; Doc.question] + in + let callback = + Doc.concat + [ + lblDoc; + printPexpFun ~customLayout ~inCallback:FitsOnOneLine expr cmtTbl; + ] + in + let callback = lazy (printComments callback cmtTbl expr.pexp_loc) in + let printedArgs = + lazy + (Doc.join + ~sep:(Doc.concat [Doc.comma; Doc.line]) + (List.map (fun arg -> printArgument ~customLayout arg cmtTbl) args)) + in + (callback, printedArgs) + | _ -> assert false + in + + (* Thing.map((arg1, arg2) => MyModuleBlah.toList(argument), foo) *) + (* Thing.map((arg1, arg2) => { + * MyModuleBlah.toList(argument) + * }, longArgumet, veryLooooongArgument) + *) + let fitsOnOneLine = + lazy + (Doc.concat + [ + (if uncurried then Doc.text "(. " else Doc.lparen); + Lazy.force callback; + Doc.comma; + Doc.line; + Lazy.force printedArgs; + Doc.rparen; + ]) + in + + (* Thing.map( + * (param1, parm2) => doStuff(param1, parm2), + * arg1, + * arg2, + * arg3, + * ) + *) + let breakAllArgs = + lazy (printArguments ~customLayout ~uncurried args cmtTblCopy) + in + + (* Sometimes one of the non-callback arguments will break. + * There might be a single line comment in there, or a multiline string etc. + * showDialog( + * ~onConfirm={() => ()}, + * ` + * Do you really want to leave this workspace? + * Some more text with detailed explanations... 
+ * `, + * ~danger=true, + * // comment --> here a single line comment + * ~confirmText="Yes, I am sure!", + * ) + * In this case, we always want the arguments broken over multiple lines, + * like a normal function call. + *) + if customLayout > customLayoutThreshold then Lazy.force breakAllArgs + else if Doc.willBreak (Lazy.force printedArgs) then Lazy.force breakAllArgs + else Doc.customLayout [Lazy.force fitsOnOneLine; Lazy.force breakAllArgs] + +and printArgumentsWithCallbackInLastPosition ~customLayout ~uncurried args + cmtTbl = + (* Because the same subtree gets printed twice, we need to copy the cmtTbl. + * consumed comments need to be marked not-consumed and reprinted… + * Cheng's different comment algorithm will solve this. *) + let customLayout = customLayout + 1 in + let cmtTblCopy = CommentTable.copy cmtTbl in + let cmtTblCopy2 = CommentTable.copy cmtTbl in + let rec loop acc args = + match args with + | [] -> (lazy Doc.nil, lazy Doc.nil, lazy Doc.nil) + | [(lbl, expr)] -> + let lblDoc = + match lbl with + | Asttypes.Nolabel -> Doc.nil + | Asttypes.Labelled txt -> + Doc.concat [Doc.tilde; printIdentLike txt; Doc.equal] + | Asttypes.Optional txt -> + Doc.concat [Doc.tilde; printIdentLike txt; Doc.equal; Doc.question] + in + let callbackFitsOnOneLine = + lazy + (let pexpFunDoc = + printPexpFun ~customLayout ~inCallback:FitsOnOneLine expr cmtTbl + in + let doc = Doc.concat [lblDoc; pexpFunDoc] in + printComments doc cmtTbl expr.pexp_loc) + in + let callbackArgumentsFitsOnOneLine = + lazy + (let pexpFunDoc = + printPexpFun ~customLayout ~inCallback:ArgumentsFitOnOneLine expr + cmtTblCopy + in + let doc = Doc.concat [lblDoc; pexpFunDoc] in + printComments doc cmtTblCopy expr.pexp_loc) + in + ( lazy (Doc.concat (List.rev acc)), + callbackFitsOnOneLine, + callbackArgumentsFitsOnOneLine ) + | arg :: args -> + let argDoc = printArgument ~customLayout arg cmtTbl in + loop (Doc.line :: Doc.comma :: argDoc :: acc) args + in + let printedArgs, callback, callback2 = loop [] args in + + (* Thing.map(foo, (arg1, arg2) => MyModuleBlah.toList(argument)) *) + let fitsOnOneLine = + lazy + (Doc.concat + [ + (if uncurried then Doc.text "(." else Doc.lparen); + Lazy.force printedArgs; + Lazy.force callback; + Doc.rparen; + ]) + in + + (* Thing.map(longArgumet, veryLooooongArgument, (arg1, arg2) => + * MyModuleBlah.toList(argument) + * ) + *) + let arugmentsFitOnOneLine = + lazy + (Doc.concat + [ + (if uncurried then Doc.text "(." else Doc.lparen); + Lazy.force printedArgs; + Doc.breakableGroup ~forceBreak:true (Lazy.force callback2); + Doc.rparen; + ]) + in + + (* Thing.map( + * arg1, + * arg2, + * arg3, + * (param1, parm2) => doStuff(param1, parm2) + * ) + *) + let breakAllArgs = + lazy (printArguments ~customLayout ~uncurried args cmtTblCopy2) + in + + (* Sometimes one of the non-callback arguments will break. + * There might be a single line comment in there, or a multiline string etc. + * showDialog( + * ` + * Do you really want to leave this workspace? + * Some more text with detailed explanations... + * `, + * ~danger=true, + * // comment --> here a single line comment + * ~confirmText="Yes, I am sure!", + * ~onConfirm={() => ()}, + * ) + * In this case, we always want the arguments broken over multiple lines, + * like a normal function call. 
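 *
 * (editor annotation, not part of the original patch) Because these
 * candidate layouts print the same arguments more than once, customLayout
 * is incremented for every nested callback; once it exceeds
 * customLayoutThreshold the printer skips the one-line candidates and
 * goes straight to the fully broken layout, which keeps the work bounded.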
+ *) + if customLayout > customLayoutThreshold then Lazy.force breakAllArgs + else if Doc.willBreak (Lazy.force printedArgs) then Lazy.force breakAllArgs + else + Doc.customLayout + [ + Lazy.force fitsOnOneLine; + Lazy.force arugmentsFitOnOneLine; + Lazy.force breakAllArgs; + ] + +and printArguments ~customLayout ~uncurried + (args : (Asttypes.arg_label * Parsetree.expression) list) cmtTbl = + match args with + | [ + ( Nolabel, + { + pexp_desc = Pexp_construct ({txt = Longident.Lident "()"}, _); + pexp_loc = loc; + } ); + ] -> ( + (* See "parseCallExpr", ghost unit expression is used the implement + * arity zero vs arity one syntax. + * Related: https://github.com/rescript-lang/syntax/issues/138 *) + match (uncurried, loc.loc_ghost) with + | true, true -> Doc.text "(.)" (* arity zero *) + | true, false -> Doc.text "(. ())" (* arity one *) + | _ -> Doc.text "()") + | [(Nolabel, arg)] when ParsetreeViewer.isHuggableExpression arg -> + let argDoc = + let doc = printExpressionWithComments ~customLayout arg cmtTbl in + match Parens.expr arg with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc arg braces + | Nothing -> doc + in + Doc.concat + [(if uncurried then Doc.text "(. " else Doc.lparen); argDoc; Doc.rparen] + | args -> + Doc.group + (Doc.concat + [ + (if uncurried then Doc.text "(." else Doc.lparen); + Doc.indent + (Doc.concat + [ + (if uncurried then Doc.line else Doc.softLine); + Doc.join + ~sep:(Doc.concat [Doc.comma; Doc.line]) + (List.map + (fun arg -> printArgument ~customLayout arg cmtTbl) + args); + ]); + Doc.trailingComma; + Doc.softLine; + Doc.rparen; + ]) + +(* + * argument ::= + * | _ (* syntax sugar *) + * | expr + * | expr : type + * | ~ label-name + * | ~ label-name + * | ~ label-name ? + * | ~ label-name = expr + * | ~ label-name = _ (* syntax sugar *) + * | ~ label-name = expr : type + * | ~ label-name = ? expr + * | ~ label-name = ? _ (* syntax sugar *) + * | ~ label-name = ? expr : type *) +and printArgument ~customLayout (argLbl, arg) cmtTbl = + match (argLbl, arg) with + (* ~a (punned)*) + | ( Asttypes.Labelled lbl, + ({ + pexp_desc = Pexp_ident {txt = Longident.Lident name}; + pexp_attributes = [] | [({Location.txt = "ns.namedArgLoc"}, _)]; + } as argExpr) ) + when lbl = name && not (ParsetreeViewer.isBracedExpr argExpr) -> + let loc = + match arg.pexp_attributes with + | ({Location.txt = "ns.namedArgLoc"; loc}, _) :: _ -> loc + | _ -> arg.pexp_loc + in + let doc = Doc.concat [Doc.tilde; printIdentLike lbl] in + printComments doc cmtTbl loc + (* ~a: int (punned)*) + | ( Asttypes.Labelled lbl, + { + pexp_desc = + Pexp_constraint + ( ({pexp_desc = Pexp_ident {txt = Longident.Lident name}} as argExpr), + typ ); + pexp_loc; + pexp_attributes = + ([] | [({Location.txt = "ns.namedArgLoc"}, _)]) as attrs; + } ) + when lbl = name && not (ParsetreeViewer.isBracedExpr argExpr) -> + let loc = + match attrs with + | ({Location.txt = "ns.namedArgLoc"; loc}, _) :: _ -> + {loc with loc_end = pexp_loc.loc_end} + | _ -> arg.pexp_loc + in + let doc = + Doc.concat + [ + Doc.tilde; + printIdentLike lbl; + Doc.text ": "; + printTypExpr ~customLayout typ cmtTbl; + ] + in + printComments doc cmtTbl loc + (* ~a? 
(optional lbl punned)*) + | ( Asttypes.Optional lbl, + { + pexp_desc = Pexp_ident {txt = Longident.Lident name}; + pexp_attributes = [] | [({Location.txt = "ns.namedArgLoc"}, _)]; + } ) + when lbl = name -> + let loc = + match arg.pexp_attributes with + | ({Location.txt = "ns.namedArgLoc"; loc}, _) :: _ -> loc + | _ -> arg.pexp_loc + in + let doc = Doc.concat [Doc.tilde; printIdentLike lbl; Doc.question] in + printComments doc cmtTbl loc + | _lbl, expr -> + let argLoc, expr = + match expr.pexp_attributes with + | ({Location.txt = "ns.namedArgLoc"; loc}, _) :: attrs -> + (loc, {expr with pexp_attributes = attrs}) + | _ -> (expr.pexp_loc, expr) + in + let printedLbl = + match argLbl with + | Asttypes.Nolabel -> Doc.nil + | Asttypes.Labelled lbl -> + let doc = Doc.concat [Doc.tilde; printIdentLike lbl; Doc.equal] in + printComments doc cmtTbl argLoc + | Asttypes.Optional lbl -> + let doc = + Doc.concat [Doc.tilde; printIdentLike lbl; Doc.equal; Doc.question] + in + printComments doc cmtTbl argLoc + in + let printedExpr = + let doc = printExpressionWithComments ~customLayout expr cmtTbl in + match Parens.expr expr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr braces + | Nothing -> doc + in + let loc = {argLoc with loc_end = expr.pexp_loc.loc_end} in + let doc = Doc.concat [printedLbl; printedExpr] in + printComments doc cmtTbl loc + +and printCases ~customLayout (cases : Parsetree.case list) cmtTbl = + Doc.breakableGroup ~forceBreak:true + (Doc.concat + [ + Doc.lbrace; + Doc.concat + [ + Doc.line; + printList + ~getLoc:(fun n -> + { + n.Parsetree.pc_lhs.ppat_loc with + loc_end = n.pc_rhs.pexp_loc.loc_end; + }) + ~print:(printCase ~customLayout) ~nodes:cases cmtTbl; + ]; + Doc.line; + Doc.rbrace; + ]) + +and printCase ~customLayout (case : Parsetree.case) cmtTbl = + let rhs = + match case.pc_rhs.pexp_desc with + | Pexp_let _ | Pexp_letmodule _ | Pexp_letexception _ | Pexp_open _ + | Pexp_sequence _ -> + printExpressionBlock ~customLayout + ~braces:(ParsetreeViewer.isBracedExpr case.pc_rhs) + case.pc_rhs cmtTbl + | _ -> ( + let doc = printExpressionWithComments ~customLayout case.pc_rhs cmtTbl in + match Parens.expr case.pc_rhs with + | Parenthesized -> addParens doc + | _ -> doc) + in + + let guard = + match case.pc_guard with + | None -> Doc.nil + | Some expr -> + Doc.group + (Doc.concat + [ + Doc.line; + Doc.text "if "; + printExpressionWithComments ~customLayout expr cmtTbl; + ]) + in + let shouldInlineRhs = + match case.pc_rhs.pexp_desc with + | Pexp_construct ({txt = Longident.Lident ("()" | "true" | "false")}, _) + | Pexp_constant _ | Pexp_ident _ -> + true + | _ when ParsetreeViewer.isHuggableRhs case.pc_rhs -> true + | _ -> false + in + let shouldIndentPattern = + match case.pc_lhs.ppat_desc with + | Ppat_or _ -> false + | _ -> true + in + let patternDoc = + let doc = printPattern ~customLayout case.pc_lhs cmtTbl in + match case.pc_lhs.ppat_desc with + | Ppat_constraint _ -> addParens doc + | _ -> doc + in + let content = + Doc.concat + [ + (if shouldIndentPattern then Doc.indent patternDoc else patternDoc); + Doc.indent guard; + Doc.text " =>"; + Doc.indent + (Doc.concat [(if shouldInlineRhs then Doc.space else Doc.line); rhs]); + ] + in + Doc.group (Doc.concat [Doc.text "| "; content]) + +and printExprFunParameters ~customLayout ~inCallback ~async ~uncurried + ~hasConstraint parameters cmtTbl = + match parameters with + (* let f = _ => () *) + | [ + ParsetreeViewer.Parameter + { + attrs = []; + lbl = Asttypes.Nolabel; + defaultExpr = None; + 
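(* Editor annotation, not part of the original patch: printExprFunParameters
   special-cases three single-parameter shapes so they print without a
   parameter list: a wildcard (let f = _ => ()), a single variable
   (let f = a => ()) and unit (let f = () => ()); async prepends the async
   keyword and a type constraint forces parentheses. Every other parameter
   list falls through to the comma-separated form, opened with "(. " when
   the function is uncurried. *)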
pat = {Parsetree.ppat_desc = Ppat_any; ppat_loc}; + }; + ] + when not uncurried -> + let any = + let doc = if hasConstraint then Doc.text "(_)" else Doc.text "_" in + printComments doc cmtTbl ppat_loc + in + if async then addAsync any else any + (* let f = a => () *) + | [ + ParsetreeViewer.Parameter + { + attrs = []; + lbl = Asttypes.Nolabel; + defaultExpr = None; + pat = {Parsetree.ppat_desc = Ppat_var stringLoc}; + }; + ] + when not uncurried -> + let txtDoc = + let var = printIdentLike stringLoc.txt in + let var = if hasConstraint then addParens var else var in + if async then addAsync var else var + in + printComments txtDoc cmtTbl stringLoc.loc + (* let f = () => () *) + | [ + ParsetreeViewer.Parameter + { + attrs = []; + lbl = Asttypes.Nolabel; + defaultExpr = None; + pat = + {ppat_desc = Ppat_construct ({txt = Longident.Lident "()"; loc}, None)}; + }; + ] + when not uncurried -> + let doc = + let lparenRparen = Doc.text "()" in + if async then addAsync lparenRparen else lparenRparen + in + printComments doc cmtTbl loc + (* let f = (~greeting, ~from as hometown, ~x=?) => () *) + | parameters -> + let inCallback = + match inCallback with + | FitsOnOneLine -> true + | _ -> false + in + let maybeAsyncLparen = + let lparen = if uncurried then Doc.text "(. " else Doc.lparen in + if async then addAsync lparen else lparen + in + let shouldHug = ParsetreeViewer.parametersShouldHug parameters in + let printedParamaters = + Doc.concat + [ + (if shouldHug || inCallback then Doc.nil else Doc.softLine); + Doc.join + ~sep:(Doc.concat [Doc.comma; Doc.line]) + (List.map + (fun p -> printExpFunParameter ~customLayout p cmtTbl) + parameters); + ] + in + Doc.group + (Doc.concat + [ + maybeAsyncLparen; + (if shouldHug || inCallback then printedParamaters + else + Doc.concat + [Doc.indent printedParamaters; Doc.trailingComma; Doc.softLine]); + Doc.rparen; + ]) + +and printExpFunParameter ~customLayout parameter cmtTbl = + match parameter with + | ParsetreeViewer.NewTypes {attrs; locs = lbls} -> + Doc.group + (Doc.concat + [ + printAttributes ~customLayout attrs cmtTbl; + Doc.text "type "; + Doc.join ~sep:Doc.space + (List.map + (fun lbl -> + printComments + (printIdentLike lbl.Asttypes.txt) + cmtTbl lbl.Asttypes.loc) + lbls); + ]) + | Parameter {attrs; lbl; defaultExpr; pat = pattern} -> + let isUncurried, attrs = ParsetreeViewer.processUncurriedAttribute attrs in + let uncurried = + if isUncurried then Doc.concat [Doc.dot; Doc.space] else Doc.nil + in + let attrs = printAttributes ~customLayout attrs cmtTbl in + (* =defaultValue *) + let defaultExprDoc = + match defaultExpr with + | Some expr -> + Doc.concat + [Doc.text "="; printExpressionWithComments ~customLayout expr cmtTbl] + | None -> Doc.nil + in + (* ~from as hometown + * ~from -> punning *) + let labelWithPattern = + match (lbl, pattern) with + | Asttypes.Nolabel, pattern -> printPattern ~customLayout pattern cmtTbl + | ( (Asttypes.Labelled lbl | Optional lbl), + { + ppat_desc = Ppat_var stringLoc; + ppat_attributes = [] | [({Location.txt = "ns.namedArgLoc"}, _)]; + } ) + when lbl = stringLoc.txt -> + (* ~d *) + Doc.concat [Doc.text "~"; printIdentLike lbl] + | ( (Asttypes.Labelled lbl | Optional lbl), + { + ppat_desc = Ppat_constraint ({ppat_desc = Ppat_var {txt}}, typ); + ppat_attributes = [] | [({Location.txt = "ns.namedArgLoc"}, _)]; + } ) + when lbl = txt -> + (* ~d: e *) + Doc.concat + [ + Doc.text "~"; + printIdentLike lbl; + Doc.text ": "; + printTypExpr ~customLayout typ cmtTbl; + ] + | (Asttypes.Labelled lbl | Optional lbl), pattern 
-> + (* ~b as c *) + Doc.concat + [ + Doc.text "~"; + printIdentLike lbl; + Doc.text " as "; + printPattern ~customLayout pattern cmtTbl; + ] + in + let optionalLabelSuffix = + match (lbl, defaultExpr) with + | Asttypes.Optional _, None -> Doc.text "=?" + | _ -> Doc.nil + in + let doc = + Doc.group + (Doc.concat + [ + uncurried; + attrs; + labelWithPattern; + defaultExprDoc; + optionalLabelSuffix; + ]) + in + let cmtLoc = + match defaultExpr with + | None -> ( + match pattern.ppat_attributes with + | ({Location.txt = "ns.namedArgLoc"; loc}, _) :: _ -> + {loc with loc_end = pattern.ppat_loc.loc_end} + | _ -> pattern.ppat_loc) + | Some expr -> + let startPos = + match pattern.ppat_attributes with + | ({Location.txt = "ns.namedArgLoc"; loc}, _) :: _ -> loc.loc_start + | _ -> pattern.ppat_loc.loc_start + in + { + pattern.ppat_loc with + loc_start = startPos; + loc_end = expr.pexp_loc.loc_end; + } + in + printComments doc cmtTbl cmtLoc + +and printExpressionBlock ~customLayout ~braces expr cmtTbl = + let rec collectRows acc expr = + match expr.Parsetree.pexp_desc with + | Parsetree.Pexp_letmodule (modName, modExpr, expr2) -> + let name = + let doc = Doc.text modName.txt in + printComments doc cmtTbl modName.loc + in + let letModuleDoc = + Doc.concat + [ + Doc.text "module "; + name; + Doc.text " = "; + printModExpr ~customLayout modExpr cmtTbl; + ] + in + let loc = {expr.pexp_loc with loc_end = modExpr.pmod_loc.loc_end} in + collectRows ((loc, letModuleDoc) :: acc) expr2 + | Pexp_letexception (extensionConstructor, expr2) -> + let loc = + let loc = + {expr.pexp_loc with loc_end = extensionConstructor.pext_loc.loc_end} + in + match getFirstLeadingComment cmtTbl loc with + | None -> loc + | Some comment -> + let cmtLoc = Comment.loc comment in + {cmtLoc with loc_end = loc.loc_end} + in + let letExceptionDoc = + printExceptionDef ~customLayout extensionConstructor cmtTbl + in + collectRows ((loc, letExceptionDoc) :: acc) expr2 + | Pexp_open (overrideFlag, longidentLoc, expr2) -> + let openDoc = + Doc.concat + [ + Doc.text "open"; + printOverrideFlag overrideFlag; + Doc.space; + printLongidentLocation longidentLoc cmtTbl; + ] + in + let loc = {expr.pexp_loc with loc_end = longidentLoc.loc.loc_end} in + collectRows ((loc, openDoc) :: acc) expr2 + | Pexp_sequence (expr1, expr2) -> + let exprDoc = + let doc = printExpression ~customLayout expr1 cmtTbl in + match Parens.expr expr1 with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr1 braces + | Nothing -> doc + in + let loc = expr1.pexp_loc in + collectRows ((loc, exprDoc) :: acc) expr2 + | Pexp_let (recFlag, valueBindings, expr2) -> ( + let loc = + let loc = + match (valueBindings, List.rev valueBindings) with + | vb :: _, lastVb :: _ -> + {vb.pvb_loc with loc_end = lastVb.pvb_loc.loc_end} + | _ -> Location.none + in + match getFirstLeadingComment cmtTbl loc with + | None -> loc + | Some comment -> + let cmtLoc = Comment.loc comment in + {cmtLoc with loc_end = loc.loc_end} + in + let recFlag = + match recFlag with + | Asttypes.Nonrecursive -> Doc.nil + | Asttypes.Recursive -> Doc.text "rec " + in + let letDoc = + printValueBindings ~customLayout ~recFlag valueBindings cmtTbl + in + (* let () = { + * let () = foo() + * () + * } + * We don't need to print the () on the last line of the block + *) + match expr2.pexp_desc with + | Pexp_construct ({txt = Longident.Lident "()"}, _) -> + List.rev ((loc, letDoc) :: acc) + | _ -> collectRows ((loc, letDoc) :: acc) expr2) + | _ -> + let exprDoc = + let doc = printExpression 
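(* Editor annotation, not part of the original patch: collectRows flattens
   a chain of module, exception, open and let bindings plus sequenced
   expressions into one row per construct, dropping the trailing unit of a
   let-block as noted above; printBraces, defined right after this
   function, only forces the braces to break when the user originally
   wrote them across more than one line. *)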
~customLayout expr cmtTbl in + match Parens.expr expr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr braces + | Nothing -> doc + in + List.rev ((expr.pexp_loc, exprDoc) :: acc) + in + let rows = collectRows [] expr in + let block = + printList ~getLoc:fst ~nodes:rows + ~print:(fun (_, doc) _ -> doc) + ~forceBreak:true cmtTbl + in + Doc.breakableGroup ~forceBreak:true + (if braces then + Doc.concat + [ + Doc.lbrace; + Doc.indent (Doc.concat [Doc.line; block]); + Doc.line; + Doc.rbrace; + ] + else block) + +(* + * // user types: + * let f = (a, b) => { a + b } + * + * // printer: everything is on one line + * let f = (a, b) => { a + b } + * + * // user types: over multiple lines + * let f = (a, b) => { + * a + b + * } + * + * // printer: over multiple lines + * let f = (a, b) => { + * a + b + * } + *) +and printBraces doc expr bracesLoc = + let overMultipleLines = + let open Location in + bracesLoc.loc_end.pos_lnum > bracesLoc.loc_start.pos_lnum + in + match expr.Parsetree.pexp_desc with + | Pexp_letmodule _ | Pexp_letexception _ | Pexp_let _ | Pexp_open _ + | Pexp_sequence _ -> + (* already has braces *) + doc + | _ -> + Doc.breakableGroup ~forceBreak:overMultipleLines + (Doc.concat + [ + Doc.lbrace; + Doc.indent + (Doc.concat + [ + Doc.softLine; + (if Parens.bracedExpr expr then addParens doc else doc); + ]); + Doc.softLine; + Doc.rbrace; + ]) + +and printOverrideFlag overrideFlag = + match overrideFlag with + | Asttypes.Override -> Doc.text "!" + | Fresh -> Doc.nil + +and printDirectionFlag flag = + match flag with + | Asttypes.Downto -> Doc.text " downto " + | Asttypes.Upto -> Doc.text " to " + +and printExpressionRecordRow ~customLayout (lbl, expr) cmtTbl punningAllowed = + let cmtLoc = {lbl.loc with loc_end = expr.pexp_loc.loc_end} in + let doc = + Doc.group + (match expr.pexp_desc with + | Pexp_ident {txt = Lident key; loc = _keyLoc} + when punningAllowed && Longident.last lbl.txt = key -> + (* print punned field *) + Doc.concat + [ + printAttributes ~customLayout expr.pexp_attributes cmtTbl; + printOptionalLabel expr.pexp_attributes; + printLidentPath lbl cmtTbl; + ] + | _ -> + Doc.concat + [ + printLidentPath lbl cmtTbl; + Doc.text ": "; + printOptionalLabel expr.pexp_attributes; + (let doc = printExpressionWithComments ~customLayout expr cmtTbl in + match Parens.exprRecordRowRhs expr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr braces + | Nothing -> doc); + ]) + in + printComments doc cmtTbl cmtLoc + +and printBsObjectRow ~customLayout (lbl, expr) cmtTbl = + let cmtLoc = {lbl.loc with loc_end = expr.pexp_loc.loc_end} in + let lblDoc = + let doc = + Doc.concat [Doc.text "\""; printLongident lbl.txt; Doc.text "\""] + in + printComments doc cmtTbl lbl.loc + in + let doc = + Doc.concat + [ + lblDoc; + Doc.text ": "; + (let doc = printExpressionWithComments ~customLayout expr cmtTbl in + match Parens.expr expr with + | Parens.Parenthesized -> addParens doc + | Braced braces -> printBraces doc expr braces + | Nothing -> doc); + ] + in + printComments doc cmtTbl cmtLoc + +(* The optional loc indicates whether we need to print the attributes in + * relation to some location. 
In practise this means the following: + * `@attr type t = string` -> on the same line, print on the same line + * `@attr + * type t = string` -> attr is on prev line, print the attributes + * with a line break between, we respect the users' original layout *) +and printAttributes ?loc ?(inline = false) ~customLayout + (attrs : Parsetree.attributes) cmtTbl = + match ParsetreeViewer.filterParsingAttrs attrs with + | [] -> Doc.nil + | attrs -> + let lineBreak = + match loc with + | None -> Doc.line + | Some loc -> ( + match List.rev attrs with + | ({loc = firstLoc}, _) :: _ + when loc.loc_start.pos_lnum > firstLoc.loc_end.pos_lnum -> + Doc.hardLine + | _ -> Doc.line) + in + Doc.concat + [ + Doc.group + (Doc.joinWithSep + (List.map + (fun attr -> printAttribute ~customLayout attr cmtTbl) + attrs)); + (if inline then Doc.space else lineBreak); + ] + +and printPayload ~customLayout (payload : Parsetree.payload) cmtTbl = + match payload with + | PStr [] -> Doc.nil + | PStr [{pstr_desc = Pstr_eval (expr, attrs)}] -> + let exprDoc = printExpressionWithComments ~customLayout expr cmtTbl in + let needsParens = + match attrs with + | [] -> false + | _ -> true + in + let shouldHug = ParsetreeViewer.isHuggableExpression expr in + if shouldHug then + Doc.concat + [ + Doc.lparen; + printAttributes ~customLayout attrs cmtTbl; + (if needsParens then addParens exprDoc else exprDoc); + Doc.rparen; + ] + else + Doc.concat + [ + Doc.lparen; + Doc.indent + (Doc.concat + [ + Doc.softLine; + printAttributes ~customLayout attrs cmtTbl; + (if needsParens then addParens exprDoc else exprDoc); + ]); + Doc.softLine; + Doc.rparen; + ] + | PStr [({pstr_desc = Pstr_value (_recFlag, _bindings)} as si)] -> + addParens (printStructureItem ~customLayout si cmtTbl) + | PStr structure -> addParens (printStructure ~customLayout structure cmtTbl) + | PTyp typ -> + Doc.concat + [ + Doc.lparen; + Doc.text ":"; + Doc.indent + (Doc.concat [Doc.line; printTypExpr ~customLayout typ cmtTbl]); + Doc.softLine; + Doc.rparen; + ] + | PPat (pat, optExpr) -> + let whenDoc = + match optExpr with + | Some expr -> + Doc.concat + [ + Doc.line; + Doc.text "if "; + printExpressionWithComments ~customLayout expr cmtTbl; + ] + | None -> Doc.nil + in + Doc.concat + [ + Doc.lparen; + Doc.indent + (Doc.concat + [ + Doc.softLine; + Doc.text "? 
"; + printPattern ~customLayout pat cmtTbl; + whenDoc; + ]); + Doc.softLine; + Doc.rparen; + ] + | PSig signature -> + Doc.concat + [ + Doc.lparen; + Doc.text ":"; + Doc.indent + (Doc.concat [Doc.line; printSignature ~customLayout signature cmtTbl]); + Doc.softLine; + Doc.rparen; + ] + +and printAttribute ?(standalone = false) ~customLayout + ((id, payload) : Parsetree.attribute) cmtTbl = + match (id, payload) with + | ( {txt = "ns.doc"}, + PStr + [ + { + pstr_desc = + Pstr_eval ({pexp_desc = Pexp_constant (Pconst_string (txt, _))}, _); + }; + ] ) -> + ( Doc.concat + [ + Doc.text (if standalone then "/***" else "/**"); + Doc.text txt; + Doc.text "*/"; + ], + Doc.hardLine ) + | _ -> + ( Doc.group + (Doc.concat + [ + Doc.text (if standalone then "@@" else "@"); + Doc.text (convertBsExternalAttribute id.txt); + printPayload ~customLayout payload cmtTbl; + ]), + Doc.line ) + +and printModExpr ~customLayout modExpr cmtTbl = + let doc = + match modExpr.pmod_desc with + | Pmod_ident longidentLoc -> printLongidentLocation longidentLoc cmtTbl + | Pmod_structure [] -> + let shouldBreak = + modExpr.pmod_loc.loc_start.pos_lnum < modExpr.pmod_loc.loc_end.pos_lnum + in + Doc.breakableGroup ~forceBreak:shouldBreak + (Doc.concat + [Doc.lbrace; printCommentsInside cmtTbl modExpr.pmod_loc; Doc.rbrace]) + | Pmod_structure structure -> + Doc.breakableGroup ~forceBreak:true + (Doc.concat + [ + Doc.lbrace; + Doc.indent + (Doc.concat + [Doc.softLine; printStructure ~customLayout structure cmtTbl]); + Doc.softLine; + Doc.rbrace; + ]) + | Pmod_unpack expr -> + let shouldHug = + match expr.pexp_desc with + | Pexp_let _ -> true + | Pexp_constraint + ({pexp_desc = Pexp_let _}, {ptyp_desc = Ptyp_package _packageType}) + -> + true + | _ -> false + in + let expr, moduleConstraint = + match expr.pexp_desc with + | Pexp_constraint + (expr, {ptyp_desc = Ptyp_package packageType; ptyp_loc}) -> + let packageDoc = + let doc = + printPackageType ~customLayout ~printModuleKeywordAndParens:false + packageType cmtTbl + in + printComments doc cmtTbl ptyp_loc + in + let typeDoc = + Doc.group + (Doc.concat + [Doc.text ":"; Doc.indent (Doc.concat [Doc.line; packageDoc])]) + in + (expr, typeDoc) + | _ -> (expr, Doc.nil) + in + let unpackDoc = + Doc.group + (Doc.concat + [ + printExpressionWithComments ~customLayout expr cmtTbl; + moduleConstraint; + ]) + in + Doc.group + (Doc.concat + [ + Doc.text "unpack("; + (if shouldHug then unpackDoc + else + Doc.concat + [ + Doc.indent (Doc.concat [Doc.softLine; unpackDoc]); + Doc.softLine; + ]); + Doc.rparen; + ]) + | Pmod_extension extension -> + printExtension ~customLayout ~atModuleLvl:false extension cmtTbl + | Pmod_apply _ -> + let args, callExpr = ParsetreeViewer.modExprApply modExpr in + let isUnitSugar = + match args with + | [{pmod_desc = Pmod_structure []}] -> true + | _ -> false + in + let shouldHug = + match args with + | [{pmod_desc = Pmod_structure _}] -> true + | _ -> false + in + Doc.group + (Doc.concat + [ + printModExpr ~customLayout callExpr cmtTbl; + (if isUnitSugar then + printModApplyArg ~customLayout + (List.hd args [@doesNotRaise]) + cmtTbl + else + Doc.concat + [ + Doc.lparen; + (if shouldHug then + printModApplyArg ~customLayout + (List.hd args [@doesNotRaise]) + cmtTbl + else + Doc.indent + (Doc.concat + [ + Doc.softLine; + Doc.join + ~sep:(Doc.concat [Doc.comma; Doc.line]) + (List.map + (fun modArg -> + printModApplyArg ~customLayout modArg cmtTbl) + args); + ])); + (if not shouldHug then + Doc.concat [Doc.trailingComma; Doc.softLine] + else Doc.nil); + Doc.rparen; + 
]); + ]) + | Pmod_constraint (modExpr, modType) -> + Doc.concat + [ + printModExpr ~customLayout modExpr cmtTbl; + Doc.text ": "; + printModType ~customLayout modType cmtTbl; + ] + | Pmod_functor _ -> printModFunctor ~customLayout modExpr cmtTbl + in + printComments doc cmtTbl modExpr.pmod_loc + +and printModFunctor ~customLayout modExpr cmtTbl = + let parameters, returnModExpr = ParsetreeViewer.modExprFunctor modExpr in + (* let shouldInline = match returnModExpr.pmod_desc with *) + (* | Pmod_structure _ | Pmod_ident _ -> true *) + (* | Pmod_constraint ({pmod_desc = Pmod_structure _}, _) -> true *) + (* | _ -> false *) + (* in *) + let returnConstraint, returnModExpr = + match returnModExpr.pmod_desc with + | Pmod_constraint (modExpr, modType) -> + let constraintDoc = + let doc = printModType ~customLayout modType cmtTbl in + if Parens.modExprFunctorConstraint modType then addParens doc else doc + in + let modConstraint = Doc.concat [Doc.text ": "; constraintDoc] in + (modConstraint, printModExpr ~customLayout modExpr cmtTbl) + | _ -> (Doc.nil, printModExpr ~customLayout returnModExpr cmtTbl) + in + let parametersDoc = + match parameters with + | [(attrs, {txt = "*"}, None)] -> + Doc.group + (Doc.concat [printAttributes ~customLayout attrs cmtTbl; Doc.text "()"]) + | [([], {txt = lbl}, None)] -> Doc.text lbl + | parameters -> + Doc.group + (Doc.concat + [ + Doc.lparen; + Doc.indent + (Doc.concat + [ + Doc.softLine; + Doc.join + ~sep:(Doc.concat [Doc.comma; Doc.line]) + (List.map + (fun param -> + printModFunctorParam ~customLayout param cmtTbl) + parameters); + ]); + Doc.trailingComma; + Doc.softLine; + Doc.rparen; + ]) + in + Doc.group + (Doc.concat + [parametersDoc; returnConstraint; Doc.text " => "; returnModExpr]) + +and printModFunctorParam ~customLayout (attrs, lbl, optModType) cmtTbl = + let cmtLoc = + match optModType with + | None -> lbl.Asttypes.loc + | Some modType -> + {lbl.loc with loc_end = modType.Parsetree.pmty_loc.loc_end} + in + let attrs = printAttributes ~customLayout attrs cmtTbl in + let lblDoc = + let doc = if lbl.txt = "*" then Doc.text "()" else Doc.text lbl.txt in + printComments doc cmtTbl lbl.loc + in + let doc = + Doc.group + (Doc.concat + [ + attrs; + lblDoc; + (match optModType with + | None -> Doc.nil + | Some modType -> + Doc.concat + [Doc.text ": "; printModType ~customLayout modType cmtTbl]); + ]) + in + printComments doc cmtTbl cmtLoc + +and printModApplyArg ~customLayout modExpr cmtTbl = + match modExpr.pmod_desc with + | Pmod_structure [] -> Doc.text "()" + | _ -> printModExpr ~customLayout modExpr cmtTbl + +and printExceptionDef ~customLayout (constr : Parsetree.extension_constructor) + cmtTbl = + let kind = + match constr.pext_kind with + | Pext_rebind longident -> + Doc.indent + (Doc.concat + [Doc.text " ="; Doc.line; printLongidentLocation longident cmtTbl]) + | Pext_decl (Pcstr_tuple [], None) -> Doc.nil + | Pext_decl (args, gadt) -> + let gadtDoc = + match gadt with + | Some typ -> + Doc.concat [Doc.text ": "; printTypExpr ~customLayout typ cmtTbl] + | None -> Doc.nil + in + Doc.concat + [ + printConstructorArguments ~customLayout ~indent:false args cmtTbl; + gadtDoc; + ] + in + let name = + printComments (Doc.text constr.pext_name.txt) cmtTbl constr.pext_name.loc + in + let doc = + Doc.group + (Doc.concat + [ + printAttributes ~customLayout constr.pext_attributes cmtTbl; + Doc.text "exception "; + name; + kind; + ]) + in + printComments doc cmtTbl constr.pext_loc + +and printExtensionConstructor ~customLayout + (constr : 
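(* Editor annotation, not part of the original patch: printExceptionDef
   above and printExtensionConstructor here share the same shape:
   attributes, the constructor name, its arguments, then either a GADT
   return type after ":" or " = SomeIdent" for a rebind; in a type
   extension every constructor after the first gets a leading "| ", and
   the first one only when the group breaks. *)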
Parsetree.extension_constructor) cmtTbl i = + let attrs = printAttributes ~customLayout constr.pext_attributes cmtTbl in + let bar = + if i > 0 then Doc.text "| " else Doc.ifBreaks (Doc.text "| ") Doc.nil + in + let kind = + match constr.pext_kind with + | Pext_rebind longident -> + Doc.indent + (Doc.concat + [Doc.text " ="; Doc.line; printLongidentLocation longident cmtTbl]) + | Pext_decl (Pcstr_tuple [], None) -> Doc.nil + | Pext_decl (args, gadt) -> + let gadtDoc = + match gadt with + | Some typ -> + Doc.concat [Doc.text ": "; printTypExpr ~customLayout typ cmtTbl] + | None -> Doc.nil + in + Doc.concat + [ + printConstructorArguments ~customLayout ~indent:false args cmtTbl; + gadtDoc; + ] + in + let name = + printComments (Doc.text constr.pext_name.txt) cmtTbl constr.pext_name.loc + in + Doc.concat [bar; Doc.group (Doc.concat [attrs; name; kind])] + +let printTypeParams = printTypeParams ~customLayout:0 +let printTypExpr = printTypExpr ~customLayout:0 +let printExpression = printExpression ~customLayout:0 +let printPattern = printPattern ~customLayout:0 + +let printImplementation ~width (s : Parsetree.structure) ~comments = + let cmtTbl = CommentTable.make () in + CommentTable.walkStructure s cmtTbl comments; + (* CommentTable.log cmtTbl; *) + let doc = printStructure ~customLayout:0 s cmtTbl in + (* Doc.debug doc; *) + Doc.toString ~width doc ^ "\n" + +let printInterface ~width (s : Parsetree.signature) ~comments = + let cmtTbl = CommentTable.make () in + CommentTable.walkSignature s cmtTbl comments; + Doc.toString ~width (printSignature ~customLayout:0 s cmtTbl) ^ "\n" + +let printStructure = printStructure ~customLayout:0 diff --git a/res_syntax/src/res_printer.mli b/res_syntax/src/res_printer.mli new file mode 100644 index 0000000000..2f854ef6b2 --- /dev/null +++ b/res_syntax/src/res_printer.mli @@ -0,0 +1,26 @@ +val convertBsExternalAttribute : string -> string +val convertBsExtension : string -> string + +val printTypeParams : + (Parsetree.core_type * Asttypes.variance) list -> + Res_comments_table.t -> + Res_doc.t + +val printLongident : Longident.t -> Res_doc.t + +val printTypExpr : Parsetree.core_type -> Res_comments_table.t -> Res_doc.t + +val addParens : Res_doc.t -> Res_doc.t + +val printExpression : Parsetree.expression -> Res_comments_table.t -> Res_doc.t + +val printPattern : Parsetree.pattern -> Res_comments_table.t -> Res_doc.t + [@@live] + +val printStructure : Parsetree.structure -> Res_comments_table.t -> Res_doc.t + [@@live] + +val printImplementation : + width:int -> Parsetree.structure -> comments:Res_comment.t list -> string +val printInterface : + width:int -> Parsetree.signature -> comments:Res_comment.t list -> string diff --git a/res_syntax/src/res_reporting.ml b/res_syntax/src/res_reporting.ml new file mode 100644 index 0000000000..77d370af08 --- /dev/null +++ b/res_syntax/src/res_reporting.ml @@ -0,0 +1,16 @@ +module Token = Res_token +module Grammar = Res_grammar + +type problem = + | Unexpected of Token.t [@live] + | Expected of { + token: Token.t; + pos: Lexing.position; + context: Grammar.t option; + } [@live] + | Message of string [@live] + | Uident [@live] + | Lident [@live] + | Unbalanced of Token.t [@live] + +type parseError = Lexing.position * problem diff --git a/res_syntax/src/res_scanner.ml b/res_syntax/src/res_scanner.ml new file mode 100644 index 0000000000..a1589e2723 --- /dev/null +++ b/res_syntax/src/res_scanner.ml @@ -0,0 +1,985 @@ +module Diagnostics = Res_diagnostics +module Token = Res_token +module Comment = Res_comment + +type mode = 
Jsx | Diamond + +(* We hide the implementation detail of the scanner reading character. Our char + will also contain the special -1 value to indicate end-of-file. This isn't + ideal; we should clean this up *) +let hackyEOFChar = Char.unsafe_chr (-1) +type charEncoding = Char.t + +type t = { + filename: string; + src: string; + mutable err: + startPos:Lexing.position -> + endPos:Lexing.position -> + Diagnostics.category -> + unit; + mutable ch: charEncoding; (* current character *) + mutable offset: int; (* character offset *) + mutable lineOffset: int; (* current line offset *) + mutable lnum: int; (* current line number *) + mutable mode: mode list; +} + +let setDiamondMode scanner = scanner.mode <- Diamond :: scanner.mode + +let setJsxMode scanner = scanner.mode <- Jsx :: scanner.mode + +let popMode scanner mode = + match scanner.mode with + | m :: ms when m = mode -> scanner.mode <- ms + | _ -> () + +let inDiamondMode scanner = + match scanner.mode with + | Diamond :: _ -> true + | _ -> false + +let inJsxMode scanner = + match scanner.mode with + | Jsx :: _ -> true + | _ -> false + +let position scanner = + Lexing. + { + pos_fname = scanner.filename; + (* line number *) + pos_lnum = scanner.lnum; + (* offset of the beginning of the line (number + of characters between the beginning of the scanner and the beginning + of the line) *) + pos_bol = scanner.lineOffset; + (* [pos_cnum] is the offset of the position (number of + characters between the beginning of the scanner and the position). *) + pos_cnum = scanner.offset; + } + +(* Small debugging util + ❯ echo 'let msg = "hello"' | ./lib/rescript.exe + let msg = "hello" + ^-^ let 0-3 + let msg = "hello" + ^-^ msg 4-7 + let msg = "hello" + ^ = 8-9 + let msg = "hello" + ^-----^ string "hello" 10-17 + let msg = "hello" + ^ eof 18-18 + let msg = "hello" +*) +let _printDebug ~startPos ~endPos scanner token = + let open Lexing in + print_string scanner.src; + print_string ((String.make [@doesNotRaise]) startPos.pos_cnum ' '); + print_char '^'; + (match endPos.pos_cnum - startPos.pos_cnum with + | 0 -> if token = Token.Eof then () else assert false + | 1 -> () + | n -> + print_string ((String.make [@doesNotRaise]) (n - 2) '-'); + print_char '^'); + print_char ' '; + print_string (Res_token.toString token); + print_char ' '; + print_int startPos.pos_cnum; + print_char '-'; + print_int endPos.pos_cnum; + print_endline "" + [@@live] + +let next scanner = + let nextOffset = scanner.offset + 1 in + (match scanner.ch with + | '\n' -> + scanner.lineOffset <- nextOffset; + scanner.lnum <- scanner.lnum + 1 + (* What about CRLF (\r + \n) on windows? 
+ * \r\n will always be terminated by a \n + * -> we can just bump the line count on \n *) + | _ -> ()); + if nextOffset < String.length scanner.src then ( + scanner.offset <- nextOffset; + scanner.ch <- String.unsafe_get scanner.src scanner.offset) + else ( + scanner.offset <- String.length scanner.src; + scanner.ch <- hackyEOFChar) + +let next2 scanner = + next scanner; + next scanner + +let next3 scanner = + next scanner; + next scanner; + next scanner + +let peek scanner = + if scanner.offset + 1 < String.length scanner.src then + String.unsafe_get scanner.src (scanner.offset + 1) + else hackyEOFChar + +let peek2 scanner = + if scanner.offset + 2 < String.length scanner.src then + String.unsafe_get scanner.src (scanner.offset + 2) + else hackyEOFChar + +let peek3 scanner = + if scanner.offset + 3 < String.length scanner.src then + String.unsafe_get scanner.src (scanner.offset + 3) + else hackyEOFChar + +let make ~filename src = + { + filename; + src; + err = (fun ~startPos:_ ~endPos:_ _ -> ()); + ch = (if src = "" then hackyEOFChar else String.unsafe_get src 0); + offset = 0; + lineOffset = 0; + lnum = 1; + mode = []; + } + +(* generic helpers *) + +let isWhitespace ch = + match ch with + | ' ' | '\t' | '\n' | '\r' -> true + | _ -> false + +let rec skipWhitespace scanner = + if isWhitespace scanner.ch then ( + next scanner; + skipWhitespace scanner) + +let digitValue ch = + match ch with + | '0' .. '9' -> Char.code ch - 48 + | 'a' .. 'f' -> Char.code ch - Char.code 'a' + 10 + | 'A' .. 'F' -> Char.code ch + 32 - Char.code 'a' + 10 + | _ -> 16 (* larger than any legal value *) + +let rec skipLowerCaseChars scanner = + match scanner.ch with + | 'a' .. 'z' -> + next scanner; + skipLowerCaseChars scanner + | _ -> () + +(* scanning helpers *) + +let scanIdentifier scanner = + let startOff = scanner.offset in + let rec skipGoodChars scanner = + match scanner.ch with + | 'A' .. 'Z' | 'a' .. 'z' | '0' .. '9' | '_' | '\'' -> + next scanner; + skipGoodChars scanner + | _ -> () + in + skipGoodChars scanner; + let str = + (String.sub [@doesNotRaise]) scanner.src startOff (scanner.offset - startOff) + in + if '{' == scanner.ch && str = "list" then ( + next scanner; + (* TODO: this isn't great *) + Token.lookupKeyword "list{") + else Token.lookupKeyword str + +let scanDigits scanner ~base = + if base <= 10 then + let rec loop scanner = + match scanner.ch with + | '0' .. '9' | '_' -> + next scanner; + loop scanner + | _ -> () + in + loop scanner + else + let rec loop scanner = + match scanner.ch with + (* hex *) + | '0' .. '9' | 'a' .. 'f' | 'A' .. 'F' | '_' -> + next scanner; + loop scanner + | _ -> () + in + loop scanner + +(* float: (0…9) { 0…9∣ _ } [. { 0…9∣ _ }] [(e∣ E) [+∣ -] (0…9) { 0…9∣ _ }] *) +let scanNumber scanner = + let startOff = scanner.offset in + + (* integer part *) + let base = + match scanner.ch with + | '0' -> ( + match peek scanner with + | 'x' | 'X' -> + next2 scanner; + 16 + | 'o' | 'O' -> + next2 scanner; + 8 + | 'b' | 'B' -> + next2 scanner; + 2 + | _ -> + next scanner; + 8) + | _ -> 10 + in + scanDigits scanner ~base; + + (* *) + let isFloat = + if '.' 
== scanner.ch then ( + next scanner; + scanDigits scanner ~base; + true) + else false + in + + (* exponent part *) + let isFloat = + match scanner.ch with + | 'e' | 'E' | 'p' | 'P' -> + (match peek scanner with + | '+' | '-' -> next2 scanner + | _ -> next scanner); + scanDigits scanner ~base; + true + | _ -> isFloat + in + let literal = + (String.sub [@doesNotRaise]) scanner.src startOff (scanner.offset - startOff) + in + + (* suffix *) + let suffix = + match scanner.ch with + | 'n' -> + let msg = + "Unsupported number type (nativeint). Did you mean `" ^ literal ^ "`?" + in + let pos = position scanner in + scanner.err ~startPos:pos ~endPos:pos (Diagnostics.message msg); + next scanner; + Some 'n' + | ('g' .. 'z' | 'G' .. 'Z') as ch -> + next scanner; + Some ch + | _ -> None + in + if isFloat then Token.Float {f = literal; suffix} + else Token.Int {i = literal; suffix} + +let scanExoticIdentifier scanner = + (* TODO: are we disregarding the current char...? Should be a quote *) + next scanner; + let buffer = Buffer.create 20 in + let startPos = position scanner in + + let rec scan () = + match scanner.ch with + | '"' -> next scanner + | '\n' | '\r' -> + (* line break *) + let endPos = position scanner in + scanner.err ~startPos ~endPos + (Diagnostics.message "A quoted identifier can't contain line breaks."); + next scanner + | ch when ch == hackyEOFChar -> + let endPos = position scanner in + scanner.err ~startPos ~endPos + (Diagnostics.message "Did you forget a \" here?") + | ch -> + Buffer.add_char buffer ch; + next scanner; + scan () + in + scan (); + (* TODO: do we really need to create a new buffer instead of substring once? *) + Token.Lident (Buffer.contents buffer) + +let scanStringEscapeSequence ~startPos scanner = + let scan ~n ~base ~max = + let rec loop n x = + if n == 0 then x + else + let d = digitValue scanner.ch in + if d >= base then ( + let pos = position scanner in + let msg = + if scanner.ch == hackyEOFChar then "unclosed escape sequence" + else "unknown escape sequence" + in + scanner.err ~startPos ~endPos:pos (Diagnostics.message msg); + -1) + else + let () = next scanner in + loop (n - 1) ((x * base) + d) + in + let x = loop n 0 in + if x > max || (0xD800 <= x && x < 0xE000) then + let pos = position scanner in + let msg = "escape sequence is invalid unicode code point" in + scanner.err ~startPos ~endPos:pos (Diagnostics.message msg) + in + match scanner.ch with + (* \ already consumed *) + | 'n' | 't' | 'b' | 'r' | '\\' | ' ' | '\'' | '"' -> next scanner + | '0' + when let c = peek scanner in + c < '0' || c > '9' -> + (* Allow \0 *) + next scanner + | '0' .. '9' -> scan ~n:3 ~base:10 ~max:255 + | 'x' -> + (* hex *) + next scanner; + scan ~n:2 ~base:16 ~max:255 + | 'u' -> ( + next scanner; + match scanner.ch with + | '{' -> ( + (* unicode code point escape sequence: '\u{7A}', one or more hex digits *) + next scanner; + let x = ref 0 in + while + match scanner.ch with + | '0' .. '9' | 'a' .. 'f' | 'A' .. 'F' -> true + | _ -> false + do + x := (!x * 16) + digitValue scanner.ch; + next scanner + done; + (* consume '}' in '\u{7A}' *) + match scanner.ch with + | '}' -> next scanner + | _ -> ()) + | _ -> scan ~n:4 ~base:16 ~max:Res_utf8.max) + | _ -> + (* unknown escape sequence + * TODO: we should warn the user here. 
Let's not make it a hard error for now, for reason compat *) + (* + let pos = position scanner in + let msg = + if ch == -1 then "unclosed escape sequence" + else "unknown escape sequence" + in + scanner.err ~startPos ~endPos:pos (Diagnostics.message msg) + *) + () + +let scanString scanner = + (* assumption: we've just matched a quote *) + let startPosWithQuote = position scanner in + next scanner; + + (* If the text needs changing, a buffer is used *) + let buf = Buffer.create 0 in + let firstCharOffset = scanner.offset in + let lastOffsetInBuf = ref firstCharOffset in + + let bringBufUpToDate ~startOffset = + let strUpToNow = + (String.sub scanner.src !lastOffsetInBuf + (startOffset - !lastOffsetInBuf) [@doesNotRaise]) + in + Buffer.add_string buf strUpToNow; + lastOffsetInBuf := startOffset + in + + let result ~firstCharOffset ~lastCharOffset = + if Buffer.length buf = 0 then + (String.sub [@doesNotRaise]) scanner.src firstCharOffset + (lastCharOffset - firstCharOffset) + else ( + bringBufUpToDate ~startOffset:lastCharOffset; + Buffer.contents buf) + in + + let rec scan () = + match scanner.ch with + | '"' -> + let lastCharOffset = scanner.offset in + next scanner; + result ~firstCharOffset ~lastCharOffset + | '\\' -> + let startPos = position scanner in + let startOffset = scanner.offset + 1 in + next scanner; + scanStringEscapeSequence ~startPos scanner; + let endOffset = scanner.offset in + convertOctalToHex ~startOffset ~endOffset + | ch when ch == hackyEOFChar -> + let endPos = position scanner in + scanner.err ~startPos:startPosWithQuote ~endPos Diagnostics.unclosedString; + let lastCharOffset = scanner.offset in + result ~firstCharOffset ~lastCharOffset + | _ -> + next scanner; + scan () + and convertOctalToHex ~startOffset ~endOffset = + let len = endOffset - startOffset in + let isDigit = function + | '0' .. '9' -> true + | _ -> false + in + let txt = scanner.src in + let isNumericEscape = + len = 3 + && (isDigit txt.[startOffset] [@doesNotRaise]) + && (isDigit txt.[startOffset + 1] [@doesNotRaise]) + && (isDigit txt.[startOffset + 2] [@doesNotRaise]) + in + if isNumericEscape then ( + let strDecimal = (String.sub txt startOffset 3 [@doesNotRaise]) in + bringBufUpToDate ~startOffset; + let strHex = Res_string.convertDecimalToHex ~strDecimal in + lastOffsetInBuf := startOffset + 3; + Buffer.add_string buf strHex; + scan ()) + else scan () + in + Token.String (scan ()) + +let scanEscape scanner = + (* '\' consumed *) + let offset = scanner.offset - 1 in + let convertNumber scanner ~n ~base = + let x = ref 0 in + for _ = n downto 1 do + let d = digitValue scanner.ch in + x := (!x * base) + d; + next scanner + done; + let c = !x in + if Res_utf8.isValidCodePoint c then c else Res_utf8.repl + in + let codepoint = + match scanner.ch with + | '0' .. '9' -> convertNumber scanner ~n:3 ~base:10 + | 'b' -> + next scanner; + 8 + | 'n' -> + next scanner; + 10 + | 'r' -> + next scanner; + 13 + | 't' -> + next scanner; + 009 + | 'x' -> + next scanner; + convertNumber scanner ~n:2 ~base:16 + | 'o' -> + next scanner; + convertNumber scanner ~n:3 ~base:8 + | 'u' -> ( + next scanner; + match scanner.ch with + | '{' -> + (* unicode code point escape sequence: '\u{7A}', one or more hex digits *) + next scanner; + let x = ref 0 in + while + match scanner.ch with + | '0' .. '9' | 'a' .. 'f' | 'A' .. 
'F' -> true + | _ -> false + do + x := (!x * 16) + digitValue scanner.ch; + next scanner + done; + (* consume '}' in '\u{7A}' *) + (match scanner.ch with + | '}' -> next scanner + | _ -> ()); + let c = !x in + if Res_utf8.isValidCodePoint c then c else Res_utf8.repl + | _ -> + (* unicode escape sequence: '\u007A', exactly 4 hex digits *) + convertNumber scanner ~n:4 ~base:16) + | ch -> + next scanner; + Char.code ch + in + let contents = + (String.sub [@doesNotRaise]) scanner.src offset (scanner.offset - offset) + in + next scanner; + (* Consume \' *) + (* TODO: do we know it's \' ? *) + Token.Codepoint {c = codepoint; original = contents} + +let scanSingleLineComment scanner = + let startOff = scanner.offset in + let startPos = position scanner in + let rec skip scanner = + match scanner.ch with + | '\n' | '\r' -> () + | ch when ch == hackyEOFChar -> () + | _ -> + next scanner; + skip scanner + in + skip scanner; + let endPos = position scanner in + Token.Comment + (Comment.makeSingleLineComment + ~loc:Location.{loc_start = startPos; loc_end = endPos; loc_ghost = false} + ((String.sub [@doesNotRaise]) scanner.src startOff + (scanner.offset - startOff))) + +let scanMultiLineComment scanner = + (* assumption: we're only ever using this helper in `scan` after detecting a comment *) + let docComment = peek2 scanner = '*' && peek3 scanner <> '/' (* no /**/ *) in + let standalone = docComment && peek3 scanner = '*' (* /*** *) in + let contentStartOff = + scanner.offset + if docComment then if standalone then 4 else 3 else 2 + in + let startPos = position scanner in + let rec scan ~depth = + (* invariant: depth > 0 right after this match. See assumption *) + match (scanner.ch, peek scanner) with + | '/', '*' -> + next2 scanner; + scan ~depth:(depth + 1) + | '*', '/' -> + next2 scanner; + if depth > 1 then scan ~depth:(depth - 1) + | ch, _ when ch == hackyEOFChar -> + let endPos = position scanner in + scanner.err ~startPos ~endPos Diagnostics.unclosedComment + | _ -> + next scanner; + scan ~depth + in + scan ~depth:0; + let length = scanner.offset - 2 - contentStartOff in + let length = if length < 0 (* in case of EOF *) then 0 else length in + Token.Comment + (Comment.makeMultiLineComment ~docComment ~standalone + ~loc: + Location. 
+ {loc_start = startPos; loc_end = position scanner; loc_ghost = false} + ((String.sub [@doesNotRaise]) scanner.src contentStartOff length)) + +let scanTemplateLiteralToken scanner = + let startOff = scanner.offset in + + (* if starting } here, consume it *) + if scanner.ch == '}' then next scanner; + + let startPos = position scanner in + + let rec scan () = + let lastPos = position scanner in + match scanner.ch with + | '`' -> + next scanner; + let contents = + (String.sub [@doesNotRaise]) scanner.src startOff + (scanner.offset - 1 - startOff) + in + Token.TemplateTail (contents, lastPos) + | '$' -> ( + match peek scanner with + | '{' -> + next2 scanner; + let contents = + (String.sub [@doesNotRaise]) scanner.src startOff + (scanner.offset - 2 - startOff) + in + Token.TemplatePart (contents, lastPos) + | _ -> + next scanner; + scan ()) + | '\\' -> ( + match peek scanner with + | '`' | '\\' | '$' | '\n' | '\r' -> + (* line break *) + next2 scanner; + scan () + | _ -> + next scanner; + scan ()) + | ch when ch = hackyEOFChar -> + let endPos = position scanner in + scanner.err ~startPos ~endPos Diagnostics.unclosedTemplate; + let contents = + (String.sub [@doesNotRaise]) scanner.src startOff + (max (scanner.offset - 1 - startOff) 0) + in + Token.TemplateTail (contents, lastPos) + | _ -> + next scanner; + scan () + in + let token = scan () in + let endPos = position scanner in + (startPos, endPos, token) + +let rec scan scanner = + skipWhitespace scanner; + let startPos = position scanner in + + let token = + match scanner.ch with + (* peeking 0 char *) + | 'A' .. 'Z' | 'a' .. 'z' -> scanIdentifier scanner + | '0' .. '9' -> scanNumber scanner + | '`' -> + next scanner; + Token.Backtick + | '~' -> + next scanner; + Token.Tilde + | '?' -> + next scanner; + Token.Question + | ';' -> + next scanner; + Token.Semicolon + | '(' -> + next scanner; + Token.Lparen + | ')' -> + next scanner; + Token.Rparen + | '[' -> + next scanner; + Token.Lbracket + | ']' -> + next scanner; + Token.Rbracket + | '{' -> + next scanner; + Token.Lbrace + | '}' -> + next scanner; + Token.Rbrace + | ',' -> + next scanner; + Token.Comma + | '"' -> scanString scanner + (* peeking 1 char *) + | '_' -> ( + match peek scanner with + | 'A' .. 'Z' | 'a' .. 'z' | '0' .. '9' | '_' -> scanIdentifier scanner + | _ -> + next scanner; + Token.Underscore) + | '#' -> ( + match peek scanner with + | '=' -> + next2 scanner; + Token.HashEqual + | _ -> + next scanner; + Token.Hash) + | '*' -> ( + match peek scanner with + | '*' -> + next2 scanner; + Token.Exponentiation + | '.' 
-> + next2 scanner; + Token.AsteriskDot + | _ -> + next scanner; + Token.Asterisk) + | '@' -> ( + match peek scanner with + | '@' -> + next2 scanner; + Token.AtAt + | _ -> + next scanner; + Token.At) + | '%' -> ( + match peek scanner with + | '%' -> + next2 scanner; + Token.PercentPercent + | _ -> + next scanner; + Token.Percent) + | '|' -> ( + match peek scanner with + | '|' -> + next2 scanner; + Token.Lor + | '>' -> + next2 scanner; + Token.BarGreater + | _ -> + next scanner; + Token.Bar) + | '&' -> ( + match peek scanner with + | '&' -> + next2 scanner; + Token.Land + | _ -> + next scanner; + Token.Band) + | ':' -> ( + match peek scanner with + | '=' -> + next2 scanner; + Token.ColonEqual + | '>' -> + next2 scanner; + Token.ColonGreaterThan + | _ -> + next scanner; + Token.Colon) + | '\\' -> + next scanner; + scanExoticIdentifier scanner + | '/' -> ( + match peek scanner with + | '/' -> + next2 scanner; + scanSingleLineComment scanner + | '*' -> scanMultiLineComment scanner + | '.' -> + next2 scanner; + Token.ForwardslashDot + | _ -> + next scanner; + Token.Forwardslash) + | '-' -> ( + match peek scanner with + | '.' -> + next2 scanner; + Token.MinusDot + | '>' -> + next2 scanner; + Token.MinusGreater + | _ -> + next scanner; + Token.Minus) + | '+' -> ( + match peek scanner with + | '.' -> + next2 scanner; + Token.PlusDot + | '+' -> + next2 scanner; + Token.PlusPlus + | '=' -> + next2 scanner; + Token.PlusEqual + | _ -> + next scanner; + Token.Plus) + | '>' -> ( + match peek scanner with + | '=' when not (inDiamondMode scanner) -> + next2 scanner; + Token.GreaterEqual + | _ -> + next scanner; + Token.GreaterThan) + | '<' when not (inJsxMode scanner) -> ( + match peek scanner with + | '=' -> + next2 scanner; + Token.LessEqual + | _ -> + next scanner; + Token.LessThan) + (* special handling for JSX < *) + | '<' -> ( + (* Imagine the following:
<div>< + * < indicates the start of a new jsx-element, the parser expects + * the name of a new element after the < + * Example: <div> <div + * But what if we have a / here: example </ in <div></div> + * This signals a closing element. To simulate the two-token lookahead, + * the </ is emitted as a single new token LessThanSlash *) + next scanner; + skipWhitespace scanner; + match scanner.ch with + | '/' -> + next scanner; + Token.LessThanSlash + | '=' -> + next scanner; + Token.LessEqual + | _ -> Token.LessThan)
+ (* peeking 2 chars *) + | '.' -> ( + match (peek scanner, peek2 scanner) with + | '.', '.' -> + next3 scanner; + Token.DotDotDot + | '.', _ -> + next2 scanner; + Token.DotDot + | _ -> + next scanner; + Token.Dot) + | '\'' -> ( + match (peek scanner, peek2 scanner) with + | '\\', '"' -> + (* careful with this one! We're next-ing _once_ (not twice), + then relying on matching on the quote *) + next scanner; + SingleQuote + | '\\', _ -> + next2 scanner; + scanEscape scanner + | ch, '\'' -> + let offset = scanner.offset + 1 in + next3 scanner; + Token.Codepoint + { + c = Char.code ch; + original = (String.sub [@doesNotRaise]) scanner.src offset 1; + } + | ch, _ -> + next scanner; + let offset = scanner.offset in + let codepoint, length = + Res_utf8.decodeCodePoint scanner.offset scanner.src + (String.length scanner.src) + in + for _ = 0 to length - 1 do + next scanner + done; + if scanner.ch = '\'' then ( + let contents = + (String.sub [@doesNotRaise]) scanner.src offset length + in + next scanner; + Token.Codepoint {c = codepoint; original = contents}) + else ( + scanner.ch <- ch; + scanner.offset <- offset; + SingleQuote))
+ | '!' -> ( + match (peek scanner, peek2 scanner) with + | '=', '=' -> + next3 scanner; + Token.BangEqualEqual + | '=', _ -> + next2 scanner; + Token.BangEqual + | _ -> + next scanner; + Token.Bang) + | '=' -> ( + match (peek scanner, peek2 scanner) with + | '=', '=' -> + next3 scanner; + Token.EqualEqualEqual + | '=', _ -> + next2 scanner; + Token.EqualEqual + | '>', _ -> + next2 scanner; + Token.EqualGreater + | _ -> + next scanner; + Token.Equal) + (* special cases *) + | ch when ch == hackyEOFChar -> + next scanner; + Token.Eof + | ch -> + (* if we arrive here, we're dealing with an unknown character, + * report the error and continue scanning… *) + next scanner; + let endPos = position scanner in + scanner.err ~startPos ~endPos (Diagnostics.unknownUchar ch); + let _, _, token = scan scanner in + token + in + let endPos = position scanner in + (* _printDebug ~startPos ~endPos scanner token; *) + (startPos, endPos, token) + +(* misc helpers used elsewhere *) + +(* Imagine: <div> < + * is `<` the start of a jsx-child? <div />
+ * reconsiderLessThan peeks at the next token and + * determines the correct token to disambiguate *) +let reconsiderLessThan scanner = + (* < consumed *) + skipWhitespace scanner; + if scanner.ch == '/' then + let () = next scanner in + Token.LessThanSlash + else Token.LessThan + +(* If an operator has whitespace around both sides, it's a binary operator *) +(* TODO: this helper seems out of place *) +let isBinaryOp src startCnum endCnum = + if startCnum == 0 then false + else ( + (* we're gonna put some assertions and invariant checks here because this is + used outside of the scanner's normal invariant assumptions *) + assert (endCnum >= 0); + assert (startCnum > 0 && startCnum < String.length src); + let leftOk = isWhitespace (String.unsafe_get src (startCnum - 1)) in + (* we need some stronger confidence that endCnum is ok *) + let rightOk = + endCnum >= String.length src + || isWhitespace (String.unsafe_get src endCnum) + in + leftOk && rightOk) + +(* Assume `{` consumed, advances the scanner towards the ends of Reason quoted strings. (for conversion) + * In {| foo bar |} the scanner will be advanced until after the `|}` *) +let tryAdvanceQuotedString scanner = + let rec scanContents tag = + match scanner.ch with + | '|' -> ( + next scanner; + match scanner.ch with + | 'a' .. 'z' -> + let startOff = scanner.offset in + skipLowerCaseChars scanner; + let suffix = + (String.sub [@doesNotRaise]) scanner.src startOff + (scanner.offset - startOff) + in + if tag = suffix then + if scanner.ch = '}' then next scanner else scanContents tag + else scanContents tag + | '}' -> next scanner + | _ -> scanContents tag) + | ch when ch == hackyEOFChar -> + (* TODO: why is this place checking EOF and not others? *) + () + | _ -> + next scanner; + scanContents tag + in + match scanner.ch with + | 'a' .. 
'z' -> + let startOff = scanner.offset in + skipLowerCaseChars scanner; + let tag = + (String.sub [@doesNotRaise]) scanner.src startOff + (scanner.offset - startOff) + in + if scanner.ch = '|' then scanContents tag + | '|' -> scanContents "" + | _ -> () diff --git a/res_syntax/src/res_scanner.mli b/res_syntax/src/res_scanner.mli new file mode 100644 index 0000000000..e68020cfbb --- /dev/null +++ b/res_syntax/src/res_scanner.mli @@ -0,0 +1,36 @@ +type mode = Jsx | Diamond + +type charEncoding + +type t = { + filename: string; + src: string; + mutable err: + startPos:Lexing.position -> + endPos:Lexing.position -> + Res_diagnostics.category -> + unit; + mutable ch: charEncoding; (* current character *) + mutable offset: int; (* character offset *) + mutable lineOffset: int; (* current line offset *) + mutable lnum: int; (* current line number *) + mutable mode: mode list; +} + +val make : filename:string -> string -> t + +(* TODO: make this a record *) +val scan : t -> Lexing.position * Lexing.position * Res_token.t + +val isBinaryOp : string -> int -> int -> bool + +val setJsxMode : t -> unit +val setDiamondMode : t -> unit +val popMode : t -> mode -> unit + +val reconsiderLessThan : t -> Res_token.t + +val scanTemplateLiteralToken : + t -> Lexing.position * Lexing.position * Res_token.t + +val tryAdvanceQuotedString : t -> unit diff --git a/res_syntax/src/res_string.ml b/res_syntax/src/res_string.ml new file mode 100644 index 0000000000..a4ecba11db --- /dev/null +++ b/res_syntax/src/res_string.ml @@ -0,0 +1,11 @@ +let hexTable = + [| '0'; '1'; '2'; '3'; '4'; '5'; '6'; '7'; '8'; '9'; 'a'; 'b'; 'c'; 'd'; 'e'; 'f'; |] + [@ocamlformat "disable"] + +let convertDecimalToHex ~strDecimal = + try + let intNum = int_of_string strDecimal in + let c1 = Array.get hexTable (intNum lsr 4) in + let c2 = Array.get hexTable (intNum land 15) in + "x" ^ String.concat "" [String.make 1 c1; String.make 1 c2] + with Invalid_argument _ | Failure _ -> strDecimal diff --git a/res_syntax/src/res_token.ml b/res_syntax/src/res_token.ml new file mode 100644 index 0000000000..f519af6f02 --- /dev/null +++ b/res_syntax/src/res_token.ml @@ -0,0 +1,263 @@ +module Comment = Res_comment + +type t = + | Await + | Open + | True + | False + | Codepoint of {c: int; original: string} + | Int of {i: string; suffix: char option} + | Float of {f: string; suffix: char option} + | String of string + | Lident of string + | Uident of string + | As + | Dot + | DotDot + | DotDotDot + | Bang + | Semicolon + | Let + | And + | Rec + | Underscore + | SingleQuote + | Equal + | EqualEqual + | EqualEqualEqual + | Bar + | Lparen + | Rparen + | Lbracket + | Rbracket + | Lbrace + | Rbrace + | Colon + | Comma + | Eof + | Exception + | Backslash [@live] + | Forwardslash + | ForwardslashDot + | Asterisk + | AsteriskDot + | Exponentiation + | Minus + | MinusDot + | Plus + | PlusDot + | PlusPlus + | PlusEqual + | ColonGreaterThan + | GreaterThan + | LessThan + | LessThanSlash + | Hash + | HashEqual + | Assert + | Lazy + | Tilde + | Question + | If + | Else + | For + | In + | While + | Switch + | When + | EqualGreater + | MinusGreater + | External + | Typ + | Private + | Mutable + | Constraint + | Include + | Module + | Of + | Land + | Lor + | Band (* Bitwise and: & *) + | BangEqual + | BangEqualEqual + | LessEqual + | GreaterEqual + | ColonEqual + | At + | AtAt + | Percent + | PercentPercent + | Comment of Comment.t + | List + | TemplateTail of string * Lexing.position + | TemplatePart of string * Lexing.position + | Backtick + | BarGreater + | Try + | 
DocComment of Location.t * string + | ModuleComment of Location.t * string + +let precedence = function + | HashEqual | ColonEqual -> 1 + | Lor -> 2 + | Land -> 3 + | Equal | EqualEqual | EqualEqualEqual | LessThan | GreaterThan | BangEqual + | BangEqualEqual | LessEqual | GreaterEqual | BarGreater -> + 4 + | Plus | PlusDot | Minus | MinusDot | PlusPlus -> 5 + | Asterisk | AsteriskDot | Forwardslash | ForwardslashDot -> 6 + | Exponentiation -> 7 + | MinusGreater -> 8 + | Dot -> 9 + | _ -> 0
+ +let toString = function + | Await -> "await" + | Open -> "open" + | True -> "true" + | False -> "false" + | Codepoint {original} -> "codepoint '" ^ original ^ "'" + | String s -> "string \"" ^ s ^ "\"" + | Lident str -> str + | Uident str -> str + | Dot -> "." + | DotDot -> ".." + | DotDotDot -> "..." + | Int {i} -> "int " ^ i + | Float {f} -> "Float: " ^ f + | Bang -> "!" + | Semicolon -> ";" + | Let -> "let" + | And -> "and" + | Rec -> "rec" + | Underscore -> "_" + | SingleQuote -> "'" + | Equal -> "=" + | EqualEqual -> "==" + | EqualEqualEqual -> "===" + | Eof -> "eof" + | Bar -> "|" + | As -> "as" + | Lparen -> "(" + | Rparen -> ")" + | Lbracket -> "[" + | Rbracket -> "]" + | Lbrace -> "{" + | Rbrace -> "}" + | ColonGreaterThan -> ":>" + | Colon -> ":" + | Comma -> "," + | Minus -> "-" + | MinusDot -> "-." + | Plus -> "+" + | PlusDot -> "+." + | PlusPlus -> "++" + | PlusEqual -> "+=" + | Backslash -> "\\" + | Forwardslash -> "/" + | ForwardslashDot -> "/." + | Exception -> "exception" + | Hash -> "#" + | HashEqual -> "#=" + | GreaterThan -> ">" + | LessThan -> "<" + | LessThanSlash -> "</" + | Asterisk -> "*" + | AsteriskDot -> "*." + | Exponentiation -> "**" + | Assert -> "assert" + | Lazy -> "lazy" + | Tilde -> "tilde" + | Question -> "?" + | If -> "if" + | Else -> "else" + | For -> "for" + | In -> "in" + | While -> "while" + | Switch -> "switch" + | When -> "when" + | EqualGreater -> "=>" + | MinusGreater -> "->" + | External -> "external" + | Typ -> "type" + | Private -> "private" + | Constraint -> "constraint" + | Mutable -> "mutable" + | Include -> "include" + | Module -> "module" + | Of -> "of" + | Lor -> "||" + | Band -> "&" + | Land -> "&&" + | BangEqual -> "!=" + | BangEqualEqual -> "!==" + | GreaterEqual -> ">=" + | LessEqual -> "<=" + | ColonEqual -> ":=" + | At -> "@" + | AtAt -> "@@" + | Percent -> "%" + | PercentPercent -> "%%" + | Comment c -> "Comment" ^ Comment.toString c + | List -> "list{" + | TemplatePart (text, _) -> text ^ "${" + | TemplateTail (text, _) -> "TemplateTail(" ^ text ^ ")" + | Backtick -> "`" + | BarGreater -> "|>" + | Try -> "try" + | DocComment (_loc, s) -> "DocComment " ^ s + | ModuleComment (_loc, s) -> "ModuleComment " ^ s
+ +let keywordTable = function + | "and" -> And + | "as" -> As + | "assert" -> Assert + | "await" -> Await + | "constraint" -> Constraint + | "else" -> Else + | "exception" -> Exception + | "external" -> External + | "false" -> False + | "for" -> For + | "if" -> If + | "in" -> In + | "include" -> Include + | "lazy" -> Lazy + | "let" -> Let + | "list{" -> List + | "module" -> Module + | "mutable" -> Mutable + | "of" -> Of + | "open" -> Open + | "private" -> Private + | "rec" -> Rec + | "switch" -> Switch + | "true" -> True + | "try" -> Try + | "type" -> Typ + | "when" -> When + | "while" -> While + | _ -> raise Not_found + [@@raises Not_found]
+ +let isKeyword = function + | Await | And | As | Assert | Constraint | Else | Exception | External | False + | For | If | In | Include | Land | Lazy | Let | List | Lor | Module | Mutable + | Of | Open | Private |
Rec | Switch | True | Try | Typ | When | While -> + true + | _ -> false + +let lookupKeyword str = + try keywordTable str + with Not_found -> ( + match str.[0] [@doesNotRaise] with + | 'A' .. 'Z' -> Uident str + | _ -> Lident str) + +let isKeywordTxt str = + try + let _ = keywordTable str in + true + with Not_found -> false + +let catch = Lident "catch" diff --git a/res_syntax/src/res_utf8.ml b/res_syntax/src/res_utf8.ml new file mode 100644 index 0000000000..69c7d234f9 --- /dev/null +++ b/res_syntax/src/res_utf8.ml @@ -0,0 +1,143 @@ +(* https://tools.ietf.org/html/rfc3629#section-10 *) +(* let bom = 0xFEFF *) + +let repl = 0xFFFD + +(* let min = 0x0000 *) +let max = 0x10FFFF + +let surrogateMin = 0xD800 +let surrogateMax = 0xDFFF + +(* + * Char. number range | UTF-8 octet sequence + * (hexadecimal) | (binary) + * --------------------+--------------------------------------------- + * 0000 0000-0000 007F | 0xxxxxxx + * 0000 0080-0000 07FF | 110xxxxx 10xxxxxx + * 0000 0800-0000 FFFF | 1110xxxx 10xxxxxx 10xxxxxx + * 0001 0000-0010 FFFF | 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx + *) +let h2 = 0b1100_0000 +let h3 = 0b1110_0000 +let h4 = 0b1111_0000 + +let cont_mask = 0b0011_1111 + +type category = {low: int; high: int; size: int} + +let locb = 0b1000_0000 +let hicb = 0b1011_1111 + +let categoryTable = [| + (* 0 *) {low = -1; high= -1; size= 1}; (* invalid *) + (* 1 *) {low = 1; high= -1; size= 1}; (* ascii *) + (* 2 *) {low = locb; high= hicb; size= 2}; + (* 3 *) {low = 0xA0; high= hicb; size= 3}; + (* 4 *) {low = locb; high= hicb; size= 3}; + (* 5 *) {low = locb; high= 0x9F; size= 3}; + (* 6 *) {low = 0x90; high= hicb; size= 4}; + (* 7 *) {low = locb; high= hicb; size= 4}; + (* 8 *) {low = locb; high= 0x8F; size= 4}; +|] [@@ocamlformat "disable"] + +let categories = [| + 1; 1; 1; 1; 1; 1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1; + 1; 1; 1; 1; 1; 1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1; + 1; 1; 1; 1; 1; 1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1; + 1; 1; 1; 1; 1; 1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1; + 1; 1; 1; 1; 1; 1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1; + 1; 1; 1; 1; 1; 1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1; + 1; 1; 1; 1; 1; 1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1; + 1; 1; 1; 1; 1; 1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1; + + 0; 0; 0; 0;0; 0; 0; 0;0; 0; 0; 0;0; 0; 0; 0; + 0; 0; 0; 0;0; 0; 0; 0;0; 0; 0; 0;0; 0; 0; 0; + 0; 0; 0; 0;0; 0; 0; 0;0; 0; 0; 0;0; 0; 0; 0; + 0; 0; 0; 0;0; 0; 0; 0;0; 0; 0; 0;0; 0; 0; 0; + (* surrogate range U+D800 - U+DFFFF = 55296 - 917503 *) + 0; 0; 2; 2;2; 2; 2; 2;2; 2; 2; 2;2; 2; 2; 2; + 2; 2; 2; 2; 2; 2; 2; 2; 2; 2; 2; 2; 2; 2; 2; 2; + 3; 4; 4; 4; 4; 4; 4; 4; 4; 4; 4; 4; 4; 5; 4; 4; + 6; 7; 7 ;7; 8; 0; 0; 0; 0; 0; 0; 0; 0; 0; 0; 0; +|] [@@ocamlformat "disable"] + +let decodeCodePoint i s len = + if len < 1 then (repl, 1) + else + let first = int_of_char (String.unsafe_get s i) in + if first < 128 then (first, 1) + else + let index = Array.unsafe_get categories first in + if index = 0 then (repl, 1) + else + let cat = Array.unsafe_get categoryTable index in + if len < i + cat.size then (repl, 1) + else if cat.size == 2 then + let c1 = int_of_char (String.unsafe_get s (i + 1)) in + if c1 < cat.low || cat.high < c1 then (repl, 1) + else + let i1 = c1 land 0b00111111 in + let i0 = (first land 0b00011111) lsl 6 in + let uc = i0 lor i1 in + (uc, 2) + else if cat.size == 3 then + let c1 = int_of_char (String.unsafe_get s (i + 1)) in + let c2 = int_of_char (String.unsafe_get s (i + 2)) in + if c1 < cat.low || cat.high < c1 || c2 < locb || hicb < c2 then + (repl, 1) + else + let i0 = (first land 0b00001111) lsl 12 in + let i1 = (c1 land 
0b00111111) lsl 6 in + let i2 = c2 land 0b00111111 in + let uc = i0 lor i1 lor i2 in + (uc, 3) + else + let c1 = int_of_char (String.unsafe_get s (i + 1)) in + let c2 = int_of_char (String.unsafe_get s (i + 2)) in + let c3 = int_of_char (String.unsafe_get s (i + 3)) in + if + c1 < cat.low || cat.high < c1 || c2 < locb || hicb < c2 || c3 < locb + || hicb < c3 + then (repl, 1) + else + let i1 = (c1 land 0x3f) lsl 12 in + let i2 = (c2 land 0x3f) lsl 6 in + let i3 = c3 land 0x3f in + let i0 = (first land 0x07) lsl 18 in + let uc = i0 lor i3 lor i2 lor i1 in + (uc, 4) + +let encodeCodePoint c = + if c <= 127 then ( + let bytes = (Bytes.create [@doesNotRaise]) 1 in + Bytes.unsafe_set bytes 0 (Char.unsafe_chr c); + Bytes.unsafe_to_string bytes) + else if c <= 2047 then ( + let bytes = (Bytes.create [@doesNotRaise]) 2 in + Bytes.unsafe_set bytes 0 (Char.unsafe_chr (h2 lor (c lsr 6))); + Bytes.unsafe_set bytes 1 + (Char.unsafe_chr (0b1000_0000 lor (c land cont_mask))); + Bytes.unsafe_to_string bytes) + else if c <= 65535 then ( + let bytes = (Bytes.create [@doesNotRaise]) 3 in + Bytes.unsafe_set bytes 0 (Char.unsafe_chr (h3 lor (c lsr 12))); + Bytes.unsafe_set bytes 1 + (Char.unsafe_chr (0b1000_0000 lor ((c lsr 6) land cont_mask))); + Bytes.unsafe_set bytes 2 + (Char.unsafe_chr (0b1000_0000 lor (c land cont_mask))); + Bytes.unsafe_to_string bytes) + else + (* if c <= max then *) + let bytes = (Bytes.create [@doesNotRaise]) 4 in + Bytes.unsafe_set bytes 0 (Char.unsafe_chr (h4 lor (c lsr 18))); + Bytes.unsafe_set bytes 1 + (Char.unsafe_chr (0b1000_0000 lor ((c lsr 12) land cont_mask))); + Bytes.unsafe_set bytes 2 + (Char.unsafe_chr (0b1000_0000 lor ((c lsr 6) land cont_mask))); + Bytes.unsafe_set bytes 3 + (Char.unsafe_chr (0b1000_0000 lor (c land cont_mask))); + Bytes.unsafe_to_string bytes + +let isValidCodePoint c = + (0 <= c && c < surrogateMin) || (surrogateMax < c && c <= max) diff --git a/res_syntax/src/res_utf8.mli b/res_syntax/src/res_utf8.mli new file mode 100644 index 0000000000..7dcb342d68 --- /dev/null +++ b/res_syntax/src/res_utf8.mli @@ -0,0 +1,9 @@ +val repl : int + +val max : int + +val decodeCodePoint : int -> string -> int -> int * int + +val encodeCodePoint : int -> string + +val isValidCodePoint : int -> bool diff --git a/res_syntax/testrunner/dune b/res_syntax/testrunner/dune new file mode 100644 index 0000000000..57d14394e2 --- /dev/null +++ b/res_syntax/testrunner/dune @@ -0,0 +1,6 @@ +(executable + (name res_test) + (public_name testrunner) + (flags + (-open Syntax -open Compilerlibs406)) + (libraries syntax compilerlibs406)) diff --git a/res_syntax/testrunner/res_test.ml b/res_syntax/testrunner/res_test.ml new file mode 100644 index 0000000000..07858f88a8 --- /dev/null +++ b/res_syntax/testrunner/res_test.ml @@ -0,0 +1,153 @@ +module IO = Res_io + +(* test printing of .res file*) +let () = + let filename = "./tests/api/resSyntax.res" in + let prettySource = Res_multi_printer.print `res ~input:filename in + assert ( + prettySource + = {|// test file + +if true { + Js.log("true") +} else { + Js.log("false") +} +|}) + +(* test printing of .resi file*) +let () = + let filename = "./tests/api/resiSyntax.resi" in + let prettySource = Res_multi_printer.print `res ~input:filename in + assert (prettySource = {|// test interface file + +let x: int +|}) + +(* test printing of ocaml .ml file *) +let () = + let filename = "./tests/api/mlSyntax.ml" in + let prettySource = Res_multi_printer.print `ml ~input:filename in + assert ( + prettySource + = {|/* test ml file */ + +let () = 
print_endline("hello world") + +let unicode = "🙈 😅 🙌" + +let d = `Sehr Schön` +|}) + +(* test printing of ocaml .mli file *) +let () = + let filename = "./tests/api/mliSyntax.mli" in + let prettySource = Res_multi_printer.print `ml ~input:filename in + assert ( + prettySource + = {|/* test mli file */ + +let x: int + +/* comment */ +let y: float +|}) + +let () = print_endline "✅ multi printer api tests" + +module OutcomePrinterTests = struct + let signatureToOutcome structure = + Lazy.force Res_outcome_printer.setup; + + Compmisc.init_path false; + Clflags.nopervasives := true; + let env = Compmisc.initial_env () in + try + let _typedStructure, signature, _newenv = + Typemod.type_toplevel_phrase env structure + in + signature |> Printtyp.tree_of_signature + |> !Oprint.out_signature Format.str_formatter; + Format.flush_str_formatter () + with + | Typetexp.Error (_, _, err) -> + Typetexp.report_error env Format.str_formatter err; + prerr_string (Format.flush_str_formatter ()); + exit 1 + | Typemod.Error (_, _, err) -> + Typemod.report_error env Format.str_formatter err; + prerr_string (Format.flush_str_formatter ()); + exit 1 + | Typedecl.Error (_, err) -> + Typedecl.report_error Format.str_formatter err; + prerr_string (Format.flush_str_formatter ()); + exit 1 + | e -> + prerr_string + ("Unknown error while trying to print outcome tree.\n" + ^ "We don't display all the outcome type errors; try adding the new \ + case to the `try` pattern match.\n"); + raise e + + (* `tests/oprint/oprint.res` will be read into memory and typechecked. + * The inferred signature (i.e. the type of the module `oprint.res`) will + * then be converted to the outcome tree. + * The outcome tree is printed to a string + * and stored in a snapshot `tests/oprint/expected/oprint.resi.txt` *) + let run () = + let filename = "tests/oprint/oprint.res" in + let result = + Res_driver.parsingEngine.parseImplementation ~forPrinter:false ~filename + in + let signature = + if result.Res_driver.invalid then ( + Res_driver.parsingEngine.stringOfDiagnostics ~source:result.source + ~filename:result.filename result.diagnostics; + exit 1) + else result.Res_driver.parsetree + in + IO.writeFile ~filename:"tests/oprint/expected/oprint.resi.txt" + ~contents:(signatureToOutcome signature) +end + +module ParserApiTest = struct + let makeDefault () = + let src = " let x = 1\nlet y = 2\nlet z = 3" in + let parser = Res_parser.make src "test.res" in + assert (parser.scanner.lnum == 1); + assert (parser.scanner.lineOffset == 0); + assert (parser.scanner.offset == 6); + assert (parser.token = Res_token.Let); + print_endline "✅ Parser make: initializes parser and checking offsets" + + let unixLf () = + let src = "let x = 1\nlet y = 2\nlet z = 3" in + let parser = Res_parser.make src "test.res" in + (match Res_core.parseImplementation parser with + | [x; y; z] -> + assert (x.pstr_loc.loc_start.pos_lnum = 1); + assert (y.pstr_loc.loc_start.pos_lnum = 2); + assert (z.pstr_loc.loc_start.pos_lnum = 3) + | _ -> assert false); + print_endline "✅ Parser handles LF correct" + + let windowsCrlf () = + let src = "let x = 1\r\nlet y = 2\r\nlet z = 3" in + let parser = Res_parser.make src "test.res" in + (match Res_core.parseImplementation parser with + | [x; y; z] -> + assert (x.pstr_loc.loc_start.pos_lnum = 1); + assert (y.pstr_loc.loc_start.pos_lnum = 2); + assert (z.pstr_loc.loc_start.pos_lnum = 3) + | _ -> assert false); + print_endline "✅ Parser handles CRLF correct" + + let run () = + makeDefault (); + unixLf (); + windowsCrlf () +end + +let () = 
OutcomePrinterTests.run () +let () = ParserApiTest.run () +let () = Res_utf8_test.run () diff --git a/res_syntax/testrunner/res_utf8_test.ml b/res_syntax/testrunner/res_utf8_test.ml new file mode 100644 index 0000000000..5546ae3fc7 --- /dev/null +++ b/res_syntax/testrunner/res_utf8_test.ml @@ -0,0 +1,93 @@ +type utf8Test = {codepoint: int; str: string; size: int} + +let utf8CodePointTests = + [| + {codepoint = 0x00; str = "\x00"; size = 1}; + {codepoint = 0x01; str = "\x01"; size = 1}; + {codepoint = 0x7e; str = "\x7e"; size = 1}; + {codepoint = 0x7f; str = "\x7f"; size = 1}; + {codepoint = 0x0080; str = "\xc2\x80"; size = 2}; + {codepoint = 0x0081; str = "\xc2\x81"; size = 2}; + {codepoint = 0x00bf; str = "\xc2\xbf"; size = 2}; + {codepoint = 0x00c0; str = "\xc3\x80"; size = 2}; + {codepoint = 0x00c1; str = "\xc3\x81"; size = 2}; + {codepoint = 0x00c8; str = "\xc3\x88"; size = 2}; + {codepoint = 0x00d0; str = "\xc3\x90"; size = 2}; + {codepoint = 0x00e0; str = "\xc3\xa0"; size = 2}; + {codepoint = 0x00f0; str = "\xc3\xb0"; size = 2}; + {codepoint = 0x00f8; str = "\xc3\xb8"; size = 2}; + {codepoint = 0x00ff; str = "\xc3\xbf"; size = 2}; + {codepoint = 0x0100; str = "\xc4\x80"; size = 2}; + {codepoint = 0x07ff; str = "\xdf\xbf"; size = 2}; + {codepoint = 0x0400; str = "\xd0\x80"; size = 2}; + {codepoint = 0x0800; str = "\xe0\xa0\x80"; size = 3}; + {codepoint = 0x0801; str = "\xe0\xa0\x81"; size = 3}; + {codepoint = 0x1000; str = "\xe1\x80\x80"; size = 3}; + {codepoint = 0xd000; str = "\xed\x80\x80"; size = 3}; + {codepoint = 0xd7ff; str = "\xed\x9f\xbf"; size = 3}; + {codepoint = 0xe000; str = "\xee\x80\x80"; size = 3}; + {codepoint = 0xfffe; str = "\xef\xbf\xbe"; size = 3}; + {codepoint = 0xffff; str = "\xef\xbf\xbf"; size = 3}; + {codepoint = 0x10000; str = "\xf0\x90\x80\x80"; size = 4}; + {codepoint = 0x10001; str = "\xf0\x90\x80\x81"; size = 4}; + {codepoint = 0x40000; str = "\xf1\x80\x80\x80"; size = 4}; + {codepoint = 0x10fffe; str = "\xf4\x8f\xbf\xbe"; size = 4}; + {codepoint = 0x10ffff; str = "\xf4\x8f\xbf\xbf"; size = 4}; + {codepoint = 0xFFFD; str = "\xef\xbf\xbd"; size = 3}; + |] + +let surrogateRange = + [| + {codepoint = 0xFFFD; str = "\xed\xa0\x80"; size = 1}; + {codepoint = 0xFFFD; str = "\xed\xbf\xbf"; size = 1}; + |] + +let testDecode () = + Array.iter + (fun t -> + let len = String.length t.str in + let codepoint, size = Res_utf8.decodeCodePoint 0 t.str len in + assert (codepoint = t.codepoint); + assert (size = t.size)) + utf8CodePointTests + +let testDecodeSurrogateRange () = + Array.iter + (fun t -> + let len = String.length t.str in + let codepoint, size = Res_utf8.decodeCodePoint 0 t.str len in + assert (codepoint = t.codepoint); + assert (size = t.size)) + surrogateRange + +let testEncode () = + Array.iter + (fun t -> + let encodedString = Res_utf8.encodeCodePoint t.codepoint in + assert (encodedString = t.str)) + utf8CodePointTests + +let validCodePointsTests = + [| + (0, true); + (Char.code 'e', true); + (Res_utf8.max, true); + (0xD7FF, true); + (0xD800, false); + (0xDFFF, false); + (0xE000, true); + (Res_utf8.max + 1, false); + (-1, false); + |] + +let testIsValidCodePoint () = + Array.iter + (fun (codePoint, t) -> assert (Res_utf8.isValidCodePoint codePoint = t)) + validCodePointsTests + +let run () = + testDecode (); + testDecodeSurrogateRange (); + testEncode (); + testIsValidCodePoint (); + print_endline "✅ utf8 tests" diff --git a/res_syntax/tests/api/mlSyntax.ml b/res_syntax/tests/api/mlSyntax.ml new file mode 100644 index 0000000000..33d3236a9d --- 
/dev/null +++ b/res_syntax/tests/api/mlSyntax.ml @@ -0,0 +1,7 @@ +(* test ml file *) + +let () = print_endline "hello world" + +let unicode = "🙈 😅 🙌" + +let d = {|Sehr Schön|} diff --git a/res_syntax/tests/api/mliSyntax.mli b/res_syntax/tests/api/mliSyntax.mli new file mode 100644 index 0000000000..4bba2763df --- /dev/null +++ b/res_syntax/tests/api/mliSyntax.mli @@ -0,0 +1,6 @@ +(* test mli file *) + +val x: int + +(* comment *) +val y: float diff --git a/res_syntax/tests/api/reasonSyntax.res b/res_syntax/tests/api/reasonSyntax.res new file mode 100644 index 0000000000..54bfdf92ee --- /dev/null +++ b/res_syntax/tests/api/reasonSyntax.res @@ -0,0 +1,8 @@ +// test .re file +let \"+++" = (a, b) => a + b + +let unicode = "🙈 😅 🙌" + +let d = `Sehr Schön` /* test */ + +let () = print_endline("foo") diff --git a/res_syntax/tests/api/reiSyntax.resi b/res_syntax/tests/api/reiSyntax.resi new file mode 100644 index 0000000000..bf897f37db --- /dev/null +++ b/res_syntax/tests/api/reiSyntax.resi @@ -0,0 +1,2 @@ +// test .rei file +let x: int diff --git a/res_syntax/tests/api/resReactJsx.res b/res_syntax/tests/api/resReactJsx.res new file mode 100644 index 0000000000..e236abf212 --- /dev/null +++ b/res_syntax/tests/api/resReactJsx.res @@ -0,0 +1,6 @@ +// test React JSX file + +@react.component +let make = (~msg) => { +
<div> {msg->React.string} </div>
+} diff --git a/res_syntax/tests/api/resSyntax.res b/res_syntax/tests/api/resSyntax.res new file mode 100644 index 0000000000..2d6f904b82 --- /dev/null +++ b/res_syntax/tests/api/resSyntax.res @@ -0,0 +1,7 @@ +// test file + +if true { + Js.log("true") +} else { + Js.log("false") +} diff --git a/res_syntax/tests/api/resiSyntax.resi b/res_syntax/tests/api/resiSyntax.resi new file mode 100644 index 0000000000..c415b91ce9 --- /dev/null +++ b/res_syntax/tests/api/resiSyntax.resi @@ -0,0 +1,3 @@ +// test interface file + +let x: int diff --git a/res_syntax/tests/conversion/reason/attributes.res b/res_syntax/tests/conversion/reason/attributes.res new file mode 100644 index 0000000000..ff549e3e6d --- /dev/null +++ b/res_syntax/tests/conversion/reason/attributes.res @@ -0,0 +1,22 @@ +module Color: { + type t = private string + + @inline("red") let red: t + @inline("black") let black: t +} = { + type t = string + + @inline let red = "red" + @inline let black = "black" +} + +@send external map: (array<'a>, 'a => 'b) => array<'b> = "map" +@send external filter: (array<'a>, 'a => 'b) => array<'b> = "filter" +list{1, 2, 3}->map(a => a + 1)->filter(a => modulo(a, 2) == 0)->Js.log + +type t +@new external make: unit => t = "DOMParser" +@bs.send.pipe(: t) +external parseHtmlFromString: (string, @as("text/html") _) => Dom.htmlDocument = "parseFromString" + +Js.log(make() |> parseHtmlFromString("sdsd")) diff --git a/res_syntax/tests/conversion/reason/bracedJsx.res b/res_syntax/tests/conversion/reason/bracedJsx.res new file mode 100644 index 0000000000..0cbb8f4784 --- /dev/null +++ b/res_syntax/tests/conversion/reason/bracedJsx.res @@ -0,0 +1,147 @@ +open Belt + +type action = + | RunCommand + | SetValue(string) + +type line = + | User(string) + | System(string) + +type state = { + history: array, + input: string, +} + +module Styles = { + open Css + let terminal = style(list{ + margin(10->px), + backgroundColor("222"->hex), + borderRadius(10->px), + padding(10->px), + color("fff"->hex), + height(300->px), + overflowY(auto), + fontFamily(#custom(Theme.codeFontFamily)), + unsafe("WebkitOverflowScrolling", "touch"), + }) + let line = style(list{whiteSpace(#preWrap)}) + let input = style(list{ + backgroundColor("222"->hex), + fontFamily(#custom(Theme.codeFontFamily)), + color("fff"->hex), + fontSize(16->px), + borderWidth(zero), + margin(zero), + padding(zero), + outlineStyle(none), + }) + let title = style(list{ + fontSize(48->px), + fontWeight(extraBold), + marginTop(20->px), + marginBottom(20->px), + textAlign(center), + }) +} + +@react.component +let make = () => { + let containerRef = React.useRef(Js.Nullable.null) + + let (state, send) = React.useReducer((state, action) => + switch action { + | RunCommand => { + input: "", + history: Array.concat( + state.history, + [ + User(state.input), + switch state.input->Js.String.trim { + | "" => System("") + | "help" => + System(`available commands: +- help +- ls +- cat `) + | "ls" => + System(`- hack-website.sh +- go-to-home.sh +- nuclear-codes.txt`) + | "cat" => System("cat: missing argument") + | "cat hack-website.sh" + | "cat ./hack-website.sh" => + System("# seriously?\necho \"lol\"") + | "hack-website.sh" + | "./hack-website.sh" => + System("lol") + | "cat nuclear-codes.txt" + | "cat ./nuclear-codes.txt" => + System("000000") + | "go-to-home.sh" + | "./go-to-home.sh" => + Js.Global.setTimeout(() => ReasonReact.Router.push("/"), 1_000)->ignore + System("Redirecting ...") + | "cat go-to-home.sh" + | "cat ./go-to-home.sh" => + 
System("ReasonReact.Router.push(\"/\")") + | _ => System("command not found: " ++ (state.input ++ "\ntry command 'help'")) + }, + ], + ), + } + | SetValue(input) => {...state, input: input} + } + , {history: [], input: ""}) + + React.useEffect1(() => { + switch containerRef.current->Js.Nullable.toOption { + | Some(containerRef) => + open Webapi.Dom + containerRef->Element.setScrollTop(containerRef->Element.scrollHeight->float_of_int) + | None => () + } + None + }, [state.history]) + + let userPrefix = "~ " + +
{"Erreur"->ReasonReact.string}
+
(event->ReactEvent.Mouse.target)["querySelector"]("input")["focus"]()} + ref={containerRef->ReactDOMRe.Ref.domRef}> + {state.history + ->Array.mapWithIndex((index, item) => +
+ {ReasonReact.string( + switch item { + | User(value) => userPrefix ++ value + | System(value) => value + }, + )} +
+ ) + ->ReasonReact.array} +
+ {userPrefix->ReasonReact.string} + { send(SetValue((event->ReactEvent.Form.target)["value"]))} + onKeyDown={event => { + if event->ReactEvent.Keyboard.key == "Enter" { + send(RunCommand) + } + if event->ReactEvent.Keyboard.key == "Tab" { + event->ReactEvent.Keyboard.preventDefault + } + }} + />->ReasonReact.cloneElement(~props={"autoCapitalize": "off"}, [])} +
+
+
+} diff --git a/res_syntax/tests/conversion/reason/braces.res b/res_syntax/tests/conversion/reason/braces.res new file mode 100644 index 0000000000..cf164c934d --- /dev/null +++ b/res_syntax/tests/conversion/reason/braces.res @@ -0,0 +1,24 @@ +let f = () => id +let f = () => id + +if isArray(children) { + // Scenario 1 + let code = children->asStringArray->Js.Array2.joinWith("") + {code->s} +} else if isObject(children) { + // Scenario 2 + children->asElement +} else { + // Scenario 3 + let code = unknownAsString(children) + makeCodeElement(~code, ~metastring, ~lang) +} + +let getDailyNewCases = x => + switch x { + | First(ret) => ret + | Pair({prevRecord, record}) => + let confirmed = record.confirmed - prevRecord.confirmed + let deaths = record.deaths - prevRecord.deaths + {confirmed: confirmed, deaths: deaths} + } diff --git a/res_syntax/tests/conversion/reason/comments.res b/res_syntax/tests/conversion/reason/comments.res new file mode 100644 index 0000000000..948e3ba49b --- /dev/null +++ b/res_syntax/tests/conversion/reason/comments.res @@ -0,0 +1,770 @@ +// comment 1 +let /* a */ x /* b */ = /* c */ 1 /* comment 2 */ + +// comment 3 + +// comment 4 +let x = 1 /* 5 */ +/* 6 */ + +let f = ( + // 1 + a, //2 + // 3 + b, +) => { + // 5 + x: 1, // 6 + // 7 + y: 2, // 8 +} +/* **** comment */ +/* ** comment */ +/* comment */ +/* ** comment */ +/* *** comment */ +/* **** comment */ + +/* ** */ +/* *** */ + +/* ** */ + +/* (** comment *) */ +/* (*** comment *) */ +/* *(*** comment *) */ + +/* comment * */ +/* comment ** */ +/* comment *** */ +/* comment **** */ + +let testingNotQuiteEndOfLineComments = list{ + "Item 1" /* Comment For First Item */, + "Item 2" /* Comment For Second Item */, + "Item 3" /* Comment For Third Item */, + "Item 4" /* Comment For Fourth Item - but no semi */, + /* Comment after last item in list. */ +} /* Comment after list bracket */ + +let testingEndOfLineComments = list{ + "Item 1" /* Comment For First Item */, + "Item 2" /* Comment For Second Item */, + "Item 3" /* Comment For Third Item */, + "Item 4" /* Comment For Fourth Item - but before semi */, + /* Comment after last item in list. */ +} /* Comment after list bracket */ + +/* This time no space between bracket and comment */ +let testingEndOfLineComments = list{} /* Comment after list bracket */ + +type t = (int, int) /* End of line on t */ + +type t22 = /* End of t22 line on type t22 = */ +(int, int) + +type variant = + /* Comment above X */ + | X(int) /* End of line on X */ + /* Comment above Y */ + | Y(int) /* End of line on Y */ +/* Comment on entire type def for variant */ + +type rec x = { + /* not attached *above* x */ + fieldOne: int, +} /* Attached end of line after x */ +and y = { + /* not attached *above* y */ + fieldTwo: int, +} /* Attached end of line after y */ + +let result = switch X(3) { +| X(x) => + /* Where does this comment go? */ + let tmp = x + x + tmp +| Y(x) => + /* How about this one */ + let tmp = x + x + tmp +} + +let result = switch None { +| Some({fieldOne: 20}) => + /* Where does this comment go? 
*/ + let tmp = 0 + 2 + tmp +| Some({fieldOne: n}) => + /* How about this one */ + let tmp = n + n + tmp +| None => 20 +} + +type pointWithManyKindsOfComments = { + /* Line before x */ + x: string /* x field */, + /* Line before y */ + y: string /* y field */, + /* Final row of record */ +} + +type typeParamPointWithComments<'a> = { + /* Line before x */ + x: 'a /* x field */, + /* Line before y */ + y: 'a /* y field */, + /* Final row of record */ +} + +let name_equal = (x, y) => x == y + +let equal = (i1, i2) => i1.contents === i2.contents && true /* most unlikely first */ + +let equal = (i1, i2) => compare(compare(0, 0), compare(1, 1)) /* END OF LINE HERE */ + +module Temp = { + let v = true + let logIt = (str, ()) => print_string(str) +} + +let store_attributes = arg => { + let attributes_file = "test" + let proc_name = attributes_file ++ ".proc" + let should_write = + /* only overwrite defined procedures */ + Temp.v || !Temp.v + if should_write { + Temp.logIt(proc_name, ()) + } +} +3 // - +3 //- + +3 //- + +3 /* - */ +// **** comment +/* ** comment */ +@ocaml.doc(" docstring ") +@ocaml.doc(" docstring ") +@ocaml.doc(" ") +@ocaml.doc("") +@ocaml.doc(" (** comment *) ") +@ocaml.doc(" (*** comment *) ") +@ocaml.doc(" + * Multiline + ") +@ocaml.doc(" Multiline + * + ") +@ocaml.doc(" + ** + ") +module // comment + +/* ** comment */ +/* *** comment */ +/* **** comment */ + +/* ** */ +/* *** */ + +/* ** */ + +// (** comment *) +// (*** comment *) +// *(*** comment *) +// comment * +// comment ** +// comment *** +// comment **** + +JustString = { + include Map.Make(Int32) // Comment eol include +} + +let testingEndOfLineComments = list{ + "Item 1" /* Comment For First Item */, + "Item 2" /* Comment For Second Item */, + "Item 3" /* Comment For Third Item */, + "Item 4" /* Comment For Fourth Item - but before trailing comma */, + // Comment after last item in list. +} /* Comment after rbracket */ + +// But if you place them after the comma at eol, they're preserved as such +let testingEndOfLineComments = list{ + "Item 1", // Comment For First Item + "Item 2", // Comment For Second Item + "Item 3", // Comment For Third Item + "Item 4" /* Comment For Fourth Item - but before trailing comma */, + // Comment after last item in list. +} /* Comment after rbracket */ + +// The space between ; and comment shoudn't matter +let testPlacementOfTrailingComment = list{ + "Item 0", + // Comment after last item in list. +} // Comment after semi + +// The space between ; and comment shoudn't matter +let testPlacementOfTrailingComment = list{ + "Item 0", + // Comment after last item in list. +} // Comment after semi + +// Try again but without other things in the list +let testPlacementOfTrailingComment = list{"Item 0"} // Comment after semi + +// The space between ; and comment shoudn't matter +let testPlacementOfTrailingComment = list{ + "Item 0", + // Comment after last item in list. 
+} // Comment after semi + +let testingEndOfLineComments = list{} // Comment after entire let binding + +// The following is not yet idempotent +// let myFunction +// withFirstArg // First arg +// andSecondArg => { // Second Arg +// withFirstArg + andSecondArg /* before semi */ ; +// }; + +let myFunction = // First arg +( + withFirstArg, + // Second Arg + andSecondArg, +) => withFirstArg + andSecondArg // After Semi + +type point = { + x: string, // x field + y: string, // y field +} + +type pointWithManyKindsOfComments = { + // Line before x + x: string, // x field + // Line before y + y: string, // y field + // Final row of record +} + +type typeParamPointWithComments<'a> = { + // Line before x + x: 'a, // x field + // Line before y + y: 'a, // y field + // Final row of record +} + +// Now, interleaving comments in type params +// Type name +type typeParamPointWithComments2< + 'a, + // The b type apram + 'b, +> = { + // Line before x + x: 'a, // x field + // Line before y + y: 'a, // y field + // Final row of record +} + +/* The way the last row comment is formatted is suboptimal becuase + * record type definitions do not include enough location information */ +type anotherpoint = { + x: string, // x field + y: string, // y field + // comment as last row of record +} + +type t = (int, int) // End of line on t +type t2 = (int, int) // End of line on (int, int) + +type t3 = (int, int) // End of line on (int, int) + +type variant = + | X(int, int) // End of line on X + | Y(int, int) // End of line on Y +// Comment on entire type def for variant + +// Before let +let res = // Before switch +switch X(2, 3) { +// Above X line +| X(_) => "result of X" // End of arrow and X line +// Above Y line +| Y(_) => "result of Y" // End of arrow and Y line +} // After final semi in switch + +let res = switch X(2, 3) { +| X(0, 0) => // After X arrow + "result of X" // End of X body line +| X(1, 0) /* Before X's arrow */ => "result of X" // End of X body line +| X(_) => // After X _ arrow + "result of X" // End of X body line +// Above Y line +| Y(_) => // Comment above Y body + "result of Y" +} + +type variant2 = + // Comment above X + | X(int, int) // End of line on X + // Comment above Y + | Y(int, int) + +type variant3 = + // Comment above X + | X(int, int) // End of line on X + // Comment above Y + | Y(int, int) // End of line on Y + +type rec x = { + // not attached *above* x + fieldOne: int, + fieldA: int, +} // Attached end of line after x +and y = { + // not attached *above* y + fieldTwo: int, +} // Attached end of line after y + +type rec x2 = { + // not attached *above* x2 + fieldOne: int, + fieldA: int, +} // Attached end of line after x2 +and y2 = { + // not attached *above* y2 + fieldTwo: int, +} + +let result = switch None { +| Some({fieldOne: 20, fieldA: a}) => + // Where does this comment go? + let tmp = 0 + 2 + tmp +| Some({fieldOne: n, fieldA: a}) => + // How about this one + let tmp = n + n + tmp +| None => 20 +} + +let res = // Before switch +switch X(2, 3) { +// Above X line +| X(_) => "result of X" // End of arrow and X line +// Above Y line +| Y(_) => "result of Y" // End of arrow and Y line +} + +/* + * Now these end of line comments *should* be retained. + */ +let result = switch None { +| Some({fieldOne: 20, fieldA: a}) => + let tmp = 0 + 2 + tmp +| Some({fieldOne: n, fieldA: a}) => + let tmp = n + n + tmp +| None => 20 +} + +/* + * These end of line comments *should* be retained. 
+ * To get the simple expression eol comment to be retained, we just need to + * implement label breaking eol behavior much like we did with sequences. + * Otherwise, right now they are not idempotent. + */ +let res = switch X(2, 3) { +// Above X line +| X(_, _) => "result of X" // retain this // retain this + +// Above Y line +| Y(_) => "result of Y" // End of arrow and Y line +} + +/* type optionalTuple = */ +/* | OptTup( */ +/* option( */ +/* ( */ +/* int, */ +/* int */ +/* ), */ +/* ), */ +/* ); */ + +type optionTuple = option<(int, int)> // First int // Second int + +type intPair = (int, int) // First int // Second int + +type intPair2 = ( + // First int + int, + // Second int + int, +) + +let result = @ocaml.doc("") (2 + 3) + +// This is not yet idempotent +// { +// /**/ +// (+) 2 3 +// }; + +let a = () +for i in 0 to 10 { + // bla + a +} + +if true { + () +} + +type color = + | Red(int) // After red end of line + | Black(int) // After black end of line + | Green(int) // After green end of line +// On next line after color type def + +let blahCurriedX = (x, x) => + switch x { + | Red(10) + | Black(20) + | Green(10) => 1 // After or pattern green + | Red(x) => 0 // After red + | Black(x) => 0 // After black + | Green(x) => 0 + } // After second green +// On next line after blahCurriedX def + +let name_equal = (x, y) => x == y + +let equal = (i1, i2) => i1.contents === i2.contents && true // most unlikely first + +let equal = (i1, i2) => compare(compare(0, 0), compare(1, 1)) // END OF LINE HERE + +let tuple_equal = ((i1, i2)) => i1 == i2 + +let tuple_equal = ((csu, mgd)) => + // Some really long comments, see https://github.com/facebook/reason/issues/811 + tuple_equal((csu, mgd)) + +let trueThing = true + +for i in 0 to 1 { + // comment + print_newline() +} + +while trueThing { + // comment + print_newline() +} + +if trueThing { + // comment + print_newline() +} + +// Comment before if test +if trueThing { + // Comment before print + print_newline() + // Comment before print + print_newline() + // Comment after final print +} + +// Comment before if test +if trueThing { + // Comment before print + print_newline() +} + +// Comment before if test +if trueThing { + // Comment before print + print_newline() + // Comment before print + print_newline() + // Comment after final print +} else { + // Comment before print + print_newline() + // Comment before print + print_newline() + // Comment after final print +} + +// Comment before if test +if trueThing { + // Comment before print + print_newline() +} else { + // Comment before print + print_newline() +} + +// Comment before while test +while trueThing { + // Comment before print + print_newline() + // Comment before print + print_newline() + // Comment after final print +} + +// Comment before while test +while trueThing { + // Comment before print + print_newline() +} + +// Comment before for test +for i in 0 to 100 { + // Comment before print + print_newline() + // Comment before print + print_newline() + // Comment after final print +} + +// Comment before for test +for i in 0 to 100 { + // Comment before print + print_newline() +} + +if trueThing { + // Comment before print + print_newline() // eol print + // Comment before print + print_newline() // eol print + // Comment after print +} + +// Comment before if test +if trueThing { + // Comment before print + print_newline() +} + +// Comment before if test +if trueThing { + // Comment before print + print_newline() // eol print + // Comment before print + print_newline() // eol print + 
// Comment after print +} else { + // Comment before print + print_newline() // eol print + // Comment before print + print_newline() // eol print + // Comment after print +} + +// Comment before if test +if trueThing { + // Comment before print + print_newline() +} else { + // Comment before print + print_newline() // eol print + // Comment before print + print_newline() // eol print + // Comment after print +} + +// Comment before while test +while trueThing { + // Comment before print + print_newline() // eol + // Comment before print + print_newline() // eol + // Comment after final print +} + +// Comment before while test +while trueThing { + // Comment before print + print_newline() +} + +// Comment before for test +for i in 0 to 100 { + // Comment before print + print_newline() // eol + // Comment before print + print_newline() // eol + // Comment after final print +} + +// Comment before for test +for i in 0 to 100 { + // Comment before print + print_newline() +} + +let f = (a, b, c, d) => a + b + c + d + +while trueThing { + f( + // a + 1, + // b + 2, + // c + 3, + // d + 4, + // does work + ) +} +while trueThing { + f( + // a + 1, + // b + 2, + // c + 3, + // d + 4, // does work + ) +} + +ignore((_really, _long, _printWidth, _exceeded, _here) => { + // First comment + let x = 0 + x + x + // Closing comment +}) + +ignore((_xxx, _yyy) => { + // First comment + let x = 0 + x + x + // Closing comment +}) + +type tester<'a, 'b> = + | TwoArgsConstructor('a, 'b) + | OneTupleArgConstructor(('a, 'b)) +let callFunctionTwoArgs = (a, b) => () +let callFunctionOneTuple = tuple => () + +let y = TwoArgsConstructor(1, 2) //eol1 // eol2 + +let y = callFunctionTwoArgs(1, 2) //eol1 // eol2 + +let y = OneTupleArgConstructor((1, 2)) //eol1 // eol2 + +let y = callFunctionOneTuple((1, 2)) //eol1 // eol2 + +type polyRecord<'a, 'b> = { + fieldOne: 'a, + fieldTwo: 'b, +} + +let r = { + fieldOne: 1, //eol1 + fieldTwo: 2, // eol2 +} + +let r = { + fieldOne: 1, //eol1 + fieldTwo: 2, // eol2 with trailing comma +} + +let y = TwoArgsConstructor("1", "2") //eol1 // eol2 + +let y = callFunctionTwoArgs("1", "2") //eol1 // eol2 + +let y = OneTupleArgConstructor(("1", "2")) //eol1 // eol2 + +let y = callFunctionOneTuple(("1", "2")) //eol1 // eol2 + +let r = { + fieldOne: "1", //eol1 + fieldTwo: "2", // eol2 +} + +let r = { + fieldOne: "1", //eol1 + fieldTwo: "2", // eol2 with trailing comma +} + +let identifier = "hello" + +let y = TwoArgsConstructor(identifier, identifier) //eol1 // eol2 + +let y = callFunctionTwoArgs(identifier, identifier) //eol1 // eol2 + +let y = OneTupleArgConstructor((identifier, identifier)) //eol1 // eol2 + +let y = callFunctionOneTuple((identifier, identifier)) //eol1 // eol2 + +let r = { + fieldOne: identifier, //eol1 + fieldTwo: identifier, // eol2 +} + +let r = { + fieldOne: identifier, //eol1 + fieldTwo: identifier, // eol2 with trailing comma +} + +let y = TwoArgsConstructor((identifier: string), (identifier: string)) //eol1 // eol2 + +let y = callFunctionTwoArgs((identifier: string), (identifier: string)) //eol1 // eol2 + +let y = OneTupleArgConstructor(((identifier: string), (identifier: string))) //eol1 // eol2 + +let y = callFunctionOneTuple(((identifier: string), (identifier: string))) //eol1 // eol2 + +let r = { + fieldOne: (identifier: string), //eol1 + fieldTwo: (identifier: string), // eol2 +} + +let r = { + fieldOne: (identifier: string), //eol1 + fieldTwo: (identifier: string), // eol2 with trailing comma +} + +// whitespace interleaving + +// comment1 +// comment2 + +// 
whitespace above & below + +let r = { + fieldOne: (identifier: string), //eol1 + // c1 + + // c2 + + // c3 + // c4 + + // c5 + fieldTwo: (identifier: string), // eol2 with trailing comma +} +// trailing + +// trailing whitespace above +// attach + +// last comment diff --git a/res_syntax/tests/conversion/reason/destructiveSubstitutionSubModules.ml b/res_syntax/tests/conversion/reason/destructiveSubstitutionSubModules.ml new file mode 100644 index 0000000000..f6060c1743 --- /dev/null +++ b/res_syntax/tests/conversion/reason/destructiveSubstitutionSubModules.ml @@ -0,0 +1,51 @@ +module type Id = + sig type t val toString : t -> string val ofString : string -> t option end +module type A = sig module Id : Id type name = string val name : name end +module type B = sig module A : A val fullName : A.Id.t -> string end +module MakeB(A:A): B with module A.Id := A.Id = + (struct + module A = A + let fullName id = A.name ^ ("-" ^ (A.Id.toString id)) + end) +module StringId : Id = + struct + type t = string + external toString : t -> string = "%identity" + external ofString : string -> t = "%identity" + let ofString id = ((Some ((id |> ofString)))[@explicit_arity ]) + end +module A = struct module Id = StringId + type name = string + let name = "A" end +module B = (MakeB)(A) +let test = + match "someId" |> StringId.ofString with + | ((Some (id))[@explicit_arity ]) -> ((Some ((id |> B.fullName))) + [@explicit_arity ]) + | None as none -> none + +module type Printable = sig + type t + val print : Format.formatter -> t -> unit +end + +module type Comparable = sig + type t + val compare : t -> t -> int +end + +module type PrintableComparable = sig + include Printable + include Comparable with type t := t +end + +module type S = Comparable with type t := int + +module type S = sig + type u + include Comparable with type t := u +end + +module type ComparableInt = Comparable with type t = int + +module type CompareInt = ComparableInt with type t := int diff --git a/res_syntax/tests/conversion/reason/docComments.ml b/res_syntax/tests/conversion/reason/docComments.ml new file mode 100644 index 0000000000..4a6a782133 --- /dev/null +++ b/res_syntax/tests/conversion/reason/docComments.ml @@ -0,0 +1,40 @@ +(** The first special comment of the file is the comment associated + to the whole module. *) + + (** The comment for function f *) + let f x y = x + y + + (** This comment is not attached to any element since there is another + special comment just before the next element. *) + + (** Comment for exception My_exception, even with a simple comment + between the special comment and the exception.*) + (* A simple comment. *) + exception My_exception of (int -> int) * int + + (** Comment for type weather *) + type weather = + | Rain of int (** The comment for constructor Rain *) + | Sun (** The comment for constructor Sun *) + + (** The comment for type my_record *) + type my_record = { + foo : int ; (** Comment for field foo *) + bar : string ; (** Comment for field bar *) + } + + (** The comment for module Foo *) + module Foo = + struct + (** The comment for x *) + let x = 0 + (** A special comment in the class, but not associated to any element. *) + end + + (** The comment for module type my_module_type. *) + module type my_module_type = + sig + (* Comment for value x. *) + val x : int + (* ... 
*) + end diff --git a/res_syntax/tests/conversion/reason/docComments.mli b/res_syntax/tests/conversion/reason/docComments.mli new file mode 100644 index 0000000000..bfde148836 --- /dev/null +++ b/res_syntax/tests/conversion/reason/docComments.mli @@ -0,0 +1,70 @@ +(** The first special comment of the file is the comment associated + with the whole module.*) + + + (** Special comments can be placed between elements and are kept + by the OCamldoc tool, but are not associated to any element. + @-tags in these comments are ignored.*) + + (*******************************************************************) + (** Comments like the one above, with more than two asterisks, + are ignored. *) + + (** The comment for function f. *) + val f : int -> int -> int + (** The continuation of the comment for function f. *) + + (** Comment for exception My_exception, even with a simple comment + between the special comment and the exception.*) + (* Hello, I'm a simple comment :-) *) + exception My_exception of (int -> int) * int + + (** Comment for type weather *) + type weather = + | Rain of int (** The comment for constructor Rain *) + | Sun (** The comment for constructor Sun *) + + (** Comment for type weather2 *) + type weather2 = + | Rain of int (** The comment for constructor Rain *) + | Sun (** The comment for constructor Sun *) + (** I can continue the comment for type weather2 here + because there is already a comment associated to the last constructor.*) + + (** The comment for type my_record *) + type my_record = { + foo : int ; (** Comment for field foo *) + bar : string ; (** Comment for field bar *) + } + (** Continuation of comment for type my_record *) + + (** Comment for foo *) + val foo : string + (** This comment is associated to foo and not to bar. *) + val bar : string + (** This comment is associated to bar. *) + + (** The comment for module Foo *) + module Foo : + sig + (** The comment for x *) + val x : int + + (** A special comment that is kept but not associated to any element *) + end + + (** The comment for module type my_module_type. *) + module type my_module_type = + sig + (** The comment for value x. *) + val x : int + + (** The comment for module M. *) + module M : + sig + (** The comment for value y. *) + val y : int + + (* ... 
*) + end + end diff --git a/res_syntax/tests/conversion/reason/docComments.res b/res_syntax/tests/conversion/reason/docComments.res new file mode 100644 index 0000000000..26288308bf --- /dev/null +++ b/res_syntax/tests/conversion/reason/docComments.res @@ -0,0 +1,11 @@ +@ocaml.doc(" foo ") +let x = 1 + +@ocaml.doc("") +let x = 1 + +/* ** foo */ +let x = 1 + +/* **** foo */ +let x = 1 diff --git a/res_syntax/tests/conversion/reason/expected/attributes.res.txt b/res_syntax/tests/conversion/reason/expected/attributes.res.txt new file mode 100644 index 0000000000..ff549e3e6d --- /dev/null +++ b/res_syntax/tests/conversion/reason/expected/attributes.res.txt @@ -0,0 +1,22 @@ +module Color: { + type t = private string + + @inline("red") let red: t + @inline("black") let black: t +} = { + type t = string + + @inline let red = "red" + @inline let black = "black" +} + +@send external map: (array<'a>, 'a => 'b) => array<'b> = "map" +@send external filter: (array<'a>, 'a => 'b) => array<'b> = "filter" +list{1, 2, 3}->map(a => a + 1)->filter(a => modulo(a, 2) == 0)->Js.log + +type t +@new external make: unit => t = "DOMParser" +@bs.send.pipe(: t) +external parseHtmlFromString: (string, @as("text/html") _) => Dom.htmlDocument = "parseFromString" + +Js.log(make() |> parseHtmlFromString("sdsd")) diff --git a/res_syntax/tests/conversion/reason/expected/bracedJsx.res.txt b/res_syntax/tests/conversion/reason/expected/bracedJsx.res.txt new file mode 100644 index 0000000000..b49e82fece --- /dev/null +++ b/res_syntax/tests/conversion/reason/expected/bracedJsx.res.txt @@ -0,0 +1,147 @@ +open Belt + +type action = + | RunCommand + | SetValue(string) + +type line = + | User(string) + | System(string) + +type state = { + history: array, + input: string, +} + +module Styles = { + open Css + let terminal = style(list{ + margin(10->px), + backgroundColor("222"->hex), + borderRadius(10->px), + padding(10->px), + color("fff"->hex), + height(300->px), + overflowY(auto), + fontFamily(#custom(Theme.codeFontFamily)), + unsafe("WebkitOverflowScrolling", "touch"), + }) + let line = style(list{whiteSpace(#preWrap)}) + let input = style(list{ + backgroundColor("222"->hex), + fontFamily(#custom(Theme.codeFontFamily)), + color("fff"->hex), + fontSize(16->px), + borderWidth(zero), + margin(zero), + padding(zero), + outlineStyle(none), + }) + let title = style(list{ + fontSize(48->px), + fontWeight(extraBold), + marginTop(20->px), + marginBottom(20->px), + textAlign(center), + }) +} + +@react.component +let make = () => { + let containerRef = React.useRef(Js.Nullable.null) + + let (state, send) = React.useReducer((state, action) => + switch action { + | RunCommand => { + input: "", + history: Array.concat( + state.history, + [ + User(state.input), + switch state.input->Js.String.trim { + | "" => System("") + | "help" => + System(`available commands: +- help +- ls +- cat `) + | "ls" => + System(`- hack-website.sh +- go-to-home.sh +- nuclear-codes.txt`) + | "cat" => System("cat: missing argument") + | "cat hack-website.sh" + | "cat ./hack-website.sh" => + System("# seriously?\necho \"lol\"") + | "hack-website.sh" + | "./hack-website.sh" => + System("lol") + | "cat nuclear-codes.txt" + | "cat ./nuclear-codes.txt" => + System("000000") + | "go-to-home.sh" + | "./go-to-home.sh" => + Js.Global.setTimeout(() => ReasonReact.Router.push("/"), 1_000)->ignore + System("Redirecting ...") + | "cat go-to-home.sh" + | "cat ./go-to-home.sh" => + System("ReasonReact.Router.push(\"/\")") + | _ => System("command not found: " ++ (state.input ++ 
"\ntry command 'help'")) + }, + ], + ), + } + | SetValue(input) => {...state, input} + } + , {history: [], input: ""}) + + React.useEffect1(() => { + switch containerRef.current->Js.Nullable.toOption { + | Some(containerRef) => + open Webapi.Dom + containerRef->Element.setScrollTop(containerRef->Element.scrollHeight->float_of_int) + | None => () + } + None + }, [state.history]) + + let userPrefix = "~ " + +
{"Erreur"->ReasonReact.string}
+
(event->ReactEvent.Mouse.target)["querySelector"]("input")["focus"]()} + ref={containerRef->ReactDOMRe.Ref.domRef}> + {state.history + ->Array.mapWithIndex((index, item) => +
+ {ReasonReact.string( + switch item { + | User(value) => userPrefix ++ value + | System(value) => value + }, + )} +
+ ) + ->ReasonReact.array} +
+ {userPrefix->ReasonReact.string} + { send(SetValue((event->ReactEvent.Form.target)["value"]))} + onKeyDown={event => { + if event->ReactEvent.Keyboard.key == "Enter" { + send(RunCommand) + } + if event->ReactEvent.Keyboard.key == "Tab" { + event->ReactEvent.Keyboard.preventDefault + } + }} + />->ReasonReact.cloneElement(~props={"autoCapitalize": "off"}, [])} +
+
+
+} diff --git a/res_syntax/tests/conversion/reason/expected/braces.res.txt b/res_syntax/tests/conversion/reason/expected/braces.res.txt new file mode 100644 index 0000000000..0a3e335d63 --- /dev/null +++ b/res_syntax/tests/conversion/reason/expected/braces.res.txt @@ -0,0 +1,24 @@ +let f = () => id +let f = () => id + +if isArray(children) { + // Scenario 1 + let code = children->asStringArray->Js.Array2.joinWith("") + {code->s} +} else if isObject(children) { + // Scenario 2 + children->asElement +} else { + // Scenario 3 + let code = unknownAsString(children) + makeCodeElement(~code, ~metastring, ~lang) +} + +let getDailyNewCases = x => + switch x { + | First(ret) => ret + | Pair({prevRecord, record}) => + let confirmed = record.confirmed - prevRecord.confirmed + let deaths = record.deaths - prevRecord.deaths + {confirmed, deaths} + } diff --git a/res_syntax/tests/conversion/reason/expected/comments.res.txt b/res_syntax/tests/conversion/reason/expected/comments.res.txt new file mode 100644 index 0000000000..948e3ba49b --- /dev/null +++ b/res_syntax/tests/conversion/reason/expected/comments.res.txt @@ -0,0 +1,770 @@ +// comment 1 +let /* a */ x /* b */ = /* c */ 1 /* comment 2 */ + +// comment 3 + +// comment 4 +let x = 1 /* 5 */ +/* 6 */ + +let f = ( + // 1 + a, //2 + // 3 + b, +) => { + // 5 + x: 1, // 6 + // 7 + y: 2, // 8 +} +/* **** comment */ +/* ** comment */ +/* comment */ +/* ** comment */ +/* *** comment */ +/* **** comment */ + +/* ** */ +/* *** */ + +/* ** */ + +/* (** comment *) */ +/* (*** comment *) */ +/* *(*** comment *) */ + +/* comment * */ +/* comment ** */ +/* comment *** */ +/* comment **** */ + +let testingNotQuiteEndOfLineComments = list{ + "Item 1" /* Comment For First Item */, + "Item 2" /* Comment For Second Item */, + "Item 3" /* Comment For Third Item */, + "Item 4" /* Comment For Fourth Item - but no semi */, + /* Comment after last item in list. */ +} /* Comment after list bracket */ + +let testingEndOfLineComments = list{ + "Item 1" /* Comment For First Item */, + "Item 2" /* Comment For Second Item */, + "Item 3" /* Comment For Third Item */, + "Item 4" /* Comment For Fourth Item - but before semi */, + /* Comment after last item in list. */ +} /* Comment after list bracket */ + +/* This time no space between bracket and comment */ +let testingEndOfLineComments = list{} /* Comment after list bracket */ + +type t = (int, int) /* End of line on t */ + +type t22 = /* End of t22 line on type t22 = */ +(int, int) + +type variant = + /* Comment above X */ + | X(int) /* End of line on X */ + /* Comment above Y */ + | Y(int) /* End of line on Y */ +/* Comment on entire type def for variant */ + +type rec x = { + /* not attached *above* x */ + fieldOne: int, +} /* Attached end of line after x */ +and y = { + /* not attached *above* y */ + fieldTwo: int, +} /* Attached end of line after y */ + +let result = switch X(3) { +| X(x) => + /* Where does this comment go? */ + let tmp = x + x + tmp +| Y(x) => + /* How about this one */ + let tmp = x + x + tmp +} + +let result = switch None { +| Some({fieldOne: 20}) => + /* Where does this comment go? 
*/ + let tmp = 0 + 2 + tmp +| Some({fieldOne: n}) => + /* How about this one */ + let tmp = n + n + tmp +| None => 20 +} + +type pointWithManyKindsOfComments = { + /* Line before x */ + x: string /* x field */, + /* Line before y */ + y: string /* y field */, + /* Final row of record */ +} + +type typeParamPointWithComments<'a> = { + /* Line before x */ + x: 'a /* x field */, + /* Line before y */ + y: 'a /* y field */, + /* Final row of record */ +} + +let name_equal = (x, y) => x == y + +let equal = (i1, i2) => i1.contents === i2.contents && true /* most unlikely first */ + +let equal = (i1, i2) => compare(compare(0, 0), compare(1, 1)) /* END OF LINE HERE */ + +module Temp = { + let v = true + let logIt = (str, ()) => print_string(str) +} + +let store_attributes = arg => { + let attributes_file = "test" + let proc_name = attributes_file ++ ".proc" + let should_write = + /* only overwrite defined procedures */ + Temp.v || !Temp.v + if should_write { + Temp.logIt(proc_name, ()) + } +} +3 // - +3 //- + +3 //- + +3 /* - */ +// **** comment +/* ** comment */ +@ocaml.doc(" docstring ") +@ocaml.doc(" docstring ") +@ocaml.doc(" ") +@ocaml.doc("") +@ocaml.doc(" (** comment *) ") +@ocaml.doc(" (*** comment *) ") +@ocaml.doc(" + * Multiline + ") +@ocaml.doc(" Multiline + * + ") +@ocaml.doc(" + ** + ") +module // comment + +/* ** comment */ +/* *** comment */ +/* **** comment */ + +/* ** */ +/* *** */ + +/* ** */ + +// (** comment *) +// (*** comment *) +// *(*** comment *) +// comment * +// comment ** +// comment *** +// comment **** + +JustString = { + include Map.Make(Int32) // Comment eol include +} + +let testingEndOfLineComments = list{ + "Item 1" /* Comment For First Item */, + "Item 2" /* Comment For Second Item */, + "Item 3" /* Comment For Third Item */, + "Item 4" /* Comment For Fourth Item - but before trailing comma */, + // Comment after last item in list. +} /* Comment after rbracket */ + +// But if you place them after the comma at eol, they're preserved as such +let testingEndOfLineComments = list{ + "Item 1", // Comment For First Item + "Item 2", // Comment For Second Item + "Item 3", // Comment For Third Item + "Item 4" /* Comment For Fourth Item - but before trailing comma */, + // Comment after last item in list. +} /* Comment after rbracket */ + +// The space between ; and comment shoudn't matter +let testPlacementOfTrailingComment = list{ + "Item 0", + // Comment after last item in list. +} // Comment after semi + +// The space between ; and comment shoudn't matter +let testPlacementOfTrailingComment = list{ + "Item 0", + // Comment after last item in list. +} // Comment after semi + +// Try again but without other things in the list +let testPlacementOfTrailingComment = list{"Item 0"} // Comment after semi + +// The space between ; and comment shoudn't matter +let testPlacementOfTrailingComment = list{ + "Item 0", + // Comment after last item in list. 
+} // Comment after semi + +let testingEndOfLineComments = list{} // Comment after entire let binding + +// The following is not yet idempotent +// let myFunction +// withFirstArg // First arg +// andSecondArg => { // Second Arg +// withFirstArg + andSecondArg /* before semi */ ; +// }; + +let myFunction = // First arg +( + withFirstArg, + // Second Arg + andSecondArg, +) => withFirstArg + andSecondArg // After Semi + +type point = { + x: string, // x field + y: string, // y field +} + +type pointWithManyKindsOfComments = { + // Line before x + x: string, // x field + // Line before y + y: string, // y field + // Final row of record +} + +type typeParamPointWithComments<'a> = { + // Line before x + x: 'a, // x field + // Line before y + y: 'a, // y field + // Final row of record +} + +// Now, interleaving comments in type params +// Type name +type typeParamPointWithComments2< + 'a, + // The b type apram + 'b, +> = { + // Line before x + x: 'a, // x field + // Line before y + y: 'a, // y field + // Final row of record +} + +/* The way the last row comment is formatted is suboptimal becuase + * record type definitions do not include enough location information */ +type anotherpoint = { + x: string, // x field + y: string, // y field + // comment as last row of record +} + +type t = (int, int) // End of line on t +type t2 = (int, int) // End of line on (int, int) + +type t3 = (int, int) // End of line on (int, int) + +type variant = + | X(int, int) // End of line on X + | Y(int, int) // End of line on Y +// Comment on entire type def for variant + +// Before let +let res = // Before switch +switch X(2, 3) { +// Above X line +| X(_) => "result of X" // End of arrow and X line +// Above Y line +| Y(_) => "result of Y" // End of arrow and Y line +} // After final semi in switch + +let res = switch X(2, 3) { +| X(0, 0) => // After X arrow + "result of X" // End of X body line +| X(1, 0) /* Before X's arrow */ => "result of X" // End of X body line +| X(_) => // After X _ arrow + "result of X" // End of X body line +// Above Y line +| Y(_) => // Comment above Y body + "result of Y" +} + +type variant2 = + // Comment above X + | X(int, int) // End of line on X + // Comment above Y + | Y(int, int) + +type variant3 = + // Comment above X + | X(int, int) // End of line on X + // Comment above Y + | Y(int, int) // End of line on Y + +type rec x = { + // not attached *above* x + fieldOne: int, + fieldA: int, +} // Attached end of line after x +and y = { + // not attached *above* y + fieldTwo: int, +} // Attached end of line after y + +type rec x2 = { + // not attached *above* x2 + fieldOne: int, + fieldA: int, +} // Attached end of line after x2 +and y2 = { + // not attached *above* y2 + fieldTwo: int, +} + +let result = switch None { +| Some({fieldOne: 20, fieldA: a}) => + // Where does this comment go? + let tmp = 0 + 2 + tmp +| Some({fieldOne: n, fieldA: a}) => + // How about this one + let tmp = n + n + tmp +| None => 20 +} + +let res = // Before switch +switch X(2, 3) { +// Above X line +| X(_) => "result of X" // End of arrow and X line +// Above Y line +| Y(_) => "result of Y" // End of arrow and Y line +} + +/* + * Now these end of line comments *should* be retained. + */ +let result = switch None { +| Some({fieldOne: 20, fieldA: a}) => + let tmp = 0 + 2 + tmp +| Some({fieldOne: n, fieldA: a}) => + let tmp = n + n + tmp +| None => 20 +} + +/* + * These end of line comments *should* be retained. 
+ * To get the simple expression eol comment to be retained, we just need to + * implement label breaking eol behavior much like we did with sequences. + * Otherwise, right now they are not idempotent. + */ +let res = switch X(2, 3) { +// Above X line +| X(_, _) => "result of X" // retain this // retain this + +// Above Y line +| Y(_) => "result of Y" // End of arrow and Y line +} + +/* type optionalTuple = */ +/* | OptTup( */ +/* option( */ +/* ( */ +/* int, */ +/* int */ +/* ), */ +/* ), */ +/* ); */ + +type optionTuple = option<(int, int)> // First int // Second int + +type intPair = (int, int) // First int // Second int + +type intPair2 = ( + // First int + int, + // Second int + int, +) + +let result = @ocaml.doc("") (2 + 3) + +// This is not yet idempotent +// { +// /**/ +// (+) 2 3 +// }; + +let a = () +for i in 0 to 10 { + // bla + a +} + +if true { + () +} + +type color = + | Red(int) // After red end of line + | Black(int) // After black end of line + | Green(int) // After green end of line +// On next line after color type def + +let blahCurriedX = (x, x) => + switch x { + | Red(10) + | Black(20) + | Green(10) => 1 // After or pattern green + | Red(x) => 0 // After red + | Black(x) => 0 // After black + | Green(x) => 0 + } // After second green +// On next line after blahCurriedX def + +let name_equal = (x, y) => x == y + +let equal = (i1, i2) => i1.contents === i2.contents && true // most unlikely first + +let equal = (i1, i2) => compare(compare(0, 0), compare(1, 1)) // END OF LINE HERE + +let tuple_equal = ((i1, i2)) => i1 == i2 + +let tuple_equal = ((csu, mgd)) => + // Some really long comments, see https://github.com/facebook/reason/issues/811 + tuple_equal((csu, mgd)) + +let trueThing = true + +for i in 0 to 1 { + // comment + print_newline() +} + +while trueThing { + // comment + print_newline() +} + +if trueThing { + // comment + print_newline() +} + +// Comment before if test +if trueThing { + // Comment before print + print_newline() + // Comment before print + print_newline() + // Comment after final print +} + +// Comment before if test +if trueThing { + // Comment before print + print_newline() +} + +// Comment before if test +if trueThing { + // Comment before print + print_newline() + // Comment before print + print_newline() + // Comment after final print +} else { + // Comment before print + print_newline() + // Comment before print + print_newline() + // Comment after final print +} + +// Comment before if test +if trueThing { + // Comment before print + print_newline() +} else { + // Comment before print + print_newline() +} + +// Comment before while test +while trueThing { + // Comment before print + print_newline() + // Comment before print + print_newline() + // Comment after final print +} + +// Comment before while test +while trueThing { + // Comment before print + print_newline() +} + +// Comment before for test +for i in 0 to 100 { + // Comment before print + print_newline() + // Comment before print + print_newline() + // Comment after final print +} + +// Comment before for test +for i in 0 to 100 { + // Comment before print + print_newline() +} + +if trueThing { + // Comment before print + print_newline() // eol print + // Comment before print + print_newline() // eol print + // Comment after print +} + +// Comment before if test +if trueThing { + // Comment before print + print_newline() +} + +// Comment before if test +if trueThing { + // Comment before print + print_newline() // eol print + // Comment before print + print_newline() // eol print + 
// Comment after print +} else { + // Comment before print + print_newline() // eol print + // Comment before print + print_newline() // eol print + // Comment after print +} + +// Comment before if test +if trueThing { + // Comment before print + print_newline() +} else { + // Comment before print + print_newline() // eol print + // Comment before print + print_newline() // eol print + // Comment after print +} + +// Comment before while test +while trueThing { + // Comment before print + print_newline() // eol + // Comment before print + print_newline() // eol + // Comment after final print +} + +// Comment before while test +while trueThing { + // Comment before print + print_newline() +} + +// Comment before for test +for i in 0 to 100 { + // Comment before print + print_newline() // eol + // Comment before print + print_newline() // eol + // Comment after final print +} + +// Comment before for test +for i in 0 to 100 { + // Comment before print + print_newline() +} + +let f = (a, b, c, d) => a + b + c + d + +while trueThing { + f( + // a + 1, + // b + 2, + // c + 3, + // d + 4, + // does work + ) +} +while trueThing { + f( + // a + 1, + // b + 2, + // c + 3, + // d + 4, // does work + ) +} + +ignore((_really, _long, _printWidth, _exceeded, _here) => { + // First comment + let x = 0 + x + x + // Closing comment +}) + +ignore((_xxx, _yyy) => { + // First comment + let x = 0 + x + x + // Closing comment +}) + +type tester<'a, 'b> = + | TwoArgsConstructor('a, 'b) + | OneTupleArgConstructor(('a, 'b)) +let callFunctionTwoArgs = (a, b) => () +let callFunctionOneTuple = tuple => () + +let y = TwoArgsConstructor(1, 2) //eol1 // eol2 + +let y = callFunctionTwoArgs(1, 2) //eol1 // eol2 + +let y = OneTupleArgConstructor((1, 2)) //eol1 // eol2 + +let y = callFunctionOneTuple((1, 2)) //eol1 // eol2 + +type polyRecord<'a, 'b> = { + fieldOne: 'a, + fieldTwo: 'b, +} + +let r = { + fieldOne: 1, //eol1 + fieldTwo: 2, // eol2 +} + +let r = { + fieldOne: 1, //eol1 + fieldTwo: 2, // eol2 with trailing comma +} + +let y = TwoArgsConstructor("1", "2") //eol1 // eol2 + +let y = callFunctionTwoArgs("1", "2") //eol1 // eol2 + +let y = OneTupleArgConstructor(("1", "2")) //eol1 // eol2 + +let y = callFunctionOneTuple(("1", "2")) //eol1 // eol2 + +let r = { + fieldOne: "1", //eol1 + fieldTwo: "2", // eol2 +} + +let r = { + fieldOne: "1", //eol1 + fieldTwo: "2", // eol2 with trailing comma +} + +let identifier = "hello" + +let y = TwoArgsConstructor(identifier, identifier) //eol1 // eol2 + +let y = callFunctionTwoArgs(identifier, identifier) //eol1 // eol2 + +let y = OneTupleArgConstructor((identifier, identifier)) //eol1 // eol2 + +let y = callFunctionOneTuple((identifier, identifier)) //eol1 // eol2 + +let r = { + fieldOne: identifier, //eol1 + fieldTwo: identifier, // eol2 +} + +let r = { + fieldOne: identifier, //eol1 + fieldTwo: identifier, // eol2 with trailing comma +} + +let y = TwoArgsConstructor((identifier: string), (identifier: string)) //eol1 // eol2 + +let y = callFunctionTwoArgs((identifier: string), (identifier: string)) //eol1 // eol2 + +let y = OneTupleArgConstructor(((identifier: string), (identifier: string))) //eol1 // eol2 + +let y = callFunctionOneTuple(((identifier: string), (identifier: string))) //eol1 // eol2 + +let r = { + fieldOne: (identifier: string), //eol1 + fieldTwo: (identifier: string), // eol2 +} + +let r = { + fieldOne: (identifier: string), //eol1 + fieldTwo: (identifier: string), // eol2 with trailing comma +} + +// whitespace interleaving + +// comment1 +// comment2 + +// 
whitespace above & below + +let r = { + fieldOne: (identifier: string), //eol1 + // c1 + + // c2 + + // c3 + // c4 + + // c5 + fieldTwo: (identifier: string), // eol2 with trailing comma +} +// trailing + +// trailing whitespace above +// attach + +// last comment diff --git a/res_syntax/tests/conversion/reason/expected/destructiveSubstitutionSubModules.ml.txt b/res_syntax/tests/conversion/reason/expected/destructiveSubstitutionSubModules.ml.txt new file mode 100644 index 0000000000..81c1f50060 --- /dev/null +++ b/res_syntax/tests/conversion/reason/expected/destructiveSubstitutionSubModules.ml.txt @@ -0,0 +1,60 @@ +module type Id = { + type t + let toString: t => string + let ofString: string => option +} +module type A = { + module Id: Id + type name = string + let name: name +} +module type B = { + module A: A + let fullName: A.Id.t => string +} +module MakeB = (A: A): (B with module A.Id := A.Id) => { + module A = A + let fullName = id => A.name ++ ("-" ++ A.Id.toString(id)) +} +module StringId: Id = { + type t = string + external toString: t => string = "%identity" + external ofString: string => t = "%identity" + let ofString = id => Some(id |> ofString) +} +module A = { + module Id = StringId + type name = string + let name = "A" +} +module B = MakeB(A) +let test = switch "someId" |> StringId.ofString { +| Some(id) => Some(id |> B.fullName) +| None as none => none +} + +module type Printable = { + type t + let print: (Format.formatter, t) => unit +} + +module type Comparable = { + type t + let compare: (t, t) => int +} + +module type PrintableComparable = { + include Printable + include Comparable with type t := t +} + +module type S = Comparable with type t := int + +module type S = { + type u + include Comparable with type t := u +} + +module type ComparableInt = Comparable with type t = int + +module type CompareInt = ComparableInt with type t := int diff --git a/res_syntax/tests/conversion/reason/expected/docComments.ml.txt b/res_syntax/tests/conversion/reason/expected/docComments.ml.txt new file mode 100644 index 0000000000..7b31e47a32 --- /dev/null +++ b/res_syntax/tests/conversion/reason/expected/docComments.ml.txt @@ -0,0 +1,40 @@ +@@ocaml.text(" The first special comment of the file is the comment associated + to the whole module. ") + +@ocaml.doc(" The comment for function f ") +let f = (x, y) => x + y + +@@ocaml.text(" This comment is not attached to any element since there is another + special comment just before the next element. ") + +/* A simple comment. */ +@ocaml.doc(" Comment for exception My_exception, even with a simple comment + between the special comment and the exception.") +exception My_exception(int => int, int) + +@ocaml.doc(" Comment for type weather ") +type weather = + | @ocaml.doc(" The comment for constructor Rain ") Rain(int) + | @ocaml.doc(" The comment for constructor Sun ") Sun + +@ocaml.doc(" The comment for type my_record ") +type my_record = { + @ocaml.doc(" Comment for field foo ") + foo: int, + @ocaml.doc(" Comment for field bar ") + bar: string, +} + +@ocaml.doc(" The comment for module Foo ") +module Foo = { + @ocaml.doc(" The comment for x ") + @ocaml.doc(" A special comment in the class, but not associated to any element. ") + let x = 0 +} + +@ocaml.doc(" The comment for module type my_module_type. ") +module type my_module_type = { + /* Comment for value x. */ + let x: int + /* ... 
*/ +} diff --git a/res_syntax/tests/conversion/reason/expected/docComments.mli.txt b/res_syntax/tests/conversion/reason/expected/docComments.mli.txt new file mode 100644 index 0000000000..1ca939e048 --- /dev/null +++ b/res_syntax/tests/conversion/reason/expected/docComments.mli.txt @@ -0,0 +1,69 @@ +@@ocaml.text(" The first special comment of the file is the comment associated + with the whole module.") + +@@ocaml.text(" Special comments can be placed between elements and are kept + by the OCamldoc tool, but are not associated to any element. + @-tags in these comments are ignored.") + +@@ocaml.text(/* ***************************************************************** */ +" Comments like the one above, with more than two asterisks, + are ignored. ") + +@ocaml.doc(" The comment for function f. ") +@ocaml.doc(" The continuation of the comment for function f. ") +let f: (int, int) => int + +/* Hello, I'm a simple comment :-) */ +@ocaml.doc(" Comment for exception My_exception, even with a simple comment + between the special comment and the exception.") +exception My_exception(int => int, int) + +@ocaml.doc(" Comment for type weather ") +type weather = + | @ocaml.doc(" The comment for constructor Rain ") Rain(int) + | @ocaml.doc(" The comment for constructor Sun ") Sun + +@ocaml.doc(" Comment for type weather2 ") +@ocaml.doc(" I can continue the comment for type weather2 here + because there is already a comment associated to the last constructor.") +type weather2 = + | @ocaml.doc(" The comment for constructor Rain ") Rain(int) + | @ocaml.doc(" The comment for constructor Sun ") Sun + +@ocaml.doc(" The comment for type my_record ") +@ocaml.doc(" Continuation of comment for type my_record ") +type my_record = { + @ocaml.doc(" Comment for field foo ") + foo: int, + @ocaml.doc(" Comment for field bar ") + bar: string, +} + +@ocaml.doc(" Comment for foo ") @ocaml.doc(" This comment is associated to foo and not to bar. ") +let foo: string + +@ocaml.doc(" This comment is associated to foo and not to bar. ") +@ocaml.doc(" This comment is associated to bar. ") +let bar: string + +@ocaml.doc(" The comment for module Foo ") +module Foo: { + @ocaml.doc(" The comment for x ") + let x: int + + @@ocaml.text(" A special comment that is kept but not associated to any element ") +} + +@ocaml.doc(" The comment for module type my_module_type. ") +module type my_module_type = { + @ocaml.doc(" The comment for value x. ") + let x: int + + @ocaml.doc(" The comment for module M. ") + module M: { + @ocaml.doc(" The comment for value y. ") + let y: int + + /* ... 
*/ + } +} diff --git a/res_syntax/tests/conversion/reason/expected/docComments.res.txt b/res_syntax/tests/conversion/reason/expected/docComments.res.txt new file mode 100644 index 0000000000..26288308bf --- /dev/null +++ b/res_syntax/tests/conversion/reason/expected/docComments.res.txt @@ -0,0 +1,11 @@ +@ocaml.doc(" foo ") +let x = 1 + +@ocaml.doc("") +let x = 1 + +/* ** foo */ +let x = 1 + +/* **** foo */ +let x = 1 diff --git a/res_syntax/tests/conversion/reason/expected/extension.res.txt b/res_syntax/tests/conversion/reason/expected/extension.res.txt new file mode 100644 index 0000000000..0a6ff924a6 --- /dev/null +++ b/res_syntax/tests/conversion/reason/expected/extension.res.txt @@ -0,0 +1,10 @@ +// here +%%raw(` eval( +__gc, +1, +0 +) + `) + +let x = %raw("10") +let y = %raw("20") diff --git a/res_syntax/tests/conversion/reason/expected/fastPipe.res.txt b/res_syntax/tests/conversion/reason/expected/fastPipe.res.txt new file mode 100644 index 0000000000..a7007b68b5 --- /dev/null +++ b/res_syntax/tests/conversion/reason/expected/fastPipe.res.txt @@ -0,0 +1,24 @@ +a->f(b, c)->g(d, e) + +Element.querySelectorAll(selector, element) +->NodeList.toArray +->Array.keepMap(Element.ofNode) +->Array.getBy(node => node->Element.textContent === content) + +let x = @attr ((@attr2 a)->f(b)->c(d)) + +5->doStuff(3, _, 7) + +(event->target)["value"] + +(Route.urlToRoute(url)->ChangeView->self).send +Route.urlToRoute(url)->ChangeView->self.send + +let aggregateTotal = (forecast, ~audienceType) => + Js.Nullable.toOption(forecast["audiences"]) + ->Option.flatMap(item => Js.Dict.get(item, audienceType)) + ->Option.map(item => { + pages: item["reach"]["pages"], + views: item["reach"]["views"], + sample: item["reach"]["sample"], + }) diff --git a/res_syntax/tests/conversion/reason/expected/gentype.res.txt b/res_syntax/tests/conversion/reason/expected/gentype.res.txt new file mode 100644 index 0000000000..0ce4deb1cd --- /dev/null +++ b/res_syntax/tests/conversion/reason/expected/gentype.res.txt @@ -0,0 +1,28 @@ +module M: { + @genType @after + type t + + @genType @after + let x: int + + @foo + type e = .. +} = { + type t + let x = 34 + type e = .. +} + +module type MT = { + @genType @after + type t + + @genType @after + let x: int + + @foo + type e = .. 
+} + +@genType("ddd") +let x = 42 diff --git a/res_syntax/tests/conversion/reason/expected/gentype.resi.txt b/res_syntax/tests/conversion/reason/expected/gentype.resi.txt new file mode 100644 index 0000000000..90cc39f79a --- /dev/null +++ b/res_syntax/tests/conversion/reason/expected/gentype.resi.txt @@ -0,0 +1,2 @@ +@genType +let x: int diff --git a/res_syntax/tests/conversion/reason/expected/jsObject.res.txt b/res_syntax/tests/conversion/reason/expected/jsObject.res.txt new file mode 100644 index 0000000000..0c591da835 --- /dev/null +++ b/res_syntax/tests/conversion/reason/expected/jsObject.res.txt @@ -0,0 +1,26 @@ +let component = props["Component"] + +let element = props["element"] + +let y = {"age": 30} +let y = {"age": 30, "name": "steve"} + +type propField<'a> = {.} +type propField<'a> = {..} as 'a +type propField<'a> = {..} as 'a +type propField<'a> = Js.nullable<{..} as 'a> + +type propField<'a> = {"a": b} +type propField<'a> = {.."a": b} +type propField<'a> = {"a": {"b": c}} + +user["address"] +user["address"]["street"] +user["address"]["street"]["log"] + +user["address"] = "Avenue 1" +user["address"]["street"] = "Avenue" +user["address"]["street"]["number"] = "1" + +school["print"](direction["name"], studentHead["name"]) +city["getSchool"]()["print"](direction["name"], studentHead["name"]) diff --git a/res_syntax/tests/conversion/reason/expected/jsObject.resi.txt b/res_syntax/tests/conversion/reason/expected/jsObject.resi.txt new file mode 100644 index 0000000000..f9ca82b776 --- /dev/null +++ b/res_syntax/tests/conversion/reason/expected/jsObject.resi.txt @@ -0,0 +1,8 @@ +type propField<'a> = {.} +type propField<'a> = {..} as 'a +type propField<'a> = {..} as 'a +type propField<'a> = Js.nullable<{..} as 'a> + +type propField<'a> = {"a": b} +type propField<'a> = {.."a": b} +type propField<'a> = {"a": {"b": c}} diff --git a/res_syntax/tests/conversion/reason/expected/jsxProps.res.txt b/res_syntax/tests/conversion/reason/expected/jsxProps.res.txt new file mode 100644 index 0000000000..d368bc9529 --- /dev/null +++ b/res_syntax/tests/conversion/reason/expected/jsxProps.res.txt @@ -0,0 +1,17 @@ +let handleClick = (href, event) => + if !ReactEvent.Mouse.defaultPrevented(event) { + ReactEvent.Mouse.preventDefault(event) + ReasonReact.Router.push(href) + } + +@react.component +let make = (~href, ~className="", ~children) => + handleClick(href, event)}> children + + ...{x =>
} + +
...element
+
...{a => 1}
+
...
+
...[a, b]
+
...{(1, 2)}
diff --git a/res_syntax/tests/conversion/reason/expected/letBinding.res.txt b/res_syntax/tests/conversion/reason/expected/letBinding.res.txt new file mode 100644 index 0000000000..525c159127 --- /dev/null +++ b/res_syntax/tests/conversion/reason/expected/letBinding.res.txt @@ -0,0 +1,14 @@ +let deltaMode: t => Webapi__Dom__Types.deltaMode = self => + Webapi__Dom__Types.decodeDeltaMode(deltaMode(self)) + +let fromJs: ResourceIo.campaignWeeklyPlanning => t = weeklyPlanning => ( + weeklyPlanning["monday"]->dayFromJs, + weeklyPlanning["tuesday"]->dayFromJs, + weeklyPlanning["wednesday"]->dayFromJs, + weeklyPlanning["thursday"]->dayFromJs, + weeklyPlanning["friday"]->dayFromJs, + weeklyPlanning["saturday"]->dayFromJs, + weeklyPlanning["sunday"]->dayFromJs, +) + +let newChapter: Video.chapter = {startTime: percent *. duration} diff --git a/res_syntax/tests/conversion/reason/expected/letprivate.res.txt b/res_syntax/tests/conversion/reason/expected/letprivate.res.txt new file mode 100644 index 0000000000..d8b86ef1af --- /dev/null +++ b/res_syntax/tests/conversion/reason/expected/letprivate.res.txt @@ -0,0 +1 @@ +%%private(let x = 34) diff --git a/res_syntax/tests/conversion/reason/expected/modType.res.txt b/res_syntax/tests/conversion/reason/expected/modType.res.txt new file mode 100644 index 0000000000..260265ccf4 --- /dev/null +++ b/res_syntax/tests/conversion/reason/expected/modType.res.txt @@ -0,0 +1,15 @@ +module Same = (Na: N, Nb: N): ((S with type number1 = Na.number) with type number2 = Nb.number) => { + type number1 = Na.number + type number2 = Nb.number + let rec sim = ((n, m)) => + if Na.is_zero(n) { + Nb.is_zero(m) + } else { + sim((Na.pred(n), Nb.pred(m))) + } + let similar = ((n, m)) => + try sim((n, m)) catch { + | Na.Too_small => false + | Nb.Too_small => false + } +} diff --git a/res_syntax/tests/conversion/reason/expected/moduleLanguage.res.txt b/res_syntax/tests/conversion/reason/expected/moduleLanguage.res.txt new file mode 100644 index 0000000000..724dfc68fd --- /dev/null +++ b/res_syntax/tests/conversion/reason/expected/moduleLanguage.res.txt @@ -0,0 +1 @@ +let someFunctorAsFunction = (x: module(MT)): module(ResT) => module(SomeFunctor(unpack(x))) diff --git a/res_syntax/tests/conversion/reason/expected/namedArgs.res.txt b/res_syntax/tests/conversion/reason/expected/namedArgs.res.txt new file mode 100644 index 0000000000..eb0d820201 --- /dev/null +++ b/res_syntax/tests/conversion/reason/expected/namedArgs.res.txt @@ -0,0 +1,63 @@ +let wizard = Wizard.make( + ~spriteSheet=wizard, + ~hp=999999999999999, + ~mp=50, + //~coordinates={x: 0., y:0. 
z: 0.}, + ~coordinates={x: 40, y: 100., z: 0.}, + // /* c0 */ ~gpuCoordinates= /* c1 */ gpuBuffer[10] /* c2 */, // trailing +) + +apply( + // above + ~aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, + // below + ~bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb, + // here + ~cccccccccccccccccccccccccccccccc, +) + +applyOptional( + // above + ~aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa?, + // below + ~bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb?, + // here + ~cccccccccccccccccccccccccccccccc?, +) + +foo( + // c0 + ~aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa: int, + // c1 + ~bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb: int, + // c2 + ~cccccccccccccccccccccccccccccc: int, +) + +let f = ( + ~isItemActive=?, + // array((name, href)) + ~headers: array<(string, string)>, + ~moduleName: string, + // foo + ~x, + // above + /* c0 */ ~d: /* c1 */ e, // end + ~from as // does it work + hometown, +) => { + let a = 1 + let b = 2 + a + b +} + +@react.component +let make = ( + ~theme: ColorTheme.t, + ~components: Mdx.Components.t, + ~sidebarState: (bool, (bool => bool) => unit), + // (Sidebar, toggleSidebar) ... for toggling sidebar in mobile view + ~sidebar: React.element, + ~breadcrumbs: option>=?, + ~children, +) => () diff --git a/res_syntax/tests/conversion/reason/expected/object.ml.txt b/res_syntax/tests/conversion/reason/expected/object.ml.txt new file mode 100644 index 0000000000..3afee67da6 --- /dev/null +++ b/res_syntax/tests/conversion/reason/expected/object.ml.txt @@ -0,0 +1,4 @@ +type hi = {"z": int} +type u<'a> = {.. ...hi, "x": int, "y": int} as 'a +type u1<'a> = {.. ...hi} as 'a +type u2<'a> = {.. ...hi, ...hi, "y": int, ...hi} as 'a diff --git a/res_syntax/tests/conversion/reason/expected/openPattern.res.txt b/res_syntax/tests/conversion/reason/expected/openPattern.res.txt new file mode 100644 index 0000000000..28558275a7 --- /dev/null +++ b/res_syntax/tests/conversion/reason/expected/openPattern.res.txt @@ -0,0 +1,23 @@ +let {T.a: a} = a() +let [Color.Blue] = a() +let list{Color.Blue} = a() +let (Color.Blue, Red) = a() + +let Color.Blue = blue + +module Color = { + type t = Red | Blue | Green + let red = Red + let blue = Blue + let green = Green +} + +let () = switch (Color.red, Color.blue, Color.green) { +| (Color.Red, Blue, Green) => Js.log("hello world") +| _ => () +} + +let () = switch [Color.red, Color.blue, Color.green] { +| [Color.Red, Blue, Green] => Js.log("hello world") +| _ => () +} diff --git a/res_syntax/tests/conversion/reason/expected/ppx.res.txt b/res_syntax/tests/conversion/reason/expected/ppx.res.txt new file mode 100644 index 0000000000..3f0dda10e0 --- /dev/null +++ b/res_syntax/tests/conversion/reason/expected/ppx.res.txt @@ -0,0 +1,55 @@ +%graphql( + ` + query Site { + site { + siteMetadata { + title + description + siteUrl + } + } + } +` + {taggedTemplate: false} +) + +module Form = %form( + type input = { + name: string, + email: string, + message: string, + @as("form-name") + formName: string, + } + type output = input + let validators = { + name: { + strategy: OnFirstBlur, + validate: ({name, _}) => + switch name { + | "" => Error("Name is required.") + | name => Ok(name) + }, + }, + email: { + strategy: OnFirstBlur, + validate: ({email, _}) => + switch email { + | "" => Error("Email is required.") + | email => Ok(email) + }, + }, + message: { + strategy: OnFirstBlur, + validate: ({message, _}) => + switch message { + | "" => Error("Message is required.") + | message => Ok(message) + }, + }, + formName: { + strategy: OnSubmit, + validate: ({formName, _}) => Ok(formName), + }, + } +) diff --git 
a/res_syntax/tests/conversion/reason/expected/recursiveType.ml.txt b/res_syntax/tests/conversion/reason/expected/recursiveType.ml.txt new file mode 100644 index 0000000000..b77962fb8e --- /dev/null +++ b/res_syntax/tests/conversion/reason/expected/recursiveType.ml.txt @@ -0,0 +1,3 @@ +type rec tree = {"label": string, "left": option, "right": option} + +type t = t diff --git a/res_syntax/tests/conversion/reason/expected/refSugar.ml.txt b/res_syntax/tests/conversion/reason/expected/refSugar.ml.txt new file mode 100644 index 0000000000..96255e4a4c --- /dev/null +++ b/res_syntax/tests/conversion/reason/expected/refSugar.ml.txt @@ -0,0 +1 @@ +let x = foo.contents diff --git a/res_syntax/tests/conversion/reason/expected/refSugarReason.res.txt b/res_syntax/tests/conversion/reason/expected/refSugarReason.res.txt new file mode 100644 index 0000000000..96255e4a4c --- /dev/null +++ b/res_syntax/tests/conversion/reason/expected/refSugarReason.res.txt @@ -0,0 +1 @@ +let x = foo.contents diff --git a/res_syntax/tests/conversion/reason/expected/singleLineComments.res.txt b/res_syntax/tests/conversion/reason/expected/singleLineComments.res.txt new file mode 100644 index 0000000000..c5a75c2970 --- /dev/null +++ b/res_syntax/tests/conversion/reason/expected/singleLineComments.res.txt @@ -0,0 +1,18 @@ +// This is the implementation of the _app.js file + +// Resources: +// -------------- +// Really good article on state persistence within layouts: +// https://adamwathan.me/2019/10/17/persistent-layout-patterns-in-nextjs/ + +/* + a + + */ + +/* + a + +*/ +let x = 1 +// here diff --git a/res_syntax/tests/conversion/reason/expected/string.res.txt b/res_syntax/tests/conversion/reason/expected/string.res.txt new file mode 100644 index 0000000000..a85d0bbc5b --- /dev/null +++ b/res_syntax/tests/conversion/reason/expected/string.res.txt @@ -0,0 +1,36 @@ +%%raw("define(x.y, 'userAgent', {value: 'USER_AGENT_STRING'})") + +%%raw("define(x.y, 'userAgent', {value: 'USER_AGENT_STRING'})") + +let x = `This is a long string with a slash and line break \\ +carriage return` + +let x = "\"" +let y = "\n" + +(<> {"\n"->React.string} ) + +// The `//` should not result into an extra comment +let x = j`https://www.apple.com` +let x = `https://www.apple.com` +let x = `https://www.apple.com` +let x = `https://www.apple.com` +let x = sql`https://www.apple.com` + +// /* */ should not result in an extra comments +let x = j`/* https://www.apple.com */` +let x = `/* https://www.apple.com*/` +let x = `/*https://www.apple.com*/` +let x = `/*https://www.apple.com*/` +let x = sql`/*https://www.apple.com*/` + +let x = `\`https://\${appleWebsite}\`` + +let var1 = "three" +let var2 = "a string" + +switch (var1, var2) { +| (`3`, `a string`) => Js.log("worked") +| (` test with \` \${here} \``, _) => Js.log("escapes ` and ${") +| _ => Js.log("didn't match") +} diff --git a/res_syntax/tests/conversion/reason/expected/ternary.res.txt b/res_syntax/tests/conversion/reason/expected/ternary.res.txt new file mode 100644 index 0000000000..3c90de0851 --- /dev/null +++ b/res_syntax/tests/conversion/reason/expected/ternary.res.txt @@ -0,0 +1 @@ +let a = x ? 1 : 2 diff --git a/res_syntax/tests/conversion/reason/expected/uncurrried.res.txt b/res_syntax/tests/conversion/reason/expected/uncurrried.res.txt new file mode 100644 index 0000000000..48283a4128 --- /dev/null +++ b/res_syntax/tests/conversion/reason/expected/uncurrried.res.txt @@ -0,0 +1,64 @@ +// ok +let updateBriefletNarrative = (. 
updateObj) => Js.log("patented merge algorithm goes here") + +// this is a bug in Reason, the . will be parsed wrong and disappear. +/* updateBriefletNarrative(. briefletNarrativeUpdateObj); */ + +// this is a bug in Reason, the . will be parsed wrong and disappear. +/* foo(. 3); */ + +module D = { + // this is a bug in Reason, the . will be parsed wrong and disappear. + /* foo(. 3); */ +} + +// ok +let x = foo(. 3) + +let x = { + let a = 3 + // ok + foo(. a) +} + +let x = { + // ok + let f = (. a, b) => apply(. a + b) + let a = 3 + // ok + foo(. a) + // ok + f(. 2, 2) +} + +// ok +let () = switch something(. x, y) { +| None => + // ok + log(. a, b) +| Some(_) => + let a = 1 + // ok + log(. a, 2) +} + +let () = { + // ok + let dontDoThisAhome = (. a, b) => (. c, d) => (. e, f) => a + b + c + d + e + f + // ok + dontDoThisAhome(. a, b)(. c, d)(. e, f) +} + +let _ = library.getBalance(. account)->Promise.Js.catch(_ => Promise.resolved(None)) + +let _ = + library.getBalance(. account) + ->Promise.Js.catch(_ => Promise.resolved(None)) + ->Promise.get(newBalance => + dispatch( + LoadAddress( + account, + newBalance->Belt.Option.flatMap(balance => Eth.make(balance.toString(.))), + ), + ) + ) diff --git a/res_syntax/tests/conversion/reason/expected/underscoreSugar.res.txt b/res_syntax/tests/conversion/reason/expected/underscoreSugar.res.txt new file mode 100644 index 0000000000..9330f5c835 --- /dev/null +++ b/res_syntax/tests/conversion/reason/expected/underscoreSugar.res.txt @@ -0,0 +1 @@ +let photo = pricedRoom["room"]["photos"] |> filterNone |> Array.get(_, 0) diff --git a/res_syntax/tests/conversion/reason/expected/unicode.res.txt b/res_syntax/tests/conversion/reason/expected/unicode.res.txt new file mode 100644 index 0000000000..332b911389 --- /dev/null +++ b/res_syntax/tests/conversion/reason/expected/unicode.res.txt @@ -0,0 +1,5 @@ +let x = "✅ foo bar" + +let x = "\n okokok" + +let z = "\t \b \n okok 🙈" diff --git a/res_syntax/tests/conversion/reason/expected/variant.res.txt b/res_syntax/tests/conversion/reason/expected/variant.res.txt new file mode 100644 index 0000000000..2864a7d676 --- /dev/null +++ b/res_syntax/tests/conversion/reason/expected/variant.res.txt @@ -0,0 +1 @@ +type t = | @foo X diff --git a/res_syntax/tests/conversion/reason/extension.res b/res_syntax/tests/conversion/reason/extension.res new file mode 100644 index 0000000000..0a6ff924a6 --- /dev/null +++ b/res_syntax/tests/conversion/reason/extension.res @@ -0,0 +1,10 @@ +// here +%%raw(` eval( +__gc, +1, +0 +) + `) + +let x = %raw("10") +let y = %raw("20") diff --git a/res_syntax/tests/conversion/reason/fastPipe.res b/res_syntax/tests/conversion/reason/fastPipe.res new file mode 100644 index 0000000000..c758fccd15 --- /dev/null +++ b/res_syntax/tests/conversion/reason/fastPipe.res @@ -0,0 +1,24 @@ +a->f(b, c)->g(d, e) + +Element.querySelectorAll(selector, element) +->NodeList.toArray +->Array.keepMap(Element.ofNode) +->Array.getBy(node => node->Element.textContent === content) + +let x = @attr (@attr2 a->f(b)->c(d)) + +5->doStuff(3, _, 7) + +(event->target)["value"] + +(Route.urlToRoute(url)->ChangeView->self).send +Route.urlToRoute(url)->ChangeView->self.send + +let aggregateTotal = (forecast, ~audienceType) => + Js.Nullable.toOption(forecast["audiences"]) + ->Option.flatMap(item => Js.Dict.get(item, audienceType)) + ->Option.map(item => { + pages: item["reach"]["pages"], + views: item["reach"]["views"], + sample: item["reach"]["sample"], + }) diff --git a/res_syntax/tests/conversion/reason/gentype.res 
b/res_syntax/tests/conversion/reason/gentype.res new file mode 100644 index 0000000000..0ce4deb1cd --- /dev/null +++ b/res_syntax/tests/conversion/reason/gentype.res @@ -0,0 +1,28 @@ +module M: { + @genType @after + type t + + @genType @after + let x: int + + @foo + type e = .. +} = { + type t + let x = 34 + type e = .. +} + +module type MT = { + @genType @after + type t + + @genType @after + let x: int + + @foo + type e = .. +} + +@genType("ddd") +let x = 42 diff --git a/res_syntax/tests/conversion/reason/gentype.resi b/res_syntax/tests/conversion/reason/gentype.resi new file mode 100644 index 0000000000..90cc39f79a --- /dev/null +++ b/res_syntax/tests/conversion/reason/gentype.resi @@ -0,0 +1,2 @@ +@genType +let x: int diff --git a/res_syntax/tests/conversion/reason/jsObject.res b/res_syntax/tests/conversion/reason/jsObject.res new file mode 100644 index 0000000000..0c591da835 --- /dev/null +++ b/res_syntax/tests/conversion/reason/jsObject.res @@ -0,0 +1,26 @@ +let component = props["Component"] + +let element = props["element"] + +let y = {"age": 30} +let y = {"age": 30, "name": "steve"} + +type propField<'a> = {.} +type propField<'a> = {..} as 'a +type propField<'a> = {..} as 'a +type propField<'a> = Js.nullable<{..} as 'a> + +type propField<'a> = {"a": b} +type propField<'a> = {.."a": b} +type propField<'a> = {"a": {"b": c}} + +user["address"] +user["address"]["street"] +user["address"]["street"]["log"] + +user["address"] = "Avenue 1" +user["address"]["street"] = "Avenue" +user["address"]["street"]["number"] = "1" + +school["print"](direction["name"], studentHead["name"]) +city["getSchool"]()["print"](direction["name"], studentHead["name"]) diff --git a/res_syntax/tests/conversion/reason/jsObject.resi b/res_syntax/tests/conversion/reason/jsObject.resi new file mode 100644 index 0000000000..f9ca82b776 --- /dev/null +++ b/res_syntax/tests/conversion/reason/jsObject.resi @@ -0,0 +1,8 @@ +type propField<'a> = {.} +type propField<'a> = {..} as 'a +type propField<'a> = {..} as 'a +type propField<'a> = Js.nullable<{..} as 'a> + +type propField<'a> = {"a": b} +type propField<'a> = {.."a": b} +type propField<'a> = {"a": {"b": c}} diff --git a/res_syntax/tests/conversion/reason/jsxProps.res b/res_syntax/tests/conversion/reason/jsxProps.res new file mode 100644 index 0000000000..d368bc9529 --- /dev/null +++ b/res_syntax/tests/conversion/reason/jsxProps.res @@ -0,0 +1,17 @@ +let handleClick = (href, event) => + if !ReactEvent.Mouse.defaultPrevented(event) { + ReactEvent.Mouse.preventDefault(event) + ReasonReact.Router.push(href) + } + +@react.component +let make = (~href, ~className="", ~children) => + handleClick(href, event)}> children + + ...{x =>
} + +
...element
+
...{a => 1}
+
...
+
...[a, b]
+
...{(1, 2)}
diff --git a/res_syntax/tests/conversion/reason/letBinding.res b/res_syntax/tests/conversion/reason/letBinding.res new file mode 100644 index 0000000000..525c159127 --- /dev/null +++ b/res_syntax/tests/conversion/reason/letBinding.res @@ -0,0 +1,14 @@ +let deltaMode: t => Webapi__Dom__Types.deltaMode = self => + Webapi__Dom__Types.decodeDeltaMode(deltaMode(self)) + +let fromJs: ResourceIo.campaignWeeklyPlanning => t = weeklyPlanning => ( + weeklyPlanning["monday"]->dayFromJs, + weeklyPlanning["tuesday"]->dayFromJs, + weeklyPlanning["wednesday"]->dayFromJs, + weeklyPlanning["thursday"]->dayFromJs, + weeklyPlanning["friday"]->dayFromJs, + weeklyPlanning["saturday"]->dayFromJs, + weeklyPlanning["sunday"]->dayFromJs, +) + +let newChapter: Video.chapter = {startTime: percent *. duration} diff --git a/res_syntax/tests/conversion/reason/letprivate.res b/res_syntax/tests/conversion/reason/letprivate.res new file mode 100644 index 0000000000..d8b86ef1af --- /dev/null +++ b/res_syntax/tests/conversion/reason/letprivate.res @@ -0,0 +1 @@ +%%private(let x = 34) diff --git a/res_syntax/tests/conversion/reason/modType.res b/res_syntax/tests/conversion/reason/modType.res new file mode 100644 index 0000000000..260265ccf4 --- /dev/null +++ b/res_syntax/tests/conversion/reason/modType.res @@ -0,0 +1,15 @@ +module Same = (Na: N, Nb: N): ((S with type number1 = Na.number) with type number2 = Nb.number) => { + type number1 = Na.number + type number2 = Nb.number + let rec sim = ((n, m)) => + if Na.is_zero(n) { + Nb.is_zero(m) + } else { + sim((Na.pred(n), Nb.pred(m))) + } + let similar = ((n, m)) => + try sim((n, m)) catch { + | Na.Too_small => false + | Nb.Too_small => false + } +} diff --git a/res_syntax/tests/conversion/reason/moduleLanguage.res b/res_syntax/tests/conversion/reason/moduleLanguage.res new file mode 100644 index 0000000000..724dfc68fd --- /dev/null +++ b/res_syntax/tests/conversion/reason/moduleLanguage.res @@ -0,0 +1 @@ +let someFunctorAsFunction = (x: module(MT)): module(ResT) => module(SomeFunctor(unpack(x))) diff --git a/res_syntax/tests/conversion/reason/namedArgs.res b/res_syntax/tests/conversion/reason/namedArgs.res new file mode 100644 index 0000000000..eb0d820201 --- /dev/null +++ b/res_syntax/tests/conversion/reason/namedArgs.res @@ -0,0 +1,63 @@ +let wizard = Wizard.make( + ~spriteSheet=wizard, + ~hp=999999999999999, + ~mp=50, + //~coordinates={x: 0., y:0. z: 0.}, + ~coordinates={x: 40, y: 100., z: 0.}, + // /* c0 */ ~gpuCoordinates= /* c1 */ gpuBuffer[10] /* c2 */, // trailing +) + +apply( + // above + ~aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, + // below + ~bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb, + // here + ~cccccccccccccccccccccccccccccccc, +) + +applyOptional( + // above + ~aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa?, + // below + ~bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb?, + // here + ~cccccccccccccccccccccccccccccccc?, +) + +foo( + // c0 + ~aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa: int, + // c1 + ~bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb: int, + // c2 + ~cccccccccccccccccccccccccccccc: int, +) + +let f = ( + ~isItemActive=?, + // array((name, href)) + ~headers: array<(string, string)>, + ~moduleName: string, + // foo + ~x, + // above + /* c0 */ ~d: /* c1 */ e, // end + ~from as // does it work + hometown, +) => { + let a = 1 + let b = 2 + a + b +} + +@react.component +let make = ( + ~theme: ColorTheme.t, + ~components: Mdx.Components.t, + ~sidebarState: (bool, (bool => bool) => unit), + // (Sidebar, toggleSidebar) ... 
for toggling sidebar in mobile view + ~sidebar: React.element, + ~breadcrumbs: option>=?, + ~children, +) => () diff --git a/res_syntax/tests/conversion/reason/object.ml b/res_syntax/tests/conversion/reason/object.ml new file mode 100644 index 0000000000..62099596f6 --- /dev/null +++ b/res_syntax/tests/conversion/reason/object.ml @@ -0,0 +1,4 @@ +type hi = < z : int > +type 'a u = < hi ; x : int ; y : int; .. > as 'a +type 'a u1 = < hi; .. > as 'a +type 'a u2 = < hi ; hi; y : int ; hi; .. > as 'a diff --git a/res_syntax/tests/conversion/reason/openPattern.res b/res_syntax/tests/conversion/reason/openPattern.res new file mode 100644 index 0000000000..28558275a7 --- /dev/null +++ b/res_syntax/tests/conversion/reason/openPattern.res @@ -0,0 +1,23 @@ +let {T.a: a} = a() +let [Color.Blue] = a() +let list{Color.Blue} = a() +let (Color.Blue, Red) = a() + +let Color.Blue = blue + +module Color = { + type t = Red | Blue | Green + let red = Red + let blue = Blue + let green = Green +} + +let () = switch (Color.red, Color.blue, Color.green) { +| (Color.Red, Blue, Green) => Js.log("hello world") +| _ => () +} + +let () = switch [Color.red, Color.blue, Color.green] { +| [Color.Red, Blue, Green] => Js.log("hello world") +| _ => () +} diff --git a/res_syntax/tests/conversion/reason/ppx.res b/res_syntax/tests/conversion/reason/ppx.res new file mode 100644 index 0000000000..3f0dda10e0 --- /dev/null +++ b/res_syntax/tests/conversion/reason/ppx.res @@ -0,0 +1,55 @@ +%graphql( + ` + query Site { + site { + siteMetadata { + title + description + siteUrl + } + } + } +` + {taggedTemplate: false} +) + +module Form = %form( + type input = { + name: string, + email: string, + message: string, + @as("form-name") + formName: string, + } + type output = input + let validators = { + name: { + strategy: OnFirstBlur, + validate: ({name, _}) => + switch name { + | "" => Error("Name is required.") + | name => Ok(name) + }, + }, + email: { + strategy: OnFirstBlur, + validate: ({email, _}) => + switch email { + | "" => Error("Email is required.") + | email => Ok(email) + }, + }, + message: { + strategy: OnFirstBlur, + validate: ({message, _}) => + switch message { + | "" => Error("Message is required.") + | message => Ok(message) + }, + }, + formName: { + strategy: OnSubmit, + validate: ({formName, _}) => Ok(formName), + }, + } +) diff --git a/res_syntax/tests/conversion/reason/recursiveType.ml b/res_syntax/tests/conversion/reason/recursiveType.ml new file mode 100644 index 0000000000..6a378eea08 --- /dev/null +++ b/res_syntax/tests/conversion/reason/recursiveType.ml @@ -0,0 +1,4 @@ +type tree = + < label: string ;left: tree option ;right: tree option > Js.t + +type nonrec t = t diff --git a/res_syntax/tests/conversion/reason/refSugar.ml b/res_syntax/tests/conversion/reason/refSugar.ml new file mode 100644 index 0000000000..6aa9cea6bd --- /dev/null +++ b/res_syntax/tests/conversion/reason/refSugar.ml @@ -0,0 +1 @@ +let x = !foo diff --git a/res_syntax/tests/conversion/reason/refSugarReason.res b/res_syntax/tests/conversion/reason/refSugarReason.res new file mode 100644 index 0000000000..96255e4a4c --- /dev/null +++ b/res_syntax/tests/conversion/reason/refSugarReason.res @@ -0,0 +1 @@ +let x = foo.contents diff --git a/res_syntax/tests/conversion/reason/singleLineComments.res b/res_syntax/tests/conversion/reason/singleLineComments.res new file mode 100644 index 0000000000..c5a75c2970 --- /dev/null +++ b/res_syntax/tests/conversion/reason/singleLineComments.res @@ -0,0 +1,18 @@ +// This is the implementation of the _app.js 
file + +// Resources: +// -------------- +// Really good article on state persistence within layouts: +// https://adamwathan.me/2019/10/17/persistent-layout-patterns-in-nextjs/ + +/* + a + + */ + +/* + a + +*/ +let x = 1 +// here diff --git a/res_syntax/tests/conversion/reason/string.res b/res_syntax/tests/conversion/reason/string.res new file mode 100644 index 0000000000..a85d0bbc5b --- /dev/null +++ b/res_syntax/tests/conversion/reason/string.res @@ -0,0 +1,36 @@ +%%raw("define(x.y, 'userAgent', {value: 'USER_AGENT_STRING'})") + +%%raw("define(x.y, 'userAgent', {value: 'USER_AGENT_STRING'})") + +let x = `This is a long string with a slash and line break \\ +carriage return` + +let x = "\"" +let y = "\n" + +(<> {"\n"->React.string} ) + +// The `//` should not result into an extra comment +let x = j`https://www.apple.com` +let x = `https://www.apple.com` +let x = `https://www.apple.com` +let x = `https://www.apple.com` +let x = sql`https://www.apple.com` + +// /* */ should not result in an extra comments +let x = j`/* https://www.apple.com */` +let x = `/* https://www.apple.com*/` +let x = `/*https://www.apple.com*/` +let x = `/*https://www.apple.com*/` +let x = sql`/*https://www.apple.com*/` + +let x = `\`https://\${appleWebsite}\`` + +let var1 = "three" +let var2 = "a string" + +switch (var1, var2) { +| (`3`, `a string`) => Js.log("worked") +| (` test with \` \${here} \``, _) => Js.log("escapes ` and ${") +| _ => Js.log("didn't match") +} diff --git a/res_syntax/tests/conversion/reason/ternary.res b/res_syntax/tests/conversion/reason/ternary.res new file mode 100644 index 0000000000..3c90de0851 --- /dev/null +++ b/res_syntax/tests/conversion/reason/ternary.res @@ -0,0 +1 @@ +let a = x ? 1 : 2 diff --git a/res_syntax/tests/conversion/reason/uncurrried.res b/res_syntax/tests/conversion/reason/uncurrried.res new file mode 100644 index 0000000000..b273f54a70 --- /dev/null +++ b/res_syntax/tests/conversion/reason/uncurrried.res @@ -0,0 +1,64 @@ +// ok +let updateBriefletNarrative = (. updateObj) => Js.log("patented merge algorithm goes here") + +// this is a bug in Reason, the . will be parsed wrong and disappear. +/* updateBriefletNarrative(. briefletNarrativeUpdateObj); */ + +// this is a bug in Reason, the . will be parsed wrong and disappear. +/* foo(. 3); */ + +module D = { + // this is a bug in Reason, the . will be parsed wrong and disappear. + /* foo(. 3); */ +} + +// ok +let x = foo(. 3) + +let x = { + let a = 3 + // ok + foo(. a) +} + +let x = { + // ok + let f = (. a, b) => apply(. a + b) + let a = 3 + // ok + foo(. a) + // ok + f(. 2, 2) +} + +// ok +let () = switch something(. x, y) { +| None => + // ok + log(. a, b) +| Some(_) => + let a = 1 + // ok + log(. a, 2) +} + +let () = { + // ok + let dontDoThisAhome = (. a, b, . c, d, . e, f) => a + b + c + d + e + f + // ok + dontDoThisAhome(. a, b)(. c, d)(. e, f) +} + +let _ = library.getBalance(. account)->Promise.Js.catch(_ => Promise.resolved(None)) + +let _ = + library.getBalance(. 
account) + ->Promise.Js.catch(_ => Promise.resolved(None)) + ->Promise.get(newBalance => + dispatch( + LoadAddress( + account, + newBalance->Belt.Option.flatMap(balance => Eth.make(balance.toString(.))), + ), + ) + ) diff --git a/res_syntax/tests/conversion/reason/underscoreSugar.res b/res_syntax/tests/conversion/reason/underscoreSugar.res new file mode 100644 index 0000000000..9330f5c835 --- /dev/null +++ b/res_syntax/tests/conversion/reason/underscoreSugar.res @@ -0,0 +1 @@ +let photo = pricedRoom["room"]["photos"] |> filterNone |> Array.get(_, 0) diff --git a/res_syntax/tests/conversion/reason/unicode.res b/res_syntax/tests/conversion/reason/unicode.res new file mode 100644 index 0000000000..332b911389 --- /dev/null +++ b/res_syntax/tests/conversion/reason/unicode.res @@ -0,0 +1,5 @@ +let x = "✅ foo bar" + +let x = "\n okokok" + +let z = "\t \b \n okok 🙈" diff --git a/res_syntax/tests/conversion/reason/variant.res b/res_syntax/tests/conversion/reason/variant.res new file mode 100644 index 0000000000..2864a7d676 --- /dev/null +++ b/res_syntax/tests/conversion/reason/variant.res @@ -0,0 +1 @@ +type t = | @foo X diff --git a/res_syntax/tests/idempotency/bs-css/Css.res b/res_syntax/tests/idempotency/bs-css/Css.res new file mode 100644 index 0000000000..492b63b0e1 --- /dev/null +++ b/res_syntax/tests/idempotency/bs-css/Css.res @@ -0,0 +1,16 @@ +include Css_Legacy_Core +include Css_Colors + +include Css_Legacy_Core.Make({ + exception NotImplemented + + let make = (. _) => raise(NotImplemented) + let mergeStyles = (. _) => raise(NotImplemented) + let injectRule = (. _) => () + let injectRaw = (. _) => () + let makeKeyFrames = (. _) => raise(NotImplemented) +}) + +external unsafeJsonToStyles: Js.Json.t => ReactDOMRe.Style.t = "%identity" + +let style = rules => rules->toJson->unsafeJsonToStyles diff --git a/res_syntax/tests/idempotency/bs-css/CssEmotion.res b/res_syntax/tests/idempotency/bs-css/CssEmotion.res new file mode 100644 index 0000000000..97b2eed304 --- /dev/null +++ b/res_syntax/tests/idempotency/bs-css/CssEmotion.res @@ -0,0 +1,35 @@ +include Css_Legacy_Core +include Css_Colors + +include Css_Legacy_Core.Make({ + @module("emotion") + external mergeStyles: (. array) => string = "cx" + + @module("emotion") external make: (. Js.Json.t) => string = "css" + + @module("emotion") + external injectRule: (. Js.Json.t) => unit = "injectGlobal" + + @module("emotion") + external injectRaw: (. string) => unit = "injectGlobal" + + @module("emotion") + external makeKeyFrames: (. Js.Dict.t) => string = "keyframes" +}) + +type cache + +@module("emotion") external cache: cache = "cache" + +let fontFace = (~fontFamily, ~src, ~fontStyle=?, ~fontWeight=?, ~fontDisplay=?, ()) => { + let asString = Css_Legacy_Core.fontFace( + ~fontFamily, + ~src, + ~fontStyle?, + ~fontWeight?, + ~fontDisplay?, + (), + ) + insertRule(asString) + fontFamily +} diff --git a/res_syntax/tests/idempotency/bs-css/CssEmotionJs.res b/res_syntax/tests/idempotency/bs-css/CssEmotionJs.res new file mode 100644 index 0000000000..8e711f25f5 --- /dev/null +++ b/res_syntax/tests/idempotency/bs-css/CssEmotionJs.res @@ -0,0 +1,29 @@ +include Css_Js_Core +include Css_Colors + +include Css_Js_Core.Make({ + @module("emotion") + external mergeStyles: (. array) => string = "cx" + + @module("emotion") external make: (. Js.Json.t) => string = "css" + + @module("emotion") + external injectRule: (. Js.Json.t) => unit = "injectGlobal" + + @module("emotion") + external injectRaw: (. 
string) => unit = "injectGlobal" + + @module("emotion") + external makeKeyFrames: (. Js.Dict.t) => string = "keyframes" +}) + +type cache + +@module("emotion") external cache: cache = "cache" + +let fontFace = (~fontFamily, ~src, ~fontStyle=?, ~fontWeight=?, ~fontDisplay=?, ()) => { + insertRule(. + Css_Js_Core.fontFace(~fontFamily, ~src, ~fontStyle?, ~fontWeight?, ~fontDisplay?, ()), + ) + fontFamily +} diff --git a/res_syntax/tests/idempotency/bs-css/CssJs.res b/res_syntax/tests/idempotency/bs-css/CssJs.res new file mode 100644 index 0000000000..ad8caf19cd --- /dev/null +++ b/res_syntax/tests/idempotency/bs-css/CssJs.res @@ -0,0 +1,16 @@ +include Css_Js_Core +include Css_Colors + +include Css_Js_Core.Make({ + exception NotImplemented + + let make = (. _) => raise(NotImplemented) + let mergeStyles = (. _) => raise(NotImplemented) + let injectRule = (. _) => () + let injectRaw = (. _) => () + let makeKeyFrames = (. _) => raise(NotImplemented) +}) + +external unsafeJsonToStyles: Js.Json.t => ReactDOMRe.Style.t = "%identity" + +let style = (. rules) => rules->toJson->unsafeJsonToStyles diff --git a/res_syntax/tests/idempotency/bs-css/Css_AtomicTypes.res b/res_syntax/tests/idempotency/bs-css/Css_AtomicTypes.res new file mode 100644 index 0000000000..18d29352bf --- /dev/null +++ b/res_syntax/tests/idempotency/bs-css/Css_AtomicTypes.res @@ -0,0 +1,1823 @@ +let join = (strings, separator) => { + let rec run = (strings, acc) => + switch strings { + | list{} => acc + | list{x} => acc ++ x + | list{x, ...xs} => run(xs, acc ++ (x ++ separator)) + } + run(strings, "") +} + +module Cascading = { + type t = [#initial | #inherit_ | #unset] + + let initial = #initial + let inherit_ = #inherit_ + let unset = #unset + + let toString = x => + switch x { + | #initial => "initial" + | #inherit_ => "inherit" + | #unset => "unset" + } +} + +module Var = { + type t = [#var(string) | #varDefault(string, string)] + + let var = x => #var(x) + let varDefault = (x, default) => #varDefault(x, default) + + let prefix = x => Js.String.startsWith("--", x) ? 
x : "--" ++ x + + let toString = x => + switch x { + | #var(x) => "var(" ++ (prefix(x) ++ ")") + | #varDefault(x, v) => "var(" ++ (prefix(x) ++ ("," ++ (v ++ ")"))) + } +} + +module Time = { + type t = [#s(float) | #ms(float)] + + let s = x => #s(x) + let ms = x => #ms(x) + + let toString = x => + switch x { + | #s(v) => Js.Float.toString(v) ++ "s" + | #ms(v) => Js.Float.toString(v) ++ "ms" + } +} + +module Percentage = { + type t = [#percent(float)] + + let pct = x => #percent(x) + + let toString = x => + switch x { + | #percent(x) => Js.Float.toString(x) ++ "%" + } +} + +module Url = { + type t = [#url(string)] + + let toString = x => + switch x { + | #url(s) => "url(" ++ (s ++ ")") + } +} + +module Length = { + type rec t = [ + | #ch(float) + | #em(float) + | #ex(float) + | #rem(float) + | #vh(float) + | #vw(float) + | #vmin(float) + | #vmax(float) + | #px(int) + | #pxFloat(float) + | #cm(float) + | #mm(float) + | #inch(float) + | #pc(float) + | #pt(int) + | #zero + | #calc([#add | #sub], t, t) + | #percent(float) + ] + + let ch = x => #ch(x) + let em = x => #em(x) + let ex = x => #ex(x) + let rem = x => #rem(x) + let vh = x => #vh(x) + let vw = x => #vw(x) + let vmin = x => #vmin(x) + let vmax = x => #vmax(x) + let px = x => #px(x) + let pxFloat = x => #pxFloat(x) + let cm = x => #cm(x) + let mm = x => #mm(x) + let inch = x => #inch(x) + let pc = x => #pc(x) + let pt = x => #pt(x) + let zero = #zero + + let rec toString = x => + switch x { + | #ch(x) => Js.Float.toString(x) ++ "ch" + | #em(x) => Js.Float.toString(x) ++ "em" + | #ex(x) => Js.Float.toString(x) ++ "ex" + | #rem(x) => Js.Float.toString(x) ++ "rem" + | #vh(x) => Js.Float.toString(x) ++ "vh" + | #vw(x) => Js.Float.toString(x) ++ "vw" + | #vmin(x) => Js.Float.toString(x) ++ "vmin" + | #vmax(x) => Js.Float.toString(x) ++ "vmax" + | #px(x) => Js.Int.toString(x) ++ "px" + | #pxFloat(x) => Js.Float.toString(x) ++ "px" + | #cm(x) => Js.Float.toString(x) ++ "cm" + | #mm(x) => Js.Float.toString(x) ++ "mm" + | #inch(x) => Js.Float.toString(x) ++ "in" + | #pc(x) => Js.Float.toString(x) ++ "pc" + | #pt(x) => Js.Int.toString(x) ++ "pt" + | #zero => "0" + + | #calc(#add, a, b) => "calc(" ++ (toString(a) ++ (" + " ++ (toString(b) ++ ")"))) + | #calc(#sub, a, b) => "calc(" ++ (toString(a) ++ (" - " ++ (toString(b) ++ ")"))) + | #percent(x) => Js.Float.toString(x) ++ "%" + } +} + +module Angle = { + type t = [#deg(float) | #rad(float) | #grad(float) | #turn(float)] + + let deg = (x: float) => #deg(x) + let rad = (x: float) => #rad(x) + let grad = (x: float) => #grad(x) + let turn = (x: float) => #turn(x) + + let toString = x => + switch x { + | #deg(x) => Js.Float.toString(x) ++ "deg" + | #rad(x) => Js.Float.toString(x) ++ "rad" + | #grad(x) => Js.Float.toString(x) ++ "grad" + | #turn(x) => Js.Float.toString(x) ++ "turn" + } +} + +module Direction = { + type t = [#ltr | #rtl] + + let ltr = #ltr + let rtl = #rtl + + let toString = x => + switch x { + | #ltr => "ltr" + | #rtl => "rtl" + } +} + +module Position = { + type t = [#absolute | #relative | #static | #fixed | #sticky] + + let absolute = #absolute + let relative = #relative + let static = #static + let fixed = #fixed + let sticky = #sticky + + let toString = x => + switch x { + | #absolute => "absolute" + | #relative => "relative" + | #static => "static" + | #fixed => "fixed" + | #sticky => "sticky" + } +} + +module Resize = { + type t = [#none | #both | #horizontal | #vertical | #block | #inline] + + let none = #none + let both = #both + let horizontal = #horizontal + let vertical = 
#vertical + let block = #block + let inline = #inline + + let toString = x => + switch x { + | #none => "none" + | #both => "both" + | #horizontal => "horizontal" + | #vertical => "vertical" + | #block => "block" + | #inline => "inline" + } +} + +module FontVariant = { + type t = [#normal | #smallCaps] + + let normal = #normal + let smallCaps = #smallCaps + + let toString = x => + switch x { + | #normal => "normal" + | #smallCaps => "smallCaps" + } +} + +module FontStyle = { + type t = [#normal | #italic | #oblique] + + let normal = #normal + let italic = #italic + let oblique = #oblique + + let toString = x => + switch x { + | #normal => "normal" + | #italic => "italic" + | #oblique => "oblique" + } +} + +module FlexBasis = { + type t = [ + | #auto + | #fill + | #content + | #maxContent + | #minContent + | #fitContent + ] + + let fill = #fill + let content = #content + let maxContent = #maxContent + let minContent = #minContent + let fitContent = #fitContent + + let toString = x => + switch x { + | #auto => "auto" + | #fill => "fill" + | #content => "content" + | #maxContent => "max-content" + | #minContent => "min-content" + | #fitContent => "fit-content" + } +} + +module Overflow = { + type t = [#hidden | #visible | #scroll | #auto] + + let hidden = #hidden + let visible = #visible + let scroll = #scroll + let auto = #auto + + let toString = x => + switch x { + | #hidden => "hidden" + | #visible => "visible" + | #scroll => "scroll" + | #auto => "auto" + } +} + +module Margin = { + type t = [#auto] + + let auto = #auto + + let toString = x => + switch x { + | #auto => "auto" + } +} + +module GridAutoFlow = { + type t = [#column | #row | #columnDense | #rowDense] + + let toString = x => + switch x { + | #column => "column" + | #row => "row" + | #columnDense => "column dense" + | #rowDense => "row dense" + } +} + +module ColumnGap = { + type t = [#normal] + + let toString = x => + switch x { + | #normal => "normal" + } +} + +module VerticalAlign = { + type t = [ + | #baseline + | #sub + | #super + | #top + | #textTop + | #middle + | #bottom + | #textBottom + ] + + let toString = x => + switch x { + | #baseline => "baseline" + | #sub => "sub" + | #super => "super" + | #top => "top" + | #textTop => "text-top" + | #middle => "middle" + | #bottom => "bottom" + | #textBottom => "text-bottom" + } +} + +module TimingFunction = { + type t = [ + | #linear + | #ease + | #easeIn + | #easeOut + | #easeInOut + | #stepStart + | #stepEnd + | #steps(int, [#start | #end_]) + | #cubicBezier(float, float, float, float) + ] + + let linear = #linear + let ease = #ease + let easeIn = #easeIn + let easeInOut = #easeInOut + let easeOut = #easeOut + let stepStart = #stepStart + let stepEnd = #stepEnd + let steps = (i, dir) => #steps(i, dir) + let cubicBezier = (a, b, c, d) => #cubicBezier(a, b, c, d) + + let toString = x => + switch x { + | #linear => "linear" + | #ease => "ease" + | #easeIn => "ease-in" + | #easeOut => "ease-out" + | #easeInOut => "ease-in-out" + | #stepStart => "step-start" + | #stepEnd => "step-end" + | #steps(i, #start) => "steps(" ++ (Js.Int.toString(i) ++ ", start)") + | #steps(i, #end_) => "steps(" ++ (Js.Int.toString(i) ++ ", end)") + | #cubicBezier(a, b, c, d) => + "cubic-bezier(" ++ + (Js.Float.toString(a) ++ + (", " ++ + (Js.Float.toString(b) ++ + (", " ++ (Js.Float.toString(c) ++ (", " ++ (Js.Float.toString(d) ++ ")"))))))) + } +} + +module RepeatValue = { + type t = [#autoFill | #autoFit | #num(int)] + + let toString = x => + switch x { + | #autoFill => "auto-fill" + | #autoFit => 
"auto-fit" + | #num(x) => Js.Int.toString(x) + } +} + +module ListStyleType = { + type t = [ + | #disc + | #circle + | #square + | #decimal + | #lowerAlpha + | #upperAlpha + | #lowerGreek + | #lowerLatin + | #upperLatin + | #lowerRoman + | #upperRoman + | #none + ] + + let toString = x => + switch x { + | #disc => "disc" + | #circle => "circle" + | #square => "square" + | #decimal => "decimal" + | #lowerAlpha => "lower-alpha" + | #upperAlpha => "upper-alpha" + | #lowerGreek => "lower-greek" + | #lowerLatin => "lower-latin" + | #upperLatin => "upper-latin" + | #lowerRoman => "lower-roman" + | #upperRoman => "upper-roman" + | #none => "none" + } +} + +module ListStylePosition = { + type t = [#inside | #outside] + + let toString = x => + switch x { + | #inside => "inside" + | #outside => "outside" + } +} + +module OutlineStyle = { + type t = [ + | #none + | #hidden + | #dotted + | #dashed + | #solid + | #double + | #groove + | #ridge + | #inset + | #outset + ] + + let toString = x => + switch x { + | #none => "none" + | #hidden => "hidden" + | #dotted => "dotted" + | #dashed => "dashed" + | #solid => "solid" + | #double => "double" + | #groove => "grove" + | #ridge => "ridge" + | #inset => "inset" + | #outset => "outset" + } +} + +module FontWeight = { + type t = [ + | #num(int) + | #thin + | #extraLight + | #light + | #normal + | #medium + | #semiBold + | #bold + | #extraBold + | #black + | #lighter + | #bolder + ] + + let thin = #thin + let extraLight = #extraLight + let light = #light + let medium = #medium + let semiBold = #semiBold + let bold = #bold + let extraBold = #extraBold + let lighter = #lighter + let bolder = #bolder + + let toString = x => + switch x { + | #num(n) => Js.Int.toString(n) + | #thin => "100" + | #extraLight => "200" + | #light => "300" + | #normal => "400" + | #medium => "500" + | #semiBold => "600" + | #bold => "700" + | #extraBold => "800" + | #black => "900" + | #lighter => "lighter" + | #bolder => "bolder" + } +} + +module Transform = { + type t = [ + | #translate(Length.t, Length.t) + | #translate3d(Length.t, Length.t, Length.t) + | #translateX(Length.t) + | #translateY(Length.t) + | #translateZ(Length.t) + | #scale(float, float) + | #scale3d(float, float, float) + | #scaleX(float) + | #scaleY(float) + | #scaleZ(float) + | #rotate(Angle.t) + | #rotate3d(float, float, float, Angle.t) + | #rotateX(Angle.t) + | #rotateY(Angle.t) + | #rotateZ(Angle.t) + | #skew(Angle.t, Angle.t) + | #skewX(Angle.t) + | #skewY(Angle.t) + | #perspective(int) + ] + + let translate = (x, y) => #translate(x, y) + let translate3d = (x, y, z) => #translate3d(x, y, z) + let translateX = x => #translateX(x) + let translateY = y => #translateY(y) + let translateZ = z => #translateZ(z) + let scale = (x, y) => #scale(x, y) + let scale3d = (x, y, z) => #scale3d(x, y, z) + let scaleX = x => #scaleX(x) + let scaleY = x => #scaleY(x) + let scaleZ = x => #scaleZ(x) + let rotate = a => #rotate(a) + let rotate3d = (x, y, z, a) => #rotate3d(x, y, z, a) + let rotateX = a => #rotateX(a) + let rotateY = a => #rotateY(a) + let rotateZ = a => #rotateZ(a) + let skew = (a, a') => #skew(a, a') + let skewX = a => #skewX(a) + let skewY = a => #skewY(a) + + let string_of_scale = (x, y) => + "scale(" ++ (Js.Float.toString(x) ++ (", " ++ (Js.Float.toString(y) ++ ")"))) + + let string_of_translate3d = (x, y, z) => + "translate3d(" ++ + (Length.toString(x) ++ + (", " ++ (Length.toString(y) ++ (", " ++ (Length.toString(z) ++ ")"))))) + + let toString = x => + switch x { + | #translate(x, y) => + "translate(" ++ 
(Length.toString(x) ++ (", " ++ (Length.toString(y) ++ ")"))) + | #translate3d(x, y, z) => string_of_translate3d(x, y, z) + | #translateX(x) => "translateX(" ++ (Length.toString(x) ++ ")") + | #translateY(y) => "translateY(" ++ (Length.toString(y) ++ ")") + | #translateZ(z) => "translateZ(" ++ (Length.toString(z) ++ ")") + | #scale(x, y) => string_of_scale(x, y) + | #scale3d(x, y, z) => + "scale3d(" ++ + (Js.Float.toString(x) ++ + (", " ++ (Js.Float.toString(y) ++ (", " ++ (Js.Float.toString(z) ++ ")"))))) + | #scaleX(x) => "scaleX(" ++ (Js.Float.toString(x) ++ ")") + | #scaleY(y) => "scaleY(" ++ (Js.Float.toString(y) ++ ")") + | #scaleZ(z) => "scaleZ(" ++ (Js.Float.toString(z) ++ ")") + | #rotate(a) => "rotate(" ++ (Angle.toString(a) ++ ")") + | #rotate3d(x, y, z, a) => + "rotate3d(" ++ + (Js.Float.toString(x) ++ + (", " ++ + (Js.Float.toString(y) ++ + (", " ++ (Js.Float.toString(z) ++ (", " ++ (Angle.toString(a) ++ ")"))))))) + | #rotateX(a) => "rotateX(" ++ (Angle.toString(a) ++ ")") + | #rotateY(a) => "rotateY(" ++ (Angle.toString(a) ++ ")") + | #rotateZ(a) => "rotateZ(" ++ (Angle.toString(a) ++ ")") + | #skew(x, y) => "skew(" ++ (Angle.toString(x) ++ (", " ++ (Angle.toString(y) ++ ")"))) + | #skewX(a) => "skewX(" ++ (Angle.toString(a) ++ ")") + | #skewY(a) => "skewY(" ++ (Angle.toString(a) ++ ")") + | #perspective(x) => "perspective(" ++ (Js.Int.toString(x) ++ ")") + } +} + +module AnimationDirection = { + type t = [#normal | #reverse | #alternate | #alternateReverse] + + let toString = x => + switch x { + | #normal => "normal" + | #reverse => "reverse" + | #alternate => "alternate" + | #alternateReverse => "alternate-reverse" + } +} + +module AnimationFillMode = { + type t = [#none | #forwards | #backwards | #both] + + let toString = x => + switch x { + | #none => "none" + | #forwards => "forwards" + | #backwards => "backwards" + | #both => "both" + } +} + +module AnimationIterationCount = { + type t = [#infinite | #count(int)] + + let toString = x => + switch x { + | #infinite => "infinite" + | #count(x) => Js.Int.toString(x) + } +} + +module AnimationPlayState = { + type t = [#paused | #running] + + let toString = x => + switch x { + | #paused => "paused" + | #running => "running" + } +} + +module Cursor = { + type t = [ + | #auto + | #default + | #none + | #contextMenu + | #help + | #pointer + | #progress + | #wait + | #cell + | #crosshair + | #text + | #verticalText + | #alias + | #copy + | #move + | #noDrop + | #notAllowed + | #grab + | #grabbing + | #allScroll + | #colResize + | #rowResize + | #nResize + | #eResize + | #sResize + | #wResize + | #neResize + | #nwResize + | #seResize + | #swResize + | #ewResize + | #nsResize + | #neswResize + | #nwseResize + | #zoomIn + | #zoomOut + ] + + let auto = #auto + let default = #default + let none = #none + let contextMenu = #contextMenu + let help = #help + let pointer = #pointer + let progress = #progress + let wait = #wait + let cell = #cell + let crosshair = #crosshair + let text = #text + let verticalText = #verticalText + let alias = #alias + let copy = #copy + let move = #move + let noDrop = #noDrop + let notAllowed = #notAllowed + let grab = #grab + let grabbing = #grabbing + let allScroll = #allScroll + let colResize = #colResize + let rowResize = #rowResize + let nResize = #nResize + let eResize = #eResize + let sResize = #sResize + let wResize = #wResize + let neResize = #neResize + let nwResize = #nwResize + let seResize = #seResize + let swResize = #swResize + let ewResize = #ewResize + let nsResize = #nsResize + let 
neswResize = #neswResize + let nwseResize = #nwseResize + let zoomIn = #zoomIn + let zoomOut = #zoomOut + + let toString = x => + switch x { + | #auto => "auto" + | #default => "default" + | #none => "none" + | #contextMenu => "context-menu" + | #help => "help" + | #pointer => "pointer" + | #progress => "progress" + | #wait => "wait" + | #cell => "cell" + | #crosshair => "crosshair" + | #text => "text" + | #verticalText => "vertical-text" + | #alias => "alias" + | #copy => "copy" + | #move => "move" + | #noDrop => "no-drop" + | #notAllowed => "not-allowed" + | #grab => "grab" + | #grabbing => "grabbing" + | #allScroll => "all-scroll" + | #colResize => "col-resize" + | #rowResize => "row-resize" + | #nResize => "n-resize" + | #eResize => "e-resize" + | #sResize => "s-resize" + | #wResize => "w-resize" + | #neResize => "ne-resize" + | #nwResize => "nw-resize" + | #seResize => "se-resize" + | #swResize => "sw-resize" + | #ewResize => "ew-resize" + | #nsResize => "ns-resize" + | #neswResize => "nesw-resize" + | #nwseResize => "nwse-resize" + | #zoomIn => "zoom-in" + | #zoomOut => "zoom-out" + } +} + +module Color = { + type t = [ + | #rgb(int, int, int) + | #rgba(int, int, int, [#num(float) | Percentage.t]) + | #hsl(Angle.t, Percentage.t, Percentage.t) + | #hsla(Angle.t, Percentage.t, Percentage.t, [#num(float) | Percentage.t]) + | #hex(string) + | #transparent + | #currentColor + ] + + let rgb = (r, g, b) => #rgb(r, g, b) + let rgba = (r, g, b, a) => #rgba(r, g, b, a) + let hsl = (h, s, l) => #hsl(h, s, l) + let hsla = (h, s, l, a) => #hsla(h, s, l, a) + let hex = x => #hex(x) + let transparent = #transparent + let currentColor = #currentColor + + let string_of_alpha = x => + switch x { + | #num(f) => Js.Float.toString(f) + | #...Percentage.t as pc => Percentage.toString(pc) + } + + let toString = x => + switch x { + | #rgb(r, g, b) => + "rgb(" ++ + (Js.Int.toString(r) ++ + (", " ++ (Js.Int.toString(g) ++ (", " ++ (Js.Int.toString(b) ++ ")"))))) + | #rgba(r, g, b, a) => + "rgba(" ++ + (Js.Int.toString(r) ++ + (", " ++ + (Js.Int.toString(g) ++ + (", " ++ (Js.Int.toString(b) ++ (", " ++ (string_of_alpha(a) ++ ")"))))))) + | #hsl(h, s, l) => + "hsl(" ++ + (Angle.toString(h) ++ + (", " ++ (Percentage.toString(s) ++ (", " ++ (Percentage.toString(l) ++ ")"))))) + | #hsla(h, s, l, a) => + "hsla(" ++ + (Angle.toString(h) ++ + (", " ++ + (Percentage.toString(s) ++ + (", " ++ (Percentage.toString(l) ++ (", " ++ (string_of_alpha(a) ++ ")"))))))) + | #hex(s) => "#" ++ s + | #transparent => "transparent" + | #currentColor => "currentColor" + } +} + +module BorderStyle = { + type t = [ + | #none + | #hidden + | #dotted + | #dashed + | #solid + | #double + | #groove + | #ridge + | #inset + | #outset + ] + + let toString = x => + switch x { + | #none => "none" + | #hidden => "hidden" + | #dotted => "dotted" + | #dashed => "dashed" + | #solid => "solid" + | #double => "double" + | #groove => "groove" + | #ridge => "ridge" + | #inset => "inset" + | #outset => "outset" + } +} + +module PointerEvents = { + type t = [#auto | #none] + + let toString = x => + switch x { + | #auto => "auto" + | #none => "none" + } +} + +module Perspective = { + type t = [#none] + + let toString = x => + switch x { + | #none => "none" + } +} + +module LetterSpacing = { + type t = [#normal] + + let normal = #normal + + let toString = x => + switch x { + | #normal => "normal" + } +} + +module LineHeight = { + type t = [#normal | #abs(float)] + + let toString = x => + switch x { + | #normal => "normal" + | #abs(x) => 
Js.Float.toString(x) + } +} + +module WordSpacing = { + type t = [#normal] + + let toString = x => + switch x { + | #normal => "normal" + } +} + +module DisplayOutside = { + type t = [#block | #inline | #runIn] + + let toString = x => + switch x { + | #block => "block" + | #inline => "inline" + | #runIn => "run-in" + } +} + +module DisplayInside = { + type t = [#table | #flex | #grid] + + let toString = x => + switch x { + | #table => "table" + | #flex => "flex" + | #grid => "grid" + } +} + +module DisplayListItem = { + type t = [#listItem] + + let toString = x => + switch x { + | #listItem => "list-item" + } +} + +module DisplayInternal = { + type t = [ + | #tableRowGroup + | #tableHeaderGroup + | #tableFooterGroup + | #tableRow + | #tableCell + | #tableColumnGroup + | #tableColumn + | #tableCaption + ] + + let toString = x => + switch x { + | #tableRowGroup => "table-row-group" + | #tableHeaderGroup => "table-header-group" + | #tableFooterGroup => "table-footer-group" + | #tableRow => "table-row" + | #tableCell => "table-cell" + | #tableColumnGroup => "table-column-group" + | #tableColumn => "table-column" + | #tableCaption => "table-caption" + } +} + +module DisplayBox = { + type t = [#contents | #none] + + let toString = x => + switch x { + | #contents => "contents" + | #none => "none" + } +} + +module DisplayLegacy = { + type t = [#inlineBlock | #inlineFlex | #inlineGrid | #inlineTable] + + let toString = x => + switch x { + | #inlineBlock => "inline-block" + | #inlineFlex => "inline-flex" + | #inlineGrid => "inline-grid" + | #inlineTable => "inline-table" + } +} + +module JustifySelf = { + type t = [#auto | #normal | #stretch] + + let toString = x => + switch x { + | #auto => "auto" + | #normal => "normal" + | #stretch => "stretch" + } +} + +module PositionalAlignment = { + type t = [ + | #center + | #start + | #end_ + | #flexStart + | #flexEnd + | #selfStart + | #selfEnd + | #left + | #right + ] + + let toString = x => + switch x { + | #center => "center" + | #start => "start" + | #end_ => "end" + | #flexStart => "flex-start" + | #flexEnd => "flex-end" + | #selfStart => "self-start" + | #selfEnd => "self-end" + | #left => "left" + | #right => "right" + } +} + +module OverflowAlignment = { + type t = [ + | #safe(PositionalAlignment.t) + | #unsafe(PositionalAlignment.t) + ] + + let toString = x => + switch x { + | #safe(pa) => "safe " ++ PositionalAlignment.toString(pa) + | #unsafe(pa) => "unsafe " ++ PositionalAlignment.toString(pa) + } +} + +module BaselineAlignment = { + type t = [#baseline | #firstBaseline | #lastBaseline] + + let toString = x => + switch x { + | #baseline => "baseline" + | #firstBaseline => "first baseline" + | #lastBaseline => "last baseline" + } +} + +module NormalAlignment = { + type t = [#normal] + + let toString = x => + switch x { + | #normal => "normal" + } +} + +module DistributedAlignment = { + type t = [#spaceBetween | #spaceAround | #spaceEvenly | #stretch] + + let toString = x => + switch x { + | #spaceBetween => "space-between" + | #spaceAround => "space-around" + | #spaceEvenly => "space-evenly" + | #stretch => "stretch" + } +} + +module LegacyAlignment = { + type t = [#legacy | #legacyRight | #legacyLeft | #legacyCenter] + + let toString = x => + switch x { + | #legacy => "legacy" + | #legacyRight => "legacy right" + | #legacyLeft => "legacy left" + | #legacyCenter => "legacy center" + } +} + +module TextAlign = { + type t = [#left | #right | #center | #justify] + + let toString = x => + switch x { + | #left => "left" + | #right => "right" + | 
#center => "center" + | #justify => "justify" + } +} + +module WordBreak = { + type t = [#normal | #breakAll | #keepAll] + + let toString = x => + switch x { + | #normal => "normal" + | #breakAll => "break-all" + | #keepAll => "keep-all" + } +} + +module WhiteSpace = { + type t = [#normal | #nowrap | #pre | #preLine | #preWrap | #breakSpaces] + + let toString = x => + switch x { + | #normal => "normal" + | #nowrap => "nowrap" + | #pre => "pre" + | #preLine => "pre-line" + | #preWrap => "pre-wrap" + | #breakSpaces => "break-spaces" + } +} + +module AlignItems = { + type t = [#normal | #stretch] + + let toString = x => + switch x { + | #normal => "normal" + | #stretch => "stretch" + } +} + +module AlignSelf = { + type t = [#auto | #normal | #stretch] + + let toString = x => + switch x { + | #auto => "auto" + | #normal => "normal" + | #stretch => "stretch" + } +} + +module AlignContent = { + type t = [#center | #start | #end_ | #flexStart | #flexEnd] + + let toString = x => + switch x { + | #center => "center" + | #start => "start" + | #end_ => "end" + | #flexStart => "flex-start" + | #flexEnd => "flex-end" + } +} + +module ObjectFit = { + type t = [#fill | #contain | #cover | #none | #scaleDown] + + let toString = x => + switch x { + | #fill => "fill" + | #contain => "contain" + | #cover => "cover" + | #none => "none" + | #scaleDown => "scale-down" + } +} + +module Clear = { + type t = [#none | #left | #right | #both | #inlineStart | #inlineEnd] + + let toString = x => + switch x { + | #none => "none" + | #left => "left" + | #right => "right" + | #both => "both" + | #inlineStart => "inline-start" + | #inlineEnd => "inline-end" + } +} + +module Float = { + type t = [#left | #right | #none | #inlineStart | #inlineEnd] + + let toString = x => + switch x { + | #left => "left" + | #right => "right" + | #none => "none" + | #inlineStart => "inline-start" + | #inlineEnd => "inline-end" + } +} + +module Visibility = { + type t = [#visible | #hidden | #collapse] + + let toString = x => + switch x { + | #visible => "visible" + | #hidden => "hidden" + | #collapse => "collapse" + } +} + +module TableLayout = { + type t = [#auto | #fixed] + + let toString = x => + switch x { + | #auto => "auto" + | #fixed => "fixed" + } +} + +module BorderCollapse = { + type t = [#collapse | #separate] + + let toString = x => + switch x { + | #collapse => "collapse" + | #separate => "separate" + } +} + +module FlexWrap = { + type t = [#nowrap | #wrap | #wrapReverse] + + let toString = x => + switch x { + | #nowrap => "nowrap" + | #wrap => "wrap" + | #wrapReverse => "wrap-reverse" + } +} + +module FlexDirection = { + type t = [#row | #rowReverse | #column | #columnReverse] + + let toString = x => + switch x { + | #row => "row" + | #rowReverse => "row-reverse" + | #column => "column" + | #columnReverse => "column-reverse" + } +} + +module BoxSizing = { + type t = [#contentBox | #borderBox] + + let toString = x => + switch x { + | #contentBox => "content-box" + | #borderBox => "border-box" + } +} + +module ColumnCount = { + type t = [#auto | #count(int)] + + let toString = x => + switch x { + | #auto => "auto" + | #count(v) => Js.Int.toString(v) + } +} + +module UserSelect = { + type t = [#none | #auto | #text | #contain | #all] + + let toString = x => + switch x { + | #none => "none" + | #auto => "auto" + | #text => "text" + | #contain => "contain" + | #all => "all" + } +} + +module TextTransform = { + type t = [#none | #capitalize | #uppercase | #lowercase] + + let toString = x => + switch x { + | #none => "none" + | 
#capitalize => "capitalize" + | #uppercase => "uppercase" + | #lowercase => "lowercase" + } +} + +module GridTemplateAreas = { + type t = [#none | #areas(list)] + + let areas = x => #areas(x) + + let toString = x => + switch x { + | #none => "none" + | #areas(l) => + String.trim(List.fold_left((carry, elem) => carry ++ ("'" ++ (elem ++ "' ")), "", l)) + } +} + +module GridArea = { + type t = [ + | #auto + | #ident(string) + | #num(int) + | #numIdent(int, string) + | #span([#num(int) | #ident(string)]) + ] + + let auto = #auto + let ident = x => #ident(x) + let num = x => #num(x) + let numIdent = (x, y) => #numIdent(x, y) + let span = x => #span(x) + + let toString = t => + switch t { + | #auto => "auto" + | #ident(s) => s + | #num(i) => string_of_int(i) + | #numIdent(i, s) => string_of_int(i) ++ (" " ++ s) + | #span(e) => + "span " ++ + switch e { + | #num(i) => string_of_int(i) + | #ident(s) => s + } + } +} + +module BackdropFilter = { + type t = [ + | #blur(Length.t) + | #brightness([#num(int) | #percent(float)]) + | #contrast([#num(int) | #percent(float)]) + | #dropShadow([#num(int) | #percent(float)]) + | #grayscale([#num(int) | #percent(float)]) + | #hueRotate([Angle.t | #zero]) + | #invert([#num(int) | #percent(float)]) + | #none + | #opacity([#num(int) | #percent(float)]) + | #saturate([#num(int) | #percent(float)]) + | #sepia([#num(int) | #percent(float)]) + ] + + let string_of_percent = p => Js.Float.toString(p) ++ "%" + + let toString = x => + switch x { + | #blur(#...Length.t as b) => "blur(" ++ (Length.toString(b) ++ ")") + | #brightness(#num(b)) => "brightness(" ++ (string_of_int(b) ++ ")") + | #brightness(#percent(b)) => "brightness(" ++ (string_of_percent(b) ++ ")") + | #contrast(#num(c)) => "contrast(" ++ (string_of_int(c) ++ ")") + | #contrast(#percent(c)) => "contrast(" ++ (string_of_percent(c) ++ ")") + | #dropShadow(#num(i)) => "drop-shadow(" ++ (string_of_int(i) ++ ")") + | #dropShadow(#percent(i)) => "drop-shadow(" ++ (string_of_percent(i) ++ ")") + | #grayscale(#num(i)) => "grayscale(" ++ (string_of_int(i) ++ ")") + | #grayscale(#percent(i)) => "grayscale(" ++ (string_of_percent(i) ++ ")") + | #hueRotate(#...Angle.t as h) => "hue-rotate(" ++ (Angle.toString(h) ++ ")") + | #hueRotate(#zero) => "hue-rotate(0deg)" + | #invert(#num(i)) => "invert(" ++ (string_of_int(i) ++ ")") + | #invert(#percent(i)) => "invert(" ++ (string_of_percent(i) ++ ")") + | #none => "none" + | #opacity(#num(i)) => "opacity(" ++ (string_of_int(i) ++ ")") + | #opacity(#percent(i)) => "opacity(" ++ (string_of_percent(i) ++ ")") + | #saturate(#num(i)) => "saturate(" ++ (string_of_int(i) ++ ")") + | #saturate(#percent(i)) => "saturate(" ++ (string_of_percent(i) ++ ")") + | #sepia(#num(i)) => "sepia(" ++ (string_of_int(i) ++ ")") + | #sepia(#percent(i)) => "sepia(" ++ (string_of_percent(i) ++ ")") + } +} + +module BackgroundAttachment = { + type t = [#scroll | #fixed | #local] + + let toString = x => + switch x { + | #scroll => "scroll" + | #fixed => "fixed" + | #local => "local" + } +} + +module BackgroundClip = { + type t = [#borderBox | #paddingBox | #contentBox] + + let toString = x => + switch x { + | #borderBox => "border-box" + | #contentBox => "content-box" + | #paddingBox => "padding-box" + } +} + +module BackgroundOrigin = { + type t = [#borderBox | #paddingBox | #contentBox] + + let toString = x => + switch x { + | #borderBox => "border-box" + | #contentBox => "content-box" + | #paddingBox => "padding-box" + } +} + +module BackgroundPosition = { + module X = { + type t = [#left | #right | 
#center] + + let toString = x => + switch x { + | #left => "left" + | #right => "right" + | #center => "center" + } + } + + module Y = { + type t = [#top | #bottom | #center] + + let toString = x => + switch x { + | #top => "top" + | #bottom => "bottom" + | #center => "center" + } + } + + type t = [X.t | Y.t] + + let toString = x => + switch x { + | #left => "left" + | #right => "right" + | #top => "top" + | #bottom => "bottom" + | #center => "center" + } +} + +module BackgroundRepeat = { + type twoValue = [#repeat | #space | #round | #noRepeat] + type t = [#repeatX | #repeatY | twoValue] + type horizontal = twoValue + type vertical = twoValue + + let toString = x => + switch x { + | #repeatX => "repeat-x" + | #repeatY => "repeat-y" + | #repeat => "repeat" + | #space => "space" + | #round => "round" + | #noRepeat => "no-repeat" + } +} + +module TextOverflow = { + type t = [#clip | #ellipsis | #string(string)] + + let toString = x => + switch x { + | #clip => "clip" + | #ellipsis => "ellipsis" + | #string(s) => s + } +} + +module TextDecorationLine = { + type t = [#none | #underline | #overline | #lineThrough | #blink] + + let toString = x => + switch x { + | #none => "none" + | #underline => "underline" + | #overline => "overline" + | #lineThrough => "line-through" + | #blink => "blink" + } +} + +module TextDecorationStyle = { + type t = [#solid | #double | #dotted | #dashed | #wavy] + + let toString = x => + switch x { + | #solid => "solid" + | #double => "double" + | #dotted => "dotted" + | #dashed => "dashed" + | #wavy => "wavy" + } +} + +module Width = { + type t = [#auto | #fitContent] + + let toString = x => + switch x { + | #auto => "auto" + | #fitContent => "fit-content" + } +} + +module MaxWidth = { + type t = [#none] + + let toString = x => + switch x { + | #none => "none" + } +} + +module Height = { + type t = [#auto] + + let toString = x => + switch x { + | #auto => "auto" + } +} + +module MaxHeight = { + type t = [#none] + + let toString = x => + switch x { + | #none => "none" + } +} + +module OverflowWrap = { + type t = [#normal | #breakWord | #anywhere] + + let toString = x => + switch x { + | #normal => "normal" + | #breakWord => "break-word" + | #anywhere => "anywhere" + } +} + +module Gradient = { + type t<'colorOrVar> = [ + | #linearGradient(Angle.t, list<(Length.t, [< Color.t | Var.t] as 'colorOrVar)>) + | #repeatingLinearGradient(Angle.t, list<(Length.t, [< Color.t | Var.t] as 'colorOrVar)>) + | #radialGradient(list<(Length.t, [< Color.t | Var.t] as 'colorOrVar)>) + | #repeatingRadialGradient(list<(Length.t, [< Color.t | Var.t] as 'colorOrVar)>) + ] + + let linearGradient = (angle, stops) => #linearGradient(angle, stops) + let repeatingLinearGradient = (angle, stops) => #repeatingLinearGradient(angle, stops) + let radialGradient = stops => #radialGradient(stops) + let repeatingRadialGradient = stops => #repeatingRadialGradient(stops) + + let string_of_color = x => + switch x { + | #...Color.t as co => Color.toString(co) + | #...Var.t as va => Var.toString(va) + } + let string_of_stops = stops => + stops->Belt.List.map(((l, c)) => string_of_color(c) ++ (" " ++ Length.toString(l)))->join(", ") + + let toString = x => + switch x { + | #linearGradient(angle, stops) => + "linear-gradient(" ++ (Angle.toString(angle) ++ (", " ++ (string_of_stops(stops) ++ ")"))) + | #repeatingLinearGradient(angle, stops) => + "repeating-linear-gradient(" ++ + (Angle.toString(angle) ++ + (", " ++ (string_of_stops(stops) ++ ")"))) + | #radialGradient(stops) => "radial-gradient(" ++ 
(string_of_stops(stops) ++ ")") + | #repeatingRadialGradient(stops) => + "repeating-radial-gradient(" ++ (string_of_stops(stops) ++ ")") + } +} + +module BackgroundImage = { + type t = [#none] + + let toString = x => + switch x { + | #none => "none" + } +} + +module GeometyBox = { + type t = [ + | #marginBox + | #borderBox + | #paddingBox + | #contentBox + | #fillBox + | #strokeBox + | #viewBox + ] + + let marginBox = #marginBox + let borderBox = #borderBox + let paddingBox = #paddingBox + let contentBox = #contentBox + let fillBox = #fillBox + let strokeBox = #strokeBox + let viewBox = #viewBox + + let toString = x => + switch x { + | #marginBox => "margin-box" + | #borderBox => "border-box" + | #paddingBox => "padding-box" + | #contentBox => "content-box" + | #fillBox => "fill-box" + | #strokeBox => "stroke-box" + | #viewBox => "view-box" + } +} + +module ClipPath = { + type t = [#none] + + let toString = x => + switch x { + | #none => "none" + } +} + +module BackfaceVisibility = { + type t = [#visible | #hidden] + + let toString = x => + switch x { + | #visible => "visible" + | #hidden => "hidden" + } +} + +module Flex = { + type t = [#auto | #initial | #none] + + let toString = x => + switch x { + | #auto => "auto" + | #initial => "initial" + | #none => "none" + } +} + +module TransformStyle = { + type t = [#preserve3d | #flat] + + let toString = x => + switch x { + | #preserve3d => "preserve-3d" + | #flat => "flat" + } +} + +module ListStyleImage = { + type t = [#none] + + let toString = x => + switch x { + | #none => "none" + } +} + +module FontFamilyName = { + type t = [ + | #custom(string) + | #serif + | #sansSerif + | #cursive + | #fantasy + | #monospace + | #systemUi + | #emoji + | #math + | #fangsong + ] + + let custom = #custom + let serif = #serif + let sansSerif = #sansSerif + let cursive = #cursive + let fantasy = #fantasy + let monospace = #monospace + let systemUi = #systemUi + let emoji = #emoji + let math = #math + let fangsong = #fangsong + + let toString = x => + switch x { + | #custom(name) => name + | #serif => "serif" + | #sansSerif => "sans-serif" + | #cursive => "cursive" + | #fantasy => "fantasy" + | #monospace => "monospace" + | #systemUi => "system-ui" + | #emoji => "emoji" + | #math => "math" + | #fangsong => "fangsong" + } +} + +module FontDisplay = { + type t = [#auto | #block | #swap | #fallback | #optional] + + let auto = #auto + let block = #block + let swap = #swap + let fallback = #fallback + let optional = #optional + + let toString = x => + switch x { + | #auto => "auto" + | #block => "block" + | #swap => "swap" + | #fallback => "fallback" + | #optional => "optional" + } +} + +module CounterStyleType = { + type t = [ListStyleType.t] + + let toString = x => + switch x { + | #...ListStyleType.t as c => ListStyleType.toString(c) + } +} + +module Counter = { + type style = [CounterStyleType.t | #unset] + type t = [#counter(string, style)] + + let counter = (~style=#unset, name) => #counter(name, style) + + let toString = x => + switch x { + | #counter(counter, style) => + switch style { + | #unset => "counter(" ++ (counter ++ ")") + | #...CounterStyleType.t as t => + "counter(" ++ (counter ++ ("," ++ (CounterStyleType.toString(t) ++ ")"))) + } + } +} + +module Counters = { + type style = [CounterStyleType.t | #unset] + type t = [#counters(string, string, style)] + + let counters = (~style=#unset, ~separator="", name) => #counters(name, separator, style) + + let toString = x => + switch x { + | #counters(name, separator, style) => + switch style { + | 
#unset => "counters(" ++ (name ++ (",\"" ++ (separator ++ "\")"))) + | #...CounterStyleType.t as s => + "counters(" ++ + (name ++ + (",\"" ++ (separator ++ ("\"," ++ (CounterStyleType.toString(s) ++ ")"))))) + } + } +} + +module CounterIncrement = { + type t = [#none | #increment(string, int)] + + let increment = (~value=1, name) => #increment(name, value) + + let toString = x => + switch x { + | #none => "none" + | #increment(name, value) => name ++ (" " ++ string_of_int(value)) + } +} + +module CounterReset = { + type t = [#none | #reset(string, int)] + + let reset = (~value=0, name) => #reset(name, value) + + let toString = x => + switch x { + | #none => "none" + | #reset(name, value) => name ++ (" " ++ string_of_int(value)) + } +} + +module CounterSet = { + type t = [#none | #set(string, int)] + + let set = (~value=0, name) => #set(name, value) + + let toString = x => + switch x { + | #none => "none" + | #set(name, value) => name ++ (" " ++ string_of_int(value)) + } +} + +module Content = { + type t = [ + | #none + | #normal + | #openQuote + | #closeQuote + | #noOpenQuote + | #noCloseQuote + | #attr(string) + | #text(string) + ] + + let toString = x => + switch x { + | #none => "none" + | #normal => "normal" + | #openQuote => "open-quote" + | #closeQuote => "close-quote" + | #noOpenQuote => "no-open-quote" + | #noCloseQuote => "no-close-quote" + | #attr(name) => "attr(" ++ (name ++ ")") + | #text(string) => j`"$string"` + } +} + +module SVG = { + module Fill = { + type t = [#none | #contextFill | #contextStroke] + + let contextFill = #contextFill + let contextStroke = #contextStroke + + let toString = x => + switch x { + | #none => "none" + | #contextFill => "context-fill" + | #contextStroke => "context-stroke" + } + } +} diff --git a/res_syntax/tests/idempotency/bs-css/Css_AtomicTypes.resi b/res_syntax/tests/idempotency/bs-css/Css_AtomicTypes.resi new file mode 100644 index 0000000000..549b7dd9e1 --- /dev/null +++ b/res_syntax/tests/idempotency/bs-css/Css_AtomicTypes.resi @@ -0,0 +1,1342 @@ +// Docs copied from MDN + +module Cascading: { + type t = [#initial | #inherit_ | #unset] + + let initial: [> t] + let inherit_: [> t] + let unset: [> t] + + let toString: t => string +} + +module Var: { + type t = [#var(string) | #varDefault(string, string)] + + let var: string => [> t] + let varDefault: (string, string) => [> t] + + let toString: t => string +} + +module Time: { + @ocaml.doc(" + The