Merge pull request #422 from Marwes/ndm-extract-state-machine-rebase

extract state machine
Markus Westerlind 2018-12-10 21:51:36 +01:00 committed by GitHub
commit 64bed5ca63
23 changed files with 18647 additions and 16225 deletions

Cargo.lock (generated): 129 lines changed

@ -1,9 +1,9 @@
[[package]]
name = "aho-corasick"
version = "0.6.8"
version = "0.6.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"memchr 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"memchr 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -19,7 +19,7 @@ name = "atty"
version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.44 (registry+https://github.com/rust-lang/crates.io-index)",
"termion 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -77,7 +77,7 @@ version = "0.16.2"
dependencies = [
"lalrpop 0.16.2",
"lalrpop-util 0.16.2",
"regex 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -108,11 +108,11 @@ dependencies = [
[[package]]
name = "docopt"
version = "1.0.1"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"lazy_static 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"strsim 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -128,7 +128,7 @@ name = "ena"
version = "0.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"log 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -165,7 +165,7 @@ dependencies = [
[[package]]
name = "itertools"
version = "0.7.8"
version = "0.7.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"either 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -188,14 +188,14 @@ dependencies = [
"atty 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
"bit-set 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
"diff 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
"docopt 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"docopt 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
"ena 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.7.8 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)",
"lalrpop-util 0.16.2",
"petgraph 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
"regex-syntax 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
"regex-syntax 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"sha2 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -211,7 +211,7 @@ dependencies = [
"diff 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
"lalrpop 0.16.2",
"lalrpop-util 0.16.2",
"regex 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -220,20 +220,17 @@ version = "0.16.2"
[[package]]
name = "lazy_static"
version = "1.1.0"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "libc"
version = "0.2.43"
version = "0.2.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "log"
version = "0.4.5"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cfg-if 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
@ -241,11 +238,11 @@ dependencies = [
[[package]]
name = "memchr"
version = "2.1.0"
version = "2.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cfg-if 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.44 (registry+https://github.com/rust-lang/crates.io-index)",
"version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -271,10 +268,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
name = "pascal"
version = "0.11.0"
dependencies = [
"docopt 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"docopt 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
"lalrpop 0.16.2",
"lalrpop-util 0.16.2",
"regex 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -312,7 +309,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "proc-macro2"
version = "0.4.20"
version = "0.4.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -320,10 +317,10 @@ dependencies = [
[[package]]
name = "quote"
version = "0.6.8"
version = "0.6.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"proc-macro2 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)",
"proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -333,7 +330,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
"fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.44 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_core 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -353,7 +350,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "redox_syscall"
version = "0.1.40"
version = "0.1.43"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
@ -361,27 +358,27 @@ name = "redox_termios"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"redox_syscall 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)",
"redox_syscall 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "regex"
version = "1.0.5"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"aho-corasick 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)",
"memchr 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"regex-syntax 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
"aho-corasick 0.6.9 (registry+https://github.com/rust-lang/crates.io-index)",
"memchr 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"regex-syntax 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
"thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"utf8-ranges 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"utf8-ranges 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "regex-syntax"
version = "0.6.2"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"ucd-util 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"ucd-util 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -394,9 +391,9 @@ name = "serde_derive"
version = "1.0.80"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"proc-macro2 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 0.15.14 (registry+https://github.com/rust-lang/crates.io-index)",
"proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 0.15.22 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -420,7 +417,7 @@ name = "string_cache"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"lazy_static 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"new_debug_unreachable 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"phf_shared 0.7.23 (registry+https://github.com/rust-lang/crates.io-index)",
"precomputed-hash 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
@ -436,8 +433,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"phf_generator 0.7.23 (registry+https://github.com/rust-lang/crates.io-index)",
"phf_shared 0.7.23 (registry+https://github.com/rust-lang/crates.io-index)",
"proc-macro2 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)",
"proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
"string_cache_shared 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -453,11 +450,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "syn"
version = "0.15.14"
version = "0.15.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"proc-macro2 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)",
"proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -475,8 +472,8 @@ name = "termion"
version = "1.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
"redox_syscall 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.44 (registry+https://github.com/rust-lang/crates.io-index)",
"redox_syscall 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)",
"redox_termios 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -485,7 +482,7 @@ name = "thread_local"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"lazy_static 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -495,7 +492,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "ucd-util"
version = "0.1.1"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
@ -513,7 +510,7 @@ dependencies = [
[[package]]
name = "utf8-ranges"
version = "1.0.1"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
@ -564,7 +561,7 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[metadata]
"checksum aho-corasick 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)" = "68f56c7353e5a9547cbd76ed90f7bb5ffc3ba09d4ea9bd1d8c06c8b1142eeb5a"
"checksum aho-corasick 0.6.9 (registry+https://github.com/rust-lang/crates.io-index)" = "1e9a933f4e58658d7b12defcf96dc5c720f20832deebe3e0a19efd3b6aaeeb9e"
"checksum ascii-canvas 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b385d69402821a1c254533a011a312531cbcc0e3e24f19bbb4747a5a2daf37e2"
"checksum atty 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "9a7d5b8723950951411ee34d271d99dddcc2035a16ab25310ea2c8cfd4369652"
"checksum bit-set 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6f1efcc46c18245a69c38fcc5cc650f16d3a59d034f3106e9ed63748f695730a"
@ -578,7 +575,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f"
"checksum diff 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "3c2b69f912779fbb121ceb775d74d51e915af17aaebc38d28a592843a2dd0a3a"
"checksum digest 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "05f47366984d3ad862010e22c7ce81a7dbcaebbdfb37241a620f8b6596ee135c"
"checksum docopt 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d60c92df70dfaaabecc14b409fd79f55ba0f247780529db1d73bfa601e1d3ac0"
"checksum docopt 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "db2906c2579b5b7207fc1e328796a9a8835dc44e22dbe8e460b1d636f9a7b225"
"checksum either 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3be565ca5c557d7f59e7cfcf1844f9e3033650c929c6566f511e8005f205c1d0"
"checksum ena 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "25b4e5febb25f08c49f1b07dc33a182729a6b21edfb562b5aef95f78e0dbe5bb"
"checksum fake-simd 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed"
@ -586,12 +583,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82"
"checksum fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7"
"checksum generic-array 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3c0f28c2f5bfb5960175af447a2da7c18900693738343dc896ffbcabd9839592"
"checksum itertools 0.7.8 (registry+https://github.com/rust-lang/crates.io-index)" = "f58856976b776fedd95533137617a02fb25719f40e7d9b01c7043cd65474f450"
"checksum itertools 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)" = "0d47946d458e94a1b7bcabbf6521ea7c037062c81f534615abcad76e84d4970d"
"checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
"checksum lazy_static 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ca488b89a5657b0a2ecd45b95609b3e848cf1755da332a0da46e2b2b1cb371a7"
"checksum libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)" = "76e3a3ef172f1a0b9a9ff0dd1491ae5e6c948b94479a3021819ba7d860c8645d"
"checksum log 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "d4fcce5fa49cc693c312001daf1d13411c4a5283796bac1084299ea3e567113f"
"checksum memchr 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4b3629fe9fdbff6daa6c33b90f7c08355c1aca05a3d01fa8063b822fcf185f3b"
"checksum lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a374c89b9db55895453a74c1e38861d9deec0b01b405a82516e9d5de4820dea1"
"checksum libc 0.2.44 (registry+https://github.com/rust-lang/crates.io-index)" = "10923947f84a519a45c8fefb7dd1b3e8c08747993381adee176d7a82b4195311"
"checksum log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c84ec4b527950aa83a329754b01dbe3f58361d1c5efacd1f6d68c494d08a17c6"
"checksum memchr 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0a3eb002f0535929f1199681417029ebea04aadc0c7a4224b46be99c7f5d6a16"
"checksum new_debug_unreachable 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0cdc457076c78ab54d5e0d6fa7c47981757f1e34dc39ff92787f217dede586c4"
"checksum opaque-debug 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "51ecbcb821e1bd256d456fe858aaa7f380b63863eab2eb86eee1bd9f33dd6682"
"checksum ordermap 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "a86ed3f5f244b372d6b1a00b72ef7f8876d0bc6a78a4c9985c53614041512063"
@ -599,15 +596,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum phf_generator 0.7.23 (registry+https://github.com/rust-lang/crates.io-index)" = "03dc191feb9b08b0dc1330d6549b795b9d81aec19efe6b4a45aec8d4caee0c4b"
"checksum phf_shared 0.7.23 (registry+https://github.com/rust-lang/crates.io-index)" = "b539898d22d4273ded07f64a05737649dc69095d92cb87c7097ec68e3f150b93"
"checksum precomputed-hash 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c"
"checksum proc-macro2 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)" = "3d7b7eaaa90b4a90a932a9ea6666c95a389e424eff347f0f793979289429feee"
"checksum quote 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)" = "dd636425967c33af890042c483632d33fa7a18f19ad1d7ea72e8998c6ef8dea5"
"checksum proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)" = "77619697826f31a02ae974457af0b29b723e5619e113e9397b8b82c6bd253f09"
"checksum quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)" = "53fa22a1994bd0f9372d7a816207d8a2677ad0325b073f5c5332760f0fb62b5c"
"checksum rand 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)" = "e464cd887e869cddcae8792a4ee31d23c7edd516700695608f5b98c67ee0131c"
"checksum rand_core 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1961a422c4d189dfb50ffa9320bf1f2a9bd54ecb92792fb9477f99a1045f3372"
"checksum rand_core 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0905b6b7079ec73b314d4c748701f6931eb79fd97c668caa3f1899b22b32c6db"
"checksum redox_syscall 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)" = "c214e91d3ecf43e9a4e41e578973adeb14b474f2bee858742d127af75a0112b1"
"checksum redox_syscall 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)" = "679da7508e9a6390aeaf7fbd02a800fdc64b73fe2204dd2c8ae66d22d9d5ad5d"
"checksum redox_termios 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7e891cfe48e9100a70a3b6eb652fef28920c117d366339687bd5576160db0f76"
"checksum regex 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "2069749032ea3ec200ca51e4a31df41759190a88edca0d2d86ee8bedf7073341"
"checksum regex-syntax 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "747ba3b235651f6e2f67dfa8bcdcd073ddb7c243cb21c442fc12395dfcac212d"
"checksum regex 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "ee84f70c8c08744ea9641a731c7fadb475bf2ecc52d7f627feb833e0b3990467"
"checksum regex-syntax 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)" = "fbc557aac2b708fe84121caf261346cc2eed71978024337e42eb46b8a252ac6e"
"checksum serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)" = "15c141fc7027dd265a47c090bf864cf62b42c4d228bbcf4e51a0c9e2b0d3f7ef"
"checksum serde_derive 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)" = "225de307c6302bec3898c51ca302fc94a7a1697ef0845fcee6448f33c032249c"
"checksum sha2 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7b4d8bfd0e469f417657573d8451fb33d16cfe0989359b93baf3a1ffc639543d"
@ -616,15 +613,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum string_cache_codegen 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1eea1eee654ef80933142157fdad9dd8bc43cf7c74e999e369263496f04ff4da"
"checksum string_cache_shared 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b1884d1bc09741d466d9b14e6d37ac89d6909cbcac41dd9ae982d4d063bbedfc"
"checksum strsim 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bb4f380125926a99e52bc279241539c018323fab05ad6368b56f93d9369ff550"
"checksum syn 0.15.14 (registry+https://github.com/rust-lang/crates.io-index)" = "baaba45c6bf60fe29aaf241fa33306c0b75c801edea8378263a8f043b09a5634"
"checksum syn 0.15.22 (registry+https://github.com/rust-lang/crates.io-index)" = "ae8b29eb5210bc5cf63ed6149cbf9adfc82ac0be023d8735c176ee74a2db4da7"
"checksum term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "fa63644f74ce96fbeb9b794f66aff2a52d601cbd5e80f4b97123e3899f4570f1"
"checksum termion 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "689a3bdfaab439fd92bc87df5c4c78417d3cbe537487274e9b0b2dce76e92096"
"checksum thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c6b53e329000edc2b34dbe8545fd20e55a333362d0a321909685a19bd28c3f1b"
"checksum typenum 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "612d636f949607bdf9b123b4a6f6d966dedf3ff669f7f045890d3a4a73948169"
"checksum ucd-util 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "fd2be2d6639d0f8fe6cdda291ad456e23629558d466e2789d2c3e9892bda285d"
"checksum ucd-util 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "535c204ee4d8434478593480b8f86ab45ec9aae0e83c568ca81abf0fd0e88f86"
"checksum unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc"
"checksum unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "382810877fe448991dfc7f0dd6e3ae5d58088fd0ea5e35189655f84e6814fa56"
"checksum utf8-ranges 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "fd70f467df6810094968e2fce0ee1bd0e87157aceb026a8c083bcf5e25b9efe4"
"checksum utf8-ranges 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "796f7e48bef87609f7ade7e06495a87d5cd06c7866e6a5cbfceffc558a243737"
"checksum version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "914b1a6776c4c929a602fafd8bc742e06365d4bcbe48c30f9cca5824f70dc9dd"
"checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d"
"checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a"


@ -1,6 +1,6 @@
pub type Spanned<Tok, Loc, Error> = Result<(Loc, Tok, Loc), Error>;
#[derive(Debug)]
#[derive(Copy, Clone, Debug)]
pub enum Tok {
Space,
Tab,


@ -5,6 +5,7 @@ fn main() {
.emit_comments(true)
.force_build(true)
.unit_test()
.log_debug()
.process_current_dir()
.unwrap();
}
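Put together, the whole build script stays small. Here is a sketch of the full file after this change (the `extern crate` line is an assumption for 2015-edition Rust; all of the builder methods shown appear in the diff above, and `log_debug()` turns on LALRPOP's debug-level logging during generation):

// Sketch of the complete build.rs after this change.
extern crate lalrpop;

fn main() {
    lalrpop::Configuration::new()
        .emit_comments(true)   // keep comments in the generated parser
        .force_build(true)     // regenerate even if output looks up to date
        .unit_test()           // enable test-only features of the generator
        .log_debug()           // newly added: verbose logging while generating
        .process_current_dir()
        .unwrap();
}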


@ -114,12 +114,6 @@ lalrpop_mod!(error_issue_278);
/// test for generic macros issue #417.
lalrpop_mod!(generics_issue_417);
// Check that error recovery (which requires cloneable tokens) is not created if it is not used
lalrpop_mod!(
#[allow(unused)]
no_clone_tok
);
lalrpop_mod!(
#[deny(overflowing_literals)]
#[allow(unused)]


@ -1,17 +0,0 @@
use util::tok::{NoCloneTok, Tok};
use lalrpop_util::ParseError;
grammar;
extern {
type Location = usize;
type Error = char;
enum NoCloneTok {
"-" => NoCloneTok(Tok::Minus),
}
}
pub Item = {
"-"
};


@ -1,6 +1,8 @@
use std::error::Error;
use std::fmt;
pub mod state_machine;
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum ParseError<L, T, E> {
/// Generated by the parser when it encounters a token (or EOF) it did not


@ -0,0 +1,672 @@
#![allow(dead_code)]
use std::fmt::Debug;
const DEBUG_ENABLED: bool = false;
macro_rules! debug {
($($args:expr),* $(,)*) => {
if DEBUG_ENABLED {
eprintln!($($args),*);
}
}
}
pub trait ParserDefinition: Sized {
/// Represents a location in the input text. If you are using the
/// default tokenizer, this will be a `usize`.
type Location: Clone + Debug;
/// Represents a "user error" -- this can get produced by
/// `reduce()` if the grammar includes `=>?` actions.
type Error;
/// The type emitted by the user's tokenizer (excluding the
/// location information).
type Token: Clone + Debug;
/// We assign a unique integer to each token in the grammar, which
/// we call its *index*. When we pull in a new `Token` from the
/// input, we then match against it to determine its index. Note
/// that the actual `Token` is retained too, as it may carry
/// additional information (e.g., an `ID` terminal often has a
/// string value associated with it; this is not important to the
/// parser, but the semantic analyzer will want it).
type TokenIndex: Copy + Clone + Debug;
/// The type representing things on the LALRPOP stack. Represents
/// the union of terminals and nonterminals.
type Symbol;
/// Type produced by reducing the start symbol.
type Success;
/// Identifies a state. Typically an i8, i16, or i32 (depending on
/// how many states you have).
type StateIndex: Copy + Clone + Debug;
/// Identifies an action.
type Action: ParserAction<Self>;
/// Identifies a reduction.
type ReduceIndex: Copy + Clone + Debug;
/// Identifies a nonterminal.
type NonterminalIndex: Copy + Clone + Debug;
/// Returns a location representing the "start of the input".
fn start_location(&self) -> Self::Location;
/// Returns the initial state.
fn start_state(&self) -> Self::StateIndex;
/// Converts the user's tokens into an internal index; this index
/// is then used to index into actions and the like. When using an
/// internal tokenizer, these indices are directly produced. When
/// using an **external** tokenizer, however, this function matches
/// against the patterns given by the user: it is therefore
/// fallible, as these patterns may not be exhaustive. If a token
/// value is found that doesn't match any of the patterns the user
/// supplied, then this function returns `None`, which is
/// translated into a parse error by LALRPOP ("unrecognized
/// token").
fn token_to_index(&self, token: &Self::Token) -> Option<Self::TokenIndex>;
/// Given the top-most state and the pending terminal, returns an
/// action. This can be either SHIFT(state), REDUCE(action), or
/// ERROR.
fn action(&self, state: Self::StateIndex, token_index: Self::TokenIndex) -> Self::Action;
/// Returns the action to take if an error occurs in the given
/// state. This function is the same as the ordinary `action`,
/// except that it applies not to the user's terminals but to the
/// "special terminal" `!`.
fn error_action(&self, state: Self::StateIndex) -> Self::Action;
/// Action to take if EOF occurs in the given state. This function
/// is the same as the ordinary `action`, except that it applies
/// not to the user's terminals but to the "special terminal" `$`.
fn eof_action(&self, state: Self::StateIndex) -> Self::Action;
/// If we reduce to a nonterminal in the given state, what state
/// do we go to? This is infallible due to the nature of LR(1)
/// grammars.
fn goto(&self, state: Self::StateIndex, nt: Self::NonterminalIndex) -> Self::StateIndex;
/// "Upcast" a terminal into a symbol so we can push it onto the
/// parser stack.
fn token_to_symbol(&self, token_index: Self::TokenIndex, token: Self::Token) -> Self::Symbol;
/// Returns the expected tokens in a given state. This is used for
/// error reporting.
fn expected_tokens(&self, state: Self::StateIndex) -> Vec<String>;
/// True if this grammar supports error recovery.
fn uses_error_recovery(&self) -> bool;
/// Given error information, creates an error recovery symbol that
/// we push onto the stack (and supply to user actions).
fn error_recovery_symbol(&self, recovery: ErrorRecovery<Self>) -> Self::Symbol;
/// Execute a reduction in the given state: that is, execute user
/// code. The start location indicates the "starting point" of the
/// current lookahead that is triggering the reduction (it is
/// `None` for EOF).
///
/// The `states` and `symbols` vectors represent the internal
/// state machine vectors; they are given to `reduce` so that it
/// can pop off states that no longer apply (and consume their
/// symbols). At the end, it should also push the new state and
/// symbol produced.
///
/// Returns a `Some` if we reduced the start state and hence
/// parsing is complete, or if we encountered an irrecoverable
/// error.
///
/// FIXME. It would be nice to not have so much logic live in
/// reduce. It should just be given an iterator of popped symbols
/// and return the newly produced symbol (or error). We can use
/// `simulate_reduce` and our own information to drive the rest,
/// right? This would also allow us -- I think -- to extend error
/// recovery to cover user-produced errors.
fn reduce(
&mut self,
reduce_index: Self::ReduceIndex,
start_location: Option<&Self::Location>,
states: &mut Vec<Self::StateIndex>,
symbols: &mut Vec<SymbolTriple<Self>>,
) -> Option<ParseResult<Self>>;
/// Returns information about how many states will be popped
/// during a reduction, and what nonterminal would be produced as
/// a result.
fn simulate_reduce(&self, action: Self::ReduceIndex) -> SimulatedReduce<Self>;
}
pub trait ParserAction<D: ParserDefinition>: Copy + Clone + Debug {
fn as_shift(self) -> Option<D::StateIndex>;
fn as_reduce(self) -> Option<D::ReduceIndex>;
fn is_shift(self) -> bool;
fn is_reduce(self) -> bool;
fn is_error(self) -> bool;
}
pub enum SimulatedReduce<D: ParserDefinition> {
Reduce {
states_to_pop: usize,
nonterminal_produced: D::NonterminalIndex,
},
// This reduce is the "start" fn, so the parse is done.
Accept,
}
// These aliases are an elaborate hack to get around
// the warnings when you define a type alias like `type Foo<D: Trait>`
#[doc(hidden)]
pub type Location<D> = <D as ParserDefinition>::Location;
#[doc(hidden)]
pub type Token<D> = <D as ParserDefinition>::Token;
#[doc(hidden)]
pub type Error<D> = <D as ParserDefinition>::Error;
#[doc(hidden)]
pub type Success<D> = <D as ParserDefinition>::Success;
#[doc(hidden)]
pub type Symbol<D> = <D as ParserDefinition>::Symbol;
pub type ParseError<D> = ::ParseError<Location<D>, Token<D>, Error<D>>;
pub type ParseResult<D> = Result<Success<D>, ParseError<D>>;
pub type TokenTriple<D> = (Location<D>, Token<D>, Location<D>);
pub type SymbolTriple<D> = (Location<D>, Symbol<D>, Location<D>);
pub type ErrorRecovery<D> = ::ErrorRecovery<Location<D>, Token<D>, Error<D>>;
pub struct Parser<D, I>
where
D: ParserDefinition,
I: Iterator<Item = Result<TokenTriple<D>, ParseError<D>>>,
{
definition: D,
tokens: I,
states: Vec<D::StateIndex>,
symbols: Vec<SymbolTriple<D>>,
last_location: D::Location,
}
enum NextToken<D: ParserDefinition> {
FoundToken(TokenTriple<D>, D::TokenIndex),
EOF,
Done(ParseResult<D>),
}
impl<D, I> Parser<D, I>
where
D: ParserDefinition,
I: Iterator<Item = Result<TokenTriple<D>, ParseError<D>>>,
{
pub fn drive(definition: D, tokens: I) -> ParseResult<D> {
let last_location = definition.start_location();
let start_state = definition.start_state();
Parser {
definition,
tokens,
states: vec![start_state],
symbols: vec![],
last_location,
}.parse()
}
fn top_state(&self) -> D::StateIndex {
*self.states.last().unwrap()
}
fn parse(&mut self) -> ParseResult<D> {
// Outer loop: each time we continue around this loop, we
// shift a new token from the input. We break from the loop
// when the end of the input is reached (we return early if an
// error occurs).
'shift: loop {
let (mut lookahead, mut token_index) = match self.next_token() {
NextToken::FoundToken(l, i) => (l, i),
NextToken::EOF => return self.parse_eof(),
NextToken::Done(e) => return e,
};
debug!("+ SHIFT: {:?}", lookahead);
debug!("\\ token_index: {:?}", token_index);
'inner: loop {
let top_state = self.top_state();
let action = self.definition.action(top_state, token_index);
debug!("\\ action: {:?}", action);
if let Some(target_state) = action.as_shift() {
debug!("\\ shift to: {:?}", target_state);
// Shift and transition to state `action - 1`
let symbol = self.definition.token_to_symbol(token_index, lookahead.1);
self.states.push(target_state);
self.symbols.push((lookahead.0, symbol, lookahead.2));
continue 'shift;
} else if let Some(reduce_index) = action.as_reduce() {
debug!("\\ reduce to: {:?}", reduce_index);
if let Some(r) = self.definition.reduce(
reduce_index,
Some(&lookahead.0),
&mut self.states,
&mut self.symbols,
) {
return match r {
// we reached EOF, but still have lookahead
Ok(_) => Err(::ParseError::ExtraToken { token: lookahead }),
Err(e) => Err(e),
};
}
} else {
debug!("\\ error -- initiating error recovery!");
match self.error_recovery(Some(lookahead), Some(token_index)) {
NextToken::FoundToken(l, i) => {
lookahead = l;
token_index = i;
continue 'inner;
}
NextToken::EOF => return self.parse_eof(),
NextToken::Done(e) => return e,
}
}
}
}
}
/// Invoked when we have no more tokens to consume.
fn parse_eof(&mut self) -> ParseResult<D> {
loop {
let top_state = self.top_state();
let action = self.definition.eof_action(top_state);
if let Some(reduce_index) = action.as_reduce() {
if let Some(result) =
self.definition
.reduce(reduce_index, None, &mut self.states, &mut self.symbols)
{
return result;
}
} else {
match self.error_recovery(None, None) {
NextToken::FoundToken(..) => panic!("cannot find token at EOF"),
NextToken::Done(e) => return e,
NextToken::EOF => continue,
}
}
}
}
fn error_recovery(
&mut self,
mut opt_lookahead: Option<TokenTriple<D>>,
mut opt_token_index: Option<D::TokenIndex>,
) -> NextToken<D> {
debug!(
"\\+ error_recovery(opt_lookahead={:?}, opt_token_index={:?})",
opt_lookahead,
opt_token_index,
);
if !self.definition.uses_error_recovery() {
debug!("\\ error -- no error recovery!");
return NextToken::Done(Err(self.unrecognized_token_error(
opt_lookahead,
self.top_state(),
)));
}
let error = self.unrecognized_token_error(opt_lookahead.clone(), self.top_state());
let mut dropped_tokens = vec![];
// We are going to insert ERROR into the lookahead. So, first,
// perform all reductions from current state triggered by having
// ERROR in the lookahead.
loop {
let state = self.top_state();
let action = self.definition.error_action(state);
if let Some(reduce_index) = action.as_reduce() {
debug!("\\\\ reducing: {:?}", reduce_index);
if let Some(result) =
self.reduce(reduce_index, opt_lookahead.as_ref().map(|l| &l.0))
{
debug!("\\\\ reduced to a result");
return NextToken::Done(result);
}
} else {
break;
}
}
// Now try to find the recovery state.
let states_len = self.states.len();
let top = 'find_state: loop {
// Go backwards through the states...
debug!(
"\\\\+ error_recovery: find_state loop, {:?} states = {:?}",
self.states.len(),
self.states,
);
for top in (0..states_len).rev() {
let state = self.states[top];
debug!("\\\\\\ top = {:?}, state = {:?}", top, state);
// ...fetch action for error token...
let action = self.definition.error_action(state);
debug!("\\\\\\ action = {:?}", action);
if let Some(error_state) = action.as_shift() {
// If action is a shift that takes us into `error_state`,
// and `error_state` can accept this lookahead, we are done.
if self.accepts(error_state, &self.states[..top + 1], opt_token_index) {
debug!("\\\\\\ accepted!");
break 'find_state top;
}
} else {
// ...else, if action is error or reduce, go to next state.
continue;
}
}
// Otherwise, if we couldn't find a state that would --
// after shifting the error token -- accept the lookahead,
// then drop the lookahead and advance to the next token in
// the input.
match opt_lookahead.take() {
// If the lookahead is EOF, we can't drop any more
// tokens; abort error recovery and just report the
// original error (it might be nice if we would
// propagate back the dropped tokens, though).
None => {
debug!("\\\\\\ no more lookahead, report error");
return NextToken::Done(Err(error));
}
// Else, drop the current token and shift to the
// next. If there is a next token, we will `continue`
// to the start of the `'find_state` loop.
Some(lookahead) => {
debug!("\\\\\\ dropping lookahead token");
dropped_tokens.push(lookahead);
match self.next_token() {
NextToken::FoundToken(next_lookahead, next_token_index) => {
opt_lookahead = Some(next_lookahead);
opt_token_index = Some(next_token_index);
}
NextToken::EOF => {
debug!("\\\\\\ reached EOF");
opt_lookahead = None;
opt_token_index = None;
}
NextToken::Done(e) => {
debug!("\\\\\\ no more tokens");
return NextToken::Done(e);
}
}
}
}
};
// If we get here, we are ready to push the error recovery state.
// We have to compute the span for the error recovery
// token. We do this first, before we pop any symbols off the
// stack. There are several possibilities, in order of
// preference.
//
// For the **start** of the span, we prefer to use the start of any
// popped states. This represents parts of the input we had consumed but
// had to roll back and ignore.
//
// Example:
//
// a + (b + /)
// ^ start point is here, since this `+` will be popped off
//
// If there are no popped states, but there *are* dropped tokens, we can use
// the start of those.
//
// Example:
//
// a + (b + c e)
// ^ start point would be here
//
// Finally, if there are no popped states *nor* dropped tokens, we can use
// the end of the top-most state.
let start = if let Some(popped_sym) = self.symbols.get(top) {
popped_sym.0.clone()
} else if let Some(dropped_token) = dropped_tokens.first() {
dropped_token.0.clone()
} else if top > 0 {
self.symbols[top - 1].2.clone()
} else {
self.definition.start_location()
};
// For the **end** of the span, here are the possibilities:
//
// We prefer to use the end of the last dropped token.
//
// Examples:
//
// a + (b + /)
// ---
// a + (b c)
// -
//
// But, if there are no dropped tokens, we will use the end of the popped states,
// if any:
//
// a + /
// -
//
// If there are neither dropped tokens *nor* popped states,
// then the user is simulating insertion of an operator. In
// this case, we prefer the start of the lookahead, but
// fall back to the start if we are at EOF.
//
// Examples:
//
// a + (b c)
// -
let end = if let Some(dropped_token) = dropped_tokens.last() {
dropped_token.2.clone()
} else if states_len - 1 > top {
self.symbols.last().unwrap().2.clone()
} else if let Some(lookahead) = opt_lookahead.as_ref() {
lookahead.0.clone()
} else {
start.clone()
};
self.states.truncate(top + 1);
self.symbols.truncate(top);
let recover_state = self.states[top];
let error_action = self.definition.error_action(recover_state);
let error_state = error_action.as_shift().unwrap();
self.states.push(error_state);
let recovery = self.definition.error_recovery_symbol(::ErrorRecovery {
error: error,
dropped_tokens: dropped_tokens,
});
self.symbols.push((start, recovery, end));
match (opt_lookahead, opt_token_index) {
(Some(l), Some(i)) => NextToken::FoundToken(l, i),
(None, None) => NextToken::EOF,
(l, i) => panic!("lookahead and token_index mismatched: {:?}, {:?}", l, i),
}
}
/// The `accepts` function has the job of figuring out whether the
/// given error state would "accept" the given lookahead. We
/// basically trace through the LR automaton looking for one of
/// two outcomes:
///
/// - the lookahead is eventually shifted
/// - we reduce to the end state successfully (in the case of EOF).
///
/// If we used the pure LR(1) algorithm, we wouldn't need this
/// function, because we would be guaranteed to error immediately
/// (and not after some number of reductions). But with an LALR
/// (or Lane Table) generated automaton, it is possible to reduce
/// some number of times before encountering an error. Failing to
/// take this into account can lead error recovery into an
/// infinite loop (see the `error_recovery_lalr_loop` test) or
/// produce crappy results (see `error_recovery_lock_in`).
fn accepts(
&self,
error_state: D::StateIndex,
states: &[D::StateIndex],
opt_token_index: Option<D::TokenIndex>,
) -> bool {
debug!(
"\\\\\\+ accepts(error_state={:?}, states={:?}, opt_token_index={:?})",
error_state,
states,
opt_token_index,
);
let mut states = states.to_vec();
states.push(error_state);
loop {
let mut states_len = states.len();
let top = states[states_len - 1];
let action = match opt_token_index {
None => self.definition.eof_action(top),
Some(i) => self.definition.action(top, i),
};
// If we encounter an error action, we do **not** accept.
if action.is_error() {
debug!("\\\\\\\\ accepts: error");
return false;
}
// If we encounter a reduce action, we need to simulate its
// effect on the state stack.
if let Some(reduce_action) = action.as_reduce() {
match self.definition.simulate_reduce(reduce_action) {
SimulatedReduce::Reduce {
states_to_pop,
nonterminal_produced,
} => {
states_len -= states_to_pop;
states.truncate(states_len);
let top = states[states_len - 1];
let next_state = self.definition.goto(top, nonterminal_produced);
states.push(next_state);
}
SimulatedReduce::Accept => {
debug!("\\\\\\\\ accepts: reduce accepts!");
return true;
}
}
} else {
// If we encounter a shift action, we DO accept.
debug!("\\\\\\\\ accepts: shift accepts!");
assert!(action.is_shift());
return true;
}
}
}
fn reduce(
&mut self,
action: D::ReduceIndex,
lookahead_start: Option<&D::Location>,
) -> Option<ParseResult<D>> {
self.definition
.reduce(action, lookahead_start, &mut self.states, &mut self.symbols)
}
fn unrecognized_token_error(
&self,
token: Option<TokenTriple<D>>,
top_state: D::StateIndex,
) -> ParseError<D> {
::ParseError::UnrecognizedToken {
token: token,
expected: self.definition.expected_tokens(top_state),
}
}
/// Consume the next token from the input and classify it into a
/// token index. Classification can fail with an error. If there
/// are no more tokens, signal EOF.
fn next_token(&mut self) -> NextToken<D> {
let token = match self.tokens.next() {
Some(Ok(v)) => v,
Some(Err(e)) => return NextToken::Done(Err(e)),
None => return NextToken::EOF,
};
self.last_location = token.2.clone();
let token_index = match self.definition.token_to_index(&token.1) {
Some(i) => i,
None => {
return NextToken::Done(Err(
self.unrecognized_token_error(Some(token), self.top_state())
))
}
};
NextToken::FoundToken(token, token_index)
}
}
/// In LALRPOP generated rules, we actually use `i32`, `i16`, or `i8`
/// to represent all of the various indices (we use the smallest one
/// that will fit). So implement `ParserAction` for each of those.
macro_rules! integral_indices {
($t:ty) => {
impl<D: ParserDefinition<StateIndex = $t, ReduceIndex = $t>> ParserAction<D> for $t {
fn as_shift(self) -> Option<D::StateIndex> {
if self > 0 {
Some(self - 1)
} else {
None
}
}
fn as_reduce(self) -> Option<D::ReduceIndex> {
if self < 0 {
Some(-(self + 1))
} else {
None
}
}
fn is_shift(self) -> bool {
self > 0
}
fn is_reduce(self) -> bool {
self < 0
}
fn is_error(self) -> bool {
self == 0
}
}
};
}
integral_indices!(i32);
integral_indices!(i16);
integral_indices!(i8);
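Concretely, the encoding implemented by this macro stores a shift to state `s` as `s + 1`, a reduction with index `r` as `-(r + 1)`, and an error as `0`. A small standalone sketch of decoding such values (illustration only, not part of the crate):

fn describe_action(action: i16) -> String {
    if action > 0 {
        format!("shift to state {}", action - 1)
    } else if action < 0 {
        format!("reduce with production {}", -(action + 1))
    } else {
        String::from("error")
    }
}

fn main() {
    assert_eq!(describe_action(5), "shift to state 4");
    assert_eq!(describe_action(-3), "reduce with production 2");
    assert_eq!(describe_action(0), "error");
}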


@ -97,8 +97,7 @@ fn emit_user_action_code<W: Write>(
.iter()
.cloned()
.map(|t| grammar.types.spanned_type(t)),
)
.map(|(p, t)| format!("(_, {}, _): {}", p, t))
).map(|(p, t)| format!("(_, {}, _): {}", p, t))
.collect();
// If this is a reduce of an empty production, we will
@ -120,16 +119,15 @@ fn emit_user_action_code<W: Write>(
]);
}
try!(rust.write_fn_header(
grammar,
&r::Visibility::Priv,
format!("{}action{}", grammar.prefix, index),
vec![],
None,
arguments,
ret_type,
vec![]
));
try!(
rust.fn_header(
&r::Visibility::Priv,
format!("{}action{}", grammar.prefix, index),
).with_grammar(grammar)
.with_parameters(arguments)
.with_return_type(ret_type)
.emit()
);
rust!(rust, "{{");
rust!(rust, "{}", data.code);
rust!(rust, "}}");
@ -143,13 +141,12 @@ fn emit_lookaround_action_code<W: Write>(
_defn: &r::ActionFnDefn,
data: &r::LookaroundActionFnDefn,
) -> io::Result<()> {
try!(rust.write_fn_header(
grammar,
&r::Visibility::Priv,
format!("{}action{}", grammar.prefix, index),
vec![],
None,
vec![
try!(
rust.fn_header(
&r::Visibility::Priv,
format!("{}action{}", grammar.prefix, index),
).with_grammar(grammar)
.with_parameters(vec![
format!(
"{}lookbehind: &{}",
grammar.prefix,
@ -160,10 +157,9 @@ fn emit_lookaround_action_code<W: Write>(
grammar.prefix,
grammar.types.terminal_loc_type()
),
],
format!("{}", grammar.types.terminal_loc_type()),
vec![]
));
]).with_return_type(format!("{}", grammar.types.terminal_loc_type()))
.emit()
);
rust!(rust, "{{");
match *data {
@ -198,8 +194,7 @@ fn emit_inline_action_code<W: Write>(
.flat_map(|sym| match *sym {
r::InlinedSymbol::Original(ref s) => vec![s.clone()],
r::InlinedSymbol::Inlined(_, ref syms) => syms.clone(),
})
.map(|s| s.ty(&grammar.types))
}).map(|s| s.ty(&grammar.types))
.collect();
// this is the number of symbols we expect to be passed in; it is
@ -231,16 +226,15 @@ fn emit_inline_action_code<W: Write>(
]);
}
try!(rust.write_fn_header(
grammar,
&r::Visibility::Priv,
format!("{}action{}", grammar.prefix, index),
vec![],
None,
arguments,
ret_type,
vec![]
));
try!(
rust.fn_header(
&r::Visibility::Priv,
format!("{}action{}", grammar.prefix, index),
).with_grammar(grammar)
.with_parameters(arguments)
.with_return_type(ret_type)
.emit()
);
rust!(rust, "{{");
// For each inlined thing, compute the start/end locations.


@ -458,6 +458,14 @@ fn emit_to_triple_trait<W: Write>(grammar: &r::Grammar, rust: &mut RustWrite<W>)
let T = grammar.types.terminal_token_type();
let E = grammar.types.error_type();
let parse_error = format!(
"{p}lalrpop_util::ParseError<{L}, {T}, {E}>",
p = grammar.prefix,
L = L,
T = T,
E = E,
);
let mut user_type_parameters = String::new();
for type_parameter in &grammar.type_parameters {
user_type_parameters.push_str(&format!("{}, ", type_parameter));
@ -470,13 +478,12 @@ fn emit_to_triple_trait<W: Write>(grammar: &r::Grammar, rust: &mut RustWrite<W>)
grammar.prefix,
user_type_parameters,
);
rust!(rust, "type Error;");
rust!(
rust,
"fn to_triple(value: Self) -> Result<({},{},{}),Self::Error>;",
L,
T,
L,
"fn to_triple(value: Self) -> Result<({L},{T},{L}), {parse_error}>;",
L = L,
T = T,
parse_error = parse_error,
);
rust!(rust, "}}");
@ -484,22 +491,18 @@ fn emit_to_triple_trait<W: Write>(grammar: &r::Grammar, rust: &mut RustWrite<W>)
if grammar.types.opt_terminal_loc_type().is_some() {
rust!(
rust,
"impl<{}> {}ToTriple<{}> for ({}, {}, {}) {{",
user_type_parameters,
grammar.prefix,
user_type_parameters,
L,
T,
L,
"impl<{utp}> {p}ToTriple<{utp}> for ({L}, {T}, {L}) {{",
p = grammar.prefix,
utp = user_type_parameters,
L = L,
T = T,
);
rust!(rust, "type Error = {};", E);
rust!(
rust,
"fn to_triple(value: Self) -> Result<({},{},{}),{}> {{",
L,
T,
L,
E,
"fn to_triple(value: Self) -> Result<({L},{T},{L}), {parse_error}> {{",
L = L,
T = T,
parse_error = parse_error,
);
rust!(rust, "Ok(value)");
rust!(rust, "}}");
@ -507,42 +510,40 @@ fn emit_to_triple_trait<W: Write>(grammar: &r::Grammar, rust: &mut RustWrite<W>)
rust!(
rust,
"impl<{}> {}ToTriple<{}> for Result<({}, {}, {}),{}> {{",
user_type_parameters,
grammar.prefix,
user_type_parameters,
L,
T,
L,
E,
"impl<{utp}> {p}ToTriple<{utp}> for Result<({L}, {T}, {L}), {E}> {{",
utp = user_type_parameters,
p = grammar.prefix,
L = L,
T = T,
E = E,
);
rust!(rust, "type Error = {};", E);
rust!(
rust,
"fn to_triple(value: Self) -> Result<({},{},{}),{}> {{",
L,
T,
L,
E,
"fn to_triple(value: Self) -> Result<({L},{T},{L}), {parse_error}> {{",
L = L,
T = T,
parse_error = parse_error,
);
rust!(rust, "value");
rust!(rust, "match value {{");
rust!(rust, "Ok(v) => Ok(v),");
rust!(rust, "Err(error) => Err({p}lalrpop_util::ParseError::User {{ error }}),",
p = grammar.prefix);
rust!(rust, "}}"); // match
rust!(rust, "}}");
rust!(rust, "}}");
} else {
rust!(
rust,
"impl<{}> {}ToTriple<{}> for {} {{",
user_type_parameters,
grammar.prefix,
user_type_parameters,
T,
"impl<{utp}> {p}ToTriple<{utp}> for {T} {{",
utp = user_type_parameters,
p = grammar.prefix,
T = T,
);
rust!(rust, "type Error = {};", E);
rust!(
rust,
"fn to_triple(value: Self) -> Result<((),{},()),{}> {{",
T,
E,
"fn to_triple(value: Self) -> Result<((),{T},()), {parse_error}> {{",
T = T,
parse_error = parse_error,
);
rust!(rust, "Ok(((), value, ()))");
rust!(rust, "}}");
@ -550,23 +551,25 @@ fn emit_to_triple_trait<W: Write>(grammar: &r::Grammar, rust: &mut RustWrite<W>)
rust!(
rust,
"impl<{}> {}ToTriple<{}> for Result<({}),{}> {{",
user_type_parameters,
grammar.prefix,
user_type_parameters,
T,
E,
"impl<{utp}> {p}ToTriple<{utp}> for Result<({T}),{E}> {{",
utp = user_type_parameters,
p = grammar.prefix,
T = T,
E = E,
);
rust!(rust, "type Error = {};", E);
rust!(
rust,
"fn to_triple(value: Self) -> Result<((),{},()),{}> {{",
T,
E,
"fn to_triple(value: Self) -> Result<((),{T},()), {parse_error}> {{",
T = T,
parse_error = parse_error,
);
rust!(rust, "value.map(|v| ((), v, ()))");
rust!(rust, "}}");
rust!(rust, "}}");
rust!(rust, "match value {{");
rust!(rust, "Ok(v) => Ok(((), v, ())),");
rust!(rust, "Err(error) => Err({p}lalrpop_util::ParseError::User {{ error }}),",
p = grammar.prefix);
rust!(rust, "}}"); // match
rust!(rust, "}}"); // fn
rust!(rust, "}}"); // impl
}
Ok(())


@ -0,0 +1,173 @@
use grammar::parse_tree::{self, Lifetime, TypeParameter};
use grammar::repr;
use std::iter;
use string_cache::DefaultAtom as Atom;
mod test;
/// Finds the set of "free variables" in something -- that is, the
/// type/lifetime parameters that appear and are not bound. For
/// example, `T: Foo<U>` would return `[T, U]`.
pub trait FreeVariables {
fn free_variables(&self, type_parameters: &[TypeParameter]) -> Vec<TypeParameter>;
}
/// Subtle: the free-variables code sometimes encounters ambiguous
/// names. For example, we might see `Vec<Foo>` -- in that case, we
/// look at the list of declared type parameters to decide whether
/// `Foo` is a type parameter or just some other type name.
fn free_type(type_parameters: &[TypeParameter], id: &Atom) -> Vec<TypeParameter> {
let tp = TypeParameter::Id(id.clone());
if type_parameters.contains(&tp) {
vec![tp]
} else {
vec![]
}
}
/// Same as above: really, the only lifetime where this is relevant is
/// `'static`, but it doesn't hurt to be careful.
fn free_lifetime(type_parameters: &[TypeParameter], lt: &Lifetime) -> Vec<TypeParameter> {
let tp = TypeParameter::Lifetime(lt.clone());
if type_parameters.contains(&tp) {
vec![tp]
} else {
vec![]
}
}
impl<T: FreeVariables> FreeVariables for Option<T> {
fn free_variables(&self, type_parameters: &[TypeParameter]) -> Vec<TypeParameter> {
match self {
None => vec![],
Some(t) => t.free_variables(type_parameters),
}
}
}
impl<T: FreeVariables> FreeVariables for Vec<T> {
fn free_variables(&self, type_parameters: &[TypeParameter]) -> Vec<TypeParameter> {
self.into_iter()
.flat_map(|e| e.free_variables(type_parameters))
.collect()
}
}
impl FreeVariables for repr::TypeRepr {
fn free_variables(&self, type_parameters: &[TypeParameter]) -> Vec<TypeParameter> {
match self {
repr::TypeRepr::Tuple(tys) => tys.free_variables(type_parameters),
repr::TypeRepr::Nominal(data) => data.free_variables(type_parameters),
repr::TypeRepr::Associated {
type_parameter,
id: _,
} => free_type(type_parameters, type_parameter),
repr::TypeRepr::Lifetime(l) => free_lifetime(type_parameters, l),
repr::TypeRepr::Ref {
lifetime,
mutable: _,
referent,
} => lifetime
.iter()
.map(|id| TypeParameter::Lifetime(id.clone()))
.chain(referent.free_variables(type_parameters))
.collect(),
}
}
}
impl FreeVariables for repr::WhereClause {
fn free_variables(&self, type_parameters: &[TypeParameter]) -> Vec<TypeParameter> {
match self {
repr::WhereClause::Forall { binder, clause } => clause
.free_variables(type_parameters)
.into_iter()
.filter(|tp| !binder.contains(tp))
.collect(),
repr::WhereClause::Bound { subject, bound } => subject
.free_variables(type_parameters)
.into_iter()
.chain(bound.free_variables(type_parameters))
.collect(),
}
}
}
impl FreeVariables for parse_tree::Path {
fn free_variables(&self, type_parameters: &[TypeParameter]) -> Vec<TypeParameter> {
// A path like `foo::Bar` is considered to have no free variables;
// a single identifier like `T` is the free variable `T`. Note
// that we can't distinguish type parameters from random names
// like `String`.
match self.as_id() {
Some(id) => free_type(type_parameters, &id),
None => vec![],
}
}
}
impl FreeVariables for repr::NominalTypeRepr {
fn free_variables(&self, type_parameters: &[TypeParameter]) -> Vec<TypeParameter> {
let repr::NominalTypeRepr { path, types } = self;
path.free_variables(type_parameters)
.into_iter()
.chain(types.free_variables(type_parameters))
.collect()
}
}
impl<T: FreeVariables> FreeVariables for parse_tree::WhereClause<T> {
fn free_variables(&self, type_parameters: &[TypeParameter]) -> Vec<TypeParameter> {
match self {
parse_tree::WhereClause::Lifetime { lifetime, bounds } => {
iter::once(TypeParameter::Lifetime(lifetime.clone()))
.chain(bounds.iter().map(|l| TypeParameter::Lifetime(l.clone())))
.collect()
}
parse_tree::WhereClause::Type { forall, ty, bounds } => ty
.free_variables(type_parameters)
.into_iter()
.chain(bounds.free_variables(type_parameters))
.filter(|tp| !forall.contains(tp))
.collect(),
}
}
}
impl<T: FreeVariables> FreeVariables for parse_tree::TypeBoundParameter<T> {
fn free_variables(&self, type_parameters: &[TypeParameter]) -> Vec<TypeParameter> {
match self {
parse_tree::TypeBoundParameter::Lifetime(l) => free_lifetime(type_parameters, l),
parse_tree::TypeBoundParameter::TypeParameter(t) => t.free_variables(type_parameters),
parse_tree::TypeBoundParameter::Associated(..) => vec![],
}
}
}
impl<T: FreeVariables> FreeVariables for parse_tree::TypeBound<T> {
fn free_variables(&self, type_parameters: &[TypeParameter]) -> Vec<TypeParameter> {
match self {
parse_tree::TypeBound::Lifetime(l) => free_lifetime(type_parameters, l),
parse_tree::TypeBound::Fn {
forall,
path: _,
parameters,
ret,
} => parameters.free_variables(type_parameters)
.into_iter()
.chain(ret.free_variables(type_parameters))
.filter(|tp| !forall.contains(tp))
.collect(),
parse_tree::TypeBound::Trait {
forall,
path: _,
parameters,
} => parameters.free_variables(type_parameters)
.into_iter()
.filter(|tp| !forall.contains(tp))
.collect(),
}
}
}
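The same idea in a self-contained toy form (a hypothetical `ToyType`, not LALRPOP's `TypeRepr`): walk the type and keep only the names that occur in the declared parameter list, exactly as `free_type` does above.

#[derive(Debug)]
enum ToyType {
    Name(String),                // `T`, or a plain type like `String`
    Apply(String, Vec<ToyType>), // `Foo<U, V>`
}

fn free_vars(ty: &ToyType, declared: &[&str]) -> Vec<String> {
    match ty {
        ToyType::Name(n) => {
            if declared.contains(&n.as_str()) {
                vec![n.clone()]
            } else {
                vec![] // `String` etc.: not a declared parameter, so not free
            }
        }
        ToyType::Apply(_, args) => {
            args.iter().flat_map(|a| free_vars(a, declared)).collect()
        }
    }
}

fn main() {
    // `Vec<T>` with declared parameters [T] has the single free variable T.
    let ty = ToyType::Apply("Vec".into(), vec![ToyType::Name("T".into())]);
    assert_eq!(free_vars(&ty, &["T"]), vec!["T".to_string()]);
}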


@ -0,0 +1,30 @@
#![cfg(test)]
use grammar::free_variables::FreeVariables;
use test_util::{expect_debug, normalized_grammar};
use tls::Tls;
#[test]
fn other_names() {
// Check that `Foo` does not end up in the list of free variables.
let _tls = Tls::test();
let grammar = normalized_grammar(r#"
grammar<'a, T>(x: &'a mut Foo, y: Vec<T>);
pub Foo: () = ();
"#);
let p0 = &grammar.parameters[0];
expect_debug(p0.ty.free_variables(&grammar.type_parameters), "[
Lifetime(
'a
)
]");
let p1 = &grammar.parameters[1];
expect_debug(p1.ty.free_variables(&grammar.type_parameters), "[
Id(
Atom('T' type=inline)
)
]");
}


@ -1,6 +1,7 @@
//! The grammar definition.
pub mod consts;
pub mod free_variables;
pub mod parse_tree;
pub mod pattern;
pub mod repr;


@ -1,7 +1,7 @@
//! The "parse-tree" is what is produced by the parser. We use it do
//! some pre-expansion and so forth before creating the proper AST.
use grammar::consts::{LALR, RECURSIVE_ASCENT, TABLE_DRIVEN, TEST_ALL};
use grammar::consts::{INPUT_LIFETIME, LALR, RECURSIVE_ASCENT, TABLE_DRIVEN, TEST_ALL};
use grammar::pattern::Pattern;
use grammar::repr::{self as r, NominalTypeRepr, TypeRepr};
use lexer::dfa::DFA;
@ -207,13 +207,13 @@ pub enum TypeRef {
},
Ref {
lifetime: Option<Atom>,
lifetime: Option<Lifetime>,
mutable: bool,
referent: Box<TypeRef>,
},
// 'x ==> should only appear within nominal types, but what do we care
Lifetime(Atom),
Lifetime(Lifetime),
// Foo or Bar ==> treated specially since macros may care
Id(Atom),
@ -226,58 +226,34 @@ pub enum TypeRef {
pub enum WhereClause<T> {
// 'a: 'b + 'c
Lifetime {
lifetime: Atom,
bounds: Vec<Atom>,
lifetime: Lifetime,
bounds: Vec<Lifetime>,
},
// where for<'a> &'a T: Debug + Into<usize>
Type {
forall: Option<Vec<Atom>>,
forall: Vec<TypeParameter>,
ty: T,
bounds: Vec<TypeBound<T>>,
},
}
impl<T> WhereClause<T> {
pub fn map<F, U>(&self, mut f: F) -> WhereClause<U>
where
F: FnMut(&T) -> U,
{
match *self {
WhereClause::Lifetime {
ref lifetime,
ref bounds,
} => WhereClause::Lifetime {
lifetime: lifetime.clone(),
bounds: bounds.clone(),
},
WhereClause::Type {
ref forall,
ref ty,
ref bounds,
} => WhereClause::Type {
forall: forall.clone(),
ty: f(ty),
bounds: bounds.iter().map(|b| b.map(&mut f)).collect(),
},
}
}
}
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum TypeBound<T> {
// The `'a` in `T: 'a`.
Lifetime(Atom),
Lifetime(Lifetime),
// `for<'a> FnMut(&'a usize)`
Fn {
forall: Option<Vec<Atom>>,
forall: Vec<TypeParameter>,
path: Path,
parameters: Vec<T>,
ret: Option<T>,
},
// `some::Trait` or `some::Trait<Param, ...>` or `some::Trait<Item = Assoc>`
// or `for<'a> Trait<'a, T>`
Trait {
forall: Option<Vec<Atom>>,
forall: Vec<TypeParameter>,
path: Path,
parameters: Vec<TypeBoundParameter<T>>,
},
@ -317,7 +293,7 @@ impl<T> TypeBound<T> {
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum TypeBoundParameter<T> {
// 'a
Lifetime(Atom),
Lifetime(Lifetime),
// `T` or `'a`
TypeParameter(T),
// `Item = T`
@ -341,19 +317,10 @@ impl<T> TypeBoundParameter<T> {
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum TypeParameter {
Lifetime(Atom),
Lifetime(Lifetime),
Id(Atom),
}
impl TypeParameter {
pub fn is_lifetime(&self) -> bool {
match *self {
TypeParameter::Lifetime(_) => true,
_ => false,
}
}
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Parameter {
pub name: Atom,
@ -547,6 +514,31 @@ impl Into<Box<Content>> for NonterminalString {
}
}
#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct Lifetime(pub Atom);
impl Lifetime {
pub fn anonymous() -> Self {
Lifetime(Atom::from("'_"))
}
pub fn is_anonymous(&self) -> bool {
*self == Self::anonymous()
}
pub fn statik() -> Self {
Lifetime(Atom::from("'static"))
}
pub fn input() -> Self {
Lifetime(Atom::from(INPUT_LIFETIME))
}
pub fn len(&self) -> usize {
self.0.len()
}
}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum RepeatOp {
Star,
@ -712,7 +704,7 @@ impl<T: Display> Display for WhereClause<T> {
ref ty,
ref bounds,
} => {
if let Some(ref forall) = *forall {
if !forall.is_empty() {
write!(fmt, "for<")?;
for (i, l) in forall.iter().enumerate() {
if i != 0 {
@ -746,7 +738,7 @@ impl<T: Display> Display for TypeBound<T> {
ref parameters,
ref ret,
} => {
if let Some(ref forall) = *forall {
if !forall.is_empty() {
write!(fmt, "for<")?;
for (i, l) in forall.iter().enumerate() {
if i != 0 {
@ -777,7 +769,7 @@ impl<T: Display> Display for TypeBound<T> {
ref path,
ref parameters,
} => {
if let Some(ref forall) = *forall {
if !forall.is_empty() {
write!(fmt, "for<")?;
for (i, l) in forall.iter().enumerate() {
if i != 0 {
@ -832,6 +824,18 @@ impl Debug for TerminalString {
}
}
impl Display for Lifetime {
fn fmt(&self, fmt: &mut Formatter) -> Result<(), Error> {
Display::fmt(&self.0, fmt)
}
}
impl Debug for Lifetime {
fn fmt(&self, fmt: &mut Formatter) -> Result<(), Error> {
Display::fmt(self, fmt)
}
}
impl Display for TerminalLiteral {
fn fmt(&self, fmt: &mut Formatter) -> Result<(), Error> {
match *self {


@ -5,6 +5,7 @@
*/
use collections::{map, Map};
use grammar::free_variables::FreeVariables;
use grammar::pattern::Pattern;
use message::Content;
use std::fmt::{Debug, Display, Error, Formatter};
@ -13,8 +14,8 @@ use util::Sep;
// These concepts we re-use wholesale
pub use grammar::parse_tree::{
Annotation, InternToken, NonterminalString, Path, Span, TerminalLiteral, TerminalString,
TypeParameter, Visibility, WhereClause,
Annotation, InternToken, Lifetime, NonterminalString, Path, Span, TerminalLiteral,
TerminalString, TypeBound, TypeParameter, Visibility,
};
#[derive(Clone, Debug)]
@ -45,7 +46,7 @@ pub struct Grammar {
pub parameters: Vec<Parameter>,
// where clauses declared on the grammar, like `grammar<T> where T: Sized`
pub where_clauses: Vec<WhereClause<TypeRepr>>,
pub where_clauses: Vec<WhereClause>,
// optional tokenizer DFA; this is only needed if the user did not supply
// an extern token declaration
@ -61,6 +62,21 @@ pub struct Grammar {
pub module_attributes: Vec<String>,
}
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum WhereClause {
// forall<'a> WC
Forall {
binder: Vec<TypeParameter>,
clause: Box<WhereClause>,
},
// `T: Foo`
Bound {
subject: TypeRepr,
bound: TypeBound<TypeRepr>,
},
}
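
As a standalone sketch (strings standing in for `TypeRepr`/`TypeBound`), `for<'a> &'a T: Debug` lowers to a `Forall` wrapping a `Bound`, and rendering recurses just as the `Display` impl later in this file does:

enum Wc {
    Forall { binder: Vec<String>, clause: Box<Wc> },
    Bound { subject: String, bound: String },
}

fn render(wc: &Wc) -> String {
    match wc {
        Wc::Forall { binder, clause } => {
            format!("for<{}> {}", binder.join(", "), render(clause))
        }
        Wc::Bound { subject, bound } => format!("{}: {}", subject, bound),
    }
}

fn main() {
    let wc = Wc::Forall {
        binder: vec!["'a".to_string()],
        clause: Box::new(Wc::Bound {
            subject: "&'a T".to_string(),
            bound: "Debug".to_string(),
        }),
    };
    assert_eq!(render(&wc), "for<'a> &'a T: Debug");
}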
/// For each terminal, we map it to a small integer from 0 to N.
/// This struct contains the mappings to go back and forth.
#[derive(Clone, Debug)]
@ -173,15 +189,25 @@ pub enum TypeRepr {
type_parameter: Atom,
id: Atom,
},
Lifetime(Atom),
Lifetime(Lifetime),
Ref {
lifetime: Option<Atom>,
lifetime: Option<Lifetime>,
mutable: bool,
referent: Box<TypeRepr>,
},
}
impl TypeRepr {
pub fn from_parameter(tp: &TypeParameter) -> Self {
match tp {
TypeParameter::Lifetime(l) => TypeRepr::Lifetime(l.clone()),
TypeParameter::Id(name) => TypeRepr::Nominal(NominalTypeRepr {
path: Path::from_id(name.clone()),
types: vec![],
}),
}
}
pub fn is_unit(&self) -> bool {
match *self {
TypeRepr::Tuple(ref v) => v.is_empty(),
@ -203,37 +229,97 @@ impl TypeRepr {
})
}
/// Returns the type parameters (or potential type parameters)
/// referenced by this type. e.g., for the type `&'x X`, would
/// return `[TypeParameter::Lifetime('x), TypeParameter::Id(X)]`.
/// This is later used to prune the type parameters list so that
/// only those that are actually used are included.
pub fn referenced(&self) -> Vec<TypeParameter> {
match *self {
TypeRepr::Tuple(ref tys) => tys.iter().flat_map(|t| t.referenced()).collect(),
TypeRepr::Nominal(ref data) => data
.types
.iter()
.flat_map(|t| t.referenced())
.chain(match data.path.as_id() {
Some(id) => vec![TypeParameter::Id(id)],
None => vec![],
pub fn bottom_up(&self, op: &mut impl FnMut(TypeRepr) -> TypeRepr) -> Self {
let result = match self {
TypeRepr::Tuple(types) => {
TypeRepr::Tuple(types.iter().map(|t| t.bottom_up(op)).collect())
}
TypeRepr::Nominal(NominalTypeRepr { path, types }) => {
TypeRepr::Nominal(NominalTypeRepr {
path: path.clone(),
types: types.iter().map(|t| t.bottom_up(op)).collect(),
})
.collect(),
TypeRepr::Associated {
ref type_parameter, ..
} => vec![TypeParameter::Id(type_parameter.clone())],
TypeRepr::Lifetime(ref l) => vec![TypeParameter::Lifetime(l.clone())],
}
TypeRepr::Associated { type_parameter, id } => TypeRepr::Associated {
type_parameter: type_parameter.clone(),
id: id.clone(),
},
TypeRepr::Lifetime(l) => TypeRepr::Lifetime(l.clone()),
TypeRepr::Ref {
ref lifetime,
mutable: _,
ref referent,
} => lifetime
.iter()
.map(|id| TypeParameter::Lifetime(id.clone()))
.chain(referent.referenced())
.collect(),
}
lifetime,
mutable,
referent,
} => TypeRepr::Ref {
lifetime: lifetime.clone(),
mutable: *mutable,
referent: Box::new(referent.bottom_up(op)),
},
};
op(result)
}
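
A standalone sketch of this fold: children are rebuilt first, then `op` runs on each rebuilt node. `Ty` is a simplified stand-in for `TypeRepr`, and the renaming closure mimics the lifetime-naming pass that follows:

#[derive(Debug, PartialEq)]
enum Ty {
    Tuple(Vec<Ty>),
    Name(String),
}

fn bottom_up(t: &Ty, op: &mut impl FnMut(Ty) -> Ty) -> Ty {
    let result = match t {
        Ty::Tuple(ts) => Ty::Tuple(ts.iter().map(|t| bottom_up(t, op)).collect()),
        Ty::Name(n) => Ty::Name(n.clone()),
    };
    op(result)
}

fn main() {
    // Rename every anonymous `'_` leaf, as the naming pass does.
    let ty = Ty::Tuple(vec![Ty::Name("'_".into()), Ty::Name("T".into())]);
    let mut counter = 0;
    let renamed = bottom_up(&ty, &mut |t| match t {
        Ty::Name(ref n) if n.as_str() == "'_" => {
            counter += 1;
            Ty::Name(format!("'__{}", counter))
        }
        other => other,
    });
    assert_eq!(
        renamed,
        Ty::Tuple(vec![Ty::Name("'__1".into()), Ty::Name("T".into())])
    );
}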
/// Finds anonymous lifetimes (e.g., `&u32` or `Foo<'_>`) and
/// instantiates them with a name like `__1`. Also computes
/// obvious outlives relationships that are needed (e.g., `&'a T`
/// requires `T: 'a`). The parameters `type_parameters` and
/// `where_clauses` should contain -- on entry -- the
/// type-parameters and where-clauses that currently exist on the
/// grammar. On exit, they will have been modified to include the
/// new type parameters and any implied where clauses.
pub fn name_anonymous_lifetimes_and_compute_implied_outlives(
&self,
prefix: &str,
type_parameters: &mut Vec<TypeParameter>,
where_clauses: &mut Vec<WhereClause>,
) -> Self {
let fresh_lifetime_name = |type_parameters: &mut Vec<TypeParameter>| {
// Make a name like `__1`:
let len = type_parameters.len();
let name = Lifetime(Atom::from(format!("'{}{}", prefix, len)));
type_parameters.push(TypeParameter::Lifetime(name.clone()));
name
};
self.bottom_up(&mut |t| match t {
TypeRepr::Tuple { .. } | TypeRepr::Nominal { .. } | TypeRepr::Associated { .. } => t,
TypeRepr::Lifetime(l) => {
if l.is_anonymous() {
TypeRepr::Lifetime(fresh_lifetime_name(type_parameters))
} else {
TypeRepr::Lifetime(l)
}
}
TypeRepr::Ref {
mut lifetime,
mutable,
referent,
} => {
if lifetime.is_none() {
lifetime = Some(fresh_lifetime_name(type_parameters));
}
// If we have `&'a T`, then we have to compute each
// free variable `X` in `T` and ensure that `X: 'a`:
let l = lifetime.clone().unwrap();
for tp in referent.free_variables(type_parameters) {
let wc = WhereClause::Bound {
subject: TypeRepr::from_parameter(&tp),
bound: TypeBound::Lifetime(l.clone()),
};
if !where_clauses.contains(&wc) {
where_clauses.push(wc);
}
}
TypeRepr::Ref {
lifetime,
mutable,
referent,
}
}
})
}
}
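
A standalone sketch of the naming scheme the comment above describes: fresh names derive from the running length of the type-parameter list, and each named reference implies an outlives bound. `String` stands in for the real `Lifetime`/`TypeParameter` types; the `__` prefix matches the generated-name prefix from the doc comment:

fn fresh_lifetime(prefix: &str, type_parameters: &mut Vec<String>) -> String {
    // Make a name like `'__1` from the running parameter count:
    let name = format!("'{}{}", prefix, type_parameters.len());
    type_parameters.push(name.clone());
    name
}

fn main() {
    let mut params = vec!["T".to_string()]; // the grammar already has `T`
    // Naming the anonymous lifetime in `&T` yields `&'__1 T` ...
    let l = fresh_lifetime("__", &mut params);
    assert_eq!(l, "'__1");
    assert_eq!(params, ["T", "'__1"]);
    // ... and the pass then records the implied bound `T: '__1`,
    // skipping duplicates via the `contains` check above.
}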
@ -327,7 +413,7 @@ impl Types {
pub fn error_type(&self) -> TypeRepr {
self.error_type.clone().unwrap_or_else(|| TypeRepr::Ref {
lifetime: Some(Atom::from("'static")),
lifetime: Some(Lifetime::statik()),
mutable: false,
referent: Box::new(TypeRepr::str()),
})
@ -377,6 +463,18 @@ impl Types {
}
}
impl Display for WhereClause {
fn fmt(&self, fmt: &mut Formatter) -> Result<(), Error> {
match self {
WhereClause::Forall { binder, clause } => {
write!(fmt, "for<{}> {}", Sep(", ", binder), clause)
}
WhereClause::Bound { subject, bound } => write!(fmt, "{}: {}", subject, bound),
}
}
}
impl Display for Parameter {
fn fmt(&self, fmt: &mut Formatter) -> Result<(), Error> {
write!(fmt, "{}: {}", self.name, self.ty)


@ -2,11 +2,10 @@
//!
//! [recursive ascent]: https://en.wikipedia.org/wiki/Recursive_ascent_parser
use collections::{Multimap, Set};
use grammar::parse_tree::WhereClause;
use collections::Multimap;
use grammar::repr::{
Grammar, NonterminalString, Production, Symbol, TerminalString, TypeParameter, TypeRepr,
Visibility,
Visibility, WhereClause,
};
use lr1::core::*;
use lr1::lookahead::Token;
@ -49,7 +48,7 @@ struct RecursiveAscent<'ascent, 'grammar> {
/// type parameters for the `Nonterminal` type
nonterminal_type_params: Vec<TypeParameter>,
nonterminal_where_clauses: Vec<WhereClause<TypeRepr>>,
nonterminal_where_clauses: Vec<WhereClause>,
}
/// Tracks the suffix of the stack (that is, top-most elements) that any
@ -125,44 +124,11 @@ impl<'ascent, 'grammar, W: Write>
action_module: &str,
out: &'ascent mut RustWrite<W>,
) -> Self {
// The nonterminal type needs to be parameterized by all the
// type parameters that actually appear in the types of
// nonterminals. We can't just use *all* type parameters
// because that would leave unused lifetime/type parameters in
// some cases.
let referenced_ty_params: Set<TypeParameter> = grammar
.types
.nonterminal_types()
.into_iter()
.flat_map(|t| t.referenced())
.collect();
let nonterminal_type_params: Vec<_> = grammar
.type_parameters
.iter()
.filter(|t| referenced_ty_params.contains(t))
.cloned()
.collect();
let mut referenced_where_clauses = Set::new();
for wc in &grammar.where_clauses {
wc.map(|ty| {
if ty
.referenced()
.iter()
.any(|p| nonterminal_type_params.contains(p))
{
referenced_where_clauses.insert(wc.clone());
}
});
}
let nonterminal_where_clauses: Vec<_> = grammar
.where_clauses
.iter()
.filter(|wc| referenced_where_clauses.contains(wc))
.cloned()
.collect();
let (nonterminal_type_params, nonterminal_where_clauses) =
Self::filter_type_parameters_and_where_clauses(
grammar,
grammar.types.nonterminal_types(),
);
let state_inputs = states
.iter()
@ -528,37 +494,27 @@ impl<'ascent, 'grammar, W: Write>
let triple_type = self.triple_type();
let parse_error_type = self.types.parse_error_type();
let error_type = self.types.error_type();
// If we are generating the tokenizer, it generates ParseError
// errors, otherwise they are user errors.
let iter_error_type = if self.grammar.intern_token.is_some() {
parse_error_type
} else {
&error_type
};
let (fn_args, starts_with_terminal) = self.fn_args(optional_prefix, fixed_prefix);
try!(self.out.write_fn_header(
self.grammar,
&Visibility::Priv,
format!("{}{}{}", self.prefix, fn_kind, fn_index),
vec![format!(
"{}TOKENS: Iterator<Item=Result<{},{}>>",
self.prefix, triple_type, iter_error_type
),],
None,
fn_args,
format!(
"Result<(Option<{}>, {}Nonterminal<{}>), {}>",
triple_type,
self.prefix,
Sep(", ", &self.custom.nonterminal_type_params),
parse_error_type
),
vec![]
));
try!(
self.out
.fn_header(
&Visibility::Priv,
format!("{}{}{}", self.prefix, fn_kind, fn_index),
).with_grammar(self.grammar)
.with_type_parameters(Some(format!(
"{}TOKENS: Iterator<Item=Result<{},{}>>",
self.prefix, triple_type, parse_error_type
))).with_parameters(fn_args)
.with_return_type(format!(
"Result<(Option<{}>, {}Nonterminal<{}>), {}>",
triple_type,
self.prefix,
Sep(", ", &self.custom.nonterminal_type_params),
parse_error_type
)).emit()
);
rust!(self.out, "{{");
@ -1009,11 +965,7 @@ impl<'ascent, 'grammar, W: Write>
rust!(self.out, "Some(Err(e)) => return Err(e),");
} else {
// otherwise, they are user errors
rust!(
self.out,
"Some(Err(e)) => return Err({}lalrpop_util::ParseError::User {{ error: e }}),",
self.prefix
);
rust!(self.out, "Some(Err(e)) => return Err(e),");
}
rust!(self.out, "}};");
Ok(())


@ -1,5 +1,7 @@
//! Base helper routines for a code generator.
use collections::Set;
use grammar::free_variables::FreeVariables;
use grammar::repr::*;
use lr1::core::*;
use rust::RustWrite;
@ -66,6 +68,89 @@ impl<'codegen, 'grammar, W: Write, C> CodeGenerator<'codegen, 'grammar, W, C> {
}
}
/// We often create meta types that pull together a bunch of
/// user-given types -- basically describing (e.g.) the full set
/// of return values from any nonterminal (and, in some cases,
/// terminals). These types need to carry generic parameters from
/// the grammar, since the nonterminals may include generic
/// parameters -- but we don't want them to carry *all* the
/// generic parameters, since that can be unnecessarily
/// restrictive.
///
/// In particular, consider something like this:
///
/// ```notrust
/// grammar<'a>(buffer: &'a mut Vec<u32>);
/// ```
///
/// Here, we likely do not want the `'a` in the type of `buffer` to appear
/// in the nonterminal result. That's because, if it did, then the
/// action functions would have a signature like:
///
/// ```ignore
/// fn foo<'a, T>(x: &'a mut Vec<T>) -> Result<'a> { ... }
/// ```
///
/// In that case, we would only be able to call one action fn and
/// would in fact get borrowck errors, because Rust would think we
/// were potentially returning this `&'a mut Vec<T>`.
///
/// Therefore, we take the full list of type parameters and we
/// filter them down to those that appear in the types that we
/// need to include (those that appear in the `tys` parameter).
///
/// In some cases, we need to include a few more than just that
/// obviously appear textually: for example, if we have `T::Foo`,
/// and we see a where-clause `T: Bar<'a>`, then we need to
/// include both `T` and `'a`, since that bound may be important
/// for resolving `T::Foo` (in other words, `T::Foo` may expand to
/// `<T as Bar<'a>>::Foo`).
pub fn filter_type_parameters_and_where_clauses(
grammar: &Grammar,
tys: impl IntoIterator<Item = TypeRepr>,
) -> (Vec<TypeParameter>, Vec<WhereClause>) {
let referenced_ty_params: Set<_> = tys
.into_iter()
.flat_map(|t| t.free_variables(&grammar.type_parameters))
.collect();
let filtered_type_params: Vec<_> = grammar
.type_parameters
.iter()
.filter(|t| referenced_ty_params.contains(t))
.cloned()
.collect();
// If `T` is referenced in the types we need to keep, then
// include any bounds like `T: Foo`. This may be needed for
// the well-formedness conditions on `T` (e.g., maybe we have
// `T: Hash` and a `HashSet<T>` or something) but it may also
// be needed because of `T::Foo`-like types.
//
// Do not however include a bound like `T: 'a` unless both `T`
// **and** `'a` are referenced -- same with bounds like `T:
// Foo<U>`. If those were needed, then `'a` or `U` would also
// have to appear in the types.
debug!("filtered_type_params = {:?}", filtered_type_params);
let filtered_where_clauses: Vec<_> = grammar
.where_clauses
.iter()
.filter(|wc| {
debug!(
"wc = {:?} free_variables = {:?}",
wc,
wc.free_variables(&grammar.type_parameters)
);
wc.free_variables(&grammar.type_parameters)
.iter()
.all(|p| referenced_ty_params.contains(p))
}).cloned()
.collect();
debug!("filtered_where_clauses = {:?}", filtered_where_clauses);
(filtered_type_params, filtered_where_clauses)
}
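
A standalone sketch of the two-stage filter above, with strings standing in for `TypeParameter` (params) and `WhereClause` (clause text plus its free variables): keep a type parameter only if the given types reference it, then keep a where-clause only if all of its free variables survived:

use std::collections::HashSet;

fn filter<'a>(
    grammar_params: &[&'a str],
    referenced: &HashSet<&'a str>,
    where_clauses: &[(&'a str, Vec<&'a str>)],
) -> (Vec<&'a str>, Vec<&'a str>) {
    let params: Vec<_> = grammar_params
        .iter()
        .copied()
        .filter(|p| referenced.contains(p))
        .collect();
    let clauses: Vec<_> = where_clauses
        .iter()
        .filter(|(_, free)| free.iter().all(|v| referenced.contains(v)))
        .map(|(text, _)| *text)
        .collect();
    (params, clauses)
}

fn main() {
    // grammar<'a, T>(buffer: &'a mut Vec<T>) where T: 'a, with only
    // `T` appearing in the nonterminal types: `'a` and `T: 'a` drop out.
    let referenced: HashSet<&str> = ["T"].into_iter().collect();
    let grammar_params = ["'a", "T"];
    let where_clauses = [("T: 'a", vec!["T", "'a"])];
    let (params, clauses) = filter(&grammar_params, &referenced, &where_clauses);
    assert_eq!(params, ["T"]);
    assert!(clauses.is_empty());
}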
pub fn write_parse_mod<F>(&mut self, body: F) -> io::Result<()>
where
F: FnOnce(&mut Self) -> io::Result<()>,
@ -117,7 +202,6 @@ impl<'codegen, 'grammar, W: Write, C> CodeGenerator<'codegen, 'grammar, W, C> {
}
pub fn start_parser_fn(&mut self) -> io::Result<()> {
let error_type = self.types.error_type();
let parse_error_type = self.types.parse_error_type();
let (type_parameters, parameters, mut where_clauses);
@ -138,8 +222,8 @@ impl<'codegen, 'grammar, W: Write, C> CodeGenerator<'codegen, 'grammar, W, C> {
}
type_parameters = vec![
format!(
"{}TOKEN: {}ToTriple<{}Error={}>",
self.prefix, self.prefix, user_type_parameters, error_type
"{}TOKEN: {}ToTriple<{}>",
self.prefix, self.prefix, user_type_parameters,
),
format!(
"{}TOKENS: IntoIterator<Item={}TOKEN>",
@ -197,20 +281,22 @@ impl<'codegen, 'grammar, W: Write, C> CodeGenerator<'codegen, 'grammar, W, C> {
rust!(self.out, "");
rust!(self.out, "#[allow(dead_code)]");
try!(self.out.write_fn_header(
self.grammar,
&self.grammar.nonterminals[&self.start_symbol].visibility,
"parse".to_owned(),
type_parameters,
Some("&self".to_owned()),
parameters,
format!(
"Result<{}, {}>",
self.types.nonterminal_type(&self.start_symbol),
parse_error_type
),
where_clauses
));
try!(
self.out
.fn_header(
&self.grammar.nonterminals[&self.start_symbol].visibility,
"parse".to_owned(),
).with_parameters(Some("&self".to_owned()))
.with_grammar(self.grammar)
.with_type_parameters(type_parameters)
.with_parameters(parameters)
.with_return_type(format!(
"Result<{}, {}>",
self.types.nonterminal_type(&self.start_symbol),
parse_error_type
)).with_where_clauses(where_clauses)
.emit()
);
rust!(self.out, "{{");
Ok(())
@ -260,10 +346,17 @@ impl<'codegen, 'grammar, W: Write, C> CodeGenerator<'codegen, 'grammar, W, C> {
/// all type parameters are constrained, even if they are not
/// used.
pub fn phantom_data_type(&self) -> String {
format!(
"::std::marker::PhantomData<({})>",
Sep(", ", &self.grammar.non_lifetime_type_parameters())
)
let phantom_bits: Vec<_> = self
.grammar
.type_parameters
.iter()
.map(|tp| match *tp {
TypeParameter::Lifetime(ref l) => format!("&{} ()", l),
TypeParameter::Id(ref id) => id.to_string(),
})
.collect();
format!("::std::marker::PhantomData<({})>", Sep(", ", &phantom_bits),)
}
/// Returns an expression that captures the user-declared type
@ -271,9 +364,19 @@ impl<'codegen, 'grammar, W: Write, C> CodeGenerator<'codegen, 'grammar, W, C> {
/// all type parameters are constrained, even if they are not
/// used.
pub fn phantom_data_expr(&self) -> String {
let phantom_bits: Vec<_> = self
.grammar
.type_parameters
.iter()
.map(|tp| match *tp {
TypeParameter::Lifetime(_) => "&()".to_string(),
TypeParameter::Id(ref id) => id.to_string(),
})
.collect();
format!(
"::std::marker::PhantomData::<({})>",
Sep(", ", &self.grammar.non_lifetime_type_parameters())
Sep(", ", &phantom_bits),
)
}
}
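
Why `&'a ()` in the type but `&()` in the expression: the type must name the grammar's lifetimes so they are constrained, while the expression elides them and lets inference tie the two together. A standalone sketch of the shapes these helpers emit for a `grammar<'a, T>`:

use std::marker::PhantomData;

// phantom_data_type() renders the type:       ::std::marker::PhantomData<(&'a (), T)>
// phantom_data_expr() renders the expression: ::std::marker::PhantomData::<(&(), T)>
struct State<'a, T> {
    phantom: PhantomData<(&'a (), T)>,
}

fn main() {
    // The elided lifetime in the expression unifies with `'a`.
    let state = State {
        phantom: PhantomData::<(&(), u32)>,
    };
    let _ = state;
}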

File diff suppressed because it is too large


@ -2,9 +2,11 @@
//!
use collections::{map, Map};
use grammar::consts::*;
use grammar::consts::CFG;
use grammar::parse_tree as pt;
use grammar::parse_tree::{read_algorithm, InternToken, NonterminalString, Path, TerminalString};
use grammar::parse_tree::{
read_algorithm, InternToken, Lifetime, NonterminalString, Path, TerminalString,
};
use grammar::pattern::{Pattern, PatternKind};
use grammar::repr as r;
use normalize::norm_util::{self, Symbols};
@ -68,7 +70,7 @@ impl<'s> LowerState<'s> {
token_span = Some(grammar.span);
let span = grammar.span;
let input_str = r::TypeRepr::Ref {
lifetime: Some(Atom::from(INPUT_LIFETIME)),
lifetime: Some(Lifetime::input()),
mutable: false,
referent: Box::new(r::TypeRepr::Nominal(r::NominalTypeRepr {
path: r::Path::str(),
@ -157,7 +159,7 @@ impl<'s> LowerState<'s> {
let where_clauses = grammar
.where_clauses
.iter()
.map(|wc| wc.map(pt::TypeRef::type_repr))
.flat_map(|wc| self.lower_where_clause(wc))
.collect();
let mut algorithm = r::Algorithm::default();
@ -256,6 +258,39 @@ impl<'s> LowerState<'s> {
.collect()
}
/// When we lower where clauses into `repr::WhereClause`, they get
/// flattened; so we may go from `T: Foo + Bar` into `[T: Foo, T:
/// Bar]`. We also convert to `TypeRepr` and so forth.
fn lower_where_clause(&mut self, wc: &pt::WhereClause<pt::TypeRef>) -> Vec<r::WhereClause> {
match wc {
pt::WhereClause::Lifetime { lifetime, bounds } => bounds
.iter()
.map(|bound| r::WhereClause::Bound {
subject: r::TypeRepr::Lifetime(lifetime.clone()),
bound: pt::TypeBound::Lifetime(bound.clone()),
})
.collect(),
pt::WhereClause::Type { forall, ty, bounds } => bounds
.iter()
.map(|bound| r::WhereClause::Bound {
subject: ty.type_repr(),
bound: bound.map(pt::TypeRef::type_repr),
})
.map(|bound| {
if forall.is_empty() {
bound
} else {
r::WhereClause::Forall {
binder: forall.clone(),
clause: Box::new(bound),
}
}
})
.collect(),
}
}
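
A standalone sketch of that flattening, with strings standing in for the real types: one clause `T: Foo + Bar` becomes two, and a non-empty `for<...>` binder wraps each result:

fn lower(forall: &[&str], subject: &str, bounds: &[&str]) -> Vec<String> {
    bounds
        .iter()
        .map(|bound| format!("{}: {}", subject, bound))
        .map(|clause| {
            if forall.is_empty() {
                clause
            } else {
                format!("for<{}> {}", forall.join(", "), clause)
            }
        })
        .collect()
}

fn main() {
    // `T: Foo + Bar` flattens to two clauses:
    assert_eq!(lower(&[], "T", &["Foo", "Bar"]), ["T: Foo", "T: Bar"]);
    // A non-empty binder wraps each flattened clause:
    assert_eq!(lower(&["'a"], "&'a T", &["Debug"]), ["for<'a> &'a T: Debug"]);
}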
fn action_kind(
&mut self,
nt_type: r::TypeRepr,


@ -384,10 +384,10 @@ fn construct(grammar: &mut Grammar, match_block: MatchBlock) -> NormResult<()> {
// we need to inject a `'input` lifetime and `input: &'input str` parameter as well:
let input_lifetime = Atom::from(INPUT_LIFETIME);
let input_lifetime = Lifetime::input();
for parameter in &grammar.type_parameters {
match *parameter {
TypeParameter::Lifetime(ref i) if *i == input_lifetime => {
match parameter {
TypeParameter::Lifetime(i) if *i == input_lifetime => {
return_err!(
grammar.span,
"since there is no external token enum specified, \


@ -1,10 +1,10 @@
use super::norm_util::{self, AlternativeAction, Symbols};
use super::{NormError, NormResult};
use grammar::consts::{ERROR, INPUT_LIFETIME, LOCATION};
use grammar::consts::{ERROR, LOCATION};
use grammar::parse_tree::{
ActionKind, Alternative, Grammar, NonterminalData, NonterminalString, Path, Span, SymbolKind,
TypeParameter, TypeRef,
ActionKind, Alternative, Grammar, Lifetime, NonterminalData, NonterminalString, Path, Span,
SymbolKind, TypeParameter, TypeRef,
};
use grammar::repr::{NominalTypeRepr, TypeRepr, Types};
use std::collections::{HashMap, HashSet};
@ -80,7 +80,7 @@ impl<'grammar> TypeInferencer<'grammar> {
TypeRepr::usize();
let input_str = // &'input str
TypeRepr::Ref {
lifetime: Some(Atom::from(INPUT_LIFETIME)),
lifetime: Some(Lifetime::input()),
mutable: false,
referent: Box::new(TypeRepr::str())
};
@ -90,7 +90,7 @@ impl<'grammar> TypeInferencer<'grammar> {
absolute: false,
ids: vec![Atom::from("Token")],
},
types: vec![TypeRepr::Lifetime(Atom::from(INPUT_LIFETIME))]
types: vec![TypeRepr::Lifetime(Lifetime::input())]
});
let mut types = Types::new(&grammar.prefix, Some(loc_type), error_type, enum_type);


@ -52,12 +52,14 @@ GrammarWhereClauses: Vec<WhereClause<TypeRef>> =
GrammarWhereClause: WhereClause<TypeRef> = {
<l:Lifetime> ":" <bounds:Plus<Lifetime>> =>
WhereClause::Lifetime { lifetime: l, bounds: bounds },
<f:ForAll?> <ty:TypeRef> ":" <bounds:TypeBounds> =>
<f:ForAll> <ty:TypeRef> ":" <bounds:TypeBounds> =>
WhereClause::Type { forall: f, ty: ty, bounds: bounds }
};
ForAll: Vec<Atom> =
"for" "<" <Comma<Lifetime>> ">";
ForAll: Vec<TypeParameter> = {
"for" "<" <Comma<TypeParameter>> ">",
() => vec![],
};
TypeBounds: Vec<TypeBound<TypeRef>> =
<Plus<TypeBound>>;
@ -65,9 +67,9 @@ TypeBounds: Vec<TypeBound<TypeRef>> =
TypeBound: TypeBound<TypeRef> = {
<l:Lifetime> =>
TypeBound::Lifetime(l),
<f:ForAll?> <p:Path> "(" <params:Comma<TypeRef>> ")" <ret:("->" <TypeRef>)?> =>
<f:ForAll> <p:Path> "(" <params:Comma<TypeRef>> ")" <ret:("->" <TypeRef>)?> =>
TypeBound::Fn { forall: f, path: p, parameters: params, ret: ret },
<f:ForAll?> <p:Path> <params:("<" <Comma<TypeBoundParameter>> ">")?> =>
<f:ForAll> <p:Path> <params:("<" <Comma<TypeBoundParameter>> ">")?> =>
TypeBound::Trait { forall: f, path: p, parameters: params.unwrap_or(vec![]) }
};
@ -390,8 +392,8 @@ Id: Atom = {
Escape: Atom =
<i:"Escape"> => Atom::from(i);
Lifetime: Atom =
<i:"Lifetime"> => Atom::from(i);
Lifetime: Lifetime =
<i:"Lifetime"> => Lifetime(Atom::from(i));
Terminal: TerminalString = {
QuotedTerminal,

File diff suppressed because it is too large


@ -2,8 +2,8 @@
//! which then gets serialized.
use grammar::parse_tree::Visibility;
use grammar::repr::Grammar;
use std::fmt;
use grammar::repr::{self, Grammar};
use std::fmt::{self, Display};
use std::io::{self, Write};
use tls::Tls;
@ -40,7 +40,7 @@ pub struct RustWrite<W: Write> {
const TAB: usize = 4;
impl<W: Write> RustWrite<W> {
impl<'me, W: Write> RustWrite<W> {
pub fn new(w: W) -> RustWrite<W> {
RustWrite {
write: w,
@ -114,55 +114,10 @@ impl<W: Write> RustWrite<W> {
Ok(())
}
pub fn write_fn_header(
&mut self,
grammar: &Grammar,
visibility: &Visibility,
name: String,
type_parameters: Vec<String>,
first_parameter: Option<String>,
parameters: Vec<String>,
return_type: String,
where_clauses: Vec<String>,
) -> io::Result<()> {
rust!(self, "{}fn {}<", visibility, name);
for type_parameter in &grammar.type_parameters {
rust!(self, "{0:1$}{2},", "", TAB, type_parameter);
}
for type_parameter in type_parameters {
rust!(self, "{0:1$}{2},", "", TAB, type_parameter);
}
rust!(self, ">(");
if let Some(param) = first_parameter {
rust!(self, "{},", param);
}
for parameter in &grammar.parameters {
rust!(self, "{}: {},", parameter.name, parameter.ty);
}
for parameter in &parameters {
rust!(self, "{},", parameter);
}
if !grammar.where_clauses.is_empty() || !where_clauses.is_empty() {
rust!(self, ") -> {} where", return_type);
for where_clause in &grammar.where_clauses {
rust!(self, " {},", where_clause);
}
for where_clause in &where_clauses {
rust!(self, " {},", where_clause);
}
} else {
rust!(self, ") -> {}", return_type);
}
Ok(())
/// Create and return a fn-header builder. Don't forget to invoke
/// `emit` at the end. =)
pub fn fn_header(&'me mut self, visibility: &'me Visibility, name: String) -> FnHeader<'me, W> {
FnHeader::new(self, visibility, name)
}
pub fn write_module_attributes(&mut self, grammar: &Grammar) -> io::Result<()> {
@ -189,8 +144,129 @@ impl<W: Write> RustWrite<W> {
// Stuff that we plan to use.
// Occasionally we happen to not use it after all, hence the allow.
rust!(self, "#[allow(unused_extern_crates)]");
rust!(self, "extern crate lalrpop_util as {}lalrpop_util;", prefix);
rust!(
self,
"extern crate lalrpop_util as {p}lalrpop_util;",
p = prefix,
);
rust!(self, "#[allow(unused_imports)]");
rust!(
self,
"use self::{p}lalrpop_util::state_machine as {p}state_machine;",
p = prefix,
);
Ok(())
}
}
pub struct FnHeader<'me, W: Write + 'me> {
write: &'me mut RustWrite<W>,
visibility: &'me Visibility,
name: String,
type_parameters: Vec<String>,
parameters: Vec<String>,
return_type: String,
where_clauses: Vec<String>,
}
impl<'me, W: Write> FnHeader<'me, W> {
pub fn new(write: &'me mut RustWrite<W>, visibility: &'me Visibility, name: String) -> Self {
FnHeader {
write,
visibility,
name,
type_parameters: vec![],
parameters: vec![],
return_type: "()".to_string(),
where_clauses: vec![],
}
}
/// Adds the type-parameters, where-clauses, and parameters from
/// the grammar.
pub fn with_grammar(self, grammar: &Grammar) -> Self {
self.with_type_parameters(&grammar.type_parameters)
.with_where_clauses(&grammar.where_clauses)
.with_parameters(&grammar.parameters)
}
/// Declare a series of type parameters. Note that lifetime
/// parameters must come first.
pub fn with_type_parameters(mut self, tps: impl IntoIterator<Item = impl Display>) -> Self {
self.type_parameters
.extend(tps.into_iter().map(|t| t.to_string()));
self
}
/// Add where clauses to the list.
pub fn with_where_clauses(mut self, tps: impl IntoIterator<Item = impl Display>) -> Self {
self.where_clauses
.extend(tps.into_iter().map(|t| t.to_string()));
self
}
/// Declare a series of parameters. You can supply strings of the
/// form `"foo: Bar"` or else `repr::Parameter` references.
pub fn with_parameters(
mut self,
parameters: impl IntoIterator<Item = impl ParameterDisplay>,
) -> Self {
self.parameters.extend(
parameters
.into_iter()
.map(ParameterDisplay::to_parameter_string),
);
self
}
/// Set the return type. It defaults to `()` when not given.
pub fn with_return_type(mut self, rt: impl Display) -> Self {
self.return_type = format!("{}", rt);
self
}
/// Emit fn header -- everything up to the opening `{` for the
/// body.
pub fn emit(self) -> io::Result<()> {
rust!(self.write, "{}fn {}<", self.visibility, self.name);
for type_parameter in &self.type_parameters {
rust!(self.write, "{0:1$}{2},", "", TAB, type_parameter);
}
rust!(self.write, ">(");
for parameter in &self.parameters {
rust!(self.write, "{},", parameter);
}
rust!(self.write, ") -> {}", self.return_type);
if !self.where_clauses.is_empty() {
rust!(self.write, "where");
for where_clause in &self.where_clauses {
rust!(self.write, " {},", where_clause);
}
}
Ok(())
}
}
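
A hedged sketch of a call site for this builder; the fn name, extra token parameter, and return type below are made up for illustration (real call sites appear in the codegen changes above), and `RustWrite`, `Grammar`, and `Visibility` are assumed in scope from this module:

use std::io::{self, Write};

fn write_header<W: Write>(out: &mut RustWrite<W>, grammar: &Grammar) -> io::Result<()> {
    out.fn_header(&Visibility::Priv, "__parse_expr".to_owned())
        .with_grammar(grammar) // grammar type params, where clauses, parameters
        .with_type_parameters(vec!["__TOKEN: Clone"])
        .with_parameters(vec!["__tokens: Vec<__TOKEN>".to_string()])
        .with_return_type("Result<(), ()>")
        .emit()
}

Compared to the old `write_fn_header`, call sites now state only the pieces they need instead of threading every argument positionally.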
pub trait ParameterDisplay {
fn to_parameter_string(self) -> String;
}
impl ParameterDisplay for String {
fn to_parameter_string(self) -> String {
self
}
}
impl<'me> ParameterDisplay for &'me repr::Parameter {
fn to_parameter_string(self) -> String {
format!("{}: {}", self.name, self.ty)
}
}