diff --git a/.github/workflows/quality-check.yml b/.github/workflows/quality-check.yml index bd93996..01adb34 100644 --- a/.github/workflows/quality-check.yml +++ b/.github/workflows/quality-check.yml @@ -12,6 +12,11 @@ jobs: - name: Checkout code uses: actions/checkout@v4 + - name: Install minimal nightly (for fmt) + uses: dtolnay/rust-toolchain@nightly + with: + components: rustfmt + - name: Set up Rust uses: dtolnay/rust-toolchain@1.90.0 with: @@ -21,7 +26,7 @@ jobs: run: cargo clippy --all-targets --all-features -- -D warnings - name: Format check - run: cargo fmt --all -- --check + run: cargo +nightly fmt --all -- --check test: needs: @@ -55,17 +60,13 @@ jobs: - name: Checkout code uses: actions/checkout@v4 - - name: Build script - uses: docker://ghcr.io/amber-lang/amber:alpine-0.4.0-alpha - with: - args: | - build ./run_coverage.ab ./run_coverage.sh - - name: Test coverage - run: ./run_coverage.sh xml + run: cargo tarpaulin --verbose --all-features --workspace --timeout 120 --out xml - name: Upload to codecov.io - uses: codecov/codecov-action@v2 + uses: codecov/codecov-action@v5 with: token: ${{secrets.CODECOV_TOKEN}} fail_ci_if_error: true + files: ./coverage_output/cobertura.xml + verbose: true diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 1251cc1..f8141f4 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v5.0.0 + rev: v6.0.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer @@ -11,7 +11,7 @@ repos: hooks: - id: find-fixme-phrase name: Find FIXME phrase - entry: bash -c 'git diff --cached --name-only --diff-filter=ACM | grep -E "\.(txt|js|ts|py|rs|go|java|c|cpp|sh|ab)$" | xargs -r grep -l "FIXME" && exit 1 || exit 0' + entry: bash -c 'git diff --cached --name-only --diff-filter=ACM | grep -E "\.(txt|js|ts|py|rs|go|java|c|cpp|sh|ab)$" | xargs -r grep -l "FIXME" | grep -E ".+" && exit 1 || exit 0' language: system files: '\.(txt|js|ts|py|rs|go|java|c|cpp|sh|ab)$' description: Checks for the "FIXME" phrase in staged text files. 
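Note on the `cargo +nightly fmt` change above: the `rustfmt.toml` added later in this patch sets `imports_layout = "Vertical"` and `imports_granularity = "Module"`, and both options are still unstable in rustfmt, so formatting and the format check have to run on a nightly toolchain while clippy, tests, and builds stay on 1.90.0. Below is a minimal before/after sketch of the import style this enforces, reusing the `std` imports touched in `src/analysis/types.rs` in this patch; it only illustrates the formatting transformation and is not meant as a standalone compilation unit.

```rust
// Before (stable rustfmt default): nested use trees, items laid out horizontally.
use std::fmt::{self, Display};
use std::{collections::HashSet, sync::atomic::AtomicUsize};

// After `cargo +nightly fmt` with imports_granularity = "Module" and
// imports_layout = "Vertical": one `use` per module, one imported item per line.
use std::collections::HashSet;
use std::fmt::{
    self,
    Display,
};
use std::sync::atomic::AtomicUsize;
```

The same transformation is what produces the large mechanical import churn in the `src/analysis/*` and `src/grammar/*` files further down in this diff.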
@@ -29,7 +29,7 @@ repos: hooks: - id: cargo-fmt name: cargo fmt --check - entry: cargo fmt -- --check + entry: cargo +nightly fmt --check language: system types: [rust] pass_filenames: false diff --git a/Tarpaulin.toml b/.tarpaulin.toml similarity index 100% rename from Tarpaulin.toml rename to .tarpaulin.toml diff --git a/Cargo.lock b/Cargo.lock index ea753a0..419dc2d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -41,6 +41,7 @@ dependencies = [ "include_dir", "indexmap", "insta", + "logos", "phf", "rangemap", "ropey", @@ -113,6 +114,12 @@ dependencies = [ "object", ] +[[package]] +name = "beef" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a8241f3ebb85c056b509d4327ad0358fbbba6ffb340bf388f26350aeda225b1" + [[package]] name = "bitflags" version = "1.3.2" @@ -298,6 +305,12 @@ dependencies = [ "bitflags 1.3.2", ] +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + [[package]] name = "fs_extra" version = "1.3.0" @@ -500,6 +513,39 @@ version = "0.4.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34080505efa8e45a4b816c349525ebe327ceaa8559756f0356cba97ef3bf7432" +[[package]] +name = "logos" +version = "0.14.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7251356ef8cb7aec833ddf598c6cb24d17b689d20b993f9d11a3d764e34e6458" +dependencies = [ + "logos-derive", +] + +[[package]] +name = "logos-codegen" +version = "0.14.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59f80069600c0d66734f5ff52cc42f2dabd6b29d205f333d61fd7832e9e9963f" +dependencies = [ + "beef", + "fnv", + "lazy_static", + "proc-macro2", + "quote", + "regex-syntax 0.8.8", + "syn", +] + +[[package]] +name = "logos-derive" +version = "0.14.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24fb722b06a9dc12adb0963ed585f19fc61dc5413e6a9be9422ef92c091e731d" +dependencies = [ + "logos-codegen", +] + [[package]] name = "lsp-types" version = "0.97.0" @@ -730,7 +776,7 @@ checksum = "59b23e92ee4318893fa3fe3e6fb365258efbfe6ac6ab30f090cdcbb7aa37efa9" dependencies = [ "aho-corasick", "memchr", - "regex-syntax", + "regex-syntax 0.7.5", ] [[package]] @@ -739,6 +785,12 @@ version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dbb5fb1acd8a1a18b3dd5be62d25485eb770e05afb408a9627d14d451bae12da" +[[package]] +name = "regex-syntax" +version = "0.8.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58" + [[package]] name = "ropey" version = "1.6.1" diff --git a/Cargo.toml b/Cargo.toml index addb53b..2ec333b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -16,6 +16,7 @@ chumsky = { git = "https://github.com/KrosFire/chumsky", rev = "406ea80", featur "label", ] } heraclitus-compiler = "1.8.2" +logos = "0.14" serde_json = "1.0.128" rangemap = "1.5.1" indexmap = "2.6.0" @@ -47,7 +48,13 @@ ci = "github" # The installers to generate for each app installers = [] # Target platforms to build apps for (Rust target-triple syntax) -targets = ["aarch64-apple-darwin", "x86_64-apple-darwin", "x86_64-unknown-linux-gnu", "x86_64-unknown-linux-musl", "x86_64-pc-windows-msvc"] +targets = [ + "aarch64-apple-darwin", + "x86_64-apple-darwin", + "x86_64-unknown-linux-gnu", + "x86_64-unknown-linux-musl", + "x86_64-pc-windows-msvc", +] # 
Publish jobs to run in CI pr-run-mode = "plan" # Skip checking whether the specified configuration files are up to date diff --git a/codecov.yml b/codecov.yml index df8bb48..cb1a4e5 100644 --- a/codecov.yml +++ b/codecov.yml @@ -11,6 +11,10 @@ flags: paths: - src/grammar/alpha040/ - src/analysis/alpha040/ + 0.5.0-alpha: + paths: + - src/grammar/alpha050/ + - src/analysis/alpha050/ coverage: status: @@ -34,9 +38,12 @@ coverage: 0.4.0-alpha: flags: - 0.4.0-alpha + 0.5.0-alpha: + flags: + - 0.5.0-alpha patch: default: - target: 100% + target: 80% comment: layout: " diff, flags, files" diff --git a/rustfmt.toml b/rustfmt.toml new file mode 100644 index 0000000..bb5db29 --- /dev/null +++ b/rustfmt.toml @@ -0,0 +1,2 @@ +imports_layout="Vertical" +imports_granularity="Module" diff --git a/src/analysis/alpha034/exp.rs b/src/analysis/alpha034/exp.rs index bdf4a09..e54ec38 100644 --- a/src/analysis/alpha034/exp.rs +++ b/src/analysis/alpha034/exp.rs @@ -2,20 +2,34 @@ use std::vec; use chumsky::span::SimpleSpan; -use crate::{ - analysis::{ - get_symbol_definition_info, insert_symbol_reference, - types::{make_union_type, matches_type, DataType, GenericsMap}, - Context, FunctionArgument, FunctionSymbol, SymbolInfo, SymbolLocation, SymbolType, - VariableSymbol, - }, - files::{FileVersion, Files}, - grammar::{ - alpha034::{Expression, InterpolatedCommand, InterpolatedText}, - Spanned, - }, - paths::FileId, +use crate::analysis::types::{ + make_union_type, + matches_type, + DataType, + GenericsMap, }; +use crate::analysis::{ + get_symbol_definition_info, + insert_symbol_reference, + Context, + FunctionArgument, + FunctionSymbol, + SymbolInfo, + SymbolLocation, + SymbolType, + VariableSymbol, +}; +use crate::files::{ + FileVersion, + Files, +}; +use crate::grammar::alpha034::{ + Expression, + InterpolatedCommand, + InterpolatedText, +}; +use crate::grammar::Spanned; +use crate::paths::FileId; use super::stmnts::analyze_failure_handler; diff --git a/src/analysis/alpha034/global.rs b/src/analysis/alpha034/global.rs index da19d49..a8bc501 100644 --- a/src/analysis/alpha034/global.rs +++ b/src/analysis/alpha034/global.rs @@ -1,18 +1,33 @@ -use crate::{ - analysis::{ - self, import_symbol, insert_symbol_definition, map_import_path, - types::{make_union_type, matches_type, DataType}, - Context, FunctionContext, FunctionSymbol, ImportContext, SymbolInfo, SymbolType, - VariableSymbol, - }, - backend::Backend, - files::FileVersion, - grammar::{ - alpha034::{FunctionArgument, GlobalStatement, ImportContent}, - Span, Spanned, - }, - paths::FileId, +use crate::analysis::types::{ + make_union_type, + matches_type, + DataType, }; +use crate::analysis::{ + self, + import_symbol, + insert_symbol_definition, + map_import_path, + Context, + FunctionContext, + FunctionSymbol, + ImportContext, + SymbolInfo, + SymbolType, + VariableSymbol, +}; +use crate::backend::Backend; +use crate::files::FileVersion; +use crate::grammar::alpha034::{ + FunctionArgument, + GlobalStatement, + ImportContent, +}; +use crate::grammar::{ + Span, + Spanned, +}; +use crate::paths::FileId; use super::stmnts::analyze_stmnt; diff --git a/src/analysis/alpha034/stmnts.rs b/src/analysis/alpha034/stmnts.rs index a51edf9..fc31391 100644 --- a/src/analysis/alpha034/stmnts.rs +++ b/src/analysis/alpha034/stmnts.rs @@ -1,13 +1,27 @@ -use crate::{ - analysis::{ - get_symbol_definition_info, insert_symbol_definition, insert_symbol_reference, - types::{make_union_type, matches_type, GenericsMap}, - BlockContext, Context, DataType, SymbolInfo, SymbolLocation, 
SymbolType, VariableSymbol, - }, - files::{FileVersion, Files}, - grammar::{alpha034::*, Spanned}, - paths::FileId, +use crate::analysis::types::{ + make_union_type, + matches_type, + GenericsMap, }; +use crate::analysis::{ + get_symbol_definition_info, + insert_symbol_definition, + insert_symbol_reference, + BlockContext, + Context, + DataType, + SymbolInfo, + SymbolLocation, + SymbolType, + VariableSymbol, +}; +use crate::files::{ + FileVersion, + Files, +}; +use crate::grammar::alpha034::*; +use crate::grammar::Spanned; +use crate::paths::FileId; use super::exp::analyze_exp; diff --git a/src/analysis/alpha035/exp.rs b/src/analysis/alpha035/exp.rs index 20b9a3e..8a8b7ed 100644 --- a/src/analysis/alpha035/exp.rs +++ b/src/analysis/alpha035/exp.rs @@ -2,22 +2,43 @@ use std::vec; use chumsky::span::SimpleSpan; -use crate::{ - analysis::{ - get_symbol_definition_info, insert_symbol_reference, - types::{make_union_type, matches_type, DataType, GenericsMap}, - BlockContext, Context, FunctionArgument, FunctionSymbol, SymbolInfo, SymbolLocation, - SymbolType, VariableSymbol, - }, - files::{FileVersion, Files}, - grammar::{ - alpha035::{Expression, InterpolatedCommand, InterpolatedText}, - CommandModifier, Spanned, - }, - paths::FileId, +use crate::analysis::types::{ + make_union_type, + matches_type, + DataType, + GenericsMap, }; +use crate::analysis::{ + get_symbol_definition_info, + insert_symbol_reference, + BlockContext, + Context, + FunctionArgument, + FunctionSymbol, + SymbolInfo, + SymbolLocation, + SymbolType, + VariableSymbol, +}; +use crate::files::{ + FileVersion, + Files, +}; +use crate::grammar::alpha035::{ + Expression, + InterpolatedCommand, + InterpolatedText, +}; +use crate::grammar::{ + CommandModifier, + Spanned, +}; +use crate::paths::FileId; -use super::stmnts::{analyze_failure_handler, StmntAnalysisResult}; +use super::stmnts::{ + analyze_failure_handler, + StmntAnalysisResult, +}; #[derive(Debug, Clone)] pub struct ExpAnalysisResult { diff --git a/src/analysis/alpha035/global.rs b/src/analysis/alpha035/global.rs index 6104e80..4c0fd15 100644 --- a/src/analysis/alpha035/global.rs +++ b/src/analysis/alpha035/global.rs @@ -1,22 +1,38 @@ -use crate::{ - analysis::{ - self, import_symbol, insert_symbol_definition, map_import_path, - types::{make_union_type, matches_type, DataType}, - Context, FunctionContext, FunctionSymbol, ImportContext, SymbolInfo, SymbolType, - VariableSymbol, - }, - backend::Backend, - files::FileVersion, - grammar::{ - alpha035::{FunctionArgument, GlobalStatement, ImportContent}, - Span, Spanned, - }, - paths::FileId, +use crate::analysis::types::{ + make_union_type, + matches_type, + DataType, }; +use crate::analysis::{ + self, + import_symbol, + insert_symbol_definition, + map_import_path, + Context, + FunctionContext, + FunctionSymbol, + ImportContext, + SymbolInfo, + SymbolType, + VariableSymbol, +}; +use crate::backend::Backend; +use crate::files::FileVersion; +use crate::grammar::alpha035::{ + FunctionArgument, + GlobalStatement, + ImportContent, +}; +use crate::grammar::{ + Span, + Spanned, +}; +use crate::paths::FileId; -use super::{ - exp::analyze_exp, - stmnts::{analyze_stmnt, StmntAnalysisResult}, +use super::exp::analyze_exp; +use super::stmnts::{ + analyze_stmnt, + StmntAnalysisResult, }; #[tracing::instrument(skip_all)] diff --git a/src/analysis/alpha035/stmnts.rs b/src/analysis/alpha035/stmnts.rs index 7c401c6..b3ef1b8 100644 --- a/src/analysis/alpha035/stmnts.rs +++ b/src/analysis/alpha035/stmnts.rs @@ -1,21 +1,45 @@ -use crate::{ - 
analysis::{ - get_symbol_definition_info, insert_symbol_definition, insert_symbol_reference, - types::{make_union_type, matches_type, GenericsMap}, - BlockContext, Context, DataType, SymbolInfo, SymbolLocation, SymbolType, VariableSymbol, - }, - files::{FileVersion, Files}, - grammar::{ - alpha035::{ - Block, Comment, ElseCondition, FailureHandler, IfChainContent, IfCondition, - IterLoopVars, Statement, VariableInitType, - }, - CommandModifier, Spanned, - }, - paths::FileId, +use crate::analysis::types::{ + make_union_type, + matches_type, + GenericsMap, +}; +use crate::analysis::{ + get_symbol_definition_info, + insert_symbol_definition, + insert_symbol_reference, + BlockContext, + Context, + DataType, + SymbolInfo, + SymbolLocation, + SymbolType, + VariableSymbol, +}; +use crate::files::{ + FileVersion, + Files, }; +use crate::grammar::alpha035::{ + Block, + Comment, + ElseCondition, + FailureHandler, + IfChainContent, + IfCondition, + IterLoopVars, + Statement, + VariableInitType, +}; +use crate::grammar::{ + CommandModifier, + Spanned, +}; +use crate::paths::FileId; -use super::exp::{analyze_exp, ExpAnalysisResult}; +use super::exp::{ + analyze_exp, + ExpAnalysisResult, +}; #[derive(Debug, Clone)] pub struct StmntAnalysisResult { diff --git a/src/analysis/alpha040/exp.rs b/src/analysis/alpha040/exp.rs index ed6d75e..2d2c50b 100644 --- a/src/analysis/alpha040/exp.rs +++ b/src/analysis/alpha040/exp.rs @@ -2,22 +2,43 @@ use std::vec; use chumsky::span::SimpleSpan; -use crate::{ - analysis::{ - get_symbol_definition_info, insert_symbol_reference, - types::{make_union_type, matches_type, DataType, GenericsMap}, - BlockContext, Context, FunctionArgument, FunctionSymbol, SymbolInfo, SymbolLocation, - SymbolType, VariableSymbol, - }, - files::{FileVersion, Files}, - grammar::{ - alpha040::{Expression, InterpolatedCommand, InterpolatedText}, - CommandModifier, Spanned, - }, - paths::FileId, +use crate::analysis::types::{ + make_union_type, + matches_type, + DataType, + GenericsMap, }; +use crate::analysis::{ + get_symbol_definition_info, + insert_symbol_reference, + BlockContext, + Context, + FunctionArgument, + FunctionSymbol, + SymbolInfo, + SymbolLocation, + SymbolType, + VariableSymbol, +}; +use crate::files::{ + FileVersion, + Files, +}; +use crate::grammar::alpha040::{ + Expression, + InterpolatedCommand, + InterpolatedText, +}; +use crate::grammar::{ + CommandModifier, + Spanned, +}; +use crate::paths::FileId; -use super::stmnts::{analyze_failure_handler, StmntAnalysisResult}; +use super::stmnts::{ + analyze_failure_handler, + StmntAnalysisResult, +}; #[derive(Debug, Clone)] pub struct ExpAnalysisResult { diff --git a/src/analysis/alpha040/global.rs b/src/analysis/alpha040/global.rs index 45c4047..de92fd7 100644 --- a/src/analysis/alpha040/global.rs +++ b/src/analysis/alpha040/global.rs @@ -1,23 +1,39 @@ -use crate::{ - analysis::{ - self, import_symbol, insert_symbol_definition, map_import_path, - types::{make_union_type, matches_type, DataType}, - Context, FunctionContext, FunctionSymbol, ImportContext, SymbolInfo, SymbolType, - VariableSymbol, - }, - backend::Backend, - files::FileVersion, - grammar::{ - alpha040::{FunctionArgument, GlobalStatement, ImportContent}, - Span, Spanned, - }, - paths::FileId, - stdlib::is_builtin_file, +use crate::analysis::types::{ + make_union_type, + matches_type, + DataType, }; +use crate::analysis::{ + self, + import_symbol, + insert_symbol_definition, + map_import_path, + Context, + FunctionContext, + FunctionSymbol, + ImportContext, + SymbolInfo, + 
SymbolType, + VariableSymbol, +}; +use crate::backend::Backend; +use crate::files::FileVersion; +use crate::grammar::alpha040::{ + FunctionArgument, + GlobalStatement, + ImportContent, +}; +use crate::grammar::{ + Span, + Spanned, +}; +use crate::paths::FileId; +use crate::stdlib::is_builtin_file; -use super::{ - exp::analyze_exp, - stmnts::{analyze_stmnt, StmntAnalysisResult}, +use super::exp::analyze_exp; +use super::stmnts::{ + analyze_stmnt, + StmntAnalysisResult, }; #[tracing::instrument(skip_all)] diff --git a/src/analysis/alpha040/stmnts.rs b/src/analysis/alpha040/stmnts.rs index 629310e..ee51344 100644 --- a/src/analysis/alpha040/stmnts.rs +++ b/src/analysis/alpha040/stmnts.rs @@ -1,21 +1,45 @@ -use crate::{ - analysis::{ - get_symbol_definition_info, insert_symbol_definition, insert_symbol_reference, - types::{make_union_type, matches_type, GenericsMap}, - BlockContext, Context, DataType, SymbolInfo, SymbolLocation, SymbolType, VariableSymbol, - }, - files::{FileVersion, Files}, - grammar::{ - alpha040::{ - Block, Comment, ElseCondition, FailureHandler, IfChainContent, IfCondition, - IterLoopVars, Statement, VariableInitType, - }, - CommandModifier, Spanned, - }, - paths::FileId, +use crate::analysis::types::{ + make_union_type, + matches_type, + GenericsMap, +}; +use crate::analysis::{ + get_symbol_definition_info, + insert_symbol_definition, + insert_symbol_reference, + BlockContext, + Context, + DataType, + SymbolInfo, + SymbolLocation, + SymbolType, + VariableSymbol, +}; +use crate::files::{ + FileVersion, + Files, }; +use crate::grammar::alpha040::{ + Block, + Comment, + ElseCondition, + FailureHandler, + IfChainContent, + IfCondition, + IterLoopVars, + Statement, + VariableInitType, +}; +use crate::grammar::{ + CommandModifier, + Spanned, +}; +use crate::paths::FileId; -use super::exp::{analyze_exp, ExpAnalysisResult}; +use super::exp::{ + analyze_exp, + ExpAnalysisResult, +}; #[derive(Debug, Clone)] pub struct StmntAnalysisResult { diff --git a/src/analysis/alpha050/exp.rs b/src/analysis/alpha050/exp.rs index 9ac4261..78b8301 100644 --- a/src/analysis/alpha050/exp.rs +++ b/src/analysis/alpha050/exp.rs @@ -2,22 +2,44 @@ use std::vec; use chumsky::span::SimpleSpan; -use crate::{ - analysis::{ - get_symbol_definition_info, insert_symbol_reference, - types::{make_union_type, matches_type, DataType, GenericsMap}, - BlockContext, Context, FunctionArgument, FunctionSymbol, SymbolInfo, SymbolLocation, - SymbolType, VariableSymbol, - }, - files::{FileVersion, Files}, - grammar::{ - alpha050::{Expression, FailableHandler, InterpolatedCommand, InterpolatedText}, - CommandModifier, Spanned, - }, - paths::FileId, +use crate::analysis::types::{ + make_union_type, + matches_type, + DataType, + GenericsMap, }; +use crate::analysis::{ + get_symbol_definition_info, + insert_symbol_reference, + BlockContext, + Context, + FunctionArgument, + FunctionSymbol, + SymbolInfo, + SymbolLocation, + SymbolType, + VariableSymbol, +}; +use crate::files::{ + FileVersion, + Files, +}; +use crate::grammar::alpha050::{ + Expression, + FailableHandler, + InterpolatedCommand, + InterpolatedText, +}; +use crate::grammar::{ + CommandModifier, + Spanned, +}; +use crate::paths::FileId; -use super::stmnts::{analyze_failable_handlers, StmntAnalysisResult}; +use super::stmnts::{ + analyze_failable_handlers, + StmntAnalysisResult, +}; #[derive(Debug, Clone)] pub struct ExpAnalysisResult { diff --git a/src/analysis/alpha050/global.rs b/src/analysis/alpha050/global.rs index ccf2224..f0023ad 100644 --- 
a/src/analysis/alpha050/global.rs +++ b/src/analysis/alpha050/global.rs @@ -1,23 +1,41 @@ -use crate::{ - analysis::{ - self, import_symbol, insert_symbol_definition, map_import_path, - types::{make_union_type, matches_type, DataType}, - Context, FunctionContext, FunctionSymbol, ImportContext, SymbolInfo, SymbolType, - VariableSymbol, - }, - backend::Backend, - files::FileVersion, - grammar::{ - alpha050::{Block, FunctionArgument, GlobalStatement, ImportContent, Statement}, - Span, Spanned, - }, - paths::FileId, - stdlib::is_builtin_file, +use crate::analysis::types::{ + make_union_type, + matches_type, + DataType, }; +use crate::analysis::{ + self, + import_symbol, + insert_symbol_definition, + map_import_path, + Context, + FunctionContext, + FunctionSymbol, + ImportContext, + SymbolInfo, + SymbolType, + VariableSymbol, +}; +use crate::backend::Backend; +use crate::files::FileVersion; +use crate::grammar::alpha050::{ + Block, + FunctionArgument, + GlobalStatement, + ImportContent, + Statement, +}; +use crate::grammar::{ + Span, + Spanned, +}; +use crate::paths::FileId; +use crate::stdlib::is_builtin_file; -use super::{ - exp::analyze_exp, - stmnts::{analyze_stmnt, StmntAnalysisResult}, +use super::exp::analyze_exp; +use super::stmnts::{ + analyze_stmnt, + StmntAnalysisResult, }; #[tracing::instrument(skip_all)] diff --git a/src/analysis/alpha050/stmnts.rs b/src/analysis/alpha050/stmnts.rs index 9238c01..25cdc47 100644 --- a/src/analysis/alpha050/stmnts.rs +++ b/src/analysis/alpha050/stmnts.rs @@ -1,21 +1,46 @@ -use crate::{ - analysis::{ - get_symbol_definition_info, insert_symbol_definition, insert_symbol_reference, - types::{make_union_type, matches_type, GenericsMap}, - BlockContext, Context, DataType, SymbolInfo, SymbolLocation, SymbolType, VariableSymbol, - }, - files::{FileVersion, Files}, - grammar::{ - alpha050::{ - Block, Comment, ElseCondition, FailableHandler, FailureHandler, IfChainContent, - IfCondition, IterLoopVars, Statement, VariableInitType, - }, - CommandModifier, Spanned, - }, - paths::FileId, +use crate::analysis::types::{ + make_union_type, + matches_type, + GenericsMap, +}; +use crate::analysis::{ + get_symbol_definition_info, + insert_symbol_definition, + insert_symbol_reference, + BlockContext, + Context, + DataType, + SymbolInfo, + SymbolLocation, + SymbolType, + VariableSymbol, +}; +use crate::files::{ + FileVersion, + Files, }; +use crate::grammar::alpha050::{ + Block, + Comment, + ElseCondition, + FailableHandler, + FailureHandler, + IfChainContent, + IfCondition, + IterLoopVars, + Statement, + VariableInitType, +}; +use crate::grammar::{ + CommandModifier, + Spanned, +}; +use crate::paths::FileId; -use super::exp::{analyze_exp, ExpAnalysisResult}; +use super::exp::{ + analyze_exp, + ExpAnalysisResult, +}; #[derive(Debug, Clone)] pub struct StmntAnalysisResult { diff --git a/src/analysis/mod.rs b/src/analysis/mod.rs index a2bd989..f04cb16 100644 --- a/src/analysis/mod.rs +++ b/src/analysis/mod.rs @@ -1,16 +1,30 @@ use rangemap::RangeInclusiveMap; -use std::{collections::HashMap, ops::RangeInclusive}; -use tower_lsp_server::{lsp_types::Uri, UriExt}; -use types::{DataType, GenericsMap}; - -use crate::{ - backend::{AmberVersion, Backend}, - files::{FileVersion, Files}, - grammar::{CommandModifier, CompilerFlag, Span, Spanned}, - paths::FileId, - stdlib::resolve, +use std::collections::HashMap; +use std::ops::RangeInclusive; +use tower_lsp_server::lsp_types::Uri; +use tower_lsp_server::UriExt; +use types::{ + DataType, + GenericsMap, }; +use crate::backend::{ + 
AmberVersion, + Backend, +}; +use crate::files::{ + FileVersion, + Files, +}; +use crate::grammar::{ + CommandModifier, + CompilerFlag, + Span, + Spanned, +}; +use crate::paths::FileId; +use crate::stdlib::resolve; + pub mod alpha034; pub mod alpha035; pub mod alpha040; diff --git a/src/analysis/types.rs b/src/analysis/types.rs index f9f2769..5696ddd 100644 --- a/src/analysis/types.rs +++ b/src/analysis/types.rs @@ -1,11 +1,16 @@ -use std::fmt::{self, Display}; +use std::collections::HashSet; +use std::fmt::{ + self, + Display, +}; +use std::sync::atomic::AtomicUsize; use std::sync::atomic::Ordering::SeqCst; -use std::{collections::HashSet, sync::atomic::AtomicUsize}; -use crate::{ - files::FileVersion, - paths::FileId, - utils::{FastDashMap, FastDashSet}, +use crate::files::FileVersion; +use crate::paths::FileId; +use crate::utils::{ + FastDashMap, + FastDashSet, }; #[derive(PartialEq, Eq, Clone, Hash)] diff --git a/src/backend.rs b/src/backend.rs index f8dd812..0464092 100644 --- a/src/backend.rs +++ b/src/backend.rs @@ -5,20 +5,47 @@ use std::sync::Arc; use chumsky::container::Seq; use ropey::Rope; use tokio::sync::RwLock; -use tower_lsp_server::jsonrpc::{Error, Result}; +use tower_lsp_server::jsonrpc::{ + Error, + Result, +}; use tower_lsp_server::lsp_types::*; -use tower_lsp_server::UriExt; -use tower_lsp_server::{Client, LanguageServer}; +use tower_lsp_server::{ + Client, + LanguageServer, + UriExt, +}; use crate::analysis::{ - self, get_symbol_definition_info, Context, FunctionSymbol, SymbolInfo, SymbolTable, SymbolType, + self, + get_symbol_definition_info, + Context, + FunctionSymbol, + SymbolInfo, + SymbolTable, + SymbolType, VariableSymbol, }; -use crate::files::{FileVersion, Files, DEFAULT_VERSION}; -use crate::fs::{LocalFs, FS}; -use crate::grammar::{self, Grammar, LSPAnalysis, ParserResponse}; +use crate::files::{ + FileVersion, + Files, + DEFAULT_VERSION, +}; +use crate::fs::{ + LocalFs, + FS, +}; +use crate::grammar::{ + self, + Grammar, + LSPAnalysis, + ParserResponse, +}; use crate::paths::FileId; -use crate::stdlib::{find_in_stdlib, save_resources}; +use crate::stdlib::{ + find_in_stdlib, + save_resources, +}; type PinnedFuture<'a, T> = Pin> + Send + 'a>>; diff --git a/src/files.rs b/src/files.rs index d4af875..5586e0b 100644 --- a/src/files.rs +++ b/src/files.rs @@ -5,13 +5,19 @@ use ropey::Rope; use tokio::sync::RwLock; use tower_lsp_server::lsp_types::Uri; -use crate::{ - analysis::{types::GenericsMap, SymbolTable}, - fs::FS, - grammar::{Grammar, Spanned, SpannedSemanticToken}, - paths::{FileId, PathInterner}, - utils::FastDashMap, +use crate::analysis::types::GenericsMap; +use crate::analysis::SymbolTable; +use crate::fs::FS; +use crate::grammar::{ + Grammar, + Spanned, + SpannedSemanticToken, }; +use crate::paths::{ + FileId, + PathInterner, +}; +use crate::utils::FastDashMap; #[derive(Copy, Debug, Clone, PartialEq, Eq, Hash)] pub struct FileVersion(pub i32); diff --git a/src/fs.rs b/src/fs.rs index ce3b0ad..144cf7c 100644 --- a/src/fs.rs +++ b/src/fs.rs @@ -1,14 +1,24 @@ -use std::{ - collections::HashMap, - fmt::Debug, - future::Future, - io::Result, - path::{Path, PathBuf}, - pin::Pin, - sync::{Arc, Mutex}, +use std::collections::HashMap; +use std::fmt::Debug; +use std::future::Future; +use std::io::Result; +use std::path::{ + Path, + PathBuf, +}; +use std::pin::Pin; +use std::sync::{ + Arc, + Mutex, }; -use tokio::fs::{create_dir_all, metadata, read_dir, read_to_string, write}; +use tokio::fs::{ + create_dir_all, + metadata, + read_dir, + read_to_string, + 
write, +}; pub trait FS: Sync + Send + Debug { fn read<'a>( diff --git a/src/grammar/alpha034/expressions/and.rs b/src/grammar/alpha034/expressions/and.rs index ef7b095..7dfcaa8 100644 --- a/src/grammar/alpha034/expressions/and.rs +++ b/src/grammar/alpha034/expressions/and.rs @@ -1,11 +1,14 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha034::{ - lexer::Token, parser::default_recovery, AmberParser, Expression, Spanned, Statement, - }, - T, +use crate::grammar::alpha034::parser::default_recovery; +use crate::grammar::alpha034::{ + AmberParser, + Expression, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; use super::comparison::comparison_parser; diff --git a/src/grammar/alpha034/expressions/atom/array.rs b/src/grammar/alpha034/expressions/atom/array.rs index 0c235ed..40deb3c 100644 --- a/src/grammar/alpha034/expressions/atom/array.rs +++ b/src/grammar/alpha034/expressions/atom/array.rs @@ -1,9 +1,13 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha034::{lexer::Token, parser::default_recovery, AmberParser, Expression, Spanned}, - T, +use crate::grammar::alpha034::parser::default_recovery; +use crate::grammar::alpha034::{ + AmberParser, + Expression, + Spanned, }; +use crate::grammar::Token; +use crate::T; pub fn array_parser<'a>( expr: impl AmberParser<'a, Spanned>, diff --git a/src/grammar/alpha034/expressions/atom/bool.rs b/src/grammar/alpha034/expressions/atom/bool.rs index 1b0f86d..5f04256 100644 --- a/src/grammar/alpha034/expressions/atom/bool.rs +++ b/src/grammar/alpha034/expressions/atom/bool.rs @@ -1,7 +1,10 @@ -use crate::{ - grammar::alpha034::{lexer::Token, AmberParser, Expression, Spanned}, - T, +use crate::grammar::alpha034::{ + AmberParser, + Expression, + Spanned, }; +use crate::grammar::Token; +use crate::T; use chumsky::prelude::*; pub fn bool_parser<'a>() -> impl AmberParser<'a, Spanned> { diff --git a/src/grammar/alpha034/expressions/atom/call.rs b/src/grammar/alpha034/expressions/atom/call.rs index f78cbec..90c603a 100644 --- a/src/grammar/alpha034/expressions/atom/call.rs +++ b/src/grammar/alpha034/expressions/atom/call.rs @@ -1,12 +1,17 @@ -use crate::{ - grammar::alpha034::{ - lexer::Token, - parser::{default_recovery, ident}, - statements::{failed::failure_parser, modifiers::modifier_parser}, - AmberParser, Expression, Spanned, Statement, - }, - T, +use crate::grammar::alpha034::parser::{ + default_recovery, + ident, }; +use crate::grammar::alpha034::statements::failed::failure_parser; +use crate::grammar::alpha034::statements::modifiers::modifier_parser; +use crate::grammar::alpha034::{ + AmberParser, + Expression, + Spanned, + Statement, +}; +use crate::grammar::Token; +use crate::T; use chumsky::prelude::*; pub fn function_call_parser<'a>( diff --git a/src/grammar/alpha034/expressions/atom/command.rs b/src/grammar/alpha034/expressions/atom/command.rs index 06f9c5d..98e0ec4 100644 --- a/src/grammar/alpha034/expressions/atom/command.rs +++ b/src/grammar/alpha034/expressions/atom/command.rs @@ -1,14 +1,17 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha034::{ - lexer::Token, - parser::default_recovery, - statements::{failed::failure_parser, modifiers::modifier_parser}, - AmberParser, Expression, InterpolatedCommand, Spanned, Statement, - }, - T, +use crate::grammar::alpha034::parser::default_recovery; +use crate::grammar::alpha034::statements::failed::failure_parser; +use crate::grammar::alpha034::statements::modifiers::modifier_parser; +use crate::grammar::alpha034::{ + AmberParser, + Expression, + 
InterpolatedCommand, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; pub fn command_parser<'a>( stmnts: impl AmberParser<'a, Spanned>, diff --git a/src/grammar/alpha034/expressions/atom/mod.rs b/src/grammar/alpha034/expressions/atom/mod.rs index 100a1b3..8c8d9e9 100644 --- a/src/grammar/alpha034/expressions/atom/mod.rs +++ b/src/grammar/alpha034/expressions/atom/mod.rs @@ -1,4 +1,8 @@ -use crate::grammar::alpha034::{AmberParser, Spanned, Statement}; +use crate::grammar::alpha034::{ + AmberParser, + Spanned, + Statement, +}; use super::super::Expression; use chumsky::prelude::*; diff --git a/src/grammar/alpha034/expressions/atom/null.rs b/src/grammar/alpha034/expressions/atom/null.rs index 6aa908e..991a0db 100644 --- a/src/grammar/alpha034/expressions/atom/null.rs +++ b/src/grammar/alpha034/expressions/atom/null.rs @@ -1,7 +1,10 @@ -use crate::{ - grammar::alpha034::{lexer::Token, AmberParser, Expression, Spanned}, - T, +use crate::grammar::alpha034::{ + AmberParser, + Expression, + Spanned, }; +use crate::grammar::Token; +use crate::T; use chumsky::prelude::*; pub fn null_parser<'a>() -> impl AmberParser<'a, Spanned> { diff --git a/src/grammar/alpha034/expressions/atom/number.rs b/src/grammar/alpha034/expressions/atom/number.rs index 0a840ec..ecaa769 100644 --- a/src/grammar/alpha034/expressions/atom/number.rs +++ b/src/grammar/alpha034/expressions/atom/number.rs @@ -1,36 +1,32 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha034::{lexer::Token, AmberParser, Spanned}, - T, +use crate::grammar::alpha034::{ + AmberParser, + Expression, +}; +use crate::grammar::{ + Spanned, + Token, }; - -use super::Expression; pub fn number_parser<'a>() -> impl AmberParser<'a, Spanned> { - let int = any().try_map(|token: Token, span| { - let word = token.to_string(); - - for char in word.chars() { - if !char.is_ascii_digit() { - return Err(Rich::custom(span, "int must contain only digits")); - } - } + any() + .try_map(|token: Token, span| { + let word = token.to_string(); - Ok(word) - }); + let num_str = if word.starts_with('.') { + // For numbers like ".4", prepend "0" to make "0.4" + format!("0{}", word) + } else { + word + }; - choice(( - int.then(just(T!['.']).ignore_then(int)) - .map(|(int, float)| format!("{int}.{float}")), - just(T!['.']) - .ignore_then(int) - .map(|float| format!("0.{float}")), - int.map(|int| format!("{int}.0")), - )) - .from_str::() - .unwrapped() - .map_with(|num, e| (Expression::Number((num, e.span())), e.span())) - .boxed() - .labelled("number") + // Parse as f32 + num_str + .parse::() + .map_err(|_| Rich::custom(span, format!("invalid number format: {}", num_str))) + }) + .map_with(|num, e| (Expression::Number((num, e.span())), e.span())) + .boxed() + .labelled("number") } diff --git a/src/grammar/alpha034/expressions/atom/parentheses.rs b/src/grammar/alpha034/expressions/atom/parentheses.rs index cee3411..726cd31 100644 --- a/src/grammar/alpha034/expressions/atom/parentheses.rs +++ b/src/grammar/alpha034/expressions/atom/parentheses.rs @@ -1,7 +1,10 @@ -use crate::{ - grammar::alpha034::{lexer::Token, parser::default_recovery, AmberParser, Spanned}, - T, +use crate::grammar::alpha034::parser::default_recovery; +use crate::grammar::alpha034::{ + AmberParser, + Spanned, }; +use crate::grammar::Token; +use crate::T; use super::super::Expression; use chumsky::prelude::*; diff --git a/src/grammar/alpha034/expressions/atom/status.rs b/src/grammar/alpha034/expressions/atom/status.rs index 5a70e1a..b9efeef 100644 --- 
a/src/grammar/alpha034/expressions/atom/status.rs +++ b/src/grammar/alpha034/expressions/atom/status.rs @@ -1,7 +1,10 @@ -use crate::{ - grammar::alpha034::{lexer::Token, AmberParser, Expression, Spanned}, - T, +use crate::grammar::alpha034::{ + AmberParser, + Expression, + Spanned, }; +use crate::grammar::Token; +use crate::T; use chumsky::prelude::*; pub fn status_var_parser<'a>() -> impl AmberParser<'a, Spanned> { diff --git a/src/grammar/alpha034/expressions/atom/text.rs b/src/grammar/alpha034/expressions/atom/text.rs index ed51bfe..62e689c 100644 --- a/src/grammar/alpha034/expressions/atom/text.rs +++ b/src/grammar/alpha034/expressions/atom/text.rs @@ -1,9 +1,12 @@ -use crate::{ - grammar::alpha034::{ - lexer::Token, parser::default_recovery, AmberParser, Expression, InterpolatedText, Spanned, - }, - T, +use crate::grammar::alpha034::parser::default_recovery; +use crate::grammar::alpha034::{ + AmberParser, + Expression, + InterpolatedText, + Spanned, }; +use crate::grammar::Token; +use crate::T; use chumsky::prelude::*; pub fn text_parser<'a>( diff --git a/src/grammar/alpha034/expressions/atom/var.rs b/src/grammar/alpha034/expressions/atom/var.rs index 00c4b29..6f68ca9 100644 --- a/src/grammar/alpha034/expressions/atom/var.rs +++ b/src/grammar/alpha034/expressions/atom/var.rs @@ -1,4 +1,8 @@ -use crate::grammar::alpha034::{parser::ident, AmberParser, Spanned}; +use crate::grammar::alpha034::parser::ident; +use crate::grammar::alpha034::{ + AmberParser, + Spanned, +}; use super::Expression; use chumsky::prelude::*; diff --git a/src/grammar/alpha034/expressions/cast.rs b/src/grammar/alpha034/expressions/cast.rs index 4d59fca..8ccefc0 100644 --- a/src/grammar/alpha034/expressions/cast.rs +++ b/src/grammar/alpha034/expressions/cast.rs @@ -1,12 +1,16 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha034::{ - global::type_parser, lexer::Token, parser::default_recovery, AmberParser, DataType, - Expression, Spanned, Statement, - }, - T, +use crate::grammar::alpha034::global::type_parser; +use crate::grammar::alpha034::parser::default_recovery; +use crate::grammar::alpha034::{ + AmberParser, + DataType, + Expression, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; use super::unary::unary_parser; diff --git a/src/grammar/alpha034/expressions/comparison.rs b/src/grammar/alpha034/expressions/comparison.rs index 9932488..ba54b53 100644 --- a/src/grammar/alpha034/expressions/comparison.rs +++ b/src/grammar/alpha034/expressions/comparison.rs @@ -1,11 +1,14 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha034::{ - lexer::Token, parser::default_recovery, AmberParser, Expression, Spanned, Statement, - }, - T, +use crate::grammar::alpha034::parser::default_recovery; +use crate::grammar::alpha034::{ + AmberParser, + Expression, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; use super::sum::sum_parser; diff --git a/src/grammar/alpha034/expressions/is.rs b/src/grammar/alpha034/expressions/is.rs index ce5b8ed..7aa33b3 100644 --- a/src/grammar/alpha034/expressions/is.rs +++ b/src/grammar/alpha034/expressions/is.rs @@ -1,12 +1,16 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha034::{ - global::type_parser, lexer::Token, parser::default_recovery, AmberParser, DataType, - Expression, Spanned, Statement, - }, - T, +use crate::grammar::alpha034::global::type_parser; +use crate::grammar::alpha034::parser::default_recovery; +use crate::grammar::alpha034::{ + AmberParser, + DataType, + Expression, + Spanned, + Statement, }; +use 
crate::grammar::Token; +use crate::T; use super::cast::cast_parser; diff --git a/src/grammar/alpha034/expressions/mod.rs b/src/grammar/alpha034/expressions/mod.rs index 71cb80b..ef862f3 100644 --- a/src/grammar/alpha034/expressions/mod.rs +++ b/src/grammar/alpha034/expressions/mod.rs @@ -1,6 +1,11 @@ use chumsky::prelude::*; -use super::{AmberParser, Expression, Spanned, Statement}; +use super::{ + AmberParser, + Expression, + Spanned, + Statement, +}; mod and; mod atom; diff --git a/src/grammar/alpha034/expressions/or.rs b/src/grammar/alpha034/expressions/or.rs index 0b90a52..29343db 100644 --- a/src/grammar/alpha034/expressions/or.rs +++ b/src/grammar/alpha034/expressions/or.rs @@ -1,11 +1,16 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha034::{lexer::Token, parser::default_recovery, AmberParser, Spanned, Statement}, - T, +use crate::grammar::alpha034::parser::default_recovery; +use crate::grammar::alpha034::{ + AmberParser, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; -use super::{and::and_parser, Expression}; +use super::and::and_parser; +use super::Expression; pub fn or_parser<'a>( stmnts: impl AmberParser<'a, Spanned>, diff --git a/src/grammar/alpha034/expressions/product.rs b/src/grammar/alpha034/expressions/product.rs index c7c1dd2..1c7fdf2 100644 --- a/src/grammar/alpha034/expressions/product.rs +++ b/src/grammar/alpha034/expressions/product.rs @@ -1,11 +1,14 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha034::{ - lexer::Token, parser::default_recovery, AmberParser, Expression, Spanned, Statement, - }, - T, +use crate::grammar::alpha034::parser::default_recovery; +use crate::grammar::alpha034::{ + AmberParser, + Expression, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; use super::is::is_parser; diff --git a/src/grammar/alpha034/expressions/range.rs b/src/grammar/alpha034/expressions/range.rs index 977960d..0026623 100644 --- a/src/grammar/alpha034/expressions/range.rs +++ b/src/grammar/alpha034/expressions/range.rs @@ -1,11 +1,14 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha034::{ - lexer::Token, parser::default_recovery, AmberParser, Expression, Spanned, Statement, - }, - T, +use crate::grammar::alpha034::parser::default_recovery; +use crate::grammar::alpha034::{ + AmberParser, + Expression, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; use super::or::or_parser; diff --git a/src/grammar/alpha034/expressions/sum.rs b/src/grammar/alpha034/expressions/sum.rs index eda764e..3becbf6 100644 --- a/src/grammar/alpha034/expressions/sum.rs +++ b/src/grammar/alpha034/expressions/sum.rs @@ -1,11 +1,14 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha034::{ - lexer::Token, parser::default_recovery, AmberParser, Expression, Spanned, Statement, - }, - T, +use crate::grammar::alpha034::parser::default_recovery; +use crate::grammar::alpha034::{ + AmberParser, + Expression, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; use super::product::product_parser; diff --git a/src/grammar/alpha034/expressions/ternary.rs b/src/grammar/alpha034/expressions/ternary.rs index a418942..552102d 100644 --- a/src/grammar/alpha034/expressions/ternary.rs +++ b/src/grammar/alpha034/expressions/ternary.rs @@ -1,8 +1,12 @@ use chumsky::prelude::*; -use crate::grammar::alpha034::lexer::Token; use crate::grammar::alpha034::parser::default_recovery; -use crate::grammar::alpha034::{AmberParser, Spanned, Statement}; +use crate::grammar::alpha034::{ + AmberParser, + Spanned, + Statement, 
+}; +use crate::grammar::Token; use crate::T; use super::range::range_parser; diff --git a/src/grammar/alpha034/expressions/unary.rs b/src/grammar/alpha034/expressions/unary.rs index 9c5b3f4..932be42 100644 --- a/src/grammar/alpha034/expressions/unary.rs +++ b/src/grammar/alpha034/expressions/unary.rs @@ -1,9 +1,13 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha034::{lexer::Token, AmberParser, Expression, Spanned, Statement}, - T, +use crate::grammar::alpha034::{ + AmberParser, + Expression, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; use super::atom::atom_parser; diff --git a/src/grammar/alpha034/global.rs b/src/grammar/alpha034/global.rs index 22a5ae0..a100578 100644 --- a/src/grammar/alpha034/global.rs +++ b/src/grammar/alpha034/global.rs @@ -1,12 +1,21 @@ use chumsky::prelude::*; -use crate::{analysis::types::DataType, T}; +use crate::analysis::types::DataType; +use crate::grammar::Token; +use crate::T; +use super::parser::{ + default_recovery, + ident, +}; +use super::statements::statement_parser; use super::{ - lexer::Token, - parser::{default_recovery, ident}, - statements::statement_parser, - AmberParser, CompilerFlag, FunctionArgument, GlobalStatement, ImportContent, Spanned, + AmberParser, + CompilerFlag, + FunctionArgument, + GlobalStatement, + ImportContent, + Spanned, Statement, }; @@ -105,8 +114,7 @@ pub fn type_parser<'a>() -> impl AmberParser<'a, Spanned> { } fn compiler_flag_parser<'a>() -> impl AmberParser<'a, Spanned> { - just(T!["#"]) - .ignore_then(just(T!["["])) + just(T!["#["]) .ignore_then( choice(( just(T!["allow_nested_if_else"]).to(CompilerFlag::AllowNestedIfElse), diff --git a/src/grammar/alpha034/lexer.rs b/src/grammar/alpha034/lexer.rs index 278aaef..720bdac 100644 --- a/src/grammar/alpha034/lexer.rs +++ b/src/grammar/alpha034/lexer.rs @@ -1,74 +1,410 @@ -use heraclitus_compiler::prelude::*; - -pub use crate::grammar::Token; - -pub fn get_rules() -> Rules { - let symbols = vec![ - '+', '-', '*', '/', '%', ';', ':', '(', ')', '[', ']', '{', '}', ',', '.', '<', '>', '=', - '!', '?', '\\', '"', '$', '\n', - ]; - let compounds = vec![ - ('<', '='), - ('>', '='), - ('!', '='), - ('=', '='), - ('+', '='), - ('-', '='), - ('*', '='), - ('/', '='), - ('%', '='), - ('.', '.'), - ('/', '/'), - ]; - let region = reg![ - reg!(string as "string literal" => { - begin: "\"", - end: "\"", - tokenize: true, - allow_unclosed_region: true - } => [ - reg!(str_interp as "string interpolation" => { - begin: "{", - end: "}", - tokenize: true, - allow_unclosed_region: true - } ref global) - ]), - reg!(command as "command literal" => { - begin: "$", - end: "$", - tokenize: true, - allow_unclosed_region: true - } => [ - reg!(com_interp as "command interpolation" => { - begin: "{", - end: "}", - tokenize: true, - allow_unclosed_region: true - } ref global) - ]), - reg!(cc_flag as "compiler flag" => { - begin: "#[", - end: "]", - tokenize: true, - allow_unclosed_region: true - }), - reg!(comment as "comment" => { - begin: "//", - end: "\n", - allow_unclosed_region: true - }), - reg!(comment as "shebang" => { - begin: "#!", - end: "\n", - allow_unclosed_region: true - }), - reg!(interp as "interpolation" => { - begin: "{", - end: "}", - tokenize: true, - allow_unclosed_region: true - } ref global) - ]; - Rules::new(symbols, compounds, region) +use logos::Logos; + +use super::Spanned; +use crate::grammar::{ + SimpleSpan, + Token, +}; + +/// Main token context - general code +#[derive(Logos, Debug, Clone, PartialEq)] +#[logos(skip r"[ \t\r]+")] +pub enum 
TokenKind { + // Compound operators + #[token("<=")] + LessEquals, + #[token(">=")] + GreaterEquals, + #[token("!=")] + NotEquals, + #[token("==")] + EqualsEquals, + #[token("+=")] + PlusEquals, + #[token("-=")] + MinusEquals, + #[token("*=")] + MulEquals, + #[token("/=")] + DivEquals, + #[token("%=")] + ModEquals, + #[token("..")] + DotDot, + + // Single operators + #[token("+")] + Plus, + #[token("-")] + Minus, + #[token("*")] + Star, + #[token("/")] + Slash, + #[token("%")] + Percent, + #[token("<")] + Less, + #[token(">")] + Greater, + #[token("=")] + Equals, + #[token("!")] + Bang, + #[token("?")] + Question, + + // Delimiters + #[token("(")] + OpenParen, + #[token(")")] + CloseParen, + #[token("[")] + OpenBracket, + #[token("]")] + CloseBracket, + #[token("{")] + OpenBrace, + #[token("}")] + CloseBrace, + + // Punctuation + #[token(";")] + Semicolon, + #[token(":")] + Colon, + #[token(",")] + Comma, + #[token(".")] + Dot, + #[token("\\")] + Backslash, + + // String start (transition to StringContext) + #[token("\"")] + Quote, + + // Command start/end (transition to CommandContext) + #[token("$")] + Dollar, + + // Compiler flag start (transition to CompilerFlagContext) + #[token("#[")] + CompilerFlagStart, + + // Comments (include trailing newline to match Heraclitus behavior) + #[regex(r"//[^\n]*\n?", priority = 2)] + Comment, + #[regex(r"#![^\n]*\n?", priority = 2)] + Shebang, + + // Identifier + #[regex(r"[a-zA-Z_][a-zA-Z0-9_]*")] + Identifier, + + // Number + #[regex(r"([0-9]+(\.[0-9]+)?)|(\.[0-9]+)")] + Number, + + // Newline + #[token("\n")] + Newline, +} + +/// String content context - inside "..." +#[derive(Logos, Debug, Clone, PartialEq)] +pub enum StringContext { + // String end + #[token("\"")] + Quote, + + // Interpolation start + #[token("{")] + OpenBrace, + + // Text content (not quotes, backslashes, or braces) + #[regex(r#"[^"\\{]+"#)] + Content, + + // Escape sequence + #[regex(r#"\\."#)] + Escape, +} + +/// Command content context - inside $...$ +#[derive(Logos, Debug, Clone, PartialEq)] +pub enum CommandContext { + // Command end + #[token("$")] + Dollar, + + // Interpolation start + #[token("{")] + OpenBrace, + + // Text content (not $, backslash, or braces) + #[regex(r"[^$\\{]+")] + Content, + + // Escape sequence + #[regex(r#"\\."#)] + Escape, +} + +/// Context stack to track where we are in parsing +#[derive(Debug, Clone, PartialEq)] +enum LexerContext { + Main, + String, + Command, +} + +/// Stateful tokenizer that manages context switching +pub struct StatefulTokenizer<'source> { + source: &'source str, + position: usize, + context_stack: Vec, + brace_depth: usize, // Track brace nesting inside interpolations +} + +impl<'source> StatefulTokenizer<'source> { + fn new(source: &'source str) -> Self { + Self { + source, + position: 0, + context_stack: vec![LexerContext::Main], + brace_depth: 0, + } + } + + pub fn tokenize(source: &'source str) -> Vec> { + let mut tokenizer = Self::new(source); + let mut tokens = Vec::new(); + + while tokenizer.position < source.len() { + if let Some((token, span)) = tokenizer.next_token() { + tokens.push((token, span)); + } else { + break; + } + } + + // Filter out newline tokens - they're used for position tracking but not needed for parsing + tokens + .into_iter() + .filter(|(token, _)| token.0 != "\n") + .collect() + } + + fn next_token(&mut self) -> Option<(Token, SimpleSpan)> { + if self.position >= self.source.len() { + return None; + } + + let remaining = &self.source[self.position..]; + let context = 
self.context_stack.last()?.clone(); + + match context { + LexerContext::Main => self.lex_main_context(remaining), + LexerContext::String => self.lex_string_context(remaining), + LexerContext::Command => self.lex_command_context(remaining), + } + } + + fn lex_main_context(&mut self, remaining: &str) -> Option<(Token, SimpleSpan)> { + let mut lex = TokenKind::lexer(remaining); + let token_result = lex.next()?; + let span = lex.span(); + let slice = lex.slice(); + + // span is relative to `remaining`, so add current position to get absolute + let start = self.position + span.start; + let end = self.position + span.end; + self.position = end; + + match token_result { + Ok(TokenKind::Quote) => { + // Entering string context + self.context_stack.push(LexerContext::String); + Some((Token("\"".to_string()), SimpleSpan::new(start, end))) + } + Ok(TokenKind::Dollar) => { + // Check if this starts a command. + // If the next char is another '$', do not start a command (this is the first of $$). + // If this is the second of a $$ (previous char was '$'), only start a command + // when the following char is not whitespace. Otherwise single $ starts a command. + let prev_is_dollar = start > 0 && self.source[start - 1..].starts_with('$'); + + let is_command_start = if span.end < remaining.len() { + // Look at the next char after this $ in the remaining slice + let mut next_chars = remaining[span.end..].chars(); + match next_chars.next() { + Some(next_ch) => { + if next_ch == '$' { + // next is a dollar -> this $ does not start a command + false + } else if prev_is_dollar { + // this is the second $ in $$ — only start a command if not followed by whitespace + !next_ch.is_whitespace() + } else { + // single $ (not followed by $) -> start a command + true + } + } + None => false, + } + } else { + false + }; + + if is_command_start { + // Entering command context + self.context_stack.push(LexerContext::Command); + } + Some((Token("$".to_string()), SimpleSpan::new(start, end))) + } + Ok(TokenKind::OpenBrace) => { + // Only track brace depth if we're inside an interpolation (context stack > 1) + if self.context_stack.len() > 1 { + self.brace_depth += 1; + } + Some((Token("{".to_string()), SimpleSpan::new(start, end))) + } + Ok(TokenKind::CloseBrace) => { + // Only track brace depth if we're inside an interpolation + if self.context_stack.len() > 1 && self.brace_depth > 0 { + self.brace_depth -= 1; + if self.brace_depth == 0 { + // Exiting interpolation, pop back to string/command context + self.context_stack.pop(); + } + } + Some((Token("}".to_string()), SimpleSpan::new(start, end))) + } + Ok(kind) => { + let token_str = token_kind_to_string(&kind, slice); + Some((Token(token_str), SimpleSpan::new(start, end))) + } + Err(_) => Some((Token(slice.to_string()), SimpleSpan::new(start, end))), + } + } + + fn lex_string_context(&mut self, remaining: &str) -> Option<(Token, SimpleSpan)> { + let mut lex = StringContext::lexer(remaining); + let token_result = lex.next()?; + let span = lex.span(); + let slice = lex.slice(); + + // span is relative to `remaining`, so add current position to get absolute + let start = self.position + span.start; + let end = self.position + span.end; + self.position = end; + + match token_result { + Ok(StringContext::Quote) => { + // Exiting string context + self.context_stack.pop(); + Some((Token("\"".to_string()), SimpleSpan::new(start, end))) + } + Ok(StringContext::OpenBrace) => { + // Entering interpolation - switch back to main context + self.context_stack.push(LexerContext::Main); + 
self.brace_depth = 1; + Some((Token("{".to_string()), SimpleSpan::new(start, end))) + } + Ok(StringContext::Content) | Ok(StringContext::Escape) => { + Some((Token(slice.to_string()), SimpleSpan::new(start, end))) + } + Err(_) => Some((Token(slice.to_string()), SimpleSpan::new(start, end))), + } + } + + fn lex_command_context(&mut self, remaining: &str) -> Option<(Token, SimpleSpan)> { + let mut lex = CommandContext::lexer(remaining); + let token_result = lex.next()?; + let span = lex.span(); + let slice = lex.slice(); + + // span is relative to `remaining`, so add current position to get absolute + let start = self.position + span.start; + let end = self.position + span.end; + self.position = end; + + match token_result { + Ok(CommandContext::Dollar) => { + // Exiting command context + self.context_stack.pop(); + Some((Token("$".to_string()), SimpleSpan::new(start, end))) + } + Ok(CommandContext::OpenBrace) => { + // Entering interpolation - switch back to main context + self.context_stack.push(LexerContext::Main); + self.brace_depth = 1; + Some((Token("{".to_string()), SimpleSpan::new(start, end))) + } + Ok(CommandContext::Content) | Ok(CommandContext::Escape) => { + // If this command context was entered as the second `$` of `$$` and + // the content starts with whitespace (e.g. `$$ failed {`), trim the + // leading whitespace so the identifier/tokenization matches the + // parser's expectations (no leading spaces). + let token_text = if start > 0 && self.source[start - 1..].starts_with('$') { + // second of $$ - trim leading whitespace from content + slice.trim_start().to_string() + } else { + slice.to_string() + }; + Some((Token(token_text), SimpleSpan::new(start, end))) + } + Err(_) => Some((Token(slice.to_string()), SimpleSpan::new(start, end))), + } + } +} + +fn token_kind_to_string(kind: &TokenKind, slice: &str) -> String { + match kind { + TokenKind::LessEquals => "<=".to_string(), + TokenKind::GreaterEquals => ">=".to_string(), + TokenKind::NotEquals => "!=".to_string(), + TokenKind::EqualsEquals => "==".to_string(), + TokenKind::PlusEquals => "+=".to_string(), + TokenKind::MinusEquals => "-=".to_string(), + TokenKind::MulEquals => "*=".to_string(), + TokenKind::DivEquals => "/=".to_string(), + TokenKind::ModEquals => "%=".to_string(), + TokenKind::DotDot => "..".to_string(), + TokenKind::Plus => "+".to_string(), + TokenKind::Minus => "-".to_string(), + TokenKind::Star => "*".to_string(), + TokenKind::Slash => "/".to_string(), + TokenKind::Percent => "%".to_string(), + TokenKind::Less => "<".to_string(), + TokenKind::Greater => ">".to_string(), + TokenKind::Equals => "=".to_string(), + TokenKind::Bang => "!".to_string(), + TokenKind::Question => "?".to_string(), + TokenKind::OpenParen => "(".to_string(), + TokenKind::CloseParen => ")".to_string(), + TokenKind::OpenBracket => "[".to_string(), + TokenKind::CloseBracket => "]".to_string(), + TokenKind::OpenBrace => "{".to_string(), + TokenKind::CloseBrace => "}".to_string(), + TokenKind::Semicolon => ";".to_string(), + TokenKind::Colon => ":".to_string(), + TokenKind::Comma => ",".to_string(), + TokenKind::Dot => ".".to_string(), + TokenKind::Backslash => "\\".to_string(), + TokenKind::Quote => "\"".to_string(), + TokenKind::Dollar => "$".to_string(), + TokenKind::Newline => "\n".to_string(), + TokenKind::CompilerFlagStart => "#[".to_string(), + // For these, use the actual slice + TokenKind::Comment | TokenKind::Shebang | TokenKind::Identifier | TokenKind::Number => { + slice.to_string() + } + } +} + +/// Public tokenize function 
+pub fn tokenize(input: &str) -> Vec> { + StatefulTokenizer::tokenize(input) } diff --git a/src/grammar/alpha034/mod.rs b/src/grammar/alpha034/mod.rs index d9f3843..d52b50e 100644 --- a/src/grammar/alpha034/mod.rs +++ b/src/grammar/alpha034/mod.rs @@ -1,17 +1,22 @@ use crate::analysis::types::DataType; pub use super::Spanned; -use super::{CommandModifier, CompilerFlag, Grammar, LSPAnalysis, ParserResponse, Span}; -use chumsky::{ - error::Rich, - extra::Err, - input::{Input, SpannedInput}, - span::SimpleSpan, - Parser, +use super::{ + CommandModifier, + CompilerFlag, + Grammar, + LSPAnalysis, + ParserResponse, + Span, }; -use heraclitus_compiler::prelude::*; -use lexer::{get_rules, Token}; -use prelude::lexer::Lexer; +use crate::grammar::Token; +use chumsky::error::Rich; +use chumsky::extra::Err; +use chumsky::input::{ + Input, + SpannedInput, +}; +use chumsky::Parser; use semantic_tokens::semantic_tokens_from_ast; pub mod expressions; @@ -229,9 +234,7 @@ pub enum GlobalStatement { } #[derive(Debug)] -pub struct AmberCompiler { - lexer: Lexer, -} +pub struct AmberCompiler {} impl Default for AmberCompiler { fn default() -> Self { @@ -241,9 +244,7 @@ impl Default for AmberCompiler { impl AmberCompiler { pub fn new() -> Self { - let lexer = Lexer::new(get_rules()); - - AmberCompiler { lexer } + AmberCompiler {} } pub fn parser<'a>(&self) -> impl AmberParser<'a, Vec>> { @@ -254,22 +255,8 @@ impl AmberCompiler { impl LSPAnalysis for AmberCompiler { #[tracing::instrument(skip_all)] fn tokenize(&self, input: &str) -> Vec> { - // It should never fail - self.lexer - .tokenize(&input.replace("\r\n", "\n").replace("\r", "\n")) - .expect("Failed to tokenize input") - .iter() - .filter_map(|t| { - if t.word == "\n" { - return None; - } - - Some(( - Token(t.word.clone()), - SimpleSpan::new(t.start, t.start + t.word.chars().count()), - )) - }) - .collect() + // Use Logos lexer instead of Heraclitus + lexer::tokenize(&input.replace("\r\n", "\n").replace("\r", "\n")) } #[tracing::instrument(skip_all)] diff --git a/src/grammar/alpha034/parser.rs b/src/grammar/alpha034/parser.rs index 75df53f..010d849 100644 --- a/src/grammar/alpha034/parser.rs +++ b/src/grammar/alpha034/parser.rs @@ -2,7 +2,8 @@ use chumsky::prelude::*; use crate::T; -use super::{lexer::Token, AmberParser}; +use super::AmberParser; +use crate::grammar::Token; const KEYWORDS: &[&str] = &[ "if", "else", "loop", "in", "return", "break", "continue", "true", "false", "null", "fun", diff --git a/src/grammar/alpha034/statements/block.rs b/src/grammar/alpha034/statements/block.rs index 361b05e..d832238 100644 --- a/src/grammar/alpha034/statements/block.rs +++ b/src/grammar/alpha034/statements/block.rs @@ -1,11 +1,14 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha034::{ - lexer::Token, parser::default_recovery, AmberParser, Block, Spanned, Statement, - }, - T, +use crate::grammar::alpha034::parser::default_recovery; +use crate::grammar::alpha034::{ + AmberParser, + Block, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; use super::modifiers::modifier_parser; diff --git a/src/grammar/alpha034/statements/comment.rs b/src/grammar/alpha034/statements/comment.rs index 2adc93e..66f20f9 100644 --- a/src/grammar/alpha034/statements/comment.rs +++ b/src/grammar/alpha034/statements/comment.rs @@ -1,6 +1,11 @@ use chumsky::prelude::*; -use crate::grammar::alpha034::{lexer::Token, AmberParser, Comment, Spanned}; +use crate::grammar::alpha034::{ + AmberParser, + Comment, + Spanned, +}; +use crate::grammar::Token; pub fn 
comment_parser<'a>() -> impl AmberParser<'a, Spanned> { any() diff --git a/src/grammar/alpha034/statements/const_init.rs b/src/grammar/alpha034/statements/const_init.rs index 9fb30fc..4e1d979 100644 --- a/src/grammar/alpha034/statements/const_init.rs +++ b/src/grammar/alpha034/statements/const_init.rs @@ -1,17 +1,18 @@ use chumsky::prelude::*; -use crate::{ - grammar::{ - alpha034::Expression, - alpha034::{ - expressions::parse_expr, - lexer::Token, - parser::{default_recovery, ident}, - AmberParser, Spanned, Statement, - }, - }, - T, +use crate::grammar::alpha034::expressions::parse_expr; +use crate::grammar::alpha034::parser::{ + default_recovery, + ident, }; +use crate::grammar::alpha034::{ + AmberParser, + Expression, + Spanned, + Statement, +}; +use crate::grammar::Token; +use crate::T; pub fn const_init_parser<'a>( stmnts: impl AmberParser<'a, Spanned>, diff --git a/src/grammar/alpha034/statements/failed.rs b/src/grammar/alpha034/statements/failed.rs index fa1e2ab..3c98f95 100644 --- a/src/grammar/alpha034/statements/failed.rs +++ b/src/grammar/alpha034/statements/failed.rs @@ -1,11 +1,14 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha034::{ - lexer::Token, parser::default_recovery, AmberParser, FailureHandler, Spanned, Statement, - }, - T, +use crate::grammar::alpha034::parser::default_recovery; +use crate::grammar::alpha034::{ + AmberParser, + FailureHandler, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; pub fn failure_parser<'a>( stmnts: impl AmberParser<'a, Spanned>, diff --git a/src/grammar/alpha034/statements/if_cond.rs b/src/grammar/alpha034/statements/if_cond.rs index 2671dba..2763c58 100644 --- a/src/grammar/alpha034/statements/if_cond.rs +++ b/src/grammar/alpha034/statements/if_cond.rs @@ -1,12 +1,19 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha034::{ - expressions::parse_expr, lexer::Token, parser::default_recovery, statements::comment, - AmberParser, Comment, ElseCondition, IfChainContent, IfCondition, Spanned, Statement, - }, - T, +use crate::grammar::alpha034::expressions::parse_expr; +use crate::grammar::alpha034::parser::default_recovery; +use crate::grammar::alpha034::statements::comment; +use crate::grammar::alpha034::{ + AmberParser, + Comment, + ElseCondition, + IfChainContent, + IfCondition, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; use super::block::block_parser; diff --git a/src/grammar/alpha034/statements/keywords.rs b/src/grammar/alpha034/statements/keywords.rs index 085b2f6..c9dd6e4 100644 --- a/src/grammar/alpha034/statements/keywords.rs +++ b/src/grammar/alpha034/statements/keywords.rs @@ -1,12 +1,15 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha034::{ - expressions::parse_expr, lexer::Token, parser::default_recovery, AmberParser, Expression, - Spanned, Statement, - }, - T, +use crate::grammar::alpha034::expressions::parse_expr; +use crate::grammar::alpha034::parser::default_recovery; +use crate::grammar::alpha034::{ + AmberParser, + Expression, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; pub fn keywords_parser<'a>( stmnts: impl AmberParser<'a, Spanned>, diff --git a/src/grammar/alpha034/statements/loops.rs b/src/grammar/alpha034/statements/loops.rs index 914f07b..ddf234b 100644 --- a/src/grammar/alpha034/statements/loops.rs +++ b/src/grammar/alpha034/statements/loops.rs @@ -1,14 +1,20 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha034::{ - expressions::parse_expr, - lexer::Token, - parser::{default_recovery, ident}, - AmberParser, 
Block, Expression, IterLoopVars, Spanned, Statement, - }, - T, +use crate::grammar::alpha034::expressions::parse_expr; +use crate::grammar::alpha034::parser::{ + default_recovery, + ident, }; +use crate::grammar::alpha034::{ + AmberParser, + Block, + Expression, + IterLoopVars, + Spanned, + Statement, +}; +use crate::grammar::Token; +use crate::T; use super::block::block_parser; diff --git a/src/grammar/alpha034/statements/mod.rs b/src/grammar/alpha034/statements/mod.rs index 6e2f12c..4049c1f 100644 --- a/src/grammar/alpha034/statements/mod.rs +++ b/src/grammar/alpha034/statements/mod.rs @@ -1,8 +1,14 @@ use chumsky::prelude::*; -use crate::{grammar::Token, T}; +use crate::grammar::Token; +use crate::T; -use super::{expressions::parse_expr, AmberParser, Spanned, Statement}; +use super::expressions::parse_expr; +use super::{ + AmberParser, + Spanned, + Statement, +}; pub mod block; pub mod comment; diff --git a/src/grammar/alpha034/statements/modifiers.rs b/src/grammar/alpha034/statements/modifiers.rs index 73f376d..b852aaf 100644 --- a/src/grammar/alpha034/statements/modifiers.rs +++ b/src/grammar/alpha034/statements/modifiers.rs @@ -1,9 +1,12 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha034::{lexer::Token, AmberParser, CommandModifier, Spanned}, - T, +use crate::grammar::alpha034::{ + AmberParser, + CommandModifier, + Spanned, }; +use crate::grammar::Token; +use crate::T; pub fn modifier_parser<'a>() -> impl AmberParser<'a, Spanned> { choice(( diff --git a/src/grammar/alpha034/statements/shebang.rs b/src/grammar/alpha034/statements/shebang.rs index 17ce1fb..5fd0644 100644 --- a/src/grammar/alpha034/statements/shebang.rs +++ b/src/grammar/alpha034/statements/shebang.rs @@ -1,6 +1,11 @@ use chumsky::prelude::*; -use crate::grammar::alpha034::{lexer::Token, AmberParser, Spanned, Statement}; +use crate::grammar::alpha034::{ + AmberParser, + Spanned, + Statement, +}; +use crate::grammar::Token; pub fn shebang_parser<'a>() -> impl AmberParser<'a, Spanned> { any() diff --git a/src/grammar/alpha034/statements/shorthands.rs b/src/grammar/alpha034/statements/shorthands.rs index edfa25b..0f3b067 100644 --- a/src/grammar/alpha034/statements/shorthands.rs +++ b/src/grammar/alpha034/statements/shorthands.rs @@ -1,14 +1,18 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha034::{ - expressions::parse_expr, - lexer::Token, - parser::{default_recovery, ident}, - AmberParser, Expression, Spanned, Statement, - }, - T, +use crate::grammar::alpha034::expressions::parse_expr; +use crate::grammar::alpha034::parser::{ + default_recovery, + ident, }; +use crate::grammar::alpha034::{ + AmberParser, + Expression, + Spanned, + Statement, +}; +use crate::grammar::Token; +use crate::T; pub fn shorthand_parser<'a>( stmnts: impl AmberParser<'a, Spanned>, diff --git a/src/grammar/alpha034/statements/var_init.rs b/src/grammar/alpha034/statements/var_init.rs index 13642aa..c66f0d6 100644 --- a/src/grammar/alpha034/statements/var_init.rs +++ b/src/grammar/alpha034/statements/var_init.rs @@ -1,15 +1,19 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha034::{ - expressions::parse_expr, - global::type_parser, - lexer::Token, - parser::{default_recovery, ident}, - AmberParser, Spanned, Statement, VariableInitType, - }, - T, +use crate::grammar::alpha034::expressions::parse_expr; +use crate::grammar::alpha034::global::type_parser; +use crate::grammar::alpha034::parser::{ + default_recovery, + ident, }; +use crate::grammar::alpha034::{ + AmberParser, + Spanned, + Statement, + VariableInitType, +}; +use 
crate::grammar::Token; +use crate::T; pub fn var_init_parser<'a>( stmnts: impl AmberParser<'a, Spanned>, diff --git a/src/grammar/alpha034/statements/var_set.rs b/src/grammar/alpha034/statements/var_set.rs index c2e1650..8c936a6 100644 --- a/src/grammar/alpha034/statements/var_set.rs +++ b/src/grammar/alpha034/statements/var_set.rs @@ -1,14 +1,18 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha034::{ - expressions::parse_expr, - lexer::Token, - parser::{default_recovery, ident}, - AmberParser, Expression, Spanned, Statement, - }, - T, +use crate::grammar::alpha034::expressions::parse_expr; +use crate::grammar::alpha034::parser::{ + default_recovery, + ident, }; +use crate::grammar::alpha034::{ + AmberParser, + Expression, + Spanned, + Statement, +}; +use crate::grammar::Token; +use crate::T; pub fn var_set_parser<'a>( stmnts: impl AmberParser<'a, Spanned>, diff --git a/src/grammar/alpha035/expressions/and.rs b/src/grammar/alpha035/expressions/and.rs index c224d1b..a6e446e 100644 --- a/src/grammar/alpha035/expressions/and.rs +++ b/src/grammar/alpha035/expressions/and.rs @@ -1,11 +1,14 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha035::{ - lexer::Token, parser::default_recovery, AmberParser, Expression, Spanned, Statement, - }, - T, +use crate::grammar::alpha035::parser::default_recovery; +use crate::grammar::alpha035::{ + AmberParser, + Expression, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; use super::comparison::comparison_parser; diff --git a/src/grammar/alpha035/expressions/atom/array.rs b/src/grammar/alpha035/expressions/atom/array.rs index b810887..9cd370c 100644 --- a/src/grammar/alpha035/expressions/atom/array.rs +++ b/src/grammar/alpha035/expressions/atom/array.rs @@ -1,9 +1,13 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha035::{lexer::Token, parser::default_recovery, AmberParser, Expression, Spanned}, - T, +use crate::grammar::alpha035::parser::default_recovery; +use crate::grammar::alpha035::{ + AmberParser, + Expression, + Spanned, }; +use crate::grammar::Token; +use crate::T; pub fn array_parser<'a>( expr: impl AmberParser<'a, Spanned>, diff --git a/src/grammar/alpha035/expressions/atom/bool.rs b/src/grammar/alpha035/expressions/atom/bool.rs index 6df689e..58ef536 100644 --- a/src/grammar/alpha035/expressions/atom/bool.rs +++ b/src/grammar/alpha035/expressions/atom/bool.rs @@ -1,7 +1,10 @@ -use crate::{ - grammar::alpha035::{lexer::Token, AmberParser, Expression, Spanned}, - T, +use crate::grammar::alpha035::{ + AmberParser, + Expression, + Spanned, }; +use crate::grammar::Token; +use crate::T; use chumsky::prelude::*; pub fn bool_parser<'a>() -> impl AmberParser<'a, Spanned> { diff --git a/src/grammar/alpha035/expressions/atom/call.rs b/src/grammar/alpha035/expressions/atom/call.rs index 2f155f6..a30f558 100644 --- a/src/grammar/alpha035/expressions/atom/call.rs +++ b/src/grammar/alpha035/expressions/atom/call.rs @@ -1,12 +1,17 @@ -use crate::{ - grammar::alpha035::{ - lexer::Token, - parser::{default_recovery, ident}, - statements::{failed::failure_parser, modifiers::modifier_parser}, - AmberParser, Expression, Spanned, Statement, - }, - T, +use crate::grammar::alpha035::parser::{ + default_recovery, + ident, }; +use crate::grammar::alpha035::statements::failed::failure_parser; +use crate::grammar::alpha035::statements::modifiers::modifier_parser; +use crate::grammar::alpha035::{ + AmberParser, + Expression, + Spanned, + Statement, +}; +use crate::grammar::Token; +use crate::T; use chumsky::prelude::*; pub fn 
function_call_parser<'a>( diff --git a/src/grammar/alpha035/expressions/atom/command.rs b/src/grammar/alpha035/expressions/atom/command.rs index 3b02006..5fa2d57 100644 --- a/src/grammar/alpha035/expressions/atom/command.rs +++ b/src/grammar/alpha035/expressions/atom/command.rs @@ -1,14 +1,17 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha035::{ - lexer::Token, - parser::default_recovery, - statements::{failed::failure_parser, modifiers::modifier_parser}, - AmberParser, Expression, InterpolatedCommand, Spanned, Statement, - }, - T, +use crate::grammar::alpha035::parser::default_recovery; +use crate::grammar::alpha035::statements::failed::failure_parser; +use crate::grammar::alpha035::statements::modifiers::modifier_parser; +use crate::grammar::alpha035::{ + AmberParser, + Expression, + InterpolatedCommand, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; pub fn command_parser<'a>( stmnts: impl AmberParser<'a, Spanned>, diff --git a/src/grammar/alpha035/expressions/atom/mod.rs b/src/grammar/alpha035/expressions/atom/mod.rs index 45e9d4c..cbce07c 100644 --- a/src/grammar/alpha035/expressions/atom/mod.rs +++ b/src/grammar/alpha035/expressions/atom/mod.rs @@ -1,4 +1,8 @@ -use crate::grammar::alpha035::{AmberParser, Spanned, Statement}; +use crate::grammar::alpha035::{ + AmberParser, + Spanned, + Statement, +}; use super::super::Expression; use chumsky::prelude::*; diff --git a/src/grammar/alpha035/expressions/atom/null.rs b/src/grammar/alpha035/expressions/atom/null.rs index 1c17405..1a1144a 100644 --- a/src/grammar/alpha035/expressions/atom/null.rs +++ b/src/grammar/alpha035/expressions/atom/null.rs @@ -1,7 +1,10 @@ -use crate::{ - grammar::alpha035::{lexer::Token, AmberParser, Expression, Spanned}, - T, +use crate::grammar::alpha035::{ + AmberParser, + Expression, + Spanned, }; +use crate::grammar::Token; +use crate::T; use chumsky::prelude::*; pub fn null_parser<'a>() -> impl AmberParser<'a, Spanned> { diff --git a/src/grammar/alpha035/expressions/atom/number.rs b/src/grammar/alpha035/expressions/atom/number.rs index 6dafdc5..07d2c29 100644 --- a/src/grammar/alpha035/expressions/atom/number.rs +++ b/src/grammar/alpha035/expressions/atom/number.rs @@ -1,36 +1,32 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha035::{lexer::Token, AmberParser, Spanned}, - T, +use crate::grammar::alpha035::{ + AmberParser, + Expression, +}; +use crate::grammar::{ + Spanned, + Token, }; - -use super::Expression; pub fn number_parser<'a>() -> impl AmberParser<'a, Spanned> { - let int = any().try_map(|token: Token, span| { - let word = token.to_string(); - - for char in word.chars() { - if !char.is_ascii_digit() { - return Err(Rich::custom(span, "int must contain only digits")); - } - } + any() + .try_map(|token: Token, span| { + let word = token.to_string(); - Ok(word) - }); + let num_str = if word.starts_with('.') { + // For numbers like ".4", prepend "0" to make "0.4" + format!("0{}", word) + } else { + word + }; - choice(( - int.then(just(T!['.']).ignore_then(int)) - .map(|(int, float)| format!("{int}.{float}")), - just(T!['.']) - .ignore_then(int) - .map(|float| format!("0.{float}")), - int.map(|int| format!("{int}.0")), - )) - .from_str::() - .unwrapped() - .map_with(|num, e| (Expression::Number((num, e.span())), e.span())) - .boxed() - .labelled("number") + // Parse as f32 + num_str + .parse::() + .map_err(|_| Rich::custom(span, format!("invalid number format: {}", num_str))) + }) + .map_with(|num, e| (Expression::Number((num, e.span())), e.span())) + .boxed() + 
.labelled("number") } diff --git a/src/grammar/alpha035/expressions/atom/parentheses.rs b/src/grammar/alpha035/expressions/atom/parentheses.rs index f1eb737..d6e25c3 100644 --- a/src/grammar/alpha035/expressions/atom/parentheses.rs +++ b/src/grammar/alpha035/expressions/atom/parentheses.rs @@ -1,7 +1,10 @@ -use crate::{ - grammar::alpha035::{lexer::Token, parser::default_recovery, AmberParser, Spanned}, - T, +use crate::grammar::alpha035::parser::default_recovery; +use crate::grammar::alpha035::{ + AmberParser, + Spanned, }; +use crate::grammar::Token; +use crate::T; use super::super::Expression; use chumsky::prelude::*; diff --git a/src/grammar/alpha035/expressions/atom/status.rs b/src/grammar/alpha035/expressions/atom/status.rs index ba9ca82..1bab52f 100644 --- a/src/grammar/alpha035/expressions/atom/status.rs +++ b/src/grammar/alpha035/expressions/atom/status.rs @@ -1,7 +1,10 @@ -use crate::{ - grammar::alpha035::{lexer::Token, AmberParser, Expression, Spanned}, - T, +use crate::grammar::alpha035::{ + AmberParser, + Expression, + Spanned, }; +use crate::grammar::Token; +use crate::T; use chumsky::prelude::*; pub fn status_var_parser<'a>() -> impl AmberParser<'a, Spanned> { diff --git a/src/grammar/alpha035/expressions/atom/text.rs b/src/grammar/alpha035/expressions/atom/text.rs index 2b01be9..1a2d4d7 100644 --- a/src/grammar/alpha035/expressions/atom/text.rs +++ b/src/grammar/alpha035/expressions/atom/text.rs @@ -1,9 +1,12 @@ -use crate::{ - grammar::alpha035::{ - lexer::Token, parser::default_recovery, AmberParser, Expression, InterpolatedText, Spanned, - }, - T, +use crate::grammar::alpha035::parser::default_recovery; +use crate::grammar::alpha035::{ + AmberParser, + Expression, + InterpolatedText, + Spanned, }; +use crate::grammar::Token; +use crate::T; use chumsky::prelude::*; pub fn text_parser<'a>( diff --git a/src/grammar/alpha035/expressions/atom/var.rs b/src/grammar/alpha035/expressions/atom/var.rs index 77b486d..41006e3 100644 --- a/src/grammar/alpha035/expressions/atom/var.rs +++ b/src/grammar/alpha035/expressions/atom/var.rs @@ -1,4 +1,8 @@ -use crate::grammar::alpha035::{parser::ident, AmberParser, Spanned}; +use crate::grammar::alpha035::parser::ident; +use crate::grammar::alpha035::{ + AmberParser, + Spanned, +}; use super::Expression; use chumsky::prelude::*; diff --git a/src/grammar/alpha035/expressions/cast.rs b/src/grammar/alpha035/expressions/cast.rs index 586219e..79c15fe 100644 --- a/src/grammar/alpha035/expressions/cast.rs +++ b/src/grammar/alpha035/expressions/cast.rs @@ -1,12 +1,16 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha035::{ - global::type_parser, lexer::Token, parser::default_recovery, AmberParser, DataType, - Expression, Spanned, Statement, - }, - T, +use crate::grammar::alpha035::global::type_parser; +use crate::grammar::alpha035::parser::default_recovery; +use crate::grammar::alpha035::{ + AmberParser, + DataType, + Expression, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; use super::unary::unary_parser; diff --git a/src/grammar/alpha035/expressions/comparison.rs b/src/grammar/alpha035/expressions/comparison.rs index 9d82489..1bc2771 100644 --- a/src/grammar/alpha035/expressions/comparison.rs +++ b/src/grammar/alpha035/expressions/comparison.rs @@ -1,11 +1,14 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha035::{ - lexer::Token, parser::default_recovery, AmberParser, Expression, Spanned, Statement, - }, - T, +use crate::grammar::alpha035::parser::default_recovery; +use crate::grammar::alpha035::{ + 
AmberParser, + Expression, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; use super::sum::sum_parser; diff --git a/src/grammar/alpha035/expressions/is.rs b/src/grammar/alpha035/expressions/is.rs index 2487d5e..d8bcfc7 100644 --- a/src/grammar/alpha035/expressions/is.rs +++ b/src/grammar/alpha035/expressions/is.rs @@ -1,12 +1,16 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha035::{ - global::type_parser, lexer::Token, parser::default_recovery, AmberParser, DataType, - Expression, Spanned, Statement, - }, - T, +use crate::grammar::alpha035::global::type_parser; +use crate::grammar::alpha035::parser::default_recovery; +use crate::grammar::alpha035::{ + AmberParser, + DataType, + Expression, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; use super::cast::cast_parser; diff --git a/src/grammar/alpha035/expressions/mod.rs b/src/grammar/alpha035/expressions/mod.rs index 71cb80b..ef862f3 100644 --- a/src/grammar/alpha035/expressions/mod.rs +++ b/src/grammar/alpha035/expressions/mod.rs @@ -1,6 +1,11 @@ use chumsky::prelude::*; -use super::{AmberParser, Expression, Spanned, Statement}; +use super::{ + AmberParser, + Expression, + Spanned, + Statement, +}; mod and; mod atom; diff --git a/src/grammar/alpha035/expressions/or.rs b/src/grammar/alpha035/expressions/or.rs index 4017528..c120756 100644 --- a/src/grammar/alpha035/expressions/or.rs +++ b/src/grammar/alpha035/expressions/or.rs @@ -1,11 +1,16 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha035::{lexer::Token, parser::default_recovery, AmberParser, Spanned, Statement}, - T, +use crate::grammar::alpha035::parser::default_recovery; +use crate::grammar::alpha035::{ + AmberParser, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; -use super::{and::and_parser, Expression}; +use super::and::and_parser; +use super::Expression; pub fn or_parser<'a>( stmnts: impl AmberParser<'a, Spanned>, diff --git a/src/grammar/alpha035/expressions/product.rs b/src/grammar/alpha035/expressions/product.rs index 6c24bfb..1b0116a 100644 --- a/src/grammar/alpha035/expressions/product.rs +++ b/src/grammar/alpha035/expressions/product.rs @@ -1,11 +1,14 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha035::{ - lexer::Token, parser::default_recovery, AmberParser, Expression, Spanned, Statement, - }, - T, +use crate::grammar::alpha035::parser::default_recovery; +use crate::grammar::alpha035::{ + AmberParser, + Expression, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; use super::is::is_parser; diff --git a/src/grammar/alpha035/expressions/range.rs b/src/grammar/alpha035/expressions/range.rs index c35f89f..e53ae80 100644 --- a/src/grammar/alpha035/expressions/range.rs +++ b/src/grammar/alpha035/expressions/range.rs @@ -1,11 +1,14 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha035::{ - lexer::Token, parser::default_recovery, AmberParser, Expression, Spanned, Statement, - }, - T, +use crate::grammar::alpha035::parser::default_recovery; +use crate::grammar::alpha035::{ + AmberParser, + Expression, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; use super::or::or_parser; diff --git a/src/grammar/alpha035/expressions/sum.rs b/src/grammar/alpha035/expressions/sum.rs index e19a322..439442a 100644 --- a/src/grammar/alpha035/expressions/sum.rs +++ b/src/grammar/alpha035/expressions/sum.rs @@ -1,11 +1,14 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha035::{ - lexer::Token, parser::default_recovery, AmberParser, Expression, 
Spanned, Statement, - }, - T, +use crate::grammar::alpha035::parser::default_recovery; +use crate::grammar::alpha035::{ + AmberParser, + Expression, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; use super::product::product_parser; diff --git a/src/grammar/alpha035/expressions/ternary.rs b/src/grammar/alpha035/expressions/ternary.rs index a2e86ec..068826d 100644 --- a/src/grammar/alpha035/expressions/ternary.rs +++ b/src/grammar/alpha035/expressions/ternary.rs @@ -1,8 +1,12 @@ use chumsky::prelude::*; -use crate::grammar::alpha035::lexer::Token; use crate::grammar::alpha035::parser::default_recovery; -use crate::grammar::alpha035::{AmberParser, Spanned, Statement}; +use crate::grammar::alpha035::{ + AmberParser, + Spanned, + Statement, +}; +use crate::grammar::Token; use crate::T; use super::range::range_parser; diff --git a/src/grammar/alpha035/expressions/unary.rs b/src/grammar/alpha035/expressions/unary.rs index 0bda554..39f0eb5 100644 --- a/src/grammar/alpha035/expressions/unary.rs +++ b/src/grammar/alpha035/expressions/unary.rs @@ -1,9 +1,13 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha035::{lexer::Token, AmberParser, Expression, Spanned, Statement}, - T, +use crate::grammar::alpha035::{ + AmberParser, + Expression, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; use super::atom::atom_parser; diff --git a/src/grammar/alpha035/global.rs b/src/grammar/alpha035/global.rs index c848182..3598bb4 100644 --- a/src/grammar/alpha035/global.rs +++ b/src/grammar/alpha035/global.rs @@ -2,14 +2,24 @@ use chumsky::prelude::*; use crate::T; +use super::expressions::parse_expr; +use super::parser::{ + default_recovery, + ident, +}; +use super::statements::statement_parser; use super::{ - expressions::parse_expr, - lexer::Token, - parser::{default_recovery, ident}, - statements::statement_parser, - AmberParser, CompilerFlag, DataType, Expression, FunctionArgument, GlobalStatement, - ImportContent, Spanned, Statement, + AmberParser, + CompilerFlag, + DataType, + Expression, + FunctionArgument, + GlobalStatement, + ImportContent, + Spanned, + Statement, }; +use crate::grammar::Token; pub fn import_parser<'a>() -> impl AmberParser<'a, Spanned> { let import_all_parser = just(T!["*"]).map_with(|_, e| (ImportContent::ImportAll, e.span())); @@ -119,8 +129,7 @@ pub fn type_parser<'a>() -> impl AmberParser<'a, Spanned> { } fn compiler_flag_parser<'a>() -> impl AmberParser<'a, Spanned> { - just(T!["#"]) - .ignore_then(just(T!["["])) + just(T!["#["]) .ignore_then( choice(( just(T!["allow_nested_if_else"]).to(CompilerFlag::AllowNestedIfElse), diff --git a/src/grammar/alpha035/lexer.rs b/src/grammar/alpha035/lexer.rs index 278aaef..9182994 100644 --- a/src/grammar/alpha035/lexer.rs +++ b/src/grammar/alpha035/lexer.rs @@ -1,74 +1,408 @@ -use heraclitus_compiler::prelude::*; +use logos::Logos; +use super::Spanned; +use crate::grammar::SimpleSpan; pub use crate::grammar::Token; -pub fn get_rules() -> Rules { - let symbols = vec![ - '+', '-', '*', '/', '%', ';', ':', '(', ')', '[', ']', '{', '}', ',', '.', '<', '>', '=', - '!', '?', '\\', '"', '$', '\n', - ]; - let compounds = vec![ - ('<', '='), - ('>', '='), - ('!', '='), - ('=', '='), - ('+', '='), - ('-', '='), - ('*', '='), - ('/', '='), - ('%', '='), - ('.', '.'), - ('/', '/'), - ]; - let region = reg![ - reg!(string as "string literal" => { - begin: "\"", - end: "\"", - tokenize: true, - allow_unclosed_region: true - } => [ - reg!(str_interp as "string interpolation" => { - begin: "{", - end: "}", - 
tokenize: true, - allow_unclosed_region: true - } ref global) - ]), - reg!(command as "command literal" => { - begin: "$", - end: "$", - tokenize: true, - allow_unclosed_region: true - } => [ - reg!(com_interp as "command interpolation" => { - begin: "{", - end: "}", - tokenize: true, - allow_unclosed_region: true - } ref global) - ]), - reg!(cc_flag as "compiler flag" => { - begin: "#[", - end: "]", - tokenize: true, - allow_unclosed_region: true - }), - reg!(comment as "comment" => { - begin: "//", - end: "\n", - allow_unclosed_region: true - }), - reg!(comment as "shebang" => { - begin: "#!", - end: "\n", - allow_unclosed_region: true - }), - reg!(interp as "interpolation" => { - begin: "{", - end: "}", - tokenize: true, - allow_unclosed_region: true - } ref global) - ]; - Rules::new(symbols, compounds, region) +/// Main token context - general code +#[derive(Logos, Debug, Clone, PartialEq)] +#[logos(skip r"[ \t\r]+")] +pub enum TokenKind { + // Compound operators + #[token("<=")] + LessEquals, + #[token(">=")] + GreaterEquals, + #[token("!=")] + NotEquals, + #[token("==")] + EqualsEquals, + #[token("+=")] + PlusEquals, + #[token("-=")] + MinusEquals, + #[token("*=")] + MulEquals, + #[token("/=")] + DivEquals, + #[token("%=")] + ModEquals, + #[token("..")] + DotDot, + + // Single operators + #[token("+")] + Plus, + #[token("-")] + Minus, + #[token("*")] + Star, + #[token("/")] + Slash, + #[token("%")] + Percent, + #[token("<")] + Less, + #[token(">")] + Greater, + #[token("=")] + Equals, + #[token("!")] + Bang, + #[token("?")] + Question, + + // Delimiters + #[token("(")] + OpenParen, + #[token(")")] + CloseParen, + #[token("[")] + OpenBracket, + #[token("]")] + CloseBracket, + #[token("{")] + OpenBrace, + #[token("}")] + CloseBrace, + + // Punctuation + #[token(";")] + Semicolon, + #[token(":")] + Colon, + #[token(",")] + Comma, + #[token(".")] + Dot, + #[token("\\")] + Backslash, + + // String start (transition to StringContext) + #[token("\"")] + Quote, + + // Command start/end (transition to CommandContext) + #[token("$")] + Dollar, + + // Compiler flag start (transition to CompilerFlagContext) + #[token("#[")] + CompilerFlagStart, + + // Comments (include trailing newline to match Heraclitus behavior) + #[regex(r"//[^\n]*\n?", priority = 2)] + Comment, + #[regex(r"#![^\n]*\n?", priority = 2)] + Shebang, + + // Identifier + #[regex(r"[a-zA-Z_][a-zA-Z0-9_]*")] + Identifier, + + // Number + #[regex(r"([0-9]+(\.[0-9]+)?)|(\.[0-9]+)")] + Number, + + // Newline + #[token("\n")] + Newline, +} + +/// String content context - inside "..." 
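// Illustrative sketch (not part of the diff): how the `StringContext` sub-lexer declared
// just below splits raw string bodies. `Content` greedily consumes anything that is not a
// quote, backslash or `{`; `Escape` takes a backslash plus the following character; `"`
// and `{` are the only tokens that make `StatefulTokenizer` switch contexts.
#[cfg(test)]
mod string_context_sketch {
    use super::*;
    use logos::Logos;

    #[test]
    fn splits_string_body() {
        let mut lex = StringContext::lexer(r#"hi \n {x}" tail"#);
        assert_eq!(lex.next(), Some(Ok(StringContext::Content)));   // "hi "
        assert_eq!(lex.next(), Some(Ok(StringContext::Escape)));    // "\n"
        assert_eq!(lex.next(), Some(Ok(StringContext::Content)));   // " "
        assert_eq!(lex.next(), Some(Ok(StringContext::OpenBrace))); // "{"
    }
}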
+#[derive(Logos, Debug, Clone, PartialEq)] +pub enum StringContext { + // String end + #[token("\"")] + Quote, + + // Interpolation start + #[token("{")] + OpenBrace, + + // Text content (not quotes, backslashes, or braces) + #[regex(r#"[^"\\{]+"#)] + Content, + + // Escape sequence + #[regex(r#"\\."#)] + Escape, +} + +/// Command content context - inside $...$ +#[derive(Logos, Debug, Clone, PartialEq)] +pub enum CommandContext { + // Command end + #[token("$")] + Dollar, + + // Interpolation start + #[token("{")] + OpenBrace, + + // Text content (not $, backslash, or braces) + #[regex(r"[^$\\{]+")] + Content, + + // Escape sequence + #[regex(r#"\\."#)] + Escape, +} + +/// Context stack to track where we are in parsing +#[derive(Debug, Clone, PartialEq)] +enum LexerContext { + Main, + String, + Command, +} + +/// Stateful tokenizer that manages context switching +pub struct StatefulTokenizer<'source> { + source: &'source str, + position: usize, + context_stack: Vec, + brace_depth: usize, // Track brace nesting inside interpolations +} + +impl<'source> StatefulTokenizer<'source> { + fn new(source: &'source str) -> Self { + Self { + source, + position: 0, + context_stack: vec![LexerContext::Main], + brace_depth: 0, + } + } + + pub fn tokenize(source: &'source str) -> Vec> { + let mut tokenizer = Self::new(source); + let mut tokens = Vec::new(); + + while tokenizer.position < source.len() { + if let Some((token, span)) = tokenizer.next_token() { + tokens.push((token, span)); + } else { + break; + } + } + + // Filter out newline tokens - they're used for position tracking but not needed for parsing + tokens + .into_iter() + .filter(|(token, _)| token.0 != "\n") + .collect() + } + + fn next_token(&mut self) -> Option<(Token, SimpleSpan)> { + if self.position >= self.source.len() { + return None; + } + + let remaining = &self.source[self.position..]; + let context = self.context_stack.last()?.clone(); + + match context { + LexerContext::Main => self.lex_main_context(remaining), + LexerContext::String => self.lex_string_context(remaining), + LexerContext::Command => self.lex_command_context(remaining), + } + } + + fn lex_main_context(&mut self, remaining: &str) -> Option<(Token, SimpleSpan)> { + let mut lex = TokenKind::lexer(remaining); + let token_result = lex.next()?; + let span = lex.span(); + let slice = lex.slice(); + + // span is relative to `remaining`, so add current position to get absolute + let start = self.position + span.start; + let end = self.position + span.end; + self.position = end; + + match token_result { + Ok(TokenKind::Quote) => { + // Entering string context + self.context_stack.push(LexerContext::String); + Some((Token("\"".to_string()), SimpleSpan::new(start, end))) + } + Ok(TokenKind::Dollar) => { + // Check if this starts a command. + // If the next char is another '$', do not start a command (this is the first of $$). + // If this is the second of a $$ (previous char was '$'), only start a command + // when the following char is not whitespace. Otherwise single $ starts a command. 
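// The rule spelled out in the comment above, paraphrased as a small decision helper
// (a sketch only, not part of the diff; `prev_is_dollar` / `next` mirror the locals
// computed below):
//
//   next == '$'                     -> no  (first `$` of `$$`)
//   prev == '$' && next is space    -> no  (`$$ failed {` style)
//   prev == '$' && next not space   -> yes
//   otherwise (single `$`)          -> yes (ordinary `$cmd$ ...`)
//   end of input                    -> no
fn starts_command(prev_is_dollar: bool, next: Option<char>) -> bool {
    match next {
        Some('$') => false,
        Some(c) if prev_is_dollar => !c.is_whitespace(),
        Some(_) => true,
        None => false,
    }
}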
+ let prev_is_dollar = start > 0 && self.source[start - 1..].starts_with('$'); + + let is_command_start = if span.end < remaining.len() { + // Look at the next char after this $ in the remaining slice + let mut next_chars = remaining[span.end..].chars(); + match next_chars.next() { + Some(next_ch) => { + if next_ch == '$' { + // next is a dollar -> this $ does not start a command + false + } else if prev_is_dollar { + // this is the second $ in $$ — only start a command if not followed by whitespace + !next_ch.is_whitespace() + } else { + // single $ (not followed by $) -> start a command + true + } + } + None => false, + } + } else { + false + }; + + if is_command_start { + // Entering command context + self.context_stack.push(LexerContext::Command); + } + Some((Token("$".to_string()), SimpleSpan::new(start, end))) + } + Ok(TokenKind::OpenBrace) => { + // Only track brace depth if we're inside an interpolation (context stack > 1) + if self.context_stack.len() > 1 { + self.brace_depth += 1; + } + Some((Token("{".to_string()), SimpleSpan::new(start, end))) + } + Ok(TokenKind::CloseBrace) => { + // Only track brace depth if we're inside an interpolation + if self.context_stack.len() > 1 && self.brace_depth > 0 { + self.brace_depth -= 1; + if self.brace_depth == 0 { + // Exiting interpolation, pop back to string/command context + self.context_stack.pop(); + } + } + Some((Token("}".to_string()), SimpleSpan::new(start, end))) + } + Ok(kind) => { + let token_str = token_kind_to_string(&kind, slice); + Some((Token(token_str), SimpleSpan::new(start, end))) + } + Err(_) => Some((Token(slice.to_string()), SimpleSpan::new(start, end))), + } + } + + fn lex_string_context(&mut self, remaining: &str) -> Option<(Token, SimpleSpan)> { + let mut lex = StringContext::lexer(remaining); + let token_result = lex.next()?; + let span = lex.span(); + let slice = lex.slice(); + + // span is relative to `remaining`, so add current position to get absolute + let start = self.position + span.start; + let end = self.position + span.end; + self.position = end; + + match token_result { + Ok(StringContext::Quote) => { + // Exiting string context + self.context_stack.pop(); + Some((Token("\"".to_string()), SimpleSpan::new(start, end))) + } + Ok(StringContext::OpenBrace) => { + // Entering interpolation - switch back to main context + self.context_stack.push(LexerContext::Main); + self.brace_depth = 1; + Some((Token("{".to_string()), SimpleSpan::new(start, end))) + } + Ok(StringContext::Content) | Ok(StringContext::Escape) => { + Some((Token(slice.to_string()), SimpleSpan::new(start, end))) + } + Err(_) => Some((Token(slice.to_string()), SimpleSpan::new(start, end))), + } + } + + fn lex_command_context(&mut self, remaining: &str) -> Option<(Token, SimpleSpan)> { + let mut lex = CommandContext::lexer(remaining); + let token_result = lex.next()?; + let span = lex.span(); + let slice = lex.slice(); + + // span is relative to `remaining`, so add current position to get absolute + let start = self.position + span.start; + let end = self.position + span.end; + self.position = end; + + match token_result { + Ok(CommandContext::Dollar) => { + // Exiting command context + self.context_stack.pop(); + Some((Token("$".to_string()), SimpleSpan::new(start, end))) + } + Ok(CommandContext::OpenBrace) => { + // Entering interpolation - switch back to main context + self.context_stack.push(LexerContext::Main); + self.brace_depth = 1; + Some((Token("{".to_string()), SimpleSpan::new(start, end))) + } + Ok(CommandContext::Content) | 
Ok(CommandContext::Escape) => { + // If this command context was entered as the second `$` of `$$` and + // the content starts with whitespace (e.g. `$$ failed {`), trim the + // leading whitespace so the identifier/tokenization matches the + // parser's expectations (no leading spaces). + let token_text = if start > 0 && self.source[start - 1..].starts_with('$') { + // second of $$ - trim leading whitespace from content + slice.trim_start().to_string() + } else { + slice.to_string() + }; + Some((Token(token_text), SimpleSpan::new(start, end))) + } + Err(_) => Some((Token(slice.to_string()), SimpleSpan::new(start, end))), + } + } +} + +fn token_kind_to_string(kind: &TokenKind, slice: &str) -> String { + match kind { + TokenKind::LessEquals => "<=".to_string(), + TokenKind::GreaterEquals => ">=".to_string(), + TokenKind::NotEquals => "!=".to_string(), + TokenKind::EqualsEquals => "==".to_string(), + TokenKind::PlusEquals => "+=".to_string(), + TokenKind::MinusEquals => "-=".to_string(), + TokenKind::MulEquals => "*=".to_string(), + TokenKind::DivEquals => "/=".to_string(), + TokenKind::ModEquals => "%=".to_string(), + TokenKind::DotDot => "..".to_string(), + TokenKind::Plus => "+".to_string(), + TokenKind::Minus => "-".to_string(), + TokenKind::Star => "*".to_string(), + TokenKind::Slash => "/".to_string(), + TokenKind::Percent => "%".to_string(), + TokenKind::Less => "<".to_string(), + TokenKind::Greater => ">".to_string(), + TokenKind::Equals => "=".to_string(), + TokenKind::Bang => "!".to_string(), + TokenKind::Question => "?".to_string(), + TokenKind::OpenParen => "(".to_string(), + TokenKind::CloseParen => ")".to_string(), + TokenKind::OpenBracket => "[".to_string(), + TokenKind::CloseBracket => "]".to_string(), + TokenKind::OpenBrace => "{".to_string(), + TokenKind::CloseBrace => "}".to_string(), + TokenKind::Semicolon => ";".to_string(), + TokenKind::Colon => ":".to_string(), + TokenKind::Comma => ",".to_string(), + TokenKind::Dot => ".".to_string(), + TokenKind::Backslash => "\\".to_string(), + TokenKind::Quote => "\"".to_string(), + TokenKind::Dollar => "$".to_string(), + TokenKind::Newline => "\n".to_string(), + TokenKind::CompilerFlagStart => "#[".to_string(), + // For these, use the actual slice + TokenKind::Comment | TokenKind::Shebang | TokenKind::Identifier | TokenKind::Number => { + slice.to_string() + } + } +} + +/// Public tokenize function +pub fn tokenize(input: &str) -> Vec> { + StatefulTokenizer::tokenize(input) } diff --git a/src/grammar/alpha035/mod.rs b/src/grammar/alpha035/mod.rs index 037fa95..0511bfe 100644 --- a/src/grammar/alpha035/mod.rs +++ b/src/grammar/alpha035/mod.rs @@ -1,17 +1,22 @@ use crate::analysis::types::DataType; pub use super::Spanned; -use super::{CommandModifier, CompilerFlag, Grammar, LSPAnalysis, ParserResponse, Span}; -use chumsky::{ - error::Rich, - extra::Err, - input::{Input, SpannedInput}, - span::SimpleSpan, - Parser, +use super::{ + CommandModifier, + CompilerFlag, + Grammar, + LSPAnalysis, + ParserResponse, + Span, }; -use heraclitus_compiler::prelude::*; -use lexer::{get_rules, Token}; -use prelude::lexer::Lexer; +use crate::grammar::Token; +use chumsky::error::Rich; +use chumsky::extra::Err; +use chumsky::input::{ + Input, + SpannedInput, +}; +use chumsky::Parser; use semantic_tokens::semantic_tokens_from_ast; pub mod expressions; @@ -244,9 +249,7 @@ pub enum GlobalStatement { } #[derive(Debug)] -pub struct AmberCompiler { - lexer: Lexer, -} +pub struct AmberCompiler {} impl Default for AmberCompiler { fn default() -> Self { @@ 
-256,9 +259,7 @@ impl Default for AmberCompiler { impl AmberCompiler { pub fn new() -> Self { - let lexer = Lexer::new(get_rules()); - - AmberCompiler { lexer } + AmberCompiler {} } pub fn parser<'a>(&self) -> impl AmberParser<'a, Vec>> { @@ -269,22 +270,8 @@ impl AmberCompiler { impl LSPAnalysis for AmberCompiler { #[tracing::instrument(skip_all)] fn tokenize(&self, input: &str) -> Vec> { - // It should never fail - self.lexer - .tokenize(&input.replace("\r\n", "\n").replace("\r", "\n")) - .expect("Failed to tokenize input") - .iter() - .filter_map(|t| { - if t.word == "\n" { - return None; - } - - Some(( - Token(t.word.clone()), - SimpleSpan::new(t.start, t.start + t.word.chars().count()), - )) - }) - .collect() + // Use Logos lexer instead of Heraclitus + lexer::tokenize(&input.replace("\r\n", "\n").replace("\r", "\n")) } #[tracing::instrument(skip_all)] diff --git a/src/grammar/alpha035/parser.rs b/src/grammar/alpha035/parser.rs index 75df53f..010d849 100644 --- a/src/grammar/alpha035/parser.rs +++ b/src/grammar/alpha035/parser.rs @@ -2,7 +2,8 @@ use chumsky::prelude::*; use crate::T; -use super::{lexer::Token, AmberParser}; +use super::AmberParser; +use crate::grammar::Token; const KEYWORDS: &[&str] = &[ "if", "else", "loop", "in", "return", "break", "continue", "true", "false", "null", "fun", diff --git a/src/grammar/alpha035/statements/block.rs b/src/grammar/alpha035/statements/block.rs index 18bc636..2f83d31 100644 --- a/src/grammar/alpha035/statements/block.rs +++ b/src/grammar/alpha035/statements/block.rs @@ -1,11 +1,14 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha035::{ - lexer::Token, parser::default_recovery, AmberParser, Block, Spanned, Statement, - }, - T, +use crate::grammar::alpha035::parser::default_recovery; +use crate::grammar::alpha035::{ + AmberParser, + Block, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; use super::modifiers::modifier_parser; diff --git a/src/grammar/alpha035/statements/comment.rs b/src/grammar/alpha035/statements/comment.rs index 84b725c..b23ae77 100644 --- a/src/grammar/alpha035/statements/comment.rs +++ b/src/grammar/alpha035/statements/comment.rs @@ -1,6 +1,11 @@ use chumsky::prelude::*; -use crate::grammar::alpha035::{lexer::Token, AmberParser, Comment, Spanned}; +use crate::grammar::alpha035::{ + AmberParser, + Comment, + Spanned, +}; +use crate::grammar::Token; pub fn comment_parser<'a>() -> impl AmberParser<'a, Spanned> { choice((doc_string_parser(), single_line_comment_parser())).boxed() diff --git a/src/grammar/alpha035/statements/const_init.rs b/src/grammar/alpha035/statements/const_init.rs index 7ed9b4e..fb567c0 100644 --- a/src/grammar/alpha035/statements/const_init.rs +++ b/src/grammar/alpha035/statements/const_init.rs @@ -1,17 +1,18 @@ use chumsky::prelude::*; -use crate::{ - grammar::{ - alpha035::Expression, - alpha035::{ - expressions::parse_expr, - lexer::Token, - parser::{default_recovery, ident}, - AmberParser, Spanned, Statement, - }, - }, - T, +use crate::grammar::alpha035::expressions::parse_expr; +use crate::grammar::alpha035::parser::{ + default_recovery, + ident, }; +use crate::grammar::alpha035::{ + AmberParser, + Expression, + Spanned, + Statement, +}; +use crate::grammar::Token; +use crate::T; pub fn const_init_parser<'a>( stmnts: impl AmberParser<'a, Spanned>, diff --git a/src/grammar/alpha035/statements/failed.rs b/src/grammar/alpha035/statements/failed.rs index b532ea6..41bae72 100644 --- a/src/grammar/alpha035/statements/failed.rs +++ b/src/grammar/alpha035/statements/failed.rs 
@@ -1,11 +1,14 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha035::{ - lexer::Token, parser::default_recovery, AmberParser, FailureHandler, Spanned, Statement, - }, - T, +use crate::grammar::alpha035::parser::default_recovery; +use crate::grammar::alpha035::{ + AmberParser, + FailureHandler, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; pub fn failure_parser<'a>( stmnts: impl AmberParser<'a, Spanned>, diff --git a/src/grammar/alpha035/statements/if_cond.rs b/src/grammar/alpha035/statements/if_cond.rs index da402dc..cf67b0d 100644 --- a/src/grammar/alpha035/statements/if_cond.rs +++ b/src/grammar/alpha035/statements/if_cond.rs @@ -1,12 +1,19 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha035::{ - expressions::parse_expr, lexer::Token, parser::default_recovery, statements::comment, - AmberParser, Comment, ElseCondition, IfChainContent, IfCondition, Spanned, Statement, - }, - T, +use crate::grammar::alpha035::expressions::parse_expr; +use crate::grammar::alpha035::parser::default_recovery; +use crate::grammar::alpha035::statements::comment; +use crate::grammar::alpha035::{ + AmberParser, + Comment, + ElseCondition, + IfChainContent, + IfCondition, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; use super::block::block_parser; diff --git a/src/grammar/alpha035/statements/keywords.rs b/src/grammar/alpha035/statements/keywords.rs index ff84e43..688de92 100644 --- a/src/grammar/alpha035/statements/keywords.rs +++ b/src/grammar/alpha035/statements/keywords.rs @@ -1,12 +1,15 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha035::{ - expressions::parse_expr, lexer::Token, parser::default_recovery, AmberParser, Expression, - Spanned, Statement, - }, - T, +use crate::grammar::alpha035::expressions::parse_expr; +use crate::grammar::alpha035::parser::default_recovery; +use crate::grammar::alpha035::{ + AmberParser, + Expression, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; pub fn keywords_parser<'a>( stmnts: impl AmberParser<'a, Spanned>, diff --git a/src/grammar/alpha035/statements/loops.rs b/src/grammar/alpha035/statements/loops.rs index 38737ca..b5bb6c8 100644 --- a/src/grammar/alpha035/statements/loops.rs +++ b/src/grammar/alpha035/statements/loops.rs @@ -1,14 +1,20 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha035::{ - expressions::parse_expr, - lexer::Token, - parser::{default_recovery, ident}, - AmberParser, Block, Expression, IterLoopVars, Spanned, Statement, - }, - T, +use crate::grammar::alpha035::expressions::parse_expr; +use crate::grammar::alpha035::parser::{ + default_recovery, + ident, }; +use crate::grammar::alpha035::{ + AmberParser, + Block, + Expression, + IterLoopVars, + Spanned, + Statement, +}; +use crate::grammar::Token; +use crate::T; use super::block::block_parser; diff --git a/src/grammar/alpha035/statements/mod.rs b/src/grammar/alpha035/statements/mod.rs index 6f2857b..9c7f98d 100644 --- a/src/grammar/alpha035/statements/mod.rs +++ b/src/grammar/alpha035/statements/mod.rs @@ -1,8 +1,14 @@ use chumsky::prelude::*; -use crate::{grammar::Token, T}; +use crate::grammar::Token; +use crate::T; -use super::{expressions::parse_expr, AmberParser, Spanned, Statement}; +use super::expressions::parse_expr; +use super::{ + AmberParser, + Spanned, + Statement, +}; pub mod block; pub mod comment; diff --git a/src/grammar/alpha035/statements/modifiers.rs b/src/grammar/alpha035/statements/modifiers.rs index fade6e5..22b7c0f 100644 --- a/src/grammar/alpha035/statements/modifiers.rs 
+++ b/src/grammar/alpha035/statements/modifiers.rs @@ -1,9 +1,12 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha035::{lexer::Token, AmberParser, CommandModifier, Spanned}, - T, +use crate::grammar::alpha035::{ + AmberParser, + CommandModifier, + Spanned, }; +use crate::grammar::Token; +use crate::T; pub fn modifier_parser<'a>() -> impl AmberParser<'a, Spanned> { choice(( diff --git a/src/grammar/alpha035/statements/move_files.rs b/src/grammar/alpha035/statements/move_files.rs index 936b978..2760496 100644 --- a/src/grammar/alpha035/statements/move_files.rs +++ b/src/grammar/alpha035/statements/move_files.rs @@ -1,14 +1,18 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha035::{ - expressions::parse_expr, lexer::Token, parser::default_recovery, AmberParser, Expression, - Spanned, Statement, - }, - T, +use crate::grammar::alpha035::expressions::parse_expr; +use crate::grammar::alpha035::parser::default_recovery; +use crate::grammar::alpha035::{ + AmberParser, + Expression, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; -use super::{failed::failure_parser, modifiers::modifier_parser}; +use super::failed::failure_parser; +use super::modifiers::modifier_parser; pub fn move_files_parser<'a>( stmnts: impl AmberParser<'a, Spanned>, diff --git a/src/grammar/alpha035/statements/shebang.rs b/src/grammar/alpha035/statements/shebang.rs index a330e68..b3289c6 100644 --- a/src/grammar/alpha035/statements/shebang.rs +++ b/src/grammar/alpha035/statements/shebang.rs @@ -1,6 +1,11 @@ use chumsky::prelude::*; -use crate::grammar::alpha035::{lexer::Token, AmberParser, Spanned, Statement}; +use crate::grammar::alpha035::{ + AmberParser, + Spanned, + Statement, +}; +use crate::grammar::Token; pub fn shebang_parser<'a>() -> impl AmberParser<'a, Spanned> { any() diff --git a/src/grammar/alpha035/statements/shorthands.rs b/src/grammar/alpha035/statements/shorthands.rs index caf600e..a453fc7 100644 --- a/src/grammar/alpha035/statements/shorthands.rs +++ b/src/grammar/alpha035/statements/shorthands.rs @@ -1,14 +1,18 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha035::{ - expressions::parse_expr, - lexer::Token, - parser::{default_recovery, ident}, - AmberParser, Expression, Spanned, Statement, - }, - T, +use crate::grammar::alpha035::expressions::parse_expr; +use crate::grammar::alpha035::parser::{ + default_recovery, + ident, }; +use crate::grammar::alpha035::{ + AmberParser, + Expression, + Spanned, + Statement, +}; +use crate::grammar::Token; +use crate::T; pub fn shorthand_parser<'a>( stmnts: impl AmberParser<'a, Spanned>, diff --git a/src/grammar/alpha035/statements/var_init.rs b/src/grammar/alpha035/statements/var_init.rs index 66cf69f..2193b06 100644 --- a/src/grammar/alpha035/statements/var_init.rs +++ b/src/grammar/alpha035/statements/var_init.rs @@ -1,15 +1,19 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha035::{ - expressions::parse_expr, - global::type_parser, - lexer::Token, - parser::{default_recovery, ident}, - AmberParser, Spanned, Statement, VariableInitType, - }, - T, +use crate::grammar::alpha035::expressions::parse_expr; +use crate::grammar::alpha035::global::type_parser; +use crate::grammar::alpha035::parser::{ + default_recovery, + ident, }; +use crate::grammar::alpha035::{ + AmberParser, + Spanned, + Statement, + VariableInitType, +}; +use crate::grammar::Token; +use crate::T; pub fn var_init_parser<'a>( stmnts: impl AmberParser<'a, Spanned>, diff --git a/src/grammar/alpha035/statements/var_set.rs 
b/src/grammar/alpha035/statements/var_set.rs index 087c10e..f8cd050 100644 --- a/src/grammar/alpha035/statements/var_set.rs +++ b/src/grammar/alpha035/statements/var_set.rs @@ -1,14 +1,18 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha035::{ - expressions::parse_expr, - lexer::Token, - parser::{default_recovery, ident}, - AmberParser, Expression, Spanned, Statement, - }, - T, +use crate::grammar::alpha035::expressions::parse_expr; +use crate::grammar::alpha035::parser::{ + default_recovery, + ident, }; +use crate::grammar::alpha035::{ + AmberParser, + Expression, + Spanned, + Statement, +}; +use crate::grammar::Token; +use crate::T; pub fn var_set_parser<'a>( stmnts: impl AmberParser<'a, Spanned>, diff --git a/src/grammar/alpha040/expressions/and.rs b/src/grammar/alpha040/expressions/and.rs index ed5c17e..e788af4 100644 --- a/src/grammar/alpha040/expressions/and.rs +++ b/src/grammar/alpha040/expressions/and.rs @@ -1,11 +1,14 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha040::{ - lexer::Token, parser::default_recovery, AmberParser, Expression, Spanned, Statement, - }, - T, +use crate::grammar::alpha040::parser::default_recovery; +use crate::grammar::alpha040::{ + AmberParser, + Expression, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; use super::comparison::comparison_parser; diff --git a/src/grammar/alpha040/expressions/atom/array.rs b/src/grammar/alpha040/expressions/atom/array.rs index 21d253c..718de8c 100644 --- a/src/grammar/alpha040/expressions/atom/array.rs +++ b/src/grammar/alpha040/expressions/atom/array.rs @@ -1,9 +1,13 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha040::{lexer::Token, parser::default_recovery, AmberParser, Expression, Spanned}, - T, +use crate::grammar::alpha040::parser::default_recovery; +use crate::grammar::alpha040::{ + AmberParser, + Expression, + Spanned, }; +use crate::grammar::Token; +use crate::T; pub fn array_parser<'a>( expr: impl AmberParser<'a, Spanned>, diff --git a/src/grammar/alpha040/expressions/atom/bool.rs b/src/grammar/alpha040/expressions/atom/bool.rs index 44f3a0b..77b5a38 100644 --- a/src/grammar/alpha040/expressions/atom/bool.rs +++ b/src/grammar/alpha040/expressions/atom/bool.rs @@ -1,7 +1,10 @@ -use crate::{ - grammar::alpha040::{lexer::Token, AmberParser, Expression, Spanned}, - T, +use crate::grammar::alpha040::{ + AmberParser, + Expression, + Spanned, }; +use crate::grammar::Token; +use crate::T; use chumsky::prelude::*; pub fn bool_parser<'a>() -> impl AmberParser<'a, Spanned> { diff --git a/src/grammar/alpha040/expressions/atom/call.rs b/src/grammar/alpha040/expressions/atom/call.rs index b4f01f1..b79282a 100644 --- a/src/grammar/alpha040/expressions/atom/call.rs +++ b/src/grammar/alpha040/expressions/atom/call.rs @@ -1,12 +1,17 @@ -use crate::{ - grammar::alpha040::{ - lexer::Token, - parser::{default_recovery, ident}, - statements::{failed::failure_parser, modifiers::modifier_parser}, - AmberParser, Expression, Spanned, Statement, - }, - T, +use crate::grammar::alpha040::parser::{ + default_recovery, + ident, }; +use crate::grammar::alpha040::statements::failed::failure_parser; +use crate::grammar::alpha040::statements::modifiers::modifier_parser; +use crate::grammar::alpha040::{ + AmberParser, + Expression, + Spanned, + Statement, +}; +use crate::grammar::Token; +use crate::T; use chumsky::prelude::*; pub fn function_call_parser<'a>( diff --git a/src/grammar/alpha040/expressions/atom/command.rs b/src/grammar/alpha040/expressions/atom/command.rs index 556572f..639127e 100644 
--- a/src/grammar/alpha040/expressions/atom/command.rs +++ b/src/grammar/alpha040/expressions/atom/command.rs @@ -1,14 +1,17 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha040::{ - lexer::Token, - parser::default_recovery, - statements::{failed::failure_parser, modifiers::modifier_parser}, - AmberParser, Expression, InterpolatedCommand, Spanned, Statement, - }, - T, +use crate::grammar::alpha040::parser::default_recovery; +use crate::grammar::alpha040::statements::failed::failure_parser; +use crate::grammar::alpha040::statements::modifiers::modifier_parser; +use crate::grammar::alpha040::{ + AmberParser, + Expression, + InterpolatedCommand, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; pub fn command_parser<'a>( stmnts: impl AmberParser<'a, Spanned>, diff --git a/src/grammar/alpha040/expressions/atom/exit.rs b/src/grammar/alpha040/expressions/atom/exit.rs index bb05138..9aaaaf5 100644 --- a/src/grammar/alpha040/expressions/atom/exit.rs +++ b/src/grammar/alpha040/expressions/atom/exit.rs @@ -1,9 +1,12 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha040::{lexer::Token, AmberParser, Expression, Spanned}, - T, +use crate::grammar::alpha040::{ + AmberParser, + Expression, + Spanned, }; +use crate::grammar::Token; +use crate::T; pub fn exit_parser<'a>( exp: impl AmberParser<'a, Spanned>, diff --git a/src/grammar/alpha040/expressions/atom/mod.rs b/src/grammar/alpha040/expressions/atom/mod.rs index b6772cb..96bbaab 100644 --- a/src/grammar/alpha040/expressions/atom/mod.rs +++ b/src/grammar/alpha040/expressions/atom/mod.rs @@ -1,10 +1,11 @@ -use crate::{ - grammar::{ - alpha040::parser::default_recovery, - alpha040::{lexer::Token, AmberParser, Spanned, Statement}, - }, - T, +use crate::grammar::alpha040::parser::default_recovery; +use crate::grammar::alpha040::{ + AmberParser, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; use super::super::Expression; use chumsky::prelude::*; diff --git a/src/grammar/alpha040/expressions/atom/null.rs b/src/grammar/alpha040/expressions/atom/null.rs index 78ee03c..2d58232 100644 --- a/src/grammar/alpha040/expressions/atom/null.rs +++ b/src/grammar/alpha040/expressions/atom/null.rs @@ -1,7 +1,10 @@ -use crate::{ - grammar::alpha040::{lexer::Token, AmberParser, Expression, Spanned}, - T, +use crate::grammar::alpha040::{ + AmberParser, + Expression, + Spanned, }; +use crate::grammar::Token; +use crate::T; use chumsky::prelude::*; pub fn null_parser<'a>() -> impl AmberParser<'a, Spanned> { diff --git a/src/grammar/alpha040/expressions/atom/number.rs b/src/grammar/alpha040/expressions/atom/number.rs index 9d51bca..b828d98 100644 --- a/src/grammar/alpha040/expressions/atom/number.rs +++ b/src/grammar/alpha040/expressions/atom/number.rs @@ -1,36 +1,32 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha040::{lexer::Token, AmberParser, Spanned}, - T, +use crate::grammar::alpha040::{ + AmberParser, + Expression, +}; +use crate::grammar::{ + Spanned, + Token, }; - -use super::Expression; pub fn number_parser<'a>() -> impl AmberParser<'a, Spanned> { - let int = any().try_map(|token: Token, span| { - let word = token.to_string(); - - for char in word.chars() { - if !char.is_ascii_digit() { - return Err(Rich::custom(span, "int must contain only digits")); - } - } + any() + .try_map(|token: Token, span| { + let word = token.to_string(); - Ok(word) - }); + let num_str = if word.starts_with('.') { + // For numbers like ".4", prepend "0" to make "0.4" + format!("0{}", word) + } else { + word + }; - choice(( - 
int.then(just(T!['.']).ignore_then(int)) - .map(|(int, float)| format!("{int}.{float}")), - just(T!['.']) - .ignore_then(int) - .map(|float| format!("0.{float}")), - int.map(|int| format!("{int}.0")), - )) - .from_str::() - .unwrapped() - .map_with(|num, e| (Expression::Number((num, e.span())), e.span())) - .boxed() - .labelled("number") + // Parse as f32 + num_str + .parse::() + .map_err(|_| Rich::custom(span, format!("invalid number format: {}", num_str))) + }) + .map_with(|num, e| (Expression::Number((num, e.span())), e.span())) + .boxed() + .labelled("number") } diff --git a/src/grammar/alpha040/expressions/atom/parentheses.rs b/src/grammar/alpha040/expressions/atom/parentheses.rs index ad0a831..a69b1d7 100644 --- a/src/grammar/alpha040/expressions/atom/parentheses.rs +++ b/src/grammar/alpha040/expressions/atom/parentheses.rs @@ -1,7 +1,10 @@ -use crate::{ - grammar::alpha040::{lexer::Token, parser::default_recovery, AmberParser, Spanned}, - T, +use crate::grammar::alpha040::parser::default_recovery; +use crate::grammar::alpha040::{ + AmberParser, + Spanned, }; +use crate::grammar::Token; +use crate::T; use super::super::Expression; use chumsky::prelude::*; diff --git a/src/grammar/alpha040/expressions/atom/status.rs b/src/grammar/alpha040/expressions/atom/status.rs index 4393dcb..24dab1f 100644 --- a/src/grammar/alpha040/expressions/atom/status.rs +++ b/src/grammar/alpha040/expressions/atom/status.rs @@ -1,7 +1,10 @@ -use crate::{ - grammar::alpha040::{lexer::Token, AmberParser, Expression, Spanned}, - T, +use crate::grammar::alpha040::{ + AmberParser, + Expression, + Spanned, }; +use crate::grammar::Token; +use crate::T; use chumsky::prelude::*; pub fn status_var_parser<'a>() -> impl AmberParser<'a, Spanned> { diff --git a/src/grammar/alpha040/expressions/atom/text.rs b/src/grammar/alpha040/expressions/atom/text.rs index 8c6bd60..882dc85 100644 --- a/src/grammar/alpha040/expressions/atom/text.rs +++ b/src/grammar/alpha040/expressions/atom/text.rs @@ -1,9 +1,12 @@ -use crate::{ - grammar::alpha040::{ - lexer::Token, parser::default_recovery, AmberParser, Expression, InterpolatedText, Spanned, - }, - T, +use crate::grammar::alpha040::parser::default_recovery; +use crate::grammar::alpha040::{ + AmberParser, + Expression, + InterpolatedText, + Spanned, }; +use crate::grammar::Token; +use crate::T; use chumsky::prelude::*; pub fn text_parser<'a>( diff --git a/src/grammar/alpha040/expressions/atom/var.rs b/src/grammar/alpha040/expressions/atom/var.rs index d48248f..bd629b4 100644 --- a/src/grammar/alpha040/expressions/atom/var.rs +++ b/src/grammar/alpha040/expressions/atom/var.rs @@ -1,4 +1,8 @@ -use crate::grammar::alpha040::{parser::ident, AmberParser, Spanned}; +use crate::grammar::alpha040::parser::ident; +use crate::grammar::alpha040::{ + AmberParser, + Spanned, +}; use super::Expression; use chumsky::prelude::*; diff --git a/src/grammar/alpha040/expressions/cast.rs b/src/grammar/alpha040/expressions/cast.rs index 2b3e00c..f6a8fd0 100644 --- a/src/grammar/alpha040/expressions/cast.rs +++ b/src/grammar/alpha040/expressions/cast.rs @@ -1,12 +1,16 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha040::{ - global::type_parser, lexer::Token, parser::default_recovery, AmberParser, DataType, - Expression, Spanned, Statement, - }, - T, +use crate::grammar::alpha040::global::type_parser; +use crate::grammar::alpha040::parser::default_recovery; +use crate::grammar::alpha040::{ + AmberParser, + DataType, + Expression, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; 
use super::unary::unary_parser; diff --git a/src/grammar/alpha040/expressions/comparison.rs b/src/grammar/alpha040/expressions/comparison.rs index b2e977b..ab91582 100644 --- a/src/grammar/alpha040/expressions/comparison.rs +++ b/src/grammar/alpha040/expressions/comparison.rs @@ -1,11 +1,14 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha040::{ - lexer::Token, parser::default_recovery, AmberParser, Expression, Spanned, Statement, - }, - T, +use crate::grammar::alpha040::parser::default_recovery; +use crate::grammar::alpha040::{ + AmberParser, + Expression, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; use super::sum::sum_parser; diff --git a/src/grammar/alpha040/expressions/is.rs b/src/grammar/alpha040/expressions/is.rs index 76e718e..f69ba4c 100644 --- a/src/grammar/alpha040/expressions/is.rs +++ b/src/grammar/alpha040/expressions/is.rs @@ -1,12 +1,16 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha040::{ - global::type_parser, lexer::Token, parser::default_recovery, AmberParser, DataType, - Expression, Spanned, Statement, - }, - T, +use crate::grammar::alpha040::global::type_parser; +use crate::grammar::alpha040::parser::default_recovery; +use crate::grammar::alpha040::{ + AmberParser, + DataType, + Expression, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; use super::cast::cast_parser; diff --git a/src/grammar/alpha040/expressions/mod.rs b/src/grammar/alpha040/expressions/mod.rs index 71cb80b..ef862f3 100644 --- a/src/grammar/alpha040/expressions/mod.rs +++ b/src/grammar/alpha040/expressions/mod.rs @@ -1,6 +1,11 @@ use chumsky::prelude::*; -use super::{AmberParser, Expression, Spanned, Statement}; +use super::{ + AmberParser, + Expression, + Spanned, + Statement, +}; mod and; mod atom; diff --git a/src/grammar/alpha040/expressions/or.rs b/src/grammar/alpha040/expressions/or.rs index 2f21572..4860903 100644 --- a/src/grammar/alpha040/expressions/or.rs +++ b/src/grammar/alpha040/expressions/or.rs @@ -1,11 +1,16 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha040::{lexer::Token, parser::default_recovery, AmberParser, Spanned, Statement}, - T, +use crate::grammar::alpha040::parser::default_recovery; +use crate::grammar::alpha040::{ + AmberParser, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; -use super::{and::and_parser, Expression}; +use super::and::and_parser; +use super::Expression; pub fn or_parser<'a>( stmnts: impl AmberParser<'a, Spanned>, diff --git a/src/grammar/alpha040/expressions/product.rs b/src/grammar/alpha040/expressions/product.rs index 3f24ba4..23f0245 100644 --- a/src/grammar/alpha040/expressions/product.rs +++ b/src/grammar/alpha040/expressions/product.rs @@ -1,11 +1,14 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha040::{ - lexer::Token, parser::default_recovery, AmberParser, Expression, Spanned, Statement, - }, - T, +use crate::grammar::alpha040::parser::default_recovery; +use crate::grammar::alpha040::{ + AmberParser, + Expression, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; use super::is::is_parser; diff --git a/src/grammar/alpha040/expressions/range.rs b/src/grammar/alpha040/expressions/range.rs index b5182c6..c690128 100644 --- a/src/grammar/alpha040/expressions/range.rs +++ b/src/grammar/alpha040/expressions/range.rs @@ -1,11 +1,14 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha040::{ - lexer::Token, parser::default_recovery, AmberParser, Expression, Spanned, Statement, - }, - T, +use 
crate::grammar::alpha040::parser::default_recovery; +use crate::grammar::alpha040::{ + AmberParser, + Expression, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; use super::or::or_parser; diff --git a/src/grammar/alpha040/expressions/sum.rs b/src/grammar/alpha040/expressions/sum.rs index 4e56f00..ef5ae6b 100644 --- a/src/grammar/alpha040/expressions/sum.rs +++ b/src/grammar/alpha040/expressions/sum.rs @@ -1,11 +1,14 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha040::{ - lexer::Token, parser::default_recovery, AmberParser, Expression, Spanned, Statement, - }, - T, +use crate::grammar::alpha040::parser::default_recovery; +use crate::grammar::alpha040::{ + AmberParser, + Expression, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; use super::product::product_parser; diff --git a/src/grammar/alpha040/expressions/ternary.rs b/src/grammar/alpha040/expressions/ternary.rs index e1765ba..161451b 100644 --- a/src/grammar/alpha040/expressions/ternary.rs +++ b/src/grammar/alpha040/expressions/ternary.rs @@ -1,8 +1,12 @@ use chumsky::prelude::*; -use crate::grammar::alpha040::lexer::Token; use crate::grammar::alpha040::parser::default_recovery; -use crate::grammar::alpha040::{AmberParser, Spanned, Statement}; +use crate::grammar::alpha040::{ + AmberParser, + Spanned, + Statement, +}; +use crate::grammar::Token; use crate::T; use super::range::range_parser; diff --git a/src/grammar/alpha040/expressions/unary.rs b/src/grammar/alpha040/expressions/unary.rs index faa149a..0711935 100644 --- a/src/grammar/alpha040/expressions/unary.rs +++ b/src/grammar/alpha040/expressions/unary.rs @@ -1,9 +1,13 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha040::{lexer::Token, AmberParser, Expression, Spanned, Statement}, - T, +use crate::grammar::alpha040::{ + AmberParser, + Expression, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; use super::atom::array_index_parser; diff --git a/src/grammar/alpha040/global.rs b/src/grammar/alpha040/global.rs index eaebeaf..d32e03f 100644 --- a/src/grammar/alpha040/global.rs +++ b/src/grammar/alpha040/global.rs @@ -2,14 +2,24 @@ use chumsky::prelude::*; use crate::T; +use super::expressions::parse_expr; +use super::parser::{ + default_recovery, + ident, +}; +use super::statements::statement_parser; use super::{ - expressions::parse_expr, - lexer::Token, - parser::{default_recovery, ident}, - statements::statement_parser, - AmberParser, CompilerFlag, DataType, Expression, FunctionArgument, GlobalStatement, - ImportContent, Spanned, Statement, + AmberParser, + CompilerFlag, + DataType, + Expression, + FunctionArgument, + GlobalStatement, + ImportContent, + Spanned, + Statement, }; +use crate::grammar::Token; pub fn import_parser<'a>() -> impl AmberParser<'a, Spanned> { let import_all_parser = just(T!["*"]).map_with(|_, e| (ImportContent::ImportAll, e.span())); @@ -121,8 +131,7 @@ pub fn type_parser<'a>() -> impl AmberParser<'a, Spanned> { } fn compiler_flag_parser<'a>() -> impl AmberParser<'a, Spanned> { - just(T!["#"]) - .ignore_then(just(T!["["])) + just(T!["#["]) .ignore_then( choice(( just(T!["allow_nested_if_else"]).to(CompilerFlag::AllowNestedIfElse), diff --git a/src/grammar/alpha040/lexer.rs b/src/grammar/alpha040/lexer.rs index 278aaef..e5817db 100644 --- a/src/grammar/alpha040/lexer.rs +++ b/src/grammar/alpha040/lexer.rs @@ -1,74 +1,410 @@ -use heraclitus_compiler::prelude::*; - -pub use crate::grammar::Token; - -pub fn get_rules() -> Rules { - let symbols = vec![ - '+', '-', '*', '/', '%', 
';', ':', '(', ')', '[', ']', '{', '}', ',', '.', '<', '>', '=', - '!', '?', '\\', '"', '$', '\n', - ]; - let compounds = vec![ - ('<', '='), - ('>', '='), - ('!', '='), - ('=', '='), - ('+', '='), - ('-', '='), - ('*', '='), - ('/', '='), - ('%', '='), - ('.', '.'), - ('/', '/'), - ]; - let region = reg![ - reg!(string as "string literal" => { - begin: "\"", - end: "\"", - tokenize: true, - allow_unclosed_region: true - } => [ - reg!(str_interp as "string interpolation" => { - begin: "{", - end: "}", - tokenize: true, - allow_unclosed_region: true - } ref global) - ]), - reg!(command as "command literal" => { - begin: "$", - end: "$", - tokenize: true, - allow_unclosed_region: true - } => [ - reg!(com_interp as "command interpolation" => { - begin: "{", - end: "}", - tokenize: true, - allow_unclosed_region: true - } ref global) - ]), - reg!(cc_flag as "compiler flag" => { - begin: "#[", - end: "]", - tokenize: true, - allow_unclosed_region: true - }), - reg!(comment as "comment" => { - begin: "//", - end: "\n", - allow_unclosed_region: true - }), - reg!(comment as "shebang" => { - begin: "#!", - end: "\n", - allow_unclosed_region: true - }), - reg!(interp as "interpolation" => { - begin: "{", - end: "}", - tokenize: true, - allow_unclosed_region: true - } ref global) - ]; - Rules::new(symbols, compounds, region) +use logos::Logos; + +use super::{ + Spanned, + Token, +}; +use crate::grammar::SimpleSpan; + +/// Main token context - general code +#[derive(Logos, Debug, Clone, PartialEq)] +#[logos(skip r"[ \t\r]+")] +pub enum TokenKind { + // Compound operators + #[token("<=")] + LessEquals, + #[token(">=")] + GreaterEquals, + #[token("!=")] + NotEquals, + #[token("==")] + EqualsEquals, + #[token("+=")] + PlusEquals, + #[token("-=")] + MinusEquals, + #[token("*=")] + MulEquals, + #[token("/=")] + DivEquals, + #[token("%=")] + ModEquals, + #[token("..")] + DotDot, + + // Single operators + #[token("+")] + Plus, + #[token("-")] + Minus, + #[token("*")] + Star, + #[token("/")] + Slash, + #[token("%")] + Percent, + #[token("<")] + Less, + #[token(">")] + Greater, + #[token("=")] + Equals, + #[token("!")] + Bang, + #[token("?")] + Question, + + // Delimiters + #[token("(")] + OpenParen, + #[token(")")] + CloseParen, + #[token("[")] + OpenBracket, + #[token("]")] + CloseBracket, + #[token("{")] + OpenBrace, + #[token("}")] + CloseBrace, + + // Punctuation + #[token(";")] + Semicolon, + #[token(":")] + Colon, + #[token(",")] + Comma, + #[token(".")] + Dot, + #[token("\\")] + Backslash, + + // String start (transition to StringContext) + #[token("\"")] + Quote, + + // Command start/end (transition to CommandContext) + #[token("$")] + Dollar, + + // Compiler flag start (transition to CompilerFlagContext) + #[token("#[")] + CompilerFlagStart, + + // Comments (include trailing newline to match Heraclitus behavior) + #[regex(r"//[^\n]*\n?", priority = 2)] + Comment, + #[regex(r"#![^\n]*\n?", priority = 2)] + Shebang, + + // Identifier + #[regex(r"[a-zA-Z_][a-zA-Z0-9_]*")] + Identifier, + + // Number + #[regex(r"([0-9]+(\.[0-9]+)?)|(\.[0-9]+)")] + Number, + + // Newline + #[token("\n")] + Newline, +} + +/// String content context - inside "..." 
+#[derive(Logos, Debug, Clone, PartialEq)] +pub enum StringContext { + // String end + #[token("\"")] + Quote, + + // Interpolation start + #[token("{")] + OpenBrace, + + // Text content (not quotes, backslashes, or braces) + #[regex(r#"[^"\\{]+"#)] + Content, + + // Escape sequence + #[regex(r#"\\."#)] + Escape, +} + +/// Command content context - inside $...$ +#[derive(Logos, Debug, Clone, PartialEq)] +pub enum CommandContext { + // Command end + #[token("$")] + Dollar, + + // Interpolation start + #[token("{")] + OpenBrace, + + // Text content (not $, backslash, or braces) + #[regex(r"[^$\\{]+")] + Content, + + // Escape sequence + #[regex(r#"\\."#)] + Escape, +} + +/// Context stack to track where we are in parsing +#[derive(Debug, Clone, PartialEq)] +enum LexerContext { + Main, + String, + Command, +} + +/// Stateful tokenizer that manages context switching +pub struct StatefulTokenizer<'source> { + source: &'source str, + position: usize, + context_stack: Vec, + brace_depth: usize, // Track brace nesting inside interpolations +} + +impl<'source> StatefulTokenizer<'source> { + fn new(source: &'source str) -> Self { + Self { + source, + position: 0, + context_stack: vec![LexerContext::Main], + brace_depth: 0, + } + } + + pub fn tokenize(source: &'source str) -> Vec> { + let mut tokenizer = Self::new(source); + let mut tokens = Vec::new(); + + while tokenizer.position < source.len() { + if let Some((token, span)) = tokenizer.next_token() { + tokens.push((token, span)); + } else { + break; + } + } + + // Filter out newline tokens - they're used for position tracking but not needed for parsing + tokens + .into_iter() + .filter(|(token, _)| token.0 != "\n") + .collect() + } + + fn next_token(&mut self) -> Option<(Token, SimpleSpan)> { + if self.position >= self.source.len() { + return None; + } + + let remaining = &self.source[self.position..]; + let context = self.context_stack.last()?.clone(); + + match context { + LexerContext::Main => self.lex_main_context(remaining), + LexerContext::String => self.lex_string_context(remaining), + LexerContext::Command => self.lex_command_context(remaining), + } + } + + fn lex_main_context(&mut self, remaining: &str) -> Option<(Token, SimpleSpan)> { + let mut lex = TokenKind::lexer(remaining); + let token_result = lex.next()?; + let span = lex.span(); + let slice = lex.slice(); + + // span is relative to `remaining`, so add current position to get absolute + let start = self.position + span.start; + let end = self.position + span.end; + self.position = end; + + match token_result { + Ok(TokenKind::Quote) => { + // Entering string context + self.context_stack.push(LexerContext::String); + Some((Token("\"".to_string()), SimpleSpan::new(start, end))) + } + Ok(TokenKind::Dollar) => { + // Check if this starts a command. + // If the next char is another '$', do not start a command (this is the first of $$). + // If this is the second of a $$ (previous char was '$'), only start a command + // when the following char is not whitespace. Otherwise single $ starts a command. 
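+ // Illustrative outcomes of the rule above (derived from the checks that follow, not exhaustive):
+ //   "$ls$"  -> the first `$` opens a command; the closing `$` is handled in command context
+ //   "$$"    -> neither `$` opens a command (the first sees a `$` next, the second sees end of input)
+ //   "$$x"   -> the second `$` opens a command, because `x` is not whitespace
+ //   "$$ x"  -> the second `$` does not open a command (the next char is a space)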
+ let prev_is_dollar = start > 0 && self.source[start - 1..].starts_with('$'); + + let is_command_start = if span.end < remaining.len() { + // Look at the next char after this $ in the remaining slice + let mut next_chars = remaining[span.end..].chars(); + match next_chars.next() { + Some(next_ch) => { + if next_ch == '$' { + // next is a dollar -> this $ does not start a command + false + } else if prev_is_dollar { + // this is the second $ in $$ — only start a command if not followed by whitespace + !next_ch.is_whitespace() + } else { + // single $ (not followed by $) -> start a command + true + } + } + None => false, + } + } else { + false + }; + + if is_command_start { + // Entering command context + self.context_stack.push(LexerContext::Command); + } + Some((Token("$".to_string()), SimpleSpan::new(start, end))) + } + Ok(TokenKind::OpenBrace) => { + // Only track brace depth if we're inside an interpolation (context stack > 1) + if self.context_stack.len() > 1 { + self.brace_depth += 1; + } + Some((Token("{".to_string()), SimpleSpan::new(start, end))) + } + Ok(TokenKind::CloseBrace) => { + // Only track brace depth if we're inside an interpolation + if self.context_stack.len() > 1 && self.brace_depth > 0 { + self.brace_depth -= 1; + if self.brace_depth == 0 { + // Exiting interpolation, pop back to string/command context + self.context_stack.pop(); + } + } + Some((Token("}".to_string()), SimpleSpan::new(start, end))) + } + Ok(kind) => { + let token_str = token_kind_to_string(&kind, slice); + Some((Token(token_str), SimpleSpan::new(start, end))) + } + Err(_) => Some((Token(slice.to_string()), SimpleSpan::new(start, end))), + } + } + + fn lex_string_context(&mut self, remaining: &str) -> Option<(Token, SimpleSpan)> { + let mut lex = StringContext::lexer(remaining); + let token_result = lex.next()?; + let span = lex.span(); + let slice = lex.slice(); + + // span is relative to `remaining`, so add current position to get absolute + let start = self.position + span.start; + let end = self.position + span.end; + self.position = end; + + match token_result { + Ok(StringContext::Quote) => { + // Exiting string context + self.context_stack.pop(); + Some((Token("\"".to_string()), SimpleSpan::new(start, end))) + } + Ok(StringContext::OpenBrace) => { + // Entering interpolation - switch back to main context + self.context_stack.push(LexerContext::Main); + self.brace_depth = 1; + Some((Token("{".to_string()), SimpleSpan::new(start, end))) + } + Ok(StringContext::Content) | Ok(StringContext::Escape) => { + Some((Token(slice.to_string()), SimpleSpan::new(start, end))) + } + Err(_) => Some((Token(slice.to_string()), SimpleSpan::new(start, end))), + } + } + + fn lex_command_context(&mut self, remaining: &str) -> Option<(Token, SimpleSpan)> { + let mut lex = CommandContext::lexer(remaining); + let token_result = lex.next()?; + let span = lex.span(); + let slice = lex.slice(); + + // span is relative to `remaining`, so add current position to get absolute + let start = self.position + span.start; + let end = self.position + span.end; + self.position = end; + + match token_result { + Ok(CommandContext::Dollar) => { + // Exiting command context + self.context_stack.pop(); + Some((Token("$".to_string()), SimpleSpan::new(start, end))) + } + Ok(CommandContext::OpenBrace) => { + // Entering interpolation - switch back to main context + self.context_stack.push(LexerContext::Main); + self.brace_depth = 1; + Some((Token("{".to_string()), SimpleSpan::new(start, end))) + } + Ok(CommandContext::Content) | 
Ok(CommandContext::Escape) => { + // If this command context was entered as the second `$` of `$$` and + // the content starts with whitespace (e.g. `$$ failed {`), trim the + // leading whitespace so the identifier/tokenization matches the + // parser's expectations (no leading spaces). + let token_text = if start > 0 && self.source[start - 1..].starts_with('$') { + // second of $$ - trim leading whitespace from content + slice.trim_start().to_string() + } else { + slice.to_string() + }; + Some((Token(token_text), SimpleSpan::new(start, end))) + } + Err(_) => Some((Token(slice.to_string()), SimpleSpan::new(start, end))), + } + } +} + +fn token_kind_to_string(kind: &TokenKind, slice: &str) -> String { + match kind { + TokenKind::LessEquals => "<=".to_string(), + TokenKind::GreaterEquals => ">=".to_string(), + TokenKind::NotEquals => "!=".to_string(), + TokenKind::EqualsEquals => "==".to_string(), + TokenKind::PlusEquals => "+=".to_string(), + TokenKind::MinusEquals => "-=".to_string(), + TokenKind::MulEquals => "*=".to_string(), + TokenKind::DivEquals => "/=".to_string(), + TokenKind::ModEquals => "%=".to_string(), + TokenKind::DotDot => "..".to_string(), + TokenKind::Plus => "+".to_string(), + TokenKind::Minus => "-".to_string(), + TokenKind::Star => "*".to_string(), + TokenKind::Slash => "/".to_string(), + TokenKind::Percent => "%".to_string(), + TokenKind::Less => "<".to_string(), + TokenKind::Greater => ">".to_string(), + TokenKind::Equals => "=".to_string(), + TokenKind::Bang => "!".to_string(), + TokenKind::Question => "?".to_string(), + TokenKind::OpenParen => "(".to_string(), + TokenKind::CloseParen => ")".to_string(), + TokenKind::OpenBracket => "[".to_string(), + TokenKind::CloseBracket => "]".to_string(), + TokenKind::OpenBrace => "{".to_string(), + TokenKind::CloseBrace => "}".to_string(), + TokenKind::Semicolon => ";".to_string(), + TokenKind::Colon => ":".to_string(), + TokenKind::Comma => ",".to_string(), + TokenKind::Dot => ".".to_string(), + TokenKind::Backslash => "\\".to_string(), + TokenKind::Quote => "\"".to_string(), + TokenKind::Dollar => "$".to_string(), + TokenKind::Newline => "\n".to_string(), + TokenKind::CompilerFlagStart => "#[".to_string(), + // For these, use the actual slice + TokenKind::Comment | TokenKind::Shebang | TokenKind::Identifier | TokenKind::Number => { + slice.to_string() + } + } +} + +/// Public tokenize function +pub fn tokenize(input: &str) -> Vec> { + StatefulTokenizer::tokenize(input) } diff --git a/src/grammar/alpha040/mod.rs b/src/grammar/alpha040/mod.rs index 7f208ec..46ff4b3 100644 --- a/src/grammar/alpha040/mod.rs +++ b/src/grammar/alpha040/mod.rs @@ -1,17 +1,22 @@ use crate::analysis::types::DataType; pub use super::Spanned; -use super::{CommandModifier, CompilerFlag, Grammar, LSPAnalysis, ParserResponse, Span}; -use chumsky::{ - error::Rich, - extra::Err, - input::{Input, SpannedInput}, - span::SimpleSpan, - Parser, +use super::{ + CommandModifier, + CompilerFlag, + Grammar, + LSPAnalysis, + ParserResponse, + Span, + Token, }; -use heraclitus_compiler::prelude::*; -use lexer::{get_rules, Token}; -use prelude::lexer::Lexer; +use chumsky::error::Rich; +use chumsky::extra::Err; +use chumsky::input::{ + Input, + SpannedInput, +}; +use chumsky::Parser; use semantic_tokens::semantic_tokens_from_ast; pub mod expressions; @@ -246,9 +251,7 @@ pub enum GlobalStatement { } #[derive(Debug)] -pub struct AmberCompiler { - lexer: Lexer, -} +pub struct AmberCompiler {} impl Default for AmberCompiler { fn default() -> Self { @@ -258,9 +261,7 @@ impl 
Default for AmberCompiler { impl AmberCompiler { pub fn new() -> Self { - let lexer = Lexer::new(get_rules()); - - AmberCompiler { lexer } + AmberCompiler {} } pub fn parser<'a>(&self) -> impl AmberParser<'a, Vec>> { @@ -271,22 +272,8 @@ impl AmberCompiler { impl LSPAnalysis for AmberCompiler { #[tracing::instrument(skip_all)] fn tokenize(&self, input: &str) -> Vec> { - // It should never fail - self.lexer - .tokenize(&input.replace("\r\n", "\n").replace("\r", "\n")) - .expect("Failed to tokenize input") - .iter() - .filter_map(|t| { - if t.word == "\n" { - return None; - } - - Some(( - Token(t.word.clone()), - SimpleSpan::new(t.start, t.start + t.word.chars().count()), - )) - }) - .collect() + // Use Logos lexer instead of Heraclitus + lexer::tokenize(&input.replace("\r\n", "\n").replace("\r", "\n")) } #[tracing::instrument(skip_all)] diff --git a/src/grammar/alpha040/parser.rs b/src/grammar/alpha040/parser.rs index e76b58c..3c0c01f 100644 --- a/src/grammar/alpha040/parser.rs +++ b/src/grammar/alpha040/parser.rs @@ -2,7 +2,8 @@ use chumsky::prelude::*; use crate::T; -use super::{lexer::Token, AmberParser}; +use super::AmberParser; +use crate::grammar::Token; const KEYWORDS: &[&str] = &[ "if", "else", "loop", "in", "return", "break", "continue", "true", "false", "null", "fun", diff --git a/src/grammar/alpha040/statements/block.rs b/src/grammar/alpha040/statements/block.rs index 52dc7b4..271a88e 100644 --- a/src/grammar/alpha040/statements/block.rs +++ b/src/grammar/alpha040/statements/block.rs @@ -1,11 +1,14 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha040::{ - lexer::Token, parser::default_recovery, AmberParser, Block, Spanned, Statement, - }, - T, +use crate::grammar::alpha040::parser::default_recovery; +use crate::grammar::alpha040::{ + AmberParser, + Block, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; use super::modifiers::modifier_parser; diff --git a/src/grammar/alpha040/statements/comment.rs b/src/grammar/alpha040/statements/comment.rs index 64f59b0..e155402 100644 --- a/src/grammar/alpha040/statements/comment.rs +++ b/src/grammar/alpha040/statements/comment.rs @@ -1,6 +1,11 @@ use chumsky::prelude::*; -use crate::grammar::alpha040::{lexer::Token, AmberParser, Comment, Spanned}; +use crate::grammar::alpha040::{ + AmberParser, + Comment, + Spanned, +}; +use crate::grammar::Token; pub fn comment_parser<'a>() -> impl AmberParser<'a, Spanned> { choice((doc_string_parser(), single_line_comment_parser())).boxed() diff --git a/src/grammar/alpha040/statements/const_init.rs b/src/grammar/alpha040/statements/const_init.rs index 650b757..7ed521f 100644 --- a/src/grammar/alpha040/statements/const_init.rs +++ b/src/grammar/alpha040/statements/const_init.rs @@ -1,17 +1,18 @@ use chumsky::prelude::*; -use crate::{ - grammar::{ - alpha040::Expression, - alpha040::{ - expressions::parse_expr, - lexer::Token, - parser::{default_recovery, ident}, - AmberParser, Spanned, Statement, - }, - }, - T, +use crate::grammar::alpha040::expressions::parse_expr; +use crate::grammar::alpha040::parser::{ + default_recovery, + ident, }; +use crate::grammar::alpha040::{ + AmberParser, + Expression, + Spanned, + Statement, +}; +use crate::grammar::Token; +use crate::T; pub fn const_init_parser<'a>( stmnts: impl AmberParser<'a, Spanned>, diff --git a/src/grammar/alpha040/statements/failed.rs b/src/grammar/alpha040/statements/failed.rs index 36dd335..ad5c28b 100644 --- a/src/grammar/alpha040/statements/failed.rs +++ b/src/grammar/alpha040/statements/failed.rs @@ -1,11 +1,14 @@ use 
chumsky::prelude::*; -use crate::{ - grammar::alpha040::{ - lexer::Token, parser::default_recovery, AmberParser, FailureHandler, Spanned, Statement, - }, - T, +use crate::grammar::alpha040::parser::default_recovery; +use crate::grammar::alpha040::{ + AmberParser, + FailureHandler, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; pub fn failure_parser<'a>( stmnts: impl AmberParser<'a, Spanned>, diff --git a/src/grammar/alpha040/statements/if_cond.rs b/src/grammar/alpha040/statements/if_cond.rs index 89ae22c..067bfe8 100644 --- a/src/grammar/alpha040/statements/if_cond.rs +++ b/src/grammar/alpha040/statements/if_cond.rs @@ -1,12 +1,19 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha040::{ - expressions::parse_expr, lexer::Token, parser::default_recovery, statements::comment, - AmberParser, Comment, ElseCondition, IfChainContent, IfCondition, Spanned, Statement, - }, - T, +use crate::grammar::alpha040::expressions::parse_expr; +use crate::grammar::alpha040::parser::default_recovery; +use crate::grammar::alpha040::statements::comment; +use crate::grammar::alpha040::{ + AmberParser, + Comment, + ElseCondition, + IfChainContent, + IfCondition, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; use super::block::block_parser; diff --git a/src/grammar/alpha040/statements/keywords.rs b/src/grammar/alpha040/statements/keywords.rs index edac590..8e22e01 100644 --- a/src/grammar/alpha040/statements/keywords.rs +++ b/src/grammar/alpha040/statements/keywords.rs @@ -1,12 +1,15 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha040::{ - expressions::parse_expr, lexer::Token, parser::default_recovery, AmberParser, Expression, - Spanned, Statement, - }, - T, +use crate::grammar::alpha040::expressions::parse_expr; +use crate::grammar::alpha040::parser::default_recovery; +use crate::grammar::alpha040::{ + AmberParser, + Expression, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; pub fn keywords_parser<'a>( stmnts: impl AmberParser<'a, Spanned>, diff --git a/src/grammar/alpha040/statements/loops.rs b/src/grammar/alpha040/statements/loops.rs index 8a162cf..431b263 100644 --- a/src/grammar/alpha040/statements/loops.rs +++ b/src/grammar/alpha040/statements/loops.rs @@ -1,14 +1,20 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha040::{ - expressions::parse_expr, - lexer::Token, - parser::{default_recovery, ident}, - AmberParser, Block, Expression, IterLoopVars, Spanned, Statement, - }, - T, +use crate::grammar::alpha040::expressions::parse_expr; +use crate::grammar::alpha040::parser::{ + default_recovery, + ident, }; +use crate::grammar::alpha040::{ + AmberParser, + Block, + Expression, + IterLoopVars, + Spanned, + Statement, +}; +use crate::grammar::Token; +use crate::T; use super::block::block_parser; diff --git a/src/grammar/alpha040/statements/mod.rs b/src/grammar/alpha040/statements/mod.rs index 6f2857b..9c7f98d 100644 --- a/src/grammar/alpha040/statements/mod.rs +++ b/src/grammar/alpha040/statements/mod.rs @@ -1,8 +1,14 @@ use chumsky::prelude::*; -use crate::{grammar::Token, T}; +use crate::grammar::Token; +use crate::T; -use super::{expressions::parse_expr, AmberParser, Spanned, Statement}; +use super::expressions::parse_expr; +use super::{ + AmberParser, + Spanned, + Statement, +}; pub mod block; pub mod comment; diff --git a/src/grammar/alpha040/statements/modifiers.rs b/src/grammar/alpha040/statements/modifiers.rs index 809e6ae..72d13dc 100644 --- a/src/grammar/alpha040/statements/modifiers.rs +++ 
b/src/grammar/alpha040/statements/modifiers.rs @@ -1,9 +1,12 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha040::{lexer::Token, AmberParser, CommandModifier, Spanned}, - T, +use crate::grammar::alpha040::{ + AmberParser, + CommandModifier, + Spanned, }; +use crate::grammar::Token; +use crate::T; pub fn modifier_parser<'a>() -> impl AmberParser<'a, Spanned> { choice(( diff --git a/src/grammar/alpha040/statements/move_files.rs b/src/grammar/alpha040/statements/move_files.rs index ba6d24e..5160a93 100644 --- a/src/grammar/alpha040/statements/move_files.rs +++ b/src/grammar/alpha040/statements/move_files.rs @@ -1,14 +1,18 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha040::{ - expressions::parse_expr, lexer::Token, parser::default_recovery, AmberParser, Expression, - Spanned, Statement, - }, - T, +use crate::grammar::alpha040::expressions::parse_expr; +use crate::grammar::alpha040::parser::default_recovery; +use crate::grammar::alpha040::{ + AmberParser, + Expression, + Spanned, + Statement, }; +use crate::grammar::Token; +use crate::T; -use super::{failed::failure_parser, modifiers::modifier_parser}; +use super::failed::failure_parser; +use super::modifiers::modifier_parser; pub fn move_files_parser<'a>( stmnts: impl AmberParser<'a, Spanned>, diff --git a/src/grammar/alpha040/statements/shebang.rs b/src/grammar/alpha040/statements/shebang.rs index 0b9b961..90aef1f 100644 --- a/src/grammar/alpha040/statements/shebang.rs +++ b/src/grammar/alpha040/statements/shebang.rs @@ -1,6 +1,11 @@ use chumsky::prelude::*; -use crate::grammar::alpha040::{lexer::Token, AmberParser, Spanned, Statement}; +use crate::grammar::alpha040::{ + AmberParser, + Spanned, + Statement, +}; +use crate::grammar::Token; pub fn shebang_parser<'a>() -> impl AmberParser<'a, Spanned> { any() diff --git a/src/grammar/alpha040/statements/shorthands.rs b/src/grammar/alpha040/statements/shorthands.rs index 24cd6de..1eaed4c 100644 --- a/src/grammar/alpha040/statements/shorthands.rs +++ b/src/grammar/alpha040/statements/shorthands.rs @@ -1,14 +1,18 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha040::{ - expressions::parse_expr, - lexer::Token, - parser::{default_recovery, ident}, - AmberParser, Expression, Spanned, Statement, - }, - T, +use crate::grammar::alpha040::expressions::parse_expr; +use crate::grammar::alpha040::parser::{ + default_recovery, + ident, }; +use crate::grammar::alpha040::{ + AmberParser, + Expression, + Spanned, + Statement, +}; +use crate::grammar::Token; +use crate::T; pub fn shorthand_parser<'a>( stmnts: impl AmberParser<'a, Spanned>, diff --git a/src/grammar/alpha040/statements/var_init.rs b/src/grammar/alpha040/statements/var_init.rs index 521d857..6bc98f1 100644 --- a/src/grammar/alpha040/statements/var_init.rs +++ b/src/grammar/alpha040/statements/var_init.rs @@ -1,15 +1,19 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha040::{ - expressions::parse_expr, - global::type_parser, - lexer::Token, - parser::{default_recovery, ident}, - AmberParser, Spanned, Statement, VariableInitType, - }, - T, +use crate::grammar::alpha040::expressions::parse_expr; +use crate::grammar::alpha040::global::type_parser; +use crate::grammar::alpha040::parser::{ + default_recovery, + ident, }; +use crate::grammar::alpha040::{ + AmberParser, + Spanned, + Statement, + VariableInitType, +}; +use crate::grammar::Token; +use crate::T; pub fn var_init_parser<'a>( stmnts: impl AmberParser<'a, Spanned>, diff --git a/src/grammar/alpha040/statements/var_set.rs 
b/src/grammar/alpha040/statements/var_set.rs index d75ccc4..047e4d7 100644 --- a/src/grammar/alpha040/statements/var_set.rs +++ b/src/grammar/alpha040/statements/var_set.rs @@ -1,14 +1,18 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha040::{ - expressions::parse_expr, - lexer::Token, - parser::{default_recovery, ident}, - AmberParser, Expression, Spanned, Statement, - }, - T, +use crate::grammar::alpha040::expressions::parse_expr; +use crate::grammar::alpha040::parser::{ + default_recovery, + ident, }; +use crate::grammar::alpha040::{ + AmberParser, + Expression, + Spanned, + Statement, +}; +use crate::grammar::Token; +use crate::T; pub fn var_set_parser<'a>( stmnts: impl AmberParser<'a, Spanned>, diff --git a/src/grammar/alpha050/expressions/and.rs b/src/grammar/alpha050/expressions/and.rs index ffef80f..1a6b367 100644 --- a/src/grammar/alpha050/expressions/and.rs +++ b/src/grammar/alpha050/expressions/and.rs @@ -1,11 +1,14 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha050::{ - lexer::Token, parser::default_recovery, AmberParser, Expression, Spanned, Statement, - }, - T, +use crate::grammar::alpha050::parser::default_recovery; +use crate::grammar::alpha050::{ + AmberParser, + Expression, + Spanned, + Statement, + Token, }; +use crate::T; use super::comparison::comparison_parser; diff --git a/src/grammar/alpha050/expressions/atom/array.rs b/src/grammar/alpha050/expressions/atom/array.rs index 973b182..11b2f96 100644 --- a/src/grammar/alpha050/expressions/atom/array.rs +++ b/src/grammar/alpha050/expressions/atom/array.rs @@ -1,9 +1,13 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha050::{lexer::Token, parser::default_recovery, AmberParser, Expression, Spanned}, - T, +use crate::grammar::alpha050::parser::default_recovery; +use crate::grammar::alpha050::{ + AmberParser, + Expression, + Spanned, + Token, }; +use crate::T; pub fn array_parser<'a>( expr: impl AmberParser<'a, Spanned>, diff --git a/src/grammar/alpha050/expressions/atom/bool.rs b/src/grammar/alpha050/expressions/atom/bool.rs index e76af3c..f709d24 100644 --- a/src/grammar/alpha050/expressions/atom/bool.rs +++ b/src/grammar/alpha050/expressions/atom/bool.rs @@ -1,7 +1,10 @@ -use crate::{ - grammar::alpha050::{lexer::Token, AmberParser, Expression, Spanned}, - T, +use crate::grammar::alpha050::{ + AmberParser, + Expression, + Spanned, + Token, }; +use crate::T; use chumsky::prelude::*; pub fn bool_parser<'a>() -> impl AmberParser<'a, Spanned> { diff --git a/src/grammar/alpha050/expressions/atom/call.rs b/src/grammar/alpha050/expressions/atom/call.rs index 4de585c..63cd63a 100644 --- a/src/grammar/alpha050/expressions/atom/call.rs +++ b/src/grammar/alpha050/expressions/atom/call.rs @@ -1,12 +1,17 @@ -use crate::{ - grammar::alpha050::{ - lexer::Token, - parser::{default_recovery, ident}, - statements::{failable_handlers::failable_handlers_parser, modifiers::modifier_parser}, - AmberParser, Expression, Spanned, Statement, - }, - T, +use crate::grammar::alpha050::parser::{ + default_recovery, + ident, }; +use crate::grammar::alpha050::statements::failable_handlers::failable_handlers_parser; +use crate::grammar::alpha050::statements::modifiers::modifier_parser; +use crate::grammar::alpha050::{ + AmberParser, + Expression, + Spanned, + Statement, + Token, +}; +use crate::T; use chumsky::prelude::*; pub fn function_call_parser<'a>( diff --git a/src/grammar/alpha050/expressions/atom/command.rs b/src/grammar/alpha050/expressions/atom/command.rs index 4de1146..71e474e 100644 --- 
a/src/grammar/alpha050/expressions/atom/command.rs +++ b/src/grammar/alpha050/expressions/atom/command.rs @@ -1,14 +1,17 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha050::{ - lexer::Token, - parser::default_recovery, - statements::{failable_handlers::failable_handlers_parser, modifiers::modifier_parser}, - AmberParser, Expression, InterpolatedCommand, Spanned, Statement, - }, - T, +use crate::grammar::alpha050::parser::default_recovery; +use crate::grammar::alpha050::statements::failable_handlers::failable_handlers_parser; +use crate::grammar::alpha050::statements::modifiers::modifier_parser; +use crate::grammar::alpha050::{ + AmberParser, + Expression, + InterpolatedCommand, + Spanned, + Statement, + Token, }; +use crate::T; pub fn command_parser<'a>( stmnts: impl AmberParser<'a, Spanned>, diff --git a/src/grammar/alpha050/expressions/atom/exit.rs b/src/grammar/alpha050/expressions/atom/exit.rs index 0bde77a..ddd32b0 100644 --- a/src/grammar/alpha050/expressions/atom/exit.rs +++ b/src/grammar/alpha050/expressions/atom/exit.rs @@ -1,9 +1,12 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha050::{lexer::Token, AmberParser, Expression, Spanned}, - T, +use crate::grammar::alpha050::{ + AmberParser, + Expression, + Spanned, + Token, }; +use crate::T; pub fn exit_parser<'a>( exp: impl AmberParser<'a, Spanned>, diff --git a/src/grammar/alpha050/expressions/atom/int.rs b/src/grammar/alpha050/expressions/atom/int.rs index 2edfdc9..9b337a8 100644 --- a/src/grammar/alpha050/expressions/atom/int.rs +++ b/src/grammar/alpha050/expressions/atom/int.rs @@ -1,6 +1,10 @@ use chumsky::prelude::*; -use crate::grammar::alpha050::{lexer::Token, AmberParser, Spanned}; +use crate::grammar::alpha050::{ + AmberParser, + Spanned, + Token, +}; use super::Expression; diff --git a/src/grammar/alpha050/expressions/atom/mod.rs b/src/grammar/alpha050/expressions/atom/mod.rs index 949b42b..63f4b9a 100644 --- a/src/grammar/alpha050/expressions/atom/mod.rs +++ b/src/grammar/alpha050/expressions/atom/mod.rs @@ -1,10 +1,11 @@ -use crate::{ - grammar::{ - alpha050::parser::default_recovery, - alpha050::{lexer::Token, AmberParser, Spanned, Statement}, - }, - T, +use crate::grammar::alpha050::parser::default_recovery; +use crate::grammar::alpha050::{ + AmberParser, + Spanned, + Statement, + Token, }; +use crate::T; use super::super::Expression; use chumsky::prelude::*; @@ -37,8 +38,8 @@ pub fn atom_parser<'a>( text::text_parser(expr.clone()), array::array_parser(expr.clone()), command::command_parser(stmnts.clone(), expr.clone()), - number::number_parser(), int::int_parser(), + number::number_parser(), )) .boxed() } diff --git a/src/grammar/alpha050/expressions/atom/null.rs b/src/grammar/alpha050/expressions/atom/null.rs index 80bfec7..b5040e9 100644 --- a/src/grammar/alpha050/expressions/atom/null.rs +++ b/src/grammar/alpha050/expressions/atom/null.rs @@ -1,7 +1,10 @@ -use crate::{ - grammar::alpha050::{lexer::Token, AmberParser, Expression, Spanned}, - T, +use crate::grammar::alpha050::{ + AmberParser, + Expression, + Spanned, + Token, }; +use crate::T; use chumsky::prelude::*; pub fn null_parser<'a>() -> impl AmberParser<'a, Spanned> { diff --git a/src/grammar/alpha050/expressions/atom/number.rs b/src/grammar/alpha050/expressions/atom/number.rs index 769c413..55d9b7f 100644 --- a/src/grammar/alpha050/expressions/atom/number.rs +++ b/src/grammar/alpha050/expressions/atom/number.rs @@ -1,35 +1,32 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha050::{lexer::Token, AmberParser, Spanned}, - T, +use 
crate::grammar::alpha050::{ + AmberParser, + Expression, +}; +use crate::grammar::{ + Spanned, + Token, }; - -use super::Expression; pub fn number_parser<'a>() -> impl AmberParser<'a, Spanned> { - let int = any().try_map(|token: Token, span| { - let word = token.to_string(); - - for char in word.chars() { - if !char.is_ascii_digit() { - return Err(Rich::custom(span, "int must contain only digits")); - } - } + any() + .try_map(|token: Token, span| { + let word = token.to_string(); - Ok(word) - }); + let num_str = if word.starts_with('.') { + // For numbers like ".4", prepend "0" to make "0.4" + format!("0{}", word) + } else { + word + }; - choice(( - int.then(just(T!['.']).ignore_then(int)) - .map(|(int, float)| format!("{int}.{float}")), - just(T!['.']) - .ignore_then(int) - .map(|float| format!("0.{float}")), - )) - .from_str::() - .unwrapped() - .map_with(|num, e| (Expression::Number((num, e.span())), e.span())) - .boxed() - .labelled("number") + // Parse as f32 + num_str + .parse::() + .map_err(|_| Rich::custom(span, format!("invalid number format: {}", num_str))) + }) + .map_with(|num, e| (Expression::Number((num, e.span())), e.span())) + .boxed() + .labelled("number") } diff --git a/src/grammar/alpha050/expressions/atom/parentheses.rs b/src/grammar/alpha050/expressions/atom/parentheses.rs index ab063e3..7368de6 100644 --- a/src/grammar/alpha050/expressions/atom/parentheses.rs +++ b/src/grammar/alpha050/expressions/atom/parentheses.rs @@ -1,7 +1,10 @@ -use crate::{ - grammar::alpha050::{lexer::Token, parser::default_recovery, AmberParser, Spanned}, - T, +use crate::grammar::alpha050::parser::default_recovery; +use crate::grammar::alpha050::{ + AmberParser, + Spanned, + Token, }; +use crate::T; use super::super::Expression; use chumsky::prelude::*; diff --git a/src/grammar/alpha050/expressions/atom/status.rs b/src/grammar/alpha050/expressions/atom/status.rs index 7c20eee..9e67ce7 100644 --- a/src/grammar/alpha050/expressions/atom/status.rs +++ b/src/grammar/alpha050/expressions/atom/status.rs @@ -1,7 +1,10 @@ -use crate::{ - grammar::alpha050::{lexer::Token, AmberParser, Expression, Spanned}, - T, +use crate::grammar::alpha050::{ + AmberParser, + Expression, + Spanned, + Token, }; +use crate::T; use chumsky::prelude::*; pub fn status_var_parser<'a>() -> impl AmberParser<'a, Spanned> { diff --git a/src/grammar/alpha050/expressions/atom/text.rs b/src/grammar/alpha050/expressions/atom/text.rs index c96dd03..fe78299 100644 --- a/src/grammar/alpha050/expressions/atom/text.rs +++ b/src/grammar/alpha050/expressions/atom/text.rs @@ -1,9 +1,12 @@ -use crate::{ - grammar::alpha050::{ - lexer::Token, parser::default_recovery, AmberParser, Expression, InterpolatedText, Spanned, - }, - T, +use crate::grammar::alpha050::parser::default_recovery; +use crate::grammar::alpha050::{ + AmberParser, + Expression, + InterpolatedText, + Spanned, + Token, }; +use crate::T; use chumsky::prelude::*; pub fn text_parser<'a>( diff --git a/src/grammar/alpha050/expressions/atom/var.rs b/src/grammar/alpha050/expressions/atom/var.rs index 03d61ae..bbbcd65 100644 --- a/src/grammar/alpha050/expressions/atom/var.rs +++ b/src/grammar/alpha050/expressions/atom/var.rs @@ -1,4 +1,8 @@ -use crate::grammar::alpha050::{parser::ident, AmberParser, Spanned}; +use crate::grammar::alpha050::parser::ident; +use crate::grammar::alpha050::{ + AmberParser, + Spanned, +}; use super::Expression; use chumsky::prelude::*; diff --git a/src/grammar/alpha050/expressions/cast.rs b/src/grammar/alpha050/expressions/cast.rs index 22c2ede..4eda2f9 
100644 --- a/src/grammar/alpha050/expressions/cast.rs +++ b/src/grammar/alpha050/expressions/cast.rs @@ -1,12 +1,16 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha050::{ - global::type_parser, lexer::Token, parser::default_recovery, AmberParser, DataType, - Expression, Spanned, Statement, - }, - T, +use crate::grammar::alpha050::global::type_parser; +use crate::grammar::alpha050::parser::default_recovery; +use crate::grammar::alpha050::{ + AmberParser, + DataType, + Expression, + Spanned, + Statement, + Token, }; +use crate::T; use super::unary::unary_parser; diff --git a/src/grammar/alpha050/expressions/comparison.rs b/src/grammar/alpha050/expressions/comparison.rs index 92be98f..cb193c7 100644 --- a/src/grammar/alpha050/expressions/comparison.rs +++ b/src/grammar/alpha050/expressions/comparison.rs @@ -1,11 +1,14 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha050::{ - lexer::Token, parser::default_recovery, AmberParser, Expression, Spanned, Statement, - }, - T, +use crate::grammar::alpha050::parser::default_recovery; +use crate::grammar::alpha050::{ + AmberParser, + Expression, + Spanned, + Statement, + Token, }; +use crate::T; use super::sum::sum_parser; diff --git a/src/grammar/alpha050/expressions/is.rs b/src/grammar/alpha050/expressions/is.rs index 5ee4bd4..c1e7e97 100644 --- a/src/grammar/alpha050/expressions/is.rs +++ b/src/grammar/alpha050/expressions/is.rs @@ -1,12 +1,16 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha050::{ - global::type_parser, lexer::Token, parser::default_recovery, AmberParser, DataType, - Expression, Spanned, Statement, - }, - T, +use crate::grammar::alpha050::global::type_parser; +use crate::grammar::alpha050::parser::default_recovery; +use crate::grammar::alpha050::{ + AmberParser, + DataType, + Expression, + Spanned, + Statement, + Token, }; +use crate::T; use super::cast::cast_parser; diff --git a/src/grammar/alpha050/expressions/mod.rs b/src/grammar/alpha050/expressions/mod.rs index 71cb80b..ef862f3 100644 --- a/src/grammar/alpha050/expressions/mod.rs +++ b/src/grammar/alpha050/expressions/mod.rs @@ -1,6 +1,11 @@ use chumsky::prelude::*; -use super::{AmberParser, Expression, Spanned, Statement}; +use super::{ + AmberParser, + Expression, + Spanned, + Statement, +}; mod and; mod atom; diff --git a/src/grammar/alpha050/expressions/or.rs b/src/grammar/alpha050/expressions/or.rs index 364835d..0a2e922 100644 --- a/src/grammar/alpha050/expressions/or.rs +++ b/src/grammar/alpha050/expressions/or.rs @@ -1,11 +1,16 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha050::{lexer::Token, parser::default_recovery, AmberParser, Spanned, Statement}, - T, +use crate::grammar::alpha050::parser::default_recovery; +use crate::grammar::alpha050::{ + AmberParser, + Spanned, + Statement, + Token, }; +use crate::T; -use super::{and::and_parser, Expression}; +use super::and::and_parser; +use super::Expression; pub fn or_parser<'a>( stmnts: impl AmberParser<'a, Spanned>, diff --git a/src/grammar/alpha050/expressions/product.rs b/src/grammar/alpha050/expressions/product.rs index 5025d63..c6c218f 100644 --- a/src/grammar/alpha050/expressions/product.rs +++ b/src/grammar/alpha050/expressions/product.rs @@ -1,11 +1,14 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha050::{ - lexer::Token, parser::default_recovery, AmberParser, Expression, Spanned, Statement, - }, - T, +use crate::grammar::alpha050::parser::default_recovery; +use crate::grammar::alpha050::{ + AmberParser, + Expression, + Spanned, + Statement, + Token, }; +use 
crate::T; use super::is::is_parser; diff --git a/src/grammar/alpha050/expressions/range.rs b/src/grammar/alpha050/expressions/range.rs index f4b4e64..6b953ba 100644 --- a/src/grammar/alpha050/expressions/range.rs +++ b/src/grammar/alpha050/expressions/range.rs @@ -1,11 +1,14 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha050::{ - lexer::Token, parser::default_recovery, AmberParser, Expression, Spanned, Statement, - }, - T, +use crate::grammar::alpha050::parser::default_recovery; +use crate::grammar::alpha050::{ + AmberParser, + Expression, + Spanned, + Statement, + Token, }; +use crate::T; use super::or::or_parser; diff --git a/src/grammar/alpha050/expressions/sum.rs b/src/grammar/alpha050/expressions/sum.rs index b36c8aa..f34a9f9 100644 --- a/src/grammar/alpha050/expressions/sum.rs +++ b/src/grammar/alpha050/expressions/sum.rs @@ -1,11 +1,14 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha050::{ - lexer::Token, parser::default_recovery, AmberParser, Expression, Spanned, Statement, - }, - T, +use crate::grammar::alpha050::parser::default_recovery; +use crate::grammar::alpha050::{ + AmberParser, + Expression, + Spanned, + Statement, + Token, }; +use crate::T; use super::product::product_parser; diff --git a/src/grammar/alpha050/expressions/ternary.rs b/src/grammar/alpha050/expressions/ternary.rs index 1392746..1c33925 100644 --- a/src/grammar/alpha050/expressions/ternary.rs +++ b/src/grammar/alpha050/expressions/ternary.rs @@ -1,8 +1,12 @@ use chumsky::prelude::*; -use crate::grammar::alpha050::lexer::Token; use crate::grammar::alpha050::parser::default_recovery; -use crate::grammar::alpha050::{AmberParser, Spanned, Statement}; +use crate::grammar::alpha050::{ + AmberParser, + Spanned, + Statement, + Token, +}; use crate::T; use super::range::range_parser; diff --git a/src/grammar/alpha050/expressions/unary.rs b/src/grammar/alpha050/expressions/unary.rs index 1d8ba97..511962f 100644 --- a/src/grammar/alpha050/expressions/unary.rs +++ b/src/grammar/alpha050/expressions/unary.rs @@ -1,9 +1,13 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha050::{lexer::Token, AmberParser, Expression, Spanned, Statement}, - T, +use crate::grammar::alpha050::{ + AmberParser, + Expression, + Spanned, + Statement, + Token, }; +use crate::T; use super::atom::array_index_parser; diff --git a/src/grammar/alpha050/global.rs b/src/grammar/alpha050/global.rs index a9f38ff..b30f3e4 100644 --- a/src/grammar/alpha050/global.rs +++ b/src/grammar/alpha050/global.rs @@ -1,17 +1,26 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha050::{statements::block::block_parser, Block}, - T, -}; +use crate::grammar::alpha050::statements::block::block_parser; +use crate::grammar::alpha050::Block; +use crate::grammar::Token; +use crate::T; +use super::expressions::parse_expr; +use super::parser::{ + default_recovery, + ident, +}; +use super::statements::statement_parser; use super::{ - expressions::parse_expr, - lexer::Token, - parser::{default_recovery, ident}, - statements::statement_parser, - AmberParser, CompilerFlag, DataType, Expression, FunctionArgument, GlobalStatement, - ImportContent, Spanned, Statement, + AmberParser, + CompilerFlag, + DataType, + Expression, + FunctionArgument, + GlobalStatement, + ImportContent, + Spanned, + Statement, }; pub fn import_parser<'a>() -> impl AmberParser<'a, Spanned> { @@ -125,8 +134,7 @@ pub fn type_parser<'a>() -> impl AmberParser<'a, Spanned> { } fn compiler_flag_parser<'a>() -> impl AmberParser<'a, Spanned> { - just(T!["#"]) - 
.ignore_then(just(T!["["])) + just(T!["#["]) .ignore_then( choice(( just(T!["allow_nested_if_else"]).to(CompilerFlag::AllowNestedIfElse), diff --git a/src/grammar/alpha050/lexer.rs b/src/grammar/alpha050/lexer.rs index 278aaef..4074b46 100644 --- a/src/grammar/alpha050/lexer.rs +++ b/src/grammar/alpha050/lexer.rs @@ -1,74 +1,956 @@ -use heraclitus_compiler::prelude::*; - -pub use crate::grammar::Token; - -pub fn get_rules() -> Rules { - let symbols = vec![ - '+', '-', '*', '/', '%', ';', ':', '(', ')', '[', ']', '{', '}', ',', '.', '<', '>', '=', - '!', '?', '\\', '"', '$', '\n', - ]; - let compounds = vec![ - ('<', '='), - ('>', '='), - ('!', '='), - ('=', '='), - ('+', '='), - ('-', '='), - ('*', '='), - ('/', '='), - ('%', '='), - ('.', '.'), - ('/', '/'), - ]; - let region = reg![ - reg!(string as "string literal" => { - begin: "\"", - end: "\"", - tokenize: true, - allow_unclosed_region: true - } => [ - reg!(str_interp as "string interpolation" => { - begin: "{", - end: "}", - tokenize: true, - allow_unclosed_region: true - } ref global) - ]), - reg!(command as "command literal" => { - begin: "$", - end: "$", - tokenize: true, - allow_unclosed_region: true - } => [ - reg!(com_interp as "command interpolation" => { - begin: "{", - end: "}", - tokenize: true, - allow_unclosed_region: true - } ref global) - ]), - reg!(cc_flag as "compiler flag" => { - begin: "#[", - end: "]", - tokenize: true, - allow_unclosed_region: true - }), - reg!(comment as "comment" => { - begin: "//", - end: "\n", - allow_unclosed_region: true - }), - reg!(comment as "shebang" => { - begin: "#!", - end: "\n", - allow_unclosed_region: true - }), - reg!(interp as "interpolation" => { - begin: "{", - end: "}", - tokenize: true, - allow_unclosed_region: true - } ref global) - ]; - Rules::new(symbols, compounds, region) +use logos::Logos; + +use super::{ + Spanned, + Token, +}; +use crate::grammar::SimpleSpan; + +/// Main token context - general code +#[derive(Logos, Debug, Clone, PartialEq)] +#[logos(skip r"[ \t\r]+")] +pub enum TokenKind { + // Compound operators + #[token("<=")] + LessEquals, + #[token(">=")] + GreaterEquals, + #[token("!=")] + NotEquals, + #[token("==")] + EqualsEquals, + #[token("+=")] + PlusEquals, + #[token("-=")] + MinusEquals, + #[token("*=")] + MulEquals, + #[token("/=")] + DivEquals, + #[token("%=")] + ModEquals, + #[token("..")] + DotDot, + + // Single operators + #[token("+")] + Plus, + #[token("-")] + Minus, + #[token("*")] + Star, + #[token("/")] + Slash, + #[token("%")] + Percent, + #[token("<")] + Less, + #[token(">")] + Greater, + #[token("=")] + Equals, + #[token("!")] + Bang, + #[token("?")] + Question, + + // Delimiters + #[token("(")] + OpenParen, + #[token(")")] + CloseParen, + #[token("[")] + OpenBracket, + #[token("]")] + CloseBracket, + #[token("{")] + OpenBrace, + #[token("}")] + CloseBrace, + + // Punctuation + #[token(";")] + Semicolon, + #[token(":")] + Colon, + #[token(",")] + Comma, + #[token(".")] + Dot, + #[token("\\")] + Backslash, + + // String start (transition to StringContext) + #[token("\"")] + Quote, + + // Command start/end (transition to CommandContext) + #[token("$")] + Dollar, + + // Compiler flag start (transition to CompilerFlagContext) + #[token("#[")] + CompilerFlagStart, + + // Comments (include trailing newline to match Heraclitus behavior) + #[regex(r"//[^\n]*\n?", priority = 2)] + Comment, + #[regex(r"#![^\n]*\n?", priority = 2)] + Shebang, + + // Identifier + #[regex(r"[a-zA-Z_][a-zA-Z0-9_]*")] + Identifier, + + // Number + 
#[regex(r"([0-9]+(\.[0-9]+)?)|(\.[0-9]+)")] + Number, + + // Newline + #[token("\n")] + Newline, +} + +/// String content context - inside "..." +#[derive(Logos, Debug, Clone, PartialEq)] +pub enum StringContext { + // String end + #[token("\"")] + Quote, + + // Interpolation start + #[token("{")] + OpenBrace, + + // Text content (not quotes, backslashes, or braces) + #[regex(r#"[^"\\{]+"#)] + Content, + + // Escape sequence + #[regex(r#"\\."#)] + Escape, +} + +/// Command content context - inside $...$ +#[derive(Logos, Debug, Clone, PartialEq)] +pub enum CommandContext { + // Command end + #[token("$")] + Dollar, + + // Interpolation start + #[token("{")] + OpenBrace, + + // Text content (not $, backslash, or braces) + #[regex(r"[^$\\{]+")] + Content, + + // Escape sequence + #[regex(r#"\\."#)] + Escape, +} + +/// Context stack to track where we are in parsing +#[derive(Debug, Clone, PartialEq)] +enum LexerContext { + Main, + String, + Command, +} + +/// Stateful tokenizer that manages context switching +pub struct StatefulTokenizer<'source> { + source: &'source str, + position: usize, + context_stack: Vec, + brace_depth: usize, // Track brace nesting inside interpolations +} + +impl<'source> StatefulTokenizer<'source> { + fn new(source: &'source str) -> Self { + Self { + source, + position: 0, + context_stack: vec![LexerContext::Main], + brace_depth: 0, + } + } + + pub fn tokenize(source: &'source str) -> Vec> { + let mut tokenizer = Self::new(source); + let mut tokens = Vec::new(); + + while tokenizer.position < source.len() { + if let Some((token, span)) = tokenizer.next_token() { + tokens.push((token, span)); + } else { + break; + } + } + + // Filter out newline tokens - they're used for position tracking but not needed for parsing + tokens + .into_iter() + .filter(|(token, _)| token.0 != "\n") + .collect() + } + + fn next_token(&mut self) -> Option<(Token, SimpleSpan)> { + if self.position >= self.source.len() { + return None; + } + + let remaining = &self.source[self.position..]; + let context = self.context_stack.last()?.clone(); + + match context { + LexerContext::Main => self.lex_main_context(remaining), + LexerContext::String => self.lex_string_context(remaining), + LexerContext::Command => self.lex_command_context(remaining), + } + } + + fn lex_main_context(&mut self, remaining: &str) -> Option<(Token, SimpleSpan)> { + let mut lex = TokenKind::lexer(remaining); + let token_result = lex.next()?; + let span = lex.span(); + let slice = lex.slice(); + + // span is relative to `remaining`, so add current position to get absolute + let start = self.position + span.start; + let end = self.position + span.end; + self.position = end; + + match token_result { + Ok(TokenKind::Quote) => { + // Entering string context + self.context_stack.push(LexerContext::String); + Some((Token("\"".to_string()), SimpleSpan::new(start, end))) + } + Ok(TokenKind::Dollar) => { + // Check if this starts a command. + // If the next char is another '$', do not start a command (this is the first of $$). + // If this is the second of a $$ (previous char was '$'), only start a command + // when the following char is not whitespace. Otherwise single $ starts a command. 
+ let prev_is_dollar = start > 0 && self.source[start - 1..].starts_with('$'); + + let is_command_start = if span.end < remaining.len() { + // Look at the next char after this $ in the remaining slice + let mut next_chars = remaining[span.end..].chars(); + match next_chars.next() { + Some(next_ch) => { + if next_ch == '$' { + // next is a dollar -> this $ does not start a command + false + } else if prev_is_dollar { + // this is the second $ in $$ — only start a command if not followed by whitespace + !next_ch.is_whitespace() + } else { + // single $ (not followed by $) -> start a command + true + } + } + None => false, + } + } else { + false + }; + + if is_command_start { + // Entering command context + self.context_stack.push(LexerContext::Command); + } + Some((Token("$".to_string()), SimpleSpan::new(start, end))) + } + Ok(TokenKind::OpenBrace) => { + // Only track brace depth if we're inside an interpolation (context stack > 1) + if self.context_stack.len() > 1 { + self.brace_depth += 1; + } + Some((Token("{".to_string()), SimpleSpan::new(start, end))) + } + Ok(TokenKind::CloseBrace) => { + // Only track brace depth if we're inside an interpolation + if self.context_stack.len() > 1 && self.brace_depth > 0 { + self.brace_depth -= 1; + if self.brace_depth == 0 { + // Exiting interpolation, pop back to string/command context + self.context_stack.pop(); + } + } + Some((Token("}".to_string()), SimpleSpan::new(start, end))) + } + Ok(kind) => { + let token_str = token_kind_to_string(&kind, slice); + Some((Token(token_str), SimpleSpan::new(start, end))) + } + Err(_) => Some((Token(slice.to_string()), SimpleSpan::new(start, end))), + } + } + + fn lex_string_context(&mut self, remaining: &str) -> Option<(Token, SimpleSpan)> { + let mut lex = StringContext::lexer(remaining); + let token_result = lex.next()?; + let span = lex.span(); + let slice = lex.slice(); + + // span is relative to `remaining`, so add current position to get absolute + let start = self.position + span.start; + let end = self.position + span.end; + self.position = end; + + match token_result { + Ok(StringContext::Quote) => { + // Exiting string context + self.context_stack.pop(); + Some((Token("\"".to_string()), SimpleSpan::new(start, end))) + } + Ok(StringContext::OpenBrace) => { + // Entering interpolation - switch back to main context + self.context_stack.push(LexerContext::Main); + self.brace_depth = 1; + Some((Token("{".to_string()), SimpleSpan::new(start, end))) + } + Ok(StringContext::Content) | Ok(StringContext::Escape) => { + Some((Token(slice.to_string()), SimpleSpan::new(start, end))) + } + Err(_) => Some((Token(slice.to_string()), SimpleSpan::new(start, end))), + } + } + + fn lex_command_context(&mut self, remaining: &str) -> Option<(Token, SimpleSpan)> { + let mut lex = CommandContext::lexer(remaining); + let token_result = lex.next()?; + let span = lex.span(); + let slice = lex.slice(); + + // span is relative to `remaining`, so add current position to get absolute + let start = self.position + span.start; + let end = self.position + span.end; + self.position = end; + + match token_result { + Ok(CommandContext::Dollar) => { + // Exiting command context + self.context_stack.pop(); + Some((Token("$".to_string()), SimpleSpan::new(start, end))) + } + Ok(CommandContext::OpenBrace) => { + // Entering interpolation - switch back to main context + self.context_stack.push(LexerContext::Main); + self.brace_depth = 1; + Some((Token("{".to_string()), SimpleSpan::new(start, end))) + } + Ok(CommandContext::Content) | 
Ok(CommandContext::Escape) => {
+                // If this command context was entered as the second `$` of `$$` and
+                // the content starts with whitespace (e.g. `$$ failed {`), trim the
+                // leading whitespace so the identifier/tokenization matches the
+                // parser's expectations (no leading spaces).
+                let token_text = if start > 0 && self.source[start - 1..].starts_with('$') {
+                    // second of $$ - trim leading whitespace from content
+                    slice.trim_start().to_string()
+                } else {
+                    slice.to_string()
+                };
+                Some((Token(token_text), SimpleSpan::new(start, end)))
+            }
+            Err(e) => {
+                eprintln!(
+                    "CommandContext lexer error at pos {}: {:?}, slice: {:?}, remaining: {:?}",
+                    self.position,
+                    e,
+                    slice,
+                    &remaining[..remaining.len().min(50)]
+                );
+                Some((Token(slice.to_string()), SimpleSpan::new(start, end)))
+            }
+        }
+    }
+}
+
+fn token_kind_to_string(kind: &TokenKind, slice: &str) -> String {
+    match kind {
+        TokenKind::LessEquals => "<=".to_string(),
+        TokenKind::GreaterEquals => ">=".to_string(),
+        TokenKind::NotEquals => "!=".to_string(),
+        TokenKind::EqualsEquals => "==".to_string(),
+        TokenKind::PlusEquals => "+=".to_string(),
+        TokenKind::MinusEquals => "-=".to_string(),
+        TokenKind::MulEquals => "*=".to_string(),
+        TokenKind::DivEquals => "/=".to_string(),
+        TokenKind::ModEquals => "%=".to_string(),
+        TokenKind::DotDot => "..".to_string(),
+        TokenKind::Plus => "+".to_string(),
+        TokenKind::Minus => "-".to_string(),
+        TokenKind::Star => "*".to_string(),
+        TokenKind::Slash => "/".to_string(),
+        TokenKind::Percent => "%".to_string(),
+        TokenKind::Less => "<".to_string(),
+        TokenKind::Greater => ">".to_string(),
+        TokenKind::Equals => "=".to_string(),
+        TokenKind::Bang => "!".to_string(),
+        TokenKind::Question => "?".to_string(),
+        TokenKind::OpenParen => "(".to_string(),
+        TokenKind::CloseParen => ")".to_string(),
+        TokenKind::OpenBracket => "[".to_string(),
+        TokenKind::CloseBracket => "]".to_string(),
+        TokenKind::OpenBrace => "{".to_string(),
+        TokenKind::CloseBrace => "}".to_string(),
+        TokenKind::Semicolon => ";".to_string(),
+        TokenKind::Colon => ":".to_string(),
+        TokenKind::Comma => ",".to_string(),
+        TokenKind::Dot => ".".to_string(),
+        TokenKind::Backslash => "\\".to_string(),
+        TokenKind::Quote => "\"".to_string(),
+        TokenKind::Dollar => "$".to_string(),
+        TokenKind::Newline => "\n".to_string(),
+        TokenKind::CompilerFlagStart => "#[".to_string(),
+        // For these, use the actual slice
+        TokenKind::Comment | TokenKind::Shebang | TokenKind::Identifier | TokenKind::Number => {
+            slice.to_string()
+        }
+    }
+}
+
+/// Public tokenize function
+pub fn tokenize(input: &str) -> Vec<(Token, SimpleSpan)> {
+    StatefulTokenizer::tokenize(input)
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_numbers() {
+        let input = r#".5 2.4 2"#;
+        let tokens = tokenize(input);
+
+        assert_eq!(tokens[0].0 .0, ".5");
+        assert_eq!(tokens[1].0 .0, "2.4");
+        assert_eq!(tokens[2].0 .0, "2");
+    }
+
+    #[test]
+    fn test_simple_string() {
+        let input = r#""hello""#;
+        let tokens = tokenize(input);
+        assert_eq!(tokens.len(), 3);
+        assert_eq!(tokens[0].0 .0, "\"");
+        assert_eq!(tokens[1].0 .0, "hello");
+        assert_eq!(tokens[2].0 .0, "\"");
+    }
+
+    #[test]
+    fn test_string_with_interpolation() {
+        let input = r#""Hello {name}!""#;
+        let tokens = tokenize(input);
+        let token_strs: Vec<&str> = tokens.iter().map(|(t, _)| t.0.as_str()).collect();
+        assert_eq!(
+            token_strs,
+            vec!["\"", "Hello ", "{", "name", "}", "!", "\""]
+        );
+    }
+
+    #[test]
+    fn test_command() {
+        let input = r#"$echo test$"#;
+        let tokens = tokenize(input);
+
assert_eq!(tokens[0].0 .0, "$"); + assert_eq!(tokens[tokens.len() - 1].0 .0, "$"); + } + + #[test] + fn test_keywords() { + let input = "if else fun return"; + let tokens = tokenize(input); + assert_eq!(tokens.len(), 4); + assert_eq!(tokens[0].0 .0, "if"); + assert_eq!(tokens[1].0 .0, "else"); + } + + #[test] + fn test_nested_string_interpolation() { + let input = r#""outer {inner "nested {x}"}""#; + let tokens = tokenize(input); + let token_strs: Vec<&str> = tokens.iter().map(|(t, _)| t.0.as_str()).collect(); + assert_eq!( + token_strs, + vec!["\"", "outer ", "{", "inner", "\"", "nested ", "{", "x", "}", "\"", "}", "\""] + ); + } + + #[test] + fn test_string_with_escape() { + let input = r#""hello \"world\"""#; + let tokens = tokenize(input); + let token_strs: Vec<&str> = tokens.iter().map(|(t, _)| t.0.as_str()).collect(); + assert_eq!( + token_strs, + vec!["\"", "hello ", r#"\""#, "world", r#"\""#, "\""] + ); + } + + #[test] + fn test_string_with_multiple_interpolations() { + let input = r#""Hello {name}, you are {age} years old!""#; + let tokens = tokenize(input); + let token_strs: Vec<&str> = tokens.iter().map(|(t, _)| t.0.as_str()).collect(); + println!("Actual tokens: {:?}", token_strs); + // After closing }, we should return to string context, not stay in main + // This is a bug - need to fix context management + assert_eq!( + token_strs, + vec![ + "\"", + "Hello ", + "{", + "name", + "}", + ", you are ", + "{", + "age", + "}", + " years old!", + "\"" + ] + ); + } + + #[test] + fn test_empty_string() { + let input = r#""""#; + let tokens = tokenize(input); + assert_eq!(tokens.len(), 2); + assert_eq!(tokens[0].0 .0, "\""); + assert_eq!(tokens[1].0 .0, "\""); + } + + #[test] + fn test_string_with_expression_interpolation() { + let input = r#""Result: {a + b}""#; + let tokens = tokenize(input); + let token_strs: Vec<&str> = tokens.iter().map(|(t, _)| t.0.as_str()).collect(); + assert_eq!( + token_strs, + vec!["\"", "Result: ", "{", "a", "+", "b", "}", "\""] + ); + } + + #[test] + fn test_command_simple() { + let input = r#"$ls -la$"#; + let tokens = tokenize(input); + let token_strs: Vec<&str> = tokens.iter().map(|(t, _)| t.0.as_str()).collect(); + assert_eq!(token_strs, vec!["$", "ls -la", "$"]); + } + + #[test] + fn test_command_with_interpolation() { + let input = r#"$echo {msg}$"#; + let tokens = tokenize(input); + let token_strs: Vec<&str> = tokens.iter().map(|(t, _)| t.0.as_str()).collect(); + assert_eq!(token_strs, vec!["$", "echo ", "{", "msg", "}", "$"]); + } + + #[test] + fn test_command_with_multiple_interpolations() { + let input = r#"$cp {src} {dst}$"#; + let tokens = tokenize(input); + let token_strs: Vec<&str> = tokens.iter().map(|(t, _)| t.0.as_str()).collect(); + assert_eq!( + token_strs, + vec!["$", "cp ", "{", "src", "}", " ", "{", "dst", "}", "$"] + ); + } + + #[test] + fn test_command_with_string_in_interpolation() { + let input = r#"$echo {"Hello World"}$"#; + let tokens = tokenize(input); + let token_strs: Vec<&str> = tokens.iter().map(|(t, _)| t.0.as_str()).collect(); + assert_eq!( + token_strs, + vec!["$", "echo ", "{", "\"", "Hello World", "\"", "}", "$"] + ); + } + + #[test] + fn test_command_empty() { + let input = r#"$$"#; + let tokens = tokenize(input); + assert_eq!(tokens.len(), 2); + assert_eq!(tokens[0].0 .0, "$"); + assert_eq!(tokens[1].0 .0, "$"); + } + + #[test] + fn test_string_only_interpolation() { + let input = r#""{x}""#; + let tokens = tokenize(input); + let token_strs: Vec<&str> = tokens.iter().map(|(t, _)| t.0.as_str()).collect(); + 
assert_eq!(token_strs, vec!["\"", "{", "x", "}", "\""]); + } + + #[test] + fn test_mixed_code_with_strings() { + let input = r#"let x = "hello {name}""#; + let tokens = tokenize(input); + let token_strs: Vec<&str> = tokens.iter().map(|(t, _)| t.0.as_str()).collect(); + assert_eq!( + token_strs, + vec!["let", "x", "=", "\"", "hello ", "{", "name", "}", "\""] + ); + } + + #[test] + fn test_comment_tokenization() { + let input = "// this is a comment"; + let tokens = tokenize(input); + assert_eq!(tokens.len(), 1); + assert_eq!(tokens[0].0 .0, "// this is a comment"); + } + + #[test] + fn test_shebang_tokenization() { + let input = "#!/usr/bin/env amber"; + let tokens = tokenize(input); + assert_eq!(tokens.len(), 1); + assert_eq!(tokens[0].0 .0, "#!/usr/bin/env amber"); + } + + #[test] + fn test_compiler_flag() { + let input = "#[allow_nested_if_else]"; + let tokens = tokenize(input); + // Compiler flags are now split into: #[, content, ] + assert_eq!(tokens.len(), 3); + assert_eq!(tokens[0].0 .0, "#["); + assert_eq!(tokens[1].0 .0, "allow_nested_if_else"); + assert_eq!(tokens[2].0 .0, "]"); + } + + #[test] + fn test_operators_and_delimiters() { + let input = "a <= b and c >= d"; + let tokens = tokenize(input); + let token_strs: Vec<&str> = tokens.iter().map(|(t, _)| t.0.as_str()).collect(); + assert_eq!(token_strs, vec!["a", "<=", "b", "and", "c", ">=", "d"]); + } + + #[test] + fn test_array_with_range() { + let input = "[1..10]"; + let tokens = tokenize(input); + let token_strs: Vec<&str> = tokens.iter().map(|(t, _)| t.0.as_str()).collect(); + assert_eq!(token_strs, vec!["[", "1", "..", "10", "]"]); + } + + #[test] + fn test_function_call_with_string() { + let input = r#"echo("test {x}")"#; + let tokens = tokenize(input); + let token_strs: Vec<&str> = tokens.iter().map(|(t, _)| t.0.as_str()).collect(); + assert_eq!( + token_strs, + vec!["echo", "(", "\"", "test ", "{", "x", "}", "\"", ")"] + ); + } + + #[test] + fn test_deeply_nested_braces() { + let input = r#""a {b {c {d}}}""#; + let tokens = tokenize(input); + let token_strs: Vec<&str> = tokens.iter().map(|(t, _)| t.0.as_str()).collect(); + assert_eq!( + token_strs, + vec!["\"", "a ", "{", "b", "{", "c", "{", "d", "}", "}", "}", "\""] + ); + } + + #[test] + fn test_string_with_backslash_escape() { + let input = r#""path\\to\\file""#; + let tokens = tokenize(input); + let token_strs: Vec<&str> = tokens.iter().map(|(t, _)| t.0.as_str()).collect(); + assert_eq!( + token_strs, + vec!["\"", "path", r#"\\"#, "to", r#"\\"#, "file", "\""] + ); + } + + #[test] + fn test_command_with_escape() { + let input = r#"$echo \$$"#; + let tokens = tokenize(input); + let token_strs: Vec<&str> = tokens.iter().map(|(t, _)| t.0.as_str()).collect(); + assert_eq!(token_strs, vec!["$", "echo ", r#"\$"#, "$"]); + } + + #[test] + fn test_numbers_integers_and_floats() { + let input = "42 3.14 100"; + let tokens = tokenize(input); + let token_strs: Vec<&str> = tokens.iter().map(|(t, _)| t.0.as_str()).collect(); + assert_eq!(token_strs, vec!["42", "3.14", "100"]); + } + + #[test] + fn test_span_accuracy() { + let input = r#""hello {name}""#; + let tokens = tokenize(input); + + // Verify spans are correct + assert_eq!(tokens[0].1, SimpleSpan::new(0, 1)); // " + assert_eq!(tokens[1].1, SimpleSpan::new(1, 7)); // hello + assert_eq!(tokens[2].1, SimpleSpan::new(7, 8)); // { + assert_eq!(tokens[3].1, SimpleSpan::new(8, 12)); // name + assert_eq!(tokens[4].1, SimpleSpan::new(12, 13)); // } + assert_eq!(tokens[5].1, SimpleSpan::new(13, 14)); // " + } + + #[test] + fn 
test_comment_with_newline() {
+        let input = "echo \"x\" // test comment\nif x";
+        let tokens = tokenize(input);
+
+        // Print for debugging
+        for (i, (token, span)) in tokens.iter().enumerate() {
+            eprintln!("{}: {:?} @ {:?}", i, token.0, span);
+        }
+
+        let token_strs: Vec<&str> = tokens.iter().map(|(t, _)| t.0.as_str()).collect();
+        // Should see: echo, ", x, ", comment, if, x
+        assert!(token_strs.contains(&"echo"));
+        assert!(token_strs.contains(&"if"));
+    }
+
+    #[test]
+    fn test_logos_newline_directly() {
+        let input = "}\n\nif";
+        let mut lex = TokenKind::lexer(input);
+
+        let mut count = 0;
+        while let Some(result) = lex.next() {
+            eprintln!(
+                "Token {}: {:?} = {:?} @ {:?}",
+                count,
+                result,
+                lex.slice(),
+                lex.span()
+            );
+            count += 1;
+        }
+
+        eprintln!("Total: {} tokens", count);
+        assert!(count >= 4, "Expected at least 4 tokens from Logos directly");
+    }
+
+    #[test]
+    fn test_newlines_tokenization() {
+        let input = "}\n\nif";
+        let tokens = tokenize(input);
+
+        eprintln!("Total tokens: {}", tokens.len());
+        for (i, (token, span)) in tokens.iter().enumerate() {
+            eprintln!(
+                "{}: {:?} (bytes: {:?}) @ {:?}",
+                i,
+                token.0,
+                token.0.as_bytes(),
+                span
+            );
+        }
+
+        // Newlines are filtered out, so should be: }, if
+        assert_eq!(tokens.len(), 2, "Expected 2 tokens (newlines filtered)");
+        assert_eq!(tokens[0].0 .0, "}");
+        assert_eq!(tokens[1].0 .0, "if");
+    }
+
+    #[test]
+    fn test_after_closing_brace() {
+        let input = "}\n\n if age";
+        let tokens = tokenize(input);
+
+        eprintln!("Total tokens: {}", tokens.len());
+        for (i, (token, span)) in tokens.iter().enumerate() {
+            eprintln!("{}: {:?} @ {:?}", i, token.0, span);
+        }
+
+        // Newlines are filtered out, so should be: }, if, age
+        assert_eq!(tokens.len(), 3, "Expected 3 tokens (newlines filtered)");
+        assert_eq!(tokens[0].0 .0, "}");
+        assert_eq!(tokens[1].0 .0, "if");
+        assert_eq!(tokens[2].0 .0, "age");
+    }
+
+    #[test]
+    fn test_full_comments_in_ifs_tokenization() {
+        let input = r#"
+    if {
+        1 == 2: echo "x" // test comment
+        // another comment
+        2 == 2 {
+            echo "y"
+        }
+        // another
+        else: echo "z" // comment
+        // super comment
+        /// doc comment
+    }
+
+    if age >= 16: echo "Welcome" // comment
+    // comment in between
+    else: echo "Entry not allowed" // another comment
+"#;
+        eprintln!("Input length: {}", input.len());
+        eprintln!(
+            "Input around position 233: {:?}",
+            &input.chars().skip(233).take(20).collect::<String>()
+        );
+
+        let tokens = tokenize(input);
+
+        eprintln!("Total tokens: {}", tokens.len());
+        for (i, (token, span)) in tokens.iter().enumerate() {
+            eprintln!("{}: {:?} @ {:?}", i, token.0, span);
+        }
+
+        // Should have more than 40 tokens for this input
+        assert!(
+            tokens.len() > 40,
+            "Expected more than 40 tokens, got {}",
+            tokens.len()
+        );
+    }
+
+    #[test]
+    fn test_compiler_flag_tokenization() {
+        let input = "#[allow_absurd_cast]\npub fun test() {}";
+        let tokens = tokenize(input);
+
+        eprintln!("Total tokens: {}", tokens.len());
+        for (i, (token, span)) in tokens.iter().enumerate() {
+            eprintln!("{}: {:?} @ {:?}", i, token.0, span);
+        }
+
+        assert!(!tokens.is_empty(), "Expected tokens");
+        // Compiler flags are now split into tokens: #[, content, ]
+        assert_eq!(tokens[0].0 .0, "#[");
+        assert_eq!(tokens[1].0 .0, "allow_absurd_cast");
+        assert_eq!(tokens[2].0 .0, "]");
+        assert_eq!(tokens[3].0 .0, "pub");
+    }
+
+    #[test]
+    fn test_stdlib_math_full_file() {
+        use std::fs::read_to_string;
+
+        let input =
+            
read_to_string("resources/alpha050/std/math.ab").expect("Failed to read math.ab"); + + let tokens = tokenize(&input); + + eprintln!("Total tokens from full file: {}", tokens.len()); + eprintln!("First 30 tokens:"); + for (i, (token, span)) in tokens.iter().enumerate().take(30) { + eprintln!("{}: {:?} @ {:?}", i, token.0, span); + } + + // The file should have many tokens + assert!( + tokens.len() > 100, + "Expected more than 100 tokens, got {}", + tokens.len() + ); + } + + #[test] + fn test_command_with_single_quotes_and_dollar() { + let input = r#"trust $ echo "{text}" | sed -e 's/^[[:space:]]*//' $"#; + let tokens = tokenize(input); + + eprintln!("Total tokens: {}", tokens.len()); + for (i, (token, span)) in tokens.iter().enumerate() { + eprintln!("{}: {:?} @ {:?}", i, token.0, span); + } + + // The command should end with a separate $ token + let last_token = &tokens[tokens.len() - 1]; + assert_eq!( + last_token.0 .0, "$", + "Last token should be the closing $ of the command" + ); + + // Find the token before the last $ + let before_last = &tokens[tokens.len() - 2]; + assert!( + !before_last.0 .0.contains("$"), + "Token before last should not contain $, got: {:?}", + before_last.0 .0 + ); + } + + #[test] + fn test_command_context_regex_directly() { + use logos::Logos; + + // Test the CommandContext lexer directly with the problematic input + let input = " | sed -e 's/^[[:space:]]*//' "; + let mut lex = CommandContext::lexer(input); + + eprintln!("Testing CommandContext with: {:?}", input); + while let Some(result) = lex.next() { + eprintln!("Token: {:?} = {:?} @ {:?}", result, lex.slice(), lex.span()); + } + } + + #[test] + fn test_complete_if_blocks() { + let input = r#" + if { + 1 == 2: echo "x" // test comment + // another comment + 2 == 2 { + echo "y" + } + // another + else: echo "z" // comment + // super comment + /// doc comment + } + + if age >= 16: echo "Welcome" // comment + // comment in between + else: echo "Entry not allowed" // another comment +"#; + let tokens = tokenize(input); + + eprintln!("Total tokens: {}", tokens.len()); + for (i, (token, span)) in tokens.iter().enumerate() { + eprintln!("{}: {:?} @ {:?}", i, token.0, span); + } + + // Should tokenize the complete input + assert!( + tokens.len() >= 45, + "Expected at least 45 tokens, got {}", + tokens.len() + ); + + // Verify we have the second if statement + assert!( + tokens.iter().any(|(t, _)| t.0 == "age"), + "Should have 'age' identifier" + ); + assert!( + tokens.iter().any(|(t, _)| t.0 == "Entry not allowed"), + "Should have text from second if block" + ); + } } diff --git a/src/grammar/alpha050/mod.rs b/src/grammar/alpha050/mod.rs index adc8e0a..b864c8b 100644 --- a/src/grammar/alpha050/mod.rs +++ b/src/grammar/alpha050/mod.rs @@ -1,17 +1,24 @@ -use crate::analysis::types::DataType; +pub use super::{ + Spanned, + Token, +}; -pub use super::Spanned; -use super::{CommandModifier, CompilerFlag, Grammar, LSPAnalysis, ParserResponse, Span}; -use chumsky::{ - error::Rich, - extra::Err, - input::{Input, SpannedInput}, - span::SimpleSpan, - Parser, +use super::{ + CommandModifier, + CompilerFlag, + Grammar, + LSPAnalysis, + ParserResponse, + Span, +}; +use crate::analysis::types::DataType; +use chumsky::error::Rich; +use chumsky::extra::Err; +use chumsky::input::{ + Input, + SpannedInput, }; -use heraclitus_compiler::prelude::*; -use lexer::{get_rules, Token}; -use prelude::lexer::Lexer; +use chumsky::Parser; use semantic_tokens::semantic_tokens_from_ast; pub mod expressions; @@ -259,9 +266,7 @@ pub enum 
GlobalStatement { } #[derive(Debug)] -pub struct AmberCompiler { - lexer: Lexer, -} +pub struct AmberCompiler {} impl Default for AmberCompiler { fn default() -> Self { @@ -271,9 +276,7 @@ impl Default for AmberCompiler { impl AmberCompiler { pub fn new() -> Self { - let lexer = Lexer::new(get_rules()); - - AmberCompiler { lexer } + AmberCompiler {} } pub fn parser<'a>(&self) -> impl AmberParser<'a, Vec>> { @@ -284,22 +287,9 @@ impl AmberCompiler { impl LSPAnalysis for AmberCompiler { #[tracing::instrument(skip_all)] fn tokenize(&self, input: &str) -> Vec> { - // It should never fail - self.lexer - .tokenize(&input.replace("\r\n", "\n").replace("\r", "\n")) - .expect("Failed to tokenize input") - .iter() - .filter_map(|t| { - if t.word == "\n" { - return None; - } - - Some(( - Token(t.word.clone()), - SimpleSpan::new(t.start, t.start + t.word.chars().count()), - )) - }) - .collect() + // Normalize line endings and use Logos v2 lexer with context morphing + let normalized = input.replace("\r\n", "\n").replace("\r", "\n"); + lexer::tokenize(&normalized) } #[tracing::instrument(skip_all)] diff --git a/src/grammar/alpha050/parser.rs b/src/grammar/alpha050/parser.rs index 6282afd..5a332ca 100644 --- a/src/grammar/alpha050/parser.rs +++ b/src/grammar/alpha050/parser.rs @@ -1,8 +1,9 @@ use chumsky::prelude::*; +use crate::grammar::Token; use crate::T; -use super::{lexer::Token, AmberParser}; +use super::AmberParser; const KEYWORDS: &[&str] = &[ "if", diff --git a/src/grammar/alpha050/statements/block.rs b/src/grammar/alpha050/statements/block.rs index 9a8b2fc..7297179 100644 --- a/src/grammar/alpha050/statements/block.rs +++ b/src/grammar/alpha050/statements/block.rs @@ -1,11 +1,14 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha050::{ - lexer::Token, parser::default_recovery, AmberParser, Block, Spanned, Statement, - }, - T, +use crate::grammar::alpha050::parser::default_recovery; +use crate::grammar::alpha050::{ + AmberParser, + Block, + Spanned, + Statement, + Token, }; +use crate::T; use super::modifiers::modifier_parser; diff --git a/src/grammar/alpha050/statements/comment.rs b/src/grammar/alpha050/statements/comment.rs index 1e440ed..d3a1a38 100644 --- a/src/grammar/alpha050/statements/comment.rs +++ b/src/grammar/alpha050/statements/comment.rs @@ -1,6 +1,11 @@ use chumsky::prelude::*; -use crate::grammar::alpha050::{lexer::Token, AmberParser, Comment, Spanned}; +use crate::grammar::alpha050::{ + AmberParser, + Comment, + Spanned, + Token, +}; pub fn comment_parser<'a>() -> impl AmberParser<'a, Spanned> { choice((doc_string_parser(), single_line_comment_parser())).boxed() diff --git a/src/grammar/alpha050/statements/const_init.rs b/src/grammar/alpha050/statements/const_init.rs index eeca42e..ff2dc44 100644 --- a/src/grammar/alpha050/statements/const_init.rs +++ b/src/grammar/alpha050/statements/const_init.rs @@ -1,17 +1,18 @@ use chumsky::prelude::*; -use crate::{ - grammar::{ - alpha050::Expression, - alpha050::{ - expressions::parse_expr, - lexer::Token, - parser::{default_recovery, ident}, - AmberParser, Spanned, Statement, - }, - }, - T, +use crate::grammar::alpha050::expressions::parse_expr; +use crate::grammar::alpha050::parser::{ + default_recovery, + ident, }; +use crate::grammar::alpha050::{ + AmberParser, + Expression, + Spanned, + Statement, + Token, +}; +use crate::T; pub fn const_init_parser<'a>( stmnts: impl AmberParser<'a, Spanned>, diff --git a/src/grammar/alpha050/statements/failable_handlers.rs b/src/grammar/alpha050/statements/failable_handlers.rs index 
41df66e..02971d5 100644 --- a/src/grammar/alpha050/statements/failable_handlers.rs +++ b/src/grammar/alpha050/statements/failable_handlers.rs @@ -1,14 +1,20 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha050::{ - lexer::Token, - parser::{default_recovery, ident}, - statements::block::block_parser, - AmberParser, Block, FailableHandler, FailureHandler, Spanned, Statement, - }, - T, +use crate::grammar::alpha050::parser::{ + default_recovery, + ident, }; +use crate::grammar::alpha050::statements::block::block_parser; +use crate::grammar::alpha050::{ + AmberParser, + Block, + FailableHandler, + FailureHandler, + Spanned, + Statement, + Token, +}; +use crate::T; fn failure_parser<'a>( stmnts: impl AmberParser<'a, Spanned>, diff --git a/src/grammar/alpha050/statements/if_cond.rs b/src/grammar/alpha050/statements/if_cond.rs index 6b8431f..c4c918a 100644 --- a/src/grammar/alpha050/statements/if_cond.rs +++ b/src/grammar/alpha050/statements/if_cond.rs @@ -1,12 +1,19 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha050::{ - expressions::parse_expr, lexer::Token, parser::default_recovery, statements::comment, - AmberParser, Comment, ElseCondition, IfChainContent, IfCondition, Spanned, Statement, - }, - T, +use crate::grammar::alpha050::expressions::parse_expr; +use crate::grammar::alpha050::parser::default_recovery; +use crate::grammar::alpha050::statements::comment; +use crate::grammar::alpha050::{ + AmberParser, + Comment, + ElseCondition, + IfChainContent, + IfCondition, + Spanned, + Statement, + Token, }; +use crate::T; use super::block::block_parser; diff --git a/src/grammar/alpha050/statements/keywords.rs b/src/grammar/alpha050/statements/keywords.rs index 4defffa..15a0545 100644 --- a/src/grammar/alpha050/statements/keywords.rs +++ b/src/grammar/alpha050/statements/keywords.rs @@ -1,12 +1,15 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha050::{ - expressions::parse_expr, lexer::Token, parser::default_recovery, AmberParser, Expression, - Spanned, Statement, - }, - T, +use crate::grammar::alpha050::expressions::parse_expr; +use crate::grammar::alpha050::parser::default_recovery; +use crate::grammar::alpha050::{ + AmberParser, + Expression, + Spanned, + Statement, + Token, }; +use crate::T; pub fn keywords_parser<'a>( stmnts: impl AmberParser<'a, Spanned>, diff --git a/src/grammar/alpha050/statements/loops.rs b/src/grammar/alpha050/statements/loops.rs index b935a2a..1a8a901 100644 --- a/src/grammar/alpha050/statements/loops.rs +++ b/src/grammar/alpha050/statements/loops.rs @@ -1,14 +1,20 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha050::{ - expressions::parse_expr, - lexer::Token, - parser::{default_recovery, ident}, - AmberParser, Block, Expression, IterLoopVars, Spanned, Statement, - }, - T, +use crate::grammar::alpha050::expressions::parse_expr; +use crate::grammar::alpha050::parser::{ + default_recovery, + ident, }; +use crate::grammar::alpha050::{ + AmberParser, + Block, + Expression, + IterLoopVars, + Spanned, + Statement, + Token, +}; +use crate::T; use super::block::block_parser; diff --git a/src/grammar/alpha050/statements/mod.rs b/src/grammar/alpha050/statements/mod.rs index 7f3b9ae..c9c8b9a 100644 --- a/src/grammar/alpha050/statements/mod.rs +++ b/src/grammar/alpha050/statements/mod.rs @@ -1,8 +1,14 @@ use chumsky::prelude::*; -use crate::{grammar::Token, T}; +use crate::grammar::Token; +use crate::T; -use super::{expressions::parse_expr, AmberParser, Spanned, Statement}; +use super::expressions::parse_expr; +use super::{ + AmberParser, + 
Spanned, + Statement, +}; pub mod block; pub mod comment; diff --git a/src/grammar/alpha050/statements/modifiers.rs b/src/grammar/alpha050/statements/modifiers.rs index c918e0e..cfe495f 100644 --- a/src/grammar/alpha050/statements/modifiers.rs +++ b/src/grammar/alpha050/statements/modifiers.rs @@ -1,9 +1,12 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha050::{lexer::Token, AmberParser, CommandModifier, Spanned}, - T, +use crate::grammar::alpha050::{ + AmberParser, + CommandModifier, + Spanned, + Token, }; +use crate::T; pub fn modifier_parser<'a>() -> impl AmberParser<'a, Spanned> { choice(( diff --git a/src/grammar/alpha050/statements/move_files.rs b/src/grammar/alpha050/statements/move_files.rs index 758a623..39cdff2 100644 --- a/src/grammar/alpha050/statements/move_files.rs +++ b/src/grammar/alpha050/statements/move_files.rs @@ -1,14 +1,18 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha050::{ - expressions::parse_expr, lexer::Token, parser::default_recovery, AmberParser, Expression, - Spanned, Statement, - }, - T, +use crate::grammar::alpha050::expressions::parse_expr; +use crate::grammar::alpha050::parser::default_recovery; +use crate::grammar::alpha050::{ + AmberParser, + Expression, + Spanned, + Statement, + Token, }; +use crate::T; -use super::{failable_handlers::failable_handlers_parser, modifiers::modifier_parser}; +use super::failable_handlers::failable_handlers_parser; +use super::modifiers::modifier_parser; pub fn move_files_parser<'a>( stmnts: impl AmberParser<'a, Spanned>, diff --git a/src/grammar/alpha050/statements/shebang.rs b/src/grammar/alpha050/statements/shebang.rs index 5f30c88..4a16703 100644 --- a/src/grammar/alpha050/statements/shebang.rs +++ b/src/grammar/alpha050/statements/shebang.rs @@ -1,6 +1,11 @@ use chumsky::prelude::*; -use crate::grammar::alpha050::{lexer::Token, AmberParser, Spanned, Statement}; +use crate::grammar::alpha050::{ + AmberParser, + Spanned, + Statement, + Token, +}; pub fn shebang_parser<'a>() -> impl AmberParser<'a, Spanned> { any() diff --git a/src/grammar/alpha050/statements/shorthands.rs b/src/grammar/alpha050/statements/shorthands.rs index 5073327..ff83724 100644 --- a/src/grammar/alpha050/statements/shorthands.rs +++ b/src/grammar/alpha050/statements/shorthands.rs @@ -1,14 +1,18 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha050::{ - expressions::parse_expr, - lexer::Token, - parser::{default_recovery, ident}, - AmberParser, Expression, Spanned, Statement, - }, - T, +use crate::grammar::alpha050::expressions::parse_expr; +use crate::grammar::alpha050::parser::{ + default_recovery, + ident, }; +use crate::grammar::alpha050::{ + AmberParser, + Expression, + Spanned, + Statement, + Token, +}; +use crate::T; pub fn shorthand_parser<'a>( stmnts: impl AmberParser<'a, Spanned>, diff --git a/src/grammar/alpha050/statements/var_init.rs b/src/grammar/alpha050/statements/var_init.rs index 39f6c2d..ce1cae2 100644 --- a/src/grammar/alpha050/statements/var_init.rs +++ b/src/grammar/alpha050/statements/var_init.rs @@ -1,15 +1,19 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha050::{ - expressions::parse_expr, - global::type_parser, - lexer::Token, - parser::{default_recovery, ident}, - AmberParser, Spanned, Statement, VariableInitType, - }, - T, +use crate::grammar::alpha050::expressions::parse_expr; +use crate::grammar::alpha050::global::type_parser; +use crate::grammar::alpha050::parser::{ + default_recovery, + ident, }; +use crate::grammar::alpha050::{ + AmberParser, + Spanned, + Statement, + Token, + 
VariableInitType, +}; +use crate::T; pub fn var_init_parser<'a>( stmnts: impl AmberParser<'a, Spanned>, diff --git a/src/grammar/alpha050/statements/var_set.rs b/src/grammar/alpha050/statements/var_set.rs index 8e6e65f..178df48 100644 --- a/src/grammar/alpha050/statements/var_set.rs +++ b/src/grammar/alpha050/statements/var_set.rs @@ -1,14 +1,18 @@ use chumsky::prelude::*; -use crate::{ - grammar::alpha050::{ - expressions::parse_expr, - lexer::Token, - parser::{default_recovery, ident}, - AmberParser, Expression, Spanned, Statement, - }, - T, +use crate::grammar::alpha050::expressions::parse_expr; +use crate::grammar::alpha050::parser::{ + default_recovery, + ident, }; +use crate::grammar::alpha050::{ + AmberParser, + Expression, + Spanned, + Statement, + Token, +}; +use crate::T; pub fn var_set_parser<'a>( stmnts: impl AmberParser<'a, Spanned>, diff --git a/src/grammar/mod.rs b/src/grammar/mod.rs index ad5ee0a..2ee7daf 100644 --- a/src/grammar/mod.rs +++ b/src/grammar/mod.rs @@ -1,5 +1,10 @@ -use chumsky::{error::Rich, span::SimpleSpan}; -use std::fmt::{self, Debug, Display}; +use chumsky::error::Rich; +use chumsky::span::SimpleSpan; +use std::fmt::{ + self, + Debug, + Display, +}; pub mod alpha034; pub mod alpha035; diff --git a/src/main.rs b/src/main.rs index 0c82716..04f1cb3 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,11 +1,22 @@ -use std::{ - env::temp_dir, - process::{Command, Stdio}, +use std::env::temp_dir; +use std::process::{ + Command, + Stdio, }; -use amber_lsp::backend::{AmberVersion, Backend}; -use clap::{builder::PossibleValue, Parser, ValueEnum}; -use tower_lsp_server::{LspService, Server}; +use amber_lsp::backend::{ + AmberVersion, + Backend, +}; +use clap::builder::PossibleValue; +use clap::{ + Parser, + ValueEnum, +}; +use tower_lsp_server::{ + LspService, + Server, +}; use tracing::subscriber; use tracing_subscriber::fmt::format::FmtSpan; diff --git a/src/paths.rs b/src/paths.rs index 857a5b2..1fa0965 100644 --- a/src/paths.rs +++ b/src/paths.rs @@ -1,4 +1,7 @@ -use std::sync::{Arc, Mutex}; +use std::sync::{ + Arc, + Mutex, +}; use indexmap::IndexSet; use tower_lsp_server::lsp_types::Uri; diff --git a/src/stdlib.rs b/src/stdlib.rs index 4914585..abd4d02 100644 --- a/src/stdlib.rs +++ b/src/stdlib.rs @@ -1,16 +1,25 @@ -use std::{ - env::current_exe, - future::Future, - path::{Path, PathBuf}, - pin::Pin, +use std::env::current_exe; +use std::future::Future; +use std::path::{ + Path, + PathBuf, }; +use std::pin::Pin; use clap::builder::OsStr; -use include_dir::{include_dir, Dir, DirEntry}; -use tower_lsp_server::{lsp_types::Uri, UriExt}; +use include_dir::{ + include_dir, + Dir, + DirEntry, +}; +use tower_lsp_server::lsp_types::Uri; +use tower_lsp_server::UriExt; use tracing::warn; -use crate::backend::{AmberVersion, Backend}; +use crate::backend::{ + AmberVersion, + Backend, +}; pub const STDLIB: Dir = include_dir!("$CARGO_MANIFEST_DIR/resources/"); diff --git a/src/utils.rs b/src/utils.rs index 094e0a4..23ac563 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -1,6 +1,9 @@ use std::hash::BuildHasherDefault; -use dashmap::{DashMap, DashSet}; +use dashmap::{ + DashMap, + DashSet, +}; use rustc_hash::FxHasher; pub type FastDashMap = DashMap>; diff --git a/tests/analysis/alpha034.rs b/tests/analysis/alpha034.rs index 594af06..bece96a 100644 --- a/tests/analysis/alpha034.rs +++ b/tests/analysis/alpha034.rs @@ -1,13 +1,22 @@ -use std::{path::Path, sync::Arc}; +use std::path::Path; +use std::sync::Arc; -use amber_lsp::{ - analysis::{FunctionSymbol, SymbolType}, - 
backend::{AmberVersion, Backend}, - fs::MemoryFS, +use amber_lsp::analysis::{ + FunctionSymbol, + SymbolType, }; +use amber_lsp::backend::{ + AmberVersion, + Backend, +}; +use amber_lsp::fs::MemoryFS; use insta::assert_debug_snapshot; use tokio::test; -use tower_lsp_server::{lsp_types::Uri, LspService, UriExt}; +use tower_lsp_server::lsp_types::Uri; +use tower_lsp_server::{ + LspService, + UriExt, +}; #[test] async fn test_function_definition() { diff --git a/tests/analysis/alpha050.rs b/tests/analysis/alpha050.rs index 004293c..90fbaca 100644 --- a/tests/analysis/alpha050.rs +++ b/tests/analysis/alpha050.rs @@ -1,12 +1,15 @@ -use std::{path::Path, sync::Arc}; +use std::path::Path; +use std::sync::Arc; -use amber_lsp::{ - backend::{AmberVersion, Backend}, - fs::MemoryFS, +use amber_lsp::backend::{ + AmberVersion, + Backend, }; +use amber_lsp::fs::MemoryFS; use insta::assert_debug_snapshot; use tokio::test; -use tower_lsp_server::{lsp_types::Uri, UriExt}; +use tower_lsp_server::lsp_types::Uri; +use tower_lsp_server::UriExt; #[test] async fn test_simple_function() { diff --git a/tests/grammar/alpha034.rs b/tests/grammar/alpha034.rs index d29f208..a7dc2da 100644 --- a/tests/grammar/alpha034.rs +++ b/tests/grammar/alpha034.rs @@ -3,9 +3,15 @@ use std::fs::read_to_string; use chumsky::error::Rich; use insta::assert_debug_snapshot; +use amber_lsp::grammar::alpha034::semantic_tokens::semantic_tokens_from_ast; +use amber_lsp::grammar::alpha034::{ + AmberCompiler, + Spanned, +}; use amber_lsp::grammar::{ - alpha034::{lexer::Token, semantic_tokens::semantic_tokens_from_ast, AmberCompiler, Spanned}, - LSPAnalysis, ParserResponse, + LSPAnalysis, + ParserResponse, + Token, }; fn tokenize(input: &str) -> Vec> { @@ -664,6 +670,217 @@ fn test_lexer() { )); } +#[test] +fn test_lexer_double_dollar_edge_cases() { + let compiler = AmberCompiler::new(); + + // Test $$ followed by whitespace + assert_debug_snapshot!("double_dollar_whitespace", compiler.tokenize("$$ \n")); + + // Test $$ at end of input + assert_debug_snapshot!("double_dollar_end", compiler.tokenize("$$")); + + // Test $$ followed by identifier without space + assert_debug_snapshot!("double_dollar_no_space", compiler.tokenize("$$failed")); + + // Test $$ followed by { + assert_debug_snapshot!("double_dollar_brace", compiler.tokenize("$$ {var}$")); +} + +#[test] +fn test_lexer_string_escapes() { + let compiler = AmberCompiler::new(); + + // Test string with various escape sequences + assert_debug_snapshot!( + "string_escapes", + compiler.tokenize(r#""hello\nworld\t\"quote\"""#) + ); + + // Test string with backslash at end + assert_debug_snapshot!("string_trailing_backslash", compiler.tokenize(r#""test\""#)); + + // Test empty string + assert_debug_snapshot!("empty_string", compiler.tokenize(r#""""#)); +} + +#[test] +fn test_lexer_command_escapes() { + let compiler = AmberCompiler::new(); + + // Test command with escaped spaces + assert_debug_snapshot!( + "command_escapes", + compiler.tokenize(r#"$echo\ with\ spaces$"#) + ); + + // Test command with backslash + assert_debug_snapshot!("command_backslash", compiler.tokenize(r#"$test\n$"#)); +} + +#[test] +fn test_lexer_command_context_whitespace() { + let compiler = AmberCompiler::new(); + + // Test $$ command with leading whitespace after $$ + assert_debug_snapshot!("double_dollar_cmd_space", compiler.tokenize("$$ cmd$")); +} + +#[test] +fn test_lexer_dollar_at_end() { + let compiler = AmberCompiler::new(); + + // Test single $ at end of input (line 226 - None case) + 
assert_debug_snapshot!("dollar_at_end", compiler.tokenize("text$")); + + // Test $ followed by end of string + assert_debug_snapshot!("just_dollar", compiler.tokenize("$")); +} + +#[test] +fn test_lexer_dollar_followed_by_dollar() { + let compiler = AmberCompiler::new(); + + // Test first $ of $$ (line 217-218 - next_ch == '$' false branch) + assert_debug_snapshot!("dollar_dollar", compiler.tokenize("$$")); + + // Test $$$ (triple dollar) + assert_debug_snapshot!("triple_dollar", compiler.tokenize("$$$")); +} + +#[test] +fn test_lexer_dollar_prev_is_dollar() { + let compiler = AmberCompiler::new(); + + // Test second $ with whitespace after (lines 221-223) + assert_debug_snapshot!("dollar_dollar_space", compiler.tokenize("$$ ")); + + // Test second $ with non-whitespace after (line 222 - !next_ch.is_whitespace()) + assert_debug_snapshot!("dollar_dollar_char", compiler.tokenize("$$x")); +} + +#[test] +fn test_lexer_string_interpolation_edge_cases() { + let compiler = AmberCompiler::new(); + + // Test nested braces in string interpolation + assert_debug_snapshot!( + "string_nested_braces", + compiler.tokenize(r#""text {if true { "inner" } else { "other" }} end""#) + ); + + // Test multiple interpolations + assert_debug_snapshot!( + "string_multi_interpolation", + compiler.tokenize(r#""{a} and {b} and {c}""#) + ); + + // Test empty interpolation + assert_debug_snapshot!( + "string_empty_interpolation", + compiler.tokenize(r#""test {} end""#) + ); +} + +#[test] +fn test_lexer_command_interpolation_edge_cases() { + let compiler = AmberCompiler::new(); + + // Test nested braces in command interpolation + assert_debug_snapshot!( + "command_nested_braces", + compiler.tokenize(r#"$echo {if true { 1 } else { 0 }}$"#) + ); + + // Test command with multiple interpolations + assert_debug_snapshot!( + "command_multi_interpolation", + compiler.tokenize("$echo {a} {b} {c}$") + ); +} + +#[test] +fn test_lexer_context_transitions() { + let compiler = AmberCompiler::new(); + + // Test transitioning between contexts + assert_debug_snapshot!( + "context_string_to_main", + compiler.tokenize(r#""start {expr} end""#) + ); + + // Test command to main context + assert_debug_snapshot!("context_command_to_main", compiler.tokenize("$cmd {expr}$")); + + // Test multiple context switches + assert_debug_snapshot!( + "context_multiple_switches", + compiler.tokenize(r#""a {$b$} c""#) + ); +} + +#[test] +fn test_lexer_brace_depth_tracking() { + let compiler = AmberCompiler::new(); + + // Test brace depth increases and decreases correctly + assert_debug_snapshot!("brace_depth_simple", compiler.tokenize(r#""{{}}" "#)); + + // Test brace depth with nested interpolations + assert_debug_snapshot!( + "brace_depth_nested", + compiler.tokenize(r#""outer { "inner {x}" }""#) + ); + + // Test braces outside interpolation context + assert_debug_snapshot!( + "brace_no_interpolation", + compiler.tokenize("{ let x = {} }") + ); +} + +#[test] +fn test_lexer_error_recovery() { + let compiler = AmberCompiler::new(); + + // Test with various malformed inputs + assert_debug_snapshot!("unclosed_string", compiler.tokenize(r#""unclosed"#)); + + // Test with unclosed command + assert_debug_snapshot!("unclosed_command", compiler.tokenize("$echo test")); + + // Test with mismatched braces + assert_debug_snapshot!("mismatched_braces", compiler.tokenize(r#""{{{""#)); +} + +#[test] +fn test_lexer_single_dollar_variations() { + let compiler = AmberCompiler::new(); + + // Test single $ starting command + assert_debug_snapshot!("single_dollar_cmd", 
compiler.tokenize("$echo$")); + + // Test $ not starting command (followed by whitespace in $$ case) + assert_debug_snapshot!("dollar_no_cmd", compiler.tokenize("$ ")); + + // Test $ followed by another $ + assert_debug_snapshot!("dollar_followed_by_dollar", compiler.tokenize("$$$")); +} + +#[test] +fn test_lexer_edge_case_positions() { + let compiler = AmberCompiler::new(); + + // Test token at very end of input + assert_debug_snapshot!("token_at_end", compiler.tokenize("let x")); + + // Test empty input + assert_debug_snapshot!("empty_input", compiler.tokenize("")); + + // Test single character + assert_debug_snapshot!("single_char", compiler.tokenize("x")); +} + #[test] fn test_stdlib() { let stdlib = read_to_string("resources/alpha034/std/main.ab").unwrap(); diff --git a/tests/grammar/alpha035.rs b/tests/grammar/alpha035.rs index dcfdc62..c1011b9 100644 --- a/tests/grammar/alpha035.rs +++ b/tests/grammar/alpha035.rs @@ -3,9 +3,15 @@ use std::fs::read_to_string; use chumsky::error::Rich; use insta::assert_debug_snapshot; +use amber_lsp::grammar::alpha035::{ + AmberCompiler, + GlobalStatement, + Spanned, +}; use amber_lsp::grammar::{ - alpha035::{lexer::Token, AmberCompiler, GlobalStatement, Spanned}, - LSPAnalysis, ParserResponse, + LSPAnalysis, + ParserResponse, + Token, }; fn tokenize(input: &str) -> Vec> { @@ -33,6 +39,17 @@ fn parse_unwrap(tokens: &[Spanned]) -> Vec> { parse(tokens).0.unwrap() } +#[test] +fn test_numbers() { + let input = r#" + 2 + 2.4 + .2 + "#; + + assert_debug_snapshot!(parse_unwrap(&tokenize(input))); +} + #[test] fn test_stdlib_array() { let stdlib = read_to_string("resources/alpha035/std/array.ab").unwrap(); @@ -81,3 +98,111 @@ fn test_stdlib_text() { assert_debug_snapshot!(parse_unwrap(&tokenize(&stdlib))); } + +#[test] +fn test_lexer_double_dollar_edge_cases() { + let compiler = AmberCompiler::new(); + + assert_debug_snapshot!("double_dollar_whitespace", compiler.tokenize("$$ \n")); + assert_debug_snapshot!("double_dollar_end", compiler.tokenize("$$")); + assert_debug_snapshot!("double_dollar_no_space", compiler.tokenize("$$failed")); + assert_debug_snapshot!("double_dollar_brace", compiler.tokenize("$$ {var}$")); +} + +#[test] +fn test_lexer_string_escapes() { + let compiler = AmberCompiler::new(); + + assert_debug_snapshot!( + "string_escapes", + compiler.tokenize(r#""hello\nworld\t\"quote\"""#) + ); + assert_debug_snapshot!("string_trailing_backslash", compiler.tokenize(r#""test\""#)); + assert_debug_snapshot!("empty_string", compiler.tokenize(r#""""#)); +} + +#[test] +fn test_lexer_command_escapes() { + let compiler = AmberCompiler::new(); + + assert_debug_snapshot!( + "command_escapes", + compiler.tokenize(r#"$echo\ with\ spaces$"#) + ); + assert_debug_snapshot!("command_backslash", compiler.tokenize(r#"$test\n$"#)); +} + +#[test] +fn test_lexer_interpolation_edge_cases() { + let compiler = AmberCompiler::new(); + + assert_debug_snapshot!( + "string_nested_braces", + compiler.tokenize(r#""text {if true { "inner" } else { "other" }} end""#) + ); + assert_debug_snapshot!( + "string_multi_interpolation", + compiler.tokenize(r#""{a} and {b} and {c}""#) + ); + assert_debug_snapshot!( + "command_nested_braces", + compiler.tokenize(r#"$echo {if true { 1 } else { 0 }}$"#) + ); +} + +#[test] +fn test_lexer_dollar_at_end() { + let compiler = AmberCompiler::new(); + + assert_debug_snapshot!("dollar_at_end", compiler.tokenize("text$")); + assert_debug_snapshot!("just_dollar", compiler.tokenize("$")); +} + +#[test] +fn test_lexer_dollar_followed_by_dollar() { + let 
compiler = AmberCompiler::new(); + + assert_debug_snapshot!("dollar_dollar", compiler.tokenize("$$")); + assert_debug_snapshot!("triple_dollar", compiler.tokenize("$$$")); +} + +#[test] +fn test_lexer_dollar_prev_is_dollar() { + let compiler = AmberCompiler::new(); + + assert_debug_snapshot!("dollar_dollar_space", compiler.tokenize("$$ ")); + assert_debug_snapshot!("dollar_dollar_char", compiler.tokenize("$$x")); +} + +#[test] +fn test_lexer_context_transitions() { + let compiler = AmberCompiler::new(); + + assert_debug_snapshot!( + "context_string_to_main", + compiler.tokenize(r#""start {expr} end""#) + ); + assert_debug_snapshot!("context_command_to_main", compiler.tokenize("$cmd {expr}$")); + assert_debug_snapshot!( + "context_multiple_switches", + compiler.tokenize(r#""a {$b$} c""#) + ); +} + +#[test] +fn test_lexer_error_recovery() { + let compiler = AmberCompiler::new(); + + assert_debug_snapshot!("unclosed_string", compiler.tokenize(r#""unclosed"#)); + assert_debug_snapshot!("unclosed_command", compiler.tokenize("$echo test")); + assert_debug_snapshot!("mismatched_braces", compiler.tokenize(r#""{{{""#)); +} + +#[test] +fn test_lexer_edge_cases() { + let compiler = AmberCompiler::new(); + + assert_debug_snapshot!("empty_input", compiler.tokenize("")); + assert_debug_snapshot!("single_char", compiler.tokenize("x")); + assert_debug_snapshot!("single_dollar_cmd", compiler.tokenize("$echo$")); +} diff --git a/tests/grammar/alpha040.rs b/tests/grammar/alpha040.rs index e26edbb..88e937c 100644 --- a/tests/grammar/alpha040.rs +++ b/tests/grammar/alpha040.rs @@ -3,9 +3,15 @@ use std::fs::read_to_string; use chumsky::error::Rich; use insta::assert_debug_snapshot; +use amber_lsp::grammar::alpha040::{ + AmberCompiler, + GlobalStatement, + Spanned, +}; use amber_lsp::grammar::{ - alpha040::{lexer::Token, AmberCompiler, GlobalStatement, Spanned}, - LSPAnalysis, ParserResponse, + LSPAnalysis, + ParserResponse, + Token, }; fn tokenize(input: &str) -> Vec> { @@ -37,6 +43,17 @@ fn parse_unwrap(tokens: &[Spanned]) -> Vec> { ast.unwrap() } +#[test] +fn test_numbers() { + let input = r#" + 2 + 2.4 + .2 + "#; + + assert_debug_snapshot!(parse_unwrap(&tokenize(input))); +} + #[test] fn test_stdlib_array() { let stdlib = read_to_string("resources/alpha040/std/array.ab").unwrap(); @@ -146,3 +163,111 @@ fn test_comments_in_ifs() { assert_debug_snapshot!(parse(&tokenize(input))); } + +#[test] +fn test_lexer_double_dollar_edge_cases() { + let compiler = AmberCompiler::new(); + + assert_debug_snapshot!("double_dollar_whitespace", compiler.tokenize("$$ \n")); + assert_debug_snapshot!("double_dollar_end", compiler.tokenize("$$")); + assert_debug_snapshot!("double_dollar_no_space", compiler.tokenize("$$failed")); + assert_debug_snapshot!("double_dollar_brace", compiler.tokenize("$$ {var}$")); +} + +#[test] +fn test_lexer_string_escapes() { + let compiler = AmberCompiler::new(); + + assert_debug_snapshot!( + "string_escapes", + compiler.tokenize(r#""hello\nworld\t\"quote\"""#) + ); + assert_debug_snapshot!("string_trailing_backslash", compiler.tokenize(r#""test\""#)); + assert_debug_snapshot!("empty_string", compiler.tokenize(r#""""#)); +} + +#[test] +fn test_lexer_command_escapes() { + let compiler = AmberCompiler::new(); + + assert_debug_snapshot!( + "command_escapes", + compiler.tokenize(r#"$echo\ with\ spaces$"#) + ); + assert_debug_snapshot!("command_backslash", compiler.tokenize(r#"$test\n$"#)); +} + +#[test] +fn test_lexer_interpolation_edge_cases() { + let compiler = AmberCompiler::new(); + + 
assert_debug_snapshot!( + "string_nested_braces", + compiler.tokenize(r#""text {if true { "inner" } else { "other" }} end""#) + ); + assert_debug_snapshot!( + "string_multi_interpolation", + compiler.tokenize(r#""{a} and {b} and {c}""#) + ); + assert_debug_snapshot!( + "command_nested_braces", + compiler.tokenize(r#"$echo {if true { 1 } else { 0 }}$"#) + ); +} + +#[test] +fn test_lexer_dollar_at_end() { + let compiler = AmberCompiler::new(); + + assert_debug_snapshot!("dollar_at_end", compiler.tokenize("text$")); + assert_debug_snapshot!("just_dollar", compiler.tokenize("$")); +} + +#[test] +fn test_lexer_dollar_followed_by_dollar() { + let compiler = AmberCompiler::new(); + + assert_debug_snapshot!("dollar_dollar", compiler.tokenize("$$")); + assert_debug_snapshot!("triple_dollar", compiler.tokenize("$$$")); +} + +#[test] +fn test_lexer_dollar_prev_is_dollar() { + let compiler = AmberCompiler::new(); + + assert_debug_snapshot!("dollar_dollar_space", compiler.tokenize("$$ ")); + assert_debug_snapshot!("dollar_dollar_char", compiler.tokenize("$$x")); +} + +#[test] +fn test_lexer_context_transitions() { + let compiler = AmberCompiler::new(); + + assert_debug_snapshot!( + "context_string_to_main", + compiler.tokenize(r#""start {expr} end""#) + ); + assert_debug_snapshot!("context_command_to_main", compiler.tokenize("$cmd {expr}$")); + assert_debug_snapshot!( + "context_multiple_switches", + compiler.tokenize(r#""a {$b$} c""#) + ); +} + +#[test] +fn test_lexer_error_recovery() { + let compiler = AmberCompiler::new(); + + assert_debug_snapshot!("unclosed_string", compiler.tokenize(r#""unclosed"#)); + assert_debug_snapshot!("unclosed_command", compiler.tokenize("$echo test")); + assert_debug_snapshot!("mismatched_braces", compiler.tokenize(r#""{{{""#)); +} + +#[test] +fn test_lexer_edge_cases() { + let compiler = AmberCompiler::new(); + + assert_debug_snapshot!("empty_input", compiler.tokenize("")); + assert_debug_snapshot!("single_char", compiler.tokenize("x")); + assert_debug_snapshot!("single_dollar_cmd", compiler.tokenize("$echo$")); +} diff --git a/tests/grammar/alpha050.rs b/tests/grammar/alpha050.rs index f6884cb..86667d0 100644 --- a/tests/grammar/alpha050.rs +++ b/tests/grammar/alpha050.rs @@ -3,9 +3,15 @@ use std::fs::read_to_string; use chumsky::error::Rich; use insta::assert_debug_snapshot; +use amber_lsp::grammar::alpha050::{ + AmberCompiler, + GlobalStatement, + Spanned, + Token, +}; use amber_lsp::grammar::{ - alpha050::{lexer::Token, AmberCompiler, GlobalStatement, Spanned}, - LSPAnalysis, ParserResponse, + LSPAnalysis, + ParserResponse, }; fn tokenize(input: &str) -> Vec> { @@ -37,6 +43,17 @@ fn parse_unwrap(tokens: &[Spanned]) -> Vec> { ast.unwrap() } +#[test] +fn test_numbers() { + let input = r#" + 2 + 2.4 + .2 + "#; + + assert_debug_snapshot!(parse_unwrap(&tokenize(input))); +} + #[test] fn test_stdlib_array() { let stdlib = read_to_string("resources/alpha050/std/array.ab").unwrap(); @@ -203,3 +220,111 @@ fn test_mv_files() { assert_debug_snapshot!(parse_unwrap(&tokenize(input))); } + +#[test] +fn test_lexer_double_dollar_edge_cases() { + let compiler = AmberCompiler::new(); + + assert_debug_snapshot!("double_dollar_whitespace", compiler.tokenize("$$ \n")); + assert_debug_snapshot!("double_dollar_end", compiler.tokenize("$$")); + assert_debug_snapshot!("double_dollar_no_space", compiler.tokenize("$$failed")); + assert_debug_snapshot!("double_dollar_brace", compiler.tokenize("$$ {var}$")); +} + +#[test] +fn test_lexer_string_escapes() { + let compiler = AmberCompiler::new(); 
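+    // The alpha050 lexer emits each escape sequence (e.g. `\"`, `\\`) as a single
+    // raw token; the snapshots below pin that behaviour for string literals.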
+ + assert_debug_snapshot!( + "string_escapes", + compiler.tokenize(r#""hello\nworld\t\"quote\"""#) + ); + assert_debug_snapshot!("string_trailing_backslash", compiler.tokenize(r#""test\""#)); + assert_debug_snapshot!("empty_string", compiler.tokenize(r#""""#)); +} + +#[test] +fn test_lexer_command_escapes() { + let compiler = AmberCompiler::new(); + + assert_debug_snapshot!( + "command_escapes", + compiler.tokenize(r#"$echo\ with\ spaces$"#) + ); + assert_debug_snapshot!("command_backslash", compiler.tokenize(r#"$test\n$"#)); +} + +#[test] +fn test_lexer_interpolation_edge_cases() { + let compiler = AmberCompiler::new(); + + assert_debug_snapshot!( + "string_nested_braces", + compiler.tokenize(r#""text {if true { "inner" } else { "other" }} end""#) + ); + assert_debug_snapshot!( + "string_multi_interpolation", + compiler.tokenize(r#""{a} and {b} and {c}""#) + ); + assert_debug_snapshot!( + "command_nested_braces", + compiler.tokenize(r#"$echo {if true { 1 } else { 0 }}$"#) + ); +} + +#[test] +fn test_lexer_dollar_at_end() { + let compiler = AmberCompiler::new(); + + assert_debug_snapshot!("dollar_at_end", compiler.tokenize("text$")); + assert_debug_snapshot!("just_dollar", compiler.tokenize("$")); +} + +#[test] +fn test_lexer_dollar_followed_by_dollar() { + let compiler = AmberCompiler::new(); + + assert_debug_snapshot!("dollar_dollar", compiler.tokenize("$$")); + assert_debug_snapshot!("triple_dollar", compiler.tokenize("$$$")); +} + +#[test] +fn test_lexer_dollar_prev_is_dollar() { + let compiler = AmberCompiler::new(); + + assert_debug_snapshot!("dollar_dollar_space", compiler.tokenize("$$ ")); + assert_debug_snapshot!("dollar_dollar_char", compiler.tokenize("$$x")); +} + +#[test] +fn test_lexer_context_transitions() { + let compiler = AmberCompiler::new(); + + assert_debug_snapshot!( + "context_string_to_main", + compiler.tokenize(r#""start {expr} end""#) + ); + assert_debug_snapshot!("context_command_to_main", compiler.tokenize("$cmd {expr}$")); + assert_debug_snapshot!( + "context_multiple_switches", + compiler.tokenize(r#""a {$b$} c""#) + ); +} + +#[test] +fn test_lexer_error_recovery() { + let compiler = AmberCompiler::new(); + + assert_debug_snapshot!("unclosed_string", compiler.tokenize(r#""unclosed"#)); + assert_debug_snapshot!("unclosed_command", compiler.tokenize("$echo test")); + assert_debug_snapshot!("mismatched_braces", compiler.tokenize(r#""{{{""#)); +} + +#[test] +fn test_lexer_edge_cases() { + let compiler = AmberCompiler::new(); + + assert_debug_snapshot!("empty_input", compiler.tokenize("")); + assert_debug_snapshot!("single_char", compiler.tokenize("x")); + assert_debug_snapshot!("single_dollar_cmd", compiler.tokenize("$echo$")); +} diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__array-10.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__array-10.snap index d17739c..97b90e3 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha034__array-10.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__array-10.snap @@ -74,8 +74,8 @@ expression: "parse(&tokenize(\"[1, 2 3 let\"))" ], ), [ - found "3" at 6..7 expected '"."', '"as"', '"is"', '"*"', '"/"', '"%"', '"+"', '"-"', '">="', '">"', '"<="', '"<"', '"=="', '"!="', '"and"', '"or"', '".."', '"then"', or '","', - found "let" at 8..11 expected '"."', '"as"', '"is"', '"*"', '"/"', '"%"', '"+"', '"-"', '">="', '">"', '"<="', '"<"', '"=="', '"!="', '"and"', '"or"', '".."', '"then"', '","', or '"]"', + found "3" at 6..7 expected '"as"', '"is"', '"*"', '"/"', '"%"', '"+"', '"-"', '">="', '">"', 
'"<="', '"<"', '"=="', '"!="', '"and"', '"or"', '".."', '"then"', or '","', + found "let" at 8..11 expected '"as"', '"is"', '"*"', '"/"', '"%"', '"+"', '"-"', '">="', '">"', '"<="', '"<"', '"=="', '"!="', '"and"', '"or"', '".."', '"then"', '","', or '"]"', found end of input at 8..11 expected something else, found end of input at 11..11 expected '"="', found end of input at 11..11 expected "type", or "expression", diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__array-11.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__array-11.snap index ff3365b..e909bd1 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha034__array-11.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__array-11.snap @@ -71,6 +71,6 @@ expression: "parse(&tokenize(\"[1, 2 3] 4\"))" ], ), [ - found "3" at 6..7 expected '"."', '"as"', '"is"', '"*"', '"/"', '"%"', '"+"', '"-"', '">="', '">"', '"<="', '"<"', '"=="', '"!="', '"and"', '"or"', '".."', '"then"', or '","', + found "3" at 6..7 expected '"as"', '"is"', '"*"', '"/"', '"%"', '"+"', '"-"', '">="', '">"', '"<="', '"<"', '"=="', '"!="', '"and"', '"or"', '".."', '"then"', or '","', ], ) diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__array-5.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__array-5.snap index 10b23ad..d4d6343 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha034__array-5.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__array-5.snap @@ -34,6 +34,6 @@ expression: "parse(&tokenize(\"[1\"))" ], ), [ - found end of input at 2..2 expected '"."', '"as"', '"is"', '"*"', '"/"', '"%"', '"+"', '"-"', '">="', '">"', '"<="', '"<"', '"=="', '"!="', '"and"', '"or"', '".."', '"then"', '","', or '"]"', + found end of input at 2..2 expected '"as"', '"is"', '"*"', '"/"', '"%"', '"+"', '"-"', '">="', '">"', '"<="', '"<"', '"=="', '"!="', '"and"', '"or"', '".."', '"then"', '","', or '"]"', ], ) diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__array-8.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__array-8.snap index 8ba7aa6..d8a4107 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha034__array-8.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__array-8.snap @@ -43,6 +43,6 @@ expression: "parse(&tokenize(\"[1, 2\"))" ], ), [ - found end of input at 5..5 expected '"."', '"as"', '"is"', '"*"', '"/"', '"%"', '"+"', '"-"', '">="', '">"', '"<="', '"<"', '"=="', '"!="', '"and"', '"or"', '".."', '"then"', '","', or '"]"', + found end of input at 5..5 expected '"as"', '"is"', '"*"', '"/"', '"%"', '"+"', '"-"', '">="', '">"', '"<="', '"<"', '"=="', '"!="', '"and"', '"or"', '".."', '"then"', '","', or '"]"', ], ) diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__array-9.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__array-9.snap index 183b99b..4bb067e 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha034__array-9.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__array-9.snap @@ -52,7 +52,7 @@ expression: "parse(&tokenize(\"[1, 2 3\"))" ], ), [ - found "3" at 6..7 expected '"."', '"as"', '"is"', '"*"', '"/"', '"%"', '"+"', '"-"', '">="', '">"', '"<="', '"<"', '"=="', '"!="', '"and"', '"or"', '".."', '"then"', or '","', - found end of input at 7..7 expected '"."', '"as"', '"is"', '"*"', '"/"', '"%"', '"+"', '"-"', '">="', '">"', '"<="', '"<"', '"=="', '"!="', '"and"', '"or"', '".."', '"then"', '","', or '"]"', + found "3" at 6..7 expected '"as"', '"is"', '"*"', '"/"', '"%"', '"+"', '"-"', '">="', '">"', '"<="', '"<"', '"=="', 
'"!="', '"and"', '"or"', '".."', '"then"', or '","', + found end of input at 7..7 expected '"as"', '"is"', '"*"', '"/"', '"%"', '"+"', '"-"', '">="', '">"', '"<="', '"<"', '"=="', '"!="', '"and"', '"or"', '".."', '"then"', '","', or '"]"', ], ) diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__brace_depth_nested.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__brace_depth_nested.snap new file mode 100644 index 0000000..1c0f83f --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__brace_depth_nested.snap @@ -0,0 +1,72 @@ +--- +source: tests/grammar/alpha034.rs +expression: "compiler.tokenize(r#\"\"outer { \"inner {x}\" }\"\"#)" +--- +[ + ( + Token( + "\"", + ), + 0..1, + ), + ( + Token( + "outer ", + ), + 1..7, + ), + ( + Token( + "{", + ), + 7..8, + ), + ( + Token( + "\"", + ), + 9..10, + ), + ( + Token( + "inner ", + ), + 10..16, + ), + ( + Token( + "{", + ), + 16..17, + ), + ( + Token( + "x", + ), + 17..18, + ), + ( + Token( + "}", + ), + 18..19, + ), + ( + Token( + "\"", + ), + 19..20, + ), + ( + Token( + "}", + ), + 21..22, + ), + ( + Token( + "\"", + ), + 22..23, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__brace_depth_simple.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__brace_depth_simple.snap new file mode 100644 index 0000000..8400de9 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__brace_depth_simple.snap @@ -0,0 +1,42 @@ +--- +source: tests/grammar/alpha034.rs +expression: "compiler.tokenize(r#\"\"{{}}\" \"#)" +--- +[ + ( + Token( + "\"", + ), + 0..1, + ), + ( + Token( + "{", + ), + 1..2, + ), + ( + Token( + "{", + ), + 2..3, + ), + ( + Token( + "}", + ), + 3..4, + ), + ( + Token( + "}", + ), + 4..5, + ), + ( + Token( + "\"", + ), + 5..6, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__brace_no_interpolation.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__brace_no_interpolation.snap new file mode 100644 index 0000000..819a462 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__brace_no_interpolation.snap @@ -0,0 +1,48 @@ +--- +source: tests/grammar/alpha034.rs +expression: "compiler.tokenize(\"{ let x = {} }\")" +--- +[ + ( + Token( + "{", + ), + 0..1, + ), + ( + Token( + "let", + ), + 2..5, + ), + ( + Token( + "x", + ), + 6..7, + ), + ( + Token( + "=", + ), + 8..9, + ), + ( + Token( + "{", + ), + 10..11, + ), + ( + Token( + "}", + ), + 11..12, + ), + ( + Token( + "}", + ), + 13..14, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__command-2.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__command-2.snap index 9aefd95..27c560f 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha034__command-2.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__command-2.snap @@ -19,27 +19,9 @@ expression: "parse_unwrap(&tokenize(\"$echo \\\"Hello, {name}!\\\"$\"))" ), ( Text( - "echo", + "echo \"Hello, ", ), - 1..5, - ), - ( - Text( - "\"", - ), - 6..7, - ), - ( - Text( - "Hello", - ), - 7..12, - ), - ( - Text( - ",", - ), - 12..13, + 1..14, ), ( Expression( @@ -57,15 +39,9 @@ expression: "parse_unwrap(&tokenize(\"$echo \\\"Hello, {name}!\\\"$\"))" ), ( Text( - "!", - ), - 20..21, - ), - ( - Text( - "\"", + "!\"", ), - 21..22, + 20..22, ), ( Text( diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__command-3.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__command-3.snap index 1af2b9d..3bc55d8 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha034__command-3.snap +++ 
b/tests/grammar/snapshots/r#mod__grammar__alpha034__command-3.snap @@ -21,21 +21,9 @@ expression: "parse(&tokenize(\"$command --arg1 -v$\"))" ), ( Text( - "command", + "command --arg1 -v", ), - 1..8, - ), - ( - CommandOption( - "--arg1", - ), - 9..15, - ), - ( - CommandOption( - "-v", - ), - 16..18, + 1..18, ), ( Text( diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__command-4.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__command-4.snap index 02febc3..9c86e55 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha034__command-4.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__command-4.snap @@ -21,15 +21,9 @@ expression: "parse(&tokenize(\"$command -$\"))" ), ( Text( - "command", + "command -", ), - 1..8, - ), - ( - CommandOption( - "-", - ), - 9..10, + 1..10, ), ( Text( diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__command-5.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__command-5.snap index 685dda4..cc8198c 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha034__command-5.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__command-5.snap @@ -21,21 +21,9 @@ expression: "parse(&tokenize(\"$command --arg1 -v\"))" ), ( Text( - "command", + "command --arg1 -v", ), - 1..8, - ), - ( - CommandOption( - "--arg1", - ), - 9..15, - ), - ( - CommandOption( - "-v", - ), - 16..18, + 1..18, ), ( Text( diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__command-6.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__command-6.snap index 50ffaa9..ad7eaa1 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha034__command-6.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__command-6.snap @@ -21,9 +21,9 @@ expression: "parse(&tokenize(\"$command {unclosed\"))" ), ( Text( - "command", + "command ", ), - 1..8, + 1..9, ), ( Expression( diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__command-7.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__command-7.snap index c52f790..76fc3d2 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha034__command-7.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__command-7.snap @@ -21,9 +21,9 @@ expression: "parse(&tokenize(\"$command {unclosed interpolation$ let x = 10\"))" ), ( Text( - "command", + "command ", ), - 1..8, + 1..9, ), ( Expression( @@ -65,38 +65,16 @@ expression: "parse(&tokenize(\"$command {unclosed interpolation$ let x = 10\"))" ( Statement( ( - VariableInit( - ( - "let", - 34..37, - ), - ( - "x", - 38..39, - ), - ( - Expression( - ( - Number( - ( - 10.0, - 42..44, - ), - ), - 42..44, - ), - ), - 42..44, - ), - ), - 34..44, + Error, + 33..44, ), ), - 34..44, + 33..44, ), ], ), [ found "interpolation" at 19..32 expected '"("', '"as"', '"is"', '"*"', '"/"', '"%"', '"+"', '"-"', '">="', '">"', '"<="', '"<"', '"=="', '"!="', '"and"', '"or"', '".."', '"then"', or '"}"', + found end of input at 33..44 expected something else, ], ) diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__command-8.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__command-8.snap index d27c196..80a2177 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha034__command-8.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__command-8.snap @@ -21,9 +21,9 @@ expression: "parse(&tokenize(\"$command {\"))" ), ( Text( - "command", + "command ", ), - 1..8, + 1..9, ), ( Expression( diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__command-9.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__command-9.snap 
index 2a2ea51..6e10c52 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha034__command-9.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__command-9.snap @@ -21,9 +21,9 @@ expression: "parse(&tokenize(\"$command {}$\"))" ), ( Text( - "command", + "command ", ), - 1..8, + 1..9, ), ( Expression( diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__command.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__command.snap index a534375..b3fbd40 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha034__command.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__command.snap @@ -19,43 +19,25 @@ expression: "parse_unwrap(&tokenize(r#\"$echo \\\"Hello, world!\\\"$\"#))" ), ( Text( - "echo", + "echo ", ), - 1..5, - ), - ( - Escape( - "\"", - ), - 6..8, - ), - ( - Text( - "Hello", - ), - 8..13, + 1..6, ), ( Text( - ",", + "\\\"", ), - 13..14, + 6..8, ), ( Text( - "world", + "Hello, world!", ), - 15..20, + 8..21, ), ( Text( - "!", - ), - 20..21, - ), - ( - Escape( - "\"", + "\\\"", ), 21..23, ), diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__command_backslash.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__command_backslash.snap new file mode 100644 index 0000000..17e849b --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__command_backslash.snap @@ -0,0 +1,30 @@ +--- +source: tests/grammar/alpha034.rs +expression: "compiler.tokenize(r#\"$test\\n$\"#)" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "test", + ), + 1..5, + ), + ( + Token( + "\\n", + ), + 5..7, + ), + ( + Token( + "$", + ), + 7..8, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__command_escapes.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__command_escapes.snap new file mode 100644 index 0000000..a1385e8 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__command_escapes.snap @@ -0,0 +1,48 @@ +--- +source: tests/grammar/alpha034.rs +expression: "compiler.tokenize(r#\"$echo\\ with\\ spaces$\"#)" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "echo", + ), + 1..5, + ), + ( + Token( + "\\ ", + ), + 5..7, + ), + ( + Token( + "with", + ), + 7..11, + ), + ( + Token( + "\\ ", + ), + 11..13, + ), + ( + Token( + "spaces", + ), + 13..19, + ), + ( + Token( + "$", + ), + 19..20, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__command_interpolation.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__command_interpolation.snap new file mode 100644 index 0000000..947eae4 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__command_interpolation.snap @@ -0,0 +1,42 @@ +--- +source: tests/grammar/alpha034.rs +expression: "compiler.tokenize(\"$echo {var}$\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "echo ", + ), + 1..6, + ), + ( + Token( + "{", + ), + 6..7, + ), + ( + Token( + "var", + ), + 7..10, + ), + ( + Token( + "}", + ), + 10..11, + ), + ( + Token( + "$", + ), + 11..12, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__command_multi_interpolation.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__command_multi_interpolation.snap new file mode 100644 index 0000000..0fbbdf2 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__command_multi_interpolation.snap @@ -0,0 +1,90 @@ +--- +source: tests/grammar/alpha034.rs +expression: "compiler.tokenize(\"$echo {a} {b} {c}$\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "echo ", + ), + 1..6, + ), + ( + Token( + "{", + ), 
+ 6..7, + ), + ( + Token( + "a", + ), + 7..8, + ), + ( + Token( + "}", + ), + 8..9, + ), + ( + Token( + " ", + ), + 9..10, + ), + ( + Token( + "{", + ), + 10..11, + ), + ( + Token( + "b", + ), + 11..12, + ), + ( + Token( + "}", + ), + 12..13, + ), + ( + Token( + " ", + ), + 13..14, + ), + ( + Token( + "{", + ), + 14..15, + ), + ( + Token( + "c", + ), + 15..16, + ), + ( + Token( + "}", + ), + 16..17, + ), + ( + Token( + "$", + ), + 17..18, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__command_nested_braces.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__command_nested_braces.snap new file mode 100644 index 0000000..8884745 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__command_nested_braces.snap @@ -0,0 +1,90 @@ +--- +source: tests/grammar/alpha034.rs +expression: "compiler.tokenize(r#\"$echo {if true { 1 } else { 0 }}$\"#)" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "echo ", + ), + 1..6, + ), + ( + Token( + "{", + ), + 6..7, + ), + ( + Token( + "if", + ), + 7..9, + ), + ( + Token( + "true", + ), + 10..14, + ), + ( + Token( + "{", + ), + 15..16, + ), + ( + Token( + "1", + ), + 17..18, + ), + ( + Token( + "}", + ), + 19..20, + ), + ( + Token( + "else", + ), + 21..25, + ), + ( + Token( + "{", + ), + 26..27, + ), + ( + Token( + "0", + ), + 28..29, + ), + ( + Token( + "}", + ), + 30..31, + ), + ( + Token( + "}", + ), + 31..32, + ), + ( + Token( + "$", + ), + 32..33, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__context_command_to_main.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__context_command_to_main.snap new file mode 100644 index 0000000..7e51f95 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__context_command_to_main.snap @@ -0,0 +1,42 @@ +--- +source: tests/grammar/alpha034.rs +expression: "compiler.tokenize(\"$cmd {expr}$\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "cmd ", + ), + 1..5, + ), + ( + Token( + "{", + ), + 5..6, + ), + ( + Token( + "expr", + ), + 6..10, + ), + ( + Token( + "}", + ), + 10..11, + ), + ( + Token( + "$", + ), + 11..12, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__context_multiple_switches.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__context_multiple_switches.snap new file mode 100644 index 0000000..0886c16 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__context_multiple_switches.snap @@ -0,0 +1,60 @@ +--- +source: tests/grammar/alpha034.rs +expression: "compiler.tokenize(r#\"\"a {$b$} c\"\"#)" +--- +[ + ( + Token( + "\"", + ), + 0..1, + ), + ( + Token( + "a ", + ), + 1..3, + ), + ( + Token( + "{", + ), + 3..4, + ), + ( + Token( + "$", + ), + 4..5, + ), + ( + Token( + "b", + ), + 5..6, + ), + ( + Token( + "$", + ), + 6..7, + ), + ( + Token( + "}", + ), + 7..8, + ), + ( + Token( + " c", + ), + 8..10, + ), + ( + Token( + "\"", + ), + 10..11, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__context_string_to_main.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__context_string_to_main.snap new file mode 100644 index 0000000..e86fe3c --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__context_string_to_main.snap @@ -0,0 +1,48 @@ +--- +source: tests/grammar/alpha034.rs +expression: "compiler.tokenize(r#\"\"start {expr} end\"\"#)" +--- +[ + ( + Token( + "\"", + ), + 0..1, + ), + ( + Token( + "start ", + ), + 1..7, + ), + ( + Token( + "{", + ), + 7..8, + ), + ( + Token( + "expr", + ), + 8..12, + ), + ( + Token( + 
"}", + ), + 12..13, + ), + ( + Token( + " end", + ), + 13..17, + ), + ( + Token( + "\"", + ), + 17..18, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__dollar_at_end.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__dollar_at_end.snap new file mode 100644 index 0000000..b98e5d7 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__dollar_at_end.snap @@ -0,0 +1,18 @@ +--- +source: tests/grammar/alpha034.rs +expression: "compiler.tokenize(\"text$\")" +--- +[ + ( + Token( + "text", + ), + 0..4, + ), + ( + Token( + "$", + ), + 4..5, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__dollar_dollar.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__dollar_dollar.snap new file mode 100644 index 0000000..3145dcb --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__dollar_dollar.snap @@ -0,0 +1,18 @@ +--- +source: tests/grammar/alpha034.rs +expression: "compiler.tokenize(\"$$\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "$", + ), + 1..2, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__dollar_dollar_char.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__dollar_dollar_char.snap new file mode 100644 index 0000000..b0056ea --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__dollar_dollar_char.snap @@ -0,0 +1,24 @@ +--- +source: tests/grammar/alpha034.rs +expression: "compiler.tokenize(\"$$x\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "$", + ), + 1..2, + ), + ( + Token( + "x", + ), + 2..3, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__dollar_dollar_space.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__dollar_dollar_space.snap new file mode 100644 index 0000000..11fee59 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__dollar_dollar_space.snap @@ -0,0 +1,18 @@ +--- +source: tests/grammar/alpha034.rs +expression: "compiler.tokenize(\"$$ \")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "$", + ), + 1..2, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__dollar_followed_by_dollar.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__dollar_followed_by_dollar.snap new file mode 100644 index 0000000..7d7d69d --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__dollar_followed_by_dollar.snap @@ -0,0 +1,24 @@ +--- +source: tests/grammar/alpha034.rs +expression: "compiler.tokenize(\"$$$\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "$", + ), + 1..2, + ), + ( + Token( + "$", + ), + 2..3, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__dollar_no_cmd.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__dollar_no_cmd.snap new file mode 100644 index 0000000..8a35105 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__dollar_no_cmd.snap @@ -0,0 +1,18 @@ +--- +source: tests/grammar/alpha034.rs +expression: "compiler.tokenize(\"$ \")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "", + ), + 1..2, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__double_dollar_brace.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__double_dollar_brace.snap new file mode 100644 index 0000000..c4c814e --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__double_dollar_brace.snap @@ -0,0 +1,42 @@ +--- +source: tests/grammar/alpha034.rs +expression: "compiler.tokenize(\"$$ {var}$\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + 
Token( + "$", + ), + 1..2, + ), + ( + Token( + "{", + ), + 3..4, + ), + ( + Token( + "var", + ), + 4..7, + ), + ( + Token( + "}", + ), + 7..8, + ), + ( + Token( + "$", + ), + 8..9, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__double_dollar_cmd_space.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__double_dollar_cmd_space.snap new file mode 100644 index 0000000..3004ed7 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__double_dollar_cmd_space.snap @@ -0,0 +1,30 @@ +--- +source: tests/grammar/alpha034.rs +expression: "compiler.tokenize(\"$$ cmd$\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "$", + ), + 1..2, + ), + ( + Token( + "cmd", + ), + 3..6, + ), + ( + Token( + "$", + ), + 6..7, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__double_dollar_end.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__double_dollar_end.snap new file mode 100644 index 0000000..3145dcb --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__double_dollar_end.snap @@ -0,0 +1,18 @@ +--- +source: tests/grammar/alpha034.rs +expression: "compiler.tokenize(\"$$\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "$", + ), + 1..2, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__double_dollar_no_space.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__double_dollar_no_space.snap new file mode 100644 index 0000000..2bca3c2 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__double_dollar_no_space.snap @@ -0,0 +1,24 @@ +--- +source: tests/grammar/alpha034.rs +expression: "compiler.tokenize(\"$$failed\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "$", + ), + 1..2, + ), + ( + Token( + "failed", + ), + 2..8, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__double_dollar_whitespace.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__double_dollar_whitespace.snap new file mode 100644 index 0000000..602f478 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__double_dollar_whitespace.snap @@ -0,0 +1,18 @@ +--- +source: tests/grammar/alpha034.rs +expression: "compiler.tokenize(\"$$ \\n\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "$", + ), + 1..2, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__empty_input.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__empty_input.snap new file mode 100644 index 0000000..fd5c88a --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__empty_input.snap @@ -0,0 +1,5 @@ +--- +source: tests/grammar/alpha034.rs +expression: "compiler.tokenize(\"\")" +--- +[] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__empty_string.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__empty_string.snap new file mode 100644 index 0000000..234b662 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__empty_string.snap @@ -0,0 +1,18 @@ +--- +source: tests/grammar/alpha034.rs +expression: "compiler.tokenize(r#\"\"\"\"#)" +--- +[ + ( + Token( + "\"", + ), + 0..1, + ), + ( + Token( + "\"", + ), + 1..2, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__function_invocation-5.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__function_invocation-5.snap index 01ee3f7..bc321d3 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha034__function_invocation-5.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__function_invocation-5.snap @@ -40,6 +40,6 @@ 
expression: "parse(&tokenize(\"func(1\"))" ], ), [ - found end of input at 6..6 expected '"."', '"as"', '"is"', '"*"', '"/"', '"%"', '"+"', '"-"', '">="', '">"', '"<="', '"<"', '"=="', '"!="', '"and"', '"or"', '".."', '"then"', '","', or '")"', + found end of input at 6..6 expected '"as"', '"is"', '"*"', '"/"', '"%"', '"+"', '"-"', '">="', '">"', '"<="', '"<"', '"=="', '"!="', '"and"', '"or"', '".."', '"then"', '","', or '")"', ], ) diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__function_invocation-7.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__function_invocation-7.snap index e7a5251..471c833 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha034__function_invocation-7.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__function_invocation-7.snap @@ -49,7 +49,7 @@ expression: "parse(&tokenize(\"func(1 2\"))" ], ), [ - found "2" at 7..8 expected '"."', '"as"', '"is"', '"*"', '"/"', '"%"', '"+"', '"-"', '">="', '">"', '"<="', '"<"', '"=="', '"!="', '"and"', '"or"', '".."', '"then"', or '","', - found end of input at 8..8 expected '"."', '"as"', '"is"', '"*"', '"/"', '"%"', '"+"', '"-"', '">="', '">"', '"<="', '"<"', '"=="', '"!="', '"and"', '"or"', '".."', '"then"', '","', or '")"', + found "2" at 7..8 expected '"as"', '"is"', '"*"', '"/"', '"%"', '"+"', '"-"', '">="', '">"', '"<="', '"<"', '"=="', '"!="', '"and"', '"or"', '".."', '"then"', or '","', + found end of input at 8..8 expected '"as"', '"is"', '"*"', '"/"', '"%"', '"+"', '"-"', '">="', '">"', '"<="', '"<"', '"=="', '"!="', '"and"', '"or"', '".."', '"then"', '","', or '")"', ], ) diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__function_invocation-8.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__function_invocation-8.snap index 900e9a2..8cd55d3 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha034__function_invocation-8.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__function_invocation-8.snap @@ -71,8 +71,8 @@ expression: "parse(&tokenize(\"func(1 2 let\"))" ], ), [ - found "2" at 7..8 expected '"."', '"as"', '"is"', '"*"', '"/"', '"%"', '"+"', '"-"', '">="', '">"', '"<="', '"<"', '"=="', '"!="', '"and"', '"or"', '".."', '"then"', or '","', - found "let" at 9..12 expected '"."', '"as"', '"is"', '"*"', '"/"', '"%"', '"+"', '"-"', '">="', '">"', '"<="', '"<"', '"=="', '"!="', '"and"', '"or"', '".."', '"then"', '","', or '")"', + found "2" at 7..8 expected '"as"', '"is"', '"*"', '"/"', '"%"', '"+"', '"-"', '">="', '">"', '"<="', '"<"', '"=="', '"!="', '"and"', '"or"', '".."', '"then"', or '","', + found "let" at 9..12 expected '"as"', '"is"', '"*"', '"/"', '"%"', '"+"', '"-"', '">="', '">"', '"<="', '"<"', '"=="', '"!="', '"and"', '"or"', '".."', '"then"', '","', or '")"', found end of input at 9..12 expected something else, found end of input at 12..12 expected '"="', found end of input at 12..12 expected "type", or "expression", diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__function_invocation-9.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__function_invocation-9.snap index ead17b3..16f0774 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha034__function_invocation-9.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__function_invocation-9.snap @@ -68,6 +68,6 @@ expression: "parse(&tokenize(\"func(1 2) 3\"))" ], ), [ - found "2" at 7..8 expected '"."', '"as"', '"is"', '"*"', '"/"', '"%"', '"+"', '"-"', '">="', '">"', '"<="', '"<"', '"=="', '"!="', '"and"', '"or"', '".."', '"then"', or '","', + found 
"2" at 7..8 expected '"as"', '"is"', '"*"', '"/"', '"%"', '"+"', '"-"', '">="', '">"', '"<="', '"<"', '"=="', '"!="', '"and"', '"or"', '".."', '"then"', or '","', ], ) diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__just_dollar.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__just_dollar.snap new file mode 100644 index 0000000..b9f3413 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__just_dollar.snap @@ -0,0 +1,12 @@ +--- +source: tests/grammar/alpha034.rs +expression: "compiler.tokenize(\"$\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__lexer.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__lexer.snap index b3c8a0a..eded5d7 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha034__lexer.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__lexer.snap @@ -29,21 +29,15 @@ expression: "compiler.tokenize(r#\"\n let x = \"my \\\"interpolated\\\" s ), ( Token( - "my", + "my ", ), - 18..20, + 18..21, ), ( Token( - "\\", + "\\\"", ), - 21..22, - ), - ( - Token( - "\"", - ), - 22..23, + 21..23, ), ( Token( @@ -53,21 +47,15 @@ expression: "compiler.tokenize(r#\"\n let x = \"my \\\"interpolated\\\" s ), ( Token( - "\\", - ), - 35..36, - ), - ( - Token( - "\"", + "\\\"", ), - 36..37, + 35..37, ), ( Token( - "string", + " string ", ), - 38..44, + 37..45, ), ( Token( @@ -89,9 +77,9 @@ expression: "compiler.tokenize(r#\"\n let x = \"my \\\"interpolated\\\" s ), ( Token( - "end", + " end", ), - 52..55, + 51..55, ), ( Token( @@ -113,57 +101,15 @@ expression: "compiler.tokenize(r#\"\n let x = \"my \\\"interpolated\\\" s ), ( Token( - "this", - ), - 68..72, - ), - ( - Token( - "-", - ), - 73..74, - ), - ( - Token( - "-", + "this --should be - tokenized ", ), - 74..75, + 68..97, ), ( Token( - "should", + "\\$", ), - 75..81, - ), - ( - Token( - "be", - ), - 82..84, - ), - ( - Token( - "-", - ), - 85..86, - ), - ( - Token( - "tokenized", - ), - 87..96, - ), - ( - Token( - "\\", - ), - 97..98, - ), - ( - Token( - "$", - ), - 98..99, + 97..99, ), ( Token( @@ -179,21 +125,9 @@ expression: "compiler.tokenize(r#\"\n let x = \"my \\\"interpolated\\\" s ), ( Token( - "unclosed", - ), - 110..118, - ), - ( - Token( - "string", - ), - 119..125, - ), - ( - Token( - "abcd", + "unclosed string\n\n abcd ", ), - 135..139, + 110..140, ), ( Token( diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__mismatched_braces.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__mismatched_braces.snap new file mode 100644 index 0000000..03acf50 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__mismatched_braces.snap @@ -0,0 +1,36 @@ +--- +source: tests/grammar/alpha034.rs +expression: "compiler.tokenize(r#\"\"{{{\"\"#)" +--- +[ + ( + Token( + "\"", + ), + 0..1, + ), + ( + Token( + "{", + ), + 1..2, + ), + ( + Token( + "{", + ), + 2..3, + ), + ( + Token( + "{", + ), + 3..4, + ), + ( + Token( + "\"", + ), + 4..5, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__parentheses-3.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__parentheses-3.snap index 4c83423..4478dc1 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha034__parentheses-3.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__parentheses-3.snap @@ -32,6 +32,6 @@ expression: "parse(&tokenize(\"(1\"))" ], ), [ - found end of input at 2..2 expected '"."', '"as"', '"is"', '"*"', '"/"', '"%"', '"+"', '"-"', '">="', '">"', '"<="', '"<"', '"=="', '"!="', '"and"', '"or"', '".."', 
'"then"', or '")"', + found end of input at 2..2 expected '"as"', '"is"', '"*"', '"/"', '"%"', '"+"', '"-"', '">="', '">"', '"<="', '"<"', '"=="', '"!="', '"and"', '"or"', '".."', '"then"', or '")"', ], ) diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__parentheses-4.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__parentheses-4.snap index 90e519f..ddf583b 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha034__parentheses-4.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__parentheses-4.snap @@ -32,6 +32,6 @@ expression: "parse(&tokenize(\"(1,)\"))" ], ), [ - found "," at 2..3 expected '"."', '"as"', '"is"', '"*"', '"/"', '"%"', '"+"', '"-"', '">="', '">"', '"<="', '"<"', '"=="', '"!="', '"and"', '"or"', '".."', '"then"', or '")"', + found "," at 2..3 expected '"as"', '"is"', '"*"', '"/"', '"%"', '"+"', '"-"', '">="', '">"', '"<="', '"<"', '"=="', '"!="', '"and"', '"or"', '".."', '"then"', or '")"', ], ) diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__recovery-4.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__recovery-4.snap index 3fe1f5e..97ca60a 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha034__recovery-4.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__recovery-4.snap @@ -50,16 +50,16 @@ expression: "parse(&tokenize(r#\"\n fun foo(a) {\n\n return \" ( Text( ( - "echo", - 43..47, + "echo ", + 43..48, ), ), - 43..47, + 43..48, ), ( - Escape( + Text( ( - "\"", + "\\\"", 48..50, ), ), @@ -94,9 +94,9 @@ expression: "parse(&tokenize(r#\"\n fun foo(a) {\n\n return \" 50..57, ), ( - Escape( + Text( ( - "\"", + "\\\"", 57..59, ), ), diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__recovery-5.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__recovery-5.snap index ccf7e06..d3d6bea 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha034__recovery-5.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__recovery-5.snap @@ -126,10 +126,14 @@ expression: "parse(&tokenize(r#\"\n // comments\n // comments\n\n ), ( Error, - 144..149, + 144..145, ), ( - Expression( + Echo( + ( + "echo", + 145..149, + ), ( Gt( ( @@ -138,20 +142,11 @@ expression: "parse(&tokenize(r#\"\n // comments\n // comments\n\n ( Text( ( - "NOT", - 151..154, - ), - ), - 151..154, - ), - ( - Text( - ( - "READY", - 155..160, + "NOT READY", + 151..160, ), ), - 155..160, + 151..160, ), ], ), @@ -165,7 +160,7 @@ expression: "parse(&tokenize(r#\"\n // comments\n // comments\n\n 150..163, ), ), - 150..163, + 145..163, ), ( Block( @@ -247,16 +242,16 @@ expression: "parse(&tokenize(r#\"\n // comments\n // comments\n\n ( Text( ( - "echo", - 224..228, + "echo ", + 224..229, ), ), - 224..228, + 224..229, ), ( - Escape( + Text( ( - "\"", + "\\\"", 229..231, ), ), @@ -291,9 +286,9 @@ expression: "parse(&tokenize(r#\"\n // comments\n // comments\n\n 231..238, ), ( - Escape( + Text( ( - "\"", + "\\\"", 238..240, ), ), @@ -482,7 +477,7 @@ expression: "parse(&tokenize(r#\"\n // comments\n // comments\n\n [ found end of input at 74..75 expected something else, found "CmdText" at 115..122 expected "type", - found end of input at 144..149 expected something else, + found end of input at 144..145 expected something else, found "{" at 164..165 expected "expression", found end of input at 170..171 expected something else, ], diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__single_char.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__single_char.snap new file mode 100644 index 0000000..0505235 --- /dev/null +++ 
b/tests/grammar/snapshots/r#mod__grammar__alpha034__single_char.snap @@ -0,0 +1,12 @@ +--- +source: tests/grammar/alpha034.rs +expression: "compiler.tokenize(\"x\")" +--- +[ + ( + Token( + "x", + ), + 0..1, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__single_dollar_cmd.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__single_dollar_cmd.snap new file mode 100644 index 0000000..a690bd0 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__single_dollar_cmd.snap @@ -0,0 +1,24 @@ +--- +source: tests/grammar/alpha034.rs +expression: "compiler.tokenize(\"$echo$\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "echo", + ), + 1..5, + ), + ( + Token( + "$", + ), + 5..6, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__stdlib.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__stdlib.snap index 6346ab8..99b4fe2 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha034__stdlib.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__stdlib.snap @@ -65,19 +65,13 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "printf", + "printf \"", ), - 48..54, + 48..56, ), ( Text( - "\"", - ), - 55..56, - ), - ( - Escape( - "$", + "\\$", ), 56..58, ), @@ -174,20 +168,11 @@ expression: parse(&tokenize(&stdlib)) ( Text( ( - "$", - 106..107, - ), - ), - 106..107, - ), - ( - Text( - ( - "REPLY", - 107..112, + "$REPLY", + 106..112, ), ), - 107..112, + 106..112, ), ], ), @@ -283,39 +268,27 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "echo", + "echo \"", ), - 189..193, + 189..195, ), ( Text( - "\"", - ), - 194..195, - ), - ( - Escape( - "$", + "\\$", ), 195..197, ), - ( - Escape( - "{", - ), - 197..199, - ), ( Text( - "source", + "\\{", ), - 199..205, + 197..199, ), ( Text( - "/", + "source/", ), - 205..206, + 199..206, ), ( Expression( @@ -353,15 +326,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "}", - ), - 229..230, - ), - ( - Text( - "\"", + "}\"", ), - 230..231, + 229..231, ), ( Text( @@ -464,39 +431,27 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "echo", + "echo \"", ), - 303..307, + 303..309, ), ( Text( - "\"", - ), - 308..309, - ), - ( - Escape( - "$", + "\\$", ), 309..311, ), - ( - Escape( - "{", - ), - 311..313, - ), ( Text( - "source", + "\\{", ), - 313..319, + 311..313, ), ( Text( - "//", + "source//", ), - 319..321, + 313..321, ), ( Expression( @@ -534,15 +489,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "}", - ), - 344..345, - ), - ( - Text( - "\"", + "}\"", ), - 345..346, + 344..346, ), ( Text( @@ -662,15 +611,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "echo", - ), - 448..452, - ), - ( - Text( - "\"", + "echo \"", ), - 453..454, + 448..454, ), ( Expression( @@ -688,45 +631,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 462..463, - ), - ( - Text( - "|", - ), - 464..465, - ), - ( - Text( - "sed", - ), - 466..469, - ), - ( - CommandOption( - "-e", - ), - 470..472, - ), - ( - Text( - "\"", - ), - 473..474, - ), - ( - Text( - "s", - ), - 474..475, - ), - ( - Text( - "/", + "\" | sed -e \"s/", ), - 475..476, + 462..476, ), ( Expression( @@ -764,21 +671,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "/", - ), - 499..500, - ), - ( - Text( - "g", - ), - 500..501, - ), - ( - Text( - "\"", + "/g\"", ), - 501..502, + 499..502, ), ( Text( @@ -845,21 +740,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "[", - ), - 538..539, - ), - ( - CommandOption( - "-d", - ), - 540..542, - ), - ( - Text( - "\"", + "[ -d \"", ), - 543..544, 
+ 538..544, ), ( Expression( @@ -877,15 +760,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 550..551, - ), - ( - Text( - "]", + "\" ]", ), - 552..553, + 550..553, ), ( Text( @@ -1003,21 +880,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "[", - ), - 642..643, - ), - ( - CommandOption( - "-f", - ), - 644..646, - ), - ( - Text( - "\"", + "[ -f \"", ), - 647..648, + 642..648, ), ( Expression( @@ -1035,15 +900,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 654..655, - ), - ( - Text( - "]", + "\" ]", ), - 656..657, + 654..657, ), ( Text( @@ -1166,15 +1025,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "<", + "< \"", ), - 752..753, - ), - ( - Text( - "\"", - ), - 754..755, + 752..755, ), ( Expression( @@ -1284,15 +1137,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "echo", - ), - 816..820, - ), - ( - Text( - "\"", + "echo \"", ), - 821..822, + 816..822, ), ( Expression( @@ -1310,21 +1157,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 831..832, - ), - ( - Text( - ">", - ), - 833..834, - ), - ( - Text( - "\"", + "\" > \"", ), - 835..836, + 831..836, ), ( Expression( @@ -1434,15 +1269,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "echo", - ), - 898..902, - ), - ( - Text( - "\"", + "echo \"", ), - 903..904, + 898..904, ), ( Expression( @@ -1460,27 +1289,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 913..914, - ), - ( - Text( - ">", - ), - 915..916, - ), - ( - Text( - ">", - ), - 916..917, - ), - ( - Text( - "\"", + "\" >> \"", ), - 918..919, + 913..919, ), ( Expression( @@ -1625,21 +1436,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "IFS", - ), - 1021..1024, - ), - ( - Text( - "=", - ), - 1024..1025, - ), - ( - Text( - "\"", + "IFS=\"", ), - 1025..1026, + 1021..1026, ), ( Expression( @@ -1657,33 +1456,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 1037..1038, - ), - ( - Text( - "read", - ), - 1039..1043, - ), - ( - CommandOption( - "-rd", - ), - 1044..1047, - ), - ( - Text( - "''", - ), - 1048..1050, - ), - ( - CommandOption( - "-a", + "\" read -rd '' -a ", ), - 1051..1053, + 1037..1054, ), ( Expression( @@ -1710,49 +1485,13 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "<", - ), - 1070..1071, - ), - ( - Text( - "<", - ), - 1072..1073, - ), - ( - Text( - "(", - ), - 1073..1074, - ), - ( - Text( - "printf", - ), - 1074..1080, - ), - ( - Text( - "%", + " < <(printf %s \"", ), - 1081..1082, + 1069..1085, ), ( Text( - "s", - ), - 1082..1083, - ), - ( - Text( - "\"", - ), - 1084..1085, - ), - ( - Escape( - "$", + "\\$", ), 1085..1087, ), @@ -1781,15 +1520,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 1100..1101, - ), - ( - Text( - ")", + "\")", ), - 1101..1102, + 1100..1102, ), ( Text( @@ -1911,19 +1644,13 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "IFS", + "IFS=", ), - 1197..1200, + 1197..1201, ), ( Text( - "=", - ), - 1200..1201, - ), - ( - Escape( - "$", + "\\$", ), 1201..1203, ), @@ -1933,35 +1660,17 @@ expression: parse(&tokenize(&stdlib)) ), 1203..1204, ), - ( - Escape( - "n'", - ), - 1204..1207, - ), ( Text( - "read", - ), - 1208..1212, - ), - ( - CommandOption( - "-rd", + "\\n", ), - 1213..1216, + 1204..1206, ), ( Text( - "''", - ), - 1217..1219, - ), - ( - CommandOption( - "-a", + "' read -rd '' -a ", ), - 1220..1222, + 1206..1223, ), ( Expression( @@ -1988,31 +1697,13 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "<", - ), - 1239..1240, - ), - ( - Text( - "<", + " <<<\"", ), - 1240..1241, + 
1238..1243, ), ( Text( - "<", - ), - 1241..1242, - ), - ( - Text( - "\"", - ), - 1242..1243, - ), - ( - Escape( - "$", + "\\$", ), 1243..1245, ), @@ -2150,7 +1841,17 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - [], + [ + ( + Text( + ( + " ", + 1340..1341, + ), + ), + 1340..1341, + ), + ], ), 1339..1342, ), @@ -2249,21 +1950,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "IFS", - ), - 1418..1421, - ), - ( - Text( - "=", - ), - 1421..1422, - ), - ( - Text( - "\"", + "IFS=\"", ), - 1422..1423, + 1418..1423, ), ( Expression( @@ -2281,39 +1970,21 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ; echo \"", ), - 1434..1435, + 1434..1444, ), ( Text( - ";", + "\\$", ), - 1436..1437, + 1444..1446, ), ( Text( - "echo", + "\\{", ), - 1438..1442, - ), - ( - Text( - "\"", - ), - 1443..1444, - ), - ( - Escape( - "$", - ), - 1444..1446, - ), - ( - Escape( - "{", - ), - 1446..1448, + 1446..1448, ), ( Expression( @@ -2340,33 +2011,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "[", + "[*]}\"", ), - 1461..1462, - ), - ( - Text( - "*", - ), - 1462..1463, - ), - ( - Text( - "]", - ), - 1463..1464, - ), - ( - Text( - "}", - ), - 1464..1465, - ), - ( - Text( - "\"", - ), - 1465..1466, + 1461..1466, ), ( Text( @@ -2452,15 +2099,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "echo", - ), - 1528..1532, - ), - ( - Text( - "\"", + "echo \"", ), - 1533..1534, + 1528..1534, ), ( Expression( @@ -2478,105 +2119,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 1540..1541, - ), - ( - Text( - "|", - ), - 1542..1543, - ), - ( - Text( - "sed", - ), - 1544..1547, - ), - ( - CommandOption( - "-e", - ), - 1548..1550, - ), - ( - Text( - "'s", - ), - 1551..1553, - ), - ( - Text( - "/", - ), - 1553..1554, - ), - ( - Text( - "^", - ), - 1554..1555, - ), - ( - Text( - "[", - ), - 1555..1556, - ), - ( - Text( - "[", - ), - 1556..1557, - ), - ( - Text( - ":", - ), - 1557..1558, - ), - ( - Text( - "space", - ), - 1558..1563, - ), - ( - Text( - ":", - ), - 1563..1564, - ), - ( - Text( - "]", - ), - 1564..1565, - ), - ( - Text( - "]", + "\" | sed -e 's/^[[:space:]]*//'", ), - 1565..1566, - ), - ( - Text( - "*", - ), - 1566..1567, - ), - ( - Text( - "//", - ), - 1567..1569, - ), - ( - Text( - "'", - ), - 1569..1570, + 1540..1570, ), ( Text( @@ -2662,15 +2207,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "echo", + "echo \"", ), - 1633..1637, - ), - ( - Text( - "\"", - ), - 1638..1639, + 1633..1639, ), ( Expression( @@ -2688,105 +2227,21 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 1645..1646, - ), - ( - Text( - "|", - ), - 1647..1648, - ), - ( - Text( - "sed", - ), - 1649..1652, - ), - ( - CommandOption( - "-e", - ), - 1653..1655, - ), - ( - Text( - "'s", + "\" | sed -e 's/[[:space:]]*", ), - 1656..1658, + 1645..1671, ), ( Text( - "/", - ), - 1658..1659, - ), - ( - Text( - "[", - ), - 1659..1660, - ), - ( - Text( - "[", - ), - 1660..1661, - ), - ( - Text( - ":", - ), - 1661..1662, - ), - ( - Text( - "space", - ), - 1662..1667, - ), - ( - Text( - ":", - ), - 1667..1668, - ), - ( - Text( - "]", - ), - 1668..1669, - ), - ( - Text( - "]", - ), - 1669..1670, - ), - ( - Text( - "*", - ), - 1670..1671, - ), - ( - Escape( - "$", + "\\$", ), 1671..1673, ), ( Text( - "//", + "//'", ), - 1673..1675, - ), - ( - Text( - "'", - ), - 1675..1676, + 1673..1676, ), ( Text( @@ -2963,15 +2418,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "echo", - ), - 1809..1813, - ), - ( - Text( - "\"", + "echo \"", ), - 1814..1815, + 1809..1815, ), ( 
Expression( @@ -2989,105 +2438,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 1821..1822, - ), - ( - Text( - "|", - ), - 1823..1824, - ), - ( - Text( - "tr", - ), - 1825..1827, - ), - ( - Text( - "'", - ), - 1828..1829, - ), - ( - Text( - "[", - ), - 1829..1830, - ), - ( - Text( - ":", - ), - 1830..1831, - ), - ( - Text( - "upper", - ), - 1831..1836, - ), - ( - Text( - ":", - ), - 1836..1837, - ), - ( - Text( - "]", - ), - 1837..1838, - ), - ( - Text( - "'", + "\" | tr '[:upper:]' '[:lower:]'", ), - 1838..1839, - ), - ( - Text( - "'", - ), - 1840..1841, - ), - ( - Text( - "[", - ), - 1841..1842, - ), - ( - Text( - ":", - ), - 1842..1843, - ), - ( - Text( - "lower", - ), - 1843..1848, - ), - ( - Text( - ":", - ), - 1848..1849, - ), - ( - Text( - "]", - ), - 1849..1850, - ), - ( - Text( - "'", - ), - 1850..1851, + 1821..1851, ), ( Text( @@ -3150,154 +2503,52 @@ expression: parse(&tokenize(&stdlib)) ), [ ( - Return( - ( - "return", - 1894..1900, - ), - Some( - ( - Command( - [ - ( - Unsafe, - 1901..1907, - ), - ], - [ - ( - Text( - "$", - ), - 1908..1909, - ), - ( - Text( - "echo", - ), - 1909..1913, - ), - ( - Text( - "\"", - ), - 1914..1915, - ), - ( - Expression( - ( - Var( - ( - "text", - 1916..1920, - ), - ), - 1916..1920, - ), - ), - 1915..1921, - ), - ( - Text( - "\"", - ), - 1921..1922, - ), - ( - Text( - "|", - ), - 1923..1924, - ), - ( - Text( - "tr", - ), - 1925..1927, - ), - ( - Text( - "'", - ), - 1928..1929, - ), - ( - Text( - "[", - ), - 1929..1930, - ), - ( - Text( - ":", - ), - 1930..1931, - ), - ( - Text( - "lower", - ), - 1931..1936, - ), - ( - Text( - ":", - ), - 1936..1937, - ), - ( - Text( - "]", - ), - 1937..1938, - ), - ( - Text( - "'", - ), - 1938..1939, - ), - ( - Text( - "'", - ), - 1940..1941, - ), - ( - Text( - "[", - ), - 1941..1942, - ), + Return( + ( + "return", + 1894..1900, + ), + Some( + ( + Command( + [ ( - Text( - ":", - ), - 1942..1943, + Unsafe, + 1901..1907, ), + ], + [ ( Text( - "upper", + "$", ), - 1943..1948, + 1908..1909, ), ( Text( - ":", + "echo \"", ), - 1948..1949, + 1909..1915, ), ( - Text( - "]", + Expression( + ( + Var( + ( + "text", + 1916..1920, + ), + ), + 1916..1920, + ), ), - 1949..1950, + 1915..1921, ), ( Text( - "'", + "\" | tr '[:lower:]' '[:upper:]'", ), - 1950..1951, + 1921..1951, ), ( Text( @@ -3422,25 +2673,19 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "echo", + "echo \"", ), - 2062..2066, + 2062..2068, ), ( Text( - "\"", - ), - 2067..2068, - ), - ( - Escape( - "$", + "\\$", ), 2068..2070, ), ( - Escape( - "{", + Text( + "\\{", ), 2070..2072, ), @@ -3475,15 +2720,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "}", - ), - 2087..2088, - ), - ( - Text( - "\"", + "}\"", ), - 2088..2089, + 2087..2089, ), ( Text( @@ -3543,25 +2782,19 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "echo", + "echo \"", ), - 2132..2136, + 2132..2138, ), ( Text( - "\"", - ), - 2137..2138, - ), - ( - Escape( - "$", + "\\$", ), 2138..2140, ), ( - Escape( - "{", + Text( + "\\{", ), 2140..2142, ), @@ -3596,33 +2829,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "[", - ), - 2157..2158, - ), - ( - Text( - "@", - ), - 2158..2159, - ), - ( - Text( - "]", - ), - 2159..2160, - ), - ( - Text( - "}", - ), - 2160..2161, - ), - ( - Text( - "\"", + "[@]}\"", ), - 2161..2162, + 2157..2162, ), ( Text( @@ -3728,21 +2937,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "[", - ), - 2239..2240, - ), - ( - CommandOption( - "-n", - ), - 2241..2243, - ), - ( - Text( - "\"", + "[ -n \"", ), - 2244..2245, + 2239..2245, 
), ( Expression( @@ -3760,33 +2957,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 2251..2252, - ), - ( - Text( - "]", - ), - 2253..2254, - ), - ( - Text( - "&&", - ), - 2255..2257, - ), - ( - Text( - "[", + "\" ] && [ \"", ), - 2258..2259, - ), - ( - Text( - "\"", - ), - 2260..2261, + 2251..2261, ), ( Expression( @@ -3804,21 +2977,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 2267..2268, - ), - ( - CommandOption( - "-eq", - ), - 2269..2272, - ), - ( - Text( - "\"", + "\" -eq \"", ), - 2273..2274, + 2267..2274, ), ( Expression( @@ -3836,51 +2997,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 2280..2281, - ), - ( - Text( - "]", - ), - 2282..2283, - ), - ( - Text( - "2", - ), - 2284..2285, - ), - ( - Text( - ">", - ), - 2285..2286, - ), - ( - Text( - "/", - ), - 2286..2287, - ), - ( - Text( - "dev", - ), - 2287..2290, - ), - ( - Text( - "/", - ), - 2290..2291, - ), - ( - Text( - "null", + "\" ] 2>/dev/null", ), - 2291..2295, + 2280..2295, ), ( Text( @@ -4020,67 +3139,19 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "for", - ), - 2395..2398, - ), - ( - Text( - "(", - ), - 2399..2400, - ), - ( - Text( - "(", - ), - 2400..2401, - ), - ( - Text( - "i", - ), - 2401..2402, - ), - ( - Text( - "=", - ), - 2402..2403, - ), - ( - Text( - "0", - ), - 2403..2404, - ), - ( - Text( - ";", - ), - 2404..2405, - ), - ( - Text( - "i", + "for ((i=0; i<", ), - 2406..2407, + 2395..2408, ), ( Text( - "<", - ), - 2407..2408, - ), - ( - Escape( - "$", + "\\$", ), 2408..2410, ), ( - Escape( - "{", + Text( + "\\{", ), 2410..2412, ), @@ -4115,57 +3186,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "}", - ), - 2426..2427, - ), - ( - Text( - ";", + "}; i++)); do\n ", ), - 2427..2428, - ), - ( - Text( - "i", - ), - 2429..2430, - ), - ( - Text( - "+", - ), - 2430..2431, - ), - ( - Text( - "+", - ), - 2431..2432, - ), - ( - Text( - ")", - ), - 2432..2433, - ), - ( - Text( - ")", - ), - 2433..2434, - ), - ( - Text( - ";", - ), - 2434..2435, - ), - ( - Text( - "do", - ), - 2436..2438, + 2426..2447, ), ( Expression( @@ -4185,129 +3208,69 @@ expression: parse(&tokenize(&stdlib)) 2455..2460, ), ), - 2455..2460, - ), - ), - 2447..2461, - ), - ( - Text( - "+=", - ), - 2461..2463, - ), - ( - Text( - "(", - ), - 2463..2464, - ), - ( - Text( - "\"", - ), - 2465..2466, - ), - ( - Escape( - "$", - ), - 2466..2468, - ), - ( - Escape( - "{", - ), - 2468..2470, - ), - ( - Expression( - ( - Nameof( - ( - "nameof", - 2471..2477, - ), - ( - Var( - ( - "text", - 2478..2482, - ), - ), - 2478..2482, - ), - ), - 2478..2482, - ), - ), - 2470..2483, - ), - ( - Text( - ":", - ), - 2483..2484, - ), - ( - Escape( - "$", - ), - 2484..2486, - ), - ( - Text( - "i", - ), - 2486..2487, - ), - ( - Text( - ":", + 2455..2460, + ), ), - 2487..2488, + 2447..2461, ), ( Text( - "1", + "+=( \"", ), - 2488..2489, + 2461..2466, ), ( Text( - "}", + "\\$", ), - 2489..2490, + 2466..2468, ), ( Text( - "\"", + "\\{", ), - 2490..2491, + 2468..2470, ), ( - Text( - ")", + Expression( + ( + Nameof( + ( + "nameof", + 2471..2477, + ), + ( + Var( + ( + "text", + 2478..2482, + ), + ), + 2478..2482, + ), + ), + 2478..2482, + ), ), - 2492..2493, + 2470..2483, ), ( Text( - ";", + ":", ), - 2493..2494, + 2483..2484, ), ( Text( - "done", + "\\$", ), - 2499..2503, + 2484..2486, ), ( Text( - ";", + "i:1}\" );\n done;", ), - 2503..2504, + 2486..2504, ), ( Text( @@ -4419,15 +3382,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "echo", - ), - 2598..2602, - ), - ( - Text( - "\"", + "echo \"", ), - 
2603..2604, + 2598..2604, ), ( Expression( @@ -4445,195 +3402,33 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 2610..2611, - ), - ( - Text( - "|", - ), - 2612..2613, - ), - ( - Text( - "awk", + "\" | awk '", ), - 2614..2617, + 2610..2619, ), ( Text( - "'", - ), - 2618..2619, - ), - ( - Escape( - "{", + "\\{", ), 2619..2621, ), ( Text( - "s", - ), - 2621..2622, - ), - ( - Text( - "=", - ), - 2622..2623, - ), - ( - Text( - "0", - ), - 2623..2624, - ), - ( - Text( - ";", - ), - 2624..2625, - ), - ( - Text( - "for", - ), - 2626..2629, - ), - ( - Text( - "(", - ), - 2630..2631, - ), - ( - Text( - "i", - ), - 2631..2632, - ), - ( - Text( - "=", - ), - 2632..2633, - ), - ( - Text( - "1", - ), - 2633..2634, - ), - ( - Text( - ";", - ), - 2634..2635, - ), - ( - Text( - "i", - ), - 2636..2637, - ), - ( - Text( - "<=", - ), - 2637..2639, - ), - ( - Text( - "NF", - ), - 2639..2641, - ), - ( - Text( - ";", - ), - 2641..2642, - ), - ( - Text( - "i", - ), - 2643..2644, - ), - ( - Text( - "+", - ), - 2644..2645, - ), - ( - Text( - "+", - ), - 2645..2646, - ), - ( - Text( - ")", - ), - 2646..2647, - ), - ( - Text( - "s", + "s=0; for (i=1; i<=NF; i++) s+=", ), - 2648..2649, + 2621..2651, ), ( Text( - "+=", - ), - 2649..2651, - ), - ( - Escape( - "$", + "\\$", ), 2651..2653, ), ( Text( - "i", - ), - 2653..2654, - ), - ( - Text( - ";", - ), - 2654..2655, - ), - ( - Text( - "print", - ), - 2656..2661, - ), - ( - Text( - "s", - ), - 2662..2663, - ), - ( - Text( - "}", - ), - 2663..2664, - ), - ( - Text( - "'", + "i; print s}'", ), - 2664..2665, + 2653..2665, ), ( Text( @@ -5332,9 +4127,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "eval", + "eval ", ), - 3255..3259, + 3255..3260, ), ( Expression( @@ -5457,15 +4252,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "exit", - ), - 3341..3345, - ), - ( - Text( - "\"", + "exit \"", ), - 3346..3347, + 3341..3347, ), ( Expression( @@ -5735,45 +4524,21 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "[", - ), - 3541..3542, - ), - ( - CommandOption( - "-x", + "[ -x \"", ), - 3543..3545, + 3541..3547, ), ( Text( - "\"", - ), - 3546..3547, - ), - ( - Escape( - "$", + "\\$", ), 3547..3549, ), ( Text( - "(", - ), - 3549..3550, - ), - ( - Text( - "command", - ), - 3550..3557, - ), - ( - CommandOption( - "-v", + "(command -v ", ), - 3558..3560, + 3549..3561, ), ( Expression( @@ -5791,21 +4556,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - ")", - ), - 3570..3571, - ), - ( - Text( - "\"", - ), - 3571..3572, - ), - ( - Text( - "]", + ")\" ]", ), - 3573..3574, + 3570..3574, ), ( Text( @@ -5988,21 +4741,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "ln", - ), - 3745..3747, - ), - ( - CommandOption( - "-s", - ), - 3748..3750, - ), - ( - Text( - "\"", + "ln -s \"", ), - 3751..3752, + 3745..3752, ), ( Expression( @@ -6020,15 +4761,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 3760..3761, - ), - ( - Text( - "\"", + "\" \"", ), - 3762..3763, + 3760..3763, ), ( Expression( @@ -6108,20 +4843,11 @@ expression: parse(&tokenize(&stdlib)) ( Text( ( - "The", - 3816..3819, + "The file ", + 3816..3825, ), ), - 3816..3819, - ), - ( - Text( - ( - "file", - 3820..3824, - ), - ), - 3820..3824, + 3816..3825, ), ( Expression( @@ -6140,29 +4866,11 @@ expression: parse(&tokenize(&stdlib)) ( Text( ( - "doesn't", - 3834..3841, - ), - ), - 3834..3841, - ), - ( - Text( - ( - "exist", - 3842..3847, - ), - ), - 3842..3847, - ), - ( - Text( - ( - "!", - 3847..3848, + " doesn't exist!", + 3833..3848, ), ), - 3847..3848, + 
3833..3848, ), ], ), @@ -6298,21 +5006,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "mkdir", - ), - 3954..3959, - ), - ( - CommandOption( - "-p", - ), - 3960..3962, - ), - ( - Text( - "\"", + "mkdir -p \"", ), - 3963..3964, + 3954..3964, ), ( Expression( @@ -6458,27 +5154,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "chmod", - ), - 4068..4073, - ), - ( - Text( - "+", - ), - 4074..4075, - ), - ( - Text( - "x", - ), - 4075..4076, - ), - ( - Text( - "\"", + "chmod +x \"", ), - 4077..4078, + 4068..4078, ), ( Expression( @@ -6558,20 +5236,11 @@ expression: parse(&tokenize(&stdlib)) ( Text( ( - "The", - 4124..4127, + "The file ", + 4124..4133, ), ), - 4124..4127, - ), - ( - Text( - ( - "file", - 4128..4132, - ), - ), - 4128..4132, + 4124..4133, ), ( Expression( @@ -6590,29 +5259,11 @@ expression: parse(&tokenize(&stdlib)) ( Text( ( - "doesn't", - 4140..4147, - ), - ), - 4140..4147, - ), - ( - Text( - ( - "exist", - 4148..4153, - ), - ), - 4148..4153, - ), - ( - Text( - ( - "!", - 4153..4154, + " doesn't exist!", + 4139..4154, ), ), - 4153..4154, + 4139..4154, ), ], ), @@ -6787,21 +5438,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "chown", - ), - 4290..4295, - ), - ( - CommandOption( - "-R", - ), - 4296..4298, - ), - ( - Text( - "\"", + "chown -R \"", ), - 4299..4300, + 4290..4300, ), ( Expression( @@ -6819,15 +5458,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 4306..4307, - ), - ( - Text( - "\"", + "\" \"", ), - 4308..4309, + 4306..4309, ), ( Expression( @@ -7040,27 +5673,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "curl", - ), - 4471..4475, - ), - ( - CommandOption( - "-L", - ), - 4476..4478, - ), - ( - CommandOption( - "-o", - ), - 4479..4481, - ), - ( - Text( - "\"", + "curl -L -o \"", ), - 4482..4483, + 4471..4483, ), ( Expression( @@ -7078,15 +5693,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 4489..4490, - ), - ( - Text( - "\"", + "\" \"", ), - 4491..4492, + 4489..4492, ), ( Expression( @@ -7188,15 +5797,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "wget", - ), - 4559..4563, - ), - ( - Text( - "\"", + "wget \"", ), - 4564..4565, + 4559..4565, ), ( Expression( @@ -7214,21 +5817,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 4570..4571, - ), - ( - CommandOption( - "-P", - ), - 4572..4574, - ), - ( - Text( - "\"", + "\" -P \"", ), - 4575..4576, + 4570..4576, ), ( Expression( @@ -7330,15 +5921,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "aria2c", + "aria2c \"", ), - 4646..4652, - ), - ( - Text( - "\"", - ), - 4653..4654, + 4646..4654, ), ( Expression( @@ -7356,21 +5941,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 4659..4660, - ), - ( - CommandOption( - "-d", - ), - 4661..4663, - ), - ( - Text( - "\"", + "\" -d \"", ), - 4664..4665, + 4659..4665, ), ( Expression( @@ -7538,15 +6111,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "id", - ), - 4802..4804, - ), - ( - CommandOption( - "-u", + "id -u", ), - 4805..4807, + 4802..4807, ), ( Text( @@ -7711,51 +6278,27 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "echo", + "echo \"", ), - 4928..4932, + 4928..4934, ), ( Text( - "\"", - ), - 4933..4934, - ), - ( - Escape( - "$", + "\\$", ), 4934..4936, ), - ( - Escape( - "{", - ), - 4936..4938, - ), - ( - Text( - "!", - ), - 4938..4939, - ), - ( - Text( - "var", - ), - 4939..4942, - ), ( Text( - "}", + "\\{", ), - 4942..4943, + 4936..4938, ), ( Text( - "\"", + "!var}\"", ), - 4943..4944, + 4938..4944, ), ( Text( @@ -7860,20 +6403,11 
@@ expression: parse(&tokenize(&stdlib)) ( Text( ( - ".", - 5012..5013, - ), - ), - 5012..5013, - ), - ( - Text( - ( - "env", - 5013..5016, + ".env", + 5012..5016, ), ), - 5013..5016, + 5012..5016, ), ], ), @@ -7907,33 +6441,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "source", - ), - 5037..5043, - ), - ( - Text( - "\"", - ), - 5044..5045, - ), - ( - Text( - ".", - ), - 5045..5046, - ), - ( - Text( - "env", - ), - 5046..5049, - ), - ( - Text( - "\"", + "source \".env\"", ), - 5049..5050, + 5037..5050, ), ( Text( @@ -7973,51 +6483,27 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "echo", + "echo \"", ), - 5075..5079, + 5075..5081, ), ( Text( - "\"", - ), - 5080..5081, - ), - ( - Escape( - "$", + "\\$", ), 5081..5083, ), - ( - Escape( - "{", - ), - 5083..5085, - ), - ( - Text( - "!", - ), - 5085..5086, - ), - ( - Text( - "var", - ), - 5086..5089, - ), ( Text( - "}", + "\\{", ), - 5089..5090, + 5083..5085, ), ( Text( - "\"", + "!var}\"", ), - 5090..5091, + 5085..5091, ), ( Text( @@ -8109,93 +6595,21 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "export", + "export \"", ), - 5161..5167, + 5161..5169, ), ( Text( - "\"", - ), - 5168..5169, - ), - ( - Escape( - "$", + "\\$", ), 5169..5171, ), ( Text( - "(", - ), - 5171..5172, - ), - ( - Text( - "xargs", - ), - 5172..5177, - ), - ( - Text( - "<", - ), - 5178..5179, - ), - ( - Text( - ".", - ), - 5180..5181, - ), - ( - Text( - "env", + "(xargs < .env)\" > /dev/null", ), - 5181..5184, - ), - ( - Text( - ")", - ), - 5184..5185, - ), - ( - Text( - "\"", - ), - 5185..5186, - ), - ( - Text( - ">", - ), - 5187..5188, - ), - ( - Text( - "/", - ), - 5189..5190, - ), - ( - Text( - "dev", - ), - 5190..5193, - ), - ( - Text( - "/", - ), - 5193..5194, - ), - ( - Text( - "null", - ), - 5194..5198, + 5171..5198, ), ( Text( @@ -8270,37 +6684,19 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "[", - ), - 5248..5249, - ), - ( - Text( - "[", + "[[ ! 
-z ", ), - 5249..5250, + 5248..5256, ), ( Text( - "!", - ), - 5251..5252, - ), - ( - CommandOption( - "-z", - ), - 5253..5255, - ), - ( - Escape( - "$", + "\\$", ), 5256..5258, ), ( - Escape( - "{", + Text( + "\\{", ), 5258..5260, ), @@ -8335,33 +6731,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "+", - ), - 5274..5275, - ), - ( - Text( - "z", - ), - 5275..5276, - ), - ( - Text( - "}", - ), - 5276..5277, - ), - ( - Text( - "]", - ), - 5278..5279, - ), - ( - Text( - "]", + "+z} ]]", ), - 5279..5280, + 5274..5280, ), ( Text( @@ -8505,13 +6877,13 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "readonly", + "readonly ", ), - 5400..5408, + 5400..5409, ), ( - Escape( - "$", + Text( + "\\$", ), 5409..5411, ), @@ -8540,19 +6912,13 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "=", + "=\"", ), - 5424..5425, + 5424..5426, ), ( Text( - "\"", - ), - 5425..5426, - ), - ( - Escape( - "$", + "\\$", ), 5426..5428, ), @@ -8581,45 +6947,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 5440..5441, - ), - ( - Text( - "2", - ), - 5442..5443, - ), - ( - Text( - ">", - ), - 5443..5444, - ), - ( - Text( - "/", - ), - 5445..5446, - ), - ( - Text( - "dev", - ), - 5446..5449, - ), - ( - Text( - "/", - ), - 5449..5450, - ), - ( - Text( - "null", + "\" 2> /dev/null", ), - 5450..5454, + 5440..5454, ), ( Text( @@ -8704,19 +7034,19 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "echo", + "echo ", ), - 5519..5523, + 5519..5524, ), ( - Escape( - "$", + Text( + "\\$", ), 5524..5526, ), ( - Escape( - "{", + Text( + "\\{", ), 5526..5528, ), @@ -8851,13 +7181,13 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "export", + "export ", ), - 5607..5613, + 5607..5614, ), ( - Escape( - "$", + Text( + "\\$", ), 5614..5616, ), @@ -8886,19 +7216,13 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "=", + "=\"", ), - 5629..5630, + 5629..5631, ), ( Text( - "\"", - ), - 5630..5631, - ), - ( - Escape( - "$", + "\\$", ), 5631..5633, ), @@ -8927,45 +7251,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 5645..5646, - ), - ( - Text( - "2", - ), - 5647..5648, - ), - ( - Text( - ">", - ), - 5648..5649, - ), - ( - Text( - "/", - ), - 5650..5651, - ), - ( - Text( - "dev", - ), - 5651..5654, - ), - ( - Text( - "/", - ), - 5654..5655, - ), - ( - Text( - "null", + "\" 2> /dev/null", ), - 5655..5659, + 5645..5659, ), ( Text( @@ -9050,19 +7338,19 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "echo", + "echo ", ), - 5719..5723, + 5719..5724, ), ( - Escape( - "$", + Text( + "\\$", ), 5724..5726, ), ( - Escape( - "{", + Text( + "\\{", ), 5726..5728, ), @@ -9180,9 +7468,9 @@ expression: parse(&tokenize(&stdlib)) ), ( Text( - "unset", + "unset ", ), - 5794..5799, + 5794..5800, ), ( Expression( diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__string_empty_interpolation.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__string_empty_interpolation.snap new file mode 100644 index 0000000..33dc232 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__string_empty_interpolation.snap @@ -0,0 +1,42 @@ +--- +source: tests/grammar/alpha034.rs +expression: "compiler.tokenize(r#\"\"test {} end\"\"#)" +--- +[ + ( + Token( + "\"", + ), + 0..1, + ), + ( + Token( + "test ", + ), + 1..6, + ), + ( + Token( + "{", + ), + 6..7, + ), + ( + Token( + "}", + ), + 7..8, + ), + ( + Token( + " end", + ), + 8..12, + ), + ( + Token( + "\"", + ), + 12..13, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__string_escapes.snap 
b/tests/grammar/snapshots/r#mod__grammar__alpha034__string_escapes.snap new file mode 100644 index 0000000..3f0c997 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__string_escapes.snap @@ -0,0 +1,60 @@ +--- +source: tests/grammar/alpha034.rs +expression: "compiler.tokenize(r#\"\"hello\\nworld\\t\\\"quote\\\"\"\"#)" +--- +[ + ( + Token( + "\"", + ), + 0..1, + ), + ( + Token( + "hello", + ), + 1..6, + ), + ( + Token( + "\\n", + ), + 6..8, + ), + ( + Token( + "world", + ), + 8..13, + ), + ( + Token( + "\\t", + ), + 13..15, + ), + ( + Token( + "\\\"", + ), + 15..17, + ), + ( + Token( + "quote", + ), + 17..22, + ), + ( + Token( + "\\\"", + ), + 22..24, + ), + ( + Token( + "\"", + ), + 24..25, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__string_multi_interpolation.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__string_multi_interpolation.snap new file mode 100644 index 0000000..8defaca --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__string_multi_interpolation.snap @@ -0,0 +1,84 @@ +--- +source: tests/grammar/alpha034.rs +expression: "compiler.tokenize(r#\"\"{a} and {b} and {c}\"\"#)" +--- +[ + ( + Token( + "\"", + ), + 0..1, + ), + ( + Token( + "{", + ), + 1..2, + ), + ( + Token( + "a", + ), + 2..3, + ), + ( + Token( + "}", + ), + 3..4, + ), + ( + Token( + " and ", + ), + 4..9, + ), + ( + Token( + "{", + ), + 9..10, + ), + ( + Token( + "b", + ), + 10..11, + ), + ( + Token( + "}", + ), + 11..12, + ), + ( + Token( + " and ", + ), + 12..17, + ), + ( + Token( + "{", + ), + 17..18, + ), + ( + Token( + "c", + ), + 18..19, + ), + ( + Token( + "}", + ), + 19..20, + ), + ( + Token( + "\"", + ), + 20..21, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__string_nested_braces.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__string_nested_braces.snap new file mode 100644 index 0000000..dc714d8 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__string_nested_braces.snap @@ -0,0 +1,120 @@ +--- +source: tests/grammar/alpha034.rs +expression: "compiler.tokenize(r#\"\"text {if true { \"inner\" } else { \"other\" }} end\"\"#)" +--- +[ + ( + Token( + "\"", + ), + 0..1, + ), + ( + Token( + "text ", + ), + 1..6, + ), + ( + Token( + "{", + ), + 6..7, + ), + ( + Token( + "if", + ), + 7..9, + ), + ( + Token( + "true", + ), + 10..14, + ), + ( + Token( + "{", + ), + 15..16, + ), + ( + Token( + "\"", + ), + 17..18, + ), + ( + Token( + "inner", + ), + 18..23, + ), + ( + Token( + "\"", + ), + 23..24, + ), + ( + Token( + "}", + ), + 25..26, + ), + ( + Token( + "else", + ), + 27..31, + ), + ( + Token( + "{", + ), + 32..33, + ), + ( + Token( + "\"", + ), + 34..35, + ), + ( + Token( + "other", + ), + 35..40, + ), + ( + Token( + "\"", + ), + 40..41, + ), + ( + Token( + "}", + ), + 42..43, + ), + ( + Token( + "}", + ), + 43..44, + ), + ( + Token( + " end", + ), + 44..48, + ), + ( + Token( + "\"", + ), + 48..49, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__string_trailing_backslash.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__string_trailing_backslash.snap new file mode 100644 index 0000000..b732480 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__string_trailing_backslash.snap @@ -0,0 +1,24 @@ +--- +source: tests/grammar/alpha034.rs +expression: "compiler.tokenize(r#\"\"test\\\"\"#)" +--- +[ + ( + Token( + "\"", + ), + 0..1, + ), + ( + Token( + "test", + ), + 1..5, + ), + ( + Token( + "\\\"", + ), + 5..7, + ), +] diff --git 
a/tests/grammar/snapshots/r#mod__grammar__alpha034__ternary-5.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__ternary-5.snap index dbdc05a..89d5177 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha034__ternary-5.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__ternary-5.snap @@ -53,7 +53,7 @@ expression: "parse(&tokenize(\"true then 1\"))" ], ), [ - found end of input at 11..11 expected '"."', '"as"', '"is"', '"*"', '"/"', '"%"', '"+"', '"-"', '">="', '">"', '"<="', '"<"', '"=="', '"!="', '"and"', '"or"', '".."', '"then"', or '"else"', + found end of input at 11..11 expected '"as"', '"is"', '"*"', '"/"', '"%"', '"+"', '"-"', '">="', '">"', '"<="', '"<"', '"=="', '"!="', '"and"', '"or"', '".."', '"then"', or '"else"', found end of input at 11..11 expected "expression", ], ) diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__text-2.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__text-2.snap index c572340..a198d53 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha034__text-2.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__text-2.snap @@ -1,6 +1,6 @@ --- source: tests/grammar/alpha034.rs -expression: "parse_unwrap(\"\\\"Hello, {name}!\\\"\")" +expression: "parse_unwrap(&tokenize(\"\\\"Hello, {name}!\\\"\"))" --- [ ( @@ -13,20 +13,11 @@ expression: "parse_unwrap(\"\\\"Hello, {name}!\\\"\")" ( Text( ( - "Hello", - 1..6, + "Hello, ", + 1..8, ), ), - 1..6, - ), - ( - Text( - ( - ",", - 6..7, - ), - ), - 6..7, + 1..8, ), ( Expression( diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__text-3.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__text-3.snap index a148009..c2a8eb7 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha034__text-3.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__text-3.snap @@ -1,6 +1,6 @@ --- source: tests/grammar/alpha034.rs -expression: "parse_unwrap(\"\\\"Hello, {name}! How are you?\\\"\")" +expression: "parse_unwrap(&tokenize(\"\\\"Hello, {name}! How are you?\\\"\"))" --- [ ( @@ -13,20 +13,11 @@ expression: "parse_unwrap(\"\\\"Hello, {name}! How are you?\\\"\")" ( Text( ( - "Hello", - 1..6, + "Hello, ", + 1..8, ), ), - 1..6, - ), - ( - Text( - ( - ",", - 6..7, - ), - ), - 6..7, + 1..8, ), ( Expression( @@ -45,47 +36,11 @@ expression: "parse_unwrap(\"\\\"Hello, {name}! How are you?\\\"\")" ( Text( ( - "!", - 14..15, - ), - ), - 14..15, - ), - ( - Text( - ( - "How", - 16..19, - ), - ), - 16..19, - ), - ( - Text( - ( - "are", - 20..23, - ), - ), - 20..23, - ), - ( - Text( - ( - "you", - 24..27, - ), - ), - 24..27, - ), - ( - Text( - ( - "?", - 27..28, + "! 
How are you?", + 14..28, ), ), - 27..28, + 14..28, ), ], ), diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__text-4.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__text-4.snap index 23e5bbd..c2260fd 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha034__text-4.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__text-4.snap @@ -1,6 +1,6 @@ --- source: tests/grammar/alpha034.rs -expression: "parse_unwrap(r#\"\"\\\"text in quotes\\\" \\\\\"\"#)" +expression: "parse_unwrap(&tokenize(r#\"\"\\\"text in quotes\\\" \\\\\"\"#))" --- [ ( @@ -11,9 +11,9 @@ expression: "parse_unwrap(r#\"\"\\\"text in quotes\\\" \\\\\"\"#)" Text( [ ( - Escape( + Text( ( - "\"", + "\\\"", 1..3, ), ), @@ -22,43 +22,34 @@ expression: "parse_unwrap(r#\"\"\\\"text in quotes\\\" \\\\\"\"#)" ( Text( ( - "text", - 3..7, + "text in quotes", + 3..17, ), ), - 3..7, + 3..17, ), ( Text( ( - "in", - 8..10, + "\\\"", + 17..19, ), ), - 8..10, + 17..19, ), ( Text( ( - "quotes", - 11..17, + " ", + 19..20, ), ), - 11..17, + 19..20, ), ( - Escape( - ( - "\"", - 17..19, - ), - ), - 17..19, - ), - ( - Escape( + Text( ( - "\\", + "\\\\", 20..22, ), ), diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__text-8.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__text-8.snap index 56e8601..4932194 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha034__text-8.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__text-8.snap @@ -13,9 +13,9 @@ expression: "parse(&tokenize(r#\"\"\\\"\"#))" Text( [ ( - Escape( + Text( ( - "\"", + "\\\"", 1..3, ), ), diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__text.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__text.snap index e3b0a51..94eb2a5 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha034__text.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__text.snap @@ -1,6 +1,6 @@ --- source: tests/grammar/alpha034.rs -expression: "parse_unwrap(\"\\\"Hello, world!\\\"\")" +expression: "parse_unwrap(&tokenize(\"\\\"Hello, world!\\\"\"))" --- [ ( @@ -13,38 +13,11 @@ expression: "parse_unwrap(\"\\\"Hello, world!\\\"\")" ( Text( ( - "Hello", - 1..6, + "Hello, world!", + 1..14, ), ), - 1..6, - ), - ( - Text( - ( - ",", - 6..7, - ), - ), - 6..7, - ), - ( - Text( - ( - "world", - 8..13, - ), - ), - 8..13, - ), - ( - Text( - ( - "!", - 13..14, - ), - ), - 13..14, + 1..14, ), ], ), diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__token_at_end.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__token_at_end.snap new file mode 100644 index 0000000..c76c534 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__token_at_end.snap @@ -0,0 +1,18 @@ +--- +source: tests/grammar/alpha034.rs +expression: "compiler.tokenize(\"let x\")" +--- +[ + ( + Token( + "let", + ), + 0..3, + ), + ( + Token( + "x", + ), + 4..5, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__triple_dollar.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__triple_dollar.snap new file mode 100644 index 0000000..7d7d69d --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__triple_dollar.snap @@ -0,0 +1,24 @@ +--- +source: tests/grammar/alpha034.rs +expression: "compiler.tokenize(\"$$$\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "$", + ), + 1..2, + ), + ( + Token( + "$", + ), + 2..3, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__unclosed_command.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__unclosed_command.snap new file 
mode 100644 index 0000000..b6c28f5 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__unclosed_command.snap @@ -0,0 +1,18 @@ +--- +source: tests/grammar/alpha034.rs +expression: "compiler.tokenize(\"$echo test\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "echo test", + ), + 1..10, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha034__unclosed_string.snap b/tests/grammar/snapshots/r#mod__grammar__alpha034__unclosed_string.snap new file mode 100644 index 0000000..67d7def --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha034__unclosed_string.snap @@ -0,0 +1,18 @@ +--- +source: tests/grammar/alpha034.rs +expression: "compiler.tokenize(r#\"\"unclosed\"#)" +--- +[ + ( + Token( + "\"", + ), + 0..1, + ), + ( + Token( + "unclosed", + ), + 1..9, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha035__command_backslash.snap b/tests/grammar/snapshots/r#mod__grammar__alpha035__command_backslash.snap new file mode 100644 index 0000000..1a653f1 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha035__command_backslash.snap @@ -0,0 +1,30 @@ +--- +source: tests/grammar/alpha035.rs +expression: "compiler.tokenize(r#\"$test\\n$\"#)" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "test", + ), + 1..5, + ), + ( + Token( + "\\n", + ), + 5..7, + ), + ( + Token( + "$", + ), + 7..8, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha035__command_escapes.snap b/tests/grammar/snapshots/r#mod__grammar__alpha035__command_escapes.snap new file mode 100644 index 0000000..ec6feaf --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha035__command_escapes.snap @@ -0,0 +1,48 @@ +--- +source: tests/grammar/alpha035.rs +expression: "compiler.tokenize(r#\"$echo\\ with\\ spaces$\"#)" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "echo", + ), + 1..5, + ), + ( + Token( + "\\ ", + ), + 5..7, + ), + ( + Token( + "with", + ), + 7..11, + ), + ( + Token( + "\\ ", + ), + 11..13, + ), + ( + Token( + "spaces", + ), + 13..19, + ), + ( + Token( + "$", + ), + 19..20, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha035__command_nested_braces.snap b/tests/grammar/snapshots/r#mod__grammar__alpha035__command_nested_braces.snap new file mode 100644 index 0000000..8f67430 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha035__command_nested_braces.snap @@ -0,0 +1,90 @@ +--- +source: tests/grammar/alpha035.rs +expression: "compiler.tokenize(r#\"$echo {if true { 1 } else { 0 }}$\"#)" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "echo ", + ), + 1..6, + ), + ( + Token( + "{", + ), + 6..7, + ), + ( + Token( + "if", + ), + 7..9, + ), + ( + Token( + "true", + ), + 10..14, + ), + ( + Token( + "{", + ), + 15..16, + ), + ( + Token( + "1", + ), + 17..18, + ), + ( + Token( + "}", + ), + 19..20, + ), + ( + Token( + "else", + ), + 21..25, + ), + ( + Token( + "{", + ), + 26..27, + ), + ( + Token( + "0", + ), + 28..29, + ), + ( + Token( + "}", + ), + 30..31, + ), + ( + Token( + "}", + ), + 31..32, + ), + ( + Token( + "$", + ), + 32..33, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha035__context_command_to_main.snap b/tests/grammar/snapshots/r#mod__grammar__alpha035__context_command_to_main.snap new file mode 100644 index 0000000..81c626e --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha035__context_command_to_main.snap @@ -0,0 +1,42 @@ +--- +source: tests/grammar/alpha035.rs +expression: "compiler.tokenize(\"$cmd {expr}$\")" 
+--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "cmd ", + ), + 1..5, + ), + ( + Token( + "{", + ), + 5..6, + ), + ( + Token( + "expr", + ), + 6..10, + ), + ( + Token( + "}", + ), + 10..11, + ), + ( + Token( + "$", + ), + 11..12, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha035__context_multiple_switches.snap b/tests/grammar/snapshots/r#mod__grammar__alpha035__context_multiple_switches.snap new file mode 100644 index 0000000..4cc876c --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha035__context_multiple_switches.snap @@ -0,0 +1,60 @@ +--- +source: tests/grammar/alpha035.rs +expression: "compiler.tokenize(r#\"\"a {$b$} c\"\"#)" +--- +[ + ( + Token( + "\"", + ), + 0..1, + ), + ( + Token( + "a ", + ), + 1..3, + ), + ( + Token( + "{", + ), + 3..4, + ), + ( + Token( + "$", + ), + 4..5, + ), + ( + Token( + "b", + ), + 5..6, + ), + ( + Token( + "$", + ), + 6..7, + ), + ( + Token( + "}", + ), + 7..8, + ), + ( + Token( + " c", + ), + 8..10, + ), + ( + Token( + "\"", + ), + 10..11, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha035__context_string_to_main.snap b/tests/grammar/snapshots/r#mod__grammar__alpha035__context_string_to_main.snap new file mode 100644 index 0000000..6317f8c --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha035__context_string_to_main.snap @@ -0,0 +1,48 @@ +--- +source: tests/grammar/alpha035.rs +expression: "compiler.tokenize(r#\"\"start {expr} end\"\"#)" +--- +[ + ( + Token( + "\"", + ), + 0..1, + ), + ( + Token( + "start ", + ), + 1..7, + ), + ( + Token( + "{", + ), + 7..8, + ), + ( + Token( + "expr", + ), + 8..12, + ), + ( + Token( + "}", + ), + 12..13, + ), + ( + Token( + " end", + ), + 13..17, + ), + ( + Token( + "\"", + ), + 17..18, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha035__dollar_at_end.snap b/tests/grammar/snapshots/r#mod__grammar__alpha035__dollar_at_end.snap new file mode 100644 index 0000000..b0fced7 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha035__dollar_at_end.snap @@ -0,0 +1,18 @@ +--- +source: tests/grammar/alpha035.rs +expression: "compiler.tokenize(\"text$\")" +--- +[ + ( + Token( + "text", + ), + 0..4, + ), + ( + Token( + "$", + ), + 4..5, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha035__dollar_dollar.snap b/tests/grammar/snapshots/r#mod__grammar__alpha035__dollar_dollar.snap new file mode 100644 index 0000000..1eed2a9 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha035__dollar_dollar.snap @@ -0,0 +1,18 @@ +--- +source: tests/grammar/alpha035.rs +expression: "compiler.tokenize(\"$$\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "$", + ), + 1..2, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha035__dollar_dollar_char.snap b/tests/grammar/snapshots/r#mod__grammar__alpha035__dollar_dollar_char.snap new file mode 100644 index 0000000..62f3977 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha035__dollar_dollar_char.snap @@ -0,0 +1,24 @@ +--- +source: tests/grammar/alpha035.rs +expression: "compiler.tokenize(\"$$x\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "$", + ), + 1..2, + ), + ( + Token( + "x", + ), + 2..3, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha035__dollar_dollar_space.snap b/tests/grammar/snapshots/r#mod__grammar__alpha035__dollar_dollar_space.snap new file mode 100644 index 0000000..34f8a30 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha035__dollar_dollar_space.snap @@ 
-0,0 +1,18 @@ +--- +source: tests/grammar/alpha035.rs +expression: "compiler.tokenize(\"$$ \")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "$", + ), + 1..2, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha035__double_dollar_brace.snap b/tests/grammar/snapshots/r#mod__grammar__alpha035__double_dollar_brace.snap new file mode 100644 index 0000000..c159a1b --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha035__double_dollar_brace.snap @@ -0,0 +1,42 @@ +--- +source: tests/grammar/alpha035.rs +expression: "compiler.tokenize(\"$$ {var}$\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "$", + ), + 1..2, + ), + ( + Token( + "{", + ), + 3..4, + ), + ( + Token( + "var", + ), + 4..7, + ), + ( + Token( + "}", + ), + 7..8, + ), + ( + Token( + "$", + ), + 8..9, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha035__double_dollar_end.snap b/tests/grammar/snapshots/r#mod__grammar__alpha035__double_dollar_end.snap new file mode 100644 index 0000000..1eed2a9 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha035__double_dollar_end.snap @@ -0,0 +1,18 @@ +--- +source: tests/grammar/alpha035.rs +expression: "compiler.tokenize(\"$$\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "$", + ), + 1..2, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha035__double_dollar_no_space.snap b/tests/grammar/snapshots/r#mod__grammar__alpha035__double_dollar_no_space.snap new file mode 100644 index 0000000..f2bb93b --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha035__double_dollar_no_space.snap @@ -0,0 +1,24 @@ +--- +source: tests/grammar/alpha035.rs +expression: "compiler.tokenize(\"$$failed\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "$", + ), + 1..2, + ), + ( + Token( + "failed", + ), + 2..8, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha035__double_dollar_whitespace.snap b/tests/grammar/snapshots/r#mod__grammar__alpha035__double_dollar_whitespace.snap new file mode 100644 index 0000000..3a8540e --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha035__double_dollar_whitespace.snap @@ -0,0 +1,18 @@ +--- +source: tests/grammar/alpha035.rs +expression: "compiler.tokenize(\"$$ \\n\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "$", + ), + 1..2, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha035__empty_input.snap b/tests/grammar/snapshots/r#mod__grammar__alpha035__empty_input.snap new file mode 100644 index 0000000..5d6d0d5 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha035__empty_input.snap @@ -0,0 +1,5 @@ +--- +source: tests/grammar/alpha035.rs +expression: "compiler.tokenize(\"\")" +--- +[] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha035__empty_string.snap b/tests/grammar/snapshots/r#mod__grammar__alpha035__empty_string.snap new file mode 100644 index 0000000..6299bf2 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha035__empty_string.snap @@ -0,0 +1,18 @@ +--- +source: tests/grammar/alpha035.rs +expression: "compiler.tokenize(r#\"\"\"\"#)" +--- +[ + ( + Token( + "\"", + ), + 0..1, + ), + ( + Token( + "\"", + ), + 1..2, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha035__just_dollar.snap b/tests/grammar/snapshots/r#mod__grammar__alpha035__just_dollar.snap new file mode 100644 index 0000000..15df301 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha035__just_dollar.snap @@ -0,0 +1,12 @@ +--- +source: 
tests/grammar/alpha035.rs +expression: "compiler.tokenize(\"$\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha035__mismatched_braces.snap b/tests/grammar/snapshots/r#mod__grammar__alpha035__mismatched_braces.snap new file mode 100644 index 0000000..3d7e67a --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha035__mismatched_braces.snap @@ -0,0 +1,36 @@ +--- +source: tests/grammar/alpha035.rs +expression: "compiler.tokenize(r#\"\"{{{\"\"#)" +--- +[ + ( + Token( + "\"", + ), + 0..1, + ), + ( + Token( + "{", + ), + 1..2, + ), + ( + Token( + "{", + ), + 2..3, + ), + ( + Token( + "{", + ), + 3..4, + ), + ( + Token( + "\"", + ), + 4..5, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha035__numbers.snap b/tests/grammar/snapshots/r#mod__grammar__alpha035__numbers.snap new file mode 100644 index 0000000..632e83b --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha035__numbers.snap @@ -0,0 +1,63 @@ +--- +source: tests/grammar/alpha035.rs +expression: parse_unwrap(&tokenize(input)) +--- +[ + ( + Statement( + ( + Expression( + ( + Number( + ( + 2.0, + 5..6, + ), + ), + 5..6, + ), + ), + 5..6, + ), + ), + 5..6, + ), + ( + Statement( + ( + Expression( + ( + Number( + ( + 2.4, + 11..14, + ), + ), + 11..14, + ), + ), + 11..14, + ), + ), + 11..14, + ), + ( + Statement( + ( + Expression( + ( + Number( + ( + 0.2, + 19..21, + ), + ), + 19..21, + ), + ), + 19..21, + ), + ), + 19..21, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha035__single_char.snap b/tests/grammar/snapshots/r#mod__grammar__alpha035__single_char.snap new file mode 100644 index 0000000..33578b9 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha035__single_char.snap @@ -0,0 +1,12 @@ +--- +source: tests/grammar/alpha035.rs +expression: "compiler.tokenize(\"x\")" +--- +[ + ( + Token( + "x", + ), + 0..1, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha035__single_dollar_cmd.snap b/tests/grammar/snapshots/r#mod__grammar__alpha035__single_dollar_cmd.snap new file mode 100644 index 0000000..b2b5cc4 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha035__single_dollar_cmd.snap @@ -0,0 +1,24 @@ +--- +source: tests/grammar/alpha035.rs +expression: "compiler.tokenize(\"$echo$\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "echo", + ), + 1..5, + ), + ( + Token( + "$", + ), + 5..6, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha035__stdlib_date.snap b/tests/grammar/snapshots/r#mod__grammar__alpha035__stdlib_date.snap index 422daef..48d3481 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha035__stdlib_date.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha035__stdlib_date.snap @@ -1121,56 +1121,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "%", - 2939..2940, + "%FT%T%Z", + 2939..2946, ), ), - 2939..2940, - ), - ( - Text( - ( - "FT", - 2940..2942, - ), - ), - 2940..2942, - ), - ( - Text( - ( - "%", - 2942..2943, - ), - ), - 2942..2943, - ), - ( - Text( - ( - "T", - 2943..2944, - ), - ), - 2943..2944, - ), - ( - Text( - ( - "%", - 2944..2945, - ), - ), - 2944..2945, - ), - ( - Text( - ( - "Z", - 2945..2946, - ), - ), - 2945..2946, + 2939..2946, ), ], ), @@ -1238,63 +1193,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "date", - ), - 2982..2986, - ), - ( - Text( - "+", - ), - 2987..2988, - ), - ( - Text( - "\"", - ), - 2988..2989, - ), - ( - Text( - "%", + "date +\"%FT%T%Z\"", ), - 2989..2990, - ), - ( - Text( 
- "FT", - ), - 2990..2992, - ), - ( - Text( - "%", - ), - 2992..2993, - ), - ( - Text( - "T", - ), - 2993..2994, - ), - ( - Text( - "%", - ), - 2994..2995, - ), - ( - Text( - "Z", - ), - 2995..2996, - ), - ( - Text( - "\"", - ), - 2996..2997, + 2982..2997, ), ( Text( @@ -1363,27 +1264,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "date", - ), - 3030..3034, - ), - ( - CommandOption( - "--utc", - ), - 3035..3040, - ), - ( - CommandOption( - "-d", + "date --utc -d \"", ), - 3041..3043, - ), - ( - Text( - "\"", - ), - 3044..3045, + 3030..3045, ), ( Expression( @@ -1401,21 +1284,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 3051..3052, - ), - ( - Text( - "+", + "\" +\"", ), - 3053..3054, - ), - ( - Text( - "\"", - ), - 3054..3055, + 3051..3055, ), ( Expression( @@ -1495,21 +1366,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "date", - ), - 3096..3100, - ), - ( - CommandOption( - "-d", - ), - 3101..3103, - ), - ( - Text( - "\"", + "date -d \"", ), - 3104..3105, + 3096..3105, ), ( Expression( @@ -1527,21 +1386,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 3111..3112, - ), - ( - Text( - "+", - ), - 3113..3114, - ), - ( - Text( - "\"", + "\" +\"", ), - 3114..3115, + 3111..3115, ), ( Expression( @@ -1668,27 +1515,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "date", - ), - 3279..3283, - ), - ( - Text( - "+", - ), - 3284..3285, - ), - ( - Text( - "%", + "date +%s", ), - 3285..3286, - ), - ( - Text( - "s", - ), - 3286..3287, + 3279..3287, ), ( Text( @@ -2057,63 +1886,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "date", - ), - 3620..3624, - ), - ( - Text( - "+", - ), - 3625..3626, - ), - ( - Text( - "\"", - ), - 3626..3627, - ), - ( - Text( - "%", - ), - 3627..3628, - ), - ( - Text( - "FT", - ), - 3628..3630, - ), - ( - Text( - "%", - ), - 3630..3631, - ), - ( - Text( - "T", - ), - 3631..3632, - ), - ( - Text( - "%", - ), - 3632..3633, - ), - ( - Text( - "Z", - ), - 3633..3634, - ), - ( - Text( - "\"", + "date +\"%FT%T%Z\"", ), - 3634..3635, + 3620..3635, ), ( Text( @@ -2177,20 +1952,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "%", - 3677..3678, - ), - ), - 3677..3678, - ), - ( - Text( - ( - "F", - 3678..3679, + "%F", + 3677..3679, ), ), - 3678..3679, + 3677..3679, ), ], ), @@ -2227,6 +1993,15 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), 3664..3694, ), + ( + Text( + ( + " ", + 3694..3695, + ), + ), + 3694..3695, + ), ( Expression( ( @@ -2241,6 +2016,15 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), 3695..3700, ), + ( + Text( + ( + " ", + 3700..3701, + ), + ), + 3700..3701, + ), ( Expression( ( @@ -2257,20 +2041,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "%", - 3714..3715, - ), - ), - 3714..3715, - ), - ( - Text( - ( - "T", - 3715..3716, + "%T", + 3714..3716, ), ), - 3715..3716, + 3714..3716, ), ], ), @@ -2645,20 +2420,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "%", - 4158..4159, + "%s", + 4158..4160, ), ), - 4158..4159, - ), - ( - Text( - ( - "s", - 4159..4160, - ), - ), - 4159..4160, + 4158..4160, ), ], ), @@ -2737,20 +2503,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "%", - 4218..4219, - ), - ), - 4218..4219, - ), - ( - Text( - ( - "s", - 4219..4220, + "%s", + 4218..4220, ), ), - 4219..4220, + 4218..4220, ), ], ), diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha035__stdlib_env.snap b/tests/grammar/snapshots/r#mod__grammar__alpha035__stdlib_env.snap index 74ec417..11eadf2 100644 
--- a/tests/grammar/snapshots/r#mod__grammar__alpha035__stdlib_env.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha035__stdlib_env.snap @@ -124,20 +124,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - ".", - 201..202, + ".env", + 201..205, ), ), - 201..202, - ), - ( - Text( - ( - "env", - 202..205, - ), - ), - 202..205, + 201..205, ), ], ), @@ -183,51 +174,27 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", + "echo \"", ), - 239..243, + 239..245, ), ( Text( - "\"", - ), - 244..245, - ), - ( - Escape( - "$", + "\\$", ), 245..247, ), - ( - Escape( - "{", - ), - 247..249, - ), - ( - Text( - "!", - ), - 249..250, - ), - ( - Text( - "var", - ), - 250..253, - ), ( Text( - "}", + "\\{", ), - 253..254, + 247..249, ), ( Text( - "\"", + "!var}\"", ), - 254..255, + 249..255, ), ( Text( @@ -363,15 +330,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "source", - ), - 346..352, - ), - ( - Text( - "\"", + "source \"", ), - 353..354, + 346..354, ), ( Expression( @@ -431,51 +392,27 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", + "echo \"", ), - 386..390, + 386..392, ), ( Text( - "\"", - ), - 391..392, - ), - ( - Escape( - "$", + "\\$", ), 392..394, ), - ( - Escape( - "{", - ), - 394..396, - ), - ( - Text( - "!", - ), - 396..397, - ), ( Text( - "var", + "\\{", ), - 397..400, - ), - ( - Text( - "}", - ), - 400..401, + 394..396, ), ( Text( - "\"", + "!var}\"", ), - 401..402, + 396..402, ), ( Text( @@ -579,20 +516,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - ".", - 520..521, - ), - ), - 520..521, - ), - ( - Text( - ( - "env", - 521..524, + ".env", + 520..524, ), ), - 521..524, + 520..524, ), ], ), @@ -628,39 +556,21 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "export", + "export \"", ), - 547..553, + 547..555, ), ( Text( - "\"", - ), - 554..555, - ), - ( - Escape( - "$", + "\\$", ), 555..557, ), ( Text( - "(", - ), - 557..558, - ), - ( - Text( - "xargs", - ), - 558..563, - ), - ( - Text( - "<", + "(xargs < ", ), - 564..565, + 557..566, ), ( Expression( @@ -678,45 +588,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - ")", + ")\" > /dev/null", ), - 572..573, - ), - ( - Text( - "\"", - ), - 573..574, - ), - ( - Text( - ">", - ), - 575..576, - ), - ( - Text( - "/", - ), - 577..578, - ), - ( - Text( - "dev", - ), - 578..581, - ), - ( - Text( - "/", - ), - 581..582, - ), - ( - Text( - "null", - ), - 582..586, + 572..586, ), ( Text( @@ -807,37 +681,19 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "[", - ), - 691..692, - ), - ( - Text( - "[", + "[[ ! 
-z ", ), - 692..693, + 691..699, ), ( Text( - "!", - ), - 694..695, - ), - ( - CommandOption( - "-z", - ), - 696..698, - ), - ( - Escape( - "$", + "\\$", ), 699..701, ), ( - Escape( - "{", + Text( + "\\{", ), 701..703, ), @@ -872,33 +728,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "+", - ), - 717..718, - ), - ( - Text( - "z", - ), - 718..719, - ), - ( - Text( - "}", - ), - 719..720, - ), - ( - Text( - "]", - ), - 721..722, - ), - ( - Text( - "]", + "+z} ]]", ), - 722..723, + 717..723, ), ( Text( @@ -1058,13 +890,13 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "readonly", + "readonly ", ), - 888..896, + 888..897, ), ( - Escape( - "$", + Text( + "\\$", ), 897..899, ), @@ -1093,19 +925,13 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "=", + "=\"", ), - 912..913, + 912..914, ), ( Text( - "\"", - ), - 913..914, - ), - ( - Escape( - "$", + "\\$", ), 914..916, ), @@ -1134,45 +960,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 928..929, - ), - ( - Text( - "2", - ), - 930..931, - ), - ( - Text( - ">", - ), - 931..932, - ), - ( - Text( - "/", - ), - 933..934, - ), - ( - Text( - "dev", - ), - 934..937, - ), - ( - Text( - "/", - ), - 937..938, - ), - ( - Text( - "null", + "\" 2> /dev/null", ), - 938..942, + 928..942, ), ( Text( @@ -1273,19 +1063,19 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", + "echo ", ), - 1052..1056, + 1052..1057, ), ( - Escape( - "$", + Text( + "\\$", ), 1057..1059, ), ( - Escape( - "{", + Text( + "\\{", ), 1059..1061, ), @@ -1436,13 +1226,13 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "export", + "export ", ), - 1185..1191, + 1185..1192, ), ( - Escape( - "$", + Text( + "\\$", ), 1192..1194, ), @@ -1471,19 +1261,13 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "=", + "=\"", ), - 1207..1208, + 1207..1209, ), ( Text( - "\"", - ), - 1208..1209, - ), - ( - Escape( - "$", + "\\$", ), 1209..1211, ), @@ -1512,45 +1296,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 1223..1224, - ), - ( - Text( - "2", - ), - 1225..1226, - ), - ( - Text( - ">", - ), - 1226..1227, - ), - ( - Text( - "/", - ), - 1228..1229, - ), - ( - Text( - "dev", - ), - 1229..1232, - ), - ( - Text( - "/", - ), - 1232..1233, - ), - ( - Text( - "null", + "\" 2> /dev/null", ), - 1233..1237, + 1223..1237, ), ( Text( @@ -1651,19 +1399,19 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", + "echo ", ), - 1342..1346, + 1342..1347, ), ( - Escape( - "$", + Text( + "\\$", ), 1347..1349, ), ( - Escape( - "{", + Text( + "\\{", ), 1349..1351, ), @@ -1797,9 +1545,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "unset", + "unset ", ), - 1465..1470, + 1465..1471, ), ( Expression( @@ -1909,45 +1657,21 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "[", + "[ -x \"", ), - 1561..1562, - ), - ( - CommandOption( - "-x", - ), - 1563..1565, + 1561..1567, ), ( Text( - "\"", - ), - 1566..1567, - ), - ( - Escape( - "$", + "\\$", ), 1567..1569, ), ( Text( - "(", - ), - 1569..1570, - ), - ( - Text( - "command", + "(command -v ", ), - 1570..1577, - ), - ( - CommandOption( - "-v", - ), - 1578..1580, + 1569..1581, ), ( Expression( @@ -1965,21 +1689,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - ")", - ), - 1590..1591, - ), - ( - Text( - "\"", - ), - 1591..1592, - ), - ( - Text( - "]", + ")\" ]", ), - 1593..1594, + 1590..1594, ), ( Text( @@ -2127,19 +1839,13 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "printf", + 
"printf \"", ), - 1740..1746, + 1740..1748, ), ( Text( - "\"", - ), - 1747..1748, - ), - ( - Escape( - "$", + "\\$", ), 1748..1750, ), @@ -2234,9 +1940,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) Text( [ ( - Escape( + Text( ( - "$", + "\\$", 1798..1800, ), ), @@ -2397,83 +2103,47 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "[", - 2061..2062, - ), - ), - 2061..2062, - ), - ( - Escape( - ( - "x1b", - 2062..2066, - ), - ), - 2062..2066, - ), - ( - Text( - ( - "[", - 2066..2067, - ), - ), - 2066..2067, - ), - ( - Text( - ( - "1mY", - 2067..2070, + " [", + 2060..2062, ), ), - 2067..2070, + 2060..2062, ), ( Text( ( - "/", - 2070..2071, - ), - ), - 2070..2071, - ), - ( - Escape( - ( - "x1b", - 2071..2075, + "\\x", + 2062..2064, ), ), - 2071..2075, + 2062..2064, ), ( Text( ( - "[", - 2075..2076, + "1b[1mY/", + 2064..2071, ), ), - 2075..2076, + 2064..2071, ), ( Text( ( - "0mn", - 2076..2079, + "\\x", + 2071..2073, ), ), - 2076..2079, + 2071..2073, ), ( Text( ( - "]", - 2079..2080, + "1b[0mn]", + 2073..2080, ), ), - 2079..2080, + 2073..2080, ), ], ), @@ -2489,92 +2159,47 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "[", - 2089..2090, + " [y/", + 2088..2092, ), ), - 2089..2090, + 2088..2092, ), ( Text( ( - "y", - 2090..2091, + "\\x", + 2092..2094, ), ), - 2090..2091, + 2092..2094, ), ( Text( ( - "/", - 2091..2092, - ), - ), - 2091..2092, - ), - ( - Escape( - ( - "x1b", - 2092..2096, + "1b[1mN", + 2094..2100, ), ), - 2092..2096, + 2094..2100, ), ( Text( ( - "[", - 2096..2097, + "\\x", + 2100..2102, ), ), - 2096..2097, + 2100..2102, ), ( Text( ( - "1mN", - 2097..2100, - ), - ), - 2097..2100, - ), - ( - Escape( - ( - "x1b", - 2100..2104, + "1b[0m]", + 2102..2108, ), ), - 2100..2104, - ), - ( - Text( - ( - "[", - 2104..2105, - ), - ), - 2104..2105, - ), - ( - Text( - ( - "0m", - 2105..2107, - ), - ), - 2105..2107, - ), - ( - Text( - ( - "]", - 2107..2108, - ), - ), - 2107..2108, + 2102..2108, ), ], ), @@ -2614,33 +2239,21 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "printf", + "printf \"", ), - 2132..2138, + 2132..2140, ), ( Text( - "\"", + "\\x", ), - 2139..2140, - ), - ( - Escape( - "x1b", - ), - 2140..2144, + 2140..2142, ), ( Text( - "[", + "1b[1m", ), - 2144..2145, - ), - ( - Text( - "1m", - ), - 2145..2147, + 2142..2147, ), ( Expression( @@ -2656,23 +2269,17 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), 2147..2155, ), - ( - Escape( - "x1b", - ), - 2155..2159, - ), ( Text( - "[", + "\\x", ), - 2159..2160, + 2155..2157, ), ( Text( - "0m", + "1b[0m", ), - 2160..2162, + 2157..2162, ), ( Expression( @@ -2722,27 +2329,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "read", - ), - 2190..2194, - ), - ( - CommandOption( - "-s", - ), - 2195..2197, - ), - ( - CommandOption( - "-n", - ), - 2198..2200, - ), - ( - Text( - "1", + "read -s -n 1", ), - 2201..2202, + 2190..2202, ), ( Text( @@ -2772,19 +2361,13 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "printf", + "printf \"", ), - 2213..2219, + 2213..2221, ), ( Text( - "\"", - ), - 2220..2221, - ), - ( - Escape( - "n", + "\\n", ), 2221..2223, ), @@ -2852,13 +2435,13 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", + "echo ", ), - 2263..2267, + 2263..2268, ), ( - Escape( - "$", + Text( + "\\$", ), 2268..2270, ), @@ -3067,9 +2650,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "eval", + "eval ", ), - 2438..2442, + 2438..2443, ), ( Expression( @@ -3208,15 +2791,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "exit", + "exit \"", 
), - 2545..2549, - ), - ( - Text( - "\"", - ), - 2550..2551, + 2545..2551, ), ( Expression( @@ -3322,15 +2899,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "id", - ), - 2672..2674, - ), - ( - CommandOption( - "-u", + "id -u", ), - 2675..2677, + 2672..2677, ), ( Text( @@ -3553,21 +3124,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "=", - ), - 2864..2865, - ), - ( - Text( - "(", + "=(\"", ), - 2865..2866, - ), - ( - Text( - "\"", - ), - 2866..2867, + 2864..2867, ), ( Expression( @@ -3585,25 +3144,19 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" \"", ), - 2875..2876, + 2875..2878, ), ( Text( - "\"", - ), - 2877..2878, - ), - ( - Escape( - "$", + "\\$", ), 2878..2880, ), ( - Escape( - "{", + Text( + "\\{", ), 2880..2882, ), @@ -3632,39 +3185,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "[", + "[@]}\")", ), - 2895..2896, - ), - ( - Text( - "@", - ), - 2896..2897, - ), - ( - Text( - "]", - ), - 2897..2898, - ), - ( - Text( - "}", - ), - 2898..2899, - ), - ( - Text( - "\"", - ), - 2899..2900, - ), - ( - Text( - ")", - ), - 2900..2901, + 2895..2901, ), ( Text( @@ -3699,25 +3222,19 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "printf", + "printf \"", ), - 2915..2921, + 2915..2923, ), ( Text( - "\"", - ), - 2922..2923, - ), - ( - Escape( - "$", + "\\$", ), 2923..2925, ), ( - Escape( - "{", + Text( + "\\{", ), 2925..2927, ), @@ -3746,33 +3263,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "[", - ), - 2940..2941, - ), - ( - Text( - "@", + "[@]}\"", ), - 2941..2942, - ), - ( - Text( - "]", - ), - 2942..2943, - ), - ( - Text( - "}", - ), - 2943..2944, - ), - ( - Text( - "\"", - ), - 2944..2945, + 2940..2945, ), ( Text( @@ -3873,13 +3366,13 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", + "echo ", ), - 3056..3060, + 3056..3061, ), ( - Escape( - "$", + Text( + "\\$", ), 3061..3063, ), @@ -3908,43 +3401,19 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "|", + " | sed -e 's/", ), - 3077..3078, + 3076..3089, ), ( Text( - "sed", - ), - 3079..3082, - ), - ( - CommandOption( - "-e", - ), - 3083..3085, - ), - ( - Text( - "'s", - ), - 3086..3088, - ), - ( - Text( - "/", - ), - 3088..3089, - ), - ( - Escape( - "\\", + "\\\\", ), 3089..3091, ), ( - Escape( - "\\", + Text( + "\\\\", ), 3091..3093, ), @@ -3954,107 +3423,35 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), 3093..3094, ), - ( - Escape( - "\\", - ), - 3094..3096, - ), - ( - Escape( - "\\", - ), - 3096..3098, - ), - ( - Escape( - "\\", - ), - 3098..3100, - ), - ( - Escape( - "\\", - ), - 3100..3102, - ), - ( - Text( - "/", - ), - 3102..3103, - ), ( Text( - "g'", - ), - 3103..3105, - ), - ( - CommandOption( - "-e", + "\\\\", ), - 3106..3108, - ), - ( - Text( - "\"", - ), - 3109..3110, - ), - ( - Text( - "s", - ), - 3110..3111, - ), - ( - Text( - "/", - ), - 3111..3112, - ), - ( - Text( - "%", - ), - 3112..3113, - ), - ( - Text( - "/", - ), - 3113..3114, - ), - ( - Text( - "%", - ), - 3114..3115, + 3094..3096, ), ( Text( - "%", + "\\\\", ), - 3115..3116, + 3096..3098, ), ( Text( - "/", + "\\\\", ), - 3116..3117, + 3098..3100, ), ( Text( - "g", + "\\\\", ), - 3117..3118, + 3100..3102, ), ( Text( - "\"", + "/g' -e \"s/%/%%/g\"", ), - 3118..3119, + 3102..3119, ), ( Text( @@ -4194,22 +3591,22 @@ expression: parse_unwrap(&tokenize(&stdlib)) Text( [ ( - Escape( + Text( ( - "x1b", - 3264..3268, + "\\x", + 3264..3266, ), ), - 3264..3268, + 3264..3266, ), ( Text( ( - "[", - 3268..3269, + "1b[", + 3266..3269, ), ), - 
3268..3269, + 3266..3269, ), ( Expression( @@ -4307,32 +3704,23 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), 3287..3311, ), - ( - Escape( - ( - "x1b", - 3311..3315, - ), - ), - 3311..3315, - ), ( Text( ( - "[", - 3315..3316, + "\\x", + 3311..3313, ), ), - 3315..3316, + 3311..3313, ), ( Text( ( - "0m", - 3316..3318, + "1b[0m", + 3313..3318, ), ), - 3316..3318, + 3313..3318, ), ], ), @@ -4413,32 +3801,23 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( [ - ( - Escape( - ( - "x1b", - 3402..3406, - ), - ), - 3402..3406, - ), ( Text( ( - "[", - 3406..3407, + "\\x", + 3402..3404, ), ), - 3406..3407, + 3402..3404, ), ( Text( ( - "1m", - 3407..3409, + "1b[1m", + 3404..3409, ), ), - 3407..3409, + 3404..3409, ), ( Expression( @@ -4467,32 +3846,23 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), 3409..3433, ), - ( - Escape( - ( - "x1b", - 3433..3437, - ), - ), - 3433..3437, - ), ( Text( ( - "[", - 3437..3438, + "\\x", + 3433..3435, ), ), - 3437..3438, + 3433..3435, ), ( Text( ( - "0m", - 3438..3440, + "1b[0m", + 3435..3440, ), ), - 3438..3440, + 3435..3440, ), ], ), @@ -4573,32 +3943,23 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( [ - ( - Escape( - ( - "x1b", - 3528..3532, - ), - ), - 3528..3532, - ), ( Text( ( - "[", - 3532..3533, + "\\x", + 3528..3530, ), ), - 3532..3533, + 3528..3530, ), ( Text( ( - "3m", - 3533..3535, + "1b[3m", + 3530..3535, ), ), - 3533..3535, + 3530..3535, ), ( Expression( @@ -4627,32 +3988,23 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), 3535..3559, ), - ( - Escape( - ( - "x1b", - 3559..3563, - ), - ), - 3559..3563, - ), ( Text( ( - "[", - 3563..3564, + "\\x", + 3559..3561, ), ), - 3563..3564, + 3559..3561, ), ( Text( ( - "0m", - 3564..3566, + "1b[0m", + 3561..3566, ), ), - 3564..3566, + 3561..3566, ), ], ), @@ -4733,32 +4085,23 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( [ - ( - Escape( - ( - "x1b", - 3662..3666, - ), - ), - 3662..3666, - ), ( Text( ( - "[", - 3666..3667, + "\\x", + 3662..3664, ), ), - 3666..3667, + 3662..3664, ), ( Text( ( - "4m", - 3667..3669, + "1b[4m", + 3664..3669, ), ), - 3667..3669, + 3664..3669, ), ( Expression( @@ -4787,32 +4130,23 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), 3669..3693, ), - ( - Escape( - ( - "x1b", - 3693..3697, - ), - ), - 3693..3697, - ), ( Text( ( - "[", - 3697..3698, + "\\x", + 3693..3695, ), ), - 3697..3698, + 3693..3695, ), ( Text( ( - "0m", - 3698..3700, + "1b[0m", + 3695..3700, ), ), - 3698..3700, + 3695..3700, ), ], ), @@ -4914,22 +4248,22 @@ expression: parse_unwrap(&tokenize(&stdlib)) Text( [ ( - Escape( + Text( ( - "x1b", - 3811..3815, + "\\x", + 3811..3813, ), ), - 3811..3815, + 3811..3813, ), ( Text( ( - "[", - 3815..3816, + "1b[", + 3813..3816, ), ), - 3815..3816, + 3813..3816, ), ( Expression( @@ -4961,61 +4295,34 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "m", - 3831..3832, - ), - ), - 3831..3832, - ), - ( - Text( - ( - "%", - 3832..3833, + "m%s", + 3831..3834, ), ), - 3832..3833, + 3831..3834, ), ( Text( ( - "s", - 3833..3834, + "\\x", + 3834..3836, ), ), - 3833..3834, - ), - ( - Escape( - ( - "x1b", - 3834..3838, - ), - ), - 3834..3838, + 3834..3836, ), ( Text( ( - "[", - 3838..3839, + "1b[0m", + 3836..3841, ), ), - 3838..3839, + 3836..3841, ), ( Text( ( - "0m", - 3839..3841, - ), - ), - 3839..3841, - ), - ( - Escape( - ( - "n", + "\\n", 3841..3843, ), ), @@ -5123,136 +4430,46 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( [ - ( - Escape( - ( - "x1b", - 3938..3942, - ), - ), - 3938..3942, - ), - ( - Text( - ( - "[", - 3942..3943, - ), - 
), - 3942..3943, - ), - ( - Text( - ( - "1", - 3943..3944, - ), - ), - 3943..3944, - ), - ( - Text( - ( - ";", - 3944..3945, - ), - ), - 3944..3945, - ), - ( - Text( - ( - "3", - 3945..3946, - ), - ), - 3945..3946, - ), ( Text( ( - ";", - 3946..3947, + "\\x", + 3938..3940, ), ), - 3946..3947, + 3938..3940, ), ( Text( ( - "97", - 3947..3949, + "1b[1;3;97;44m %s ", + 3940..3957, ), ), - 3947..3949, + 3940..3957, ), ( Text( ( - ";", - 3949..3950, + "\\x", + 3957..3959, ), ), - 3949..3950, + 3957..3959, ), ( Text( ( - "44m", - 3950..3953, + "1b[0m", + 3959..3964, ), ), - 3950..3953, + 3959..3964, ), ( Text( ( - "%", - 3954..3955, - ), - ), - 3954..3955, - ), - ( - Text( - ( - "s", - 3955..3956, - ), - ), - 3955..3956, - ), - ( - Escape( - ( - "x1b", - 3957..3961, - ), - ), - 3957..3961, - ), - ( - Text( - ( - "[", - 3961..3962, - ), - ), - 3961..3962, - ), - ( - Text( - ( - "0m", - 3962..3964, - ), - ), - 3962..3964, - ), - ( - Escape( - ( - "n", + "\\n", 3964..3966, ), ), @@ -5360,136 +4577,46 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( [ - ( - Escape( - ( - "x1b", - 4067..4071, - ), - ), - 4067..4071, - ), - ( - Text( - ( - "[", - 4071..4072, - ), - ), - 4071..4072, - ), - ( - Text( - ( - "1", - 4072..4073, - ), - ), - 4072..4073, - ), - ( - Text( - ( - ";", - 4073..4074, - ), - ), - 4073..4074, - ), - ( - Text( - ( - "3", - 4074..4075, - ), - ), - 4074..4075, - ), - ( - Text( - ( - ";", - 4075..4076, - ), - ), - 4075..4076, - ), ( Text( ( - "97", - 4076..4078, + "\\x", + 4067..4069, ), ), - 4076..4078, + 4067..4069, ), ( Text( ( - ";", - 4078..4079, + "1b[1;3;97;42m %s ", + 4069..4086, ), ), - 4078..4079, + 4069..4086, ), ( Text( ( - "42m", - 4079..4082, + "\\x", + 4086..4088, ), ), - 4079..4082, + 4086..4088, ), ( Text( ( - "%", - 4083..4084, + "1b[0m", + 4088..4093, ), ), - 4083..4084, + 4088..4093, ), ( Text( ( - "s", - 4084..4085, - ), - ), - 4084..4085, - ), - ( - Escape( - ( - "x1b", - 4086..4090, - ), - ), - 4086..4090, - ), - ( - Text( - ( - "[", - 4090..4091, - ), - ), - 4090..4091, - ), - ( - Text( - ( - "0m", - 4091..4093, - ), - ), - 4091..4093, - ), - ( - Escape( - ( - "n", + "\\n", 4093..4095, ), ), @@ -5597,136 +4724,46 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( [ - ( - Escape( - ( - "x1b", - 4196..4200, - ), - ), - 4196..4200, - ), - ( - Text( - ( - "[", - 4200..4201, - ), - ), - 4200..4201, - ), - ( - Text( - ( - "1", - 4201..4202, - ), - ), - 4201..4202, - ), - ( - Text( - ( - ";", - 4202..4203, - ), - ), - 4202..4203, - ), - ( - Text( - ( - "3", - 4203..4204, - ), - ), - 4203..4204, - ), - ( - Text( - ( - ";", - 4204..4205, - ), - ), - 4204..4205, - ), ( Text( ( - "97", - 4205..4207, + "\\x", + 4196..4198, ), ), - 4205..4207, + 4196..4198, ), ( Text( ( - ";", - 4207..4208, + "1b[1;3;97;43m %s ", + 4198..4215, ), ), - 4207..4208, + 4198..4215, ), ( Text( ( - "43m", - 4208..4211, + "\\x", + 4215..4217, ), ), - 4208..4211, + 4215..4217, ), ( Text( ( - "%", - 4212..4213, + "1b[0m", + 4217..4222, ), ), - 4212..4213, + 4217..4222, ), ( Text( ( - "s", - 4213..4214, - ), - ), - 4213..4214, - ), - ( - Escape( - ( - "x1b", - 4215..4219, - ), - ), - 4215..4219, - ), - ( - Text( - ( - "[", - 4219..4220, - ), - ), - 4219..4220, - ), - ( - Text( - ( - "0m", - 4220..4222, - ), - ), - 4220..4222, - ), - ( - Escape( - ( - "n", + "\\n", 4222..4224, ), ), @@ -5862,136 +4899,46 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( [ - ( - Escape( - ( - "x1b", - 4382..4386, - ), - ), - 4382..4386, - ), - ( - Text( - ( - "[", - 4386..4387, - ), - ), - 4386..4387, - 
), - ( - Text( - ( - "1", - 4387..4388, - ), - ), - 4387..4388, - ), - ( - Text( - ( - ";", - 4388..4389, - ), - ), - 4388..4389, - ), - ( - Text( - ( - "3", - 4389..4390, - ), - ), - 4389..4390, - ), ( Text( ( - ";", - 4390..4391, + "\\x", + 4382..4384, ), ), - 4390..4391, + 4382..4384, ), ( Text( ( - "97", - 4391..4393, + "1b[1;3;97;41m %s ", + 4384..4401, ), ), - 4391..4393, + 4384..4401, ), ( Text( ( - ";", - 4393..4394, + "\\x", + 4401..4403, ), ), - 4393..4394, + 4401..4403, ), ( Text( ( - "41m", - 4394..4397, + "1b[0m", + 4403..4408, ), ), - 4394..4397, + 4403..4408, ), ( Text( ( - "%", - 4398..4399, - ), - ), - 4398..4399, - ), - ( - Text( - ( - "s", - 4399..4400, - ), - ), - 4399..4400, - ), - ( - Escape( - ( - "x1b", - 4401..4405, - ), - ), - 4401..4405, - ), - ( - Text( - ( - "[", - 4405..4406, - ), - ), - 4405..4406, - ), - ( - Text( - ( - "0m", - 4406..4408, - ), - ), - 4406..4408, - ), - ( - Escape( - ( - "n", + "\\n", 4408..4410, ), ), diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha035__stdlib_fs.snap b/tests/grammar/snapshots/r#mod__grammar__alpha035__stdlib_fs.snap index 207a830..aa2533f 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha035__stdlib_fs.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha035__stdlib_fs.snap @@ -65,21 +65,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "[", + "[ -d \"", ), - 61..62, - ), - ( - CommandOption( - "-d", - ), - 63..65, - ), - ( - Text( - "\"", - ), - 66..67, + 61..67, ), ( Expression( @@ -97,15 +85,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ]", ), - 73..74, - ), - ( - Text( - "]", - ), - 75..76, + 73..76, ), ( Text( @@ -239,21 +221,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "[", + "[ -f \"", ), - 190..191, - ), - ( - CommandOption( - "-f", - ), - 192..194, - ), - ( - Text( - "\"", - ), - 195..196, + 190..196, ), ( Expression( @@ -271,15 +241,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ]", ), - 202..203, - ), - ( - Text( - "]", - ), - 204..205, + 202..205, ), ( Text( @@ -418,15 +382,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "<", + "< \"", ), - 325..326, - ), - ( - Text( - "\"", - ), - 327..328, + 325..328, ), ( Expression( @@ -568,15 +526,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", - ), - 459..463, - ), - ( - Text( - "\"", + "echo \"", ), - 464..465, + 459..465, ), ( Expression( @@ -594,21 +546,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 474..475, - ), - ( - Text( - ">", + "\" > \"", ), - 476..477, - ), - ( - Text( - "\"", - ), - 478..479, + 474..479, ), ( Expression( @@ -750,15 +690,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", - ), - 612..616, - ), - ( - Text( - "\"", + "echo \"", ), - 617..618, + 612..618, ), ( Expression( @@ -776,27 +710,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 627..628, - ), - ( - Text( - ">", - ), - 629..630, - ), - ( - Text( - ">", + "\" >> \"", ), - 630..631, - ), - ( - Text( - "\"", - ), - 632..633, + 627..633, ), ( Expression( @@ -985,21 +901,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "ln", - ), - 854..856, - ), - ( - CommandOption( - "-s", - ), - 857..859, - ), - ( - Text( - "\"", + "ln -s \"", ), - 860..861, + 854..861, ), ( Expression( @@ -1017,15 +921,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 869..870, - ), - ( - Text( - "\"", + "\" \"", ), - 871..872, + 869..872, ), ( Expression( @@ -1105,20 
+1003,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "The", - 925..928, - ), - ), - 925..928, - ), - ( - Text( - ( - "file", - 929..933, + "The file ", + 925..934, ), ), - 929..933, + 925..934, ), ( Expression( @@ -1137,29 +1026,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "doesn't", - 943..950, + " doesn't exist!", + 942..957, ), ), - 943..950, - ), - ( - Text( - ( - "exist", - 951..956, - ), - ), - 951..956, - ), - ( - Text( - ( - "!", - 956..957, - ), - ), - 956..957, + 942..957, ), ], ), @@ -1311,21 +1182,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "mkdir", - ), - 1132..1137, - ), - ( - CommandOption( - "-p", - ), - 1138..1140, - ), - ( - Text( - "\"", + "mkdir -p \"", ), - 1141..1142, + 1132..1142, ), ( Expression( @@ -1503,27 +1362,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "chmod", + "chmod +x \"", ), - 1344..1349, - ), - ( - Text( - "+", - ), - 1350..1351, - ), - ( - Text( - "x", - ), - 1351..1352, - ), - ( - Text( - "\"", - ), - 1353..1354, + 1344..1354, ), ( Expression( @@ -1603,20 +1444,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "The", - 1400..1403, - ), - ), - 1400..1403, - ), - ( - Text( - ( - "file", - 1404..1408, + "The file ", + 1400..1409, ), ), - 1404..1408, + 1400..1409, ), ( Expression( @@ -1635,29 +1467,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "doesn't", - 1416..1423, - ), - ), - 1416..1423, - ), - ( - Text( - ( - "exist", - 1424..1429, - ), - ), - 1424..1429, - ), - ( - Text( - ( - "!", - 1429..1430, + " doesn't exist!", + 1415..1430, ), ), - 1429..1430, + 1415..1430, ), ], ), @@ -1864,21 +1678,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "chown", - ), - 1642..1647, - ), - ( - CommandOption( - "-R", - ), - 1648..1650, - ), - ( - Text( - "\"", + "chown -R \"", ), - 1651..1652, + 1642..1652, ), ( Expression( @@ -1896,15 +1698,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 1658..1659, - ), - ( - Text( - "\"", + "\" \"", ), - 1660..1661, + 1658..1661, ), ( Expression( diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha035__stdlib_http.snap b/tests/grammar/snapshots/r#mod__grammar__alpha035__stdlib_http.snap index d57f411..895a9e7 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha035__stdlib_http.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha035__stdlib_http.snap @@ -229,27 +229,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "curl", + "curl -L -o \"", ), - 569..573, - ), - ( - CommandOption( - "-L", - ), - 574..576, - ), - ( - CommandOption( - "-o", - ), - 577..579, - ), - ( - Text( - "\"", - ), - 580..581, + 569..581, ), ( Expression( @@ -267,15 +249,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" \"", ), - 587..588, - ), - ( - Text( - "\"", - ), - 589..590, + 587..590, ), ( Expression( @@ -377,15 +353,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "wget", + "wget \"", ), - 657..661, - ), - ( - Text( - "\"", - ), - 662..663, + 657..663, ), ( Expression( @@ -403,21 +373,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" -P \"", ), - 668..669, - ), - ( - CommandOption( - "-P", - ), - 670..672, - ), - ( - Text( - "\"", - ), - 673..674, + 668..674, ), ( Expression( @@ -519,15 +477,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "aria2c", - ), - 744..750, - ), - ( - Text( - "\"", + "aria2c \"", ), - 751..752, + 744..752, ), ( Expression( @@ -545,21 +497,9 @@ expression: 
parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 757..758, - ), - ( - CommandOption( - "-d", - ), - 759..761, - ), - ( - Text( - "\"", + "\" -d \"", ), - 762..763, + 757..763, ), ( Expression( diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha035__stdlib_math.snap b/tests/grammar/snapshots/r#mod__grammar__alpha035__stdlib_math.snap index 9989797..ec2eb00 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha035__stdlib_math.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha035__stdlib_math.snap @@ -91,15 +91,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", + "echo \"", ), - 98..102, - ), - ( - Text( - "\"", - ), - 103..104, + 98..104, ), ( Expression( @@ -117,195 +111,33 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 110..111, - ), - ( - Text( - "|", - ), - 112..113, - ), - ( - Text( - "awk", + "\" | awk '", ), - 114..117, + 110..119, ), ( Text( - "'", - ), - 118..119, - ), - ( - Escape( - "{", + "\\{", ), 119..121, ), ( Text( - "s", - ), - 121..122, - ), - ( - Text( - "=", - ), - 122..123, - ), - ( - Text( - "0", - ), - 123..124, - ), - ( - Text( - ";", - ), - 124..125, - ), - ( - Text( - "for", - ), - 126..129, - ), - ( - Text( - "(", - ), - 130..131, - ), - ( - Text( - "i", - ), - 131..132, - ), - ( - Text( - "=", - ), - 132..133, - ), - ( - Text( - "1", - ), - 133..134, - ), - ( - Text( - ";", - ), - 134..135, - ), - ( - Text( - "i", - ), - 136..137, - ), - ( - Text( - "<=", - ), - 137..139, - ), - ( - Text( - "NF", - ), - 139..141, - ), - ( - Text( - ";", - ), - 141..142, - ), - ( - Text( - "i", - ), - 143..144, - ), - ( - Text( - "+", - ), - 144..145, - ), - ( - Text( - "+", - ), - 145..146, - ), - ( - Text( - ")", + "s=0; for (i=1; i<=NF; i++) s+=", ), - 146..147, + 121..151, ), ( Text( - "s", - ), - 148..149, - ), - ( - Text( - "+=", - ), - 149..151, - ), - ( - Escape( - "$", + "\\$", ), 151..153, ), ( Text( - "i", - ), - 153..154, - ), - ( - Text( - ";", - ), - 154..155, - ), - ( - Text( - "print", - ), - 156..161, - ), - ( - Text( - "s", - ), - 162..163, - ), - ( - Text( - "}", - ), - 163..164, - ), - ( - Text( - "'", + "i; print s}'", ), - 164..165, + 153..165, ), ( Text( @@ -460,21 +292,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", - ), - 329..333, - ), - ( - Text( - "\"", - ), - 334..335, - ), - ( - Text( - "(", + "echo \"(", ), - 335..336, + 329..336, ), ( Expression( @@ -492,63 +312,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "+", - ), - 344..345, - ), - ( - Text( - "0", - ), - 345..346, - ), - ( - Text( - ".", - ), - 346..347, - ), - ( - Text( - "5", - ), - 347..348, - ), - ( - Text( - ")", - ), - 348..349, - ), - ( - Text( - "/", - ), - 349..350, - ), - ( - Text( - "1", - ), - 350..351, - ), - ( - Text( - "\"", - ), - 351..352, - ), - ( - Text( - "|", - ), - 353..354, - ), - ( - Text( - "bc", + "+0.5)/1\" | bc", ), - 355..357, + 344..357, ), ( Text( @@ -614,21 +380,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", - ), - 392..396, - ), - ( - Text( - "\"", - ), - 397..398, - ), - ( - Text( - "(", + "echo \"(", ), - 398..399, + 392..399, ), ( Expression( @@ -644,59 +398,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), 399..407, ), - ( - CommandOption( - "-0", - ), - 407..409, - ), - ( - Text( - ".", - ), - 409..410, - ), - ( - Text( - "5", - ), - 410..411, - ), - ( - Text( - ")", - ), - 411..412, - ), - ( - Text( - "/", - ), - 412..413, - ), - ( - Text( - "1", - ), - 413..414, - ), - ( - Text( - "\"", - ), - 414..415, - ), - ( - 
Text( - "|", - ), - 416..417, - ), ( Text( - "bc", + "-0.5)/1\" | bc", ), - 418..420, + 407..420, ), ( Text( @@ -816,15 +522,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", - ), - 571..575, - ), - ( - Text( - "\"", + "echo \"", ), - 576..577, + 571..577, ), ( Expression( @@ -842,195 +542,57 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" | awk '", ), - 585..586, + 585..594, ), ( Text( - "|", - ), - 587..588, - ), - ( - Text( - "awk", - ), - 589..592, - ), - ( - Text( - "'", - ), - 593..594, - ), - ( - Escape( - "{", + "\\{", ), 594..596, ), ( Text( - "printf", - ), - 596..602, - ), - ( - Text( - "\"", - ), - 603..604, - ), - ( - Text( - "%", - ), - 604..605, - ), - ( - Text( - "d", - ), - 605..606, - ), - ( - Text( - "\"", + "printf \"%d\", (", ), - 606..607, + 596..610, ), ( Text( - ",", - ), - 607..608, - ), - ( - Text( - "(", - ), - 609..610, - ), - ( - Escape( - "$", + "\\$", ), 610..612, ), ( Text( - "1", - ), - 612..613, - ), - ( - Text( - "<", - ), - 614..615, - ), - ( - Text( - "0", - ), - 616..617, - ), - ( - Text( - "?", + "1 < 0 ? int(", ), - 618..619, + 612..624, ), ( Text( - "int", - ), - 620..623, - ), - ( - Text( - "(", - ), - 623..624, - ), - ( - Escape( - "$", + "\\$", ), 624..626, ), ( Text( - "1", - ), - 626..627, - ), - ( - Text( - ")", - ), - 627..628, - ), - ( - CommandOption( - "-1", - ), - 629..632, - ), - ( - Text( - ":", + "1) - 1 : int(", ), - 633..634, + 626..639, ), ( Text( - "int", - ), - 635..638, - ), - ( - Text( - "(", - ), - 638..639, - ), - ( - Escape( - "$", + "\\$", ), 639..641, ), ( Text( - "1", - ), - 641..642, - ), - ( - Text( - ")", - ), - 642..643, - ), - ( - Text( - ")", - ), - 643..644, - ), - ( - Text( - "}", - ), - 644..645, - ), - ( - Text( - "'", + "1))}'", ), - 645..646, + 641..646, ), ( Text( diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha035__stdlib_text.snap b/tests/grammar/snapshots/r#mod__grammar__alpha035__stdlib_text.snap index 59ee232..a443151 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha035__stdlib_text.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha035__stdlib_text.snap @@ -88,18 +88,18 @@ expression: parse_unwrap(&tokenize(&stdlib)) Text( [ ( - Escape( + Text( ( - "$", + "\\$", 171..173, ), ), 171..173, ), ( - Escape( + Text( ( - "{", + "\\{", 173..175, ), ), @@ -108,20 +108,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "source", - 175..181, - ), - ), - 175..181, - ), - ( - Text( - ( - "/", - 181..182, + "source/", + 175..182, ), ), - 181..182, + 175..182, ), ( Expression( @@ -266,18 +257,18 @@ expression: parse_unwrap(&tokenize(&stdlib)) Text( [ ( - Escape( + Text( ( - "$", + "\\$", 358..360, ), ), 358..360, ), ( - Escape( + Text( ( - "{", + "\\{", 360..362, ), ), @@ -286,20 +277,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "source", - 362..368, - ), - ), - 362..368, - ), - ( - Text( - ( - "//", - 368..370, + "source//", + 362..370, ), ), - 368..370, + 362..370, ), ( Expression( @@ -506,15 +488,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", - ), - 617..621, - ), - ( - Text( - "\"", + "echo \"", ), - 622..623, + 617..623, ), ( Expression( @@ -532,45 +508,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 631..632, - ), - ( - Text( - "|", - ), - 633..634, - ), - ( - Text( - "sed", - ), - 635..638, - ), - ( - CommandOption( - "-e", - ), - 639..641, - ), - ( - Text( - "\"", - ), - 642..643, - ), - ( - Text( - "s", - ), - 643..644, - ), - ( - Text( - "/", + "\" | sed -e 
\"s/", ), - 644..645, + 631..645, ), ( Expression( @@ -608,21 +548,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "/", - ), - 668..669, - ), - ( - Text( - "g", - ), - 669..670, - ), - ( - Text( - "\"", + "/g\"", ), - 670..671, + 668..671, ), ( Text( @@ -758,21 +686,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "IFS", - ), - 869..872, - ), - ( - Text( - "=", - ), - 872..873, - ), - ( - Text( - "\"", + "IFS=\"", ), - 873..874, + 869..874, ), ( Expression( @@ -790,33 +706,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 885..886, - ), - ( - Text( - "read", - ), - 887..891, - ), - ( - CommandOption( - "-rd", - ), - 892..895, - ), - ( - Text( - "''", - ), - 896..898, - ), - ( - CommandOption( - "-a", + "\" read -rd '' -a ", ), - 899..901, + 885..902, ), ( Expression( @@ -843,49 +735,13 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "<", - ), - 918..919, - ), - ( - Text( - "<", - ), - 920..921, - ), - ( - Text( - "(", - ), - 921..922, - ), - ( - Text( - "printf", - ), - 922..928, - ), - ( - Text( - "%", + " < <(printf %s \"", ), - 929..930, + 917..933, ), ( Text( - "s", - ), - 930..931, - ), - ( - Text( - "\"", - ), - 932..933, - ), - ( - Escape( - "$", + "\\$", ), 933..935, ), @@ -914,15 +770,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 948..949, - ), - ( - Text( - ")", + "\")", ), - 949..950, + 948..950, ), ( Text( @@ -1047,9 +897,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) Text( [ ( - Escape( + Text( ( - "n", + "\\n", 1110..1112, ), ), @@ -1155,7 +1005,17 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - [], + [ + ( + Text( + ( + " ", + 1250..1251, + ), + ), + 1250..1251, + ), + ], ), 1249..1252, ), @@ -1270,21 +1130,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "IFS", - ), - 1375..1378, - ), - ( - Text( - "=", - ), - 1378..1379, - ), - ( - Text( - "\"", + "IFS=\"", ), - 1379..1380, + 1375..1380, ), ( Expression( @@ -1302,37 +1150,19 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ; echo \"", ), - 1391..1392, + 1391..1401, ), ( Text( - ";", - ), - 1393..1394, - ), - ( - Text( - "echo", - ), - 1395..1399, - ), - ( - Text( - "\"", - ), - 1400..1401, - ), - ( - Escape( - "$", + "\\$", ), 1401..1403, ), ( - Escape( - "{", + Text( + "\\{", ), 1403..1405, ), @@ -1361,33 +1191,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "[", - ), - 1418..1419, - ), - ( - Text( - "*", - ), - 1419..1420, - ), - ( - Text( - "]", - ), - 1420..1421, - ), - ( - Text( - "}", - ), - 1421..1422, - ), - ( - Text( - "\"", + "[*]}\"", ), - 1422..1423, + 1418..1423, ), ( Text( @@ -1489,15 +1295,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", - ), - 1536..1540, - ), - ( - Text( - "\"", + "echo \"", ), - 1541..1542, + 1536..1542, ), ( Expression( @@ -1515,105 +1315,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 1548..1549, - ), - ( - Text( - "|", - ), - 1550..1551, - ), - ( - Text( - "sed", - ), - 1552..1555, - ), - ( - CommandOption( - "-e", - ), - 1556..1558, - ), - ( - Text( - "'s", - ), - 1559..1561, - ), - ( - Text( - "/", - ), - 1561..1562, - ), - ( - Text( - "^", - ), - 1562..1563, - ), - ( - Text( - "[", - ), - 1563..1564, - ), - ( - Text( - "[", - ), - 1564..1565, - ), - ( - Text( - ":", - ), - 1565..1566, - ), - ( - Text( - "space", - ), - 1566..1571, - ), - ( - Text( - ":", - ), - 1571..1572, - ), - ( - Text( - "]", - ), - 1572..1573, - ), - ( - Text( - "]", - ), - 1573..1574, - ), - ( - 
Text( - "*", - ), - 1574..1575, - ), - ( - Text( - "//", - ), - 1575..1577, - ), - ( - Text( - "'", + "\" | sed -e 's/^[[:space:]]*//'", ), - 1577..1578, + 1548..1578, ), ( Text( @@ -1715,15 +1419,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", - ), - 1692..1696, - ), - ( - Text( - "\"", + "echo \"", ), - 1697..1698, + 1692..1698, ), ( Expression( @@ -1741,111 +1439,27 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 1704..1705, - ), - ( - Text( - "|", - ), - 1706..1707, - ), - ( - Text( - "sed", - ), - 1708..1711, - ), - ( - CommandOption( - "-e", - ), - 1712..1714, - ), - ( - Text( - "'s", - ), - 1715..1717, - ), - ( - Text( - "/", - ), - 1717..1718, - ), - ( - Text( - "[", + "\" | sed -e 's/[[:space:]]*", ), - 1718..1719, + 1704..1730, ), ( Text( - "[", + "\\$", ), - 1719..1720, + 1730..1732, ), ( Text( - ":", + "//'", ), - 1720..1721, + 1732..1735, ), ( Text( - "space", + "$", ), - 1721..1726, - ), - ( - Text( - ":", - ), - 1726..1727, - ), - ( - Text( - "]", - ), - 1727..1728, - ), - ( - Text( - "]", - ), - 1728..1729, - ), - ( - Text( - "*", - ), - 1729..1730, - ), - ( - Escape( - "$", - ), - 1730..1732, - ), - ( - Text( - "//", - ), - 1732..1734, - ), - ( - Text( - "'", - ), - 1734..1735, - ), - ( - Text( - "$", - ), - 1735..1736, + 1735..1736, ), ], None, @@ -2048,15 +1662,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", - ), - 1948..1952, - ), - ( - Text( - "\"", + "echo \"", ), - 1953..1954, + 1948..1954, ), ( Expression( @@ -2074,105 +1682,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 1960..1961, - ), - ( - Text( - "|", - ), - 1962..1963, - ), - ( - Text( - "tr", - ), - 1964..1966, - ), - ( - Text( - "'", - ), - 1967..1968, - ), - ( - Text( - "[", - ), - 1968..1969, - ), - ( - Text( - ":", - ), - 1969..1970, - ), - ( - Text( - "upper", + "\" | tr '[:upper:]' '[:lower:]'", ), - 1970..1975, - ), - ( - Text( - ":", - ), - 1975..1976, - ), - ( - Text( - "]", - ), - 1976..1977, - ), - ( - Text( - "'", - ), - 1977..1978, - ), - ( - Text( - "'", - ), - 1979..1980, - ), - ( - Text( - "[", - ), - 1980..1981, - ), - ( - Text( - ":", - ), - 1981..1982, - ), - ( - Text( - "lower", - ), - 1982..1987, - ), - ( - Text( - ":", - ), - 1987..1988, - ), - ( - Text( - "]", - ), - 1988..1989, - ), - ( - Text( - "'", - ), - 1989..1990, + 1960..1990, ), ( Text( @@ -2274,15 +1786,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", + "echo \"", ), - 2088..2092, - ), - ( - Text( - "\"", - ), - 2093..2094, + 2088..2094, ), ( Expression( @@ -2300,105 +1806,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 2100..2101, - ), - ( - Text( - "|", - ), - 2102..2103, - ), - ( - Text( - "tr", - ), - 2104..2106, - ), - ( - Text( - "'", - ), - 2107..2108, - ), - ( - Text( - "[", - ), - 2108..2109, - ), - ( - Text( - ":", - ), - 2109..2110, - ), - ( - Text( - "lower", - ), - 2110..2115, - ), - ( - Text( - ":", - ), - 2115..2116, - ), - ( - Text( - "]", - ), - 2116..2117, - ), - ( - Text( - "'", - ), - 2117..2118, - ), - ( - Text( - "'", - ), - 2119..2120, - ), - ( - Text( - "[", - ), - 2120..2121, - ), - ( - Text( - ":", + "\" | tr '[:lower:]' '[:upper:]'", ), - 2121..2122, - ), - ( - Text( - "upper", - ), - 2122..2127, - ), - ( - Text( - ":", - ), - 2127..2128, - ), - ( - Text( - "]", - ), - 2128..2129, - ), - ( - Text( - "'", - ), - 2129..2130, + 2100..2130, ), ( Text( @@ -2495,21 +1905,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "[", - ), - 2299..2300, - 
), - ( - CommandOption( - "-n", + "[ -n \"", ), - 2301..2303, - ), - ( - Text( - "\"", - ), - 2304..2305, + 2299..2305, ), ( Expression( @@ -2527,33 +1925,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 2311..2312, - ), - ( - Text( - "]", - ), - 2313..2314, - ), - ( - Text( - "&&", - ), - 2315..2317, - ), - ( - Text( - "[", - ), - 2318..2319, - ), - ( - Text( - "\"", + "\" ] && [ \"", ), - 2320..2321, + 2311..2321, ), ( Expression( @@ -2571,21 +1945,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 2327..2328, - ), - ( - CommandOption( - "-eq", - ), - 2329..2332, - ), - ( - Text( - "\"", + "\" -eq \"", ), - 2333..2334, + 2327..2334, ), ( Expression( @@ -2603,51 +1965,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 2340..2341, - ), - ( - Text( - "]", - ), - 2342..2343, - ), - ( - Text( - "2", - ), - 2344..2345, - ), - ( - Text( - ">", + "\" ] 2>/dev/null", ), - 2345..2346, - ), - ( - Text( - "/", - ), - 2346..2347, - ), - ( - Text( - "dev", - ), - 2347..2350, - ), - ( - Text( - "/", - ), - 2350..2351, - ), - ( - Text( - "null", - ), - 2351..2355, + 2340..2355, ), ( Text( @@ -2803,67 +2123,19 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "for", - ), - 2513..2516, - ), - ( - Text( - "(", - ), - 2517..2518, - ), - ( - Text( - "(", - ), - 2518..2519, - ), - ( - Text( - "i", - ), - 2519..2520, - ), - ( - Text( - "=", - ), - 2520..2521, - ), - ( - Text( - "0", - ), - 2521..2522, - ), - ( - Text( - ";", - ), - 2522..2523, - ), - ( - Text( - "i", + "for ((i=0; i<", ), - 2524..2525, + 2513..2526, ), ( Text( - "<", - ), - 2525..2526, - ), - ( - Escape( - "$", + "\\$", ), 2526..2528, ), ( - Escape( - "{", + Text( + "\\{", ), 2528..2530, ), @@ -2898,57 +2170,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "}", - ), - 2544..2545, - ), - ( - Text( - ";", - ), - 2545..2546, - ), - ( - Text( - "i", - ), - 2547..2548, - ), - ( - Text( - "+", - ), - 2548..2549, - ), - ( - Text( - "+", + "}; i++)); do\n ", ), - 2549..2550, - ), - ( - Text( - ")", - ), - 2550..2551, - ), - ( - Text( - ")", - ), - 2551..2552, - ), - ( - Text( - ";", - ), - 2552..2553, - ), - ( - Text( - "do", - ), - 2554..2556, + 2544..2565, ), ( Expression( @@ -2975,31 +2199,19 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "+=", + "+=( \"", ), - 2579..2581, + 2579..2584, ), ( Text( - "(", - ), - 2581..2582, - ), - ( - Text( - "\"", - ), - 2583..2584, - ), - ( - Escape( - "$", + "\\$", ), 2584..2586, ), ( - Escape( - "{", + Text( + "\\{", ), 2586..2588, ), @@ -3032,65 +2244,17 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), 2601..2602, ), - ( - Escape( - "$", - ), - 2602..2604, - ), ( Text( - "i", + "\\$", ), - 2604..2605, - ), - ( - Text( - ":", - ), - 2605..2606, - ), - ( - Text( - "1", - ), - 2606..2607, - ), - ( - Text( - "}", - ), - 2607..2608, - ), - ( - Text( - "\"", - ), - 2608..2609, - ), - ( - Text( - ")", - ), - 2610..2611, - ), - ( - Text( - ";", - ), - 2611..2612, - ), - ( - Text( - "done", - ), - 2617..2621, + 2602..2604, ), ( Text( - ";", + "i:1}\" );\n done;", ), - 2621..2622, + 2604..2622, ), ( Text( @@ -3250,25 +2414,19 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", + "echo \"", ), - 2783..2787, + 2783..2789, ), ( Text( - "\"", - ), - 2788..2789, - ), - ( - Escape( - "$", + "\\$", ), 2789..2791, ), ( - Escape( - "{", + Text( + "\\{", ), 2791..2793, ), @@ -3303,15 +2461,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "}", - ), - 2808..2809, - ), - ( - Text( - 
"\"", + "}\"", ), - 2809..2810, + 2808..2810, ), ( Text( @@ -3371,25 +2523,19 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", + "echo \"", ), - 2853..2857, + 2853..2859, ), ( Text( - "\"", - ), - 2858..2859, - ), - ( - Escape( - "$", + "\\$", ), 2859..2861, ), ( - Escape( - "{", + Text( + "\\{", ), 2861..2863, ), @@ -3424,33 +2570,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "[", + "[@]}\"", ), - 2878..2879, - ), - ( - Text( - "@", - ), - 2879..2880, - ), - ( - Text( - "]", - ), - 2880..2881, - ), - ( - Text( - "}", - ), - 2881..2882, - ), - ( - Text( - "\"", - ), - 2882..2883, + 2878..2883, ), ( Text( @@ -3599,27 +2721,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "if", - ), - 3011..3013, - ), - ( - Text( - "[", - ), - 3014..3015, - ), - ( - Text( - "[", - ), - 3015..3016, - ), - ( - Text( - "\"", + "if [[ \"", ), - 3017..3018, + 3011..3018, ), ( Expression( @@ -3637,27 +2741,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 3024..3025, - ), - ( - Text( - "==", - ), - 3026..3028, - ), - ( - Text( - "*", - ), - 3029..3030, - ), - ( - Text( - "\"", + "\" == *\"", ), - 3030..3031, + 3024..3031, ), ( Expression( @@ -3675,57 +2761,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 3039..3040, - ), - ( - Text( - "*", - ), - 3040..3041, - ), - ( - Text( - "]", + "\"* ]]; then\n echo 1\n fi", ), - 3042..3043, - ), - ( - Text( - "]", - ), - 3043..3044, - ), - ( - Text( - ";", - ), - 3044..3045, - ), - ( - Text( - "then", - ), - 3046..3050, - ), - ( - Text( - "echo", - ), - 3055..3059, - ), - ( - Text( - "1", - ), - 3060..3061, - ), - ( - Text( - "fi", - ), - 3064..3066, + 3039..3066, ), ( Text( @@ -3870,15 +2908,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", + "echo \"", ), - 3181..3185, - ), - ( - Text( - "\"", - ), - 3186..3187, + 3181..3187, ), ( Expression( @@ -3896,21 +2928,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 3193..3194, - ), - ( - Text( - "|", - ), - 3195..3196, - ), - ( - Text( - "rev", + "\" | rev", ), - 3197..3200, + 3193..3200, ), ( Text( @@ -4034,27 +3054,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "if", - ), - 3320..3322, - ), - ( - Text( - "[", - ), - 3323..3324, - ), - ( - Text( - "[", - ), - 3324..3325, - ), - ( - Text( - "\"", + "if [[ \"", ), - 3326..3327, + 3320..3327, ), ( Expression( @@ -4072,89 +3074,29 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 3333..3334, - ), - ( - Text( - "==", - ), - 3335..3337, - ), - ( - Text( - "\"", + "\" == \"", ), - 3338..3339, + 3333..3339, ), ( Expression( ( Var( - ( - "prefix", - 3340..3346, - ), - ), - 3340..3346, - ), - ), - 3339..3347, - ), - ( - Text( - "\"", - ), - 3347..3348, - ), - ( - Text( - "*", - ), - 3348..3349, - ), - ( - Text( - "]", - ), - 3350..3351, - ), - ( - Text( - "]", - ), - 3351..3352, - ), - ( - Text( - ";", - ), - 3352..3353, - ), - ( - Text( - "then", - ), - 3354..3358, - ), - ( - Text( - "echo", - ), - 3363..3367, - ), - ( - Text( - "1", + ( + "prefix", + 3340..3346, + ), + ), + 3340..3346, + ), ), - 3368..3369, + 3339..3347, ), ( Text( - "fi", + "\"* ]]; then\n echo 1\n fi", ), - 3372..3374, + 3347..3374, ), ( Text( @@ -4321,27 +3263,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "if", - ), - 3514..3516, - ), - ( - Text( - "[", - ), - 3517..3518, - ), - ( - Text( - "[", + "if [[ \"", ), - 3518..3519, - ), - ( - Text( - "\"", - ), - 3520..3521, + 3514..3521, ), ( Expression( @@ -4359,27 
+3283,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 3527..3528, - ), - ( - Text( - "==", - ), - 3529..3531, - ), - ( - Text( - "*", - ), - 3532..3533, - ), - ( - Text( - "\"", + "\" == *\"", ), - 3533..3534, + 3527..3534, ), ( Expression( @@ -4397,51 +3303,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ]]; then\n echo 1\n fi", ), - 3542..3543, - ), - ( - Text( - "]", - ), - 3544..3545, - ), - ( - Text( - "]", - ), - 3545..3546, - ), - ( - Text( - ";", - ), - 3546..3547, - ), - ( - Text( - "then", - ), - 3548..3552, - ), - ( - Text( - "echo", - ), - 3557..3561, - ), - ( - Text( - "1", - ), - 3562..3563, - ), - ( - Text( - "fi", - ), - 3566..3568, + 3542..3568, ), ( Text( @@ -4819,27 +3683,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "printf", - ), - 4123..4129, - ), - ( - Text( - "\"", + "printf \"%.", ), - 4130..4131, - ), - ( - Text( - "%", - ), - 4131..4132, - ), - ( - Text( - ".", - ), - 4132..4133, + 4123..4133, ), ( Expression( @@ -4857,45 +3703,27 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "s", - ), - 4141..4142, - ), - ( - Text( - "\"", + "s\" \"", ), - 4142..4143, + 4141..4145, ), ( Text( - "\"", - ), - 4144..4145, - ), - ( - Escape( - "$", + "\\$", ), 4145..4147, ), - ( - Escape( - "{", - ), - 4147..4149, - ), ( Text( - "text", + "\\{", ), - 4149..4153, + 4147..4149, ), ( Text( - ":", + "text:", ), - 4153..4154, + 4149..4154, ), ( Expression( @@ -4913,15 +3741,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "}", - ), - 4161..4162, - ), - ( - Text( - "\"", + "}\"", ), - 4162..4163, + 4161..4163, ), ( Text( @@ -5056,69 +3878,27 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "printf", - ), - 4421..4427, - ), - ( - Text( - "\"", - ), - 4428..4429, - ), - ( - Text( - "%", - ), - 4429..4430, - ), - ( - Text( - ".", - ), - 4430..4431, - ), - ( - Text( - "1s", - ), - 4431..4433, - ), - ( - Text( - "\"", + "printf \"%.1s\" \"", ), - 4433..4434, + 4421..4436, ), ( Text( - "\"", - ), - 4435..4436, - ), - ( - Escape( - "$", + "\\$", ), 4436..4438, ), - ( - Escape( - "{", - ), - 4438..4440, - ), ( Text( - "text", + "\\{", ), - 4440..4444, + 4438..4440, ), ( Text( - ":", + "text:", ), - 4444..4445, + 4440..4445, ), ( Expression( @@ -5136,15 +3916,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "}", - ), - 4452..4453, - ), - ( - Text( - "\"", + "}\"", ), - 4453..4454, + 4452..4454, ), ( Text( @@ -5246,15 +4020,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", - ), - 4569..4573, - ), - ( - Text( - "\"", + "echo \"", ), - 4574..4575, + 4569..4575, ), ( Expression( @@ -5272,49 +4040,13 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 4581..4582, - ), - ( - Text( - "|", - ), - 4583..4584, - ), - ( - Text( - "sed", - ), - 4585..4588, - ), - ( - Text( - "\"", - ), - 4589..4590, - ), - ( - Text( - "s", - ), - 4590..4591, - ), - ( - Text( - "/", + "\" | sed \"s/^", ), - 4591..4592, + 4581..4593, ), ( Text( - "^", - ), - 4592..4593, - ), - ( - Escape( - "(", + "\\(", ), 4593..4595, ), @@ -5325,8 +4057,8 @@ expression: parse_unwrap(&tokenize(&stdlib)) 4595..4596, ), ( - Escape( - ")", + Text( + "\\)", ), 4596..4598, ), @@ -5337,28 +4069,22 @@ expression: parse_unwrap(&tokenize(&stdlib)) 4598..4599, ), ( - Escape( - "U", + Text( + "\\U", ), 4599..4601, ), - ( - Escape( - "1", - ), - 4601..4603, - ), ( Text( - "/", + "\\1", ), - 4603..4604, + 4601..4603, ), ( Text( - "\"", + "/\"", ), - 4604..4605, + 4603..4605, ), ( Text( @@ 
-5610,21 +4336,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "printf", - ), - 4855..4861, - ), - ( - Text( - "\"", - ), - 4862..4863, - ), - ( - Text( - "%", + "printf \"%", ), - 4863..4864, + 4855..4864, ), ( Expression( @@ -5642,57 +4356,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "s", - ), - 4872..4873, - ), - ( - Text( - "\"", - ), - 4873..4874, - ), - ( - Text( - "\"", - ), - 4875..4876, - ), - ( - Text( - "\"", - ), - 4876..4877, - ), - ( - Text( - "|", - ), - 4878..4879, - ), - ( - Text( - "tr", - ), - 4880..4882, - ), - ( - Text( - "\"", - ), - 4883..4884, - ), - ( - Text( - "\"", - ), - 4885..4886, - ), - ( - Text( - "\"", + "s\" \"\" | tr \" \" \"", ), - 4887..4888, + 4872..4888, ), ( Expression( @@ -5997,21 +4663,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "printf", - ), - 5171..5177, - ), - ( - Text( - "\"", - ), - 5178..5179, - ), - ( - Text( - "%", + "printf \"%", ), - 5179..5180, + 5171..5180, ), ( Expression( @@ -6029,57 +4683,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "s", - ), - 5188..5189, - ), - ( - Text( - "\"", - ), - 5189..5190, - ), - ( - Text( - "\"", - ), - 5191..5192, - ), - ( - Text( - "\"", - ), - 5192..5193, - ), - ( - Text( - "|", - ), - 5194..5195, - ), - ( - Text( - "tr", - ), - 5196..5198, - ), - ( - Text( - "\"", - ), - 5199..5200, - ), - ( - Text( - "\"", - ), - 5201..5202, - ), - ( - Text( - "\"", + "s\" \"\" | tr \" \" \"", ), - 5203..5204, + 5188..5204, ), ( Expression( diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha035__string_escapes.snap b/tests/grammar/snapshots/r#mod__grammar__alpha035__string_escapes.snap new file mode 100644 index 0000000..45754da --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha035__string_escapes.snap @@ -0,0 +1,60 @@ +--- +source: tests/grammar/alpha035.rs +expression: "compiler.tokenize(r#\"\"hello\\nworld\\t\\\"quote\\\"\"\"#)" +--- +[ + ( + Token( + "\"", + ), + 0..1, + ), + ( + Token( + "hello", + ), + 1..6, + ), + ( + Token( + "\\n", + ), + 6..8, + ), + ( + Token( + "world", + ), + 8..13, + ), + ( + Token( + "\\t", + ), + 13..15, + ), + ( + Token( + "\\\"", + ), + 15..17, + ), + ( + Token( + "quote", + ), + 17..22, + ), + ( + Token( + "\\\"", + ), + 22..24, + ), + ( + Token( + "\"", + ), + 24..25, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha035__string_multi_interpolation.snap b/tests/grammar/snapshots/r#mod__grammar__alpha035__string_multi_interpolation.snap new file mode 100644 index 0000000..6b77a03 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha035__string_multi_interpolation.snap @@ -0,0 +1,84 @@ +--- +source: tests/grammar/alpha035.rs +expression: "compiler.tokenize(r#\"\"{a} and {b} and {c}\"\"#)" +--- +[ + ( + Token( + "\"", + ), + 0..1, + ), + ( + Token( + "{", + ), + 1..2, + ), + ( + Token( + "a", + ), + 2..3, + ), + ( + Token( + "}", + ), + 3..4, + ), + ( + Token( + " and ", + ), + 4..9, + ), + ( + Token( + "{", + ), + 9..10, + ), + ( + Token( + "b", + ), + 10..11, + ), + ( + Token( + "}", + ), + 11..12, + ), + ( + Token( + " and ", + ), + 12..17, + ), + ( + Token( + "{", + ), + 17..18, + ), + ( + Token( + "c", + ), + 18..19, + ), + ( + Token( + "}", + ), + 19..20, + ), + ( + Token( + "\"", + ), + 20..21, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha035__string_nested_braces.snap b/tests/grammar/snapshots/r#mod__grammar__alpha035__string_nested_braces.snap new file mode 100644 index 0000000..ea45a3d --- /dev/null +++ 
b/tests/grammar/snapshots/r#mod__grammar__alpha035__string_nested_braces.snap @@ -0,0 +1,120 @@ +--- +source: tests/grammar/alpha035.rs +expression: "compiler.tokenize(r#\"\"text {if true { \"inner\" } else { \"other\" }} end\"\"#)" +--- +[ + ( + Token( + "\"", + ), + 0..1, + ), + ( + Token( + "text ", + ), + 1..6, + ), + ( + Token( + "{", + ), + 6..7, + ), + ( + Token( + "if", + ), + 7..9, + ), + ( + Token( + "true", + ), + 10..14, + ), + ( + Token( + "{", + ), + 15..16, + ), + ( + Token( + "\"", + ), + 17..18, + ), + ( + Token( + "inner", + ), + 18..23, + ), + ( + Token( + "\"", + ), + 23..24, + ), + ( + Token( + "}", + ), + 25..26, + ), + ( + Token( + "else", + ), + 27..31, + ), + ( + Token( + "{", + ), + 32..33, + ), + ( + Token( + "\"", + ), + 34..35, + ), + ( + Token( + "other", + ), + 35..40, + ), + ( + Token( + "\"", + ), + 40..41, + ), + ( + Token( + "}", + ), + 42..43, + ), + ( + Token( + "}", + ), + 43..44, + ), + ( + Token( + " end", + ), + 44..48, + ), + ( + Token( + "\"", + ), + 48..49, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha035__string_trailing_backslash.snap b/tests/grammar/snapshots/r#mod__grammar__alpha035__string_trailing_backslash.snap new file mode 100644 index 0000000..442d9ad --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha035__string_trailing_backslash.snap @@ -0,0 +1,24 @@ +--- +source: tests/grammar/alpha035.rs +expression: "compiler.tokenize(r#\"\"test\\\"\"#)" +--- +[ + ( + Token( + "\"", + ), + 0..1, + ), + ( + Token( + "test", + ), + 1..5, + ), + ( + Token( + "\\\"", + ), + 5..7, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha035__triple_dollar.snap b/tests/grammar/snapshots/r#mod__grammar__alpha035__triple_dollar.snap new file mode 100644 index 0000000..f039a07 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha035__triple_dollar.snap @@ -0,0 +1,24 @@ +--- +source: tests/grammar/alpha035.rs +expression: "compiler.tokenize(\"$$$\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "$", + ), + 1..2, + ), + ( + Token( + "$", + ), + 2..3, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha035__unclosed_command.snap b/tests/grammar/snapshots/r#mod__grammar__alpha035__unclosed_command.snap new file mode 100644 index 0000000..548c75d --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha035__unclosed_command.snap @@ -0,0 +1,18 @@ +--- +source: tests/grammar/alpha035.rs +expression: "compiler.tokenize(\"$echo test\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "echo test", + ), + 1..10, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha035__unclosed_string.snap b/tests/grammar/snapshots/r#mod__grammar__alpha035__unclosed_string.snap new file mode 100644 index 0000000..03a4f8a --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha035__unclosed_string.snap @@ -0,0 +1,18 @@ +--- +source: tests/grammar/alpha035.rs +expression: "compiler.tokenize(r#\"\"unclosed\"#)" +--- +[ + ( + Token( + "\"", + ), + 0..1, + ), + ( + Token( + "unclosed", + ), + 1..9, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha040__command_backslash.snap b/tests/grammar/snapshots/r#mod__grammar__alpha040__command_backslash.snap new file mode 100644 index 0000000..b948635 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha040__command_backslash.snap @@ -0,0 +1,30 @@ +--- +source: tests/grammar/alpha040.rs +expression: "compiler.tokenize(r#\"$test\\n$\"#)" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( 
+ "test", + ), + 1..5, + ), + ( + Token( + "\\n", + ), + 5..7, + ), + ( + Token( + "$", + ), + 7..8, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha040__command_escapes.snap b/tests/grammar/snapshots/r#mod__grammar__alpha040__command_escapes.snap new file mode 100644 index 0000000..a580a1a --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha040__command_escapes.snap @@ -0,0 +1,48 @@ +--- +source: tests/grammar/alpha040.rs +expression: "compiler.tokenize(r#\"$echo\\ with\\ spaces$\"#)" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "echo", + ), + 1..5, + ), + ( + Token( + "\\ ", + ), + 5..7, + ), + ( + Token( + "with", + ), + 7..11, + ), + ( + Token( + "\\ ", + ), + 11..13, + ), + ( + Token( + "spaces", + ), + 13..19, + ), + ( + Token( + "$", + ), + 19..20, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha040__command_nested_braces.snap b/tests/grammar/snapshots/r#mod__grammar__alpha040__command_nested_braces.snap new file mode 100644 index 0000000..9d2372e --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha040__command_nested_braces.snap @@ -0,0 +1,90 @@ +--- +source: tests/grammar/alpha040.rs +expression: "compiler.tokenize(r#\"$echo {if true { 1 } else { 0 }}$\"#)" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "echo ", + ), + 1..6, + ), + ( + Token( + "{", + ), + 6..7, + ), + ( + Token( + "if", + ), + 7..9, + ), + ( + Token( + "true", + ), + 10..14, + ), + ( + Token( + "{", + ), + 15..16, + ), + ( + Token( + "1", + ), + 17..18, + ), + ( + Token( + "}", + ), + 19..20, + ), + ( + Token( + "else", + ), + 21..25, + ), + ( + Token( + "{", + ), + 26..27, + ), + ( + Token( + "0", + ), + 28..29, + ), + ( + Token( + "}", + ), + 30..31, + ), + ( + Token( + "}", + ), + 31..32, + ), + ( + Token( + "$", + ), + 32..33, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha040__comments_in_ifs.snap b/tests/grammar/snapshots/r#mod__grammar__alpha040__comments_in_ifs.snap index fbb9247..8cdcc42 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha040__comments_in_ifs.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha040__comments_in_ifs.snap @@ -351,29 +351,11 @@ expression: parse(&tokenize(input)) ( Text( ( - "Entry", - 298..303, + "Entry not allowed", + 298..315, ), ), - 298..303, - ), - ( - Text( - ( - "not", - 304..307, - ), - ), - 304..307, - ), - ( - Text( - ( - "allowed", - 308..315, - ), - ), - 308..315, + 298..315, ), ], ), diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha040__context_command_to_main.snap b/tests/grammar/snapshots/r#mod__grammar__alpha040__context_command_to_main.snap new file mode 100644 index 0000000..fbf7b5e --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha040__context_command_to_main.snap @@ -0,0 +1,42 @@ +--- +source: tests/grammar/alpha040.rs +expression: "compiler.tokenize(\"$cmd {expr}$\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "cmd ", + ), + 1..5, + ), + ( + Token( + "{", + ), + 5..6, + ), + ( + Token( + "expr", + ), + 6..10, + ), + ( + Token( + "}", + ), + 10..11, + ), + ( + Token( + "$", + ), + 11..12, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha040__context_multiple_switches.snap b/tests/grammar/snapshots/r#mod__grammar__alpha040__context_multiple_switches.snap new file mode 100644 index 0000000..8942fa1 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha040__context_multiple_switches.snap @@ -0,0 +1,60 @@ +--- +source: tests/grammar/alpha040.rs +expression: 
"compiler.tokenize(r#\"\"a {$b$} c\"\"#)" +--- +[ + ( + Token( + "\"", + ), + 0..1, + ), + ( + Token( + "a ", + ), + 1..3, + ), + ( + Token( + "{", + ), + 3..4, + ), + ( + Token( + "$", + ), + 4..5, + ), + ( + Token( + "b", + ), + 5..6, + ), + ( + Token( + "$", + ), + 6..7, + ), + ( + Token( + "}", + ), + 7..8, + ), + ( + Token( + " c", + ), + 8..10, + ), + ( + Token( + "\"", + ), + 10..11, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha040__context_string_to_main.snap b/tests/grammar/snapshots/r#mod__grammar__alpha040__context_string_to_main.snap new file mode 100644 index 0000000..c17c4ac --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha040__context_string_to_main.snap @@ -0,0 +1,48 @@ +--- +source: tests/grammar/alpha040.rs +expression: "compiler.tokenize(r#\"\"start {expr} end\"\"#)" +--- +[ + ( + Token( + "\"", + ), + 0..1, + ), + ( + Token( + "start ", + ), + 1..7, + ), + ( + Token( + "{", + ), + 7..8, + ), + ( + Token( + "expr", + ), + 8..12, + ), + ( + Token( + "}", + ), + 12..13, + ), + ( + Token( + " end", + ), + 13..17, + ), + ( + Token( + "\"", + ), + 17..18, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha040__dollar_at_end.snap b/tests/grammar/snapshots/r#mod__grammar__alpha040__dollar_at_end.snap new file mode 100644 index 0000000..6934da5 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha040__dollar_at_end.snap @@ -0,0 +1,18 @@ +--- +source: tests/grammar/alpha040.rs +expression: "compiler.tokenize(\"text$\")" +--- +[ + ( + Token( + "text", + ), + 0..4, + ), + ( + Token( + "$", + ), + 4..5, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha040__dollar_dollar.snap b/tests/grammar/snapshots/r#mod__grammar__alpha040__dollar_dollar.snap new file mode 100644 index 0000000..b922044 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha040__dollar_dollar.snap @@ -0,0 +1,18 @@ +--- +source: tests/grammar/alpha040.rs +expression: "compiler.tokenize(\"$$\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "$", + ), + 1..2, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha040__dollar_dollar_char.snap b/tests/grammar/snapshots/r#mod__grammar__alpha040__dollar_dollar_char.snap new file mode 100644 index 0000000..12482c7 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha040__dollar_dollar_char.snap @@ -0,0 +1,24 @@ +--- +source: tests/grammar/alpha040.rs +expression: "compiler.tokenize(\"$$x\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "$", + ), + 1..2, + ), + ( + Token( + "x", + ), + 2..3, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha040__dollar_dollar_space.snap b/tests/grammar/snapshots/r#mod__grammar__alpha040__dollar_dollar_space.snap new file mode 100644 index 0000000..d52f14c --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha040__dollar_dollar_space.snap @@ -0,0 +1,18 @@ +--- +source: tests/grammar/alpha040.rs +expression: "compiler.tokenize(\"$$ \")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "$", + ), + 1..2, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha040__double_dollar_brace.snap b/tests/grammar/snapshots/r#mod__grammar__alpha040__double_dollar_brace.snap new file mode 100644 index 0000000..53279fb --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha040__double_dollar_brace.snap @@ -0,0 +1,42 @@ +--- +source: tests/grammar/alpha040.rs +expression: "compiler.tokenize(\"$$ {var}$\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + 
Token( + "$", + ), + 1..2, + ), + ( + Token( + "{", + ), + 3..4, + ), + ( + Token( + "var", + ), + 4..7, + ), + ( + Token( + "}", + ), + 7..8, + ), + ( + Token( + "$", + ), + 8..9, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha040__double_dollar_end.snap b/tests/grammar/snapshots/r#mod__grammar__alpha040__double_dollar_end.snap new file mode 100644 index 0000000..b922044 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha040__double_dollar_end.snap @@ -0,0 +1,18 @@ +--- +source: tests/grammar/alpha040.rs +expression: "compiler.tokenize(\"$$\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "$", + ), + 1..2, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha040__double_dollar_no_space.snap b/tests/grammar/snapshots/r#mod__grammar__alpha040__double_dollar_no_space.snap new file mode 100644 index 0000000..5b80637 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha040__double_dollar_no_space.snap @@ -0,0 +1,24 @@ +--- +source: tests/grammar/alpha040.rs +expression: "compiler.tokenize(\"$$failed\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "$", + ), + 1..2, + ), + ( + Token( + "failed", + ), + 2..8, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha040__double_dollar_whitespace.snap b/tests/grammar/snapshots/r#mod__grammar__alpha040__double_dollar_whitespace.snap new file mode 100644 index 0000000..a7eb7b9 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha040__double_dollar_whitespace.snap @@ -0,0 +1,18 @@ +--- +source: tests/grammar/alpha040.rs +expression: "compiler.tokenize(\"$$ \\n\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "$", + ), + 1..2, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha040__empty_input.snap b/tests/grammar/snapshots/r#mod__grammar__alpha040__empty_input.snap new file mode 100644 index 0000000..6369eb2 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha040__empty_input.snap @@ -0,0 +1,5 @@ +--- +source: tests/grammar/alpha040.rs +expression: "compiler.tokenize(\"\")" +--- +[] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha040__empty_string.snap b/tests/grammar/snapshots/r#mod__grammar__alpha040__empty_string.snap new file mode 100644 index 0000000..0ba5326 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha040__empty_string.snap @@ -0,0 +1,18 @@ +--- +source: tests/grammar/alpha040.rs +expression: "compiler.tokenize(r#\"\"\"\"#)" +--- +[ + ( + Token( + "\"", + ), + 0..1, + ), + ( + Token( + "\"", + ), + 1..2, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha040__just_dollar.snap b/tests/grammar/snapshots/r#mod__grammar__alpha040__just_dollar.snap new file mode 100644 index 0000000..d14738f --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha040__just_dollar.snap @@ -0,0 +1,12 @@ +--- +source: tests/grammar/alpha040.rs +expression: "compiler.tokenize(\"$\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha040__mismatched_braces.snap b/tests/grammar/snapshots/r#mod__grammar__alpha040__mismatched_braces.snap new file mode 100644 index 0000000..d5b5fae --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha040__mismatched_braces.snap @@ -0,0 +1,36 @@ +--- +source: tests/grammar/alpha040.rs +expression: "compiler.tokenize(r#\"\"{{{\"\"#)" +--- +[ + ( + Token( + "\"", + ), + 0..1, + ), + ( + Token( + "{", + ), + 1..2, + ), + ( + Token( + "{", + ), + 2..3, + ), + ( + Token( + 
"{", + ), + 3..4, + ), + ( + Token( + "\"", + ), + 4..5, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha040__numbers.snap b/tests/grammar/snapshots/r#mod__grammar__alpha040__numbers.snap new file mode 100644 index 0000000..9a8ebac --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha040__numbers.snap @@ -0,0 +1,63 @@ +--- +source: tests/grammar/alpha040.rs +expression: parse_unwrap(&tokenize(input)) +--- +[ + ( + Statement( + ( + Expression( + ( + Number( + ( + 2.0, + 5..6, + ), + ), + 5..6, + ), + ), + 5..6, + ), + ), + 5..6, + ), + ( + Statement( + ( + Expression( + ( + Number( + ( + 2.4, + 11..14, + ), + ), + 11..14, + ), + ), + 11..14, + ), + ), + 11..14, + ), + ( + Statement( + ( + Expression( + ( + Number( + ( + 0.2, + 19..21, + ), + ), + 19..21, + ), + ), + 19..21, + ), + ), + 19..21, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha040__single_char.snap b/tests/grammar/snapshots/r#mod__grammar__alpha040__single_char.snap new file mode 100644 index 0000000..695fa9f --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha040__single_char.snap @@ -0,0 +1,12 @@ +--- +source: tests/grammar/alpha040.rs +expression: "compiler.tokenize(\"x\")" +--- +[ + ( + Token( + "x", + ), + 0..1, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha040__single_dollar_cmd.snap b/tests/grammar/snapshots/r#mod__grammar__alpha040__single_dollar_cmd.snap new file mode 100644 index 0000000..71ef9ff --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha040__single_dollar_cmd.snap @@ -0,0 +1,24 @@ +--- +source: tests/grammar/alpha040.rs +expression: "compiler.tokenize(\"$echo$\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "echo", + ), + 1..5, + ), + ( + Token( + "$", + ), + 5..6, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha040__stdlib_date.snap b/tests/grammar/snapshots/r#mod__grammar__alpha040__stdlib_date.snap index 0b78f20..d67f2e9 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha040__stdlib_date.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha040__stdlib_date.snap @@ -1265,56 +1265,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "%", - 3004..3005, + "%FT%T%Z", + 3004..3011, ), ), - 3004..3005, - ), - ( - Text( - ( - "FT", - 3005..3007, - ), - ), - 3005..3007, - ), - ( - Text( - ( - "%", - 3007..3008, - ), - ), - 3007..3008, - ), - ( - Text( - ( - "T", - 3008..3009, - ), - ), - 3008..3009, - ), - ( - Text( - ( - "%", - 3009..3010, - ), - ), - 3009..3010, - ), - ( - Text( - ( - "Z", - 3010..3011, - ), - ), - 3010..3011, + 3004..3011, ), ], ), @@ -1382,63 +1337,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "date", - ), - 3047..3051, - ), - ( - Text( - "+", - ), - 3052..3053, - ), - ( - Text( - "\"", - ), - 3053..3054, - ), - ( - Text( - "%", - ), - 3054..3055, - ), - ( - Text( - "FT", + "date +\"%FT%T%Z\" ", ), - 3055..3057, - ), - ( - Text( - "%", - ), - 3057..3058, - ), - ( - Text( - "T", - ), - 3058..3059, - ), - ( - Text( - "%", - ), - 3059..3060, - ), - ( - Text( - "Z", - ), - 3060..3061, - ), - ( - Text( - "\"", - ), - 3061..3062, + 3046..3063, ), ( Text( @@ -1507,27 +1408,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "date", - ), - 3097..3101, - ), - ( - CommandOption( - "--utc", - ), - 3102..3107, - ), - ( - CommandOption( - "-d", - ), - 3108..3110, - ), - ( - Text( - "\"", + "date --utc -d \"", ), - 3111..3112, + 3096..3112, ), ( Expression( @@ -1545,21 +1428,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( 
Text( - "\"", - ), - 3118..3119, - ), - ( - Text( - "+", + "\" +\"", ), - 3120..3121, - ), - ( - Text( - "\"", - ), - 3121..3122, + 3118..3122, ), ( Expression( @@ -1577,9 +1448,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 3130..3131, + 3130..3132, ), ( Text( @@ -1639,21 +1510,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "date", - ), - 3165..3169, - ), - ( - CommandOption( - "-d", + "date -d \"", ), - 3170..3172, - ), - ( - Text( - "\"", - ), - 3173..3174, + 3164..3174, ), ( Expression( @@ -1671,21 +1530,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" +\"", ), - 3180..3181, - ), - ( - Text( - "+", - ), - 3182..3183, - ), - ( - Text( - "\"", - ), - 3183..3184, + 3180..3184, ), ( Expression( @@ -1703,9 +1550,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 3192..3193, + 3192..3194, ), ( Text( @@ -1812,27 +1659,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "date", - ), - 3360..3364, - ), - ( - Text( - "+", + "date +%s ", ), - 3365..3366, - ), - ( - Text( - "%", - ), - 3366..3367, - ), - ( - Text( - "s", - ), - 3367..3368, + 3359..3369, ), ( Text( @@ -2265,63 +2094,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "date", - ), - 3749..3753, - ), - ( - Text( - "+", + "date +\"%FT%T%Z\" ", ), - 3754..3755, - ), - ( - Text( - "\"", - ), - 3755..3756, - ), - ( - Text( - "%", - ), - 3756..3757, - ), - ( - Text( - "FT", - ), - 3757..3759, - ), - ( - Text( - "%", - ), - 3759..3760, - ), - ( - Text( - "T", - ), - 3760..3761, - ), - ( - Text( - "%", - ), - 3761..3762, - ), - ( - Text( - "Z", - ), - 3762..3763, - ), - ( - Text( - "\"", - ), - 3763..3764, + 3748..3765, ), ( Text( @@ -2385,20 +2160,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "%", - 3807..3808, + "%F", + 3807..3809, ), ), - 3807..3808, - ), - ( - Text( - ( - "F", - 3808..3809, - ), - ), - 3808..3809, + 3807..3809, ), ], ), @@ -2435,6 +2201,15 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), 3794..3824, ), + ( + Text( + ( + " ", + 3824..3825, + ), + ), + 3824..3825, + ), ( Expression( ( @@ -2449,6 +2224,15 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), 3825..3830, ), + ( + Text( + ( + " ", + 3830..3831, + ), + ), + 3830..3831, + ), ( Expression( ( @@ -2465,20 +2249,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "%", - 3844..3845, + "%T", + 3844..3846, ), ), - 3844..3845, - ), - ( - Text( - ( - "T", - 3845..3846, - ), - ), - 3845..3846, + 3844..3846, ), ], ), @@ -2917,20 +2692,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "%", - 4320..4321, - ), - ), - 4320..4321, - ), - ( - Text( - ( - "s", - 4321..4322, + "%s", + 4320..4322, ), ), - 4321..4322, + 4320..4322, ), ], ), @@ -3009,20 +2775,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "%", - 4380..4381, - ), - ), - 4380..4381, - ), - ( - Text( - ( - "s", - 4381..4382, + "%s", + 4380..4382, ), ), - 4381..4382, + 4380..4382, ), ], ), diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha040__stdlib_env.snap b/tests/grammar/snapshots/r#mod__grammar__alpha040__stdlib_env.snap index 896a898..f369bb5 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha040__stdlib_env.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha040__stdlib_env.snap @@ -124,20 +124,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - ".", - 202..203, + ".env", + 202..206, ), ), - 202..203, - ), - ( - Text( - ( - "env", - 203..206, - ), - ), - 203..206, + 202..206, ), ], ), @@ -183,51 
+174,27 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", + "echo \"", ), - 240..244, + 239..246, ), ( Text( - "\"", - ), - 245..246, - ), - ( - Escape( - "$", + "\\$", ), 246..248, ), - ( - Escape( - "{", - ), - 248..250, - ), - ( - Text( - "!", - ), - 250..251, - ), - ( - Text( - "var", - ), - 251..254, - ), ( Text( - "}", + "\\{", ), - 254..255, + 248..250, ), ( Text( - "\"", + "!var}\" ", ), - 255..256, + 250..257, ), ( Text( @@ -363,15 +330,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "source", - ), - 349..355, - ), - ( - Text( - "\"", + "source \"", ), - 356..357, + 348..357, ), ( Expression( @@ -389,9 +350,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 363..364, + 363..365, ), ( Text( @@ -431,51 +392,27 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", + "echo \"", ), - 390..394, + 389..396, ), ( Text( - "\"", - ), - 395..396, - ), - ( - Escape( - "$", + "\\$", ), 396..398, ), - ( - Escape( - "{", - ), - 398..400, - ), - ( - Text( - "!", - ), - 400..401, - ), - ( - Text( - "var", - ), - 401..404, - ), ( Text( - "}", + "\\{", ), - 404..405, + 398..400, ), ( Text( - "\"", + "!var}\" ", ), - 405..406, + 400..407, ), ( Text( @@ -579,20 +516,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - ".", - 527..528, - ), - ), - 527..528, - ), - ( - Text( - ( - "env", - 528..531, + ".env", + 527..531, ), ), - 528..531, + 527..531, ), ], ), @@ -628,39 +556,21 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "export", + "export \"", ), - 554..560, + 553..562, ), ( Text( - "\"", - ), - 561..562, - ), - ( - Escape( - "$", + "\\$", ), 562..564, ), ( Text( - "(", - ), - 564..565, - ), - ( - Text( - "xargs", - ), - 565..570, - ), - ( - Text( - "<", + "(xargs < ", ), - 571..572, + 564..573, ), ( Expression( @@ -678,45 +588,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - ")", - ), - 579..580, - ), - ( - Text( - "\"", - ), - 580..581, - ), - ( - Text( - ">", - ), - 582..583, - ), - ( - Text( - "/", - ), - 584..585, - ), - ( - Text( - "dev", - ), - 585..588, - ), - ( - Text( - "/", - ), - 588..589, - ), - ( - Text( - "null", + ")\" > /dev/null ", ), - 589..593, + 579..594, ), ( Text( @@ -807,37 +681,19 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "[", - ), - 704..705, - ), - ( - Text( - "[", + "[[ ! 
-z ", ), - 705..706, + 703..712, ), ( Text( - "!", - ), - 707..708, - ), - ( - CommandOption( - "-z", - ), - 709..711, - ), - ( - Escape( - "$", + "\\$", ), 712..714, ), ( - Escape( - "{", + Text( + "\\{", ), 714..716, ), @@ -872,33 +728,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "+", - ), - 730..731, - ), - ( - Text( - "z", - ), - 731..732, - ), - ( - Text( - "}", - ), - 732..733, - ), - ( - Text( - "]", - ), - 734..735, - ), - ( - Text( - "]", + "+z} ]] ", ), - 735..736, + 730..737, ), ( Text( @@ -1058,13 +890,13 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "readonly", + "readonly ", ), - 900..908, + 899..909, ), ( - Escape( - "$", + Text( + "\\$", ), 909..911, ), @@ -1093,19 +925,13 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "=", + "=\"", ), - 924..925, + 924..926, ), ( Text( - "\"", - ), - 925..926, - ), - ( - Escape( - "$", + "\\$", ), 926..928, ), @@ -1134,45 +960,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 940..941, - ), - ( - Text( - "2", - ), - 942..943, - ), - ( - Text( - ">", - ), - 943..944, - ), - ( - Text( - "/", - ), - 945..946, - ), - ( - Text( - "dev", - ), - 946..949, - ), - ( - Text( - "/", - ), - 949..950, - ), - ( - Text( - "null", + "\" 2> /dev/null ", ), - 950..954, + 940..955, ), ( Text( @@ -1273,19 +1063,19 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", + "echo ", ), - 1063..1067, + 1062..1068, ), ( - Escape( - "$", + Text( + "\\$", ), 1068..1070, ), ( - Escape( - "{", + Text( + "\\{", ), 1070..1072, ), @@ -1320,9 +1110,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "}", + "} ", ), - 1086..1087, + 1086..1088, ), ( Text( @@ -1436,13 +1226,13 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "export", + "export ", ), - 1198..1204, + 1197..1205, ), ( - Escape( - "$", + Text( + "\\$", ), 1205..1207, ), @@ -1471,19 +1261,13 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "=", + "=\"", ), - 1220..1221, + 1220..1222, ), ( Text( - "\"", - ), - 1221..1222, - ), - ( - Escape( - "$", + "\\$", ), 1222..1224, ), @@ -1512,45 +1296,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 1236..1237, - ), - ( - Text( - "2", - ), - 1238..1239, - ), - ( - Text( - ">", - ), - 1239..1240, - ), - ( - Text( - "/", - ), - 1241..1242, - ), - ( - Text( - "dev", - ), - 1242..1245, - ), - ( - Text( - "/", - ), - 1245..1246, - ), - ( - Text( - "null", + "\" 2> /dev/null ", ), - 1246..1250, + 1236..1251, ), ( Text( @@ -1651,19 +1399,19 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", + "echo ", ), - 1357..1361, + 1356..1362, ), ( - Escape( - "$", + Text( + "\\$", ), 1362..1364, ), ( - Escape( - "{", + Text( + "\\{", ), 1364..1366, ), @@ -1698,9 +1446,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "}", + "} ", ), - 1380..1381, + 1380..1382, ), ( Text( @@ -1797,9 +1545,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "unset", + "unset ", ), - 1486..1491, + 1485..1492, ), ( Expression( @@ -1815,6 +1563,12 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), 1492..1498, ), + ( + Text( + " ", + ), + 1498..1499, + ), ( Text( "$", @@ -1909,45 +1663,21 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "[", - ), - 1585..1586, - ), - ( - CommandOption( - "-x", + "[ -x \"", ), - 1587..1589, + 1584..1591, ), ( Text( - "\"", - ), - 1590..1591, - ), - ( - Escape( - "$", + "\\$", ), 1591..1593, ), ( Text( - "(", + "(command -v ", ), - 1593..1594, - ), - ( - Text( - "command", - ), - 
1594..1601, - ), - ( - CommandOption( - "-v", - ), - 1602..1604, + 1593..1605, ), ( Expression( @@ -1965,21 +1695,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - ")", - ), - 1614..1615, - ), - ( - Text( - "\"", - ), - 1615..1616, - ), - ( - Text( - "]", + ")\" ] ", ), - 1617..1618, + 1614..1619, ), ( Text( @@ -2127,25 +1845,13 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "read", - ), - 1775..1779, - ), - ( - CommandOption( - "-p", + "read -p \"", ), - 1780..1782, + 1774..1784, ), ( Text( - "\"", - ), - 1783..1784, - ), - ( - Escape( - "$", + "\\$", ), 1784..1786, ), @@ -2174,9 +1880,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 1801..1802, + 1801..1803, ), ( Text( @@ -2203,9 +1909,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) Text( [ ( - Escape( + Text( ( - "$", + "\\$", 1817..1819, ), ), @@ -2314,31 +2020,13 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "read", + "read -s -p \"", ), - 1960..1964, - ), - ( - CommandOption( - "-s", - ), - 1965..1967, - ), - ( - CommandOption( - "-p", - ), - 1968..1970, + 1959..1972, ), ( Text( - "\"", - ), - 1971..1972, - ), - ( - Escape( - "$", + "\\$", ), 1972..1974, ), @@ -2367,9 +2055,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 1989..1990, + 1989..1991, ), ( Text( @@ -2399,33 +2087,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", - ), - 2003..2007, - ), - ( - Text( - "\"", - ), - 2008..2009, - ), - ( - Text( - "\"", - ), - 2009..2010, - ), - ( - Text( - ">", - ), - 2011..2012, - ), - ( - Text( - "&2", + "echo \"\" >&2 ", ), - 2012..2014, + 2002..2015, ), ( Text( @@ -2459,9 +2123,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) Text( [ ( - Escape( + Text( ( - "$", + "\\$", 2035..2037, ), ), @@ -2638,83 +2302,47 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "[", - 2329..2330, - ), - ), - 2329..2330, - ), - ( - Escape( - ( - "x1b", - 2330..2334, + " [", + 2328..2330, ), ), - 2330..2334, + 2328..2330, ), ( Text( ( - "[", - 2334..2335, + "\\x", + 2330..2332, ), ), - 2334..2335, + 2330..2332, ), ( Text( ( - "1mY", - 2335..2338, + "1b[1mY/", + 2332..2339, ), ), - 2335..2338, + 2332..2339, ), ( Text( ( - "/", - 2338..2339, - ), - ), - 2338..2339, - ), - ( - Escape( - ( - "x1b", - 2339..2343, + "\\x", + 2339..2341, ), ), - 2339..2343, + 2339..2341, ), ( Text( ( - "[", - 2343..2344, + "1b[0mn]", + 2341..2348, ), ), - 2343..2344, - ), - ( - Text( - ( - "0mn", - 2344..2347, - ), - ), - 2344..2347, - ), - ( - Text( - ( - "]", - 2347..2348, - ), - ), - 2347..2348, + 2341..2348, ), ], ), @@ -2730,92 +2358,47 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "[", - 2357..2358, + " [y/", + 2356..2360, ), ), - 2357..2358, + 2356..2360, ), ( Text( ( - "y", - 2358..2359, + "\\x", + 2360..2362, ), ), - 2358..2359, + 2360..2362, ), ( Text( ( - "/", - 2359..2360, - ), - ), - 2359..2360, - ), - ( - Escape( - ( - "x1b", - 2360..2364, + "1b[1mN", + 2362..2368, ), ), - 2360..2364, + 2362..2368, ), ( Text( ( - "[", - 2364..2365, + "\\x", + 2368..2370, ), ), - 2364..2365, + 2368..2370, ), ( Text( ( - "1mN", - 2365..2368, + "1b[0m]", + 2370..2376, ), ), - 2365..2368, - ), - ( - Escape( - ( - "x1b", - 2368..2372, - ), - ), - 2368..2372, - ), - ( - Text( - ( - "[", - 2372..2373, - ), - ), - 2372..2373, - ), - ( - Text( - ( - "0m", - 2373..2375, - ), - ), - 2373..2375, - ), - ( - Text( - ( - "]", - 2375..2376, - ), - ), - 2375..2376, + 2370..2376, ), ], ), @@ -2855,33 +2438,21 @@ expression: 
parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "printf", + "printf \"", ), - 2400..2406, + 2399..2408, ), ( Text( - "\"", + "\\x", ), - 2407..2408, - ), - ( - Escape( - "x1b", - ), - 2408..2412, + 2408..2410, ), ( Text( - "[", + "1b[1m", ), - 2412..2413, - ), - ( - Text( - "1m", - ), - 2413..2415, + 2410..2415, ), ( Expression( @@ -2897,23 +2468,17 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), 2415..2423, ), - ( - Escape( - "x1b", - ), - 2423..2427, - ), ( Text( - "[", + "\\x", ), - 2427..2428, + 2423..2425, ), ( Text( - "0m", + "1b[0m", ), - 2428..2430, + 2425..2430, ), ( Expression( @@ -2931,9 +2496,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 2446..2447, + 2446..2448, ), ( Text( @@ -2963,27 +2528,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "read", - ), - 2460..2464, - ), - ( - CommandOption( - "-s", - ), - 2465..2467, - ), - ( - CommandOption( - "-n", + "read -s -n 1 ", ), - 2468..2470, - ), - ( - Text( - "1", - ), - 2471..2472, + 2459..2473, ), ( Text( @@ -3013,27 +2560,21 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "printf", + "printf \"", ), - 2485..2491, + 2484..2493, ), ( Text( - "\"", - ), - 2492..2493, - ), - ( - Escape( - "n", + "\\n", ), 2493..2495, ), ( Text( - "\"", + "\" ", ), - 2495..2496, + 2495..2497, ), ( Text( @@ -3093,21 +2634,21 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", + "echo ", ), - 2540..2544, + 2539..2545, ), ( - Escape( - "$", + Text( + "\\$", ), 2545..2547, ), ( Text( - "REPLY", + "REPLY ", ), - 2547..2552, + 2547..2553, ), ( Text( @@ -3308,9 +2849,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "eval", + "eval ", ), - 2717..2721, + 2716..2722, ), ( Expression( @@ -3326,6 +2867,12 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), 2722..2731, ), + ( + Text( + " ", + ), + 2731..2732, + ), ( Text( "$", @@ -3439,15 +2986,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "id", + "id -u ", ), - 2871..2873, - ), - ( - CommandOption( - "-u", - ), - 2874..2876, + 2870..2877, ), ( Text( @@ -3645,6 +3186,12 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), 3050..3051, ), + ( + Text( + "", + ), + 3051..3052, + ), ( Expression( ( @@ -3670,21 +3217,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "=", - ), - 3065..3066, - ), - ( - Text( - "(", - ), - 3066..3067, - ), - ( - Text( - "\"", + "=(\"", ), - 3067..3068, + 3065..3068, ), ( Expression( @@ -3702,25 +3237,19 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" \"", ), - 3076..3077, + 3076..3079, ), ( Text( - "\"", - ), - 3078..3079, - ), - ( - Escape( - "$", + "\\$", ), 3079..3081, ), ( - Escape( - "{", + Text( + "\\{", ), 3081..3083, ), @@ -3749,39 +3278,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "[", - ), - 3096..3097, - ), - ( - Text( - "@", + "[@]}\") ", ), - 3097..3098, - ), - ( - Text( - "]", - ), - 3098..3099, - ), - ( - Text( - "}", - ), - 3099..3100, - ), - ( - Text( - "\"", - ), - 3100..3101, - ), - ( - Text( - ")", - ), - 3101..3102, + 3096..3103, ), ( Text( @@ -3816,25 +3315,19 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "printf", + "printf \"", ), - 3117..3123, + 3116..3125, ), ( Text( - "\"", - ), - 3124..3125, - ), - ( - Escape( - "$", + "\\$", ), 3125..3127, ), ( - Escape( - "{", + Text( + "\\{", ), 3127..3129, ), @@ -3863,33 +3356,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "[", + "[@]}\" ", ), - 3142..3143, - ), - ( - Text( - "@", - ), - 3143..3144, - ), - ( - Text( - "]", - ), - 
3144..3145, - ), - ( - Text( - "}", - ), - 3145..3146, - ), - ( - Text( - "\"", - ), - 3146..3147, + 3142..3148, ), ( Text( @@ -3990,13 +3459,13 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", + "echo ", ), - 3255..3259, + 3254..3260, ), ( - Escape( - "$", + Text( + "\\$", ), 3260..3262, ), @@ -4025,43 +3494,19 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "|", - ), - 3276..3277, - ), - ( - Text( - "sed", - ), - 3278..3281, - ), - ( - CommandOption( - "-e", - ), - 3282..3284, - ), - ( - Text( - "'s", + " | sed -e 's/", ), - 3285..3287, + 3275..3288, ), ( Text( - "/", - ), - 3287..3288, - ), - ( - Escape( - "\\", + "\\\\", ), 3288..3290, ), ( - Escape( - "\\", + Text( + "\\\\", ), 3290..3292, ), @@ -4071,107 +3516,35 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), 3292..3293, ), - ( - Escape( - "\\", - ), - 3293..3295, - ), - ( - Escape( - "\\", - ), - 3295..3297, - ), - ( - Escape( - "\\", - ), - 3297..3299, - ), - ( - Escape( - "\\", - ), - 3299..3301, - ), - ( - Text( - "/", - ), - 3301..3302, - ), - ( - Text( - "g'", - ), - 3302..3304, - ), - ( - CommandOption( - "-e", - ), - 3305..3307, - ), - ( - Text( - "\"", - ), - 3308..3309, - ), - ( - Text( - "s", - ), - 3309..3310, - ), - ( - Text( - "/", - ), - 3310..3311, - ), - ( - Text( - "%", - ), - 3311..3312, - ), - ( - Text( - "/", - ), - 3312..3313, - ), ( Text( - "%", + "\\\\", ), - 3313..3314, + 3293..3295, ), ( Text( - "%", + "\\\\", ), - 3314..3315, + 3295..3297, ), ( Text( - "/", + "\\\\", ), - 3315..3316, + 3297..3299, ), ( Text( - "g", + "\\\\", ), - 3316..3317, + 3299..3301, ), ( Text( - "\"", + "/g' -e \"s/%/%%/g\" ", ), - 3317..3318, + 3301..3319, ), ( Text( @@ -4311,22 +3684,22 @@ expression: parse_unwrap(&tokenize(&stdlib)) Text( [ ( - Escape( + Text( ( - "x1b", - 3462..3466, + "\\x", + 3462..3464, ), ), - 3462..3466, + 3462..3464, ), ( Text( ( - "[", - 3466..3467, + "1b[", + 3464..3467, ), ), - 3466..3467, + 3464..3467, ), ( Expression( @@ -4424,32 +3797,23 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), 3485..3503, ), - ( - Escape( - ( - "x1b", - 3503..3507, - ), - ), - 3503..3507, - ), ( Text( ( - "[", - 3507..3508, + "\\x", + 3503..3505, ), ), - 3507..3508, + 3503..3505, ), ( Text( ( - "0m", - 3508..3510, + "1b[0m", + 3505..3510, ), ), - 3508..3510, + 3505..3510, ), ], ), @@ -4530,32 +3894,23 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( [ - ( - Escape( - ( - "x1b", - 3591..3595, - ), - ), - 3591..3595, - ), ( Text( ( - "[", - 3595..3596, + "\\x", + 3591..3593, ), ), - 3595..3596, + 3591..3593, ), ( Text( ( - "1m", - 3596..3598, + "1b[1m", + 3593..3598, ), ), - 3596..3598, + 3593..3598, ), ( Expression( @@ -4584,32 +3939,23 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), 3598..3616, ), - ( - Escape( - ( - "x1b", - 3616..3620, - ), - ), - 3616..3620, - ), ( Text( ( - "[", - 3620..3621, + "\\x", + 3616..3618, ), ), - 3620..3621, + 3616..3618, ), ( Text( ( - "0m", - 3621..3623, + "1b[0m", + 3618..3623, ), ), - 3621..3623, + 3618..3623, ), ], ), @@ -4690,32 +4036,23 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( [ - ( - Escape( - ( - "x1b", - 3708..3712, - ), - ), - 3708..3712, - ), ( Text( ( - "[", - 3712..3713, + "\\x", + 3708..3710, ), ), - 3712..3713, + 3708..3710, ), ( Text( ( - "3m", - 3713..3715, + "1b[3m", + 3710..3715, ), ), - 3713..3715, + 3710..3715, ), ( Expression( @@ -4744,32 +4081,23 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), 3715..3733, ), - ( - Escape( - ( - "x1b", - 3733..3737, - ), - ), - 3733..3737, - ), ( Text( ( - "[", - 3737..3738, 
+ "\\x", + 3733..3735, ), ), - 3737..3738, + 3733..3735, ), ( Text( ( - "0m", - 3738..3740, + "1b[0m", + 3735..3740, ), ), - 3738..3740, + 3735..3740, ), ], ), @@ -4850,32 +4178,23 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( [ - ( - Escape( - ( - "x1b", - 3833..3837, - ), - ), - 3833..3837, - ), ( Text( ( - "[", - 3837..3838, + "\\x", + 3833..3835, ), ), - 3837..3838, + 3833..3835, ), ( Text( ( - "4m", - 3838..3840, + "1b[4m", + 3835..3840, ), ), - 3838..3840, + 3835..3840, ), ( Expression( @@ -4904,32 +4223,23 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), 3840..3858, ), - ( - Escape( - ( - "x1b", - 3858..3862, - ), - ), - 3858..3862, - ), ( Text( ( - "[", - 3862..3863, + "\\x", + 3858..3860, ), ), - 3862..3863, + 3858..3860, ), ( Text( ( - "0m", - 3863..3865, + "1b[0m", + 3860..3865, ), ), - 3863..3865, + 3860..3865, ), ], ), @@ -5031,22 +4341,22 @@ expression: parse_unwrap(&tokenize(&stdlib)) Text( [ ( - Escape( + Text( ( - "x1b", - 3980..3984, + "\\x", + 3980..3982, ), ), - 3980..3984, + 3980..3982, ), ( Text( ( - "[", - 3984..3985, + "1b[", + 3982..3985, ), ), - 3984..3985, + 3982..3985, ), ( Expression( @@ -5078,61 +4388,34 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "m", - 4000..4001, - ), - ), - 4000..4001, - ), - ( - Text( - ( - "%", - 4001..4002, + "m%s", + 4000..4003, ), ), - 4001..4002, + 4000..4003, ), ( Text( ( - "s", - 4002..4003, - ), - ), - 4002..4003, - ), - ( - Escape( - ( - "x1b", - 4003..4007, + "\\x", + 4003..4005, ), ), - 4003..4007, + 4003..4005, ), ( Text( ( - "[", - 4007..4008, + "1b[0m", + 4005..4010, ), ), - 4007..4008, + 4005..4010, ), ( Text( ( - "0m", - 4008..4010, - ), - ), - 4008..4010, - ), - ( - Escape( - ( - "n", + "\\n", 4010..4012, ), ), @@ -5240,136 +4523,46 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( [ - ( - Escape( - ( - "x1b", - 4119..4123, - ), - ), - 4119..4123, - ), - ( - Text( - ( - "[", - 4123..4124, - ), - ), - 4123..4124, - ), ( Text( ( - "1", - 4124..4125, + "\\x", + 4119..4121, ), ), - 4124..4125, + 4119..4121, ), ( Text( ( - ";", - 4125..4126, + "1b[1;3;97;44m%s", + 4121..4136, ), ), - 4125..4126, + 4121..4136, ), ( Text( ( - "3", - 4126..4127, + "\\x", + 4136..4138, ), ), - 4126..4127, + 4136..4138, ), ( Text( ( - ";", - 4127..4128, + "1b[0m", + 4138..4143, ), ), - 4127..4128, + 4138..4143, ), ( Text( ( - "97", - 4128..4130, - ), - ), - 4128..4130, - ), - ( - Text( - ( - ";", - 4130..4131, - ), - ), - 4130..4131, - ), - ( - Text( - ( - "44m", - 4131..4134, - ), - ), - 4131..4134, - ), - ( - Text( - ( - "%", - 4134..4135, - ), - ), - 4134..4135, - ), - ( - Text( - ( - "s", - 4135..4136, - ), - ), - 4135..4136, - ), - ( - Escape( - ( - "x1b", - 4136..4140, - ), - ), - 4136..4140, - ), - ( - Text( - ( - "[", - 4140..4141, - ), - ), - 4140..4141, - ), - ( - Text( - ( - "0m", - 4141..4143, - ), - ), - 4141..4143, - ), - ( - Escape( - ( - "n", + "\\n", 4143..4145, ), ), @@ -5477,136 +4670,46 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( [ - ( - Escape( - ( - "x1b", - 4258..4262, - ), - ), - 4258..4262, - ), - ( - Text( - ( - "[", - 4262..4263, - ), - ), - 4262..4263, - ), - ( - Text( - ( - "1", - 4263..4264, - ), - ), - 4263..4264, - ), - ( - Text( - ( - ";", - 4264..4265, - ), - ), - 4264..4265, - ), - ( - Text( - ( - "3", - 4265..4266, - ), - ), - 4265..4266, - ), - ( - Text( - ( - ";", - 4266..4267, - ), - ), - 4266..4267, - ), - ( - Text( - ( - "97", - 4267..4269, - ), - ), - 4267..4269, - ), ( Text( ( - ";", - 4269..4270, + "\\x", + 4258..4260, ), ), - 4269..4270, + 4258..4260, ), ( 
Text( ( - "42m", - 4270..4273, + "1b[1;3;97;42m%s", + 4260..4275, ), ), - 4270..4273, + 4260..4275, ), ( Text( ( - "%", - 4273..4274, + "\\x", + 4275..4277, ), ), - 4273..4274, + 4275..4277, ), ( Text( ( - "s", - 4274..4275, + "1b[0m", + 4277..4282, ), ), - 4274..4275, - ), - ( - Escape( - ( - "x1b", - 4275..4279, - ), - ), - 4275..4279, + 4277..4282, ), ( Text( ( - "[", - 4279..4280, - ), - ), - 4279..4280, - ), - ( - Text( - ( - "0m", - 4280..4282, - ), - ), - 4280..4282, - ), - ( - Escape( - ( - "n", + "\\n", 4282..4284, ), ), @@ -5714,136 +4817,46 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( [ - ( - Escape( - ( - "x1b", - 4397..4401, - ), - ), - 4397..4401, - ), - ( - Text( - ( - "[", - 4401..4402, - ), - ), - 4401..4402, - ), - ( - Text( - ( - "1", - 4402..4403, - ), - ), - 4402..4403, - ), - ( - Text( - ( - ";", - 4403..4404, - ), - ), - 4403..4404, - ), - ( - Text( - ( - "3", - 4404..4405, - ), - ), - 4404..4405, - ), - ( - Text( - ( - ";", - 4405..4406, - ), - ), - 4405..4406, - ), - ( - Text( - ( - "97", - 4406..4408, - ), - ), - 4406..4408, - ), ( Text( ( - ";", - 4408..4409, + "\\x", + 4397..4399, ), ), - 4408..4409, + 4397..4399, ), ( Text( ( - "43m", - 4409..4412, + "1b[1;3;97;43m%s", + 4399..4414, ), ), - 4409..4412, + 4399..4414, ), ( Text( ( - "%", - 4412..4413, + "\\x", + 4414..4416, ), ), - 4412..4413, + 4414..4416, ), ( Text( ( - "s", - 4413..4414, - ), - ), - 4413..4414, - ), - ( - Escape( - ( - "x1b", - 4414..4418, + "1b[0m", + 4416..4421, ), ), - 4414..4418, + 4416..4421, ), ( Text( ( - "[", - 4418..4419, - ), - ), - 4418..4419, - ), - ( - Text( - ( - "0m", - 4419..4421, - ), - ), - 4419..4421, - ), - ( - Escape( - ( - "n", + "\\n", 4421..4423, ), ), @@ -5979,136 +4992,46 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( [ - ( - Escape( - ( - "x1b", - 4591..4595, - ), - ), - 4591..4595, - ), - ( - Text( - ( - "[", - 4595..4596, - ), - ), - 4595..4596, - ), - ( - Text( - ( - "1", - 4596..4597, - ), - ), - 4596..4597, - ), - ( - Text( - ( - ";", - 4597..4598, - ), - ), - 4597..4598, - ), - ( - Text( - ( - "3", - 4598..4599, - ), - ), - 4598..4599, - ), - ( - Text( - ( - ";", - 4599..4600, - ), - ), - 4599..4600, - ), - ( - Text( - ( - "97", - 4600..4602, - ), - ), - 4600..4602, - ), - ( - Text( - ( - ";", - 4602..4603, - ), - ), - 4602..4603, - ), ( Text( ( - "41m", - 4603..4606, + "\\x", + 4591..4593, ), ), - 4603..4606, + 4591..4593, ), ( Text( ( - "%", - 4606..4607, + "1b[1;3;97;41m%s", + 4593..4608, ), ), - 4606..4607, + 4593..4608, ), ( Text( ( - "s", - 4607..4608, + "\\x", + 4608..4610, ), ), - 4607..4608, - ), - ( - Escape( - ( - "x1b", - 4608..4612, - ), - ), - 4608..4612, + 4608..4610, ), ( Text( ( - "[", - 4612..4613, + "1b[0m", + 4610..4615, ), ), - 4612..4613, + 4610..4615, ), ( Text( ( - "0m", - 4613..4615, - ), - ), - 4613..4615, - ), - ( - Escape( - ( - "n", + "\\n", 4615..4617, ), ), diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha040__stdlib_fs.snap b/tests/grammar/snapshots/r#mod__grammar__alpha040__stdlib_fs.snap index ac03936..5bb0d58 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha040__stdlib_fs.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha040__stdlib_fs.snap @@ -109,21 +109,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "[", + "[ -d \"", ), - 135..136, - ), - ( - CommandOption( - "-d", - ), - 137..139, - ), - ( - Text( - "\"", - ), - 140..141, + 134..141, ), ( Expression( @@ -141,15 +129,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 147..148, - ), - ( - 
Text( - "]", + "\" ] ", ), - 149..150, + 147..151, ), ( Text( @@ -283,21 +265,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "[", - ), - 271..272, - ), - ( - CommandOption( - "-f", - ), - 273..275, - ), - ( - Text( - "\"", + "[ -f \"", ), - 276..277, + 270..277, ), ( Expression( @@ -315,15 +285,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 283..284, - ), - ( - Text( - "]", + "\" ] ", ), - 285..286, + 283..287, ), ( Text( @@ -462,15 +426,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "<", - ), - 419..420, - ), - ( - Text( - "\"", + "< \"", ), - 421..422, + 418..422, ), ( Expression( @@ -488,9 +446,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 428..429, + 428..430, ), ( Text( @@ -612,15 +570,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", - ), - 551..555, - ), - ( - Text( - "\"", + "echo \"", ), - 556..557, + 550..557, ), ( Expression( @@ -638,21 +590,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 566..567, - ), - ( - Text( - ">", - ), - 568..569, - ), - ( - Text( - "\"", + "\" > \"", ), - 570..571, + 566..571, ), ( Expression( @@ -670,9 +610,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 577..578, + 577..579, ), ( Text( @@ -810,15 +750,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", - ), - 708..712, - ), - ( - Text( - "\"", + "echo \"", ), - 713..714, + 707..714, ), ( Expression( @@ -836,27 +770,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 723..724, - ), - ( - Text( - ">", - ), - 725..726, - ), - ( - Text( - ">", - ), - 726..727, - ), - ( - Text( - "\"", + "\" >> \"", ), - 728..729, + 723..729, ), ( Expression( @@ -874,9 +790,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 735..736, + 735..737, ), ( Text( @@ -1061,21 +977,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "ln", - ), - 959..961, - ), - ( - CommandOption( - "-s", - ), - 962..964, - ), - ( - Text( - "\"", + "ln -s \"", ), - 965..966, + 958..966, ), ( Expression( @@ -1093,15 +997,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 974..975, - ), - ( - Text( - "\"", + "\" \"", ), - 976..977, + 974..977, ), ( Expression( @@ -1119,9 +1017,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 990..991, + 990..992, ), ( Text( @@ -1181,20 +1079,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "The", - 1031..1034, - ), - ), - 1031..1034, - ), - ( - Text( - ( - "file", - 1035..1039, + "The file ", + 1031..1040, ), ), - 1035..1039, + 1031..1040, ), ( Expression( @@ -1213,29 +1102,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "doesn't", - 1049..1056, - ), - ), - 1049..1056, - ), - ( - Text( - ( - "exist", - 1057..1062, - ), - ), - 1057..1062, - ), - ( - Text( - ( - "!", - 1062..1063, + " doesn't exist!", + 1048..1063, ), ), - 1062..1063, + 1048..1063, ), ], ), @@ -1387,21 +1258,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "mkdir", - ), - 1235..1240, - ), - ( - CommandOption( - "-p", - ), - 1241..1243, - ), - ( - Text( - "\"", + "mkdir -p \"", ), - 1244..1245, + 1234..1245, ), ( Expression( @@ -1419,9 +1278,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 1251..1252, + 1251..1253, ), ( Text( @@ -1612,15 +1471,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "chmod", - ), - 1467..1472, - ), - ( - Text( - "\"", 
+ "chmod \"", ), - 1473..1474, + 1466..1474, ), ( Expression( @@ -1638,15 +1491,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 1480..1481, - ), - ( - Text( - "\"", + "\" \"", ), - 1482..1483, + 1480..1483, ), ( Expression( @@ -1664,9 +1511,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 1489..1490, + 1489..1491, ), ( Text( @@ -1726,20 +1573,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "The", - 1530..1533, - ), - ), - 1530..1533, - ), - ( - Text( - ( - "file", - 1534..1538, + "The file ", + 1530..1539, ), ), - 1534..1538, + 1530..1539, ), ( Expression( @@ -1758,29 +1596,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "doesn't", - 1546..1553, - ), - ), - 1546..1553, - ), - ( - Text( - ( - "exist", - 1554..1559, - ), - ), - 1554..1559, - ), - ( - Text( - ( - "!", - 1559..1560, + " doesn't exist!", + 1545..1560, ), ), - 1559..1560, + 1545..1560, ), ], ), @@ -2003,21 +1823,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "chown", - ), - 1783..1788, - ), - ( - CommandOption( - "-R", - ), - 1789..1791, - ), - ( - Text( - "\"", + "chown -R \"", ), - 1792..1793, + 1782..1793, ), ( Expression( @@ -2035,15 +1843,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 1799..1800, - ), - ( - Text( - "\"", + "\" \"", ), - 1801..1802, + 1799..1802, ), ( Expression( @@ -2061,9 +1863,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 1808..1809, + 1808..1810, ), ( Text( @@ -2123,20 +1925,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "The", - 1849..1852, - ), - ), - 1849..1852, - ), - ( - Text( - ( - "file", - 1853..1857, + "The file ", + 1849..1858, ), ), - 1853..1857, + 1849..1858, ), ( Expression( @@ -2155,29 +1948,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "doesn't", - 1865..1872, - ), - ), - 1865..1872, - ), - ( - Text( - ( - "exist", - 1873..1878, - ), - ), - 1873..1878, - ), - ( - Text( - ( - "!", - 1878..1879, + " doesn't exist!", + 1864..1879, ), ), - 1878..1879, + 1864..1879, ), ], ), @@ -2311,9 +2086,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) Text( [ ( - Escape( + Text( ( - "(", + "\\(", 2087..2089, ), ), @@ -2322,61 +2097,16 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "[", - 2089..2090, - ), - ), - 2089..2090, - ), - ( - Text( - ( - "^", - 2090..2091, - ), - ), - 2090..2091, - ), - ( - Text( - ( - "*", - 2091..2092, - ), - ), - 2091..2092, - ), - ( - Text( - ( - "?", - 2092..2093, - ), - ), - 2092..2093, - ), - ( - Text( - ( - "/", - 2093..2094, + "[^*?/]", + 2089..2095, ), ), - 2093..2094, + 2089..2095, ), ( Text( ( - "]", - 2094..2095, - ), - ), - 2094..2095, - ), - ( - Escape( - ( - ")", + "\\)", 2095..2097, ), ), @@ -2390,27 +2120,27 @@ expression: parse_unwrap(&tokenize(&stdlib)) Text( [ ( - Escape( + Text( ( - "\\", + "\\\\", 2101..2103, ), ), 2101..2103, ), ( - Escape( + Text( ( - "\\", + "\\\\", 2103..2105, ), ), 2103..2105, ), ( - Escape( + Text( ( - "1", + "\\1", 2105..2107, ), ), @@ -2800,7 +2530,17 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - [], + [ + ( + Text( + ( + " ", + 2607..2608, + ), + ), + 2607..2608, + ), + ], ), 2606..2609, ), @@ -2847,33 +2587,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "eval", - ), - 2635..2639, - ), - ( - Text( - "\"", - ), - 2640..2641, - ), - ( - Text( - "for", - ), - 2641..2644, - ), - ( - Text( - "file", - ), - 2645..2649, - ), - ( - Text( - "in", + "eval \"for file in ", ), - 2650..2652, + 
2634..2653, ), ( Expression( @@ -2891,49 +2607,31 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - ";", + "; do [ -e ", ), - 2663..2664, + 2663..2673, ), ( Text( - "do", - ), - 2665..2667, - ), - ( - Text( - "[", - ), - 2668..2669, - ), - ( - CommandOption( - "-e", - ), - 2670..2672, - ), - ( - Escape( - "\\", + "\\\\", ), 2673..2675, ), ( - Escape( - "\"", + Text( + "\\\"", ), 2675..2677, ), ( - Escape( - "\\", + Text( + "\\\\", ), 2677..2679, ), ( - Escape( - "$", + Text( + "\\$", ), 2679..2681, ), @@ -2944,56 +2642,44 @@ expression: parse_unwrap(&tokenize(&stdlib)) 2681..2685, ), ( - Escape( - "\\", + Text( + "\\\\", ), 2685..2687, ), - ( - Escape( - "\"", - ), - 2687..2689, - ), ( Text( - "]", + "\\\"", ), - 2690..2691, + 2687..2689, ), ( Text( - "&&", + " ] && echo ", ), - 2692..2694, + 2689..2700, ), ( Text( - "echo", - ), - 2695..2699, - ), - ( - Escape( - "\\", + "\\\\", ), 2700..2702, ), ( - Escape( - "\"", + Text( + "\\\"", ), 2702..2704, ), ( - Escape( - "\\", + Text( + "\\\\", ), 2704..2706, ), ( - Escape( - "$", + Text( + "\\$", ), 2706..2708, ), @@ -3003,35 +2689,23 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), 2708..2712, ), - ( - Escape( - "\\", - ), - 2712..2714, - ), - ( - Escape( - "\"", - ), - 2714..2716, - ), ( Text( - ";", + "\\\\", ), - 2716..2717, + 2712..2714, ), ( Text( - "done", + "\\\"", ), - 2718..2722, + 2714..2716, ), ( Text( - "\"", + "; done\" ", ), - 2722..2723, + 2716..2724, ), ( Text( @@ -3083,9 +2757,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) Text( [ ( - Escape( + Text( ( - "n", + "\\n", 2752..2754, ), ), @@ -3386,18 +3060,18 @@ expression: parse_unwrap(&tokenize(&stdlib)) Text( [ ( - Escape( + Text( ( - ".", + "\\.", 3227..3229, ), ), 3227..3229, ), ( - Escape( + Text( ( - "(", + "\\(", 3229..3231, ), ), @@ -3413,9 +3087,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) 3231..3234, ), ( - Escape( + Text( ( - ".", + "\\.", 3234..3236, ), ), @@ -3431,27 +3105,45 @@ expression: parse_unwrap(&tokenize(&stdlib)) 3236..3239, ), ( - Escape( + Text( + ( + "\\|", + 3239..3241, + ), + ), + 3239..3241, + ), + ( + Text( + ( + "tbz", + 3241..3244, + ), + ), + 3241..3244, + ), + ( + Text( ( - "|tbz", - 3239..3244, + "\\|", + 3244..3246, ), ), - 3239..3244, + 3244..3246, ), ( - Escape( + Text( ( - "|tbz2", - 3244..3250, + "tbz2", + 3246..3250, ), ), - 3244..3250, + 3246..3250, ), ( - Escape( + Text( ( - ")", + "\\)", 3250..3252, ), ), @@ -3489,21 +3181,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "tar", + "tar xvjf \"", ), - 3259..3262, - ), - ( - Text( - "xvjf", - ), - 3263..3267, - ), - ( - Text( - "\"", - ), - 3268..3269, + 3258..3269, ), ( Expression( @@ -3521,21 +3201,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" -C \"", ), - 3275..3276, - ), - ( - CommandOption( - "-C", - ), - 3277..3279, - ), - ( - Text( - "\"", - ), - 3280..3281, + 3275..3281, ), ( Expression( @@ -3553,9 +3221,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 3289..3290, + 3289..3291, ), ( Text( @@ -3607,18 +3275,18 @@ expression: parse_unwrap(&tokenize(&stdlib)) Text( [ ( - Escape( + Text( ( - ".", + "\\.", 3325..3327, ), ), 3325..3327, ), ( - Escape( + Text( ( - "(", + "\\(", 3327..3329, ), ), @@ -3634,9 +3302,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) 3329..3332, ), ( - Escape( + Text( ( - ".", + "\\.", 3332..3334, ), ), @@ -3652,18 +3320,27 @@ expression: parse_unwrap(&tokenize(&stdlib)) 3334..3336, ), ( - Escape( + Text( ( - "|tgz", - 3336..3341, + "\\|", + 3336..3338, ), ), - 
3336..3341, + 3336..3338, ), ( - Escape( + Text( ( - ")", + "tgz", + 3338..3341, + ), + ), + 3338..3341, + ), + ( + Text( + ( + "\\)", 3341..3343, ), ), @@ -3701,21 +3378,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "tar", - ), - 3350..3353, - ), - ( - Text( - "xzf", + "tar xzf \"", ), - 3354..3357, - ), - ( - Text( - "\"", - ), - 3358..3359, + 3349..3359, ), ( Expression( @@ -3733,21 +3398,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 3365..3366, - ), - ( - CommandOption( - "-C", + "\" -C \"", ), - 3367..3369, - ), - ( - Text( - "\"", - ), - 3370..3371, + 3365..3371, ), ( Expression( @@ -3765,9 +3418,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 3379..3380, + 3379..3381, ), ( Text( @@ -3819,18 +3472,18 @@ expression: parse_unwrap(&tokenize(&stdlib)) Text( [ ( - Escape( + Text( ( - ".", + "\\.", 3415..3417, ), ), 3415..3417, ), ( - Escape( + Text( ( - "(", + "\\(", 3417..3419, ), ), @@ -3846,9 +3499,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) 3419..3422, ), ( - Escape( + Text( ( - ".", + "\\.", 3422..3424, ), ), @@ -3864,27 +3517,27 @@ expression: parse_unwrap(&tokenize(&stdlib)) 3424..3426, ), ( - Escape( + Text( ( - "|txz", - 3426..3431, + "\\|", + 3426..3428, ), ), - 3426..3431, + 3426..3428, ), ( Text( ( - "$", - 3431..3432, + "txz$", + 3428..3432, ), ), - 3431..3432, + 3428..3432, ), ( - Escape( + Text( ( - ")", + "\\)", 3432..3434, ), ), @@ -3922,21 +3575,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "tar", - ), - 3441..3444, - ), - ( - Text( - "xJf", - ), - 3445..3448, - ), - ( - Text( - "\"", + "tar xJf \"", ), - 3449..3450, + 3440..3450, ), ( Expression( @@ -3954,21 +3595,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" -C \"", ), - 3456..3457, - ), - ( - CommandOption( - "-C", - ), - 3458..3460, - ), - ( - Text( - "\"", - ), - 3461..3462, + 3456..3462, ), ( Expression( @@ -3986,9 +3615,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 3470..3471, + 3470..3472, ), ( Text( @@ -4040,9 +3669,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) Text( [ ( - Escape( + Text( ( - ".", + "\\.", 3506..3508, ), ), @@ -4051,20 +3680,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "bz2", - 3508..3511, - ), - ), - 3508..3511, - ), - ( - Text( - ( - "$", - 3511..3512, + "bz2$", + 3508..3512, ), ), - 3511..3512, + 3508..3512, ), ], ), @@ -4089,15 +3709,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "bunzip2", - ), - 3518..3525, - ), - ( - Text( - "\"", + "bunzip2 \"", ), - 3526..3527, + 3517..3527, ), ( Expression( @@ -4115,9 +3729,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 3533..3534, + 3533..3535, ), ( Text( @@ -4169,9 +3783,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) Text( [ ( - Escape( + Text( ( - ".", + "\\.", 3569..3571, ), ), @@ -4180,20 +3794,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "deb", - 3571..3574, - ), - ), - 3571..3574, - ), - ( - Text( - ( - "$", - 3574..3575, + "deb$", + 3571..3575, ), ), - 3574..3575, + 3571..3575, ), ], ), @@ -4218,27 +3823,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "dpkg", + "dpkg-deb -xv \"", ), - 3581..3585, - ), - ( - CommandOption( - "-deb", - ), - 3585..3589, - ), - ( - CommandOption( - "-xv", - ), - 3590..3593, - ), - ( - Text( - "\"", - ), - 3594..3595, + 3580..3595, ), ( Expression( @@ -4256,15 +3843,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 
3601..3602, - ), - ( - Text( - "\"", + "\" \"", ), - 3603..3604, + 3601..3604, ), ( Expression( @@ -4282,9 +3863,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 3612..3613, + 3612..3614, ), ( Text( @@ -4336,9 +3917,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) Text( [ ( - Escape( + Text( ( - ".", + "\\.", 3648..3650, ), ), @@ -4347,20 +3928,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "gz", - 3650..3652, - ), - ), - 3650..3652, - ), - ( - Text( - ( - "$", - 3652..3653, + "gz$", + 3650..3653, ), ), - 3652..3653, + 3650..3653, ), ], ), @@ -4385,15 +3957,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "gunzip", - ), - 3659..3665, - ), - ( - Text( - "\"", + "gunzip \"", ), - 3666..3667, + 3658..3667, ), ( Expression( @@ -4411,9 +3977,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 3673..3674, + 3673..3675, ), ( Text( @@ -4465,9 +4031,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) Text( [ ( - Escape( + Text( ( - ".", + "\\.", 3709..3711, ), ), @@ -4476,20 +4042,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "rar", - 3711..3714, + "rar$", + 3711..3715, ), ), - 3711..3714, - ), - ( - Text( - ( - "$", - 3714..3715, - ), - ), - 3714..3715, + 3711..3715, ), ], ), @@ -4514,21 +4071,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "unrar", - ), - 3721..3726, - ), - ( - Text( - "x", + "unrar x \"", ), - 3727..3728, - ), - ( - Text( - "\"", - ), - 3729..3730, + 3720..3730, ), ( Expression( @@ -4546,15 +4091,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 3736..3737, - ), - ( - Text( - "\"", + "\" \"", ), - 3738..3739, + 3736..3739, ), ( Expression( @@ -4572,9 +4111,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 3747..3748, + 3747..3749, ), ( Text( @@ -4626,9 +4165,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) Text( [ ( - Escape( + Text( ( - ".", + "\\.", 3783..3785, ), ), @@ -4637,20 +4176,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "rpm", - 3785..3788, - ), - ), - 3785..3788, - ), - ( - Text( - ( - "$", - 3788..3789, + "rpm$", + 3785..3789, ), ), - 3788..3789, + 3785..3789, ), ], ), @@ -4675,15 +4205,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "rpm2cpio", - ), - 3795..3803, - ), - ( - Text( - "\"", + "rpm2cpio \"", ), - 3804..3805, + 3794..3805, ), ( Expression( @@ -4701,27 +4225,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 3811..3812, - ), - ( - Text( - "|", + "\" | cpio -idm ", ), - 3813..3814, - ), - ( - Text( - "cpio", - ), - 3815..3819, - ), - ( - CommandOption( - "-idm", - ), - 3820..3824, + 3811..3825, ), ( Text( @@ -4773,9 +4279,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) Text( [ ( - Escape( + Text( ( - ".", + "\\.", 3859..3861, ), ), @@ -4784,20 +4290,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "tar", - 3861..3864, + "tar$", + 3861..3865, ), ), - 3861..3864, - ), - ( - Text( - ( - "$", - 3864..3865, - ), - ), - 3864..3865, + 3861..3865, ), ], ), @@ -4822,21 +4319,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "tar", + "tar xf \"", ), - 3871..3874, - ), - ( - Text( - "xf", - ), - 3875..3877, - ), - ( - Text( - "\"", - ), - 3878..3879, + 3870..3879, ), ( Expression( @@ -4854,21 +4339,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 3885..3886, - ), - ( - CommandOption( - "-C", - ), - 3887..3889, - ), - ( - Text( - "\"", + "\" -C \"", ), - 3890..3891, + 
3885..3891, ), ( Expression( @@ -4886,9 +4359,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 3899..3900, + 3899..3901, ), ( Text( @@ -4940,9 +4413,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) Text( [ ( - Escape( + Text( ( - ".", + "\\.", 3935..3937, ), ), @@ -4951,20 +4424,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "xz", - 3937..3939, + "xz$", + 3937..3940, ), ), - 3937..3939, - ), - ( - Text( - ( - "$", - 3939..3940, - ), - ), - 3939..3940, + 3937..3940, ), ], ), @@ -4989,21 +4453,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "xz", - ), - 3946..3948, - ), - ( - CommandOption( - "--decompress", - ), - 3949..3961, - ), - ( - Text( - "\"", + "xz --decompress \"", ), - 3962..3963, + 3945..3963, ), ( Expression( @@ -5021,9 +4473,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 3969..3970, + 3969..3971, ), ( Text( @@ -5075,9 +4527,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) Text( [ ( - Escape( + Text( ( - ".", + "\\.", 4005..4007, ), ), @@ -5086,20 +4538,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "7z", - 4007..4009, - ), - ), - 4007..4009, - ), - ( - Text( - ( - "$", - 4009..4010, + "7z$", + 4007..4010, ), ), - 4009..4010, + 4007..4010, ), ], ), @@ -5124,21 +4567,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "7z", + "7z -y \"", ), - 4016..4018, - ), - ( - CommandOption( - "-y", - ), - 4019..4021, - ), - ( - Text( - "\"", - ), - 4022..4023, + 4015..4023, ), ( Expression( @@ -5156,21 +4587,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 4029..4030, - ), - ( - CommandOption( - "-o", - ), - 4031..4033, - ), - ( - Text( - "\"", + "\" -o \"", ), - 4034..4035, + 4029..4035, ), ( Expression( @@ -5188,9 +4607,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 4043..4044, + 4043..4045, ), ( Text( @@ -5242,18 +4661,18 @@ expression: parse_unwrap(&tokenize(&stdlib)) Text( [ ( - Escape( + Text( ( - ".", + "\\.", 4079..4081, ), ), 4079..4081, ), ( - Escape( + Text( ( - "(", + "\\(", 4081..4083, ), ), @@ -5269,27 +4688,45 @@ expression: parse_unwrap(&tokenize(&stdlib)) 4083..4086, ), ( - Escape( + Text( ( - "|war", - 4086..4091, + "\\|", + 4086..4088, ), ), - 4086..4091, + 4086..4088, ), ( - Escape( + Text( ( - "|jar", - 4091..4096, + "war", + 4088..4091, ), ), - 4091..4096, + 4088..4091, ), ( - Escape( + Text( + ( + "\\|", + 4091..4093, + ), + ), + 4091..4093, + ), + ( + Text( + ( + "jar", + 4093..4096, + ), + ), + 4093..4096, + ), + ( + Text( ( - ")", + "\\)", 4096..4098, ), ), @@ -5327,15 +4764,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "unzip", + "unzip \"", ), - 4105..4110, - ), - ( - Text( - "\"", - ), - 4111..4112, + 4104..4112, ), ( Expression( @@ -5353,21 +4784,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 4118..4119, - ), - ( - CommandOption( - "-d", - ), - 4120..4122, - ), - ( - Text( - "\"", + "\" -d \"", ), - 4123..4124, + 4118..4124, ), ( Expression( @@ -5385,9 +4804,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 4132..4133, + 4132..4134, ), ( Text( @@ -5438,47 +4857,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "Error", - 4178..4183, - ), - ), - 4178..4183, - ), - ( - Text( - ( - ":", - 4183..4184, - ), - ), - 4183..4184, - ), - ( - Text( - ( - "Unsupported", - 4185..4196, + "Error: Unsupported file type", + 4178..4206, ), ), - 4185..4196, - ), - ( - Text( - ( - "file", - 
4197..4201, - ), - ), - 4197..4201, - ), - ( - Text( - ( - "type", - 4202..4206, - ), - ), - 4202..4206, + 4178..4206, ), ], ), @@ -5552,47 +4935,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "Error", - 4282..4287, - ), - ), - 4282..4287, - ), - ( - Text( - ( - ":", - 4287..4288, - ), - ), - 4287..4288, - ), - ( - Text( - ( - "File", - 4289..4293, - ), - ), - 4289..4293, - ), - ( - Text( - ( - "not", - 4294..4297, - ), - ), - 4294..4297, - ), - ( - Text( - ( - "found", - 4298..4303, + "Error: File not found", + 4282..4303, ), ), - 4298..4303, + 4282..4303, ), ], ), diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha040__stdlib_http.snap b/tests/grammar/snapshots/r#mod__grammar__alpha040__stdlib_http.snap index 943de16..4700c4a 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha040__stdlib_http.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha040__stdlib_http.snap @@ -213,27 +213,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "curl", + "curl -L -o \"", ), - 481..485, - ), - ( - CommandOption( - "-L", - ), - 486..488, - ), - ( - CommandOption( - "-o", - ), - 489..491, - ), - ( - Text( - "\"", - ), - 492..493, + 480..493, ), ( Expression( @@ -251,15 +233,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 499..500, - ), - ( - Text( - "\"", + "\" \"", ), - 501..502, + 499..502, ), ( Expression( @@ -277,9 +253,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 507..508, + 507..509, ), ( Text( @@ -361,15 +337,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "wget", - ), - 570..574, - ), - ( - Text( - "\"", + "wget \"", ), - 575..576, + 569..576, ), ( Expression( @@ -387,21 +357,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" -P \"", ), - 581..582, - ), - ( - CommandOption( - "-P", - ), - 583..585, - ), - ( - Text( - "\"", - ), - 586..587, + 581..587, ), ( Expression( @@ -419,9 +377,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 593..594, + 593..595, ), ( Text( @@ -503,15 +461,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "aria2c", - ), - 658..664, - ), - ( - Text( - "\"", + "aria2c \"", ), - 665..666, + 657..666, ), ( Expression( @@ -529,21 +481,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 671..672, - ), - ( - CommandOption( - "-d", - ), - 673..675, - ), - ( - Text( - "\"", + "\" -d \"", ), - 676..677, + 671..677, ), ( Expression( @@ -561,9 +501,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 683..684, + 683..685, ), ( Text( diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha040__stdlib_math.snap b/tests/grammar/snapshots/r#mod__grammar__alpha040__stdlib_math.snap index f350c3d..b8cb839 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha040__stdlib_math.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha040__stdlib_math.snap @@ -91,15 +91,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", + "echo \"", ), - 106..110, - ), - ( - Text( - "\"", - ), - 111..112, + 105..112, ), ( Expression( @@ -117,195 +111,33 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 118..119, - ), - ( - Text( - "|", - ), - 120..121, - ), - ( - Text( - "awk", + "\" | awk '", ), - 122..125, + 118..127, ), ( Text( - "'", - ), - 126..127, - ), - ( - Escape( - "{", + "\\{", ), 127..129, ), ( Text( - "s", - ), - 129..130, - ), - ( - Text( - "=", - ), - 130..131, - ), - ( - Text( - "0", - ), - 131..132, 
- ), - ( - Text( - ";", - ), - 132..133, - ), - ( - Text( - "for", - ), - 134..137, - ), - ( - Text( - "(", - ), - 138..139, - ), - ( - Text( - "i", - ), - 139..140, - ), - ( - Text( - "=", - ), - 140..141, - ), - ( - Text( - "1", - ), - 141..142, - ), - ( - Text( - ";", - ), - 142..143, - ), - ( - Text( - "i", - ), - 144..145, - ), - ( - Text( - "<=", - ), - 145..147, - ), - ( - Text( - "NF", - ), - 147..149, - ), - ( - Text( - ";", - ), - 149..150, - ), - ( - Text( - "i", - ), - 151..152, - ), - ( - Text( - "+", - ), - 152..153, - ), - ( - Text( - "+", - ), - 153..154, - ), - ( - Text( - ")", + "s=0; for (i=1; i<=NF; i++) s+=", ), - 154..155, + 129..159, ), ( Text( - "s", - ), - 156..157, - ), - ( - Text( - "+=", - ), - 157..159, - ), - ( - Escape( - "$", + "\\$", ), 159..161, ), ( Text( - "i", - ), - 161..162, - ), - ( - Text( - ";", - ), - 162..163, - ), - ( - Text( - "print", - ), - 164..169, - ), - ( - Text( - "s", - ), - 170..171, - ), - ( - Text( - "}", - ), - 171..172, - ), - ( - Text( - "'", + "i; print s}' ", ), - 172..173, + 161..174, ), ( Text( @@ -460,21 +292,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", - ), - 342..346, - ), - ( - Text( - "\"", - ), - 347..348, - ), - ( - Text( - "(", + "echo \"(", ), - 348..349, + 341..349, ), ( Expression( @@ -492,63 +312,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "+", - ), - 357..358, - ), - ( - Text( - "0", - ), - 358..359, - ), - ( - Text( - ".", - ), - 359..360, - ), - ( - Text( - "5", - ), - 360..361, - ), - ( - Text( - ")", - ), - 361..362, - ), - ( - Text( - "/", - ), - 362..363, - ), - ( - Text( - "1", - ), - 363..364, - ), - ( - Text( - "\"", - ), - 364..365, - ), - ( - Text( - "|", - ), - 366..367, - ), - ( - Text( - "bc", + "+0.5)/1\" | bc ", ), - 368..370, + 357..371, ), ( Text( @@ -614,21 +380,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", - ), - 406..410, - ), - ( - Text( - "\"", - ), - 411..412, - ), - ( - Text( - "(", + "echo \"(", ), - 412..413, + 405..413, ), ( Expression( @@ -644,59 +398,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), 413..421, ), - ( - CommandOption( - "-0", - ), - 421..423, - ), - ( - Text( - ".", - ), - 423..424, - ), - ( - Text( - "5", - ), - 424..425, - ), - ( - Text( - ")", - ), - 425..426, - ), - ( - Text( - "/", - ), - 426..427, - ), - ( - Text( - "1", - ), - 427..428, - ), - ( - Text( - "\"", - ), - 428..429, - ), - ( - Text( - "|", - ), - 430..431, - ), ( Text( - "bc", + "-0.5)/1\" | bc ", ), - 432..434, + 421..435, ), ( Text( @@ -816,15 +522,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", - ), - 589..593, - ), - ( - Text( - "\"", + "echo \"", ), - 594..595, + 588..595, ), ( Expression( @@ -842,195 +542,57 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" | awk '", ), - 603..604, + 603..612, ), ( Text( - "|", - ), - 605..606, - ), - ( - Text( - "awk", - ), - 607..610, - ), - ( - Text( - "'", - ), - 611..612, - ), - ( - Escape( - "{", + "\\{", ), 612..614, ), ( Text( - "printf", - ), - 614..620, - ), - ( - Text( - "\"", - ), - 621..622, - ), - ( - Text( - "%", - ), - 622..623, - ), - ( - Text( - "d", - ), - 623..624, - ), - ( - Text( - "\"", + "printf \"%d\", (", ), - 624..625, + 614..628, ), ( Text( - ",", - ), - 625..626, - ), - ( - Text( - "(", - ), - 627..628, - ), - ( - Escape( - "$", + "\\$", ), 628..630, ), ( Text( - "1", - ), - 630..631, - ), - ( - Text( - "<", - ), - 632..633, - ), - ( - Text( - "0", - ), - 634..635, - ), - ( - Text( - "?", + "1 < 0 ? 
int(", ), - 636..637, + 630..642, ), ( Text( - "int", - ), - 638..641, - ), - ( - Text( - "(", - ), - 641..642, - ), - ( - Escape( - "$", + "\\$", ), 642..644, ), ( Text( - "1", - ), - 644..645, - ), - ( - Text( - ")", - ), - 645..646, - ), - ( - CommandOption( - "-1", - ), - 647..650, - ), - ( - Text( - ":", + "1) - 1 : int(", ), - 651..652, + 644..657, ), ( Text( - "int", - ), - 653..656, - ), - ( - Text( - "(", - ), - 656..657, - ), - ( - Escape( - "$", + "\\$", ), 657..659, ), ( Text( - "1", - ), - 659..660, - ), - ( - Text( - ")", - ), - 660..661, - ), - ( - Text( - ")", - ), - 661..662, - ), - ( - Text( - "}", - ), - 662..663, - ), - ( - Text( - "'", + "1))}' ", ), - 663..664, + 659..665, ), ( Text( diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha040__stdlib_text-2.snap b/tests/grammar/snapshots/r#mod__grammar__alpha040__stdlib_text-2.snap index 7767c6e..cad2ccc 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha040__stdlib_text-2.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha040__stdlib_text-2.snap @@ -88,18 +88,18 @@ expression: parse_unwrap(&tokens) Text( [ ( - Escape( + Text( ( - "$", + "\\$", 143..145, ), ), 143..145, ), ( - Escape( + Text( ( - "{", + "\\{", 145..147, ), ), @@ -108,20 +108,11 @@ expression: parse_unwrap(&tokens) ( Text( ( - "source", - 147..153, - ), - ), - 147..153, - ), - ( - Text( - ( - "//", - 153..155, + "source//", + 147..155, ), ), - 153..155, + 147..155, ), ( Expression( @@ -266,18 +257,18 @@ expression: parse_unwrap(&tokens) Text( [ ( - Escape( + Text( ( - "$", + "\\$", 331..333, ), ), 331..333, ), ( - Escape( + Text( ( - "{", + "\\{", 333..335, ), ), @@ -286,20 +277,11 @@ expression: parse_unwrap(&tokens) ( Text( ( - "source", - 335..341, - ), - ), - 335..341, - ), - ( - Text( - ( - "/", - 341..342, + "source/", + 335..342, ), ), - 341..342, + 335..342, ), ( Expression( @@ -564,9 +546,9 @@ expression: parse_unwrap(&tokens) Text( [ ( - Escape( + Text( ( - "/", + "\\/", 636..638, ), ), @@ -627,9 +609,9 @@ expression: parse_unwrap(&tokens) Text( [ ( - Escape( + Text( ( - "/", + "\\/", 682..684, ), ), @@ -715,189 +697,81 @@ expression: parse_unwrap(&tokens) ), ( Text( - "re", + "re='", ), - 918..920, + 917..922, ), ( Text( - "=", + "\\b", ), - 920..921, + 922..924, ), ( Text( - "'", - ), - 921..922, - ), - ( - Escape( - "bCopyright", - ), - 922..933, - ), - ( - Escape( - "b", + "Copyright", ), - 933..935, + 924..933, ), ( Text( - ".", + "\\b", ), - 935..936, + 933..935, ), ( Text( - "+", - ), - 936..937, - ), - ( - Escape( - "bFree", + ".+", ), - 937..943, + 935..937, ), ( Text( - "Software", + "\\b", ), - 944..952, + 937..939, ), ( Text( - "Foundation", - ), - 953..963, - ), - ( - Escape( - "b'", + "Free Software Foundation", ), - 963..966, + 939..963, ), ( Text( - ";", + "\\b", ), - 966..967, + 963..965, ), ( Text( - "[", + "'; [[ ", ), - 968..969, + 965..971, ), ( Text( - "[", - ), - 969..970, - ), - ( - Escape( - "$", + "\\$", ), 971..973, ), ( Text( - "(", - ), - 973..974, - ), - ( - Text( - "sed", - ), - 974..977, - ), - ( - CommandOption( - "--version", - ), - 978..987, - ), - ( - Text( - "2", - ), - 988..989, - ), - ( - Text( - ">", - ), - 989..990, - ), - ( - Text( - "/", - ), - 990..991, - ), - ( - Text( - "dev", + "(sed --version 2>/dev/null) =~ ", ), - 991..994, + 973..1004, ), ( Text( - "/", - ), - 994..995, - ), - ( - Text( - "null", - ), - 995..999, - ), - ( - Text( - ")", - ), - 999..1000, - ), - ( - Text( - "=", - ), - 1001..1002, - ), - ( - Text( - "~", - ), - 1002..1003, - ), - ( - Escape( - "$", + "\\$", ), 
1004..1006, ), ( Text( - "re", - ), - 1006..1008, - ), - ( - Text( - "]", + "re ]] ", ), - 1009..1010, - ), - ( - Text( - "]", - ), - 1010..1011, + 1006..1012, ), ( Text( @@ -955,20 +829,11 @@ expression: parse_unwrap(&tokens) ( Text( ( - "-", - 1055..1056, - ), - ), - 1055..1056, - ), - ( - Text( - ( - "r", - 1056..1057, + "-r", + 1055..1057, ), ), - 1056..1057, + 1055..1057, ), ], ), @@ -984,20 +849,11 @@ expression: parse_unwrap(&tokens) ( Text( ( - "-", - 1065..1066, - ), - ), - 1065..1066, - ), - ( - Text( - ( - "E", - 1066..1067, + "-E", + 1065..1067, ), ), - 1066..1067, + 1065..1067, ), ], ), @@ -1031,15 +887,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "echo", + "echo \"", ), - 1090..1094, - ), - ( - Text( - "\"", - ), - 1095..1096, + 1089..1096, ), ( Expression( @@ -1057,27 +907,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", - ), - 1104..1105, - ), - ( - Text( - "|", + "\" | sed \"", ), - 1106..1107, - ), - ( - Text( - "sed", - ), - 1108..1111, - ), - ( - Text( - "\"", - ), - 1112..1113, + 1104..1113, ), ( Expression( @@ -1095,33 +927,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", - ), - 1119..1120, - ), - ( - CommandOption( - "-e", - ), - 1121..1123, - ), - ( - Text( - "\"", - ), - 1124..1125, - ), - ( - Text( - "s", - ), - 1125..1126, - ), - ( - Text( - "/", + "\" -e \"s/", ), - 1126..1127, + 1119..1127, ), ( Expression( @@ -1159,21 +967,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "/", - ), - 1145..1146, - ), - ( - Text( - "g", - ), - 1146..1147, - ), - ( - Text( - "\"", + "/g\" ", ), - 1147..1148, + 1145..1149, ), ( Text( @@ -1228,15 +1024,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "echo", - ), - 1189..1193, - ), - ( - Text( - "\"", + "echo \"", ), - 1194..1195, + 1188..1195, ), ( Expression( @@ -1254,45 +1044,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", - ), - 1203..1204, - ), - ( - Text( - "|", - ), - 1205..1206, - ), - ( - Text( - "sed", - ), - 1207..1210, - ), - ( - CommandOption( - "-e", - ), - 1211..1213, - ), - ( - Text( - "\"", - ), - 1214..1215, - ), - ( - Text( - "s", + "\" | sed -e \"s/", ), - 1215..1216, - ), - ( - Text( - "/", - ), - 1216..1217, + 1203..1217, ), ( Expression( @@ -1330,21 +1084,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "/", - ), - 1235..1236, - ), - ( - Text( - "g", - ), - 1236..1237, - ), - ( - Text( - "\"", + "/g\" ", ), - 1237..1238, + 1235..1239, ), ( Text( @@ -1498,21 +1240,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "IFS", - ), - 1438..1441, - ), - ( - Text( - "=", - ), - 1441..1442, - ), - ( - Text( - "\"", + "IFS=\"", ), - 1442..1443, + 1437..1443, ), ( Expression( @@ -1530,33 +1260,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", - ), - 1454..1455, - ), - ( - Text( - "read", - ), - 1456..1460, - ), - ( - CommandOption( - "-rd", - ), - 1461..1464, - ), - ( - Text( - "''", - ), - 1465..1467, - ), - ( - CommandOption( - "-a", + "\" read -rd '' -a ", ), - 1468..1470, + 1454..1471, ), ( Expression( @@ -1583,49 +1289,13 @@ expression: parse_unwrap(&tokens) ), ( Text( - "<", - ), - 1487..1488, - ), - ( - Text( - "<", - ), - 1489..1490, - ), - ( - Text( - "(", - ), - 1490..1491, - ), - ( - Text( - "printf", - ), - 1491..1497, - ), - ( - Text( - "%", - ), - 1498..1499, - ), - ( - Text( - "s", + " < <(printf %s \"", ), - 1499..1500, + 1486..1502, ), ( Text( - "\"", - ), - 1501..1502, - ), - ( - Escape( - "$", + "\\$", ), 1502..1504, ), @@ -1654,15 +1324,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", - ), - 1517..1518, - ), - ( - Text( - ")", + "\") ", ), 
- 1518..1519, + 1517..1520, ), ( Text( @@ -1787,9 +1451,9 @@ expression: parse_unwrap(&tokens) Text( [ ( - Escape( + Text( ( - "n", + "\\n", 1686..1688, ), ), @@ -1895,7 +1559,17 @@ expression: parse_unwrap(&tokens) ), ( Text( - [], + [ + ( + Text( + ( + " ", + 1834..1835, + ), + ), + 1834..1835, + ), + ], ), 1833..1836, ), @@ -2010,21 +1684,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "IFS", - ), - 1959..1962, - ), - ( - Text( - "=", - ), - 1962..1963, - ), - ( - Text( - "\"", + "IFS=\"", ), - 1963..1964, + 1958..1964, ), ( Expression( @@ -2042,39 +1704,21 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", - ), - 1975..1976, - ), - ( - Text( - ";", + "\" ; echo \"", ), - 1977..1978, + 1975..1985, ), ( Text( - "echo", + "\\$", ), - 1979..1983, + 1985..1987, ), ( Text( - "\"", + "\\{", ), - 1984..1985, - ), - ( - Escape( - "$", - ), - 1985..1987, - ), - ( - Escape( - "{", - ), - 1987..1989, + 1987..1989, ), ( Expression( @@ -2101,33 +1745,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "[", - ), - 2002..2003, - ), - ( - Text( - "*", - ), - 2003..2004, - ), - ( - Text( - "]", - ), - 2004..2005, - ), - ( - Text( - "}", - ), - 2005..2006, - ), - ( - Text( - "\"", + "[*]}\" ", ), - 2006..2007, + 2002..2008, ), ( Text( @@ -2229,15 +1849,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "echo", - ), - 2123..2127, - ), - ( - Text( - "\"", + "echo \"", ), - 2128..2129, + 2122..2129, ), ( Expression( @@ -2255,105 +1869,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", + "\" | sed -e 's/^[[:space:]]*//' ", ), - 2135..2136, - ), - ( - Text( - "|", - ), - 2137..2138, - ), - ( - Text( - "sed", - ), - 2139..2142, - ), - ( - CommandOption( - "-e", - ), - 2143..2145, - ), - ( - Text( - "'s", - ), - 2146..2148, - ), - ( - Text( - "/", - ), - 2148..2149, - ), - ( - Text( - "^", - ), - 2149..2150, - ), - ( - Text( - "[", - ), - 2150..2151, - ), - ( - Text( - "[", - ), - 2151..2152, - ), - ( - Text( - ":", - ), - 2152..2153, - ), - ( - Text( - "space", - ), - 2153..2158, - ), - ( - Text( - ":", - ), - 2158..2159, - ), - ( - Text( - "]", - ), - 2159..2160, - ), - ( - Text( - "]", - ), - 2160..2161, - ), - ( - Text( - "*", - ), - 2161..2162, - ), - ( - Text( - "//", - ), - 2162..2164, - ), - ( - Text( - "'", - ), - 2164..2165, + 2135..2166, ), ( Text( @@ -2455,15 +1973,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "echo", - ), - 2282..2286, - ), - ( - Text( - "\"", + "echo \"", ), - 2287..2288, + 2281..2288, ), ( Expression( @@ -2481,105 +1993,21 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", - ), - 2294..2295, - ), - ( - Text( - "|", - ), - 2296..2297, - ), - ( - Text( - "sed", - ), - 2298..2301, - ), - ( - CommandOption( - "-e", - ), - 2302..2304, - ), - ( - Text( - "'s", - ), - 2305..2307, - ), - ( - Text( - "/", - ), - 2307..2308, - ), - ( - Text( - "[", - ), - 2308..2309, - ), - ( - Text( - "[", - ), - 2309..2310, - ), - ( - Text( - ":", - ), - 2310..2311, - ), - ( - Text( - "space", - ), - 2311..2316, - ), - ( - Text( - ":", - ), - 2316..2317, - ), - ( - Text( - "]", - ), - 2317..2318, - ), - ( - Text( - "]", + "\" | sed -e 's/[[:space:]]*", ), - 2318..2319, + 2294..2320, ), ( Text( - "*", - ), - 2319..2320, - ), - ( - Escape( - "$", + "\\$", ), 2320..2322, ), ( Text( - "//", + "//' ", ), - 2322..2324, - ), - ( - Text( - "'", - ), - 2324..2325, + 2322..2326, ), ( Text( @@ -2788,15 +2216,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "echo", + "echo \"", ), - 2552..2556, - ), - ( - Text( - "\"", - ), - 2557..2558, + 2551..2558, ), ( Expression( @@ -2814,105 
+2236,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", - ), - 2564..2565, - ), - ( - Text( - "|", - ), - 2566..2567, - ), - ( - Text( - "tr", - ), - 2568..2570, - ), - ( - Text( - "'", - ), - 2571..2572, - ), - ( - Text( - "[", - ), - 2572..2573, - ), - ( - Text( - ":", - ), - 2573..2574, - ), - ( - Text( - "upper", - ), - 2574..2579, - ), - ( - Text( - ":", - ), - 2579..2580, - ), - ( - Text( - "]", - ), - 2580..2581, - ), - ( - Text( - "'", - ), - 2581..2582, - ), - ( - Text( - "'", - ), - 2583..2584, - ), - ( - Text( - "[", - ), - 2584..2585, - ), - ( - Text( - ":", - ), - 2585..2586, - ), - ( - Text( - "lower", - ), - 2586..2591, - ), - ( - Text( - ":", - ), - 2591..2592, - ), - ( - Text( - "]", + "\" | tr '[:upper:]' '[:lower:]' ", ), - 2592..2593, - ), - ( - Text( - "'", - ), - 2593..2594, + 2564..2595, ), ( Text( @@ -3014,15 +2340,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "echo", + "echo \"", ), - 2704..2708, - ), - ( - Text( - "\"", - ), - 2709..2710, + 2703..2710, ), ( Expression( @@ -3040,105 +2360,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", - ), - 2716..2717, - ), - ( - Text( - "|", + "\" | tr '[:lower:]' '[:upper:]' ", ), - 2718..2719, - ), - ( - Text( - "tr", - ), - 2720..2722, - ), - ( - Text( - "'", - ), - 2723..2724, - ), - ( - Text( - "[", - ), - 2724..2725, - ), - ( - Text( - ":", - ), - 2725..2726, - ), - ( - Text( - "lower", - ), - 2726..2731, - ), - ( - Text( - ":", - ), - 2731..2732, - ), - ( - Text( - "]", - ), - 2732..2733, - ), - ( - Text( - "'", - ), - 2733..2734, - ), - ( - Text( - "'", - ), - 2735..2736, - ), - ( - Text( - "[", - ), - 2736..2737, - ), - ( - Text( - ":", - ), - 2737..2738, - ), - ( - Text( - "upper", - ), - 2738..2743, - ), - ( - Text( - ":", - ), - 2743..2744, - ), - ( - Text( - "]", - ), - 2744..2745, - ), - ( - Text( - "'", - ), - 2745..2746, + 2716..2747, ), ( Text( @@ -3235,21 +2459,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "[", - ), - 2924..2925, - ), - ( - CommandOption( - "-n", + "[ -n \"", ), - 2926..2928, - ), - ( - Text( - "\"", - ), - 2929..2930, + 2923..2930, ), ( Expression( @@ -3267,127 +2479,49 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", - ), - 2936..2937, - ), - ( - Text( - "]", - ), - 2938..2939, - ), - ( - Text( - "&&", - ), - 2940..2942, - ), - ( - Text( - "[", - ), - 2943..2944, - ), - ( - Text( - "\"", - ), - 2945..2946, - ), - ( - Expression( - ( - Var( - ( - "text", - 2947..2951, - ), - ), - 2947..2951, - ), - ), - 2946..2952, - ), - ( - Text( - "\"", + "\" ] && [ \"", ), - 2952..2953, - ), - ( - CommandOption( - "-eq", - ), - 2954..2957, - ), - ( - Text( - "\"", - ), - 2958..2959, + 2936..2946, ), ( Expression( ( Var( ( - "text", - 2960..2964, - ), - ), - 2960..2964, - ), - ), - 2959..2965, - ), - ( - Text( - "\"", - ), - 2965..2966, - ), - ( - Text( - "]", - ), - 2967..2968, - ), - ( - Text( - "2", - ), - 2969..2970, - ), - ( - Text( - ">", - ), - 2970..2971, - ), - ( - Text( - "/", + "text", + 2947..2951, + ), + ), + 2947..2951, + ), ), - 2971..2972, + 2946..2952, ), ( Text( - "dev", + "\" -eq \"", ), - 2972..2975, + 2952..2959, ), ( - Text( - "/", + Expression( + ( + Var( + ( + "text", + 2960..2964, + ), + ), + 2960..2964, + ), ), - 2975..2976, + 2959..2965, ), ( Text( - "null", + "\" ] 2>/dev/null ", ), - 2976..2980, + 2965..2981, ), ( Text( @@ -3543,67 +2677,19 @@ expression: parse_unwrap(&tokens) ), ( Text( - "for", - ), - 3145..3148, - ), - ( - Text( - "(", - ), - 3149..3150, - ), - ( - Text( - "(", - ), - 3150..3151, - ), - ( - Text( - "i", - ), - 3151..3152, 
- ), - ( - Text( - "=", - ), - 3152..3153, - ), - ( - Text( - "0", - ), - 3153..3154, - ), - ( - Text( - ";", - ), - 3154..3155, - ), - ( - Text( - "i", + "for ((i=0; i<", ), - 3156..3157, + 3144..3158, ), ( Text( - "<", - ), - 3157..3158, - ), - ( - Escape( - "$", + "\\$", ), 3158..3160, ), ( - Escape( - "{", + Text( + "\\{", ), 3160..3162, ), @@ -3638,57 +2724,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "}", - ), - 3176..3177, - ), - ( - Text( - ";", - ), - 3177..3178, - ), - ( - Text( - "i", - ), - 3179..3180, - ), - ( - Text( - "+", + "}; i++)); do\n ", ), - 3180..3181, - ), - ( - Text( - "+", - ), - 3181..3182, - ), - ( - Text( - ")", - ), - 3182..3183, - ), - ( - Text( - ")", - ), - 3183..3184, - ), - ( - Text( - ";", - ), - 3184..3185, - ), - ( - Text( - "do", - ), - 3186..3188, + 3176..3197, ), ( Expression( @@ -3715,31 +2753,19 @@ expression: parse_unwrap(&tokens) ), ( Text( - "+=", - ), - 3211..3213, - ), - ( - Text( - "(", + "+=( \"", ), - 3213..3214, + 3211..3216, ), ( Text( - "\"", - ), - 3215..3216, - ), - ( - Escape( - "$", + "\\$", ), 3216..3218, ), ( - Escape( - "{", + Text( + "\\{", ), 3218..3220, ), @@ -3772,59 +2798,17 @@ expression: parse_unwrap(&tokens) ), 3233..3234, ), - ( - Escape( - "$", - ), - 3234..3236, - ), - ( - Text( - "i", - ), - 3236..3237, - ), - ( - Text( - ":", - ), - 3237..3238, - ), - ( - Text( - "1", - ), - 3238..3239, - ), - ( - Text( - "}", - ), - 3239..3240, - ), - ( - Text( - "\"", - ), - 3240..3241, - ), - ( - Text( - ")", - ), - 3242..3243, - ), ( Text( - ";", + "\\$", ), - 3243..3244, + 3234..3236, ), ( Text( - "done", + "i:1}\" );\n done ", ), - 3249..3253, + 3236..3254, ), ( Text( @@ -3967,27 +2951,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "if", - ), - 3397..3399, - ), - ( - Text( - "[", + "if [[ \"", ), - 3400..3401, - ), - ( - Text( - "[", - ), - 3401..3402, - ), - ( - Text( - "\"", - ), - 3403..3404, + 3396..3404, ), ( Expression( @@ -4005,27 +2971,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", - ), - 3410..3411, - ), - ( - Text( - "==", - ), - 3412..3414, - ), - ( - Text( - "*", + "\" == *\"", ), - 3415..3416, - ), - ( - Text( - "\"", - ), - 3416..3417, + 3410..3417, ), ( Expression( @@ -4043,57 +2991,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", - ), - 3425..3426, - ), - ( - Text( - "*", - ), - 3426..3427, - ), - ( - Text( - "]", - ), - 3428..3429, - ), - ( - Text( - "]", + "\"* ]]; then\n echo 1\n fi ", ), - 3429..3430, - ), - ( - Text( - ";", - ), - 3430..3431, - ), - ( - Text( - "then", - ), - 3432..3436, - ), - ( - Text( - "echo", - ), - 3441..3445, - ), - ( - Text( - "1", - ), - 3446..3447, - ), - ( - Text( - "fi", - ), - 3450..3452, + 3425..3453, ), ( Text( @@ -4778,9 +3678,9 @@ expression: parse_unwrap(&tokens) Text( [ ( - Escape( + Text( ( - "/", + "\\/", 4159..4161, ), ), @@ -4863,216 +3763,108 @@ expression: parse_unwrap(&tokens) 4301..4370, ), ), - 4301..4370, - ), - ( - Comment( - ( - Comment( - "contains \"GNU sed\".", - ), - 4382..4405, - ), - ), - 4382..4405, - ), - ( - Expression( - ( - Command( - [], - [ - ( - Text( - "$", - ), - 4417..4418, - ), - ( - Text( - "re", - ), - 4419..4421, - ), - ( - Text( - "=", - ), - 4421..4422, - ), - ( - Text( - "'", - ), - 4422..4423, - ), - ( - Escape( - "bCopyright", - ), - 4423..4434, - ), - ( - Escape( - "b", - ), - 4434..4436, - ), - ( - Text( - ".", - ), - 4436..4437, - ), - ( - Text( - "+", - ), - 4437..4438, - ), - ( - Escape( - "bFree", - ), - 4438..4444, - ), - ( - Text( - "Software", - ), - 4445..4453, - ), - ( - Text( - "Foundation", 
- ), - 4454..4464, - ), - ( - Escape( - "b'", - ), - 4464..4467, - ), - ( - Text( - ";", - ), - 4467..4468, - ), - ( - Text( - "[", - ), - 4469..4470, - ), + 4301..4370, + ), + ( + Comment( + ( + Comment( + "contains \"GNU sed\".", + ), + 4382..4405, + ), + ), + 4382..4405, + ), + ( + Expression( + ( + Command( + [], + [ ( Text( - "[", - ), - 4470..4471, - ), - ( - Escape( "$", ), - 4472..4474, - ), - ( - Text( - "(", - ), - 4474..4475, + 4417..4418, ), ( Text( - "sed", + "re='", ), - 4475..4478, - ), - ( - CommandOption( - "--version", - ), - 4479..4488, + 4418..4423, ), ( Text( - "2", + "\\b", ), - 4489..4490, + 4423..4425, ), ( Text( - ">", + "Copyright", ), - 4490..4491, + 4425..4434, ), ( Text( - "/", + "\\b", ), - 4491..4492, + 4434..4436, ), ( Text( - "dev", + ".+", ), - 4492..4495, + 4436..4438, ), ( Text( - "/", + "\\b", ), - 4495..4496, + 4438..4440, ), ( Text( - "null", + "Free Software Foundation", ), - 4496..4500, + 4440..4464, ), ( Text( - ")", + "\\b", ), - 4500..4501, + 4464..4466, ), ( Text( - "=", + "'; [[ ", ), - 4502..4503, + 4466..4472, ), ( Text( - "~", + "\\$", ), - 4503..4504, - ), - ( - Escape( - "$", - ), - 4505..4507, + 4472..4474, ), ( Text( - "re", + "(sed --version 2>/dev/null) =~ ", ), - 4507..4509, + 4474..4505, ), ( Text( - "]", + "\\$", ), - 4510..4511, + 4505..4507, ), ( Text( - "]", + "re ]] ", ), - 4511..4512, + 4507..4513, ), ( Text( @@ -5130,20 +3922,11 @@ expression: parse_unwrap(&tokens) ( Text( ( - "-", - 4556..4557, - ), - ), - 4556..4557, - ), - ( - Text( - ( - "r", - 4557..4558, + "-r", + 4556..4558, ), ), - 4557..4558, + 4556..4558, ), ], ), @@ -5159,20 +3942,11 @@ expression: parse_unwrap(&tokens) ( Text( ( - "-", - 4566..4567, - ), - ), - 4566..4567, - ), - ( - Text( - ( - "E", - 4567..4568, + "-E", + 4566..4568, ), ), - 4567..4568, + 4566..4568, ), ], ), @@ -5205,15 +3979,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "echo", - ), - 4593..4597, - ), - ( - Text( - "\"", + "echo \"", ), - 4598..4599, + 4592..4599, ), ( Expression( @@ -5231,27 +3999,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", - ), - 4607..4608, - ), - ( - Text( - "|", - ), - 4609..4610, - ), - ( - Text( - "sed", - ), - 4611..4614, - ), - ( - Text( - "\"", + "\" | sed \"", ), - 4615..4616, + 4607..4616, ), ( Expression( @@ -5269,27 +4019,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", - ), - 4622..4623, - ), - ( - CommandOption( - "-ne", - ), - 4624..4627, - ), - ( - Text( - "\"", - ), - 4628..4629, - ), - ( - Text( - "/", + "\" -ne \"/", ), - 4629..4630, + 4622..4630, ), ( Expression( @@ -5307,21 +4039,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "/", - ), - 4638..4639, - ), - ( - Text( - "p", - ), - 4639..4640, - ), - ( - Text( - "\"", + "/p\" ", ), - 4640..4641, + 4638..4642, ), ( Text( @@ -5374,15 +4094,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "echo", - ), - 4684..4688, - ), - ( - Text( - "\"", + "echo \"", ), - 4689..4690, + 4683..4690, ), ( Expression( @@ -5400,39 +4114,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", - ), - 4698..4699, - ), - ( - Text( - "|", - ), - 4700..4701, - ), - ( - Text( - "sed", - ), - 4702..4705, - ), - ( - CommandOption( - "-ne", - ), - 4706..4709, - ), - ( - Text( - "\"", - ), - 4710..4711, - ), - ( - Text( - "/", + "\" | sed -ne \"/", ), - 4711..4712, + 4698..4712, ), ( Expression( @@ -5450,21 +4134,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "/", - ), - 4720..4721, - ), - ( - Text( - "p", - ), - 4721..4722, - ), - ( - Text( - "\"", + "/p\" ", ), - 4722..4723, + 4720..4724, ), ( 
Text( @@ -5888,15 +4560,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "echo", - ), - 5168..5172, - ), - ( - Text( - "\"", + "echo \"", ), - 5173..5174, + 5167..5174, ), ( Expression( @@ -5914,21 +4580,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", - ), - 5180..5181, - ), - ( - Text( - "|", - ), - 5182..5183, - ), - ( - Text( - "rev", + "\" | rev ", ), - 5184..5187, + 5180..5188, ), ( Text( @@ -6052,27 +4706,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "if", - ), - 5310..5312, - ), - ( - Text( - "[", - ), - 5313..5314, - ), - ( - Text( - "[", - ), - 5314..5315, - ), - ( - Text( - "\"", + "if [[ \"", ), - 5316..5317, + 5309..5317, ), ( Expression( @@ -6090,21 +4726,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", - ), - 5323..5324, - ), - ( - Text( - "==", - ), - 5325..5327, - ), - ( - Text( - "\"", + "\" == \"", ), - 5328..5329, + 5323..5329, ), ( Expression( @@ -6122,57 +4746,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", - ), - 5337..5338, - ), - ( - Text( - "*", - ), - 5338..5339, - ), - ( - Text( - "]", - ), - 5340..5341, - ), - ( - Text( - "]", - ), - 5341..5342, - ), - ( - Text( - ";", - ), - 5342..5343, - ), - ( - Text( - "then", - ), - 5344..5348, - ), - ( - Text( - "echo", - ), - 5353..5357, - ), - ( - Text( - "1", - ), - 5358..5359, - ), - ( - Text( - "fi", + "\"* ]]; then\n echo 1\n fi ", ), - 5362..5364, + 5337..5365, ), ( Text( @@ -6339,27 +4915,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "if", - ), - 5507..5509, - ), - ( - Text( - "[", - ), - 5510..5511, - ), - ( - Text( - "[", - ), - 5511..5512, - ), - ( - Text( - "\"", + "if [[ \"", ), - 5513..5514, + 5506..5514, ), ( Expression( @@ -6377,27 +4935,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", - ), - 5520..5521, - ), - ( - Text( - "==", - ), - 5522..5524, - ), - ( - Text( - "*", - ), - 5525..5526, - ), - ( - Text( - "\"", + "\" == *\"", ), - 5526..5527, + 5520..5527, ), ( Expression( @@ -6415,51 +4955,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", - ), - 5535..5536, - ), - ( - Text( - "]", - ), - 5537..5538, - ), - ( - Text( - "]", - ), - 5538..5539, - ), - ( - Text( - ";", - ), - 5539..5540, - ), - ( - Text( - "then", - ), - 5541..5545, - ), - ( - Text( - "echo", - ), - 5550..5554, - ), - ( - Text( - "1", - ), - 5555..5556, - ), - ( - Text( - "fi", + "\" ]]; then\n echo 1\n fi ", ), - 5559..5561, + 5535..5562, ), ( Text( @@ -6853,27 +5351,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "printf", - ), - 6121..6127, - ), - ( - Text( - "\"", - ), - 6128..6129, - ), - ( - Text( - "%", - ), - 6129..6130, - ), - ( - Text( - ".", + "printf \"%.", ), - 6130..6131, + 6120..6131, ), ( Expression( @@ -6887,49 +5367,31 @@ expression: parse_unwrap(&tokens) 6132..6138, ), ), - 6131..6139, - ), - ( - Text( - "s", - ), - 6139..6140, - ), - ( - Text( - "\"", - ), - 6140..6141, + 6131..6139, ), ( Text( - "\"", + "s\" \"", ), - 6142..6143, + 6139..6143, ), ( - Escape( - "$", + Text( + "\\$", ), 6143..6145, ), - ( - Escape( - "{", - ), - 6145..6147, - ), ( Text( - "text", + "\\{", ), - 6147..6151, + 6145..6147, ), ( Text( - ":", + "text:", ), - 6151..6152, + 6147..6152, ), ( Expression( @@ -6947,15 +5409,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "}", - ), - 6159..6160, - ), - ( - Text( - "\"", + "}\" ", ), - 6160..6161, + 6159..6162, ), ( Text( @@ -7106,69 +5562,27 @@ expression: parse_unwrap(&tokens) ), ( Text( - "printf", - ), - 6424..6430, - ), - ( - Text( - "\"", - ), - 6431..6432, - ), - ( - Text( - "%", - ), - 6432..6433, - ), - ( - Text( - ".", - ), 
- 6433..6434, - ), - ( - Text( - "1s", - ), - 6434..6436, - ), - ( - Text( - "\"", + "printf \"%.1s\" \"", ), - 6436..6437, + 6423..6439, ), ( Text( - "\"", - ), - 6438..6439, - ), - ( - Escape( - "$", + "\\$", ), 6439..6441, ), - ( - Escape( - "{", - ), - 6441..6443, - ), ( Text( - "text", + "\\{", ), - 6443..6447, + 6441..6443, ), ( Text( - ":", + "text:", ), - 6447..6448, + 6443..6448, ), ( Expression( @@ -7186,15 +5600,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "}", - ), - 6455..6456, - ), - ( - Text( - "\"", + "}\" ", ), - 6456..6457, + 6455..6458, ), ( Text( @@ -7392,63 +5800,45 @@ expression: parse_unwrap(&tokens) ), ( Text( - "echo", + "echo ", ), - 6682..6686, + 6681..6687, ), ( - Escape( - "\"", + Text( + "\\\"", ), 6687..6689, ), - ( - Escape( - "$", - ), - 6689..6691, - ), - ( - Escape( - "{", - ), - 6691..6693, - ), ( Text( - "BASH_VERSINFO", + "\\$", ), - 6693..6706, + 6689..6691, ), ( Text( - "[", + "\\{", ), - 6706..6707, + 6691..6693, ), ( Text( - "0", + "BASH_VERSINFO[0]}", ), - 6707..6708, + 6693..6710, ), ( Text( - "]", + "\\\"", ), - 6708..6709, + 6710..6712, ), ( Text( - "}", + " ", ), - 6709..6710, - ), - ( - Escape( - "\"", - ), - 6710..6712, + 6712..6713, ), ( Text( @@ -7529,45 +5919,45 @@ expression: parse_unwrap(&tokens) ), ( Text( - "echo", + "echo ", ), - 6774..6778, + 6773..6779, ), ( - Escape( - "\"", + Text( + "\\\"", ), 6779..6781, ), ( - Escape( - "$", + Text( + "\\$", ), 6781..6783, ), ( - Escape( - "{", + Text( + "\\{", ), 6783..6785, ), ( Text( - "text^", + "text^}", ), - 6785..6790, + 6785..6791, ), ( Text( - "}", + "\\\"", ), - 6790..6791, + 6791..6793, ), ( - Escape( - "\"", + Text( + " ", ), - 6791..6793, + 6793..6794, ), ( Text( @@ -7617,189 +6007,81 @@ expression: parse_unwrap(&tokens) ), ( Text( - "re", + "re='", ), - 6847..6849, + 6846..6851, ), ( Text( - "=", + "\\b", ), - 6849..6850, + 6851..6853, ), ( Text( - "'", - ), - 6850..6851, - ), - ( - Escape( - "bCopyright", - ), - 6851..6862, - ), - ( - Escape( - "b", + "Copyright", ), - 6862..6864, + 6853..6862, ), ( Text( - ".", + "\\b", ), - 6864..6865, + 6862..6864, ), ( Text( - "+", - ), - 6865..6866, - ), - ( - Escape( - "bFree", + ".+", ), - 6866..6872, + 6864..6866, ), ( Text( - "Software", + "\\b", ), - 6873..6881, + 6866..6868, ), ( Text( - "Foundation", - ), - 6882..6892, - ), - ( - Escape( - "b'", + "Free Software Foundation", ), - 6892..6895, + 6868..6892, ), ( Text( - ";", + "\\b", ), - 6895..6896, + 6892..6894, ), ( Text( - "[", + "'; [[ ", ), - 6897..6898, + 6894..6900, ), ( Text( - "[", - ), - 6898..6899, - ), - ( - Escape( - "$", + "\\$", ), 6900..6902, ), ( Text( - "(", - ), - 6902..6903, - ), - ( - Text( - "sed", - ), - 6903..6906, - ), - ( - CommandOption( - "--version", - ), - 6907..6916, - ), - ( - Text( - "2", - ), - 6917..6918, - ), - ( - Text( - ">", - ), - 6918..6919, - ), - ( - Text( - "/", - ), - 6919..6920, - ), - ( - Text( - "dev", - ), - 6920..6923, - ), - ( - Text( - "/", - ), - 6923..6924, - ), - ( - Text( - "null", - ), - 6924..6928, - ), - ( - Text( - ")", - ), - 6928..6929, - ), - ( - Text( - "=", + "(sed --version 2>/dev/null) =~ ", ), - 6930..6931, + 6902..6933, ), ( Text( - "~", - ), - 6931..6932, - ), - ( - Escape( - "$", + "\\$", ), 6933..6935, ), ( Text( - "re", - ), - 6935..6937, - ), - ( - Text( - "]", + "re ]] ", ), - 6938..6939, - ), - ( - Text( - "]", - ), - 6939..6940, + 6935..6941, ), ( Text( @@ -7864,15 +6146,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "echo", - ), - 6989..6993, - ), - ( - Text( - "\"", + "echo \"", ), - 6994..6995, 
+ 6988..6995, ), ( Expression( @@ -7890,49 +6166,13 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", - ), - 7001..7002, - ), - ( - Text( - "|", - ), - 7003..7004, - ), - ( - Text( - "sed", - ), - 7005..7008, - ), - ( - Text( - "\"", - ), - 7009..7010, - ), - ( - Text( - "s", + "\" | sed \"s/^", ), - 7010..7011, + 7001..7013, ), ( Text( - "/", - ), - 7011..7012, - ), - ( - Text( - "^", - ), - 7012..7013, - ), - ( - Escape( - "(", + "\\(", ), 7013..7015, ), @@ -7943,8 +6183,8 @@ expression: parse_unwrap(&tokens) 7015..7016, ), ( - Escape( - ")", + Text( + "\\)", ), 7016..7018, ), @@ -7955,28 +6195,22 @@ expression: parse_unwrap(&tokens) 7018..7019, ), ( - Escape( - "U", + Text( + "\\U", ), 7019..7021, ), - ( - Escape( - "1", - ), - 7021..7023, - ), ( Text( - "/", + "\\1", ), - 7023..7024, + 7021..7023, ), ( Text( - "\"", + "/\" ", ), - 7024..7025, + 7023..7026, ), ( Text( @@ -8360,21 +6594,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "printf", + "printf \"%", ), - 7395..7401, - ), - ( - Text( - "\"", - ), - 7402..7403, - ), - ( - Text( - "%", - ), - 7403..7404, + 7394..7404, ), ( Expression( @@ -8392,57 +6614,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "s", - ), - 7412..7413, - ), - ( - Text( - "\"", - ), - 7413..7414, - ), - ( - Text( - "\"", + "s\" \"\" | tr \" \" \"", ), - 7415..7416, - ), - ( - Text( - "\"", - ), - 7416..7417, - ), - ( - Text( - "|", - ), - 7418..7419, - ), - ( - Text( - "tr", - ), - 7420..7422, - ), - ( - Text( - "\"", - ), - 7423..7424, - ), - ( - Text( - "\"", - ), - 7425..7426, - ), - ( - Text( - "\"", - ), - 7427..7428, + 7412..7428, ), ( Expression( @@ -8460,9 +6634,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", + "\" ", ), - 7433..7434, + 7433..7435, ), ( Text( @@ -8747,21 +6921,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "printf", - ), - 7712..7718, - ), - ( - Text( - "\"", - ), - 7719..7720, - ), - ( - Text( - "%", + "printf \"%", ), - 7720..7721, + 7711..7721, ), ( Expression( @@ -8779,57 +6941,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "s", - ), - 7729..7730, - ), - ( - Text( - "\"", - ), - 7730..7731, - ), - ( - Text( - "\"", - ), - 7732..7733, - ), - ( - Text( - "\"", - ), - 7733..7734, - ), - ( - Text( - "|", - ), - 7735..7736, - ), - ( - Text( - "tr", - ), - 7737..7739, - ), - ( - Text( - "\"", - ), - 7740..7741, - ), - ( - Text( - "\"", - ), - 7742..7743, - ), - ( - Text( - "\"", + "s\" \"\" | tr \" \" \"", ), - 7744..7745, + 7729..7745, ), ( Expression( @@ -8847,9 +6961,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", + "\" ", ), - 7750..7751, + 7750..7752, ), ( Text( diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha040__stdlib_text.snap b/tests/grammar/snapshots/r#mod__grammar__alpha040__stdlib_text.snap index cf550a8..ee09ff2 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha040__stdlib_text.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha040__stdlib_text.snap @@ -89,39 +89,21 @@ expression: tokens ), ( Token( - "\\", + "\\$", ), - 143..144, + 143..145, ), ( Token( - "$", - ), - 144..145, - ), - ( - Token( - "\\", - ), - 145..146, - ), - ( - Token( - "{", - ), - 146..147, - ), - ( - Token( - "source", + "\\{", ), - 147..153, + 145..147, ), ( Token( - "//", + "source//", ), - 153..155, + 147..155, ), ( Token( @@ -269,39 +251,21 @@ expression: tokens ), ( Token( - "\\", - ), - 331..332, - ), - ( - Token( - "$", - ), - 332..333, - ), - ( - Token( - "\\", + "\\$", ), - 333..334, + 331..333, ), ( Token( - "{", - ), - 334..335, - ), - ( - Token( - "source", + "\\{", ), - 
335..341, + 333..335, ), ( Token( - "/", + "source/", ), - 341..342, + 335..342, ), ( Token( @@ -611,15 +575,9 @@ expression: tokens ), ( Token( - "\\", - ), - 636..637, - ), - ( - Token( - "/", + "\\/", ), - 637..638, + 636..638, ), ( Token( @@ -701,15 +659,9 @@ expression: tokens ), ( Token( - "\\", - ), - 682..683, - ), - ( - Token( - "/", + "\\/", ), - 683..684, + 682..684, ), ( Token( @@ -767,7713 +719,5667 @@ expression: tokens ), ( Token( - "re", + "re='", ), - 918..920, + 917..922, ), ( Token( - "=", + "\\b", ), - 920..921, + 922..924, ), ( Token( - "'", + "Copyright", ), - 921..922, + 924..933, ), ( Token( - "\\", + "\\b", ), - 922..923, + 933..935, ), ( Token( - "bCopyright", + ".+", ), - 923..933, + 935..937, ), ( Token( - "\\", + "\\b", ), - 933..934, + 937..939, ), ( Token( - "b", + "Free Software Foundation", ), - 934..935, + 939..963, ), ( Token( - ".", + "\\b", ), - 935..936, + 963..965, ), ( Token( - "+", + "'; [[ ", ), - 936..937, + 965..971, ), ( Token( - "\\", + "\\$", ), - 937..938, + 971..973, ), ( Token( - "bFree", + "(sed --version 2>/dev/null) =~ ", ), - 938..943, + 973..1004, ), ( Token( - "Software", + "\\$", ), - 944..952, + 1004..1006, ), ( Token( - "Foundation", + "re ]] ", ), - 953..963, + 1006..1012, ), ( Token( - "\\", + "$", ), - 963..964, + 1012..1013, ), ( Token( - "b'", + "let", ), - 964..966, + 1026..1029, ), ( Token( - ";", + "flag", ), - 966..967, + 1030..1034, ), ( Token( - "[", + "=", ), - 968..969, + 1035..1036, ), ( Token( - "[", + "status", ), - 969..970, + 1037..1043, ), ( Token( - "\\", + "==", ), - 971..972, + 1044..1046, ), ( Token( - "$", + "0", ), - 972..973, + 1047..1048, ), ( Token( - "(", + "then", ), - 973..974, + 1049..1053, ), ( Token( - "sed", + "\"", ), - 974..977, + 1054..1055, ), ( Token( - "-", + "-r", ), - 978..979, + 1055..1057, ), ( Token( - "-", + "\"", ), - 979..980, + 1057..1058, ), ( Token( - "version", + "else", ), - 980..987, + 1059..1063, ), ( Token( - "2", + "\"", ), - 988..989, + 1064..1065, ), ( Token( - ">", + "-E", ), - 989..990, + 1065..1067, ), ( Token( - "/", + "\"", ), - 990..991, + 1067..1068, ), ( Token( - "dev", + "return", ), - 991..994, + 1081..1087, ), ( Token( - "/", + "$", ), - 994..995, + 1088..1089, ), ( Token( - "null", + "echo \"", ), - 995..999, + 1089..1096, ), ( Token( - ")", + "{", ), - 999..1000, + 1096..1097, ), ( Token( - "=", + "source", ), - 1001..1002, + 1097..1103, ), ( Token( - "~", + "}", ), - 1002..1003, + 1103..1104, ), ( Token( - "\\", + "\" | sed \"", ), - 1004..1005, + 1104..1113, ), ( Token( - "$", + "{", ), - 1005..1006, + 1113..1114, ), ( Token( - "re", + "flag", ), - 1006..1008, + 1114..1118, ), ( Token( - "]", + "}", ), - 1009..1010, + 1118..1119, ), ( Token( - "]", + "\" -e \"s/", ), - 1010..1011, + 1119..1127, ), ( Token( - "$", + "{", ), - 1012..1013, + 1127..1128, ), ( Token( - "let", + "search", ), - 1026..1029, + 1128..1134, ), ( Token( - "flag", + "}", ), - 1030..1034, + 1134..1135, ), ( Token( - "=", + "/", ), - 1035..1036, + 1135..1136, ), ( Token( - "status", + "{", ), - 1037..1043, + 1136..1137, ), ( Token( - "==", + "replace", ), - 1044..1046, + 1137..1144, ), ( Token( - "0", + "}", ), - 1047..1048, + 1144..1145, ), ( Token( - "then", + "/g\" ", ), - 1049..1053, + 1145..1149, ), ( Token( - "\"", + "$", ), - 1054..1055, + 1149..1150, ), ( Token( - "-", + "}", ), - 1055..1056, + 1159..1160, ), ( Token( - "r", + "else", ), - 1056..1057, + 1161..1165, ), ( Token( - "\"", + "{", ), - 1057..1058, + 1166..1167, ), ( Token( - "else", + "return", ), - 1059..1063, + 
1180..1186, ), ( Token( - "\"", + "$", ), - 1064..1065, + 1187..1188, ), ( Token( - "-", + "echo \"", ), - 1065..1066, + 1188..1195, ), ( Token( - "E", + "{", ), - 1066..1067, + 1195..1196, ), ( Token( - "\"", + "source", ), - 1067..1068, + 1196..1202, ), ( Token( - "return", + "}", ), - 1081..1087, + 1202..1203, ), ( Token( - "$", + "\" | sed -e \"s/", ), - 1088..1089, + 1203..1217, ), ( Token( - "echo", + "{", ), - 1090..1094, + 1217..1218, ), ( Token( - "\"", + "search", ), - 1095..1096, + 1218..1224, ), ( Token( - "{", + "}", ), - 1096..1097, + 1224..1225, ), ( Token( - "source", + "/", ), - 1097..1103, + 1225..1226, ), ( Token( - "}", + "{", ), - 1103..1104, + 1226..1227, ), ( Token( - "\"", + "replace", ), - 1104..1105, + 1227..1234, ), ( Token( - "|", + "}", ), - 1106..1107, + 1234..1235, ), ( Token( - "sed", + "/g\" ", ), - 1108..1111, + 1235..1239, ), ( Token( - "\"", + "$", ), - 1112..1113, + 1239..1240, ), ( Token( - "{", + "}", ), - 1113..1114, + 1249..1250, ), ( Token( - "flag", + "}", ), - 1114..1118, + 1255..1256, ), ( Token( "}", ), - 1118..1119, + 1257..1258, ), ( Token( - "\"", + "/// Splits the input `text` into an array of substrings using the specified `delimiter`.\n", ), - 1119..1120, + 1260..1349, ), ( Token( - "-", + "pub", ), - 1121..1122, + 1349..1352, ), ( Token( - "e", + "fun", ), - 1122..1123, + 1353..1356, ), ( Token( - "\"", + "split", ), - 1124..1125, + 1357..1362, ), ( Token( - "s", + "(", ), - 1125..1126, + 1362..1363, ), ( Token( - "/", + "text", ), - 1126..1127, + 1363..1367, ), ( Token( - "{", + ":", ), - 1127..1128, + 1367..1368, ), ( Token( - "search", + "Text", ), - 1128..1134, + 1369..1373, ), ( Token( - "}", + ",", ), - 1134..1135, + 1373..1374, ), ( Token( - "/", + "delimiter", ), - 1135..1136, + 1375..1384, ), ( Token( - "{", + ":", ), - 1136..1137, + 1384..1385, ), ( Token( - "replace", + "Text", ), - 1137..1144, + 1386..1390, ), ( Token( - "}", + ")", ), - 1144..1145, + 1390..1391, ), ( Token( - "/", + ":", ), - 1145..1146, + 1391..1392, ), ( Token( - "g", + "[", ), - 1146..1147, + 1393..1394, ), ( Token( - "\"", + "Text", ), - 1147..1148, + 1394..1398, ), ( Token( - "$", + "]", ), - 1149..1150, + 1398..1399, ), ( Token( - "}", + "{", ), - 1159..1160, - ), - ( - Token( - "else", - ), - 1161..1165, - ), - ( - Token( - "{", - ), - 1166..1167, - ), - ( - Token( - "return", - ), - 1180..1186, + 1400..1401, ), ( Token( - "$", + "let", ), - 1187..1188, + 1406..1409, ), ( Token( - "echo", + "result", ), - 1189..1193, + 1410..1416, ), ( Token( - "\"", + "=", ), - 1194..1195, + 1417..1418, ), ( Token( - "{", + "[", ), - 1195..1196, + 1419..1420, ), ( Token( - "source", + "Text", ), - 1196..1202, + 1420..1424, ), ( Token( - "}", + "]", ), - 1202..1203, + 1424..1425, ), ( Token( - "\"", + "trust", ), - 1203..1204, + 1430..1435, ), ( Token( - "|", + "$", ), - 1205..1206, + 1436..1437, ), ( Token( - "sed", + "IFS=\"", ), - 1207..1210, + 1437..1443, ), ( Token( - "-", + "{", ), - 1211..1212, + 1443..1444, ), ( Token( - "e", + "delimiter", ), - 1212..1213, + 1444..1453, ), ( Token( - "\"", + "}", ), - 1214..1215, + 1453..1454, ), ( Token( - "s", + "\" read -rd '' -a ", ), - 1215..1216, + 1454..1471, ), ( Token( - "/", + "{", ), - 1216..1217, + 1471..1472, ), ( Token( - "{", + "nameof", ), - 1217..1218, + 1472..1478, ), ( Token( - "search", + "result", ), - 1218..1224, + 1479..1485, ), ( Token( "}", ), - 1224..1225, + 1485..1486, ), ( Token( - "/", + " < <(printf %s \"", ), - 1225..1226, + 1486..1502, ), ( Token( - "{", + "\\$", ), - 1226..1227, + 1502..1504, 
), ( Token( - "replace", + "{", ), - 1227..1234, + 1504..1505, ), ( Token( - "}", + "nameof", ), - 1234..1235, + 1505..1511, ), ( Token( - "/", + "text", ), - 1235..1236, + 1512..1516, ), ( Token( - "g", + "}", ), - 1236..1237, + 1516..1517, ), ( Token( - "\"", + "\") ", ), - 1237..1238, + 1517..1520, ), ( Token( "$", ), - 1239..1240, + 1520..1521, ), ( Token( - "}", + "return", ), - 1249..1250, + 1526..1532, ), ( Token( - "}", + "result", ), - 1255..1256, + 1533..1539, ), ( Token( "}", ), - 1257..1258, + 1540..1541, ), ( Token( - "/// Splits the input `text` into an array of substrings using the specified `delimiter`.\n", + "/// Splits a `text` into an array of substrings based on newline characters.\n", ), - 1260..1349, + 1543..1620, ), ( Token( "pub", ), - 1349..1352, + 1620..1623, ), ( Token( "fun", ), - 1353..1356, + 1624..1627, ), ( Token( - "split", + "split_lines", ), - 1357..1362, + 1628..1639, ), ( Token( "(", ), - 1362..1363, + 1639..1640, ), ( Token( "text", ), - 1363..1367, + 1640..1644, ), ( Token( ":", ), - 1367..1368, + 1644..1645, ), ( Token( "Text", ), - 1369..1373, + 1646..1650, ), ( Token( - ",", + ")", ), - 1373..1374, + 1650..1651, ), ( Token( - "delimiter", + ":", ), - 1375..1384, + 1651..1652, ), ( Token( - ":", + "[", ), - 1384..1385, + 1653..1654, ), ( Token( "Text", ), - 1386..1390, + 1654..1658, ), ( Token( - ")", + "]", ), - 1390..1391, + 1658..1659, ), ( Token( - ":", + "{", ), - 1391..1392, + 1660..1661, ), ( Token( - "[", + "return", ), - 1393..1394, + 1666..1672, ), ( Token( - "Text", + "split", ), - 1394..1398, + 1673..1678, ), ( Token( - "]", + "(", ), - 1398..1399, + 1678..1679, ), ( Token( - "{", + "text", ), - 1400..1401, + 1679..1683, ), ( Token( - "let", + ",", ), - 1406..1409, + 1683..1684, ), ( Token( - "result", + "\"", ), - 1410..1416, + 1685..1686, ), ( Token( - "=", + "\\n", ), - 1417..1418, + 1686..1688, ), ( Token( - "[", + "\"", ), - 1419..1420, + 1688..1689, ), ( Token( - "Text", + ")", ), - 1420..1424, + 1689..1690, ), ( Token( - "]", + "}", ), - 1424..1425, + 1691..1692, ), ( Token( - "trust", + "/// Splits a `text` into an array of substrings based on space character.\n", ), - 1430..1435, + 1694..1768, ), ( Token( - "$", + "pub", ), - 1436..1437, + 1768..1771, ), ( Token( - "IFS", + "fun", ), - 1438..1441, + 1772..1775, ), ( Token( - "=", + "split_words", ), - 1441..1442, + 1776..1787, ), ( Token( - "\"", + "(", ), - 1442..1443, + 1787..1788, ), ( Token( - "{", + "text", ), - 1443..1444, + 1788..1792, ), ( Token( - "delimiter", + ":", ), - 1444..1453, + 1792..1793, ), ( Token( - "}", + "Text", ), - 1453..1454, + 1794..1798, ), ( Token( - "\"", + ")", ), - 1454..1455, + 1798..1799, ), ( Token( - "read", + ":", ), - 1456..1460, + 1799..1800, ), ( Token( - "-", + "[", ), - 1461..1462, + 1801..1802, ), ( Token( - "rd", + "Text", ), - 1462..1464, + 1802..1806, ), ( Token( - "''", + "]", ), - 1465..1467, + 1806..1807, ), ( Token( - "-", + "{", ), - 1468..1469, + 1808..1809, ), ( Token( - "a", + "return", ), - 1469..1470, + 1814..1820, ), ( Token( - "{", + "split", ), - 1471..1472, + 1821..1826, ), ( Token( - "nameof", + "(", ), - 1472..1478, + 1826..1827, ), ( Token( - "result", + "text", ), - 1479..1485, + 1827..1831, ), ( Token( - "}", + ",", ), - 1485..1486, + 1831..1832, ), ( Token( - "<", + "\"", ), - 1487..1488, + 1833..1834, ), ( Token( - "<", + " ", ), - 1489..1490, + 1834..1835, ), ( Token( - "(", + "\"", ), - 1490..1491, + 1835..1836, ), ( Token( - "printf", + ")", ), - 1491..1497, + 1836..1837, ), ( Token( - "%", + "}", ), - 
1498..1499, + 1838..1839, ), ( Token( - "s", + "/// Merges text using the delimeter specified.\n", ), - 1499..1500, + 1841..1888, ), ( Token( - "\"", + "pub", ), - 1501..1502, + 1888..1891, ), ( Token( - "\\", + "fun", ), - 1502..1503, + 1892..1895, ), ( Token( - "$", + "join", ), - 1503..1504, - ), - ( - Token( - "{", - ), - 1504..1505, - ), - ( - Token( - "nameof", - ), - 1505..1511, - ), - ( - Token( - "text", - ), - 1512..1516, - ), - ( - Token( - "}", - ), - 1516..1517, + 1896..1900, ), ( Token( - "\"", + "(", ), - 1517..1518, + 1900..1901, ), ( Token( - ")", + "list", ), - 1518..1519, + 1901..1905, ), ( Token( - "$", + ":", ), - 1520..1521, + 1905..1906, ), ( Token( - "return", + "[", ), - 1526..1532, + 1907..1908, ), ( Token( - "result", + "Text", ), - 1533..1539, + 1908..1912, ), ( Token( - "}", + "]", ), - 1540..1541, + 1912..1913, ), ( Token( - "/// Splits a `text` into an array of substrings based on newline characters.\n", + ",", ), - 1543..1620, + 1913..1914, ), ( Token( - "pub", + "delimiter", ), - 1620..1623, + 1915..1924, ), ( Token( - "fun", + ":", ), - 1624..1627, + 1924..1925, ), ( Token( - "split_lines", + "Text", ), - 1628..1639, + 1926..1930, ), ( Token( - "(", + ")", ), - 1639..1640, + 1930..1931, ), ( Token( - "text", + ":", ), - 1640..1644, + 1931..1932, ), ( Token( - ":", + "Text", ), - 1644..1645, + 1933..1937, ), ( Token( - "Text", + "{", ), - 1646..1650, + 1938..1939, ), ( Token( - ")", + "return", ), - 1650..1651, + 1944..1950, ), ( Token( - ":", + "trust", ), - 1651..1652, + 1951..1956, ), ( Token( - "[", + "$", ), - 1653..1654, + 1957..1958, ), ( Token( - "Text", + "IFS=\"", ), - 1654..1658, + 1958..1964, ), ( Token( - "]", + "{", ), - 1658..1659, + 1964..1965, ), ( Token( - "{", + "delimiter", ), - 1660..1661, + 1965..1974, ), ( Token( - "return", + "}", ), - 1666..1672, + 1974..1975, ), ( Token( - "split", + "\" ; echo \"", ), - 1673..1678, + 1975..1985, ), ( Token( - "(", + "\\$", ), - 1678..1679, + 1985..1987, ), ( Token( - "text", + "\\{", ), - 1679..1683, + 1987..1989, ), ( Token( - ",", + "{", ), - 1683..1684, + 1989..1990, ), ( Token( - "\"", + "nameof", ), - 1685..1686, + 1990..1996, ), ( Token( - "\\", + "list", ), - 1686..1687, + 1997..2001, ), ( Token( - "n", + "}", ), - 1687..1688, + 2001..2002, ), ( Token( - "\"", + "[*]}\" ", ), - 1688..1689, + 2002..2008, ), ( Token( - ")", + "$", ), - 1689..1690, + 2008..2009, ), ( Token( "}", ), - 1691..1692, + 2010..2011, ), ( Token( - "/// Splits a `text` into an array of substrings based on space character.\n", + "/// Trims the spaces at top of the text using `sed`.\n", ), - 1694..1768, + 2013..2066, ), ( Token( "pub", ), - 1768..1771, + 2066..2069, ), ( Token( "fun", ), - 1772..1775, + 2070..2073, ), ( Token( - "split_words", + "trim_left", ), - 1776..1787, + 2074..2083, ), ( Token( "(", ), - 1787..1788, + 2083..2084, ), ( Token( "text", ), - 1788..1792, + 2084..2088, ), ( Token( ":", ), - 1792..1793, + 2088..2089, ), ( Token( "Text", ), - 1794..1798, + 2090..2094, ), ( Token( ")", ), - 1798..1799, + 2094..2095, ), ( Token( ":", ), - 1799..1800, - ), - ( - Token( - "[", - ), - 1801..1802, + 2095..2096, ), ( Token( "Text", ), - 1802..1806, + 2097..2101, ), ( Token( - "]", + "{", ), - 1806..1807, + 2102..2103, ), ( Token( - "{", + "return", ), - 1808..1809, + 2108..2114, ), ( Token( - "return", + "trust", ), - 1814..1820, + 2115..2120, ), ( Token( - "split", + "$", ), - 1821..1826, + 2121..2122, ), ( Token( - "(", + "echo \"", ), - 1826..1827, + 2122..2129, ), ( Token( - "text", + "{", ), - 1827..1831, + 
2129..2130, ), ( Token( - ",", + "text", ), - 1831..1832, + 2130..2134, ), ( Token( - "\"", + "}", ), - 1833..1834, + 2134..2135, ), ( Token( - "\"", + "\" | sed -e 's/^[[:space:]]*//' ", ), - 1835..1836, + 2135..2166, ), ( Token( - ")", + "$", ), - 1836..1837, + 2166..2167, ), ( Token( "}", ), - 1838..1839, + 2168..2169, ), ( Token( - "/// Merges text using the delimeter specified.\n", + "/// Trims the spaces at end of the text using `sed`.\n", ), - 1841..1888, + 2171..2224, ), ( Token( "pub", ), - 1888..1891, + 2224..2227, ), ( Token( "fun", ), - 1892..1895, + 2228..2231, ), ( Token( - "join", + "trim_right", ), - 1896..1900, + 2232..2242, ), ( Token( "(", ), - 1900..1901, + 2242..2243, ), ( Token( - "list", + "text", ), - 1901..1905, + 2243..2247, ), ( Token( ":", ), - 1905..1906, + 2247..2248, ), ( Token( - "[", + "Text", ), - 1907..1908, + 2249..2253, ), ( Token( - "Text", + ")", ), - 1908..1912, + 2253..2254, ), ( Token( - "]", + ":", ), - 1912..1913, + 2254..2255, ), ( Token( - ",", + "Text", ), - 1913..1914, + 2256..2260, ), ( Token( - "delimiter", + "{", ), - 1915..1924, + 2261..2262, ), ( Token( - ":", + "return", ), - 1924..1925, + 2267..2273, ), ( Token( - "Text", + "trust", ), - 1926..1930, + 2274..2279, ), ( Token( - ")", + "$", ), - 1930..1931, + 2280..2281, ), ( Token( - ":", + "echo \"", ), - 1931..1932, - ), - ( - Token( - "Text", - ), - 1933..1937, + 2281..2288, ), ( Token( "{", ), - 1938..1939, + 2288..2289, ), ( Token( - "return", + "text", ), - 1944..1950, + 2289..2293, ), ( Token( - "trust", + "}", ), - 1951..1956, + 2293..2294, ), ( Token( - "$", + "\" | sed -e 's/[[:space:]]*", ), - 1957..1958, + 2294..2320, ), ( Token( - "IFS", + "\\$", ), - 1959..1962, + 2320..2322, ), ( Token( - "=", + "//' ", ), - 1962..1963, + 2322..2326, ), ( Token( - "\"", + "$", ), - 1963..1964, + 2326..2327, ), ( Token( - "{", + "}", ), - 1964..1965, + 2328..2329, ), ( Token( - "delimiter", + "/// Trims the spaces from the text input.\n", ), - 1965..1974, + 2331..2373, ), ( Token( - "}", + "pub", ), - 1974..1975, + 2373..2376, ), ( Token( - "\"", + "fun", ), - 1975..1976, + 2377..2380, ), ( Token( - ";", + "trim", ), - 1977..1978, + 2381..2385, ), ( Token( - "echo", + "(", ), - 1979..1983, + 2385..2386, ), ( Token( - "\"", + "text", ), - 1984..1985, + 2386..2390, ), ( Token( - "\\", + ":", ), - 1985..1986, + 2390..2391, ), ( Token( - "$", + "Text", ), - 1986..1987, + 2392..2396, ), ( Token( - "\\", + ")", ), - 1987..1988, + 2396..2397, ), ( Token( - "{", + ":", ), - 1988..1989, + 2397..2398, ), ( Token( - "{", + "Text", ), - 1989..1990, + 2399..2403, ), ( Token( - "nameof", + "{", ), - 1990..1996, + 2404..2405, ), ( Token( - "list", + "return", ), - 1997..2001, + 2410..2416, ), ( Token( - "}", + "trim_left", ), - 2001..2002, + 2417..2426, ), ( Token( - "[", + "(", ), - 2002..2003, + 2426..2427, ), ( Token( - "*", + "trim_right", ), - 2003..2004, + 2427..2437, ), ( Token( - "]", + "(", ), - 2004..2005, + 2437..2438, ), ( Token( - "}", + "text", ), - 2005..2006, + 2438..2442, ), ( Token( - "\"", + ")", ), - 2006..2007, + 2442..2443, ), ( Token( - "$", + ")", ), - 2008..2009, + 2443..2444, ), ( Token( "}", ), - 2010..2011, + 2445..2446, ), ( Token( - "/// Trims the spaces at top of the text using `sed`.\n", + "/// Makes the text input lowercase using `tr`.\n", ), - 2013..2066, + 2448..2495, ), ( Token( "pub", ), - 2066..2069, + 2495..2498, ), ( Token( "fun", ), - 2070..2073, + 2499..2502, ), ( Token( - "trim_left", + "lowercase", ), - 2074..2083, + 2503..2512, ), ( Token( "(", ), - 
2083..2084, + 2512..2513, ), ( Token( "text", ), - 2084..2088, + 2513..2517, ), ( Token( ":", ), - 2088..2089, + 2517..2518, ), ( Token( "Text", ), - 2090..2094, + 2519..2523, ), ( Token( ")", ), - 2094..2095, + 2523..2524, ), ( Token( ":", ), - 2095..2096, + 2524..2525, ), ( Token( "Text", ), - 2097..2101, + 2526..2530, ), ( Token( "{", ), - 2102..2103, + 2531..2532, ), ( Token( "return", ), - 2108..2114, + 2537..2543, ), ( Token( "trust", ), - 2115..2120, + 2544..2549, ), ( Token( "$", ), - 2121..2122, - ), - ( - Token( - "echo", - ), - 2123..2127, + 2550..2551, ), ( Token( - "\"", + "echo \"", ), - 2128..2129, + 2551..2558, ), ( Token( "{", ), - 2129..2130, + 2558..2559, ), ( Token( "text", ), - 2130..2134, + 2559..2563, ), ( Token( "}", ), - 2134..2135, + 2563..2564, ), ( Token( - "\"", + "\" | tr '[:upper:]' '[:lower:]' ", ), - 2135..2136, + 2564..2595, ), ( Token( - "|", + "$", ), - 2137..2138, + 2595..2596, ), ( Token( - "sed", + "}", ), - 2139..2142, + 2597..2598, ), ( Token( - "-", + "/// Makes the text input uppercase using `tr`.\n", ), - 2143..2144, + 2600..2647, ), ( Token( - "e", + "pub", ), - 2144..2145, + 2647..2650, ), ( Token( - "'s", + "fun", ), - 2146..2148, + 2651..2654, ), ( Token( - "/", + "uppercase", ), - 2148..2149, + 2655..2664, ), ( Token( - "^", + "(", ), - 2149..2150, + 2664..2665, ), ( Token( - "[", + "text", ), - 2150..2151, + 2665..2669, ), ( Token( - "[", + ":", ), - 2151..2152, + 2669..2670, ), ( Token( - ":", + "Text", ), - 2152..2153, + 2671..2675, ), ( Token( - "space", + ")", ), - 2153..2158, + 2675..2676, ), ( Token( ":", ), - 2158..2159, + 2676..2677, ), ( Token( - "]", + "Text", ), - 2159..2160, + 2678..2682, ), ( Token( - "]", + "{", ), - 2160..2161, + 2683..2684, ), ( Token( - "*", + "return", ), - 2161..2162, + 2689..2695, ), ( Token( - "//", + "trust", ), - 2162..2164, + 2696..2701, ), ( Token( - "'", + "$", ), - 2164..2165, + 2702..2703, ), ( Token( - "$", + "echo \"", ), - 2166..2167, + 2703..2710, ), ( Token( - "}", + "{", ), - 2168..2169, + 2710..2711, ), ( Token( - "/// Trims the spaces at end of the text using `sed`.\n", + "text", ), - 2171..2224, + 2711..2715, ), ( Token( - "pub", + "}", ), - 2224..2227, + 2715..2716, ), ( Token( - "fun", + "\" | tr '[:lower:]' '[:upper:]' ", ), - 2228..2231, + 2716..2747, ), ( Token( - "trim_right", + "$", ), - 2232..2242, + 2747..2748, ), ( Token( - "(", + "}", ), - 2242..2243, + 2749..2750, ), ( Token( - "text", + "/// Attempts to parse a given text into a number, returning the parsed number or zero if parsing fails.\n", ), - 2243..2247, + 2752..2856, ), ( Token( - ":", + "#[", ), - 2247..2248, + 2856..2858, ), ( Token( - "Text", + "allow_absurd_cast", ), - 2249..2253, + 2858..2875, ), ( Token( - ")", + "]", ), - 2253..2254, + 2875..2876, ), ( Token( - ":", + "pub", ), - 2254..2255, + 2877..2880, ), ( Token( - "Text", + "fun", ), - 2256..2260, + 2881..2884, ), ( Token( - "{", + "parse_number", ), - 2261..2262, + 2885..2897, ), ( Token( - "return", + "(", ), - 2267..2273, + 2897..2898, ), ( Token( - "trust", + "text", ), - 2274..2279, + 2898..2902, ), ( Token( - "$", + ":", ), - 2280..2281, + 2902..2903, ), ( Token( - "echo", + "Text", ), - 2282..2286, + 2904..2908, ), ( Token( - "\"", + ")", ), - 2287..2288, + 2908..2909, ), ( Token( - "{", + ":", ), - 2288..2289, + 2909..2910, ), ( Token( - "text", + "Num", ), - 2289..2293, + 2911..2914, ), ( Token( - "}", + "?", ), - 2293..2294, + 2914..2915, ), ( Token( - "\"", + "{", ), - 2294..2295, + 2916..2917, ), ( Token( - "|", + "$", ), - 2296..2297, + 
2922..2923, ), ( Token( - "sed", + "[ -n \"", ), - 2298..2301, + 2923..2930, ), ( Token( - "-", + "{", ), - 2302..2303, + 2930..2931, ), ( Token( - "e", + "text", ), - 2303..2304, + 2931..2935, ), ( Token( - "'s", + "}", ), - 2305..2307, + 2935..2936, ), ( Token( - "/", + "\" ] && [ \"", ), - 2307..2308, + 2936..2946, ), ( Token( - "[", + "{", ), - 2308..2309, + 2946..2947, ), ( Token( - "[", + "text", ), - 2309..2310, + 2947..2951, ), ( Token( - ":", + "}", ), - 2310..2311, + 2951..2952, ), ( Token( - "space", + "\" -eq \"", ), - 2311..2316, + 2952..2959, ), ( Token( - ":", + "{", ), - 2316..2317, + 2959..2960, ), ( Token( - "]", + "text", ), - 2317..2318, + 2960..2964, ), ( Token( - "]", + "}", ), - 2318..2319, + 2964..2965, ), ( Token( - "*", + "\" ] 2>/dev/null ", ), - 2319..2320, + 2965..2981, ), ( Token( - "\\", + "$", ), - 2320..2321, + 2981..2982, ), ( Token( - "$", + "?", ), - 2321..2322, + 2982..2983, ), ( Token( - "//", + "return", ), - 2322..2324, + 2988..2994, ), ( Token( - "'", + "text", ), - 2324..2325, + 2995..2999, ), ( Token( - "$", + "as", ), - 2326..2327, + 3000..3002, + ), + ( + Token( + "Num", + ), + 3003..3006, ), ( Token( "}", ), - 2328..2329, + 3007..3008, ), ( Token( - "/// Trims the spaces from the text input.\n", + "/// Splits a text into an array of individual characters.\n", ), - 2331..2373, + 3010..3068, ), ( Token( "pub", ), - 2373..2376, + 3068..3071, ), ( Token( "fun", ), - 2377..2380, + 3072..3075, ), ( Token( - "trim", + "split_chars", ), - 2381..2385, + 3076..3087, ), ( Token( "(", ), - 2385..2386, + 3087..3088, ), ( Token( "text", ), - 2386..2390, + 3088..3092, ), ( Token( ":", ), - 2390..2391, + 3092..3093, ), ( Token( "Text", ), - 2392..2396, + 3094..3098, ), ( Token( ")", ), - 2396..2397, + 3098..3099, ), ( Token( ":", ), - 2397..2398, + 3099..3100, ), ( Token( - "Text", + "[", ), - 2399..2403, + 3101..3102, ), ( Token( - "{", + "Text", ), - 2404..2405, + 3102..3106, ), ( Token( - "return", + "]", ), - 2410..2416, + 3106..3107, ), ( Token( - "trim_left", + "{", ), - 2417..2426, + 3108..3109, ), ( Token( - "(", + "let", ), - 2426..2427, + 3114..3117, ), ( Token( - "trim_right", + "chars", ), - 2427..2437, + 3118..3123, ), ( Token( - "(", + "=", ), - 2437..2438, + 3124..3125, ), ( Token( - "text", + "[", ), - 2438..2442, + 3126..3127, ), ( Token( - ")", + "Text", ), - 2442..2443, + 3127..3131, ), ( Token( - ")", + "]", ), - 2443..2444, + 3131..3132, ), ( Token( - "}", + "trust", ), - 2445..2446, + 3137..3142, ), ( Token( - "/// Makes the text input lowercase using `tr`.\n", + "$", ), - 2448..2495, + 3143..3144, ), ( Token( - "pub", + "for ((i=0; i<", ), - 2495..2498, + 3144..3158, ), ( Token( - "fun", + "\\$", ), - 2499..2502, + 3158..3160, ), ( Token( - "lowercase", + "\\{", ), - 2503..2512, + 3160..3162, ), ( Token( - "(", + "#", ), - 2512..2513, + 3162..3163, ), ( Token( - "text", + "{", ), - 2513..2517, + 3163..3164, ), ( Token( - ":", + "nameof", ), - 2517..2518, + 3164..3170, ), ( Token( - "Text", + "text", ), - 2519..2523, + 3171..3175, ), ( Token( - ")", + "}", ), - 2523..2524, + 3175..3176, ), ( Token( - ":", + "}; i++)); do\n ", ), - 2524..2525, + 3176..3197, ), ( Token( - "Text", + "{", ), - 2526..2530, + 3197..3198, ), ( Token( - "{", + "nameof", ), - 2531..2532, + 3198..3204, ), ( Token( - "return", + "chars", ), - 2537..2543, + 3205..3210, ), ( Token( - "trust", + "}", ), - 2544..2549, + 3210..3211, ), ( Token( - "$", + "+=( \"", ), - 2550..2551, + 3211..3216, ), ( Token( - "echo", + "\\$", ), - 2552..2556, + 3216..3218, ), ( Token( - 
"\"", + "\\{", ), - 2557..2558, + 3218..3220, ), ( Token( "{", ), - 2558..2559, + 3220..3221, ), ( Token( - "text", + "nameof", ), - 2559..2563, + 3221..3227, ), ( Token( - "}", + "text", ), - 2563..2564, + 3228..3232, ), ( Token( - "\"", + "}", ), - 2564..2565, + 3232..3233, ), ( Token( - "|", + ":", ), - 2566..2567, + 3233..3234, ), ( Token( - "tr", + "\\$", ), - 2568..2570, + 3234..3236, ), ( Token( - "'", + "i:1}\" );\n done ", ), - 2571..2572, + 3236..3254, ), ( Token( - "[", + "$", ), - 2572..2573, + 3254..3255, ), ( Token( - ":", + "return", ), - 2573..2574, + 3260..3266, ), ( Token( - "upper", + "chars", ), - 2574..2579, + 3267..3272, ), ( Token( - ":", + "}", ), - 2579..2580, + 3273..3274, ), ( Token( - "]", + "/// Checks if some text contains a value.\n", ), - 2580..2581, + 3276..3318, ), ( Token( - "'", + "pub", ), - 2581..2582, + 3318..3321, ), ( Token( - "'", + "fun", ), - 2583..2584, + 3322..3325, ), ( Token( - "[", + "text_contains", ), - 2584..2585, + 3326..3339, ), ( Token( - ":", + "(", ), - 2585..2586, + 3339..3340, ), ( Token( - "lower", + "text", ), - 2586..2591, + 3340..3344, ), ( Token( ":", ), - 2591..2592, + 3344..3345, ), ( Token( - "]", + "Text", ), - 2592..2593, + 3346..3350, ), ( Token( - "'", + ",", ), - 2593..2594, + 3350..3351, ), ( Token( - "$", + "phrase", ), - 2595..2596, + 3352..3358, ), ( Token( - "}", + ":", ), - 2597..2598, + 3358..3359, ), ( Token( - "/// Makes the text input uppercase using `tr`.\n", + "Text", ), - 2600..2647, + 3360..3364, ), ( Token( - "pub", + ")", ), - 2647..2650, + 3364..3365, ), ( Token( - "fun", + ":", ), - 2651..2654, + 3365..3366, ), ( Token( - "uppercase", + "Bool", ), - 2655..2664, + 3367..3371, ), ( Token( - "(", + "{", ), - 2664..2665, + 3372..3373, ), ( Token( - "text", + "let", ), - 2665..2669, + 3376..3379, ), ( Token( - ":", + "result", ), - 2669..2670, + 3380..3386, ), ( Token( - "Text", + "=", ), - 2671..2675, + 3387..3388, ), ( Token( - ")", + "trust", ), - 2675..2676, + 3389..3394, ), ( Token( - ":", + "$", ), - 2676..2677, + 3395..3396, ), ( Token( - "Text", + "if [[ \"", ), - 2678..2682, + 3396..3404, ), ( Token( "{", ), - 2683..2684, + 3404..3405, ), ( Token( - "return", + "text", ), - 2689..2695, + 3405..3409, ), ( Token( - "trust", + "}", ), - 2696..2701, + 3409..3410, ), ( Token( - "$", + "\" == *\"", ), - 2702..2703, + 3410..3417, ), ( Token( - "echo", + "{", ), - 2704..2708, + 3417..3418, ), ( Token( - "\"", + "phrase", ), - 2709..2710, + 3418..3424, ), ( Token( - "{", + "}", ), - 2710..2711, + 3424..3425, ), ( Token( - "text", + "\"* ]]; then\n echo 1\n fi ", ), - 2711..2715, + 3425..3453, ), ( Token( - "}", + "$", ), - 2715..2716, + 3453..3454, ), ( Token( - "\"", + "return", ), - 2716..2717, + 3458..3464, ), ( Token( - "|", + "result", ), - 2718..2719, + 3465..3471, ), ( Token( - "tr", + "==", ), - 2720..2722, + 3472..3474, ), ( Token( - "'", + "\"", ), - 2723..2724, + 3475..3476, ), ( Token( - "[", + "1", ), - 2724..2725, + 3476..3477, ), ( Token( - ":", + "\"", ), - 2725..2726, + 3477..3478, ), ( Token( - "lower", + "}", ), - 2726..2731, + 3479..3480, ), ( Token( - ":", + "/// Checks if an array value is in the text.\n", ), - 2731..2732, + 3482..3527, ), ( Token( - "]", + "pub", ), - 2732..2733, + 3527..3530, ), ( Token( - "'", + "fun", ), - 2733..2734, + 3531..3534, ), ( Token( - "'", + "text_contains_any", ), - 2735..2736, + 3535..3552, ), ( Token( - "[", + "(", ), - 2736..2737, + 3552..3553, ), ( Token( - ":", + "text", ), - 2737..2738, + 3553..3557, ), ( Token( - "upper", + ":", ), - 2738..2743, 
+ 3557..3558, ), ( Token( - ":", + "Text", ), - 2743..2744, + 3559..3563, ), ( Token( - "]", + ",", ), - 2744..2745, + 3563..3564, ), ( Token( - "'", + "terms", ), - 2745..2746, + 3565..3570, ), ( Token( - "$", + ":", ), - 2747..2748, + 3570..3571, ), ( Token( - "}", + "[", ), - 2749..2750, + 3572..3573, ), ( Token( - "/// Attempts to parse a given text into a number, returning the parsed number or zero if parsing fails.\n", + "Text", ), - 2752..2856, + 3573..3577, ), ( Token( - "#", + "]", ), - 2856..2857, + 3577..3578, ), ( Token( - "[", + ")", ), - 2857..2858, + 3578..3579, ), ( Token( - "allow_absurd_cast", + ":", ), - 2858..2875, + 3579..3580, ), ( Token( - "]", + "Bool", ), - 2875..2876, + 3581..3585, ), ( Token( - "pub", + "{", ), - 2877..2880, + 3586..3587, ), ( Token( - "fun", + "for", ), - 2881..2884, + 3592..3595, ), ( Token( - "parse_number", + "term", ), - 2885..2897, + 3596..3600, ), ( Token( - "(", + "in", ), - 2897..2898, + 3601..3603, ), ( Token( - "text", + "terms", ), - 2898..2902, + 3604..3609, ), ( Token( - ":", + "{", ), - 2902..2903, + 3610..3611, ), ( Token( - "Text", + "if", ), - 2904..2908, + 3620..3622, ), ( Token( - ")", + "text_contains", ), - 2908..2909, + 3623..3636, ), ( Token( - ":", + "(", ), - 2909..2910, + 3636..3637, ), ( Token( - "Num", + "text", ), - 2911..2914, + 3637..3641, ), ( Token( - "?", + ",", ), - 2914..2915, + 3641..3642, ), ( Token( - "{", + "term", ), - 2916..2917, + 3643..3647, ), ( Token( - "$", + ")", ), - 2922..2923, + 3647..3648, ), ( Token( - "[", + "{", ), - 2924..2925, + 3649..3650, ), ( Token( - "-", + "return", ), - 2926..2927, + 3663..3669, ), ( Token( - "n", + "true", ), - 2927..2928, + 3670..3674, ), ( Token( - "\"", + "}", ), - 2929..2930, + 3683..3684, ), ( Token( - "{", + "}", ), - 2930..2931, + 3689..3690, ), ( Token( - "text", + "return", ), - 2931..2935, + 3696..3702, ), ( Token( - "}", + "false", ), - 2935..2936, + 3703..3708, ), ( Token( - "\"", + "}", ), - 2936..2937, + 3709..3710, ), ( Token( - "]", + "/// Checks if all the arrays values are in the string\n", ), - 2938..2939, + 3712..3766, ), ( Token( - "&&", + "pub", ), - 2940..2942, + 3766..3769, ), ( Token( - "[", + "fun", ), - 2943..2944, + 3770..3773, ), ( Token( - "\"", + "text_contains_all", ), - 2945..2946, + 3774..3791, ), ( Token( - "{", + "(", ), - 2946..2947, + 3791..3792, ), ( Token( "text", ), - 2947..2951, + 3792..3796, ), ( Token( - "}", + ":", ), - 2951..2952, + 3796..3797, ), ( Token( - "\"", + "Text", ), - 2952..2953, + 3798..3802, ), ( Token( - "-", + ",", ), - 2954..2955, + 3802..3803, ), ( Token( - "eq", + "terms", ), - 2955..2957, + 3804..3809, ), ( Token( - "\"", + ":", ), - 2958..2959, + 3809..3810, ), ( Token( - "{", + "[", ), - 2959..2960, + 3811..3812, ), ( Token( - "text", + "Text", ), - 2960..2964, + 3812..3816, ), ( Token( - "}", + "]", ), - 2964..2965, + 3816..3817, ), ( Token( - "\"", + ")", ), - 2965..2966, + 3817..3818, ), ( Token( - "]", + ":", ), - 2967..2968, + 3818..3819, ), ( Token( - "2", + "Bool", ), - 2969..2970, + 3820..3824, ), ( Token( - ">", + "{", ), - 2970..2971, + 3825..3826, ), ( Token( - "/", + "for", ), - 2971..2972, + 3831..3834, ), ( Token( - "dev", + "term", ), - 2972..2975, + 3835..3839, ), ( Token( - "/", + "in", ), - 2975..2976, + 3840..3842, ), ( Token( - "null", + "terms", ), - 2976..2980, + 3843..3848, ), ( Token( - "$", + "{", ), - 2981..2982, + 3849..3850, ), ( Token( - "?", + "if", ), - 2982..2983, + 3859..3861, ), ( Token( - "return", - ), - 2988..2994, - ), - ( - Token( - "text", + "not", ), - 
2995..2999, + 3862..3865, ), ( Token( - "as", + "text_contains", ), - 3000..3002, + 3866..3879, ), ( Token( - "Num", + "(", ), - 3003..3006, + 3879..3880, ), ( Token( - "}", + "text", ), - 3007..3008, + 3880..3884, ), ( Token( - "/// Splits a text into an array of individual characters.\n", + ",", ), - 3010..3068, + 3884..3885, ), ( Token( - "pub", + "term", ), - 3068..3071, + 3886..3890, ), ( Token( - "fun", + ")", ), - 3072..3075, + 3890..3891, ), ( Token( - "split_chars", + "{", ), - 3076..3087, + 3892..3893, ), ( Token( - "(", + "return", ), - 3087..3088, + 3906..3912, ), ( Token( - "text", + "false", ), - 3088..3092, + 3913..3918, ), ( Token( - ":", + "}", ), - 3092..3093, + 3927..3928, ), ( Token( - "Text", + "}", ), - 3094..3098, + 3933..3934, ), ( Token( - ")", + "return", ), - 3098..3099, + 3940..3946, ), ( Token( - ":", + "true", ), - 3099..3100, + 3947..3951, ), ( Token( - "[", + "}", ), - 3101..3102, + 3952..3953, ), ( Token( - "Text", + "/// Match all occurences of a regex pattern.\n", ), - 3102..3106, + 3955..4000, ), ( Token( - "]", + "///\n", ), - 3106..3107, + 4000..4004, ), ( Token( - "{", + "/// Function uses `sed`\n", ), - 3108..3109, + 4004..4028, ), ( Token( - "let", + "pub", ), - 3114..3117, + 4028..4031, ), ( Token( - "chars", + "fun", ), - 3118..3123, + 4032..4035, ), ( Token( - "=", + "match_regex", ), - 3124..3125, + 4036..4047, ), ( Token( - "[", + "(", ), - 3126..3127, + 4047..4048, ), ( Token( - "Text", + "source", ), - 3127..3131, + 4048..4054, ), ( Token( - "]", + ":", ), - 3131..3132, + 4054..4055, ), ( Token( - "trust", + "Text", ), - 3137..3142, + 4056..4060, ), ( Token( - "$", + ",", ), - 3143..3144, + 4060..4061, ), ( Token( - "for", + "search", ), - 3145..3148, + 4062..4068, ), ( Token( - "(", + ":", ), - 3149..3150, + 4068..4069, ), ( Token( - "(", + "Text", ), - 3150..3151, + 4070..4074, ), ( Token( - "i", + ",", ), - 3151..3152, + 4074..4075, ), ( Token( - "=", + "extended", ), - 3152..3153, + 4076..4084, ), ( Token( - "0", + ":", ), - 3153..3154, + 4084..4085, ), ( Token( - ";", + "Bool", ), - 3154..3155, + 4086..4090, ), ( Token( - "i", + "=", ), - 3156..3157, + 4091..4092, ), ( Token( - "<", + "false", ), - 3157..3158, + 4093..4098, ), ( Token( - "\\", + ")", ), - 3158..3159, + 4098..4099, ), ( Token( - "$", + ":", ), - 3159..3160, + 4099..4100, ), ( Token( - "\\", + "Bool", ), - 3160..3161, + 4101..4105, ), ( Token( "{", ), - 3161..3162, + 4106..4107, ), ( Token( - "#", + "trust", ), - 3162..3163, + 4112..4117, ), ( Token( "{", ), - 3163..3164, + 4118..4119, ), ( Token( - "nameof", + "search", ), - 3164..3170, + 4128..4134, ), ( Token( - "text", + "=", ), - 3171..3175, + 4135..4136, ), ( Token( - "}", + "replace", ), - 3175..3176, + 4137..4144, ), ( Token( - "}", + "(", ), - 3176..3177, + 4144..4145, ), ( Token( - ";", + "search", ), - 3177..3178, + 4145..4151, ), ( Token( - "i", + ",", ), - 3179..3180, + 4151..4152, ), ( Token( - "+", + "\"", ), - 3180..3181, + 4153..4154, ), ( Token( - "+", + "/", ), - 3181..3182, + 4154..4155, ), ( Token( - ")", + "\"", ), - 3182..3183, + 4155..4156, ), ( Token( - ")", + ",", ), - 3183..3184, + 4156..4157, ), ( Token( - ";", + "\"", ), - 3184..3185, + 4158..4159, ), ( Token( - "do", + "\\/", ), - 3186..3188, + 4159..4161, ), ( Token( - "{", + "\"", ), - 3197..3198, + 4161..4162, ), ( Token( - "nameof", + ")", ), - 3198..3204, + 4162..4163, ), ( Token( - "chars", + "let", ), - 3205..3210, + 4172..4175, ), ( Token( - "}", + "output", ), - 3210..3211, + 4176..4182, ), ( Token( - "+=", + "=", ), - 3211..3213, + 
4183..4184, ), ( Token( - "(", + "\"", ), - 3213..3214, + 4185..4186, ), ( Token( "\"", ), - 3215..3216, + 4186..4187, ), ( Token( - "\\", + "if", ), - 3216..3217, + 4196..4198, ), ( Token( - "$", + "extended", ), - 3217..3218, + 4199..4207, ), ( Token( - "\\", + "{", ), - 3218..3219, + 4208..4209, ), ( Token( - "{", + "// GNU sed versions 4.0 through 4.2 support extended regex syntax,\n", ), - 3219..3220, + 4222..4289, ), ( Token( - "{", + "// but only via the \"-r\" option; use that if the version information\n", ), - 3220..3221, + 4301..4370, ), ( Token( - "nameof", + "// contains \"GNU sed\".\n", ), - 3221..3227, + 4382..4405, ), ( Token( - "text", + "$", ), - 3228..3232, + 4417..4418, ), ( Token( - "}", + "re='", ), - 3232..3233, + 4418..4423, ), ( Token( - ":", + "\\b", ), - 3233..3234, + 4423..4425, ), ( Token( - "\\", + "Copyright", ), - 3234..3235, + 4425..4434, ), ( Token( - "$", + "\\b", ), - 3235..3236, + 4434..4436, ), ( Token( - "i", + ".+", ), - 3236..3237, + 4436..4438, ), ( Token( - ":", + "\\b", ), - 3237..3238, + 4438..4440, ), ( Token( - "1", + "Free Software Foundation", ), - 3238..3239, + 4440..4464, ), ( Token( - "}", + "\\b", ), - 3239..3240, + 4464..4466, ), ( Token( - "\"", + "'; [[ ", ), - 3240..3241, + 4466..4472, ), ( Token( - ")", + "\\$", ), - 3242..3243, + 4472..4474, ), ( Token( - ";", + "(sed --version 2>/dev/null) =~ ", ), - 3243..3244, + 4474..4505, ), ( Token( - "done", + "\\$", ), - 3249..3253, + 4505..4507, ), ( Token( - "$", + "re ]] ", ), - 3254..3255, + 4507..4513, ), ( Token( - "return", + "$", ), - 3260..3266, + 4513..4514, ), ( Token( - "chars", + "let", ), - 3267..3272, + 4527..4530, ), ( Token( - "}", + "flag", ), - 3273..3274, + 4531..4535, ), ( Token( - "/// Checks if some text contains a value.\n", + "=", ), - 3276..3318, + 4536..4537, ), ( Token( - "pub", + "status", ), - 3318..3321, + 4538..4544, ), ( Token( - "fun", + "==", ), - 3322..3325, + 4545..4547, ), ( Token( - "text_contains", + "0", ), - 3326..3339, + 4548..4549, ), ( Token( - "(", + "then", ), - 3339..3340, + 4550..4554, ), ( Token( - "text", + "\"", ), - 3340..3344, + 4555..4556, ), ( Token( - ":", + "-r", ), - 3344..3345, + 4556..4558, ), ( Token( - "Text", + "\"", ), - 3346..3350, + 4558..4559, ), ( Token( - ",", + "else", ), - 3350..3351, + 4560..4564, ), ( Token( - "phrase", + "\"", ), - 3352..3358, + 4565..4566, ), ( Token( - ":", + "-E", ), - 3358..3359, + 4566..4568, ), ( Token( - "Text", + "\"", ), - 3360..3364, + 4568..4569, ), ( Token( - ")", + "output", ), - 3364..3365, + 4582..4588, ), ( Token( - ":", + "=", ), - 3365..3366, + 4589..4590, ), ( Token( - "Bool", + "$", ), - 3367..3371, + 4591..4592, + ), + ( + Token( + "echo \"", + ), + 4592..4599, ), ( Token( "{", ), - 3372..3373, + 4599..4600, ), ( Token( - "let", + "source", ), - 3376..3379, + 4600..4606, ), ( Token( - "result", + "}", ), - 3380..3386, + 4606..4607, ), ( Token( - "=", + "\" | sed \"", ), - 3387..3388, + 4607..4616, ), ( Token( - "trust", + "{", ), - 3389..3394, + 4616..4617, ), ( Token( - "$", + "flag", ), - 3395..3396, + 4617..4621, ), ( Token( - "if", + "}", ), - 3397..3399, + 4621..4622, ), ( Token( - "[", + "\" -ne \"/", ), - 3400..3401, + 4622..4630, ), ( Token( - "[", + "{", ), - 3401..3402, + 4630..4631, ), ( Token( - "\"", + "search", ), - 3403..3404, + 4631..4637, ), ( Token( - "{", + "}", ), - 3404..3405, + 4637..4638, ), ( Token( - "text", + "/p\" ", ), - 3405..3409, + 4638..4642, + ), + ( + Token( + "$", + ), + 4642..4643, ), ( Token( "}", ), - 3409..3410, + 4652..4653, ), ( Token( - 
"\"", + "else", ), - 3410..3411, + 4654..4658, ), ( Token( - "==", + "{", + ), + 4659..4660, + ), + ( + Token( + "output", ), - 3412..3414, + 4673..4679, ), ( Token( - "*", + "=", ), - 3415..3416, + 4680..4681, ), ( Token( - "\"", + "$", ), - 3416..3417, + 4682..4683, + ), + ( + Token( + "echo \"", + ), + 4683..4690, ), ( Token( "{", ), - 3417..3418, + 4690..4691, ), ( Token( - "phrase", + "source", ), - 3418..3424, + 4691..4697, ), ( Token( "}", ), - 3424..3425, + 4697..4698, ), ( Token( - "\"", + "\" | sed -ne \"/", + ), + 4698..4712, + ), + ( + Token( + "{", ), - 3425..3426, + 4712..4713, ), ( Token( - "*", + "search", ), - 3426..3427, + 4713..4719, ), ( Token( - "]", + "}", ), - 3428..3429, + 4719..4720, ), ( Token( - "]", + "/p\" ", ), - 3429..3430, + 4720..4724, ), ( Token( - ";", + "$", ), - 3430..3431, + 4724..4725, ), ( Token( - "then", + "}", + ), + 4734..4735, + ), + ( + Token( + "if", ), - 3432..3436, + 4744..4746, ), ( Token( - "echo", + "output", ), - 3441..3445, + 4747..4753, ), ( Token( - "1", + "!=", ), - 3446..3447, + 4754..4756, ), ( Token( - "fi", + "\"", ), - 3450..3452, + 4757..4758, ), ( Token( - "$", + "\"", ), - 3453..3454, + 4758..4759, + ), + ( + Token( + "{", + ), + 4760..4761, ), ( Token( "return", ), - 3458..3464, + 4774..4780, ), ( Token( - "result", + "true", ), - 3465..3471, + 4781..4785, ), ( Token( - "==", + "}", ), - 3472..3474, + 4794..4795, ), ( Token( - "\"", + "}", ), - 3475..3476, + 4800..4801, ), ( Token( - "1", + "return", ), - 3476..3477, + 4806..4812, ), ( Token( - "\"", + "false", ), - 3477..3478, + 4813..4818, ), ( Token( "}", ), - 3479..3480, + 4819..4820, ), ( Token( - "/// Checks if an array value is in the text.\n", + "/// Checks if an array value (with regular expression) is in the text.\n", ), - 3482..3527, + 4822..4893, ), ( Token( "pub", ), - 3527..3530, + 4893..4896, ), ( Token( "fun", ), - 3531..3534, + 4897..4900, ), ( Token( - "text_contains_any", + "match_regex_any", ), - 3535..3552, + 4901..4916, ), ( Token( "(", ), - 3552..3553, + 4916..4917, ), ( Token( "text", ), - 3553..3557, + 4917..4921, ), ( Token( ":", ), - 3557..3558, + 4921..4922, ), ( Token( "Text", ), - 3559..3563, + 4923..4927, ), ( Token( ",", ), - 3563..3564, + 4927..4928, ), ( Token( "terms", ), - 3565..3570, + 4929..4934, ), ( Token( ":", ), - 3570..3571, + 4934..4935, ), ( Token( "[", ), - 3572..3573, + 4936..4937, ), ( Token( "Text", ), - 3573..3577, + 4937..4941, ), ( Token( "]", ), - 3577..3578, + 4941..4942, ), ( Token( ")", ), - 3578..3579, + 4942..4943, ), ( Token( ":", ), - 3579..3580, + 4943..4944, ), ( Token( "Bool", ), - 3581..3585, + 4945..4949, ), ( Token( "{", ), - 3586..3587, + 4950..4951, ), ( Token( "for", ), - 3592..3595, + 4956..4959, ), ( Token( "term", ), - 3596..3600, + 4960..4964, ), ( Token( "in", ), - 3601..3603, + 4965..4967, ), ( Token( "terms", ), - 3604..3609, + 4968..4973, ), ( Token( "{", ), - 3610..3611, + 4974..4975, ), ( Token( "if", ), - 3620..3622, + 4984..4986, ), ( Token( - "text_contains", + "match_regex", ), - 3623..3636, + 4987..4998, ), ( Token( "(", ), - 3636..3637, + 4998..4999, ), ( Token( "text", ), - 3637..3641, + 4999..5003, ), ( Token( ",", ), - 3641..3642, + 5003..5004, ), ( Token( "term", ), - 3643..3647, + 5005..5009, ), ( Token( - ")", + ",", ), - 3647..3648, - ), - ( - Token( - "{", - ), - 3649..3650, - ), - ( - Token( - "return", - ), - 3663..3669, - ), - ( - Token( - "true", - ), - 3670..3674, - ), - ( - Token( - "}", - ), - 3683..3684, - ), - ( - Token( - "}", - ), - 3689..3690, - ), - ( - Token( - 
"return", - ), - 3696..3702, - ), - ( - Token( - "false", - ), - 3703..3708, - ), - ( - Token( - "}", - ), - 3709..3710, - ), - ( - Token( - "/// Checks if all the arrays values are in the string\n", - ), - 3712..3766, - ), - ( - Token( - "pub", - ), - 3766..3769, - ), - ( - Token( - "fun", - ), - 3770..3773, - ), - ( - Token( - "text_contains_all", - ), - 3774..3791, - ), - ( - Token( - "(", - ), - 3791..3792, - ), - ( - Token( - "text", - ), - 3792..3796, - ), - ( - Token( - ":", - ), - 3796..3797, - ), - ( - Token( - "Text", - ), - 3798..3802, - ), - ( - Token( - ",", - ), - 3802..3803, - ), - ( - Token( - "terms", - ), - 3804..3809, - ), - ( - Token( - ":", - ), - 3809..3810, - ), - ( - Token( - "[", - ), - 3811..3812, - ), - ( - Token( - "Text", - ), - 3812..3816, - ), - ( - Token( - "]", - ), - 3816..3817, - ), - ( - Token( - ")", - ), - 3817..3818, - ), - ( - Token( - ":", - ), - 3818..3819, - ), - ( - Token( - "Bool", - ), - 3820..3824, - ), - ( - Token( - "{", - ), - 3825..3826, - ), - ( - Token( - "for", - ), - 3831..3834, - ), - ( - Token( - "term", - ), - 3835..3839, - ), - ( - Token( - "in", - ), - 3840..3842, - ), - ( - Token( - "terms", - ), - 3843..3848, - ), - ( - Token( - "{", - ), - 3849..3850, - ), - ( - Token( - "if", - ), - 3859..3861, - ), - ( - Token( - "not", - ), - 3862..3865, - ), - ( - Token( - "text_contains", - ), - 3866..3879, - ), - ( - Token( - "(", - ), - 3879..3880, - ), - ( - Token( - "text", - ), - 3880..3884, - ), - ( - Token( - ",", - ), - 3884..3885, - ), - ( - Token( - "term", - ), - 3886..3890, - ), - ( - Token( - ")", - ), - 3890..3891, - ), - ( - Token( - "{", - ), - 3892..3893, - ), - ( - Token( - "return", - ), - 3906..3912, - ), - ( - Token( - "false", - ), - 3913..3918, - ), - ( - Token( - "}", - ), - 3927..3928, - ), - ( - Token( - "}", - ), - 3933..3934, - ), - ( - Token( - "return", - ), - 3940..3946, - ), - ( - Token( - "true", - ), - 3947..3951, - ), - ( - Token( - "}", - ), - 3952..3953, - ), - ( - Token( - "/// Match all occurences of a regex pattern.\n", - ), - 3955..4000, - ), - ( - Token( - "///\n", - ), - 4000..4004, - ), - ( - Token( - "/// Function uses `sed`\n", - ), - 4004..4028, - ), - ( - Token( - "pub", - ), - 4028..4031, - ), - ( - Token( - "fun", - ), - 4032..4035, - ), - ( - Token( - "match_regex", - ), - 4036..4047, - ), - ( - Token( - "(", - ), - 4047..4048, - ), - ( - Token( - "source", - ), - 4048..4054, - ), - ( - Token( - ":", - ), - 4054..4055, - ), - ( - Token( - "Text", - ), - 4056..4060, - ), - ( - Token( - ",", - ), - 4060..4061, - ), - ( - Token( - "search", - ), - 4062..4068, - ), - ( - Token( - ":", - ), - 4068..4069, - ), - ( - Token( - "Text", - ), - 4070..4074, - ), - ( - Token( - ",", - ), - 4074..4075, - ), - ( - Token( - "extended", - ), - 4076..4084, - ), - ( - Token( - ":", - ), - 4084..4085, - ), - ( - Token( - "Bool", - ), - 4086..4090, - ), - ( - Token( - "=", - ), - 4091..4092, - ), - ( - Token( - "false", - ), - 4093..4098, - ), - ( - Token( - ")", - ), - 4098..4099, - ), - ( - Token( - ":", - ), - 4099..4100, - ), - ( - Token( - "Bool", - ), - 4101..4105, - ), - ( - Token( - "{", - ), - 4106..4107, - ), - ( - Token( - "trust", - ), - 4112..4117, - ), - ( - Token( - "{", - ), - 4118..4119, - ), - ( - Token( - "search", - ), - 4128..4134, - ), - ( - Token( - "=", - ), - 4135..4136, - ), - ( - Token( - "replace", - ), - 4137..4144, - ), - ( - Token( - "(", - ), - 4144..4145, - ), - ( - Token( - "search", - ), - 4145..4151, - ), - ( - Token( - ",", - ), - 4151..4152, - ), - ( - Token( - "\"", - ), 
- 4153..4154, - ), - ( - Token( - "/", - ), - 4154..4155, - ), - ( - Token( - "\"", - ), - 4155..4156, - ), - ( - Token( - ",", - ), - 4156..4157, - ), - ( - Token( - "\"", - ), - 4158..4159, - ), - ( - Token( - "\\", - ), - 4159..4160, - ), - ( - Token( - "/", - ), - 4160..4161, - ), - ( - Token( - "\"", - ), - 4161..4162, - ), - ( - Token( - ")", - ), - 4162..4163, - ), - ( - Token( - "let", - ), - 4172..4175, - ), - ( - Token( - "output", - ), - 4176..4182, - ), - ( - Token( - "=", - ), - 4183..4184, - ), - ( - Token( - "\"", - ), - 4185..4186, - ), - ( - Token( - "\"", - ), - 4186..4187, - ), - ( - Token( - "if", - ), - 4196..4198, - ), - ( - Token( - "extended", - ), - 4199..4207, - ), - ( - Token( - "{", - ), - 4208..4209, - ), - ( - Token( - "// GNU sed versions 4.0 through 4.2 support extended regex syntax,\n", - ), - 4222..4289, - ), - ( - Token( - "// but only via the \"-r\" option; use that if the version information\n", - ), - 4301..4370, - ), - ( - Token( - "// contains \"GNU sed\".\n", - ), - 4382..4405, - ), - ( - Token( - "$", - ), - 4417..4418, - ), - ( - Token( - "re", - ), - 4419..4421, - ), - ( - Token( - "=", - ), - 4421..4422, - ), - ( - Token( - "'", - ), - 4422..4423, - ), - ( - Token( - "\\", - ), - 4423..4424, - ), - ( - Token( - "bCopyright", - ), - 4424..4434, - ), - ( - Token( - "\\", - ), - 4434..4435, - ), - ( - Token( - "b", - ), - 4435..4436, - ), - ( - Token( - ".", - ), - 4436..4437, - ), - ( - Token( - "+", - ), - 4437..4438, - ), - ( - Token( - "\\", - ), - 4438..4439, - ), - ( - Token( - "bFree", - ), - 4439..4444, - ), - ( - Token( - "Software", - ), - 4445..4453, - ), - ( - Token( - "Foundation", - ), - 4454..4464, - ), - ( - Token( - "\\", - ), - 4464..4465, - ), - ( - Token( - "b'", - ), - 4465..4467, - ), - ( - Token( - ";", - ), - 4467..4468, - ), - ( - Token( - "[", - ), - 4469..4470, - ), - ( - Token( - "[", - ), - 4470..4471, - ), - ( - Token( - "\\", - ), - 4472..4473, - ), - ( - Token( - "$", - ), - 4473..4474, - ), - ( - Token( - "(", - ), - 4474..4475, - ), - ( - Token( - "sed", - ), - 4475..4478, - ), - ( - Token( - "-", - ), - 4479..4480, - ), - ( - Token( - "-", - ), - 4480..4481, - ), - ( - Token( - "version", - ), - 4481..4488, - ), - ( - Token( - "2", - ), - 4489..4490, - ), - ( - Token( - ">", - ), - 4490..4491, - ), - ( - Token( - "/", - ), - 4491..4492, - ), - ( - Token( - "dev", - ), - 4492..4495, - ), - ( - Token( - "/", - ), - 4495..4496, - ), - ( - Token( - "null", - ), - 4496..4500, - ), - ( - Token( - ")", - ), - 4500..4501, - ), - ( - Token( - "=", - ), - 4502..4503, - ), - ( - Token( - "~", - ), - 4503..4504, - ), - ( - Token( - "\\", - ), - 4505..4506, - ), - ( - Token( - "$", - ), - 4506..4507, - ), - ( - Token( - "re", - ), - 4507..4509, - ), - ( - Token( - "]", - ), - 4510..4511, - ), - ( - Token( - "]", - ), - 4511..4512, - ), - ( - Token( - "$", - ), - 4513..4514, - ), - ( - Token( - "let", - ), - 4527..4530, - ), - ( - Token( - "flag", - ), - 4531..4535, - ), - ( - Token( - "=", - ), - 4536..4537, - ), - ( - Token( - "status", - ), - 4538..4544, - ), - ( - Token( - "==", - ), - 4545..4547, - ), - ( - Token( - "0", - ), - 4548..4549, - ), - ( - Token( - "then", - ), - 4550..4554, - ), - ( - Token( - "\"", - ), - 4555..4556, - ), - ( - Token( - "-", - ), - 4556..4557, - ), - ( - Token( - "r", - ), - 4557..4558, - ), - ( - Token( - "\"", - ), - 4558..4559, - ), - ( - Token( - "else", - ), - 4560..4564, - ), - ( - Token( - "\"", - ), - 4565..4566, - ), - ( - Token( - "-", - ), - 4566..4567, - ), - ( - Token( - "E", - ), 
- 4567..4568, - ), - ( - Token( - "\"", - ), - 4568..4569, - ), - ( - Token( - "output", - ), - 4582..4588, - ), - ( - Token( - "=", - ), - 4589..4590, - ), - ( - Token( - "$", - ), - 4591..4592, - ), - ( - Token( - "echo", - ), - 4593..4597, - ), - ( - Token( - "\"", - ), - 4598..4599, - ), - ( - Token( - "{", - ), - 4599..4600, - ), - ( - Token( - "source", - ), - 4600..4606, - ), - ( - Token( - "}", - ), - 4606..4607, - ), - ( - Token( - "\"", - ), - 4607..4608, - ), - ( - Token( - "|", - ), - 4609..4610, - ), - ( - Token( - "sed", - ), - 4611..4614, - ), - ( - Token( - "\"", - ), - 4615..4616, - ), - ( - Token( - "{", - ), - 4616..4617, - ), - ( - Token( - "flag", - ), - 4617..4621, - ), - ( - Token( - "}", - ), - 4621..4622, - ), - ( - Token( - "\"", - ), - 4622..4623, - ), - ( - Token( - "-", - ), - 4624..4625, - ), - ( - Token( - "ne", - ), - 4625..4627, - ), - ( - Token( - "\"", - ), - 4628..4629, - ), - ( - Token( - "/", - ), - 4629..4630, - ), - ( - Token( - "{", - ), - 4630..4631, - ), - ( - Token( - "search", - ), - 4631..4637, - ), - ( - Token( - "}", - ), - 4637..4638, - ), - ( - Token( - "/", - ), - 4638..4639, - ), - ( - Token( - "p", - ), - 4639..4640, - ), - ( - Token( - "\"", - ), - 4640..4641, - ), - ( - Token( - "$", - ), - 4642..4643, - ), - ( - Token( - "}", - ), - 4652..4653, - ), - ( - Token( - "else", - ), - 4654..4658, - ), - ( - Token( - "{", - ), - 4659..4660, - ), - ( - Token( - "output", - ), - 4673..4679, - ), - ( - Token( - "=", - ), - 4680..4681, - ), - ( - Token( - "$", - ), - 4682..4683, - ), - ( - Token( - "echo", - ), - 4684..4688, - ), - ( - Token( - "\"", - ), - 4689..4690, - ), - ( - Token( - "{", - ), - 4690..4691, - ), - ( - Token( - "source", - ), - 4691..4697, - ), - ( - Token( - "}", - ), - 4697..4698, - ), - ( - Token( - "\"", - ), - 4698..4699, - ), - ( - Token( - "|", - ), - 4700..4701, - ), - ( - Token( - "sed", - ), - 4702..4705, - ), - ( - Token( - "-", - ), - 4706..4707, - ), - ( - Token( - "ne", - ), - 4707..4709, - ), - ( - Token( - "\"", - ), - 4710..4711, - ), - ( - Token( - "/", - ), - 4711..4712, - ), - ( - Token( - "{", - ), - 4712..4713, - ), - ( - Token( - "search", - ), - 4713..4719, - ), - ( - Token( - "}", - ), - 4719..4720, - ), - ( - Token( - "/", - ), - 4720..4721, - ), - ( - Token( - "p", - ), - 4721..4722, - ), - ( - Token( - "\"", - ), - 4722..4723, - ), - ( - Token( - "$", - ), - 4724..4725, - ), - ( - Token( - "}", - ), - 4734..4735, - ), - ( - Token( - "if", - ), - 4744..4746, - ), - ( - Token( - "output", - ), - 4747..4753, - ), - ( - Token( - "!=", - ), - 4754..4756, - ), - ( - Token( - "\"", - ), - 4757..4758, - ), - ( - Token( - "\"", - ), - 4758..4759, - ), - ( - Token( - "{", - ), - 4760..4761, - ), - ( - Token( - "return", - ), - 4774..4780, - ), - ( - Token( - "true", - ), - 4781..4785, - ), - ( - Token( - "}", - ), - 4794..4795, - ), - ( - Token( - "}", - ), - 4800..4801, - ), - ( - Token( - "return", - ), - 4806..4812, - ), - ( - Token( - "false", - ), - 4813..4818, - ), - ( - Token( - "}", - ), - 4819..4820, - ), - ( - Token( - "/// Checks if an array value (with regular expression) is in the text.\n", - ), - 4822..4893, - ), - ( - Token( - "pub", - ), - 4893..4896, - ), - ( - Token( - "fun", - ), - 4897..4900, - ), - ( - Token( - "match_regex_any", - ), - 4901..4916, - ), - ( - Token( - "(", - ), - 4916..4917, - ), - ( - Token( - "text", - ), - 4917..4921, - ), - ( - Token( - ":", - ), - 4921..4922, - ), - ( - Token( - "Text", - ), - 4923..4927, - ), - ( - Token( - ",", - ), - 4927..4928, - ), - ( - 
Token( - "terms", - ), - 4929..4934, - ), - ( - Token( - ":", - ), - 4934..4935, - ), - ( - Token( - "[", - ), - 4936..4937, - ), - ( - Token( - "Text", - ), - 4937..4941, - ), - ( - Token( - "]", - ), - 4941..4942, - ), - ( - Token( - ")", - ), - 4942..4943, - ), - ( - Token( - ":", - ), - 4943..4944, - ), - ( - Token( - "Bool", - ), - 4945..4949, - ), - ( - Token( - "{", - ), - 4950..4951, - ), - ( - Token( - "for", - ), - 4956..4959, - ), - ( - Token( - "term", - ), - 4960..4964, - ), - ( - Token( - "in", - ), - 4965..4967, - ), - ( - Token( - "terms", - ), - 4968..4973, - ), - ( - Token( - "{", - ), - 4974..4975, - ), - ( - Token( - "if", - ), - 4984..4986, - ), - ( - Token( - "match_regex", - ), - 4987..4998, - ), - ( - Token( - "(", - ), - 4998..4999, - ), - ( - Token( - "text", - ), - 4999..5003, - ), - ( - Token( - ",", - ), - 5003..5004, - ), - ( - Token( - "term", - ), - 5005..5009, - ), - ( - Token( - ",", - ), - 5009..5010, - ), - ( - Token( - "false", - ), - 5011..5016, - ), - ( - Token( - ")", - ), - 5016..5017, - ), - ( - Token( - "{", - ), - 5018..5019, - ), - ( - Token( - "return", - ), - 5032..5038, - ), - ( - Token( - "true", - ), - 5039..5043, - ), - ( - Token( - "}", - ), - 5052..5053, - ), - ( - Token( - "}", - ), - 5058..5059, - ), - ( - Token( - "return", - ), - 5065..5071, - ), - ( - Token( - "false", - ), - 5072..5077, - ), - ( - Token( - "}", - ), - 5078..5079, - ), - ( - Token( - "/// Reverses text using `rev`.\n", - ), - 5081..5112, - ), - ( - Token( - "pub", - ), - 5112..5115, - ), - ( - Token( - "fun", - ), - 5116..5119, - ), - ( - Token( - "reversed", - ), - 5120..5128, - ), - ( - Token( - "(", - ), - 5128..5129, - ), - ( - Token( - "text", - ), - 5129..5133, - ), - ( - Token( - ":", - ), - 5133..5134, - ), - ( - Token( - "Text", - ), - 5135..5139, - ), - ( - Token( - ")", - ), - 5139..5140, - ), - ( - Token( - ":", - ), - 5140..5141, - ), - ( - Token( - "Text", - ), - 5142..5146, - ), - ( - Token( - "{", - ), - 5147..5148, - ), - ( - Token( - "return", - ), - 5153..5159, - ), - ( - Token( - "trust", - ), - 5160..5165, - ), - ( - Token( - "$", - ), - 5166..5167, - ), - ( - Token( - "echo", - ), - 5168..5172, - ), - ( - Token( - "\"", - ), - 5173..5174, - ), - ( - Token( - "{", - ), - 5174..5175, - ), - ( - Token( - "text", - ), - 5175..5179, - ), - ( - Token( - "}", - ), - 5179..5180, - ), - ( - Token( - "\"", - ), - 5180..5181, - ), - ( - Token( - "|", - ), - 5182..5183, - ), - ( - Token( - "rev", - ), - 5184..5187, - ), - ( - Token( - "$", - ), - 5188..5189, - ), - ( - Token( - "}", - ), - 5190..5191, - ), - ( - Token( - "/// Checks if text starts with a value.\n", - ), - 5193..5233, - ), - ( - Token( - "pub", - ), - 5233..5236, - ), - ( - Token( - "fun", - ), - 5237..5240, - ), - ( - Token( - "starts_with", - ), - 5241..5252, - ), - ( - Token( - "(", - ), - 5252..5253, - ), - ( - Token( - "text", - ), - 5253..5257, - ), - ( - Token( - ":", - ), - 5257..5258, - ), - ( - Token( - "Text", - ), - 5259..5263, - ), - ( - Token( - ",", - ), - 5263..5264, - ), - ( - Token( - "prefix", - ), - 5265..5271, - ), - ( - Token( - ":", - ), - 5271..5272, - ), - ( - Token( - "Text", - ), - 5273..5277, - ), - ( - Token( - ")", - ), - 5277..5278, - ), - ( - Token( - ":", - ), - 5278..5279, - ), - ( - Token( - "Bool", - ), - 5280..5284, - ), - ( - Token( - "{", - ), - 5285..5286, - ), - ( - Token( - "let", - ), - 5289..5292, - ), - ( - Token( - "result", - ), - 5293..5299, - ), - ( - Token( - "=", - ), - 5300..5301, - ), - ( - Token( - "trust", - ), - 5302..5307, - ), - ( - 
Token( - "$", - ), - 5308..5309, - ), - ( - Token( - "if", - ), - 5310..5312, - ), - ( - Token( - "[", - ), - 5313..5314, - ), - ( - Token( - "[", - ), - 5314..5315, - ), - ( - Token( - "\"", - ), - 5316..5317, - ), - ( - Token( - "{", - ), - 5317..5318, - ), - ( - Token( - "text", - ), - 5318..5322, - ), - ( - Token( - "}", - ), - 5322..5323, - ), - ( - Token( - "\"", - ), - 5323..5324, - ), - ( - Token( - "==", - ), - 5325..5327, - ), - ( - Token( - "\"", - ), - 5328..5329, - ), - ( - Token( - "{", - ), - 5329..5330, - ), - ( - Token( - "prefix", - ), - 5330..5336, - ), - ( - Token( - "}", - ), - 5336..5337, - ), - ( - Token( - "\"", - ), - 5337..5338, - ), - ( - Token( - "*", - ), - 5338..5339, - ), - ( - Token( - "]", - ), - 5340..5341, - ), - ( - Token( - "]", - ), - 5341..5342, - ), - ( - Token( - ";", - ), - 5342..5343, - ), - ( - Token( - "then", - ), - 5344..5348, - ), - ( - Token( - "echo", - ), - 5353..5357, + 5009..5010, ), ( Token( - "1", + "false", ), - 5358..5359, + 5011..5016, ), ( Token( - "fi", + ")", ), - 5362..5364, + 5016..5017, ), ( Token( - "$", + "{", ), - 5365..5366, + 5018..5019, ), ( Token( "return", ), - 5370..5376, + 5032..5038, ), ( Token( - "result", + "true", ), - 5377..5383, + 5039..5043, ), ( Token( - "==", + "}", ), - 5384..5386, + 5052..5053, ), ( Token( - "\"", + "}", ), - 5387..5388, + 5058..5059, ), ( Token( - "1", + "return", ), - 5388..5389, + 5065..5071, ), ( Token( - "\"", + "false", ), - 5389..5390, + 5072..5077, ), ( Token( "}", ), - 5391..5392, + 5078..5079, ), ( Token( - "/// Checks if text ends with a value.\n", + "/// Reverses text using `rev`.\n", ), - 5394..5432, + 5081..5112, ), ( Token( "pub", ), - 5432..5435, + 5112..5115, ), ( Token( "fun", ), - 5436..5439, + 5116..5119, ), ( Token( - "ends_with", + "reversed", ), - 5440..5449, + 5120..5128, ), ( Token( "(", ), - 5449..5450, + 5128..5129, ), ( Token( "text", ), - 5450..5454, - ), - ( - Token( - ":", - ), - 5454..5455, - ), - ( - Token( - "Text", - ), - 5456..5460, - ), - ( - Token( - ",", - ), - 5460..5461, - ), - ( - Token( - "suffix", - ), - 5462..5468, + 5129..5133, ), ( Token( ":", ), - 5468..5469, + 5133..5134, ), ( Token( "Text", ), - 5470..5474, + 5135..5139, ), ( Token( ")", ), - 5474..5475, + 5139..5140, ), ( Token( ":", ), - 5475..5476, + 5140..5141, ), ( Token( - "Bool", + "Text", ), - 5477..5481, + 5142..5146, ), ( Token( "{", ), - 5482..5483, - ), - ( - Token( - "let", - ), - 5486..5489, - ), - ( - Token( - "result", - ), - 5490..5496, + 5147..5148, ), ( Token( - "=", + "return", ), - 5497..5498, + 5153..5159, ), ( Token( "trust", ), - 5499..5504, + 5160..5165, ), ( Token( "$", ), - 5505..5506, - ), - ( - Token( - "if", - ), - 5507..5509, - ), - ( - Token( - "[", - ), - 5510..5511, - ), - ( - Token( - "[", - ), - 5511..5512, + 5166..5167, ), ( Token( - "\"", + "echo \"", ), - 5513..5514, + 5167..5174, ), ( Token( "{", ), - 5514..5515, + 5174..5175, ), ( Token( "text", ), - 5515..5519, + 5175..5179, ), ( Token( "}", ), - 5519..5520, - ), - ( - Token( - "\"", - ), - 5520..5521, - ), - ( - Token( - "==", - ), - 5522..5524, + 5179..5180, ), ( Token( - "*", + "\" | rev ", ), - 5525..5526, + 5180..5188, ), ( Token( - "\"", + "$", ), - 5526..5527, + 5188..5189, ), ( Token( - "{", + "}", ), - 5527..5528, + 5190..5191, ), ( Token( - "suffix", + "/// Checks if text starts with a value.\n", ), - 5528..5534, + 5193..5233, ), ( Token( - "}", + "pub", ), - 5534..5535, + 5233..5236, ), ( Token( - "\"", + "fun", ), - 5535..5536, + 5237..5240, ), ( Token( - "]", + "starts_with", ), - 
5537..5538, + 5241..5252, ), ( Token( - "]", + "(", ), - 5538..5539, + 5252..5253, ), ( Token( - ";", + "text", ), - 5539..5540, + 5253..5257, ), ( Token( - "then", + ":", ), - 5541..5545, + 5257..5258, ), ( Token( - "echo", + "Text", ), - 5550..5554, + 5259..5263, ), ( Token( - "1", + ",", ), - 5555..5556, + 5263..5264, ), ( Token( - "fi", + "prefix", ), - 5559..5561, + 5265..5271, ), ( Token( - "$", + ":", ), - 5562..5563, + 5271..5272, ), ( Token( - "return", + "Text", ), - 5567..5573, + 5273..5277, ), ( Token( - "result", + ")", ), - 5574..5580, + 5277..5278, ), ( Token( - "==", + ":", ), - 5581..5583, + 5278..5279, ), ( Token( - "\"", + "Bool", ), - 5584..5585, + 5280..5284, ), ( Token( - "1", + "{", ), - 5585..5586, + 5285..5286, ), ( Token( - "\"", + "let", ), - 5586..5587, + 5289..5292, ), ( Token( - "}", + "result", ), - 5588..5589, + 5293..5299, ), ( Token( - "/// Returns a substring from `text` starting at the given `index` (0-based).\n", + "=", ), - 5591..5668, + 5300..5301, ), ( Token( - "///\n", + "trust", ), - 5668..5672, + 5302..5307, ), ( Token( - "/// If `index` is negative, the substring starts from the end of `text` based on the absolute value of `index`.\n", + "$", ), - 5672..5784, + 5308..5309, ), ( Token( - "/// If `length` is provided, the substring will include `length` characters; otherwise, it slices to the end of `text`.\n", + "if [[ \"", ), - 5784..5904, + 5309..5317, ), ( Token( - "/// If `length` is negative, an empty string is returned.\n", + "{", ), - 5904..5962, + 5317..5318, ), ( Token( - "pub", + "text", ), - 5962..5965, + 5318..5322, ), ( Token( - "fun", + "}", ), - 5966..5969, + 5322..5323, ), ( Token( - "slice", + "\" == \"", ), - 5970..5975, + 5323..5329, ), ( Token( - "(", + "{", ), - 5975..5976, + 5329..5330, ), ( Token( - "text", + "prefix", ), - 5976..5980, + 5330..5336, ), ( Token( - ":", + "}", ), - 5980..5981, + 5336..5337, ), ( Token( - "Text", + "\"* ]]; then\n echo 1\n fi ", ), - 5982..5986, + 5337..5365, ), ( Token( - ",", + "$", ), - 5986..5987, + 5365..5366, ), ( Token( - "index", + "return", ), - 5988..5993, + 5370..5376, ), ( Token( - ":", + "result", ), - 5993..5994, + 5377..5383, ), ( Token( - "Num", + "==", ), - 5995..5998, + 5384..5386, ), ( Token( - ",", + "\"", ), - 5998..5999, + 5387..5388, ), ( Token( - "length", + "1", ), - 6000..6006, + 5388..5389, ), ( Token( - ":", + "\"", ), - 6006..6007, + 5389..5390, ), ( Token( - "Num", + "}", ), - 6008..6011, + 5391..5392, ), ( Token( - "=", + "/// Checks if text ends with a value.\n", ), - 6012..6013, + 5394..5432, ), ( Token( - "0", + "pub", ), - 6014..6015, + 5432..5435, ), ( Token( - ")", + "fun", ), - 6015..6016, + 5436..5439, ), ( Token( - ":", + "ends_with", ), - 6016..6017, + 5440..5449, ), ( Token( - "Text", + "(", ), - 6018..6022, + 5449..5450, ), ( Token( - "{", + "text", ), - 6023..6024, + 5450..5454, ), ( Token( - "if", + ":", ), - 6029..6031, + 5454..5455, ), ( Token( - "length", + "Text", ), - 6032..6038, + 5456..5460, ), ( Token( - "==", + ",", ), - 6039..6041, + 5460..5461, ), ( Token( - "0", + "suffix", ), - 6042..6043, + 5462..5468, ), ( Token( ":", ), - 6043..6044, + 5468..5469, ), ( Token( - "length", + "Text", ), - 6045..6051, + 5470..5474, ), ( Token( - "=", + ")", ), - 6052..6053, + 5474..5475, ), ( Token( - "len", + ":", ), - 6054..6057, + 5475..5476, ), ( Token( - "(", + "Bool", ), - 6057..6058, + 5477..5481, ), ( Token( - "text", + "{", ), - 6058..6062, + 5482..5483, ), ( Token( - ")", + "let", ), - 6062..6063, + 5486..5489, ), ( Token( - "-", + "result", ), 
- 6064..6065, + 5490..5496, ), ( Token( - "index", + "=", ), - 6066..6071, + 5497..5498, ), ( Token( - "if", + "trust", ), - 6076..6078, + 5499..5504, ), ( Token( - "length", + "$", ), - 6079..6085, + 5505..5506, ), ( Token( - "<=", + "if [[ \"", ), - 6086..6088, + 5506..5514, ), ( Token( - "0", + "{", ), - 6089..6090, + 5514..5515, ), ( Token( - ":", + "text", ), - 6090..6091, + 5515..5519, ), ( Token( - "return", + "}", ), - 6092..6098, + 5519..5520, ), ( Token( - "\"", + "\" == *\"", ), - 6099..6100, + 5520..5527, ), ( Token( - "\"", + "{", ), - 6100..6101, + 5527..5528, ), ( Token( - "return", + "suffix", ), - 6106..6112, + 5528..5534, ), ( Token( - "trust", + "}", ), - 6113..6118, + 5534..5535, ), ( Token( - "$", + "\" ]]; then\n echo 1\n fi ", ), - 6119..6120, + 5535..5562, ), ( Token( - "printf", + "$", ), - 6121..6127, + 5562..5563, ), ( Token( - "\"", + "return", ), - 6128..6129, + 5567..5573, ), ( Token( - "%", + "result", ), - 6129..6130, + 5574..5580, ), ( Token( - ".", + "==", ), - 6130..6131, + 5581..5583, ), ( Token( - "{", + "\"", ), - 6131..6132, + 5584..5585, ), ( Token( - "length", + "1", ), - 6132..6138, + 5585..5586, ), ( Token( - "}", + "\"", ), - 6138..6139, + 5586..5587, ), ( Token( - "s", + "}", ), - 6139..6140, + 5588..5589, ), ( Token( - "\"", + "/// Returns a substring from `text` starting at the given `index` (0-based).\n", ), - 6140..6141, + 5591..5668, ), ( Token( - "\"", + "///\n", ), - 6142..6143, + 5668..5672, ), ( Token( - "\\", + "/// If `index` is negative, the substring starts from the end of `text` based on the absolute value of `index`.\n", ), - 6143..6144, + 5672..5784, ), ( Token( - "$", + "/// If `length` is provided, the substring will include `length` characters; otherwise, it slices to the end of `text`.\n", ), - 6144..6145, + 5784..5904, ), ( Token( - "\\", + "/// If `length` is negative, an empty string is returned.\n", ), - 6145..6146, + 5904..5962, ), ( Token( - "{", + "pub", ), - 6146..6147, + 5962..5965, ), ( Token( - "text", + "fun", ), - 6147..6151, + 5966..5969, ), ( Token( - ":", + "slice", ), - 6151..6152, + 5970..5975, ), ( Token( - "{", + "(", ), - 6152..6153, + 5975..5976, ), ( Token( - "index", + "text", ), - 6153..6158, + 5976..5980, ), ( Token( - "}", + ":", ), - 6158..6159, + 5980..5981, ), ( Token( - "}", + "Text", ), - 6159..6160, + 5982..5986, ), ( Token( - "\"", + ",", ), - 6160..6161, + 5986..5987, ), ( Token( - "$", + "index", ), - 6162..6163, + 5988..5993, ), ( Token( - "}", + ":", ), - 6164..6165, + 5993..5994, ), ( Token( - "/// Returns the character from `text` at the specified `index` (0-based).\n", + "Num", ), - 6167..6241, + 5995..5998, ), ( Token( - "///\n", + ",", ), - 6241..6245, + 5998..5999, ), ( Token( - "/// If `index` is negative, the substring starts from the end of `text` based on the absolute value of `index`.\n", + "length", ), - 6245..6357, + 6000..6006, ), ( Token( - "pub", + ":", ), - 6357..6360, + 6006..6007, ), ( Token( - "fun", + "Num", ), - 6361..6364, + 6008..6011, ), ( Token( - "char_at", + "=", ), - 6365..6372, + 6012..6013, ), ( Token( - "(", + "0", ), - 6372..6373, + 6014..6015, ), ( Token( - "text", + ")", ), - 6373..6377, + 6015..6016, ), ( Token( ":", ), - 6377..6378, + 6016..6017, ), ( Token( "Text", ), - 6379..6383, + 6018..6022, ), ( Token( - ",", + "{", ), - 6383..6384, + 6023..6024, ), ( Token( - "index", + "if", ), - 6385..6390, + 6029..6031, ), ( Token( - ":", + "length", ), - 6390..6391, + 6032..6038, ), ( Token( - "Num", + "==", ), - 6392..6395, + 6039..6041, ), ( Token( - ")", 
+ "0", ), - 6395..6396, + 6042..6043, ), ( Token( ":", ), - 6396..6397, + 6043..6044, ), ( Token( - "Text", + "length", ), - 6398..6402, + 6045..6051, ), ( Token( - "{", + "=", ), - 6403..6404, + 6052..6053, ), ( Token( - "return", + "len", ), - 6409..6415, + 6054..6057, ), ( Token( - "trust", + "(", ), - 6416..6421, + 6057..6058, ), ( Token( - "$", + "text", ), - 6422..6423, + 6058..6062, ), ( Token( - "printf", + ")", ), - 6424..6430, + 6062..6063, ), ( Token( - "\"", + "-", ), - 6431..6432, + 6064..6065, ), ( Token( - "%", + "index", ), - 6432..6433, + 6066..6071, ), ( Token( - ".", + "if", ), - 6433..6434, + 6076..6078, ), ( Token( - "1s", + "length", ), - 6434..6436, + 6079..6085, ), ( Token( - "\"", + "<=", ), - 6436..6437, + 6086..6088, ), ( Token( - "\"", + "0", ), - 6438..6439, + 6089..6090, ), ( Token( - "\\", + ":", ), - 6439..6440, + 6090..6091, ), ( Token( - "$", + "return", ), - 6440..6441, + 6092..6098, ), ( Token( - "\\", + "\"", ), - 6441..6442, + 6099..6100, ), ( Token( - "{", + "\"", ), - 6442..6443, + 6100..6101, ), ( Token( - "text", + "return", ), - 6443..6447, + 6106..6112, ), ( Token( - ":", + "trust", ), - 6447..6448, + 6113..6118, ), ( Token( - "{", + "$", ), - 6448..6449, + 6119..6120, ), ( Token( - "index", + "printf \"%.", ), - 6449..6454, + 6120..6131, ), ( Token( - "}", + "{", ), - 6454..6455, + 6131..6132, ), ( Token( - "}", + "length", ), - 6455..6456, + 6132..6138, ), ( Token( - "\"", + "}", ), - 6456..6457, + 6138..6139, ), ( Token( - "$", + "s\" \"", ), - 6458..6459, + 6139..6143, ), ( Token( - "}", + "\\$", ), - 6460..6461, + 6143..6145, ), ( Token( - "/// Capitalize the first letter of the given `text`.\n", + "\\{", ), - 6463..6516, + 6145..6147, ), ( Token( - "#", + "text:", ), - 6516..6517, + 6147..6152, ), ( Token( - "[", + "{", ), - 6517..6518, + 6152..6153, ), ( Token( - "allow_absurd_cast", + "index", ), - 6518..6535, + 6153..6158, ), ( Token( - "]", + "}", ), - 6535..6536, + 6158..6159, ), ( Token( - "pub", + "}\" ", ), - 6537..6540, + 6159..6162, ), ( Token( - "fun", + "$", ), - 6541..6544, + 6162..6163, ), ( Token( - "capitalized", + "}", ), - 6545..6556, + 6164..6165, ), ( Token( - "(", + "/// Returns the character from `text` at the specified `index` (0-based).\n", ), - 6556..6557, + 6167..6241, ), ( Token( - "text", + "///\n", ), - 6557..6561, + 6241..6245, ), ( Token( - ":", + "/// If `index` is negative, the substring starts from the end of `text` based on the absolute value of `index`.\n", ), - 6561..6562, + 6245..6357, ), ( Token( - "Text", + "pub", ), - 6563..6567, + 6357..6360, ), ( Token( - ")", + "fun", ), - 6567..6568, + 6361..6364, ), ( Token( - ":", + "char_at", ), - 6568..6569, + 6365..6372, ), ( Token( - "Text", + "(", ), - 6570..6574, + 6372..6373, ), ( Token( - "{", + "text", ), - 6575..6576, + 6373..6377, ), ( Token( - "trust", + ":", ), - 6581..6586, + 6377..6378, ), ( Token( - "{", + "Text", ), - 6587..6588, + 6379..6383, ), ( Token( - "if", + ",", ), - 6597..6599, + 6383..6384, ), ( Token( - "len", + "index", ), - 6600..6603, + 6385..6390, ), ( Token( - "(", + ":", ), - 6603..6604, + 6390..6391, ), ( Token( - "text", + "Num", ), - 6604..6608, + 6392..6395, ), ( Token( ")", ), - 6608..6609, + 6395..6396, ), ( Token( - "==", + ":", ), - 6610..6612, + 6396..6397, ), ( Token( - "0", + "Text", ), - 6613..6614, + 6398..6402, ), ( Token( "{", ), - 6615..6616, + 6403..6404, ), ( Token( "return", ), - 6629..6635, + 6409..6415, ), ( Token( - "text", + "trust", ), - 6636..6640, + 6416..6421, ), ( Token( - "}", + "$", ), - 
6649..6650, + 6422..6423, ), ( Token( - "const", + "printf \"%.1s\" \"", ), - 6659..6664, + 6423..6439, ), ( Token( - "bash_version", + "\\$", ), - 6665..6677, + 6439..6441, ), ( Token( - "=", + "\\{", ), - 6678..6679, + 6441..6443, ), ( Token( - "$", + "text:", ), - 6680..6681, + 6443..6448, ), ( Token( - "echo", + "{", ), - 6682..6686, + 6448..6449, ), ( Token( - "\\", + "index", ), - 6687..6688, + 6449..6454, ), ( Token( - "\"", + "}", ), - 6688..6689, + 6454..6455, ), ( Token( - "\\", + "}\" ", ), - 6689..6690, + 6455..6458, ), ( Token( "$", ), - 6690..6691, + 6458..6459, ), ( Token( - "\\", + "}", ), - 6691..6692, + 6460..6461, ), ( Token( - "{", + "/// Capitalize the first letter of the given `text`.\n", ), - 6692..6693, + 6463..6516, ), ( Token( - "BASH_VERSINFO", + "#[", ), - 6693..6706, + 6516..6518, ), ( Token( - "[", + "allow_absurd_cast", ), - 6706..6707, + 6518..6535, ), ( Token( - "0", + "]", ), - 6707..6708, + 6535..6536, ), ( Token( - "]", + "pub", ), - 6708..6709, + 6537..6540, ), ( Token( - "}", + "fun", ), - 6709..6710, + 6541..6544, ), ( Token( - "\\", + "capitalized", ), - 6710..6711, + 6545..6556, ), ( Token( - "\"", + "(", ), - 6711..6712, + 6556..6557, ), ( Token( - "$", + "text", ), - 6713..6714, + 6557..6561, ), ( Token( - "as", + ":", ), - 6715..6717, + 6561..6562, ), ( Token( - "Num", + "Text", ), - 6718..6721, + 6563..6567, ), ( Token( - "if", + ")", ), - 6730..6732, + 6567..6568, + ), + ( + Token( + ":", + ), + 6568..6569, ), ( Token( - "bash_version", + "Text", ), - 6733..6745, + 6570..6574, ), ( Token( - ">=", + "{", ), - 6746..6748, + 6575..6576, ), ( Token( - "4", + "trust", ), - 6749..6750, + 6581..6586, ), ( Token( "{", ), - 6751..6752, + 6587..6588, ), ( Token( - "return", + "if", ), - 6765..6771, + 6597..6599, ), ( Token( - "$", + "len", ), - 6772..6773, + 6600..6603, ), ( Token( - "echo", + "(", ), - 6774..6778, + 6603..6604, ), ( Token( - "\\", + "text", ), - 6779..6780, + 6604..6608, ), ( Token( - "\"", + ")", ), - 6780..6781, + 6608..6609, ), ( Token( - "\\", + "==", ), - 6781..6782, + 6610..6612, ), ( Token( - "$", + "0", ), - 6782..6783, + 6613..6614, ), ( Token( - "\\", + "{", ), - 6783..6784, + 6615..6616, ), ( Token( - "{", + "return", ), - 6784..6785, + 6629..6635, ), ( Token( - "text^", + "text", ), - 6785..6790, + 6636..6640, ), ( Token( "}", ), - 6790..6791, + 6649..6650, ), ( Token( - "\\", + "const", ), - 6791..6792, + 6659..6664, ), ( Token( - "\"", + "bash_version", ), - 6792..6793, + 6665..6677, ), ( Token( - "$", + "=", ), - 6794..6795, + 6678..6679, ), ( Token( - "}", + "$", ), - 6804..6805, + 6680..6681, ), ( Token( - "// GNU sed supports \\U\n", + "echo ", ), - 6814..6837, + 6681..6687, ), ( Token( - "$", + "\\\"", ), - 6845..6846, + 6687..6689, ), ( Token( - "re", + "\\$", ), - 6847..6849, + 6689..6691, ), ( Token( - "=", + "\\{", ), - 6849..6850, + 6691..6693, ), ( Token( - "'", + "BASH_VERSINFO[0]}", ), - 6850..6851, + 6693..6710, ), ( Token( - "\\", + "\\\"", ), - 6851..6852, + 6710..6712, ), ( Token( - "bCopyright", + " ", ), - 6852..6862, + 6712..6713, ), ( Token( - "\\", + "$", ), - 6862..6863, + 6713..6714, ), ( Token( - "b", + "as", ), - 6863..6864, + 6715..6717, ), ( Token( - ".", + "Num", ), - 6864..6865, + 6718..6721, ), ( Token( - "+", + "if", ), - 6865..6866, + 6730..6732, ), ( Token( - "\\", + "bash_version", ), - 6866..6867, + 6733..6745, ), ( Token( - "bFree", + ">=", ), - 6867..6872, + 6746..6748, ), ( Token( - "Software", + "4", ), - 6873..6881, + 6749..6750, ), ( Token( - "Foundation", + "{", ), - 6882..6892, + 
6751..6752, ), ( Token( - "\\", + "return", ), - 6892..6893, + 6765..6771, ), ( Token( - "b'", + "$", ), - 6893..6895, + 6772..6773, ), ( Token( - ";", + "echo ", ), - 6895..6896, + 6773..6779, ), ( Token( - "[", + "\\\"", ), - 6897..6898, + 6779..6781, ), ( Token( - "[", + "\\$", ), - 6898..6899, + 6781..6783, ), ( Token( - "\\", + "\\{", ), - 6900..6901, + 6783..6785, ), ( Token( - "$", + "text^}", ), - 6901..6902, + 6785..6791, ), ( Token( - "(", + "\\\"", ), - 6902..6903, + 6791..6793, ), ( Token( - "sed", + " ", ), - 6903..6906, + 6793..6794, ), ( Token( - "-", + "$", ), - 6907..6908, + 6794..6795, ), ( Token( - "-", + "}", ), - 6908..6909, + 6804..6805, ), ( Token( - "version", + "// GNU sed supports \\U\n", ), - 6909..6916, + 6814..6837, ), ( Token( - "2", + "$", ), - 6917..6918, + 6845..6846, ), ( Token( - ">", + "re='", ), - 6918..6919, + 6846..6851, ), ( Token( - "/", + "\\b", ), - 6919..6920, + 6851..6853, ), ( Token( - "dev", + "Copyright", ), - 6920..6923, + 6853..6862, ), ( Token( - "/", + "\\b", ), - 6923..6924, + 6862..6864, ), ( Token( - "null", + ".+", ), - 6924..6928, + 6864..6866, ), ( Token( - ")", + "\\b", ), - 6928..6929, + 6866..6868, ), ( Token( - "=", + "Free Software Foundation", ), - 6930..6931, + 6868..6892, ), ( Token( - "~", + "\\b", ), - 6931..6932, + 6892..6894, ), ( Token( - "\\", + "'; [[ ", ), - 6933..6934, + 6894..6900, ), ( Token( - "$", + "\\$", ), - 6934..6935, + 6900..6902, ), ( Token( - "re", + "(sed --version 2>/dev/null) =~ ", ), - 6935..6937, + 6902..6933, ), ( Token( - "]", + "\\$", ), - 6938..6939, + 6933..6935, ), ( Token( - "]", + "re ]] ", ), - 6939..6940, + 6935..6941, ), ( Token( @@ -8525,15 +6431,9 @@ expression: tokens ), ( Token( - "echo", - ), - 6989..6993, - ), - ( - Token( - "\"", + "echo \"", ), - 6994..6995, + 6988..6995, ), ( Token( @@ -8555,57 +6455,15 @@ expression: tokens ), ( Token( - "\"", - ), - 7001..7002, - ), - ( - Token( - "|", - ), - 7003..7004, - ), - ( - Token( - "sed", - ), - 7005..7008, - ), - ( - Token( - "\"", - ), - 7009..7010, - ), - ( - Token( - "s", - ), - 7010..7011, - ), - ( - Token( - "/", - ), - 7011..7012, - ), - ( - Token( - "^", - ), - 7012..7013, - ), - ( - Token( - "\\", + "\" | sed \"s/^", ), - 7013..7014, + 7001..7013, ), ( Token( - "(", + "\\(", ), - 7014..7015, + 7013..7015, ), ( Token( @@ -8615,15 +6473,9 @@ expression: tokens ), ( Token( - "\\", - ), - 7016..7017, - ), - ( - Token( - ")", + "\\)", ), - 7017..7018, + 7016..7018, ), ( Token( @@ -8633,39 +6485,21 @@ expression: tokens ), ( Token( - "\\", - ), - 7019..7020, - ), - ( - Token( - "U", - ), - 7020..7021, - ), - ( - Token( - "\\", - ), - 7021..7022, - ), - ( - Token( - "1", + "\\U", ), - 7022..7023, + 7019..7021, ), ( Token( - "/", + "\\1", ), - 7023..7024, + 7021..7023, ), ( Token( - "\"", + "/\" ", ), - 7024..7025, + 7023..7026, ), ( Token( @@ -9071,21 +6905,9 @@ expression: tokens ), ( Token( - "printf", + "printf \"%", ), - 7395..7401, - ), - ( - Token( - "\"", - ), - 7402..7403, - ), - ( - Token( - "%", - ), - 7403..7404, + 7394..7404, ), ( Token( @@ -9107,57 +6929,9 @@ expression: tokens ), ( Token( - "s", - ), - 7412..7413, - ), - ( - Token( - "\"", - ), - 7413..7414, - ), - ( - Token( - "\"", - ), - 7415..7416, - ), - ( - Token( - "\"", - ), - 7416..7417, - ), - ( - Token( - "|", - ), - 7418..7419, - ), - ( - Token( - "tr", + "s\" \"\" | tr \" \" \"", ), - 7420..7422, - ), - ( - Token( - "\"", - ), - 7423..7424, - ), - ( - Token( - "\"", - ), - 7425..7426, - ), - ( - Token( - "\"", - ), - 7427..7428, + 7412..7428, ), ( Token( @@ 
-9179,9 +6953,9 @@ expression: tokens ), ( Token( - "\"", + "\" ", ), - 7433..7434, + 7433..7435, ), ( Token( @@ -9473,21 +7247,9 @@ expression: tokens ), ( Token( - "printf", + "printf \"%", ), - 7712..7718, - ), - ( - Token( - "\"", - ), - 7719..7720, - ), - ( - Token( - "%", - ), - 7720..7721, + 7711..7721, ), ( Token( @@ -9509,57 +7271,9 @@ expression: tokens ), ( Token( - "s", - ), - 7729..7730, - ), - ( - Token( - "\"", - ), - 7730..7731, - ), - ( - Token( - "\"", - ), - 7732..7733, - ), - ( - Token( - "\"", - ), - 7733..7734, - ), - ( - Token( - "|", - ), - 7735..7736, - ), - ( - Token( - "tr", + "s\" \"\" | tr \" \" \"", ), - 7737..7739, - ), - ( - Token( - "\"", - ), - 7740..7741, - ), - ( - Token( - "\"", - ), - 7742..7743, - ), - ( - Token( - "\"", - ), - 7744..7745, + 7729..7745, ), ( Token( @@ -9581,9 +7295,9 @@ expression: tokens ), ( Token( - "\"", + "\" ", ), - 7750..7751, + 7750..7752, ), ( Token( diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha040__string_escapes.snap b/tests/grammar/snapshots/r#mod__grammar__alpha040__string_escapes.snap new file mode 100644 index 0000000..283ccef --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha040__string_escapes.snap @@ -0,0 +1,60 @@ +--- +source: tests/grammar/alpha040.rs +expression: "compiler.tokenize(r#\"\"hello\\nworld\\t\\\"quote\\\"\"\"#)" +--- +[ + ( + Token( + "\"", + ), + 0..1, + ), + ( + Token( + "hello", + ), + 1..6, + ), + ( + Token( + "\\n", + ), + 6..8, + ), + ( + Token( + "world", + ), + 8..13, + ), + ( + Token( + "\\t", + ), + 13..15, + ), + ( + Token( + "\\\"", + ), + 15..17, + ), + ( + Token( + "quote", + ), + 17..22, + ), + ( + Token( + "\\\"", + ), + 22..24, + ), + ( + Token( + "\"", + ), + 24..25, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha040__string_multi_interpolation.snap b/tests/grammar/snapshots/r#mod__grammar__alpha040__string_multi_interpolation.snap new file mode 100644 index 0000000..8a0ced0 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha040__string_multi_interpolation.snap @@ -0,0 +1,84 @@ +--- +source: tests/grammar/alpha040.rs +expression: "compiler.tokenize(r#\"\"{a} and {b} and {c}\"\"#)" +--- +[ + ( + Token( + "\"", + ), + 0..1, + ), + ( + Token( + "{", + ), + 1..2, + ), + ( + Token( + "a", + ), + 2..3, + ), + ( + Token( + "}", + ), + 3..4, + ), + ( + Token( + " and ", + ), + 4..9, + ), + ( + Token( + "{", + ), + 9..10, + ), + ( + Token( + "b", + ), + 10..11, + ), + ( + Token( + "}", + ), + 11..12, + ), + ( + Token( + " and ", + ), + 12..17, + ), + ( + Token( + "{", + ), + 17..18, + ), + ( + Token( + "c", + ), + 18..19, + ), + ( + Token( + "}", + ), + 19..20, + ), + ( + Token( + "\"", + ), + 20..21, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha040__string_nested_braces.snap b/tests/grammar/snapshots/r#mod__grammar__alpha040__string_nested_braces.snap new file mode 100644 index 0000000..88a8bbb --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha040__string_nested_braces.snap @@ -0,0 +1,120 @@ +--- +source: tests/grammar/alpha040.rs +expression: "compiler.tokenize(r#\"\"text {if true { \"inner\" } else { \"other\" }} end\"\"#)" +--- +[ + ( + Token( + "\"", + ), + 0..1, + ), + ( + Token( + "text ", + ), + 1..6, + ), + ( + Token( + "{", + ), + 6..7, + ), + ( + Token( + "if", + ), + 7..9, + ), + ( + Token( + "true", + ), + 10..14, + ), + ( + Token( + "{", + ), + 15..16, + ), + ( + Token( + "\"", + ), + 17..18, + ), + ( + Token( + "inner", + ), + 18..23, + ), + ( + Token( + "\"", + ), + 23..24, + ), + ( 
+ Token( + "}", + ), + 25..26, + ), + ( + Token( + "else", + ), + 27..31, + ), + ( + Token( + "{", + ), + 32..33, + ), + ( + Token( + "\"", + ), + 34..35, + ), + ( + Token( + "other", + ), + 35..40, + ), + ( + Token( + "\"", + ), + 40..41, + ), + ( + Token( + "}", + ), + 42..43, + ), + ( + Token( + "}", + ), + 43..44, + ), + ( + Token( + " end", + ), + 44..48, + ), + ( + Token( + "\"", + ), + 48..49, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha040__string_trailing_backslash.snap b/tests/grammar/snapshots/r#mod__grammar__alpha040__string_trailing_backslash.snap new file mode 100644 index 0000000..6b4eb6d --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha040__string_trailing_backslash.snap @@ -0,0 +1,24 @@ +--- +source: tests/grammar/alpha040.rs +expression: "compiler.tokenize(r#\"\"test\\\"\"#)" +--- +[ + ( + Token( + "\"", + ), + 0..1, + ), + ( + Token( + "test", + ), + 1..5, + ), + ( + Token( + "\\\"", + ), + 5..7, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha040__triple_dollar.snap b/tests/grammar/snapshots/r#mod__grammar__alpha040__triple_dollar.snap new file mode 100644 index 0000000..9f108a9 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha040__triple_dollar.snap @@ -0,0 +1,24 @@ +--- +source: tests/grammar/alpha040.rs +expression: "compiler.tokenize(\"$$$\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "$", + ), + 1..2, + ), + ( + Token( + "$", + ), + 2..3, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha040__unclosed_command.snap b/tests/grammar/snapshots/r#mod__grammar__alpha040__unclosed_command.snap new file mode 100644 index 0000000..6942daf --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha040__unclosed_command.snap @@ -0,0 +1,18 @@ +--- +source: tests/grammar/alpha040.rs +expression: "compiler.tokenize(\"$echo test\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "echo test", + ), + 1..10, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha040__unclosed_string.snap b/tests/grammar/snapshots/r#mod__grammar__alpha040__unclosed_string.snap new file mode 100644 index 0000000..fe91ad1 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha040__unclosed_string.snap @@ -0,0 +1,18 @@ +--- +source: tests/grammar/alpha040.rs +expression: "compiler.tokenize(r#\"\"unclosed\"#)" +--- +[ + ( + Token( + "\"", + ), + 0..1, + ), + ( + Token( + "unclosed", + ), + 1..9, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha040__unfinished_function_call-2.snap b/tests/grammar/snapshots/r#mod__grammar__alpha040__unfinished_function_call-2.snap index 033dde5..1e0b3ac 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha040__unfinished_function_call-2.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha040__unfinished_function_call-2.snap @@ -242,29 +242,11 @@ expression: result ( Text( ( - "text", - 165..169, + "text.txt", + 165..173, ), ), - 165..169, - ), - ( - Text( - ( - ".", - 169..170, - ), - ), - 169..170, - ), - ( - Text( - ( - "txt", - 170..173, - ), - ), - 170..173, + 165..173, ), ], ), diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha040__unfinished_function_call.snap b/tests/grammar/snapshots/r#mod__grammar__alpha040__unfinished_function_call.snap index d008c71..d7b8f2a 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha040__unfinished_function_call.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha040__unfinished_function_call.snap @@ -41,21 +41,9 @@ expression: tokens ), ( Token( 
- "std", + "std/array", ), - 37..40, - ), - ( - Token( - "/", - ), - 40..41, - ), - ( - Token( - "array", - ), - 41..46, + 37..46, ), ( Token( @@ -263,21 +251,9 @@ expression: tokens ), ( Token( - "text", - ), - 165..169, - ), - ( - Token( - ".", - ), - 169..170, - ), - ( - Token( - "txt", + "text.txt", ), - 170..173, + 165..173, ), ( Token( diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha050__command_backslash.snap b/tests/grammar/snapshots/r#mod__grammar__alpha050__command_backslash.snap new file mode 100644 index 0000000..715b5b6 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha050__command_backslash.snap @@ -0,0 +1,30 @@ +--- +source: tests/grammar/alpha050.rs +expression: "compiler.tokenize(r#\"$test\\n$\"#)" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "test", + ), + 1..5, + ), + ( + Token( + "\\n", + ), + 5..7, + ), + ( + Token( + "$", + ), + 7..8, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha050__command_escapes.snap b/tests/grammar/snapshots/r#mod__grammar__alpha050__command_escapes.snap new file mode 100644 index 0000000..36cdd45 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha050__command_escapes.snap @@ -0,0 +1,48 @@ +--- +source: tests/grammar/alpha050.rs +expression: "compiler.tokenize(r#\"$echo\\ with\\ spaces$\"#)" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "echo", + ), + 1..5, + ), + ( + Token( + "\\ ", + ), + 5..7, + ), + ( + Token( + "with", + ), + 7..11, + ), + ( + Token( + "\\ ", + ), + 11..13, + ), + ( + Token( + "spaces", + ), + 13..19, + ), + ( + Token( + "$", + ), + 19..20, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha050__command_nested_braces.snap b/tests/grammar/snapshots/r#mod__grammar__alpha050__command_nested_braces.snap new file mode 100644 index 0000000..7bfc791 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha050__command_nested_braces.snap @@ -0,0 +1,90 @@ +--- +source: tests/grammar/alpha050.rs +expression: "compiler.tokenize(r#\"$echo {if true { 1 } else { 0 }}$\"#)" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "echo ", + ), + 1..6, + ), + ( + Token( + "{", + ), + 6..7, + ), + ( + Token( + "if", + ), + 7..9, + ), + ( + Token( + "true", + ), + 10..14, + ), + ( + Token( + "{", + ), + 15..16, + ), + ( + Token( + "1", + ), + 17..18, + ), + ( + Token( + "}", + ), + 19..20, + ), + ( + Token( + "else", + ), + 21..25, + ), + ( + Token( + "{", + ), + 26..27, + ), + ( + Token( + "0", + ), + 28..29, + ), + ( + Token( + "}", + ), + 30..31, + ), + ( + Token( + "}", + ), + 31..32, + ), + ( + Token( + "$", + ), + 32..33, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha050__comments_in_ifs.snap b/tests/grammar/snapshots/r#mod__grammar__alpha050__comments_in_ifs.snap index 3a867fd..23da1e7 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha050__comments_in_ifs.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha050__comments_in_ifs.snap @@ -376,29 +376,11 @@ expression: parse(&tokenize(input)) ( Text( ( - "Entry", - 321..326, + "Entry not allowed", + 321..338, ), ), - 321..326, - ), - ( - Text( - ( - "not", - 327..330, - ), - ), - 327..330, - ), - ( - Text( - ( - "allowed", - 331..338, - ), - ), - 331..338, + 321..338, ), ], ), diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha050__context_command_to_main.snap b/tests/grammar/snapshots/r#mod__grammar__alpha050__context_command_to_main.snap new file mode 100644 index 0000000..92c49c1 --- /dev/null +++ 
b/tests/grammar/snapshots/r#mod__grammar__alpha050__context_command_to_main.snap @@ -0,0 +1,42 @@ +--- +source: tests/grammar/alpha050.rs +expression: "compiler.tokenize(\"$cmd {expr}$\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "cmd ", + ), + 1..5, + ), + ( + Token( + "{", + ), + 5..6, + ), + ( + Token( + "expr", + ), + 6..10, + ), + ( + Token( + "}", + ), + 10..11, + ), + ( + Token( + "$", + ), + 11..12, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha050__context_multiple_switches.snap b/tests/grammar/snapshots/r#mod__grammar__alpha050__context_multiple_switches.snap new file mode 100644 index 0000000..731fac3 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha050__context_multiple_switches.snap @@ -0,0 +1,60 @@ +--- +source: tests/grammar/alpha050.rs +expression: "compiler.tokenize(r#\"\"a {$b$} c\"\"#)" +--- +[ + ( + Token( + "\"", + ), + 0..1, + ), + ( + Token( + "a ", + ), + 1..3, + ), + ( + Token( + "{", + ), + 3..4, + ), + ( + Token( + "$", + ), + 4..5, + ), + ( + Token( + "b", + ), + 5..6, + ), + ( + Token( + "$", + ), + 6..7, + ), + ( + Token( + "}", + ), + 7..8, + ), + ( + Token( + " c", + ), + 8..10, + ), + ( + Token( + "\"", + ), + 10..11, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha050__context_string_to_main.snap b/tests/grammar/snapshots/r#mod__grammar__alpha050__context_string_to_main.snap new file mode 100644 index 0000000..baf3784 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha050__context_string_to_main.snap @@ -0,0 +1,48 @@ +--- +source: tests/grammar/alpha050.rs +expression: "compiler.tokenize(r#\"\"start {expr} end\"\"#)" +--- +[ + ( + Token( + "\"", + ), + 0..1, + ), + ( + Token( + "start ", + ), + 1..7, + ), + ( + Token( + "{", + ), + 7..8, + ), + ( + Token( + "expr", + ), + 8..12, + ), + ( + Token( + "}", + ), + 12..13, + ), + ( + Token( + " end", + ), + 13..17, + ), + ( + Token( + "\"", + ), + 17..18, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha050__dollar_at_end.snap b/tests/grammar/snapshots/r#mod__grammar__alpha050__dollar_at_end.snap new file mode 100644 index 0000000..161e57e --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha050__dollar_at_end.snap @@ -0,0 +1,18 @@ +--- +source: tests/grammar/alpha050.rs +expression: "compiler.tokenize(\"text$\")" +--- +[ + ( + Token( + "text", + ), + 0..4, + ), + ( + Token( + "$", + ), + 4..5, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha050__dollar_dollar.snap b/tests/grammar/snapshots/r#mod__grammar__alpha050__dollar_dollar.snap new file mode 100644 index 0000000..32c0089 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha050__dollar_dollar.snap @@ -0,0 +1,18 @@ +--- +source: tests/grammar/alpha050.rs +expression: "compiler.tokenize(\"$$\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "$", + ), + 1..2, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha050__dollar_dollar_char.snap b/tests/grammar/snapshots/r#mod__grammar__alpha050__dollar_dollar_char.snap new file mode 100644 index 0000000..0ab2db3 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha050__dollar_dollar_char.snap @@ -0,0 +1,24 @@ +--- +source: tests/grammar/alpha050.rs +expression: "compiler.tokenize(\"$$x\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "$", + ), + 1..2, + ), + ( + Token( + "x", + ), + 2..3, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha050__dollar_dollar_space.snap 
b/tests/grammar/snapshots/r#mod__grammar__alpha050__dollar_dollar_space.snap new file mode 100644 index 0000000..5ed99f2 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha050__dollar_dollar_space.snap @@ -0,0 +1,18 @@ +--- +source: tests/grammar/alpha050.rs +expression: "compiler.tokenize(\"$$ \")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "$", + ), + 1..2, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha050__double_dollar_brace.snap b/tests/grammar/snapshots/r#mod__grammar__alpha050__double_dollar_brace.snap new file mode 100644 index 0000000..a9437dc --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha050__double_dollar_brace.snap @@ -0,0 +1,42 @@ +--- +source: tests/grammar/alpha050.rs +expression: "compiler.tokenize(\"$$ {var}$\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "$", + ), + 1..2, + ), + ( + Token( + "{", + ), + 3..4, + ), + ( + Token( + "var", + ), + 4..7, + ), + ( + Token( + "}", + ), + 7..8, + ), + ( + Token( + "$", + ), + 8..9, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha050__double_dollar_end.snap b/tests/grammar/snapshots/r#mod__grammar__alpha050__double_dollar_end.snap new file mode 100644 index 0000000..32c0089 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha050__double_dollar_end.snap @@ -0,0 +1,18 @@ +--- +source: tests/grammar/alpha050.rs +expression: "compiler.tokenize(\"$$\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "$", + ), + 1..2, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha050__double_dollar_no_space.snap b/tests/grammar/snapshots/r#mod__grammar__alpha050__double_dollar_no_space.snap new file mode 100644 index 0000000..b4c8eae --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha050__double_dollar_no_space.snap @@ -0,0 +1,24 @@ +--- +source: tests/grammar/alpha050.rs +expression: "compiler.tokenize(\"$$failed\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "$", + ), + 1..2, + ), + ( + Token( + "failed", + ), + 2..8, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha050__double_dollar_whitespace.snap b/tests/grammar/snapshots/r#mod__grammar__alpha050__double_dollar_whitespace.snap new file mode 100644 index 0000000..9acfbb3 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha050__double_dollar_whitespace.snap @@ -0,0 +1,18 @@ +--- +source: tests/grammar/alpha050.rs +expression: "compiler.tokenize(\"$$ \\n\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "$", + ), + 1..2, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha050__empty_input.snap b/tests/grammar/snapshots/r#mod__grammar__alpha050__empty_input.snap new file mode 100644 index 0000000..d0df723 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha050__empty_input.snap @@ -0,0 +1,5 @@ +--- +source: tests/grammar/alpha050.rs +expression: "compiler.tokenize(\"\")" +--- +[] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha050__empty_string.snap b/tests/grammar/snapshots/r#mod__grammar__alpha050__empty_string.snap new file mode 100644 index 0000000..704a806 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha050__empty_string.snap @@ -0,0 +1,18 @@ +--- +source: tests/grammar/alpha050.rs +expression: "compiler.tokenize(r#\"\"\"\"#)" +--- +[ + ( + Token( + "\"", + ), + 0..1, + ), + ( + Token( + "\"", + ), + 1..2, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha050__just_dollar.snap 
b/tests/grammar/snapshots/r#mod__grammar__alpha050__just_dollar.snap new file mode 100644 index 0000000..9797d9f --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha050__just_dollar.snap @@ -0,0 +1,12 @@ +--- +source: tests/grammar/alpha050.rs +expression: "compiler.tokenize(\"$\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha050__mismatched_braces.snap b/tests/grammar/snapshots/r#mod__grammar__alpha050__mismatched_braces.snap new file mode 100644 index 0000000..b714a60 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha050__mismatched_braces.snap @@ -0,0 +1,36 @@ +--- +source: tests/grammar/alpha050.rs +expression: "compiler.tokenize(r#\"\"{{{\"\"#)" +--- +[ + ( + Token( + "\"", + ), + 0..1, + ), + ( + Token( + "{", + ), + 1..2, + ), + ( + Token( + "{", + ), + 2..3, + ), + ( + Token( + "{", + ), + 3..4, + ), + ( + Token( + "\"", + ), + 4..5, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha050__mv_files.snap b/tests/grammar/snapshots/r#mod__grammar__alpha050__mv_files.snap index 1f9b9eb..66165e1 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha050__mv_files.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha050__mv_files.snap @@ -18,38 +18,11 @@ expression: parse_unwrap(&tokenize(input)) ( Text( ( - "/", - 9..10, + "/tmp/a", + 9..15, ), ), - 9..10, - ), - ( - Text( - ( - "tmp", - 10..13, - ), - ), - 10..13, - ), - ( - Text( - ( - "/", - 13..14, - ), - ), - 13..14, - ), - ( - Text( - ( - "a", - 14..15, - ), - ), - 14..15, + 9..15, ), ], ), @@ -61,38 +34,11 @@ expression: parse_unwrap(&tokenize(input)) ( Text( ( - "/", - 18..19, - ), - ), - 18..19, - ), - ( - Text( - ( - "tmp", - 19..22, - ), - ), - 19..22, - ), - ( - Text( - ( - "/", - 22..23, - ), - ), - 22..23, - ), - ( - Text( - ( - "b", - 23..24, + "/tmp/b", + 18..24, ), ), - 23..24, + 18..24, ), ], ), @@ -125,38 +71,11 @@ expression: parse_unwrap(&tokenize(input)) ( Text( ( - "/", - 42..43, + "/tmp/a", + 42..48, ), ), - 42..43, - ), - ( - Text( - ( - "tmp", - 43..46, - ), - ), - 43..46, - ), - ( - Text( - ( - "/", - 46..47, - ), - ), - 46..47, - ), - ( - Text( - ( - "a", - 47..48, - ), - ), - 47..48, + 42..48, ), ], ), @@ -168,38 +87,11 @@ expression: parse_unwrap(&tokenize(input)) ( Text( ( - "/", - 51..52, - ), - ), - 51..52, - ), - ( - Text( - ( - "tmp", - 52..55, - ), - ), - 52..55, - ), - ( - Text( - ( - "/", - 55..56, - ), - ), - 55..56, - ), - ( - Text( - ( - "b", - 56..57, + "/tmp/b", + 51..57, ), ), - 56..57, + 51..57, ), ], ), @@ -227,38 +119,11 @@ expression: parse_unwrap(&tokenize(input)) ( Text( ( - "/", - 68..69, + "/tmp/a", + 68..74, ), ), - 68..69, - ), - ( - Text( - ( - "tmp", - 69..72, - ), - ), - 69..72, - ), - ( - Text( - ( - "/", - 72..73, - ), - ), - 72..73, - ), - ( - Text( - ( - "a", - 73..74, - ), - ), - 73..74, + 68..74, ), ], ), @@ -270,38 +135,11 @@ expression: parse_unwrap(&tokenize(input)) ( Text( ( - "/", - 77..78, - ), - ), - 77..78, - ), - ( - Text( - ( - "tmp", - 78..81, - ), - ), - 78..81, - ), - ( - Text( - ( - "/", - 81..82, - ), - ), - 81..82, - ), - ( - Text( - ( - "b", - 82..83, + "/tmp/b", + 77..83, ), ), - 82..83, + 77..83, ), ], ), @@ -389,38 +227,11 @@ expression: parse_unwrap(&tokenize(input)) ( Text( ( - "/", - 147..148, + "/tmp/a", + 147..153, ), ), - 147..148, - ), - ( - Text( - ( - "tmp", - 148..151, - ), - ), - 148..151, - ), - ( - Text( - ( - "/", - 151..152, - ), - ), - 151..152, - ), - ( - Text( - ( - "a", - 152..153, - ), - ), - 152..153, + 147..153, ), ], ), @@ 
-432,38 +243,11 @@ expression: parse_unwrap(&tokenize(input)) ( Text( ( - "/", - 156..157, - ), - ), - 156..157, - ), - ( - Text( - ( - "tmp", - 157..160, - ), - ), - 157..160, - ), - ( - Text( - ( - "/", - 160..161, - ), - ), - 160..161, - ), - ( - Text( - ( - "b", - 161..162, + "/tmp/b", + 156..162, ), ), - 161..162, + 156..162, ), ], ), diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha050__numbers.snap b/tests/grammar/snapshots/r#mod__grammar__alpha050__numbers.snap new file mode 100644 index 0000000..7edaf2d --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha050__numbers.snap @@ -0,0 +1,63 @@ +--- +source: tests/grammar/alpha050.rs +expression: parse_unwrap(&tokenize(input)) +--- +[ + ( + Statement( + ( + Expression( + ( + Int( + ( + 2, + 5..6, + ), + ), + 5..6, + ), + ), + 5..6, + ), + ), + 5..6, + ), + ( + Statement( + ( + Expression( + ( + Number( + ( + 2.4, + 11..14, + ), + ), + 11..14, + ), + ), + 11..14, + ), + ), + 11..14, + ), + ( + Statement( + ( + Expression( + ( + Number( + ( + 0.2, + 19..21, + ), + ), + 19..21, + ), + ), + 19..21, + ), + ), + 19..21, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha050__single_char.snap b/tests/grammar/snapshots/r#mod__grammar__alpha050__single_char.snap new file mode 100644 index 0000000..dcb4256 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha050__single_char.snap @@ -0,0 +1,12 @@ +--- +source: tests/grammar/alpha050.rs +expression: "compiler.tokenize(\"x\")" +--- +[ + ( + Token( + "x", + ), + 0..1, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha050__single_dollar_cmd.snap b/tests/grammar/snapshots/r#mod__grammar__alpha050__single_dollar_cmd.snap new file mode 100644 index 0000000..e398035 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha050__single_dollar_cmd.snap @@ -0,0 +1,24 @@ +--- +source: tests/grammar/alpha050.rs +expression: "compiler.tokenize(\"$echo$\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "echo", + ), + 1..5, + ), + ( + Token( + "$", + ), + 5..6, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha050__stdlib_date.snap b/tests/grammar/snapshots/r#mod__grammar__alpha050__stdlib_date.snap index 01fe922..5454773 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha050__stdlib_date.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha050__stdlib_date.snap @@ -410,38 +410,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "%", - 1101..1102, + "%F %T", + 1101..1106, ), ), - 1101..1102, - ), - ( - Text( - ( - "F", - 1102..1103, - ), - ), - 1102..1103, - ), - ( - Text( - ( - "%", - 1104..1105, - ), - ), - 1104..1105, - ), - ( - Text( - ( - "T", - 1105..1106, - ), - ), - 1105..1106, + 1101..1106, ), ], ), @@ -519,20 +492,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "-", - 1166..1167, - ), - ), - 1166..1167, - ), - ( - Text( - ( - "u", - 1167..1168, + "-u", + 1166..1168, ), ), - 1167..1168, + 1166..1168, ), ], ), @@ -587,9 +551,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "date", + "date ", ), - 1233..1237, + 1232..1238, ), ( Expression( @@ -605,23 +569,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), 1238..1248, ), - ( - CommandOption( - "-d", - ), - 1249..1251, - ), ( Text( - "\"", + " -d \"@", ), - 1252..1253, - ), - ( - Text( - "@", - ), - 1253..1254, + 1248..1254, ), ( Expression( @@ -639,21 +591,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 1260..1261, - ), - ( - Text( - "+", - ), - 1262..1263, - ), - ( - Text( - 
"\"", + "\" +\"", ), - 1263..1264, + 1260..1264, ), ( Expression( @@ -671,45 +611,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 1272..1273, - ), - ( - Text( - "2", - ), - 1274..1275, - ), - ( - Text( - ">", - ), - 1275..1276, - ), - ( - Text( - "/", - ), - 1276..1277, - ), - ( - Text( - "dev", - ), - 1277..1280, - ), - ( - Text( - "/", - ), - 1280..1281, - ), - ( - Text( - "null", + "\" 2>/dev/null ", ), - 1281..1285, + 1272..1286, ), ( Text( @@ -762,9 +666,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "date", + "date ", ), - 1360..1364, + 1359..1365, ), ( Expression( @@ -780,23 +684,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), 1365..1375, ), - ( - CommandOption( - "-j", - ), - 1376..1378, - ), - ( - CommandOption( - "-r", - ), - 1379..1381, - ), ( Text( - "\"", + " -j -r \"", ), - 1382..1383, + 1375..1383, ), ( Expression( @@ -814,21 +706,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" +\"", ), - 1389..1390, - ), - ( - Text( - "+", - ), - 1391..1392, - ), - ( - Text( - "\"", - ), - 1392..1393, + 1389..1393, ), ( Expression( @@ -846,9 +726,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 1401..1402, + 1401..1403, ), ( Text( @@ -1005,38 +885,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "%", - 1802..1803, - ), - ), - 1802..1803, - ), - ( - Text( - ( - "F", - 1803..1804, + "%F %T", + 1802..1807, ), ), - 1803..1804, - ), - ( - Text( - ( - "%", - 1805..1806, - ), - ), - 1805..1806, - ), - ( - Text( - ( - "T", - 1806..1807, - ), - ), - 1806..1807, + 1802..1807, ), ], ), @@ -1114,20 +967,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "-", - 1866..1867, - ), - ), - 1866..1867, - ), - ( - Text( - ( - "u", - 1867..1868, + "-u", + 1866..1868, ), ), - 1867..1868, + 1866..1868, ), ], ), @@ -1184,9 +1028,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "date", + "date ", ), - 1933..1937, + 1932..1938, ), ( Expression( @@ -1202,47 +1046,23 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), 1938..1948, ), - ( - CommandOption( - "-d", - ), - 1949..1951, - ), - ( - Text( - "\"", - ), - 1952..1953, - ), - ( - Escape( - "$", - ), - 1953..1955, - ), ( Text( - "(", + " -d \"", ), - 1955..1956, + 1948..1953, ), ( Text( - "date", + "\\$", ), - 1956..1960, - ), - ( - CommandOption( - "-d", - ), - 1961..1963, + 1953..1955, ), ( Text( - "\"", + "(date -d \"", ), - 1964..1965, + 1955..1965, ), ( Expression( @@ -1260,21 +1080,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 1971..1972, - ), - ( - Text( - "+", + "\" +\"", ), - 1973..1974, - ), - ( - Text( - "\"", - ), - 1974..1975, + 1971..1975, ), ( Expression( @@ -1292,111 +1100,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 1983..1984, - ), - ( - Text( - "2", + "\" 2>/dev/null)\" +%s 2>/dev/null ", ), - 1985..1986, - ), - ( - Text( - ">", - ), - 1986..1987, - ), - ( - Text( - "/", - ), - 1987..1988, - ), - ( - Text( - "dev", - ), - 1988..1991, - ), - ( - Text( - "/", - ), - 1991..1992, - ), - ( - Text( - "null", - ), - 1992..1996, - ), - ( - Text( - ")", - ), - 1996..1997, - ), - ( - Text( - "\"", - ), - 1997..1998, - ), - ( - Text( - "+", - ), - 1999..2000, - ), - ( - Text( - "%", - ), - 2000..2001, - ), - ( - Text( - "s", - ), - 2001..2002, - ), - ( - Text( - "2", - ), - 2003..2004, - ), - ( - Text( - ">", - ), - 2004..2005, - ), - ( - Text( - "/", - ), - 2005..2006, - ), - ( - Text( - "dev", - ), - 2006..2009, - ), - ( - Text( - "/", - ), - 
2009..2010, - ), - ( - Text( - "null", - ), - 2010..2014, + 1983..2015, ), ( Text( @@ -1451,9 +1157,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "date", + "date ", ), - 2089..2093, + 2088..2094, ), ( Expression( @@ -1469,23 +1175,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), 2094..2104, ), - ( - CommandOption( - "-j", - ), - 2105..2107, - ), - ( - CommandOption( - "-f", - ), - 2108..2110, - ), ( Text( - "\"", + " -j -f \"", ), - 2111..2112, + 2104..2112, ), ( Expression( @@ -1503,15 +1197,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" \"", ), - 2120..2121, - ), - ( - Text( - "\"", - ), - 2122..2123, + 2120..2123, ), ( Expression( @@ -1529,27 +1217,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 2129..2130, - ), - ( - Text( - "+", - ), - 2131..2132, - ), - ( - Text( - "%", - ), - 2132..2133, - ), - ( - Text( - "s", + "\" +%s ", ), - 2133..2134, + 2129..2135, ), ( Text( @@ -1690,27 +1360,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "date", - ), - 2311..2315, - ), - ( - Text( - "+", - ), - 2316..2317, - ), - ( - Text( - "%", - ), - 2317..2318, - ), - ( - Text( - "s", + "date +%s ", ), - 2318..2319, + 2310..2320, ), ( Text( diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha050__stdlib_env.snap b/tests/grammar/snapshots/r#mod__grammar__alpha050__stdlib_env.snap index cbc8e8a..5244bb9 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha050__stdlib_env.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha050__stdlib_env.snap @@ -124,20 +124,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - ".", - 202..203, + ".env", + 202..206, ), ), - 202..203, - ), - ( - Text( - ( - "env", - 203..206, - ), - ), - 203..206, + 202..206, ), ], ), @@ -183,25 +174,19 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", + "echo \"", ), - 240..244, + 239..246, ), ( Text( - "\"", - ), - 245..246, - ), - ( - Escape( - "$", + "\\$", ), 246..248, ), ( - Escape( - "{", + Text( + "\\{", ), 248..250, ), @@ -236,15 +221,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "}", - ), - 263..264, - ), - ( - Text( - "\"", + "}\" ", ), - 264..265, + 263..266, ), ( Text( @@ -380,15 +359,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "source", - ), - 358..364, - ), - ( - Text( - "\"", + "source \"", ), - 365..366, + 357..366, ), ( Expression( @@ -406,9 +379,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 372..373, + 372..374, ), ( Text( @@ -448,25 +421,19 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", + "echo \"", ), - 399..403, + 398..405, ), ( Text( - "\"", - ), - 404..405, - ), - ( - Escape( - "$", + "\\$", ), 405..407, ), ( - Escape( - "{", + Text( + "\\{", ), 407..409, ), @@ -501,15 +468,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "}", + "}\" ", ), - 422..423, - ), - ( - Text( - "\"", - ), - 423..424, + 422..425, ), ( Text( @@ -613,20 +574,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - ".", - 545..546, - ), - ), - 545..546, - ), - ( - Text( - ( - "env", - 546..549, + ".env", + 545..549, ), ), - 546..549, + 545..549, ), ], ), @@ -662,39 +614,21 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "export", + "export \"", ), - 572..578, + 571..580, ), ( Text( - "\"", - ), - 579..580, - ), - ( - Escape( - "$", + "\\$", ), 580..582, ), ( Text( - "(", - ), - 582..583, - ), - ( - Text( - "xargs", - ), - 583..588, - ), - ( - Text( - "<", + "(xargs < ", ), - 589..590, 
+ 582..591, ), ( Expression( @@ -712,45 +646,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - ")", - ), - 597..598, - ), - ( - Text( - "\"", - ), - 598..599, - ), - ( - Text( - ">", - ), - 600..601, - ), - ( - Text( - "/", - ), - 602..603, - ), - ( - Text( - "dev", - ), - 603..606, - ), - ( - Text( - "/", - ), - 606..607, - ), - ( - Text( - "null", + ")\" > /dev/null ", ), - 607..611, + 597..612, ), ( Text( @@ -841,37 +739,19 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "[", - ), - 722..723, - ), - ( - Text( - "[", + "[[ ! -z ", ), - 723..724, + 721..730, ), ( Text( - "!", - ), - 725..726, - ), - ( - CommandOption( - "-z", - ), - 727..729, - ), - ( - Escape( - "$", + "\\$", ), 730..732, ), ( - Escape( - "{", + Text( + "\\{", ), 732..734, ), @@ -906,33 +786,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "+", - ), - 748..749, - ), - ( - Text( - "z", - ), - 749..750, - ), - ( - Text( - "}", - ), - 750..751, - ), - ( - Text( - "]", - ), - 752..753, - ), - ( - Text( - "]", + "+z} ]] ", ), - 753..754, + 748..755, ), ( Text( @@ -1104,13 +960,13 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "readonly", + "readonly ", ), - 918..926, + 917..927, ), ( - Escape( - "$", + Text( + "\\$", ), 927..929, ), @@ -1139,19 +995,13 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "=", + "=\"", ), - 942..943, + 942..944, ), ( Text( - "\"", - ), - 943..944, - ), - ( - Escape( - "$", + "\\$", ), 944..946, ), @@ -1180,45 +1030,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 958..959, - ), - ( - Text( - "2", - ), - 960..961, - ), - ( - Text( - ">", - ), - 961..962, - ), - ( - Text( - "/", - ), - 963..964, - ), - ( - Text( - "dev", - ), - 964..967, - ), - ( - Text( - "/", - ), - 967..968, - ), - ( - Text( - "null", + "\" 2> /dev/null ", ), - 968..972, + 958..973, ), ( Text( @@ -1324,19 +1138,19 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", + "echo ", ), - 1081..1085, + 1080..1086, ), ( - Escape( - "$", + Text( + "\\$", ), 1086..1088, ), ( - Escape( - "{", + Text( + "\\{", ), 1088..1090, ), @@ -1371,9 +1185,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "}", + "} ", ), - 1104..1105, + 1104..1106, ), ( Text( @@ -1492,13 +1306,13 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "export", + "export ", ), - 1216..1222, + 1215..1223, ), ( - Escape( - "$", + Text( + "\\$", ), 1223..1225, ), @@ -1527,19 +1341,13 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "=", + "=\"", ), - 1238..1239, + 1238..1240, ), ( Text( - "\"", - ), - 1239..1240, - ), - ( - Escape( - "$", + "\\$", ), 1240..1242, ), @@ -1568,45 +1376,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 1254..1255, - ), - ( - Text( - "2", - ), - 1256..1257, - ), - ( - Text( - ">", - ), - 1257..1258, - ), - ( - Text( - "/", - ), - 1259..1260, - ), - ( - Text( - "dev", - ), - 1260..1263, - ), - ( - Text( - "/", - ), - 1263..1264, - ), - ( - Text( - "null", + "\" 2> /dev/null ", ), - 1264..1268, + 1254..1269, ), ( Text( @@ -1712,19 +1484,19 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", + "echo ", ), - 1375..1379, + 1374..1380, ), ( - Escape( - "$", + Text( + "\\$", ), 1380..1382, ), ( - Escape( - "{", + Text( + "\\{", ), 1382..1384, ), @@ -1759,9 +1531,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "}", + "} ", ), - 1398..1399, + 1398..1400, ), ( Text( @@ -1863,9 +1635,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "unset", + 
"unset ", ), - 1504..1509, + 1503..1510, ), ( Expression( @@ -1881,6 +1653,12 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), 1510..1516, ), + ( + Text( + " ", + ), + 1516..1517, + ), ( Text( "$", @@ -1980,51 +1758,21 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "[", - ), - 1603..1604, - ), - ( - CommandOption( - "-x", + "[ -x \"", ), - 1605..1607, + 1602..1609, ), ( Text( - "\"", - ), - 1608..1609, - ), - ( - Escape( - "$", + "\\$", ), 1609..1611, ), ( Text( - "(", + "(command -v \"", ), - 1611..1612, - ), - ( - Text( - "command", - ), - 1612..1619, - ), - ( - CommandOption( - "-v", - ), - 1620..1622, - ), - ( - Text( - "\"", - ), - 1623..1624, + 1611..1624, ), ( Expression( @@ -2042,27 +1790,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 1633..1634, - ), - ( - Text( - ")", - ), - 1634..1635, - ), - ( - Text( - "\"", - ), - 1635..1636, - ), - ( - Text( - "]", + "\")\" ] ", ), - 1637..1638, + 1633..1639, ), ( Text( @@ -2222,25 +1952,13 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "read", - ), - 1795..1799, - ), - ( - CommandOption( - "-p", + "read -p \"", ), - 1800..1802, + 1794..1804, ), ( Text( - "\"", - ), - 1803..1804, - ), - ( - Escape( - "$", + "\\$", ), 1804..1806, ), @@ -2269,9 +1987,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 1821..1822, + 1821..1823, ), ( Text( @@ -2311,21 +2029,21 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", + "echo ", ), - 1844..1848, + 1843..1849, ), ( - Escape( - "$", + Text( + "\\$", ), 1849..1851, ), ( Text( - "REPLY", + "REPLY ", ), - 1851..1856, + 1851..1857, ), ( Text( @@ -2428,31 +2146,13 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "read", + "read -s -p \"", ), - 1993..1997, - ), - ( - CommandOption( - "-s", - ), - 1998..2000, - ), - ( - CommandOption( - "-p", - ), - 2001..2003, + 1992..2005, ), ( Text( - "\"", - ), - 2004..2005, - ), - ( - Escape( - "$", + "\\$", ), 2005..2007, ), @@ -2481,9 +2181,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 2022..2023, + 2022..2024, ), ( Text( @@ -2513,33 +2213,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", - ), - 2036..2040, - ), - ( - Text( - "\"", - ), - 2041..2042, - ), - ( - Text( - "\"", - ), - 2042..2043, - ), - ( - Text( - ">", - ), - 2044..2045, - ), - ( - Text( - "&2", + "echo \"\" >&2 ", ), - 2045..2047, + 2035..2048, ), ( Text( @@ -2574,21 +2250,21 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", + "echo ", ), - 2067..2071, + 2066..2072, ), ( - Escape( - "$", + Text( + "\\$", ), 2072..2074, ), ( Text( - "REPLY", + "REPLY ", ), - 2074..2079, + 2074..2080, ), ( Text( @@ -2766,184 +2442,103 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "[", - 2373..2374, + " [", + 2372..2374, + ), + ), + 2372..2374, + ), + ( + Text( + ( + "\\x", + 2374..2376, ), ), - 2373..2374, + 2374..2376, ), ( - Escape( + Text( ( - "x1b", - 2374..2378, + "1b[1mY/", + 2376..2383, ), ), - 2374..2378, + 2376..2383, ), ( Text( ( - "[", - 2378..2379, + "\\x", + 2383..2385, ), ), - 2378..2379, + 2383..2385, ), ( Text( ( - "1mY", - 2379..2382, + "1b[0mn]", + 2385..2392, ), ), - 2379..2382, + 2385..2392, ), + ], + ), + 2371..2393, + ), + ( + "else", + 2394..2398, + ), + ( + Text( + [ ( Text( ( - "/", - 2382..2383, + " [y/", + 2400..2404, ), ), - 2382..2383, + 2400..2404, ), ( - Escape( + Text( ( - "x1b", - 2383..2387, + "\\x", + 2404..2406, ), ), - 2383..2387, + 2404..2406, ), ( Text( ( - "[", - 2387..2388, + 
"1b[1mN", + 2406..2412, ), ), - 2387..2388, + 2406..2412, ), ( Text( ( - "0mn", - 2388..2391, + "\\x", + 2412..2414, ), ), - 2388..2391, + 2412..2414, ), ( Text( ( - "]", - 2391..2392, + "1b[0m]", + 2414..2420, ), ), - 2391..2392, - ), - ], - ), - 2371..2393, - ), - ( - "else", - 2394..2398, - ), - ( - Text( - [ - ( - Text( - ( - "[", - 2401..2402, - ), - ), - 2401..2402, - ), - ( - Text( - ( - "y", - 2402..2403, - ), - ), - 2402..2403, - ), - ( - Text( - ( - "/", - 2403..2404, - ), - ), - 2403..2404, - ), - ( - Escape( - ( - "x1b", - 2404..2408, - ), - ), - 2404..2408, - ), - ( - Text( - ( - "[", - 2408..2409, - ), - ), - 2408..2409, - ), - ( - Text( - ( - "1mN", - 2409..2412, - ), - ), - 2409..2412, - ), - ( - Escape( - ( - "x1b", - 2412..2416, - ), - ), - 2412..2416, - ), - ( - Text( - ( - "[", - 2416..2417, - ), - ), - 2416..2417, - ), - ( - Text( - ( - "0m", - 2417..2419, - ), - ), - 2417..2419, - ), - ( - Text( - ( - "]", - 2419..2420, - ), - ), - 2419..2420, + 2414..2420, ), ], ), @@ -2983,33 +2578,21 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "printf", - ), - 2444..2450, - ), - ( - Text( - "\"", + "printf \"", ), - 2451..2452, - ), - ( - Escape( - "x1b", - ), - 2452..2456, + 2443..2452, ), ( Text( - "[", + "\\x", ), - 2456..2457, + 2452..2454, ), ( Text( - "1m", + "1b[1m", ), - 2457..2459, + 2454..2459, ), ( Expression( @@ -3025,23 +2608,17 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), 2459..2467, ), - ( - Escape( - "x1b", - ), - 2467..2471, - ), ( Text( - "[", + "\\x", ), - 2471..2472, + 2467..2469, ), ( Text( - "0m", + "1b[0m", ), - 2472..2474, + 2469..2474, ), ( Expression( @@ -3059,9 +2636,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 2490..2491, + 2490..2492, ), ( Text( @@ -3091,27 +2668,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "read", - ), - 2504..2508, - ), - ( - CommandOption( - "-s", - ), - 2509..2511, - ), - ( - CommandOption( - "-n", - ), - 2512..2514, - ), - ( - Text( - "1", + "read -s -n 1 ", ), - 2515..2516, + 2503..2517, ), ( Text( @@ -3141,27 +2700,21 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "printf", + "printf \"", ), - 2529..2535, + 2528..2537, ), ( Text( - "\"", - ), - 2536..2537, - ), - ( - Escape( - "n", + "\\n", ), 2537..2539, ), ( Text( - "\"", + "\" ", ), - 2539..2540, + 2539..2541, ), ( Text( @@ -3221,21 +2774,21 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", + "echo ", ), - 2584..2588, + 2583..2589, ), ( - Escape( - "$", + Text( + "\\$", ), 2589..2591, ), ( Text( - "REPLY", + "REPLY ", ), - 2591..2596, + 2591..2597, ), ( Text( @@ -3436,9 +2989,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "eval", + "eval ", ), - 2761..2765, + 2760..2766, ), ( Expression( @@ -3454,6 +3007,12 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), 2766..2775, ), + ( + Text( + " ", + ), + 2775..2776, + ), ( Text( "$", @@ -3567,15 +3126,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "id", - ), - 2915..2917, - ), - ( - CommandOption( - "-u", + "id -u ", ), - 2918..2920, + 2914..2921, ), ( Text( @@ -3773,6 +3326,12 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), 3094..3095, ), + ( + Text( + "", + ), + 3095..3096, + ), ( Expression( ( @@ -3798,21 +3357,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "=", - ), - 3109..3110, - ), - ( - Text( - "(", - ), - 3110..3111, - ), - ( - Text( - "\"", + "=(\"", ), - 3111..3112, + 3109..3112, ), ( Expression( @@ -3830,25 +3377,19 @@ expression: 
parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" \"", ), - 3120..3121, + 3120..3123, ), ( Text( - "\"", - ), - 3122..3123, - ), - ( - Escape( - "$", + "\\$", ), 3123..3125, ), ( - Escape( - "{", + Text( + "\\{", ), 3125..3127, ), @@ -3877,39 +3418,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "[", - ), - 3140..3141, - ), - ( - Text( - "@", - ), - 3141..3142, - ), - ( - Text( - "]", - ), - 3142..3143, - ), - ( - Text( - "}", - ), - 3143..3144, - ), - ( - Text( - "\"", - ), - 3144..3145, - ), - ( - Text( - ")", + "[@]}\") ", ), - 3145..3146, + 3140..3147, ), ( Text( @@ -3944,25 +3455,19 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "printf", + "printf \"", ), - 3161..3167, + 3160..3169, ), ( Text( - "\"", - ), - 3168..3169, - ), - ( - Escape( - "$", + "\\$", ), 3169..3171, ), ( - Escape( - "{", + Text( + "\\{", ), 3171..3173, ), @@ -3991,33 +3496,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "[", - ), - 3186..3187, - ), - ( - Text( - "@", - ), - 3187..3188, - ), - ( - Text( - "]", - ), - 3188..3189, - ), - ( - Text( - "}", - ), - 3189..3190, - ), - ( - Text( - "\"", + "[@]}\" ", ), - 3190..3191, + 3186..3192, ), ( Text( @@ -4118,13 +3599,13 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", + "echo ", ), - 3299..3303, + 3298..3304, ), ( - Escape( - "$", + Text( + "\\$", ), 3304..3306, ), @@ -4153,43 +3634,19 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "|", + " | sed -e 's/", ), - 3320..3321, + 3319..3332, ), ( Text( - "sed", - ), - 3322..3325, - ), - ( - CommandOption( - "-e", - ), - 3326..3328, - ), - ( - Text( - "'s", - ), - 3329..3331, - ), - ( - Text( - "/", - ), - 3331..3332, - ), - ( - Escape( - "\\", + "\\\\", ), 3332..3334, ), ( - Escape( - "\\", + Text( + "\\\\", ), 3334..3336, ), @@ -4199,107 +3656,35 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), 3336..3337, ), - ( - Escape( - "\\", - ), - 3337..3339, - ), - ( - Escape( - "\\", - ), - 3339..3341, - ), - ( - Escape( - "\\", - ), - 3341..3343, - ), - ( - Escape( - "\\", - ), - 3343..3345, - ), ( Text( - "/", - ), - 3345..3346, - ), - ( - Text( - "g'", - ), - 3346..3348, - ), - ( - CommandOption( - "-e", - ), - 3349..3351, - ), - ( - Text( - "\"", - ), - 3352..3353, - ), - ( - Text( - "s", - ), - 3353..3354, - ), - ( - Text( - "/", - ), - 3354..3355, - ), - ( - Text( - "%", - ), - 3355..3356, - ), - ( - Text( - "/", - ), - 3356..3357, - ), - ( - Text( - "%", + "\\\\", ), - 3357..3358, + 3337..3339, ), ( Text( - "%", + "\\\\", ), - 3358..3359, + 3339..3341, ), ( Text( - "/", + "\\\\", ), - 3359..3360, + 3341..3343, ), ( Text( - "g", + "\\\\", ), - 3360..3361, + 3343..3345, ), ( Text( - "\"", + "/g' -e \"s/%/%%/g\" ", ), - 3361..3362, + 3345..3363, ), ( Text( @@ -4439,22 +3824,22 @@ expression: parse_unwrap(&tokenize(&stdlib)) Text( [ ( - Escape( + Text( ( - "x1b", - 3506..3510, + "\\x", + 3506..3508, ), ), - 3506..3510, + 3506..3508, ), ( Text( ( - "[", - 3510..3511, + "1b[", + 3508..3511, ), ), - 3510..3511, + 3508..3511, ), ( Expression( @@ -4552,32 +3937,23 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), 3529..3547, ), - ( - Escape( - ( - "x1b", - 3547..3551, - ), - ), - 3547..3551, - ), ( Text( ( - "[", - 3551..3552, + "\\x", + 3547..3549, ), ), - 3551..3552, + 3547..3549, ), ( Text( ( - "0m", - 3552..3554, + "1b[0m", + 3549..3554, ), ), - 3552..3554, + 3549..3554, ), ], ), @@ -4658,32 +4034,23 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( [ - ( - Escape( - ( - "x1b", - 3635..3639, - ), - ), - 3635..3639, - ), ( Text( ( - 
"[", - 3639..3640, + "\\x", + 3635..3637, ), ), - 3639..3640, + 3635..3637, ), ( Text( ( - "1m", - 3640..3642, + "1b[1m", + 3637..3642, ), ), - 3640..3642, + 3637..3642, ), ( Expression( @@ -4712,32 +4079,23 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), 3642..3660, ), - ( - Escape( - ( - "x1b", - 3660..3664, - ), - ), - 3660..3664, - ), ( Text( ( - "[", - 3664..3665, + "\\x", + 3660..3662, ), ), - 3664..3665, + 3660..3662, ), ( Text( ( - "0m", - 3665..3667, + "1b[0m", + 3662..3667, ), ), - 3665..3667, + 3662..3667, ), ], ), @@ -4818,32 +4176,23 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( [ - ( - Escape( - ( - "x1b", - 3752..3756, - ), - ), - 3752..3756, - ), ( Text( ( - "[", - 3756..3757, + "\\x", + 3752..3754, ), ), - 3756..3757, + 3752..3754, ), ( Text( ( - "3m", - 3757..3759, + "1b[3m", + 3754..3759, ), ), - 3757..3759, + 3754..3759, ), ( Expression( @@ -4873,31 +4222,22 @@ expression: parse_unwrap(&tokenize(&stdlib)) 3759..3777, ), ( - Escape( + Text( ( - "x1b", - 3777..3781, + "\\x", + 3777..3779, ), ), - 3777..3781, + 3777..3779, ), ( Text( ( - "[", - 3781..3782, + "1b[0m", + 3779..3784, ), ), - 3781..3782, - ), - ( - Text( - ( - "0m", - 3782..3784, - ), - ), - 3782..3784, + 3779..3784, ), ], ), @@ -4978,32 +4318,23 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( [ - ( - Escape( - ( - "x1b", - 3877..3881, - ), - ), - 3877..3881, - ), ( Text( ( - "[", - 3881..3882, + "\\x", + 3877..3879, ), ), - 3881..3882, + 3877..3879, ), ( Text( ( - "4m", - 3882..3884, + "1b[4m", + 3879..3884, ), ), - 3882..3884, + 3879..3884, ), ( Expression( @@ -5032,32 +4363,23 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), 3884..3902, ), - ( - Escape( - ( - "x1b", - 3902..3906, - ), - ), - 3902..3906, - ), ( Text( ( - "[", - 3906..3907, + "\\x", + 3902..3904, ), ), - 3906..3907, + 3902..3904, ), ( Text( ( - "0m", - 3907..3909, + "1b[0m", + 3904..3909, ), ), - 3907..3909, + 3904..3909, ), ], ), @@ -5159,22 +4481,22 @@ expression: parse_unwrap(&tokenize(&stdlib)) Text( [ ( - Escape( + Text( ( - "x1b", - 4024..4028, + "\\x", + 4024..4026, ), ), - 4024..4028, + 4024..4026, ), ( Text( ( - "[", - 4028..4029, + "1b[", + 4026..4029, ), ), - 4028..4029, + 4026..4029, ), ( Expression( @@ -5206,61 +4528,34 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "m", - 4044..4045, - ), - ), - 4044..4045, - ), - ( - Text( - ( - "%", - 4045..4046, + "m%s", + 4044..4047, ), ), - 4045..4046, + 4044..4047, ), ( Text( ( - "s", - 4046..4047, - ), - ), - 4046..4047, - ), - ( - Escape( - ( - "x1b", - 4047..4051, + "\\x", + 4047..4049, ), ), - 4047..4051, + 4047..4049, ), ( Text( ( - "[", - 4051..4052, + "1b[0m", + 4049..4054, ), ), - 4051..4052, + 4049..4054, ), ( Text( ( - "0m", - 4052..4054, - ), - ), - 4052..4054, - ), - ( - Escape( - ( - "n", + "\\n", 4054..4056, ), ), @@ -5368,136 +4663,46 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( [ - ( - Escape( - ( - "x1b", - 4163..4167, - ), - ), - 4163..4167, - ), - ( - Text( - ( - "[", - 4167..4168, - ), - ), - 4167..4168, - ), - ( - Text( - ( - "1", - 4168..4169, - ), - ), - 4168..4169, - ), - ( - Text( - ( - ";", - 4169..4170, - ), - ), - 4169..4170, - ), - ( - Text( - ( - "3", - 4170..4171, - ), - ), - 4170..4171, - ), - ( - Text( - ( - ";", - 4171..4172, - ), - ), - 4171..4172, - ), ( Text( ( - "97", - 4172..4174, + "\\x", + 4163..4165, ), ), - 4172..4174, + 4163..4165, ), ( Text( ( - ";", - 4174..4175, + "1b[1;3;97;44m%s", + 4165..4180, ), ), - 4174..4175, + 4165..4180, ), ( Text( ( - "44m", - 4175..4178, + "\\x", + 4180..4182, ), ), - 
4175..4178, + 4180..4182, ), ( Text( ( - "%", - 4178..4179, + "1b[0m", + 4182..4187, ), ), - 4178..4179, + 4182..4187, ), ( Text( ( - "s", - 4179..4180, - ), - ), - 4179..4180, - ), - ( - Escape( - ( - "x1b", - 4180..4184, - ), - ), - 4180..4184, - ), - ( - Text( - ( - "[", - 4184..4185, - ), - ), - 4184..4185, - ), - ( - Text( - ( - "0m", - 4185..4187, - ), - ), - 4185..4187, - ), - ( - Escape( - ( - "n", + "\\n", 4187..4189, ), ), @@ -5605,136 +4810,46 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( [ - ( - Escape( - ( - "x1b", - 4302..4306, - ), - ), - 4302..4306, - ), - ( - Text( - ( - "[", - 4306..4307, - ), - ), - 4306..4307, - ), - ( - Text( - ( - "1", - 4307..4308, - ), - ), - 4307..4308, - ), - ( - Text( - ( - ";", - 4308..4309, - ), - ), - 4308..4309, - ), - ( - Text( - ( - "3", - 4309..4310, - ), - ), - 4309..4310, - ), ( Text( ( - ";", - 4310..4311, + "\\x", + 4302..4304, ), ), - 4310..4311, + 4302..4304, ), ( Text( ( - "97", - 4311..4313, + "1b[1;3;97;42m%s", + 4304..4319, ), ), - 4311..4313, + 4304..4319, ), ( Text( ( - ";", - 4313..4314, + "\\x", + 4319..4321, ), ), - 4313..4314, + 4319..4321, ), ( Text( ( - "42m", - 4314..4317, + "1b[0m", + 4321..4326, ), ), - 4314..4317, + 4321..4326, ), ( Text( ( - "%", - 4317..4318, - ), - ), - 4317..4318, - ), - ( - Text( - ( - "s", - 4318..4319, - ), - ), - 4318..4319, - ), - ( - Escape( - ( - "x1b", - 4319..4323, - ), - ), - 4319..4323, - ), - ( - Text( - ( - "[", - 4323..4324, - ), - ), - 4323..4324, - ), - ( - Text( - ( - "0m", - 4324..4326, - ), - ), - 4324..4326, - ), - ( - Escape( - ( - "n", + "\\n", 4326..4328, ), ), @@ -5842,136 +4957,46 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( [ - ( - Escape( - ( - "x1b", - 4441..4445, - ), - ), - 4441..4445, - ), - ( - Text( - ( - "[", - 4445..4446, - ), - ), - 4445..4446, - ), - ( - Text( - ( - "1", - 4446..4447, - ), - ), - 4446..4447, - ), - ( - Text( - ( - ";", - 4447..4448, - ), - ), - 4447..4448, - ), ( Text( ( - "3", - 4448..4449, + "\\x", + 4441..4443, ), ), - 4448..4449, + 4441..4443, ), ( Text( ( - ";", - 4449..4450, + "1b[1;3;97;43m%s", + 4443..4458, ), ), - 4449..4450, + 4443..4458, ), ( Text( ( - "97", - 4450..4452, + "\\x", + 4458..4460, ), ), - 4450..4452, + 4458..4460, ), ( Text( ( - ";", - 4452..4453, + "1b[0m", + 4460..4465, ), ), - 4452..4453, + 4460..4465, ), ( Text( ( - "43m", - 4453..4456, - ), - ), - 4453..4456, - ), - ( - Text( - ( - "%", - 4456..4457, - ), - ), - 4456..4457, - ), - ( - Text( - ( - "s", - 4457..4458, - ), - ), - 4457..4458, - ), - ( - Escape( - ( - "x1b", - 4458..4462, - ), - ), - 4458..4462, - ), - ( - Text( - ( - "[", - 4462..4463, - ), - ), - 4462..4463, - ), - ( - Text( - ( - "0m", - 4463..4465, - ), - ), - 4463..4465, - ), - ( - Escape( - ( - "n", + "\\n", 4465..4467, ), ), @@ -6107,136 +5132,46 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( [ - ( - Escape( - ( - "x1b", - 4635..4639, - ), - ), - 4635..4639, - ), ( Text( ( - "[", - 4639..4640, + "\\x", + 4635..4637, ), ), - 4639..4640, + 4635..4637, ), ( Text( ( - "1", - 4640..4641, + "1b[1;3;97;41m%s", + 4637..4652, ), ), - 4640..4641, + 4637..4652, ), ( Text( ( - ";", - 4641..4642, - ), - ), - 4641..4642, - ), - ( - Text( - ( - "3", - 4642..4643, - ), - ), - 4642..4643, - ), - ( - Text( - ( - ";", - 4643..4644, - ), - ), - 4643..4644, - ), - ( - Text( - ( - "97", - 4644..4646, - ), - ), - 4644..4646, - ), - ( - Text( - ( - ";", - 4646..4647, - ), - ), - 4646..4647, - ), - ( - Text( - ( - "41m", - 4647..4650, - ), - ), - 4647..4650, - ), - ( - Text( - ( - "%", - 
4650..4651, - ), - ), - 4650..4651, - ), - ( - Text( - ( - "s", - 4651..4652, - ), - ), - 4651..4652, - ), - ( - Escape( - ( - "x1b", - 4652..4656, + "\\x", + 4652..4654, ), ), - 4652..4656, + 4652..4654, ), ( Text( ( - "[", - 4656..4657, + "1b[0m", + 4654..4659, ), ), - 4656..4657, + 4654..4659, ), ( Text( ( - "0m", - 4657..4659, - ), - ), - 4657..4659, - ), - ( - Escape( - ( - "n", + "\\n", 4659..4661, ), ), @@ -6423,63 +5358,27 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", + "echo \"", ), - 4867..4871, + 4866..4873, ), ( Text( - "\"", - ), - 4872..4873, - ), - ( - Escape( - "$", + "\\$", ), 4873..4875, ), - ( - Escape( - "{", - ), - 4875..4877, - ), - ( - Text( - "BASH_VERSINFO", - ), - 4877..4890, - ), - ( - Text( - "[", - ), - 4890..4891, - ), - ( - Text( - "0", - ), - 4891..4892, - ), - ( - Text( - "]", - ), - 4892..4893, - ), ( Text( - "}", + "\\{", ), - 4893..4894, + 4875..4877, ), ( Text( - "\"", + "BASH_VERSINFO[0]}\" ", ), - 4894..4895, + 4877..4896, ), ( Text( @@ -6540,63 +5439,27 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", + "echo \"", ), - 4929..4933, + 4928..4935, ), ( Text( - "\"", - ), - 4934..4935, - ), - ( - Escape( - "$", + "\\$", ), 4935..4937, ), - ( - Escape( - "{", - ), - 4937..4939, - ), ( Text( - "BASH_VERSINFO", + "\\{", ), - 4939..4952, - ), - ( - Text( - "[", - ), - 4952..4953, - ), - ( - Text( - "1", - ), - 4953..4954, - ), - ( - Text( - "]", - ), - 4954..4955, - ), - ( - Text( - "}", - ), - 4955..4956, + 4937..4939, ), ( Text( - "\"", + "BASH_VERSINFO[1]}\" ", ), - 4956..4957, + 4939..4958, ), ( Text( @@ -6657,63 +5520,27 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", + "echo \"", ), - 4991..4995, + 4990..4997, ), ( Text( - "\"", - ), - 4996..4997, - ), - ( - Escape( - "$", + "\\$", ), 4997..4999, ), - ( - Escape( - "{", - ), - 4999..5001, - ), - ( - Text( - "BASH_VERSINFO", - ), - 5001..5014, - ), - ( - Text( - "[", - ), - 5014..5015, - ), - ( - Text( - "2", - ), - 5015..5016, - ), ( Text( - "]", + "\\{", ), - 5016..5017, - ), - ( - Text( - "}", - ), - 5017..5018, + 4999..5001, ), ( Text( - "\"", + "BASH_VERSINFO[2]}\" ", ), - 5018..5019, + 5001..5020, ), ( Text( diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha050__stdlib_fs.snap b/tests/grammar/snapshots/r#mod__grammar__alpha050__stdlib_fs.snap index f21ad0f..3656377 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha050__stdlib_fs.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha050__stdlib_fs.snap @@ -127,21 +127,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "[", + "[ -d \"", ), - 159..160, - ), - ( - CommandOption( - "-d", - ), - 161..163, - ), - ( - Text( - "\"", - ), - 164..165, + 158..165, ), ( Expression( @@ -159,15 +147,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 171..172, - ), - ( - Text( - "]", + "\" ] ", ), - 173..174, + 171..175, ), ( Text( @@ -292,21 +274,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "[", - ), - 284..285, - ), - ( - CommandOption( - "-f", - ), - 286..288, - ), - ( - Text( - "\"", + "[ -f \"", ), - 289..290, + 283..290, ), ( Expression( @@ -324,15 +294,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 296..297, - ), - ( - Text( - "]", + "\" ] ", ), - 298..299, + 296..300, ), ( Text( @@ -457,15 +421,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "<", - ), - 416..417, - ), - ( - Text( - "\"", + "< \"", ), - 418..419, + 415..419, ), ( Expression( @@ -483,9 +441,9 @@ expression: 
parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 425..426, + 425..427, ), ( Text( @@ -625,15 +583,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", - ), - 567..571, - ), - ( - Text( - "\"", + "echo \"", ), - 572..573, + 566..573, ), ( Expression( @@ -651,21 +603,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 582..583, - ), - ( - Text( - ">", - ), - 584..585, - ), - ( - Text( - "\"", + "\" > \"", ), - 586..587, + 582..587, ), ( Expression( @@ -683,9 +623,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 593..594, + 593..595, ), ( Text( @@ -841,15 +781,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", - ), - 743..747, - ), - ( - Text( - "\"", + "echo \"", ), - 748..749, + 742..749, ), ( Expression( @@ -867,27 +801,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 758..759, - ), - ( - Text( - ">", - ), - 760..761, - ), - ( - Text( - ">", - ), - 761..762, - ), - ( - Text( - "\"", + "\" >> \"", ), - 763..764, + 758..764, ), ( Expression( @@ -905,9 +821,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 770..771, + 770..772, ), ( Text( @@ -1092,21 +1008,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "ln", - ), - 977..979, - ), - ( - CommandOption( - "-s", - ), - 980..982, - ), - ( - Text( - "\"", + "ln -s \"", ), - 983..984, + 976..984, ), ( Expression( @@ -1124,15 +1028,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 992..993, - ), - ( - Text( - "\"", + "\" \"", ), - 994..995, + 992..995, ), ( Expression( @@ -1150,9 +1048,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 1008..1009, + 1008..1010, ), ( Text( @@ -1217,20 +1115,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "The", - 1050..1053, - ), - ), - 1050..1053, - ), - ( - Text( - ( - "file", - 1054..1058, + "The file ", + 1050..1059, ), ), - 1054..1058, + 1050..1059, ), ( Expression( @@ -1249,29 +1138,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "doesn't", - 1068..1075, - ), - ), - 1068..1075, - ), - ( - Text( - ( - "exist", - 1076..1081, - ), - ), - 1076..1081, - ), - ( - Text( - ( - "!", - 1081..1082, + " doesn't exist!", + 1067..1082, ), ), - 1081..1082, + 1067..1082, ), ], ), @@ -1418,21 +1289,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "mkdir", - ), - 1243..1248, - ), - ( - CommandOption( - "-p", - ), - 1249..1251, - ), - ( - Text( - "\"", + "mkdir -p \"", ), - 1252..1253, + 1242..1253, ), ( Expression( @@ -1450,9 +1309,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 1259..1260, + 1259..1261, ), ( Text( @@ -1563,15 +1422,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "mktemp", - ), - 1404..1410, - ), - ( - CommandOption( - "--version", + "mktemp --version ", ), - 1411..1420, + 1403..1421, ), ( Text( @@ -1725,29 +1578,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "tmp", - 1664..1667, - ), - ), - 1664..1667, - ), - ( - Text( - ( - ".", - 1667..1668, - ), - ), - 1667..1668, - ), - ( - Text( - ( - "XXXXXXXXXX", - 1668..1678, + "tmp.XXXXXXXXXX", + 1664..1678, ), ), - 1668..1678, + 1664..1678, ), ], ), @@ -1877,74 +1712,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "The", - 1798..1801, - ), - ), - 1798..1801, - ), - ( - Text( - ( - "template", - 1802..1810, - ), - ), - 1802..1810, - ), - ( - Text( - ( - "cannot", - 1811..1817, - ), - ), - 1811..1817, - ), 
- ( - Text( - ( - "be", - 1818..1820, - ), - ), - 1818..1820, - ), - ( - Text( - ( - "an", - 1821..1823, - ), - ), - 1821..1823, - ), - ( - Text( - ( - "empty", - 1824..1829, - ), - ), - 1824..1829, - ), - ( - Text( - ( - "string", - 1830..1836, - ), - ), - 1830..1836, - ), - ( - Text( - ( - "!", - 1836..1837, + "The template cannot be an empty string!", + 1798..1837, ), ), - 1836..1837, + 1798..1837, ), ], ), @@ -2073,51 +1845,21 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "mktemp", - ), - 2071..2077, - ), - ( - CommandOption( - "-d", - ), - 2078..2080, - ), - ( - CommandOption( - "-p", + "mktemp -d -p \"", ), - 2081..2083, + 2070..2085, ), ( Text( - "\"", - ), - 2084..2085, - ), - ( - Escape( - "$", + "\\$", ), 2085..2087, ), ( Text( - "TMPDIR", - ), - 2087..2093, - ), - ( - Text( - "\"", - ), - 2093..2094, - ), - ( - Text( - "\"", + "TMPDIR\" \"", ), - 2095..2096, + 2087..2096, ), ( Expression( @@ -2135,9 +1877,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 2106..2107, + 2106..2108, ), ( Text( @@ -2200,57 +1942,21 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "mktemp", - ), - 2145..2151, - ), - ( - CommandOption( - "-d", + "mktemp -d -p \"", ), - 2152..2154, - ), - ( - CommandOption( - "-p", - ), - 2155..2157, + 2144..2159, ), ( Text( - "\"", - ), - 2158..2159, - ), - ( - Escape( - "$", + "\\$", ), 2159..2161, ), ( Text( - "TMPDIR", - ), - 2161..2167, - ), - ( - Text( - "\"", - ), - 2167..2168, - ), - ( - CommandOption( - "-t", - ), - 2169..2171, - ), - ( - Text( - "\"", + "TMPDIR\" -t \"", ), - 2172..2173, + 2161..2173, ), ( Expression( @@ -2268,9 +1974,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 2183..2184, + 2183..2185, ), ( Text( @@ -2351,56 +2057,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "Failed", - 2232..2238, - ), - ), - 2232..2238, - ), - ( - Text( - ( - "to", - 2239..2241, + "Failed to make a temporary directory", + 2232..2268, ), ), - 2239..2241, - ), - ( - Text( - ( - "make", - 2242..2246, - ), - ), - 2242..2246, - ), - ( - Text( - ( - "a", - 2247..2248, - ), - ), - 2247..2248, - ), - ( - Text( - ( - "temporary", - 2249..2258, - ), - ), - 2249..2258, - ), - ( - Text( - ( - "directory", - 2259..2268, - ), - ), - 2259..2268, + 2232..2268, ), ], ), @@ -2497,21 +2158,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "trap", + "trap 'rm -rf ", ), - 2352..2356, - ), - ( - Text( - "'rm", - ), - 2357..2360, - ), - ( - CommandOption( - "-rf", - ), - 2361..2364, + 2351..2365, ), ( Expression( @@ -2529,15 +2178,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "'", - ), - 2375..2376, - ), - ( - Text( - "EXIT", + "' EXIT ", ), - 2377..2381, + 2375..2382, ), ( Text( @@ -2572,115 +2215,34 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "Setting", - 2415..2422, - ), - ), - 2415..2422, - ), - ( - Text( - ( - "auto", - 2423..2427, - ), - ), - 2423..2427, - ), - ( - Text( - ( - "deletion", - 2428..2436, + "Setting auto deletion fails. 
You must delete temporary dir ", + 2415..2474, ), ), - 2428..2436, + 2415..2474, ), ( - Text( + Expression( ( - "fails", - 2437..2442, + Var( + ( + "filename", + 2475..2483, + ), + ), + 2475..2483, ), ), - 2437..2442, + 2474..2484, ), ( Text( ( ".", - 2442..2443, + 2484..2485, ), ), - 2442..2443, - ), - ( - Text( - ( - "You", - 2444..2447, - ), - ), - 2444..2447, - ), - ( - Text( - ( - "must", - 2448..2452, - ), - ), - 2448..2452, - ), - ( - Text( - ( - "delete", - 2453..2459, - ), - ), - 2453..2459, - ), - ( - Text( - ( - "temporary", - 2460..2469, - ), - ), - 2460..2469, - ), - ( - Text( - ( - "dir", - 2470..2473, - ), - ), - 2470..2473, - ), - ( - Expression( - ( - Var( - ( - "filename", - 2475..2483, - ), - ), - 2475..2483, - ), - ), - 2474..2484, - ), - ( - Text( - ( - ".", - 2484..2485, - ), - ), - 2484..2485, + 2484..2485, ), ], ), @@ -2739,15 +2301,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "trap", + "trap 'rmdir ", ), - 2532..2536, - ), - ( - Text( - "'rmdir", - ), - 2537..2543, + 2531..2544, ), ( Expression( @@ -2765,15 +2321,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "'", - ), - 2554..2555, - ), - ( - Text( - "EXIT", + "' EXIT ", ), - 2556..2560, + 2554..2561, ), ( Text( @@ -2808,92 +2358,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "Setting", - 2594..2601, - ), - ), - 2594..2601, - ), - ( - Text( - ( - "auto", - 2602..2606, - ), - ), - 2602..2606, - ), - ( - Text( - ( - "deletion", - 2607..2615, - ), - ), - 2607..2615, - ), - ( - Text( - ( - "fails", - 2616..2621, - ), - ), - 2616..2621, - ), - ( - Text( - ( - ".", - 2621..2622, - ), - ), - 2621..2622, - ), - ( - Text( - ( - "You", - 2623..2626, - ), - ), - 2623..2626, - ), - ( - Text( - ( - "must", - 2627..2631, - ), - ), - 2627..2631, - ), - ( - Text( - ( - "delete", - 2632..2638, - ), - ), - 2632..2638, - ), - ( - Text( - ( - "temporary", - 2639..2648, - ), - ), - 2639..2648, - ), - ( - Text( - ( - "dir", - 2649..2652, + "Setting auto deletion fails. 
You must delete temporary dir ", + 2594..2653, ), ), - 2649..2652, + 2594..2653, ), ( Expression( @@ -3143,15 +2612,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "chmod", - ), - 2917..2922, - ), - ( - Text( - "\"", + "chmod \"", ), - 2923..2924, + 2916..2924, ), ( Expression( @@ -3169,15 +2632,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 2930..2931, - ), - ( - Text( - "\"", + "\" \"", ), - 2932..2933, + 2930..2933, ), ( Expression( @@ -3195,9 +2652,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 2939..2940, + 2939..2941, ), ( Text( @@ -3262,20 +2719,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "The", - 2981..2984, - ), - ), - 2981..2984, - ), - ( - Text( - ( - "file", - 2985..2989, + "The file ", + 2981..2990, ), ), - 2985..2989, + 2981..2990, ), ( Expression( @@ -3294,29 +2742,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "doesn't", - 2997..3004, - ), - ), - 2997..3004, - ), - ( - Text( - ( - "exist", - 3005..3010, - ), - ), - 3005..3010, - ), - ( - Text( - ( - "!", - 3010..3011, + " doesn't exist!", + 2996..3011, ), ), - 3010..3011, + 2996..3011, ), ], ), @@ -3534,21 +2964,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "chown", - ), - 3235..3240, - ), - ( - CommandOption( - "-R", - ), - 3241..3243, - ), - ( - Text( - "\"", + "chown -R \"", ), - 3244..3245, + 3234..3245, ), ( Expression( @@ -3566,15 +2984,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" \"", ), - 3251..3252, - ), - ( - Text( - "\"", - ), - 3253..3254, + 3251..3254, ), ( Expression( @@ -3592,9 +3004,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 3260..3261, + 3260..3262, ), ( Text( @@ -3659,20 +3071,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "The", - 3302..3305, - ), - ), - 3302..3305, - ), - ( - Text( - ( - "file", - 3306..3310, + "The file ", + 3302..3311, ), ), - 3306..3310, + 3302..3311, ), ( Expression( @@ -3691,29 +3094,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "doesn't", - 3318..3325, - ), - ), - 3318..3325, - ), - ( - Text( - ( - "exist", - 3326..3331, + " doesn't exist!", + 3317..3332, ), ), - 3326..3331, - ), - ( - Text( - ( - "!", - 3331..3332, - ), - ), - 3331..3332, + 3317..3332, ), ], ), @@ -3847,9 +3232,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) Text( [ ( - Escape( + Text( ( - "(", + "\\(", 3534..3536, ), ), @@ -3858,61 +3243,16 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "[", - 3536..3537, - ), - ), - 3536..3537, - ), - ( - Text( - ( - "^", - 3537..3538, - ), - ), - 3537..3538, - ), - ( - Text( - ( - "*", - 3538..3539, - ), - ), - 3538..3539, - ), - ( - Text( - ( - "?", - 3539..3540, - ), - ), - 3539..3540, - ), - ( - Text( - ( - "/", - 3540..3541, + "[^*?/]", + 3536..3542, ), ), - 3540..3541, + 3536..3542, ), ( Text( ( - "]", - 3541..3542, - ), - ), - 3541..3542, - ), - ( - Escape( - ( - ")", + "\\)", 3542..3544, ), ), @@ -3926,27 +3266,27 @@ expression: parse_unwrap(&tokenize(&stdlib)) Text( [ ( - Escape( + Text( ( - "\\", + "\\\\", 3548..3550, ), ), 3548..3550, ), ( - Escape( + Text( ( - "\\", + "\\\\", 3550..3552, ), ), 3550..3552, ), ( - Escape( + Text( ( - "1", + "\\1", 3552..3554, ), ), @@ -4336,7 +3676,17 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - [], + [ + ( + Text( + ( + " ", + 4059..4060, + ), + ), + 4059..4060, + ), + ], ), 4058..4061, ), @@ -4383,33 +3733,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - 
"eval", - ), - 4087..4091, - ), - ( - Text( - "\"", - ), - 4092..4093, - ), - ( - Text( - "for", - ), - 4093..4096, - ), - ( - Text( - "file", - ), - 4097..4101, - ), - ( - Text( - "in", + "eval \"for file in ", ), - 4102..4104, + 4086..4105, ), ( Expression( @@ -4427,31 +3753,13 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - ";", - ), - 4115..4116, - ), - ( - Text( - "do", + "; do [ -e ", ), - 4117..4119, + 4115..4125, ), ( Text( - "[", - ), - 4120..4121, - ), - ( - CommandOption( - "-e", - ), - 4122..4124, - ), - ( - Escape( - "\\", + "\\\\", ), 4125..4127, ), @@ -4462,14 +3770,14 @@ expression: parse_unwrap(&tokenize(&stdlib)) 4127..4128, ), ( - Escape( - "\\", + Text( + "\\\\", ), 4128..4130, ), ( - Escape( - "$", + Text( + "\\$", ), 4130..4132, ), @@ -4479,39 +3787,21 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), 4132..4136, ), - ( - Escape( - "\\", - ), - 4136..4138, - ), - ( - Text( - "\"", - ), - 4138..4139, - ), ( Text( - "]", + "\\\\", ), - 4140..4141, + 4136..4138, ), ( Text( - "&&", + "\" ] && echo ", ), - 4142..4144, + 4138..4150, ), ( Text( - "echo", - ), - 4145..4149, - ), - ( - Escape( - "\\", + "\\\\", ), 4150..4152, ), @@ -4522,14 +3812,14 @@ expression: parse_unwrap(&tokenize(&stdlib)) 4152..4153, ), ( - Escape( - "\\", + Text( + "\\\\", ), 4153..4155, ), ( - Escape( - "$", + Text( + "\\$", ), 4155..4157, ), @@ -4540,40 +3830,22 @@ expression: parse_unwrap(&tokenize(&stdlib)) 4157..4161, ), ( - Escape( - "\\", + Text( + "\\\\", ), 4161..4163, ), ( Text( - "\"", + "\"; done\" ", ), - 4163..4164, + 4163..4172, ), ( Text( - ";", + "$", ), - 4164..4165, - ), - ( - Text( - "done", - ), - 4166..4170, - ), - ( - Text( - "\"", - ), - 4170..4171, - ), - ( - Text( - "$", - ), - 4172..4173, + 4172..4173, ), ], [ @@ -4624,9 +3896,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) Text( [ ( - Escape( + Text( ( - "n", + "\\n", 4200..4202, ), ), @@ -4932,9 +4204,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) Text( [ ( - Escape( + Text( ( - ".", + "\\.", 4675..4677, ), ), @@ -4943,25 +4215,16 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "(", - 4677..4678, + "(tar", + 4677..4681, ), ), - 4677..4678, + 4677..4681, ), ( Text( ( - "tar", - 4678..4681, - ), - ), - 4678..4681, - ), - ( - Escape( - ( - ".", + "\\.", 4681..4683, ), ), @@ -4970,29 +4233,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "bz2|tbz|tbz2", - 4683..4695, + "bz2|tbz|tbz2)$", + 4683..4697, ), ), - 4683..4695, - ), - ( - Text( - ( - ")", - 4695..4696, - ), - ), - 4695..4696, - ), - ( - Text( - ( - "$", - 4696..4697, - ), - ), - 4696..4697, + 4683..4697, ), ], ), @@ -5028,21 +4273,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "tar", - ), - 4709..4712, - ), - ( - Text( - "xvjf", + "tar xvjf \"", ), - 4713..4717, - ), - ( - Text( - "\"", - ), - 4718..4719, + 4708..4719, ), ( Expression( @@ -5060,21 +4293,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 4725..4726, - ), - ( - CommandOption( - "-C", - ), - 4727..4729, - ), - ( - Text( - "\"", + "\" -C \"", ), - 4730..4731, + 4725..4731, ), ( Expression( @@ -5092,9 +4313,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 4739..4740, + 4739..4741, ), ( Text( @@ -5154,9 +4375,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) Text( [ ( - Escape( + Text( ( - ".", + "\\.", 4775..4777, ), ), @@ -5165,25 +4386,16 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "(", - 4777..4778, + "(tar", + 4777..4781, ), ), - 4777..4778, + 4777..4781, ), ( Text( ( 
- "tar", - 4778..4781, - ), - ), - 4778..4781, - ), - ( - Escape( - ( - ".", + "\\.", 4781..4783, ), ), @@ -5192,29 +4404,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "gz|tgz", - 4783..4789, + "gz|tgz)$", + 4783..4791, ), ), - 4783..4789, - ), - ( - Text( - ( - ")", - 4789..4790, - ), - ), - 4789..4790, - ), - ( - Text( - ( - "$", - 4790..4791, - ), - ), - 4790..4791, + 4783..4791, ), ], ), @@ -5250,21 +4444,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "tar", - ), - 4803..4806, - ), - ( - Text( - "xzf", - ), - 4807..4810, - ), - ( - Text( - "\"", + "tar xzf \"", ), - 4811..4812, + 4802..4812, ), ( Expression( @@ -5282,21 +4464,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" -C \"", ), - 4818..4819, - ), - ( - CommandOption( - "-C", - ), - 4820..4822, - ), - ( - Text( - "\"", - ), - 4823..4824, + 4818..4824, ), ( Expression( @@ -5314,9 +4484,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 4832..4833, + 4832..4834, ), ( Text( @@ -5376,9 +4546,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) Text( [ ( - Escape( + Text( ( - ".", + "\\.", 4868..4870, ), ), @@ -5387,25 +4557,16 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "(", - 4870..4871, + "(tar", + 4870..4874, ), ), - 4870..4871, + 4870..4874, ), ( Text( ( - "tar", - 4871..4874, - ), - ), - 4871..4874, - ), - ( - Escape( - ( - ".", + "\\.", 4874..4876, ), ), @@ -5414,29 +4575,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "xz|txz", - 4876..4882, - ), - ), - 4876..4882, - ), - ( - Text( - ( - ")", - 4882..4883, - ), - ), - 4882..4883, - ), - ( - Text( - ( - "$", - 4883..4884, + "xz|txz)$", + 4876..4884, ), ), - 4883..4884, + 4876..4884, ), ], ), @@ -5472,21 +4615,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "tar", + "tar xJf \"", ), - 4896..4899, - ), - ( - Text( - "xJf", - ), - 4900..4903, - ), - ( - Text( - "\"", - ), - 4904..4905, + 4895..4905, ), ( Expression( @@ -5504,21 +4635,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 4911..4912, - ), - ( - CommandOption( - "-C", + "\" -C \"", ), - 4913..4915, - ), - ( - Text( - "\"", - ), - 4916..4917, + 4911..4917, ), ( Expression( @@ -5536,9 +4655,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 4925..4926, + 4925..4927, ), ( Text( @@ -5598,9 +4717,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) Text( [ ( - Escape( + Text( ( - ".", + "\\.", 4961..4963, ), ), @@ -5609,20 +4728,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "bz2", - 4963..4966, + "bz2$", + 4963..4967, ), ), - 4963..4966, - ), - ( - Text( - ( - "$", - 4966..4967, - ), - ), - 4966..4967, + 4963..4967, ), ], ), @@ -5649,15 +4759,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "bunzip2", - ), - 4973..4980, - ), - ( - Text( - "\"", + "bunzip2 \"", ), - 4981..4982, + 4972..4982, ), ( Expression( @@ -5675,9 +4779,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 4988..4989, + 4988..4990, ), ( Text( @@ -5737,9 +4841,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) Text( [ ( - Escape( + Text( ( - ".", + "\\.", 5024..5026, ), ), @@ -5748,20 +4852,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "deb", - 5026..5029, + "deb$", + 5026..5030, ), ), - 5026..5029, - ), - ( - Text( - ( - "$", - 5029..5030, - ), - ), - 5029..5030, + 5026..5030, ), ], ), @@ -5788,27 +4883,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "dpkg", - ), - 5036..5040, - 
), - ( - CommandOption( - "-deb", + "dpkg-deb -xv \"", ), - 5040..5044, - ), - ( - CommandOption( - "-xv", - ), - 5045..5048, - ), - ( - Text( - "\"", - ), - 5049..5050, + 5035..5050, ), ( Expression( @@ -5826,15 +4903,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 5056..5057, - ), - ( - Text( - "\"", + "\" \"", ), - 5058..5059, + 5056..5059, ), ( Expression( @@ -5852,9 +4923,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 5067..5068, + 5067..5069, ), ( Text( @@ -5914,9 +4985,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) Text( [ ( - Escape( + Text( ( - ".", + "\\.", 5103..5105, ), ), @@ -5925,20 +4996,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "gz", - 5105..5107, + "gz$", + 5105..5108, ), ), - 5105..5107, - ), - ( - Text( - ( - "$", - 5107..5108, - ), - ), - 5107..5108, + 5105..5108, ), ], ), @@ -5965,15 +5027,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "gunzip", + "gunzip \"", ), - 5114..5120, - ), - ( - Text( - "\"", - ), - 5121..5122, + 5113..5122, ), ( Expression( @@ -5991,9 +5047,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 5128..5129, + 5128..5130, ), ( Text( @@ -6053,9 +5109,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) Text( [ ( - Escape( + Text( ( - ".", + "\\.", 5164..5166, ), ), @@ -6064,20 +5120,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "rar", - 5166..5169, - ), - ), - 5166..5169, - ), - ( - Text( - ( - "$", - 5169..5170, + "rar$", + 5166..5170, ), ), - 5169..5170, + 5166..5170, ), ], ), @@ -6104,21 +5151,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "unrar", + "unrar x \"", ), - 5176..5181, - ), - ( - Text( - "x", - ), - 5182..5183, - ), - ( - Text( - "\"", - ), - 5184..5185, + 5175..5185, ), ( Expression( @@ -6136,15 +5171,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 5191..5192, - ), - ( - Text( - "\"", + "\" \"", ), - 5193..5194, + 5191..5194, ), ( Expression( @@ -6162,9 +5191,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 5202..5203, + 5202..5204, ), ( Text( @@ -6224,9 +5253,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) Text( [ ( - Escape( + Text( ( - ".", + "\\.", 5238..5240, ), ), @@ -6235,20 +5264,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "rpm", - 5240..5243, + "rpm$", + 5240..5244, ), ), - 5240..5243, - ), - ( - Text( - ( - "$", - 5243..5244, - ), - ), - 5243..5244, + 5240..5244, ), ], ), @@ -6275,15 +5295,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "rpm2cpio", + "rpm2cpio \"", ), - 5250..5258, - ), - ( - Text( - "\"", - ), - 5259..5260, + 5249..5260, ), ( Expression( @@ -6301,27 +5315,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 5266..5267, - ), - ( - Text( - "|", + "\" | cpio -idm ", ), - 5268..5269, - ), - ( - Text( - "cpio", - ), - 5270..5274, - ), - ( - CommandOption( - "-idm", - ), - 5275..5279, + 5266..5280, ), ( Text( @@ -6381,9 +5377,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) Text( [ ( - Escape( + Text( ( - ".", + "\\.", 5314..5316, ), ), @@ -6392,20 +5388,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "tar", - 5316..5319, - ), - ), - 5316..5319, - ), - ( - Text( - ( - "$", - 5319..5320, + "tar$", + 5316..5320, ), ), - 5319..5320, + 5316..5320, ), ], ), @@ -6432,21 +5419,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "tar", + "tar xf \"", ), - 5326..5329, - ), - ( - Text( - "xf", - ), - 5330..5332, - 
), - ( - Text( - "\"", - ), - 5333..5334, + 5325..5334, ), ( Expression( @@ -6464,21 +5439,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 5340..5341, - ), - ( - CommandOption( - "-C", - ), - 5342..5344, - ), - ( - Text( - "\"", + "\" -C \"", ), - 5345..5346, + 5340..5346, ), ( Expression( @@ -6496,9 +5459,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 5354..5355, + 5354..5356, ), ( Text( @@ -6558,9 +5521,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) Text( [ ( - Escape( + Text( ( - ".", + "\\.", 5390..5392, ), ), @@ -6569,20 +5532,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "xz", - 5392..5394, + "xz$", + 5392..5395, ), ), - 5392..5394, - ), - ( - Text( - ( - "$", - 5394..5395, - ), - ), - 5394..5395, + 5392..5395, ), ], ), @@ -6609,21 +5563,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "xz", - ), - 5401..5403, - ), - ( - CommandOption( - "--decompress", + "xz --decompress \"", ), - 5404..5416, - ), - ( - Text( - "\"", - ), - 5417..5418, + 5400..5418, ), ( Expression( @@ -6641,9 +5583,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 5424..5425, + 5424..5426, ), ( Text( @@ -6703,9 +5645,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) Text( [ ( - Escape( + Text( ( - ".", + "\\.", 5460..5462, ), ), @@ -6714,20 +5656,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "7z", - 5462..5464, + "7z$", + 5462..5465, ), ), - 5462..5464, - ), - ( - Text( - ( - "$", - 5464..5465, - ), - ), - 5464..5465, + 5462..5465, ), ], ), @@ -6754,21 +5687,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "7z", + "7z -y \"", ), - 5471..5473, - ), - ( - CommandOption( - "-y", - ), - 5474..5476, - ), - ( - Text( - "\"", - ), - 5477..5478, + 5470..5478, ), ( Expression( @@ -6786,21 +5707,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 5484..5485, - ), - ( - CommandOption( - "-o", + "\" -o \"", ), - 5486..5488, - ), - ( - Text( - "\"", - ), - 5489..5490, + 5484..5490, ), ( Expression( @@ -6818,9 +5727,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 5498..5499, + 5498..5500, ), ( Text( @@ -6880,18 +5789,18 @@ expression: parse_unwrap(&tokenize(&stdlib)) Text( [ ( - Escape( + Text( ( - ".", + "\\.", 5534..5536, ), ), 5534..5536, ), ( - Escape( + Text( ( - "(", + "\\(", 5536..5538, ), ), @@ -6907,27 +5816,45 @@ expression: parse_unwrap(&tokenize(&stdlib)) 5538..5541, ), ( - Escape( + Text( ( - "|war", - 5541..5546, + "\\|", + 5541..5543, ), ), - 5541..5546, + 5541..5543, ), ( - Escape( + Text( ( - "|jar", - 5546..5551, + "war", + 5543..5546, ), ), - 5546..5551, + 5543..5546, ), ( - Escape( + Text( ( - ")", + "\\|", + 5546..5548, + ), + ), + 5546..5548, + ), + ( + Text( + ( + "jar", + 5548..5551, + ), + ), + 5548..5551, + ), + ( + Text( + ( + "\\)", 5551..5553, ), ), @@ -6967,15 +5894,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "unzip", + "unzip \"", ), - 5560..5565, - ), - ( - Text( - "\"", - ), - 5566..5567, + 5559..5567, ), ( Expression( @@ -6993,21 +5914,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 5573..5574, - ), - ( - CommandOption( - "-d", + "\" -d \"", ), - 5575..5577, - ), - ( - Text( - "\"", - ), - 5578..5579, + 5573..5579, ), ( Expression( @@ -7025,9 +5934,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 5587..5588, + 5587..5589, ), ( Text( @@ -7086,47 +5995,11 @@ expression: 
parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "Error", - 5633..5638, - ), - ), - 5633..5638, - ), - ( - Text( - ( - ":", - 5638..5639, + "Error: Unsupported file type", + 5633..5661, ), ), - 5638..5639, - ), - ( - Text( - ( - "Unsupported", - 5640..5651, - ), - ), - 5640..5651, - ), - ( - Text( - ( - "file", - 5652..5656, - ), - ), - 5652..5656, - ), - ( - Text( - ( - "type", - 5657..5661, - ), - ), - 5657..5661, + 5633..5661, ), ], ), @@ -7200,47 +6073,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ( Text( ( - "Error", - 5737..5742, - ), - ), - 5737..5742, - ), - ( - Text( - ( - ":", - 5742..5743, - ), - ), - 5742..5743, - ), - ( - Text( - ( - "File", - 5744..5748, - ), - ), - 5744..5748, - ), - ( - Text( - ( - "not", - 5749..5752, - ), - ), - 5749..5752, - ), - ( - Text( - ( - "found", - 5753..5758, + "Error: File not found", + 5737..5758, ), ), - 5753..5758, + 5737..5758, ), ], ), diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha050__stdlib_http.snap b/tests/grammar/snapshots/r#mod__grammar__alpha050__stdlib_http.snap index 66c2be6..172c724 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha050__stdlib_http.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha050__stdlib_http.snap @@ -217,27 +217,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "curl", + "curl -L -o \"", ), - 479..483, - ), - ( - CommandOption( - "-L", - ), - 484..486, - ), - ( - CommandOption( - "-o", - ), - 487..489, - ), - ( - Text( - "\"", - ), - 490..491, + 478..491, ), ( Expression( @@ -255,15 +237,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 497..498, - ), - ( - Text( - "\"", + "\" \"", ), - 499..500, + 497..500, ), ( Expression( @@ -281,9 +257,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 505..506, + 505..507, ), ( Text( @@ -369,15 +345,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "wget", - ), - 575..579, - ), - ( - Text( - "\"", + "wget \"", ), - 580..581, + 574..581, ), ( Expression( @@ -395,21 +365,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" -P \"", ), - 586..587, - ), - ( - CommandOption( - "-P", - ), - 588..590, - ), - ( - Text( - "\"", - ), - 591..592, + 586..592, ), ( Expression( @@ -427,9 +385,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 598..599, + 598..600, ), ( Text( @@ -515,15 +473,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "aria2c", - ), - 670..676, - ), - ( - Text( - "\"", + "aria2c \"", ), - 677..678, + 669..678, ), ( Expression( @@ -541,21 +493,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", - ), - 683..684, - ), - ( - CommandOption( - "-d", - ), - 685..687, - ), - ( - Text( - "\"", + "\" -d \"", ), - 688..689, + 683..689, ), ( Expression( @@ -573,9 +513,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" ", ), - 695..696, + 695..697, ), ( Text( diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha050__stdlib_math.snap b/tests/grammar/snapshots/r#mod__grammar__alpha050__stdlib_math.snap index 50afb09..1b1a9d3 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha050__stdlib_math.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha050__stdlib_math.snap @@ -284,21 +284,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", + "echo \"(", ), - 299..303, - ), - ( - Text( - "\"", - ), - 304..305, - ), - ( - Text( - "(", - ), - 305..306, + 298..306, ), ( Expression( @@ -316,63 +304,9 @@ expression: 
parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "+", - ), - 314..315, - ), - ( - Text( - "0", + "+0.5)/1\" | bc ", ), - 315..316, - ), - ( - Text( - ".", - ), - 316..317, - ), - ( - Text( - "5", - ), - 317..318, - ), - ( - Text( - ")", - ), - 318..319, - ), - ( - Text( - "/", - ), - 319..320, - ), - ( - Text( - "1", - ), - 320..321, - ), - ( - Text( - "\"", - ), - 321..322, - ), - ( - Text( - "|", - ), - 323..324, - ), - ( - Text( - "bc", - ), - 325..327, + 314..328, ), ( Text( @@ -438,21 +372,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", + "echo \"(", ), - 363..367, - ), - ( - Text( - "\"", - ), - 368..369, - ), - ( - Text( - "(", - ), - 369..370, + 362..370, ), ( Expression( @@ -468,59 +390,11 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), 370..378, ), - ( - CommandOption( - "-0", - ), - 378..380, - ), - ( - Text( - ".", - ), - 380..381, - ), - ( - Text( - "5", - ), - 381..382, - ), - ( - Text( - ")", - ), - 382..383, - ), ( Text( - "/", + "-0.5)/1\" | bc ", ), - 383..384, - ), - ( - Text( - "1", - ), - 384..385, - ), - ( - Text( - "\"", - ), - 385..386, - ), - ( - Text( - "|", - ), - 387..388, - ), - ( - Text( - "bc", - ), - 389..391, + 378..392, ), ( Text( @@ -640,15 +514,9 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "echo", + "echo \"", ), - 546..550, - ), - ( - Text( - "\"", - ), - 551..552, + 545..552, ), ( Expression( @@ -666,195 +534,57 @@ expression: parse_unwrap(&tokenize(&stdlib)) ), ( Text( - "\"", + "\" | awk '", ), - 560..561, + 560..569, ), ( Text( - "|", - ), - 562..563, - ), - ( - Text( - "awk", - ), - 564..567, - ), - ( - Text( - "'", - ), - 568..569, - ), - ( - Escape( - "{", + "\\{", ), 569..571, ), ( Text( - "printf", - ), - 571..577, - ), - ( - Text( - "\"", - ), - 578..579, - ), - ( - Text( - "%", - ), - 579..580, - ), - ( - Text( - "d", - ), - 580..581, - ), - ( - Text( - "\"", + "printf \"%d\", (", ), - 581..582, + 571..585, ), ( Text( - ",", - ), - 582..583, - ), - ( - Text( - "(", - ), - 584..585, - ), - ( - Escape( - "$", + "\\$", ), 585..587, ), ( Text( - "1", - ), - 587..588, - ), - ( - Text( - "<", - ), - 589..590, - ), - ( - Text( - "0", - ), - 591..592, - ), - ( - Text( - "?", - ), - 593..594, - ), - ( - Text( - "int", + "1 < 0 ? 
int(", ), - 595..598, + 587..599, ), ( Text( - "(", - ), - 598..599, - ), - ( - Escape( - "$", + "\\$", ), 599..601, ), ( Text( - "1", - ), - 601..602, - ), - ( - Text( - ")", - ), - 602..603, - ), - ( - CommandOption( - "-1", - ), - 604..607, - ), - ( - Text( - ":", - ), - 608..609, - ), - ( - Text( - "int", + "1) - 1 : int(", ), - 610..613, + 601..614, ), ( Text( - "(", - ), - 613..614, - ), - ( - Escape( - "$", + "\\$", ), 614..616, ), ( Text( - "1", - ), - 616..617, - ), - ( - Text( - ")", - ), - 617..618, - ), - ( - Text( - ")", - ), - 618..619, - ), - ( - Text( - "}", - ), - 619..620, - ), - ( - Text( - "'", + "1))}' ", ), - 620..621, + 616..622, ), ( Text( diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha050__stdlib_text-2.snap b/tests/grammar/snapshots/r#mod__grammar__alpha050__stdlib_text-2.snap index 217b6fd..6e8290a 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha050__stdlib_text-2.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha050__stdlib_text-2.snap @@ -94,63 +94,27 @@ expression: parse_unwrap(&tokens) ), ( Text( - "echo", + "echo \"", ), - 281..285, + 280..287, ), ( Text( - "\"", - ), - 286..287, - ), - ( - Escape( - "$", + "\\$", ), 287..289, ), - ( - Escape( - "{", - ), - 289..291, - ), - ( - Text( - "BASH_VERSINFO", - ), - 291..304, - ), - ( - Text( - "[", - ), - 304..305, - ), ( Text( - "0", + "\\{", ), - 305..306, - ), - ( - Text( - "]", - ), - 306..307, - ), - ( - Text( - "}", - ), - 307..308, + 289..291, ), ( Text( - "\"", + "BASH_VERSINFO[0]}\" ", ), - 308..309, + 291..310, ), ( Text( @@ -211,63 +175,27 @@ expression: parse_unwrap(&tokens) ), ( Text( - "echo", + "echo \"", ), - 343..347, + 342..349, ), ( Text( - "\"", - ), - 348..349, - ), - ( - Escape( - "$", + "\\$", ), 349..351, ), - ( - Escape( - "{", - ), - 351..353, - ), - ( - Text( - "BASH_VERSINFO", - ), - 353..366, - ), - ( - Text( - "[", - ), - 366..367, - ), - ( - Text( - "1", - ), - 367..368, - ), - ( - Text( - "]", - ), - 368..369, - ), ( Text( - "}", + "\\{", ), - 369..370, + 351..353, ), ( Text( - "\"", + "BASH_VERSINFO[1]}\" ", ), - 370..371, + 353..372, ), ( Text( @@ -328,63 +256,27 @@ expression: parse_unwrap(&tokens) ), ( Text( - "echo", + "echo \"", ), - 405..409, + 404..411, ), ( Text( - "\"", - ), - 410..411, - ), - ( - Escape( - "$", + "\\$", ), 411..413, ), - ( - Escape( - "{", - ), - 413..415, - ), - ( - Text( - "BASH_VERSINFO", - ), - 415..428, - ), - ( - Text( - "[", - ), - 428..429, - ), - ( - Text( - "2", - ), - 429..430, - ), - ( - Text( - "]", - ), - 430..431, - ), ( Text( - "}", + "\\{", ), - 431..432, + 413..415, ), ( Text( - "\"", + "BASH_VERSINFO[2]}\" ", ), - 432..433, + 415..434, ), ( Text( @@ -643,6 +535,12 @@ expression: parse_unwrap(&tokens) ), 721..722, ), + ( + Text( + "", + ), + 722..723, + ), ( Expression( ( @@ -668,25 +566,19 @@ expression: parse_unwrap(&tokens) ), ( Text( - "=", + "=\"", ), - 738..739, + 738..740, ), ( Text( - "\"", - ), - 739..740, - ), - ( - Escape( - "$", + "\\$", ), 740..742, ), ( - Escape( - "{", + Text( + "\\{", ), 742..744, ), @@ -715,25 +607,19 @@ expression: parse_unwrap(&tokens) ), ( Text( - "//", + "//\"", ), - 759..761, + 759..762, ), ( Text( - "\"", - ), - 761..762, - ), - ( - Escape( - "$", + "\\$", ), 762..764, ), ( - Escape( - "{", + Text( + "\\{", ), 764..766, ), @@ -762,37 +648,19 @@ expression: parse_unwrap(&tokens) ), ( Text( - "}", - ), - 781..782, - ), - ( - Text( - "\"", - ), - 782..783, - ), - ( - Text( - "/", + "}\"/\"", ), - 783..784, + 781..785, ), ( Text( - "\"", - ), - 784..785, - ), - ( - Escape( - 
"$", + "\\$", ), 785..787, ), ( - Escape( - "{", + Text( + "\\{", ), 787..789, ), @@ -821,27 +689,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "}", - ), - 805..806, - ), - ( - Text( - "\"", - ), - 806..807, - ), - ( - Text( - "}", - ), - 807..808, - ), - ( - Text( - "\"", + "}\"}\" ", ), - 808..809, + 805..810, ), ( Text( @@ -893,6 +743,12 @@ expression: parse_unwrap(&tokens) ), 839..840, ), + ( + Text( + "", + ), + 840..841, + ), ( Expression( ( @@ -918,25 +774,19 @@ expression: parse_unwrap(&tokens) ), ( Text( - "=", + "=\"", ), - 856..857, + 856..858, ), ( Text( - "\"", - ), - 857..858, - ), - ( - Escape( - "$", + "\\$", ), 858..860, ), ( - Escape( - "{", + Text( + "\\{", ), 860..862, ), @@ -965,25 +815,19 @@ expression: parse_unwrap(&tokens) ), ( Text( - "//", + "//\"", ), - 877..879, + 877..880, ), ( Text( - "\"", - ), - 879..880, - ), - ( - Escape( - "$", + "\\$", ), 880..882, ), ( - Escape( - "{", + Text( + "\\{", ), 882..884, ), @@ -1012,31 +856,19 @@ expression: parse_unwrap(&tokens) ), ( Text( - "}", - ), - 899..900, - ), - ( - Text( - "\"", + "}\"/", ), - 900..901, + 899..902, ), ( Text( - "/", - ), - 901..902, - ), - ( - Escape( - "$", + "\\$", ), 902..904, ), ( - Escape( - "{", + Text( + "\\{", ), 904..906, ), @@ -1065,21 +897,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "}", - ), - 922..923, - ), - ( - Text( - "}", - ), - 923..924, - ), - ( - Text( - "\"", + "}}\" ", ), - 924..925, + 922..926, ), ( Text( @@ -1310,6 +1130,12 @@ expression: parse_unwrap(&tokens) ), 1206..1207, ), + ( + Text( + "", + ), + 1207..1208, + ), ( Expression( ( @@ -1335,25 +1161,19 @@ expression: parse_unwrap(&tokens) ), ( Text( - "=", + "=\"", ), - 1223..1224, + 1223..1225, ), ( Text( - "\"", - ), - 1224..1225, - ), - ( - Escape( - "$", + "\\$", ), 1225..1227, ), ( - Escape( - "{", + Text( + "\\{", ), 1227..1229, ), @@ -1382,25 +1202,19 @@ expression: parse_unwrap(&tokens) ), ( Text( - "/", + "/\"", ), - 1244..1245, + 1244..1246, ), ( Text( - "\"", - ), - 1245..1246, - ), - ( - Escape( - "$", + "\\$", ), 1246..1248, ), ( - Escape( - "{", + Text( + "\\{", ), 1248..1250, ), @@ -1429,37 +1243,19 @@ expression: parse_unwrap(&tokens) ), ( Text( - "}", + "}\"/\"", ), - 1265..1266, + 1265..1269, ), ( Text( - "\"", - ), - 1266..1267, - ), - ( - Text( - "/", - ), - 1267..1268, - ), - ( - Text( - "\"", - ), - 1268..1269, - ), - ( - Escape( - "$", + "\\$", ), 1269..1271, ), ( - Escape( - "{", + Text( + "\\{", ), 1271..1273, ), @@ -1488,27 +1284,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "}", - ), - 1289..1290, - ), - ( - Text( - "\"", - ), - 1290..1291, - ), - ( - Text( - "}", - ), - 1291..1292, - ), - ( - Text( - "\"", + "}\"}\" ", ), - 1292..1293, + 1289..1294, ), ( Text( @@ -1560,6 +1338,12 @@ expression: parse_unwrap(&tokens) ), 1323..1324, ), + ( + Text( + "", + ), + 1324..1325, + ), ( Expression( ( @@ -1585,25 +1369,19 @@ expression: parse_unwrap(&tokens) ), ( Text( - "=", + "=\"", ), - 1340..1341, + 1340..1342, ), ( Text( - "\"", - ), - 1341..1342, - ), - ( - Escape( - "$", + "\\$", ), 1342..1344, ), ( - Escape( - "{", + Text( + "\\{", ), 1344..1346, ), @@ -1632,25 +1410,19 @@ expression: parse_unwrap(&tokens) ), ( Text( - "/", + "/\"", ), - 1361..1362, + 1361..1363, ), ( Text( - "\"", - ), - 1362..1363, - ), - ( - Escape( - "$", + "\\$", ), 1363..1365, ), ( - Escape( - "{", + Text( + "\\{", ), 1365..1367, ), @@ -1679,31 +1451,19 @@ expression: parse_unwrap(&tokens) ), ( Text( - "}", - ), - 1382..1383, - ), - ( - Text( - "\"", + "}\"/", ), - 1383..1384, + 1382..1385, ), ( Text( - 
"/", - ), - 1384..1385, - ), - ( - Escape( - "$", + "\\$", ), 1385..1387, ), ( - Escape( - "{", + Text( + "\\{", ), 1387..1389, ), @@ -1732,21 +1492,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "}", + "}}\" ", ), - 1405..1406, - ), - ( - Text( - "}", - ), - 1406..1407, - ), - ( - Text( - "\"", - ), - 1407..1408, + 1405..1409, ), ( Text( @@ -1942,195 +1690,87 @@ expression: parse_unwrap(&tokens) ), ( Text( - "re", + "re='", ), - 1679..1681, + 1678..1683, ), ( Text( - "=", + "\\b", ), - 1681..1682, + 1683..1685, ), ( Text( - "'", + "Copyright", ), - 1682..1683, - ), - ( - Escape( - "bCopyright", - ), - 1683..1694, - ), - ( - Escape( - "b", - ), - 1694..1696, + 1685..1694, ), ( Text( - ".", + "\\b", ), - 1696..1697, + 1694..1696, ), ( Text( - "+", + ".+", ), - 1697..1698, - ), - ( - Escape( - "bFree", - ), - 1698..1704, + 1696..1698, ), ( Text( - "Software", + "\\b", ), - 1705..1713, + 1698..1700, ), ( Text( - "Foundation", - ), - 1714..1724, - ), - ( - Escape( - "b'", + "Free Software Foundation", ), - 1724..1727, + 1700..1724, ), ( Text( - ";", + "\\b", ), - 1727..1728, + 1724..1726, ), ( Text( - "[", + "'; [[ ", ), - 1729..1730, + 1726..1732, ), ( Text( - "[", - ), - 1730..1731, - ), - ( - Escape( - "$", + "\\$", ), 1732..1734, ), ( Text( - "(", - ), - 1734..1735, - ), - ( - Text( - "sed", - ), - 1735..1738, - ), - ( - CommandOption( - "--version", - ), - 1739..1748, - ), - ( - Text( - "2", - ), - 1749..1750, - ), - ( - Text( - ">", - ), - 1750..1751, - ), - ( - Text( - "/", + "(sed --version 2>/dev/null) =~ ", ), - 1751..1752, + 1734..1765, ), ( Text( - "dev", + "\\$", ), - 1752..1755, + 1765..1767, ), ( Text( - "/", + "re ]] ", ), - 1755..1756, + 1767..1773, ), ( Text( - "null", + "$", ), - 1756..1760, - ), - ( - Text( - ")", - ), - 1760..1761, - ), - ( - Text( - "=", - ), - 1762..1763, - ), - ( - Text( - "~", - ), - 1763..1764, - ), - ( - Escape( - "$", - ), - 1765..1767, - ), - ( - Text( - "re", - ), - 1767..1769, - ), - ( - Text( - "]", - ), - 1770..1771, - ), - ( - Text( - "]", - ), - 1771..1772, - ), - ( - Text( - "$", - ), - 1773..1774, + 1773..1774, ), ], [], @@ -2221,129 +1861,57 @@ expression: parse_unwrap(&tokens) ), ( Text( - "re", + "re='", ), - 1845..1847, + 1844..1849, ), ( Text( - "=", + "\\b", ), - 1847..1848, + 1849..1851, ), ( Text( - "'", - ), - 1848..1849, - ), - ( - Escape( - "bBusyBox", - ), - 1849..1858, - ), - ( - Escape( - "b'", + "BusyBox", ), - 1858..1861, + 1851..1858, ), ( Text( - ";", + "\\b", ), - 1861..1862, + 1858..1860, ), ( Text( - "[", + "'; [[ ", ), - 1863..1864, + 1860..1866, ), ( Text( - "[", - ), - 1864..1865, - ), - ( - Escape( - "$", + "\\$", ), 1866..1868, ), ( Text( - "(", - ), - 1868..1869, - ), - ( - Text( - "sed", - ), - 1869..1872, - ), - ( - Text( - "2", - ), - 1873..1874, - ), - ( - Text( - ">", - ), - 1874..1875, - ), - ( - Text( - "&1", - ), - 1875..1877, - ), - ( - Text( - ")", - ), - 1877..1878, - ), - ( - Text( - "=", + "(sed 2>&1) =~ ", ), - 1879..1880, + 1868..1882, ), ( Text( - "~", - ), - 1880..1881, - ), - ( - Escape( - "$", + "\\$", ), 1882..1884, ), ( Text( - "re", - ), - 1884..1886, - ), - ( - Text( - "]", - ), - 1887..1888, - ), - ( - Text( - "]", + "re ]] ", ), - 1888..1889, + 1884..1890, ), ( Text( @@ -2675,9 +2243,9 @@ expression: parse_unwrap(&tokens) Text( [ ( - Escape( + Text( ( - "\\", + "\\\\", 2306..2308, ), ), @@ -2747,9 +2315,9 @@ expression: parse_unwrap(&tokens) Text( [ ( - Escape( + Text( ( - "\\", + "\\\\", 2363..2365, ), ), @@ -2881,9 +2449,9 @@ expression: parse_unwrap(&tokens) Text( [ ( - Escape( + 
Text( ( - "\\", + "\\\\", 2561..2563, ), ), @@ -2906,18 +2474,18 @@ expression: parse_unwrap(&tokens) Text( [ ( - Escape( + Text( ( - "\\", + "\\\\", 2568..2570, ), ), 2568..2570, ), ( - Escape( + Text( ( - "b", + "\\b", 2570..2572, ), ), @@ -3044,15 +2612,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "echo", - ), - 2803..2807, - ), - ( - Text( - "\"", + "echo \"", ), - 2808..2809, + 2802..2809, ), ( Expression( @@ -3070,51 +2632,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", - ), - 2817..2818, - ), - ( - Text( - "|", - ), - 2819..2820, - ), - ( - Text( - "sed", - ), - 2821..2824, - ), - ( - CommandOption( - "-r", - ), - 2825..2827, - ), - ( - CommandOption( - "-e", - ), - 2828..2830, - ), - ( - Text( - "\"", - ), - 2831..2832, - ), - ( - Text( - "s", - ), - 2832..2833, - ), - ( - Text( - "/", + "\" | sed -r -e \"s/", ), - 2833..2834, + 2817..2834, ), ( Expression( @@ -3152,21 +2672,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "/", - ), - 2857..2858, - ), - ( - Text( - "g", - ), - 2858..2859, - ), - ( - Text( - "\"", + "/g\" ", ), - 2859..2860, + 2857..2861, ), ( Text( @@ -3221,15 +2729,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "echo", - ), - 2909..2913, - ), - ( - Text( - "\"", + "echo \"", ), - 2914..2915, + 2908..2915, ), ( Expression( @@ -3247,51 +2749,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", - ), - 2923..2924, - ), - ( - Text( - "|", - ), - 2925..2926, - ), - ( - Text( - "sed", - ), - 2927..2930, - ), - ( - CommandOption( - "-E", - ), - 2931..2933, - ), - ( - CommandOption( - "-e", - ), - 2934..2936, - ), - ( - Text( - "\"", - ), - 2937..2938, - ), - ( - Text( - "s", - ), - 2938..2939, - ), - ( - Text( - "/", + "\" | sed -E -e \"s/", ), - 2939..2940, + 2923..2940, ), ( Expression( @@ -3329,21 +2789,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "/", - ), - 2963..2964, - ), - ( - Text( - "g", - ), - 2964..2965, - ), - ( - Text( - "\"", + "/g\" ", ), - 2965..2966, + 2963..2967, ), ( Text( @@ -3495,9 +2943,9 @@ expression: parse_unwrap(&tokens) Text( [ ( - Escape( + Text( ( - "|", + "\\|", 3231..3233, ), ), @@ -3562,15 +3010,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "echo", - ), - 3276..3280, - ), - ( - Text( - "\"", + "echo \"", ), - 3281..3282, + 3275..3282, ), ( Expression( @@ -3588,45 +3030,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", - ), - 3290..3291, - ), - ( - Text( - "|", - ), - 3292..3293, - ), - ( - Text( - "sed", - ), - 3294..3297, - ), - ( - CommandOption( - "-e", - ), - 3298..3300, - ), - ( - Text( - "\"", - ), - 3301..3302, - ), - ( - Text( - "s", + "\" | sed -e \"s/", ), - 3302..3303, - ), - ( - Text( - "/", - ), - 3303..3304, + 3290..3304, ), ( Expression( @@ -3664,21 +3070,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "/", - ), - 3327..3328, - ), - ( - Text( - "g", - ), - 3328..3329, - ), - ( - Text( - "\"", + "/g\" ", ), - 3329..3330, + 3327..3331, ), ( Text( @@ -3832,21 +3226,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "IFS", - ), - 3530..3533, - ), - ( - Text( - "=", - ), - 3533..3534, - ), - ( - Text( - "\"", + "IFS=\"", ), - 3534..3535, + 3529..3535, ), ( Expression( @@ -3864,33 +3246,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", - ), - 3546..3547, - ), - ( - Text( - "read", - ), - 3548..3552, - ), - ( - CommandOption( - "-rd", - ), - 3553..3556, - ), - ( - Text( - "''", - ), - 3557..3559, - ), - ( - CommandOption( - "-a", + "\" read -rd '' -a ", ), - 3560..3562, + 3546..3563, ), ( Expression( @@ -3917,49 +3275,13 @@ expression: parse_unwrap(&tokens) ), ( Text( 
- "<", - ), - 3579..3580, - ), - ( - Text( - "<", - ), - 3581..3582, - ), - ( - Text( - "(", - ), - 3582..3583, - ), - ( - Text( - "printf", + " < <(printf %s \"", ), - 3583..3589, + 3578..3594, ), ( Text( - "%", - ), - 3590..3591, - ), - ( - Text( - "s", - ), - 3591..3592, - ), - ( - Text( - "\"", - ), - 3593..3594, - ), - ( - Escape( - "$", + "\\$", ), 3594..3596, ), @@ -3988,15 +3310,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", - ), - 3609..3610, - ), - ( - Text( - ")", + "\") ", ), - 3610..3611, + 3609..3612, ), ( Text( @@ -4121,9 +3437,9 @@ expression: parse_unwrap(&tokens) Text( [ ( - Escape( + Text( ( - "n", + "\\n", 3778..3780, ), ), @@ -4229,7 +3545,17 @@ expression: parse_unwrap(&tokens) ), ( Text( - [], + [ + ( + Text( + ( + " ", + 3926..3927, + ), + ), + 3926..3927, + ), + ], ), 3925..3928, ), @@ -4344,21 +3670,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "IFS", - ), - 4051..4054, - ), - ( - Text( - "=", - ), - 4054..4055, - ), - ( - Text( - "\"", + "IFS=\"", ), - 4055..4056, + 4050..4056, ), ( Expression( @@ -4376,37 +3690,19 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", - ), - 4067..4068, - ), - ( - Text( - ";", - ), - 4069..4070, - ), - ( - Text( - "echo", + "\" ; echo \"", ), - 4071..4075, + 4067..4077, ), ( Text( - "\"", - ), - 4076..4077, - ), - ( - Escape( - "$", + "\\$", ), 4077..4079, ), ( - Escape( - "{", + Text( + "\\{", ), 4079..4081, ), @@ -4435,33 +3731,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "[", - ), - 4094..4095, - ), - ( - Text( - "*", - ), - 4095..4096, - ), - ( - Text( - "]", - ), - 4096..4097, - ), - ( - Text( - "}", - ), - 4097..4098, - ), - ( - Text( - "\"", + "[*]}\" ", ), - 4098..4099, + 4094..4100, ), ( Text( @@ -4563,15 +3835,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "echo", - ), - 4215..4219, - ), - ( - Text( - "\"", + "echo \"", ), - 4220..4221, + 4214..4221, ), ( Expression( @@ -4589,105 +3855,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", - ), - 4227..4228, - ), - ( - Text( - "|", - ), - 4229..4230, - ), - ( - Text( - "sed", - ), - 4231..4234, - ), - ( - CommandOption( - "-e", - ), - 4235..4237, - ), - ( - Text( - "'s", - ), - 4238..4240, - ), - ( - Text( - "/", - ), - 4240..4241, - ), - ( - Text( - "^", - ), - 4241..4242, - ), - ( - Text( - "[", - ), - 4242..4243, - ), - ( - Text( - "[", - ), - 4243..4244, - ), - ( - Text( - ":", - ), - 4244..4245, - ), - ( - Text( - "space", - ), - 4245..4250, - ), - ( - Text( - ":", - ), - 4250..4251, - ), - ( - Text( - "]", - ), - 4251..4252, - ), - ( - Text( - "]", - ), - 4252..4253, - ), - ( - Text( - "*", - ), - 4253..4254, - ), - ( - Text( - "//", - ), - 4254..4256, - ), - ( - Text( - "'", + "\" | sed -e 's/^[[:space:]]*//' ", ), - 4256..4257, + 4227..4258, ), ( Text( @@ -4789,15 +3959,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "echo", - ), - 4374..4378, - ), - ( - Text( - "\"", + "echo \"", ), - 4379..4380, + 4373..4380, ), ( Expression( @@ -4815,105 +3979,21 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", - ), - 4386..4387, - ), - ( - Text( - "|", - ), - 4388..4389, - ), - ( - Text( - "sed", - ), - 4390..4393, - ), - ( - CommandOption( - "-e", - ), - 4394..4396, - ), - ( - Text( - "'s", - ), - 4397..4399, - ), - ( - Text( - "/", - ), - 4399..4400, - ), - ( - Text( - "[", - ), - 4400..4401, - ), - ( - Text( - "[", - ), - 4401..4402, - ), - ( - Text( - ":", - ), - 4402..4403, - ), - ( - Text( - "space", - ), - 4403..4408, - ), - ( - Text( - ":", - ), - 4408..4409, - ), - ( - Text( - "]", - ), - 4409..4410, - ), - ( - Text( - 
"]", + "\" | sed -e 's/[[:space:]]*", ), - 4410..4411, + 4386..4412, ), ( Text( - "*", - ), - 4411..4412, - ), - ( - Escape( - "$", + "\\$", ), 4412..4414, ), ( Text( - "//", - ), - 4414..4416, - ), - ( - Text( - "'", + "//' ", ), - 4416..4417, + 4414..4418, ), ( Text( @@ -5122,15 +4202,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "echo", - ), - 4644..4648, - ), - ( - Text( - "\"", + "echo \"", ), - 4649..4650, + 4643..4650, ), ( Expression( @@ -5148,105 +4222,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", - ), - 4656..4657, - ), - ( - Text( - "|", - ), - 4658..4659, - ), - ( - Text( - "tr", - ), - 4660..4662, - ), - ( - Text( - "'", - ), - 4663..4664, - ), - ( - Text( - "[", - ), - 4664..4665, - ), - ( - Text( - ":", - ), - 4665..4666, - ), - ( - Text( - "upper", - ), - 4666..4671, - ), - ( - Text( - ":", - ), - 4671..4672, - ), - ( - Text( - "]", - ), - 4672..4673, - ), - ( - Text( - "'", - ), - 4673..4674, - ), - ( - Text( - "'", - ), - 4675..4676, - ), - ( - Text( - "[", - ), - 4676..4677, - ), - ( - Text( - ":", - ), - 4677..4678, - ), - ( - Text( - "lower", - ), - 4678..4683, - ), - ( - Text( - ":", - ), - 4683..4684, - ), - ( - Text( - "]", - ), - 4684..4685, - ), - ( - Text( - "'", + "\" | tr '[:upper:]' '[:lower:]' ", ), - 4685..4686, + 4656..4687, ), ( Text( @@ -5348,15 +4326,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "echo", - ), - 4796..4800, - ), - ( - Text( - "\"", + "echo \"", ), - 4801..4802, + 4795..4802, ), ( Expression( @@ -5374,111 +4346,15 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", - ), - 4808..4809, - ), - ( - Text( - "|", - ), - 4810..4811, - ), - ( - Text( - "tr", + "\" | tr '[:lower:]' '[:upper:]' ", ), - 4812..4814, + 4808..4839, ), ( Text( - "'", + "$", ), - 4815..4816, - ), - ( - Text( - "[", - ), - 4816..4817, - ), - ( - Text( - ":", - ), - 4817..4818, - ), - ( - Text( - "lower", - ), - 4818..4823, - ), - ( - Text( - ":", - ), - 4823..4824, - ), - ( - Text( - "]", - ), - 4824..4825, - ), - ( - Text( - "'", - ), - 4825..4826, - ), - ( - Text( - "'", - ), - 4827..4828, - ), - ( - Text( - "[", - ), - 4828..4829, - ), - ( - Text( - ":", - ), - 4829..4830, - ), - ( - Text( - "upper", - ), - 4830..4835, - ), - ( - Text( - ":", - ), - 4835..4836, - ), - ( - Text( - "]", - ), - 4836..4837, - ), - ( - Text( - "'", - ), - 4837..4838, - ), - ( - Text( - "$", - ), - 4839..4840, + 4839..4840, ), ], [], @@ -5569,21 +4445,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "[", + "[ -n \"", ), - 4966..4967, - ), - ( - CommandOption( - "-n", - ), - 4968..4970, - ), - ( - Text( - "\"", - ), - 4971..4972, + 4965..4972, ), ( Expression( @@ -5601,33 +4465,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", - ), - 4978..4979, - ), - ( - Text( - "]", - ), - 4980..4981, - ), - ( - Text( - "&&", - ), - 4982..4984, - ), - ( - Text( - "[", + "\" ] && [ \"", ), - 4985..4986, - ), - ( - Text( - "\"", - ), - 4987..4988, + 4978..4988, ), ( Expression( @@ -5645,21 +4485,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", + "\" -eq \"", ), - 4994..4995, - ), - ( - CommandOption( - "-eq", - ), - 4996..4999, - ), - ( - Text( - "\"", - ), - 5000..5001, + 4994..5001, ), ( Expression( @@ -5677,51 +4505,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", + "\" ] 2>/dev/null ", ), - 5007..5008, - ), - ( - Text( - "]", - ), - 5009..5010, - ), - ( - Text( - "2", - ), - 5011..5012, - ), - ( - Text( - ">", - ), - 5012..5013, - ), - ( - Text( - "/", - ), - 5013..5014, - ), - ( - Text( - "dev", - ), - 5014..5017, - ), - ( - Text( - "/", - ), - 
5017..5018, - ), - ( - Text( - "null", - ), - 5018..5022, + 5007..5023, ), ( Text( @@ -5864,92 +4650,11 @@ expression: parse_unwrap(&tokens) ( Text( ( - "^", - 5183..5184, - ), - ), - 5183..5184, - ), - ( - Text( - ( - "-", - 5184..5185, - ), - ), - 5184..5185, - ), - ( - Text( - ( - "?", - 5185..5186, + "^-?[0-9]+$", + 5183..5193, ), ), - 5185..5186, - ), - ( - Text( - ( - "[", - 5186..5187, - ), - ), - 5186..5187, - ), - ( - Text( - ( - "0", - 5187..5188, - ), - ), - 5187..5188, - ), - ( - Text( - ( - "-", - 5188..5189, - ), - ), - 5188..5189, - ), - ( - Text( - ( - "9", - 5189..5190, - ), - ), - 5189..5190, - ), - ( - Text( - ( - "]", - 5190..5191, - ), - ), - 5190..5191, - ), - ( - Text( - ( - "+", - 5191..5192, - ), - ), - 5191..5192, - ), - ( - Text( - ( - "$", - 5192..5193, - ), - ), - 5192..5193, + 5183..5193, ), ], ), @@ -5979,88 +4684,16 @@ expression: parse_unwrap(&tokens) ( Text( ( - "^", - 5213..5214, - ), - ), - 5213..5214, - ), - ( - Text( - ( - "-", - 5214..5215, - ), - ), - 5214..5215, - ), - ( - Text( - ( - "?", - 5215..5216, - ), - ), - 5215..5216, - ), - ( - Text( - ( - "[", - 5216..5217, - ), - ), - 5216..5217, - ), - ( - Text( - ( - "0", - 5217..5218, - ), - ), - 5217..5218, - ), - ( - Text( - ( - "-", - 5218..5219, + "^-?[0-9]*", + 5213..5222, ), ), - 5218..5219, + 5213..5222, ), ( Text( ( - "9", - 5219..5220, - ), - ), - 5219..5220, - ), - ( - Text( - ( - "]", - 5220..5221, - ), - ), - 5220..5221, - ), - ( - Text( - ( - "*", - 5221..5222, - ), - ), - 5221..5222, - ), - ( - Escape( - ( - ".", + "\\.", 5222..5224, ), ), @@ -6069,65 +4702,11 @@ expression: parse_unwrap(&tokens) ( Text( ( - "[", - 5224..5225, - ), - ), - 5224..5225, - ), - ( - Text( - ( - "0", - 5225..5226, - ), - ), - 5225..5226, - ), - ( - Text( - ( - "-", - 5226..5227, - ), - ), - 5226..5227, - ), - ( - Text( - ( - "9", - 5227..5228, - ), - ), - 5227..5228, - ), - ( - Text( - ( - "]", - 5228..5229, - ), - ), - 5228..5229, - ), - ( - Text( - ( - "+", - 5229..5230, - ), - ), - 5229..5230, - ), - ( - Text( - ( - "$", - 5230..5231, + "[0-9]+$", + 5224..5231, ), ), - 5230..5231, + 5224..5231, ), ], ), @@ -6153,15 +4732,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "[", - ), - 5240..5241, - ), - ( - Text( - "[", + "[[ ", ), - 5241..5242, + 5239..5243, ), ( Expression( @@ -6179,15 +4752,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "=", + " =~ ", ), - 5250..5251, - ), - ( - Text( - "~", - ), - 5251..5252, + 5249..5253, ), ( Expression( @@ -6205,33 +4772,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "]", - ), - 5262..5263, - ), - ( - Text( - "]", - ), - 5263..5264, - ), - ( - Text( - "||", - ), - 5265..5267, - ), - ( - Text( - "[", - ), - 5268..5269, - ), - ( - Text( - "[", + " ]] || [[ ", ), - 5269..5270, + 5261..5271, ), ( Expression( @@ -6249,15 +4792,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "=", + " =~ ", ), - 5278..5279, - ), - ( - Text( - "~", - ), - 5279..5280, + 5277..5281, ), ( Expression( @@ -6275,15 +4812,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "]", + " ]] ", ), - 5292..5293, - ), - ( - Text( - "]", - ), - 5293..5294, + 5291..5295, ), ( Text( @@ -6444,67 +4975,19 @@ expression: parse_unwrap(&tokens) ), ( Text( - "for", + "for ((i=0; i<", ), - 5459..5462, + 5458..5472, ), ( Text( - "(", - ), - 5463..5464, - ), - ( - Text( - "(", - ), - 5464..5465, - ), - ( - Text( - "i", - ), - 5465..5466, - ), - ( - Text( - "=", - ), - 5466..5467, - ), - ( - Text( - "0", - ), - 5467..5468, - ), - ( - Text( - ";", - ), - 5468..5469, - ), - ( - Text( - "i", - ), - 5470..5471, 
- ), - ( - Text( - "<", - ), - 5471..5472, - ), - ( - Escape( - "$", + "\\$", ), 5472..5474, ), ( - Escape( - "{", + Text( + "\\{", ), 5474..5476, ), @@ -6539,57 +5022,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "}", - ), - 5490..5491, - ), - ( - Text( - ";", - ), - 5491..5492, - ), - ( - Text( - "i", - ), - 5493..5494, - ), - ( - Text( - "+", - ), - 5494..5495, - ), - ( - Text( - "+", - ), - 5495..5496, - ), - ( - Text( - ")", - ), - 5496..5497, - ), - ( - Text( - ")", - ), - 5497..5498, - ), - ( - Text( - ";", - ), - 5498..5499, - ), - ( - Text( - "do", + "}; i++)); do\n ", ), - 5500..5502, + 5490..5511, ), ( Expression( @@ -6616,31 +5051,19 @@ expression: parse_unwrap(&tokens) ), ( Text( - "+=", + "+=( \"", ), - 5525..5527, + 5525..5530, ), ( Text( - "(", - ), - 5527..5528, - ), - ( - Text( - "\"", - ), - 5529..5530, - ), - ( - Escape( - "$", + "\\$", ), 5530..5532, ), ( - Escape( - "{", + Text( + "\\{", ), 5532..5534, ), @@ -6657,75 +5080,33 @@ expression: parse_unwrap(&tokens) ( "text", 5542..5546, - ), - ), - 5542..5546, - ), - ), - 5542..5546, - ), - ), - 5534..5547, - ), - ( - Text( - ":", - ), - 5547..5548, - ), - ( - Escape( - "$", - ), - 5548..5550, - ), - ( - Text( - "i", - ), - 5550..5551, - ), - ( - Text( - ":", - ), - 5551..5552, - ), - ( - Text( - "1", - ), - 5552..5553, - ), - ( - Text( - "}", - ), - 5553..5554, - ), - ( - Text( - "\"", + ), + ), + 5542..5546, + ), + ), + 5542..5546, + ), ), - 5554..5555, + 5534..5547, ), ( Text( - ")", + ":", ), - 5556..5557, + 5547..5548, ), ( Text( - ";", + "\\$", ), - 5557..5558, + 5548..5550, ), ( Text( - "done", + "i:1}\" );\n done ", ), - 5563..5567, + 5550..5568, ), ( Text( @@ -6868,27 +5249,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "if", - ), - 5713..5715, - ), - ( - Text( - "[", - ), - 5716..5717, - ), - ( - Text( - "[", - ), - 5717..5718, - ), - ( - Text( - "\"", + "if [[ \"", ), - 5719..5720, + 5712..5720, ), ( Expression( @@ -6906,27 +5269,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", - ), - 5728..5729, - ), - ( - Text( - "==", - ), - 5730..5732, - ), - ( - Text( - "*", - ), - 5733..5734, - ), - ( - Text( - "\"", + "\" == *\"", ), - 5734..5735, + 5728..5735, ), ( Expression( @@ -6944,57 +5289,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", - ), - 5743..5744, - ), - ( - Text( - "*", - ), - 5744..5745, - ), - ( - Text( - "]", - ), - 5746..5747, - ), - ( - Text( - "]", - ), - 5747..5748, - ), - ( - Text( - ";", - ), - 5748..5749, - ), - ( - Text( - "then", - ), - 5750..5754, - ), - ( - Text( - "echo", - ), - 5759..5763, - ), - ( - Text( - "1", - ), - 5764..5765, - ), - ( - Text( - "fi", + "\"* ]]; then\n echo 1\n fi ", ), - 5768..5770, + 5743..5771, ), ( Text( @@ -7704,9 +6001,9 @@ expression: parse_unwrap(&tokens) Text( [ ( - Escape( + Text( ( - "/", + "\\/", 6547..6549, ), ), @@ -7853,9 +6150,9 @@ expression: parse_unwrap(&tokens) Text( [ ( - Escape( + Text( ( - "\\", + "\\\\", 6768..6770, ), ), @@ -7878,18 +6175,18 @@ expression: parse_unwrap(&tokens) Text( [ ( - Escape( + Text( ( - "\\", + "\\\\", 6775..6777, ), ), 6775..6777, ), ( - Escape( + Text( ( - "b", + "\\b", 6777..6779, ), ), @@ -8035,9 +6332,9 @@ expression: parse_unwrap(&tokens) Text( [ ( - Escape( + Text( ( - "b", + "\\b", 7089..7091, ), ), @@ -8051,9 +6348,9 @@ expression: parse_unwrap(&tokens) Text( [ ( - Escape( + Text( ( - "\\", + "\\\\", 7095..7097, ), ), @@ -8098,15 +6395,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "echo", - ), - 7128..7132, - ), - ( - Text( - "\"", + "echo \"", ), - 7133..7134, + 7127..7134, 
), ( Expression( @@ -8124,45 +6415,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", - ), - 7142..7143, - ), - ( - Text( - "|", - ), - 7144..7145, - ), - ( - Text( - "sed", - ), - 7146..7149, - ), - ( - CommandOption( - "-r", - ), - 7150..7152, - ), - ( - CommandOption( - "-ne", - ), - 7153..7156, - ), - ( - Text( - "\"", - ), - 7157..7158, - ), - ( - Text( - "/", + "\" | sed -r -ne \"/", ), - 7158..7159, + 7142..7159, ), ( Expression( @@ -8180,21 +6435,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "/", - ), - 7167..7168, - ), - ( - Text( - "p", - ), - 7168..7169, - ), - ( - Text( - "\"", + "/p\" ", ), - 7169..7170, + 7167..7171, ), ( Text( @@ -8247,15 +6490,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "echo", - ), - 7221..7225, - ), - ( - Text( - "\"", + "echo \"", ), - 7226..7227, + 7220..7227, ), ( Expression( @@ -8273,45 +6510,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", - ), - 7235..7236, - ), - ( - Text( - "|", - ), - 7237..7238, - ), - ( - Text( - "sed", - ), - 7239..7242, - ), - ( - CommandOption( - "-E", - ), - 7243..7245, - ), - ( - CommandOption( - "-ne", - ), - 7246..7249, - ), - ( - Text( - "\"", - ), - 7250..7251, - ), - ( - Text( - "/", + "\" | sed -E -ne \"/", ), - 7251..7252, + 7235..7252, ), ( Expression( @@ -8329,21 +6530,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "/", - ), - 7260..7261, - ), - ( - Text( - "p", - ), - 7261..7262, - ), - ( - Text( - "\"", + "/p\" ", ), - 7262..7263, + 7260..7264, ), ( Text( @@ -8494,9 +6683,9 @@ expression: parse_unwrap(&tokens) Text( [ ( - Escape( + Text( ( - "|", + "\\|", 7528..7530, ), ), @@ -8560,15 +6749,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "echo", - ), - 7575..7579, - ), - ( - Text( - "\"", + "echo \"", ), - 7580..7581, + 7574..7581, ), ( Expression( @@ -8586,39 +6769,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", - ), - 7589..7590, - ), - ( - Text( - "|", - ), - 7591..7592, - ), - ( - Text( - "sed", + "\" | sed -ne \"/", ), - 7593..7596, - ), - ( - CommandOption( - "-ne", - ), - 7597..7600, - ), - ( - Text( - "\"", - ), - 7601..7602, - ), - ( - Text( - "/", - ), - 7602..7603, + 7589..7603, ), ( Expression( @@ -8636,21 +6789,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "/", - ), - 7611..7612, - ), - ( - Text( - "p", - ), - 7612..7613, - ), - ( - Text( - "\"", + "/p\" ", ), - 7613..7614, + 7611..7615, ), ( Text( @@ -9074,15 +7215,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "echo", - ), - 8059..8063, - ), - ( - Text( - "\"", + "echo \"", ), - 8064..8065, + 8058..8065, ), ( Expression( @@ -9100,21 +7235,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", - ), - 8071..8072, - ), - ( - Text( - "|", - ), - 8073..8074, - ), - ( - Text( - "rev", + "\" | rev ", ), - 8075..8078, + 8071..8079, ), ( Text( @@ -9238,27 +7361,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "if", - ), - 8201..8203, - ), - ( - Text( - "[", - ), - 8204..8205, - ), - ( - Text( - "[", - ), - 8205..8206, - ), - ( - Text( - "\"", + "if [[ \"", ), - 8207..8208, + 8200..8208, ), ( Expression( @@ -9276,89 +7381,29 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", - ), - 8214..8215, - ), - ( - Text( - "==", - ), - 8216..8218, - ), - ( - Text( - "\"", - ), - 8219..8220, - ), - ( - Expression( - ( - Var( - ( - "prefix", - 8221..8227, - ), - ), - 8221..8227, - ), - ), - 8220..8228, - ), - ( - Text( - "\"", - ), - 8228..8229, - ), - ( - Text( - "*", - ), - 8229..8230, - ), - ( - Text( - "]", - ), - 8231..8232, - ), - ( - Text( - "]", - ), - 8232..8233, - ), - ( - Text( - ";", - ), 
- 8233..8234, - ), - ( - Text( - "then", - ), - 8235..8239, - ), - ( - Text( - "echo", + "\" == \"", ), - 8244..8248, + 8214..8220, ), ( - Text( - "1", + Expression( + ( + Var( + ( + "prefix", + 8221..8227, + ), + ), + 8221..8227, + ), ), - 8249..8250, + 8220..8228, ), ( Text( - "fi", + "\"* ]]; then\n echo 1\n fi ", ), - 8253..8255, + 8228..8256, ), ( Text( @@ -9525,27 +7570,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "if", - ), - 8398..8400, - ), - ( - Text( - "[", - ), - 8401..8402, - ), - ( - Text( - "[", - ), - 8402..8403, - ), - ( - Text( - "\"", + "if [[ \"", ), - 8404..8405, + 8397..8405, ), ( Expression( @@ -9563,27 +7590,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", - ), - 8411..8412, - ), - ( - Text( - "==", - ), - 8413..8415, - ), - ( - Text( - "*", - ), - 8416..8417, - ), - ( - Text( - "\"", + "\" == *\"", ), - 8417..8418, + 8411..8418, ), ( Expression( @@ -9601,51 +7610,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", - ), - 8426..8427, - ), - ( - Text( - "]", - ), - 8428..8429, - ), - ( - Text( - "]", - ), - 8429..8430, - ), - ( - Text( - ";", - ), - 8430..8431, - ), - ( - Text( - "then", - ), - 8432..8436, - ), - ( - Text( - "echo", - ), - 8441..8445, - ), - ( - Text( - "1", - ), - 8446..8447, - ), - ( - Text( - "fi", + "\" ]]; then\n echo 1\n fi ", ), - 8450..8452, + 8426..8453, ), ( Text( @@ -10049,27 +8016,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "printf", - ), - 9012..9018, - ), - ( - Text( - "\"", - ), - 9019..9020, - ), - ( - Text( - "%", - ), - 9020..9021, - ), - ( - Text( - ".", + "printf \"%.", ), - 9021..9022, + 9011..9022, ), ( Expression( @@ -10087,45 +8036,27 @@ expression: parse_unwrap(&tokens) ), ( Text( - "s", - ), - 9030..9031, - ), - ( - Text( - "\"", + "s\" \"", ), - 9031..9032, + 9030..9034, ), ( Text( - "\"", - ), - 9033..9034, - ), - ( - Escape( - "$", + "\\$", ), 9034..9036, ), - ( - Escape( - "{", - ), - 9036..9038, - ), ( Text( - "text", + "\\{", ), - 9038..9042, + 9036..9038, ), ( Text( - ":", + "text: ", ), - 9042..9043, + 9038..9044, ), ( Expression( @@ -10143,15 +8074,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "}", - ), - 9051..9052, - ), - ( - Text( - "\"", + "}\" ", ), - 9052..9053, + 9051..9054, ), ( Text( @@ -10302,69 +8227,27 @@ expression: parse_unwrap(&tokens) ), ( Text( - "printf", - ), - 9316..9322, - ), - ( - Text( - "\"", - ), - 9323..9324, - ), - ( - Text( - "%", - ), - 9324..9325, - ), - ( - Text( - ".", - ), - 9325..9326, - ), - ( - Text( - "1s", - ), - 9326..9328, - ), - ( - Text( - "\"", + "printf \"%.1s\" \"", ), - 9328..9329, + 9315..9331, ), ( Text( - "\"", - ), - 9330..9331, - ), - ( - Escape( - "$", + "\\$", ), 9331..9333, ), - ( - Escape( - "{", - ), - 9333..9335, - ), ( Text( - "text", + "\\{", ), - 9335..9339, + 9333..9335, ), ( Text( - ":", + "text: ", ), - 9339..9340, + 9335..9341, ), ( Expression( @@ -10382,15 +8265,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "}", - ), - 9348..9349, - ), - ( - Text( - "\"", + "}\" ", ), - 9349..9350, + 9348..9351, ), ( Text( @@ -10628,25 +8505,19 @@ expression: parse_unwrap(&tokens) ), ( Text( - "echo", + "echo \"", ), - 9600..9604, + 9599..9606, ), ( Text( - "\"", - ), - 9605..9606, - ), - ( - Escape( - "$", + "\\$", ), 9606..9608, ), ( - Escape( - "{", + Text( + "\\{", ), 9608..9610, ), @@ -10675,21 +8546,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "^", - ), - 9623..9624, - ), - ( - Text( - "}", + "^}\" ", ), - 9624..9625, - ), - ( - Text( - "\"", - ), - 9625..9626, + 9623..9627, ), ( Text( @@ -10786,15 +8645,9 @@ 
expression: parse_unwrap(&tokens) ), ( Text( - "echo", - ), - 9741..9745, - ), - ( - Text( - "\"", + "echo \"", ), - 9746..9747, + 9740..9747, ), ( Expression( @@ -10812,49 +8665,13 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", - ), - 9753..9754, - ), - ( - Text( - "|", - ), - 9755..9756, - ), - ( - Text( - "sed", - ), - 9757..9760, - ), - ( - Text( - "\"", - ), - 9761..9762, - ), - ( - Text( - "s", + "\" | sed \"s/^", ), - 9762..9763, + 9753..9765, ), ( Text( - "/", - ), - 9763..9764, - ), - ( - Text( - "^", - ), - 9764..9765, - ), - ( - Escape( - "(", + "\\(", ), 9765..9767, ), @@ -10865,8 +8682,8 @@ expression: parse_unwrap(&tokens) 9767..9768, ), ( - Escape( - ")", + Text( + "\\)", ), 9768..9770, ), @@ -10877,28 +8694,22 @@ expression: parse_unwrap(&tokens) 9770..9771, ), ( - Escape( - "U", + Text( + "\\U", ), 9771..9773, ), - ( - Escape( - "1", - ), - 9773..9775, - ), ( Text( - "/", + "\\1", ), - 9775..9776, + 9773..9775, ), ( Text( - "\"", + "/\" ", ), - 9776..9777, + 9775..9778, ), ( Text( @@ -11287,21 +9098,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "printf", + "printf \"%", ), - 10146..10152, - ), - ( - Text( - "\"", - ), - 10153..10154, - ), - ( - Text( - "%", - ), - 10154..10155, + 10145..10155, ), ( Expression( @@ -11319,57 +9118,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "s", - ), - 10163..10164, - ), - ( - Text( - "\"", - ), - 10164..10165, - ), - ( - Text( - "\"", + "s\" \"\" | tr \" \" \"", ), - 10166..10167, - ), - ( - Text( - "\"", - ), - 10167..10168, - ), - ( - Text( - "|", - ), - 10169..10170, - ), - ( - Text( - "tr", - ), - 10171..10173, - ), - ( - Text( - "\"", - ), - 10174..10175, - ), - ( - Text( - "\"", - ), - 10176..10177, - ), - ( - Text( - "\"", - ), - 10178..10179, + 10163..10179, ), ( Expression( @@ -11387,9 +9138,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", + "\" ", ), - 10184..10185, + 10184..10186, ), ( Text( @@ -11679,21 +9430,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "printf", - ), - 10463..10469, - ), - ( - Text( - "\"", - ), - 10470..10471, - ), - ( - Text( - "%", + "printf \"%", ), - 10471..10472, + 10462..10472, ), ( Expression( @@ -11711,57 +9450,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "s", - ), - 10480..10481, - ), - ( - Text( - "\"", - ), - 10481..10482, - ), - ( - Text( - "\"", - ), - 10483..10484, - ), - ( - Text( - "\"", - ), - 10484..10485, - ), - ( - Text( - "|", - ), - 10486..10487, - ), - ( - Text( - "tr", - ), - 10488..10490, - ), - ( - Text( - "\"", - ), - 10491..10492, - ), - ( - Text( - "\"", - ), - 10493..10494, - ), - ( - Text( - "\"", + "s\" \"\" | tr \" \" \"", ), - 10495..10496, + 10480..10496, ), ( Expression( @@ -11779,9 +9470,9 @@ expression: parse_unwrap(&tokens) ), ( Text( - "\"", + "\" ", ), - 10501..10502, + 10501..10503, ), ( Text( diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha050__stdlib_text.snap b/tests/grammar/snapshots/r#mod__grammar__alpha050__stdlib_text.snap index 5ee2d62..a3ec864 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha050__stdlib_text.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha050__stdlib_text.snap @@ -17,15 +17,9 @@ expression: tokens ), ( Token( - "#", - ), - 208..209, - ), - ( - Token( - "[", + "#[", ), - 209..210, + 208..210, ), ( Token( @@ -125,75 +119,27 @@ expression: tokens ), ( Token( - "echo", - ), - 281..285, - ), - ( - Token( - "\"", - ), - 286..287, - ), - ( - Token( - "\\", - ), - 287..288, - ), - ( - Token( - "$", - ), - 288..289, - ), - ( - Token( - "\\", - ), - 289..290, - ), - ( - Token( - 
"{", - ), - 290..291, - ), - ( - Token( - "BASH_VERSINFO", - ), - 291..304, - ), - ( - Token( - "[", - ), - 304..305, - ), - ( - Token( - "0", + "echo \"", ), - 305..306, + 280..287, ), ( Token( - "]", + "\\$", ), - 306..307, + 287..289, ), ( Token( - "}", + "\\{", ), - 307..308, + 289..291, ), ( Token( - "\"", + "BASH_VERSINFO[0]}\" ", ), - 308..309, + 291..310, ), ( Token( @@ -245,75 +191,27 @@ expression: tokens ), ( Token( - "echo", - ), - 343..347, - ), - ( - Token( - "\"", - ), - 348..349, - ), - ( - Token( - "\\", - ), - 349..350, - ), - ( - Token( - "$", - ), - 350..351, - ), - ( - Token( - "\\", + "echo \"", ), - 351..352, + 342..349, ), ( Token( - "{", - ), - 352..353, - ), - ( - Token( - "BASH_VERSINFO", - ), - 353..366, - ), - ( - Token( - "[", - ), - 366..367, - ), - ( - Token( - "1", - ), - 367..368, - ), - ( - Token( - "]", + "\\$", ), - 368..369, + 349..351, ), ( Token( - "}", + "\\{", ), - 369..370, + 351..353, ), ( Token( - "\"", + "BASH_VERSINFO[1]}\" ", ), - 370..371, + 353..372, ), ( Token( @@ -365,75 +263,27 @@ expression: tokens ), ( Token( - "echo", - ), - 405..409, - ), - ( - Token( - "\"", - ), - 410..411, - ), - ( - Token( - "\\", - ), - 411..412, - ), - ( - Token( - "$", - ), - 412..413, - ), - ( - Token( - "\\", - ), - 413..414, - ), - ( - Token( - "{", - ), - 414..415, - ), - ( - Token( - "BASH_VERSINFO", - ), - 415..428, - ), - ( - Token( - "[", - ), - 428..429, - ), - ( - Token( - "2", + "echo \"", ), - 429..430, + 404..411, ), ( Token( - "]", + "\\$", ), - 430..431, + 411..413, ), ( Token( - "}", + "\\{", ), - 431..432, + 413..415, ), ( Token( - "\"", + "BASH_VERSINFO[2]}\" ", ), - 432..433, + 415..434, ), ( Token( @@ -693,6 +543,12 @@ expression: tokens ), 721..722, ), + ( + Token( + "", + ), + 722..723, + ), ( Token( "{", @@ -719,39 +575,21 @@ expression: tokens ), ( Token( - "=", - ), - 738..739, - ), - ( - Token( - "\"", - ), - 739..740, - ), - ( - Token( - "\\", - ), - 740..741, - ), - ( - Token( - "$", + "=\"", ), - 741..742, + 738..740, ), ( Token( - "\\", + "\\$", ), - 742..743, + 740..742, ), ( Token( - "{", + "\\{", ), - 743..744, + 742..744, ), ( Token( @@ -779,39 +617,21 @@ expression: tokens ), ( Token( - "//", - ), - 759..761, - ), - ( - Token( - "\"", - ), - 761..762, - ), - ( - Token( - "\\", - ), - 762..763, - ), - ( - Token( - "$", + "//\"", ), - 763..764, + 759..762, ), ( Token( - "\\", + "\\$", ), - 764..765, + 762..764, ), ( Token( - "{", + "\\{", ), - 765..766, + 764..766, ), ( Token( @@ -839,51 +659,21 @@ expression: tokens ), ( Token( - "}", - ), - 781..782, - ), - ( - Token( - "\"", - ), - 782..783, - ), - ( - Token( - "/", - ), - 783..784, - ), - ( - Token( - "\"", - ), - 784..785, - ), - ( - Token( - "\\", - ), - 785..786, - ), - ( - Token( - "$", + "}\"/\"", ), - 786..787, + 781..785, ), ( Token( - "\\", + "\\$", ), - 787..788, + 785..787, ), ( Token( - "{", + "\\{", ), - 788..789, + 787..789, ), ( Token( @@ -911,27 +701,9 @@ expression: tokens ), ( Token( - "}", - ), - 805..806, - ), - ( - Token( - "\"", - ), - 806..807, - ), - ( - Token( - "}", - ), - 807..808, - ), - ( - Token( - "\"", + "}\"}\" ", ), - 808..809, + 805..810, ), ( Token( @@ -969,6 +741,12 @@ expression: tokens ), 839..840, ), + ( + Token( + "", + ), + 840..841, + ), ( Token( "{", @@ -995,39 +773,21 @@ expression: tokens ), ( Token( - "=", - ), - 856..857, - ), - ( - Token( - "\"", - ), - 857..858, - ), - ( - Token( - "\\", - ), - 858..859, - ), - ( - Token( - "$", + "=\"", ), - 859..860, + 856..858, ), ( Token( - "\\", + "\\$", ), - 860..861, + 858..860, ), ( 
Token( - "{", + "\\{", ), - 861..862, + 860..862, ), ( Token( @@ -1055,39 +815,21 @@ expression: tokens ), ( Token( - "//", - ), - 877..879, - ), - ( - Token( - "\"", - ), - 879..880, - ), - ( - Token( - "\\", + "//\"", ), - 880..881, + 877..880, ), ( Token( - "$", - ), - 881..882, - ), - ( - Token( - "\\", + "\\$", ), - 882..883, + 880..882, ), ( Token( - "{", + "\\{", ), - 883..884, + 882..884, ), ( Token( @@ -1115,91 +857,55 @@ expression: tokens ), ( Token( - "}", + "}\"/", ), - 899..900, + 899..902, ), ( Token( - "\"", + "\\$", ), - 900..901, + 902..904, ), ( Token( - "/", + "\\{", ), - 901..902, + 904..906, ), ( Token( - "\\", + "{", ), - 902..903, + 906..907, ), ( Token( - "$", + "nameof", ), - 903..904, + 907..913, ), ( Token( - "\\", + "replace", ), - 904..905, + 914..921, ), ( Token( - "{", + "}", ), - 905..906, + 921..922, ), ( Token( - "{", + "}}\" ", ), - 906..907, + 922..926, ), ( Token( - "nameof", - ), - 907..913, - ), - ( - Token( - "replace", - ), - 914..921, - ), - ( - Token( - "}", - ), - 921..922, - ), - ( - Token( - "}", - ), - 922..923, - ), - ( - Token( - "}", - ), - 923..924, - ), - ( - Token( - "\"", - ), - 924..925, - ), - ( - Token( - "$", + "$", ), 926..927, ), @@ -1413,6 +1119,12 @@ expression: tokens ), 1206..1207, ), + ( + Token( + "", + ), + 1207..1208, + ), ( Token( "{", @@ -1439,39 +1151,21 @@ expression: tokens ), ( Token( - "=", - ), - 1223..1224, - ), - ( - Token( - "\"", - ), - 1224..1225, - ), - ( - Token( - "\\", - ), - 1225..1226, - ), - ( - Token( - "$", + "=\"", ), - 1226..1227, + 1223..1225, ), ( Token( - "\\", + "\\$", ), - 1227..1228, + 1225..1227, ), ( Token( - "{", + "\\{", ), - 1228..1229, + 1227..1229, ), ( Token( @@ -1499,39 +1193,21 @@ expression: tokens ), ( Token( - "/", - ), - 1244..1245, - ), - ( - Token( - "\"", - ), - 1245..1246, - ), - ( - Token( - "\\", - ), - 1246..1247, - ), - ( - Token( - "$", + "/\"", ), - 1247..1248, + 1244..1246, ), ( Token( - "\\", + "\\$", ), - 1248..1249, + 1246..1248, ), ( Token( - "{", + "\\{", ), - 1249..1250, + 1248..1250, ), ( Token( @@ -1559,51 +1235,21 @@ expression: tokens ), ( Token( - "}", - ), - 1265..1266, - ), - ( - Token( - "\"", - ), - 1266..1267, - ), - ( - Token( - "/", - ), - 1267..1268, - ), - ( - Token( - "\"", - ), - 1268..1269, - ), - ( - Token( - "\\", + "}\"/\"", ), - 1269..1270, + 1265..1269, ), ( Token( - "$", - ), - 1270..1271, - ), - ( - Token( - "\\", + "\\$", ), - 1271..1272, + 1269..1271, ), ( Token( - "{", + "\\{", ), - 1272..1273, + 1271..1273, ), ( Token( @@ -1631,27 +1277,9 @@ expression: tokens ), ( Token( - "}", - ), - 1289..1290, - ), - ( - Token( - "\"", - ), - 1290..1291, - ), - ( - Token( - "}", - ), - 1291..1292, - ), - ( - Token( - "\"", + "}\"}\" ", ), - 1292..1293, + 1289..1294, ), ( Token( @@ -1689,6 +1317,12 @@ expression: tokens ), 1323..1324, ), + ( + Token( + "", + ), + 1324..1325, + ), ( Token( "{", @@ -1715,39 +1349,21 @@ expression: tokens ), ( Token( - "=", - ), - 1340..1341, - ), - ( - Token( - "\"", - ), - 1341..1342, - ), - ( - Token( - "\\", - ), - 1342..1343, - ), - ( - Token( - "$", + "=\"", ), - 1343..1344, + 1340..1342, ), ( Token( - "\\", + "\\$", ), - 1344..1345, + 1342..1344, ), ( Token( - "{", + "\\{", ), - 1345..1346, + 1344..1346, ), ( Token( @@ -1775,39 +1391,21 @@ expression: tokens ), ( Token( - "/", - ), - 1361..1362, - ), - ( - Token( - "\"", - ), - 1362..1363, - ), - ( - Token( - "\\", - ), - 1363..1364, - ), - ( - Token( - "$", + "/\"", ), - 1364..1365, + 1361..1363, ), ( Token( - "\\", + "\\$", ), - 1365..1366, + 1363..1365, ), ( 
Token( - "{", + "\\{", ), - 1366..1367, + 1365..1367, ), ( Token( @@ -1835,45 +1433,21 @@ expression: tokens ), ( Token( - "}", - ), - 1382..1383, - ), - ( - Token( - "\"", - ), - 1383..1384, - ), - ( - Token( - "/", - ), - 1384..1385, - ), - ( - Token( - "\\", - ), - 1385..1386, - ), - ( - Token( - "$", + "}\"/", ), - 1386..1387, + 1382..1385, ), ( Token( - "\\", + "\\$", ), - 1387..1388, + 1385..1387, ), ( Token( - "{", + "\\{", ), - 1388..1389, + 1387..1389, ), ( Token( @@ -1901,21 +1475,9 @@ expression: tokens ), ( Token( - "}", - ), - 1405..1406, - ), - ( - Token( - "}", - ), - 1406..1407, - ), - ( - Token( - "\"", + "}}\" ", ), - 1407..1408, + 1405..1409, ), ( Token( @@ -2087,529 +1649,277 @@ expression: tokens ), ( Token( - "re", + "re='", ), - 1679..1681, + 1678..1683, ), ( Token( - "=", + "\\b", ), - 1681..1682, + 1683..1685, ), ( Token( - "'", + "Copyright", ), - 1682..1683, + 1685..1694, ), ( Token( - "\\", + "\\b", ), - 1683..1684, + 1694..1696, ), ( Token( - "bCopyright", + ".+", ), - 1684..1694, + 1696..1698, ), ( Token( - "\\", + "\\b", ), - 1694..1695, + 1698..1700, ), ( Token( - "b", + "Free Software Foundation", ), - 1695..1696, + 1700..1724, ), ( Token( - ".", + "\\b", ), - 1696..1697, + 1724..1726, ), ( Token( - "+", + "'; [[ ", ), - 1697..1698, + 1726..1732, ), ( Token( - "\\", + "\\$", ), - 1698..1699, + 1732..1734, ), ( Token( - "bFree", + "(sed --version 2>/dev/null) =~ ", ), - 1699..1704, + 1734..1765, ), ( Token( - "Software", + "\\$", ), - 1705..1713, + 1765..1767, ), ( Token( - "Foundation", + "re ]] ", ), - 1714..1724, + 1767..1773, ), ( Token( - "\\", + "$", ), - 1724..1725, + 1773..1774, ), ( Token( - "b'", + "if", ), - 1725..1727, + 1779..1781, ), ( Token( - ";", + "status", ), - 1727..1728, + 1782..1788, ), ( Token( - "[", + "==", ), - 1729..1730, + 1789..1791, ), ( Token( - "[", + "0", ), - 1730..1731, + 1792..1793, ), ( Token( - "\\", + "{", ), - 1732..1733, + 1794..1795, ), ( Token( - "$", + "return", ), - 1733..1734, + 1804..1810, ), ( Token( - "(", + "SED_VERSION_GNU", ), - 1734..1735, + 1811..1826, ), ( Token( - "sed", + "}", ), - 1735..1738, + 1831..1832, ), ( Token( - "-", + "trust", ), - 1739..1740, + 1837..1842, ), ( Token( - "-", + "$", ), - 1740..1741, + 1843..1844, ), ( Token( - "version", + "re='", ), - 1741..1748, + 1844..1849, ), ( Token( - "2", + "\\b", ), - 1749..1750, + 1849..1851, ), ( Token( - ">", + "BusyBox", ), - 1750..1751, + 1851..1858, ), ( Token( - "/", + "\\b", ), - 1751..1752, + 1858..1860, ), ( Token( - "dev", + "'; [[ ", ), - 1752..1755, + 1860..1866, ), ( Token( - "/", + "\\$", ), - 1755..1756, + 1866..1868, ), ( Token( - "null", + "(sed 2>&1) =~ ", ), - 1756..1760, + 1868..1882, ), ( Token( - ")", + "\\$", ), - 1760..1761, + 1882..1884, ), ( Token( - "=", + "re ]] ", ), - 1762..1763, + 1884..1890, ), ( Token( - "~", + "$", ), - 1763..1764, + 1890..1891, ), ( Token( - "\\", + "if", ), - 1765..1766, + 1896..1898, ), ( Token( - "$", + "status", ), - 1766..1767, + 1899..1905, ), ( Token( - "re", + "==", ), - 1767..1769, + 1906..1908, ), ( Token( - "]", + "0", ), - 1770..1771, + 1909..1910, ), ( Token( - "]", + "{", ), - 1771..1772, + 1911..1912, ), ( Token( - "$", + "return", ), - 1773..1774, + 1921..1927, ), ( Token( - "if", + "SED_VERSION_BUSYBOX", ), - 1779..1781, + 1928..1947, ), ( Token( - "status", + "}", ), - 1782..1788, + 1952..1953, ), ( Token( - "==", + "return", ), - 1789..1791, + 1958..1964, ), ( Token( - "0", + "SED_VERSION_UNKNOWN", ), - 1792..1793, + 1965..1984, ), ( Token( - "{", + "}", ), - 1794..1795, + 
1985..1986, ), ( Token( - "return", - ), - 1804..1810, - ), - ( - Token( - "SED_VERSION_GNU", - ), - 1811..1826, - ), - ( - Token( - "}", - ), - 1831..1832, - ), - ( - Token( - "trust", - ), - 1837..1842, - ), - ( - Token( - "$", - ), - 1843..1844, - ), - ( - Token( - "re", - ), - 1845..1847, - ), - ( - Token( - "=", - ), - 1847..1848, - ), - ( - Token( - "'", - ), - 1848..1849, - ), - ( - Token( - "\\", - ), - 1849..1850, - ), - ( - Token( - "bBusyBox", - ), - 1850..1858, - ), - ( - Token( - "\\", - ), - 1858..1859, - ), - ( - Token( - "b'", - ), - 1859..1861, - ), - ( - Token( - ";", - ), - 1861..1862, - ), - ( - Token( - "[", - ), - 1863..1864, - ), - ( - Token( - "[", - ), - 1864..1865, - ), - ( - Token( - "\\", - ), - 1866..1867, - ), - ( - Token( - "$", - ), - 1867..1868, - ), - ( - Token( - "(", - ), - 1868..1869, - ), - ( - Token( - "sed", - ), - 1869..1872, - ), - ( - Token( - "2", - ), - 1873..1874, - ), - ( - Token( - ">", - ), - 1874..1875, - ), - ( - Token( - "&1", - ), - 1875..1877, - ), - ( - Token( - ")", - ), - 1877..1878, - ), - ( - Token( - "=", - ), - 1879..1880, - ), - ( - Token( - "~", - ), - 1880..1881, - ), - ( - Token( - "\\", - ), - 1882..1883, - ), - ( - Token( - "$", - ), - 1883..1884, - ), - ( - Token( - "re", - ), - 1884..1886, - ), - ( - Token( - "]", - ), - 1887..1888, - ), - ( - Token( - "]", - ), - 1888..1889, - ), - ( - Token( - "$", - ), - 1890..1891, - ), - ( - Token( - "if", - ), - 1896..1898, - ), - ( - Token( - "status", - ), - 1899..1905, - ), - ( - Token( - "==", - ), - 1906..1908, - ), - ( - Token( - "0", - ), - 1909..1910, - ), - ( - Token( - "{", - ), - 1911..1912, - ), - ( - Token( - "return", - ), - 1921..1927, - ), - ( - Token( - "SED_VERSION_BUSYBOX", - ), - 1928..1947, - ), - ( - Token( - "}", - ), - 1952..1953, - ), - ( - Token( - "return", - ), - 1958..1964, - ), - ( - Token( - "SED_VERSION_UNKNOWN", - ), - 1965..1984, - ), - ( - Token( - "}", - ), - 1985..1986, - ), - ( - Token( - "/// Replaces all occurrences of a regex pattern in the content with the provided replace text.\n", + "/// Replaces all occurrences of a regex pattern in the content with the provided replace text.\n", ), 1988..2083, ), @@ -2891,15 +2201,9 @@ expression: tokens ), ( Token( - "\\", - ), - 2306..2307, - ), - ( - Token( - "\\", + "\\\\", ), - 2307..2308, + 2306..2308, ), ( Token( @@ -2987,15 +2291,9 @@ expression: tokens ), ( Token( - "\\", - ), - 2363..2364, - ), - ( - Token( - "\\", + "\\\\", ), - 2364..2365, + 2363..2365, ), ( Token( @@ -3119,15 +2417,9 @@ expression: tokens ), ( Token( - "\\", - ), - 2561..2562, - ), - ( - Token( - "\\", + "\\\\", ), - 2562..2563, + 2561..2563, ), ( Token( @@ -3155,27 +2447,15 @@ expression: tokens ), ( Token( - "\\", - ), - 2568..2569, - ), - ( - Token( - "\\", - ), - 2569..2570, - ), - ( - Token( - "\\", + "\\\\", ), - 2570..2571, + 2568..2570, ), ( Token( - "b", + "\\b", ), - 2571..2572, + 2570..2572, ), ( Token( @@ -3269,15 +2549,9 @@ expression: tokens ), ( Token( - "echo", - ), - 2803..2807, - ), - ( - Token( - "\"", + "echo \"", ), - 2808..2809, + 2802..2809, ), ( Token( @@ -3299,99 +2573,45 @@ expression: tokens ), ( Token( - "\"", + "\" | sed -r -e \"s/", ), - 2817..2818, + 2817..2834, ), ( Token( - "|", + "{", ), - 2819..2820, + 2834..2835, ), ( Token( - "sed", + "search", ), - 2821..2824, + 2835..2841, ), ( Token( - "-", + "}", ), - 2825..2826, + 2841..2842, ), ( Token( - "r", + "/", ), - 2826..2827, + 2842..2843, ), ( Token( - "-", + "{", ), - 2828..2829, + 2843..2844, ), ( Token( - "e", + "replace_text", ), - 
2829..2830, - ), - ( - Token( - "\"", - ), - 2831..2832, - ), - ( - Token( - "s", - ), - 2832..2833, - ), - ( - Token( - "/", - ), - 2833..2834, - ), - ( - Token( - "{", - ), - 2834..2835, - ), - ( - Token( - "search", - ), - 2835..2841, - ), - ( - Token( - "}", - ), - 2841..2842, - ), - ( - Token( - "/", - ), - 2842..2843, - ), - ( - Token( - "{", - ), - 2843..2844, - ), - ( - Token( - "replace_text", - ), - 2844..2856, + 2844..2856, ), ( Token( @@ -3401,21 +2621,9 @@ expression: tokens ), ( Token( - "/", - ), - 2857..2858, - ), - ( - Token( - "g", - ), - 2858..2859, - ), - ( - Token( - "\"", + "/g\" ", ), - 2859..2860, + 2857..2861, ), ( Token( @@ -3455,15 +2663,9 @@ expression: tokens ), ( Token( - "echo", - ), - 2909..2913, - ), - ( - Token( - "\"", + "echo \"", ), - 2914..2915, + 2908..2915, ), ( Token( @@ -3485,63 +2687,9 @@ expression: tokens ), ( Token( - "\"", - ), - 2923..2924, - ), - ( - Token( - "|", - ), - 2925..2926, - ), - ( - Token( - "sed", - ), - 2927..2930, - ), - ( - Token( - "-", - ), - 2931..2932, - ), - ( - Token( - "E", - ), - 2932..2933, - ), - ( - Token( - "-", - ), - 2934..2935, - ), - ( - Token( - "e", - ), - 2935..2936, - ), - ( - Token( - "\"", - ), - 2937..2938, - ), - ( - Token( - "s", - ), - 2938..2939, - ), - ( - Token( - "/", + "\" | sed -E -e \"s/", ), - 2939..2940, + 2923..2940, ), ( Token( @@ -3587,21 +2735,9 @@ expression: tokens ), ( Token( - "/", - ), - 2963..2964, - ), - ( - Token( - "g", - ), - 2964..2965, - ), - ( - Token( - "\"", + "/g\" ", ), - 2965..2966, + 2963..2967, ), ( Token( @@ -3737,15 +2873,9 @@ expression: tokens ), ( Token( - "\\", - ), - 3231..3232, - ), - ( - Token( - "|", + "\\|", ), - 3232..3233, + 3231..3233, ), ( Token( @@ -3803,15 +2933,9 @@ expression: tokens ), ( Token( - "echo", - ), - 3276..3280, - ), - ( - Token( - "\"", + "echo \"", ), - 3281..3282, + 3275..3282, ), ( Token( @@ -3833,51 +2957,9 @@ expression: tokens ), ( Token( - "\"", - ), - 3290..3291, - ), - ( - Token( - "|", - ), - 3292..3293, - ), - ( - Token( - "sed", - ), - 3294..3297, - ), - ( - Token( - "-", - ), - 3298..3299, - ), - ( - Token( - "e", - ), - 3299..3300, - ), - ( - Token( - "\"", - ), - 3301..3302, - ), - ( - Token( - "s", - ), - 3302..3303, - ), - ( - Token( - "/", + "\" | sed -e \"s/", ), - 3303..3304, + 3290..3304, ), ( Token( @@ -3923,21 +3005,9 @@ expression: tokens ), ( Token( - "/", - ), - 3327..3328, - ), - ( - Token( - "g", - ), - 3328..3329, - ), - ( - Token( - "\"", + "/g\" ", ), - 3329..3330, + 3327..3331, ), ( Token( @@ -4121,21 +3191,9 @@ expression: tokens ), ( Token( - "IFS", - ), - 3530..3533, - ), - ( - Token( - "=", - ), - 3533..3534, - ), - ( - Token( - "\"", + "IFS=\"", ), - 3534..3535, + 3529..3535, ), ( Token( @@ -4157,45 +3215,9 @@ expression: tokens ), ( Token( - "\"", - ), - 3546..3547, - ), - ( - Token( - "read", - ), - 3548..3552, - ), - ( - Token( - "-", - ), - 3553..3554, - ), - ( - Token( - "rd", - ), - 3554..3556, - ), - ( - Token( - "''", - ), - 3557..3559, - ), - ( - Token( - "-", - ), - 3560..3561, - ), - ( - Token( - "a", + "\" read -rd '' -a ", ), - 3561..3562, + 3546..3563, ), ( Token( @@ -4223,57 +3245,15 @@ expression: tokens ), ( Token( - "<", - ), - 3579..3580, - ), - ( - Token( - "<", - ), - 3581..3582, - ), - ( - Token( - "(", - ), - 3582..3583, - ), - ( - Token( - "printf", - ), - 3583..3589, - ), - ( - Token( - "%", - ), - 3590..3591, - ), - ( - Token( - "s", - ), - 3591..3592, - ), - ( - Token( - "\"", - ), - 3593..3594, - ), - ( - Token( - "\\", + " < <(printf %s \"", ), - 3594..3595, + 3578..3594, 
), ( Token( - "$", + "\\$", ), - 3595..3596, + 3594..3596, ), ( Token( @@ -4301,15 +3281,9 @@ expression: tokens ), ( Token( - "\"", - ), - 3609..3610, - ), - ( - Token( - ")", + "\") ", ), - 3610..3611, + 3609..3612, ), ( Token( @@ -4457,15 +3431,9 @@ expression: tokens ), ( Token( - "\\", - ), - 3778..3779, - ), - ( - Token( - "n", + "\\n", ), - 3779..3780, + 3778..3780, ), ( Token( @@ -4605,6 +3573,12 @@ expression: tokens ), 3925..3926, ), + ( + Token( + " ", + ), + 3926..3927, + ), ( Token( "\"", @@ -4751,31 +3725,19 @@ expression: tokens ), ( Token( - "IFS", + "IFS=\"", ), - 4051..4054, + 4050..4056, ), ( Token( - "=", + "{", ), - 4054..4055, + 4056..4057, ), ( Token( - "\"", - ), - 4055..4056, - ), - ( - Token( - "{", - ), - 4056..4057, - ), - ( - Token( - "delimiter", + "delimiter", ), 4057..4066, ), @@ -4787,51 +3749,21 @@ expression: tokens ), ( Token( - "\"", - ), - 4067..4068, - ), - ( - Token( - ";", - ), - 4069..4070, - ), - ( - Token( - "echo", - ), - 4071..4075, - ), - ( - Token( - "\"", - ), - 4076..4077, - ), - ( - Token( - "\\", - ), - 4077..4078, - ), - ( - Token( - "$", + "\" ; echo \"", ), - 4078..4079, + 4067..4077, ), ( Token( - "\\", + "\\$", ), - 4079..4080, + 4077..4079, ), ( Token( - "{", + "\\{", ), - 4080..4081, + 4079..4081, ), ( Token( @@ -4859,33 +3791,9 @@ expression: tokens ), ( Token( - "[", - ), - 4094..4095, - ), - ( - Token( - "*", - ), - 4095..4096, - ), - ( - Token( - "]", - ), - 4096..4097, - ), - ( - Token( - "}", - ), - 4097..4098, - ), - ( - Token( - "\"", + "[*]}\" ", ), - 4098..4099, + 4094..4100, ), ( Token( @@ -4991,15 +3899,9 @@ expression: tokens ), ( Token( - "echo", - ), - 4215..4219, - ), - ( - Token( - "\"", + "echo \"", ), - 4220..4221, + 4214..4221, ), ( Token( @@ -5021,111 +3923,9 @@ expression: tokens ), ( Token( - "\"", - ), - 4227..4228, - ), - ( - Token( - "|", - ), - 4229..4230, - ), - ( - Token( - "sed", - ), - 4231..4234, - ), - ( - Token( - "-", - ), - 4235..4236, - ), - ( - Token( - "e", - ), - 4236..4237, - ), - ( - Token( - "'s", - ), - 4238..4240, - ), - ( - Token( - "/", - ), - 4240..4241, - ), - ( - Token( - "^", - ), - 4241..4242, - ), - ( - Token( - "[", - ), - 4242..4243, - ), - ( - Token( - "[", - ), - 4243..4244, - ), - ( - Token( - ":", - ), - 4244..4245, - ), - ( - Token( - "space", - ), - 4245..4250, - ), - ( - Token( - ":", - ), - 4250..4251, - ), - ( - Token( - "]", - ), - 4251..4252, - ), - ( - Token( - "]", - ), - 4252..4253, - ), - ( - Token( - "*", - ), - 4253..4254, - ), - ( - Token( - "//", - ), - 4254..4256, - ), - ( - Token( - "'", + "\" | sed -e 's/^[[:space:]]*//' ", ), - 4256..4257, + 4227..4258, ), ( Token( @@ -5231,15 +4031,9 @@ expression: tokens ), ( Token( - "echo", - ), - 4374..4378, - ), - ( - Token( - "\"", + "echo \"", ), - 4379..4380, + 4373..4380, ), ( Token( @@ -5261,117 +4055,21 @@ expression: tokens ), ( Token( - "\"", - ), - 4386..4387, - ), - ( - Token( - "|", - ), - 4388..4389, - ), - ( - Token( - "sed", - ), - 4390..4393, - ), - ( - Token( - "-", - ), - 4394..4395, - ), - ( - Token( - "e", - ), - 4395..4396, - ), - ( - Token( - "'s", - ), - 4397..4399, - ), - ( - Token( - "/", - ), - 4399..4400, - ), - ( - Token( - "[", - ), - 4400..4401, - ), - ( - Token( - "[", - ), - 4401..4402, - ), - ( - Token( - ":", - ), - 4402..4403, - ), - ( - Token( - "space", - ), - 4403..4408, - ), - ( - Token( - ":", - ), - 4408..4409, - ), - ( - Token( - "]", - ), - 4409..4410, - ), - ( - Token( - "]", - ), - 4410..4411, - ), - ( - Token( - "*", - ), - 4411..4412, - ), - ( - Token( - "\\", - ), - 
4412..4413, - ), - ( - Token( - "$", + "\" | sed -e 's/[[:space:]]*", ), - 4413..4414, + 4386..4412, ), ( Token( - "//", + "\\$", ), - 4414..4416, + 4412..4414, ), ( Token( - "'", + "//' ", ), - 4416..4417, + 4414..4418, ), ( Token( @@ -5603,15 +4301,9 @@ expression: tokens ), ( Token( - "echo", - ), - 4644..4648, - ), - ( - Token( - "\"", + "echo \"", ), - 4649..4650, + 4643..4650, ), ( Token( @@ -5633,1143 +4325,645 @@ expression: tokens ), ( Token( - "\"", + "\" | tr '[:upper:]' '[:lower:]' ", ), - 4656..4657, + 4656..4687, ), ( Token( - "|", + "$", ), - 4658..4659, + 4687..4688, ), ( Token( - "tr", + "}", ), - 4660..4662, + 4689..4690, ), ( Token( - "'", + "/// Makes the text input uppercase using `tr`.\n", ), - 4663..4664, + 4692..4739, ), ( Token( - "[", + "pub", ), - 4664..4665, + 4739..4742, ), ( Token( - ":", + "fun", ), - 4665..4666, + 4743..4746, ), ( Token( - "upper", + "uppercase", ), - 4666..4671, + 4747..4756, ), ( Token( - ":", + "(", ), - 4671..4672, + 4756..4757, ), ( Token( - "]", + "text", ), - 4672..4673, + 4757..4761, ), ( Token( - "'", + ":", ), - 4673..4674, + 4761..4762, ), ( Token( - "'", + "Text", ), - 4675..4676, + 4763..4767, ), ( Token( - "[", + ")", ), - 4676..4677, + 4767..4768, ), ( Token( ":", ), - 4677..4678, + 4768..4769, ), ( Token( - "lower", + "Text", ), - 4678..4683, + 4770..4774, ), ( Token( - ":", + "{", ), - 4683..4684, + 4775..4776, ), ( Token( - "]", + "return", ), - 4684..4685, + 4781..4787, ), ( Token( - "'", + "trust", ), - 4685..4686, + 4788..4793, ), ( Token( "$", ), - 4687..4688, + 4794..4795, + ), + ( + Token( + "echo \"", + ), + 4795..4802, + ), + ( + Token( + "{", + ), + 4802..4803, + ), + ( + Token( + "text", + ), + 4803..4807, ), ( Token( "}", ), - 4689..4690, + 4807..4808, ), ( Token( - "/// Makes the text input uppercase using `tr`.\n", + "\" | tr '[:lower:]' '[:upper:]' ", ), - 4692..4739, + 4808..4839, ), ( Token( - "pub", + "$", ), - 4739..4742, + 4839..4840, ), ( Token( - "fun", + "}", ), - 4743..4746, + 4841..4842, ), ( Token( - "uppercase", + "/// Attempts to parse a given text into an `Int` number.\n", ), - 4747..4756, + 4844..4901, ), ( Token( - "(", + "#[", ), - 4756..4757, + 4901..4903, ), ( Token( - "text", + "allow_absurd_cast", ), - 4757..4761, + 4903..4920, ), ( Token( - ":", + "]", ), - 4761..4762, + 4920..4921, ), ( Token( - "Text", + "pub", ), - 4763..4767, + 4922..4925, ), ( Token( - ")", + "fun", ), - 4767..4768, + 4926..4929, ), ( Token( - ":", + "parse_int", ), - 4768..4769, + 4930..4939, ), ( Token( - "Text", + "(", ), - 4770..4774, + 4939..4940, ), ( Token( - "{", + "text", ), - 4775..4776, + 4940..4944, ), ( Token( - "return", + ":", ), - 4781..4787, + 4944..4945, ), ( Token( - "trust", + "Text", ), - 4788..4793, + 4946..4950, ), ( Token( - "$", + ")", ), - 4794..4795, + 4950..4951, ), ( Token( - "echo", + ":", ), - 4796..4800, + 4951..4952, ), ( Token( - "\"", + "Int", ), - 4801..4802, + 4953..4956, ), ( Token( - "{", + "?", ), - 4802..4803, + 4956..4957, ), ( Token( - "text", + "{", ), - 4803..4807, + 4958..4959, ), ( Token( - "}", + "$", ), - 4807..4808, + 4964..4965, ), ( Token( - "\"", + "[ -n \"", ), - 4808..4809, + 4965..4972, ), ( Token( - "|", + "{", ), - 4810..4811, + 4972..4973, ), ( Token( - "tr", + "text", ), - 4812..4814, + 4973..4977, ), ( Token( - "'", + "}", ), - 4815..4816, + 4977..4978, ), ( Token( - "[", + "\" ] && [ \"", ), - 4816..4817, + 4978..4988, ), ( Token( - ":", + "{", ), - 4817..4818, + 4988..4989, ), ( Token( - "lower", + "text", ), - 4818..4823, + 4989..4993, ), ( Token( - ":", 
+ "}", ), - 4823..4824, + 4993..4994, ), ( Token( - "]", + "\" -eq \"", ), - 4824..4825, + 4994..5001, ), ( Token( - "'", + "{", ), - 4825..4826, + 5001..5002, ), ( Token( - "'", + "text", ), - 4827..4828, + 5002..5006, ), ( Token( - "[", + "}", ), - 4828..4829, + 5006..5007, ), ( Token( - ":", + "\" ] 2>/dev/null ", ), - 4829..4830, + 5007..5023, ), ( Token( - "upper", + "$", ), - 4830..4835, + 5023..5024, ), ( Token( - ":", + "?", ), - 4835..4836, + 5024..5025, ), ( Token( - "]", + "return", ), - 4836..4837, + 5030..5036, ), ( Token( - "'", + "text", ), - 4837..4838, + 5037..5041, ), ( Token( - "$", + "as", ), - 4839..4840, + 5042..5044, ), ( Token( - "}", + "Int", ), - 4841..4842, + 5045..5048, ), ( Token( - "/// Attempts to parse a given text into an `Int` number.\n", + "}", ), - 4844..4901, + 5049..5050, ), ( Token( - "#", + "/// Attempts to parse a given text into a `Num` number.\n", ), - 4901..4902, + 5052..5108, ), ( Token( - "[", + "#[", ), - 4902..4903, + 5108..5110, ), ( Token( "allow_absurd_cast", ), - 4903..4920, + 5110..5127, ), ( Token( "]", ), - 4920..4921, + 5127..5128, ), ( Token( "pub", ), - 4922..4925, + 5129..5132, ), ( Token( "fun", ), - 4926..4929, + 5133..5136, ), ( Token( - "parse_int", + "parse_num", ), - 4930..4939, + 5137..5146, ), ( Token( "(", ), - 4939..4940, + 5146..5147, ), ( Token( "text", ), - 4940..4944, + 5147..5151, ), ( Token( ":", ), - 4944..4945, + 5151..5152, ), ( Token( "Text", ), - 4946..4950, + 5153..5157, ), ( Token( ")", ), - 4950..4951, + 5157..5158, ), ( Token( ":", ), - 4951..4952, + 5158..5159, ), ( Token( - "Int", + "Num", ), - 4953..4956, + 5160..5163, ), ( Token( "?", ), - 4956..4957, + 5163..5164, ), ( Token( "{", ), - 4958..4959, + 5165..5166, ), ( Token( - "$", + "let", ), - 4964..4965, + 5171..5174, ), ( Token( - "[", + "re_int", ), - 4966..4967, + 5175..5181, ), ( Token( - "-", + "=", ), - 4968..4969, + 5181..5182, ), ( Token( - "n", + "\"", ), - 4969..4970, + 5182..5183, ), ( Token( - "\"", + "^-?[0-9]+$", ), - 4971..4972, + 5183..5193, ), ( Token( - "{", + "\"", ), - 4972..4973, + 5193..5194, ), ( Token( - "text", + "let", ), - 4973..4977, + 5199..5202, ), ( Token( - "}", + "re_float", ), - 4977..4978, + 5203..5211, ), ( Token( - "\"", + "=", ), - 4978..4979, + 5211..5212, ), ( Token( - "]", + "\"", ), - 4980..4981, + 5212..5213, ), ( Token( - "&&", + "^-?[0-9]*", ), - 4982..4984, + 5213..5222, ), ( Token( - "[", + "\\.", ), - 4985..4986, + 5222..5224, ), ( Token( - "\"", + "[0-9]+$", ), - 4987..4988, + 5224..5231, ), ( Token( - "{", + "\"", ), - 4988..4989, + 5231..5232, ), ( Token( - "text", + "$", ), - 4989..4993, + 5238..5239, ), ( Token( - "}", + "[[ ", ), - 4993..4994, + 5239..5243, ), ( Token( - "\"", + "{", ), - 4994..4995, + 5243..5244, ), ( Token( - "-", + "text", ), - 4996..4997, + 5244..5248, ), ( Token( - "eq", + "}", ), - 4997..4999, + 5248..5249, ), ( Token( - "\"", + " =~ ", ), - 5000..5001, + 5249..5253, ), ( Token( "{", ), - 5001..5002, + 5253..5254, ), ( Token( - "text", + "re_int", ), - 5002..5006, + 5254..5260, ), ( Token( "}", ), - 5006..5007, + 5260..5261, ), ( Token( - "\"", + " ]] || [[ ", ), - 5007..5008, + 5261..5271, ), ( Token( - "]", + "{", ), - 5009..5010, + 5271..5272, ), ( Token( - "2", + "text", ), - 5011..5012, + 5272..5276, ), ( Token( - ">", + "}", ), - 5012..5013, + 5276..5277, ), ( Token( - "/", + " =~ ", ), - 5013..5014, + 5277..5281, ), ( Token( - "dev", + "{", ), - 5014..5017, - ), - ( - Token( - "/", - ), - 5017..5018, - ), - ( - Token( - "null", - ), - 5018..5022, - ), - ( - Token( - 
"$", - ), - 5023..5024, - ), - ( - Token( - "?", - ), - 5024..5025, - ), - ( - Token( - "return", - ), - 5030..5036, - ), - ( - Token( - "text", - ), - 5037..5041, - ), - ( - Token( - "as", - ), - 5042..5044, - ), - ( - Token( - "Int", - ), - 5045..5048, - ), - ( - Token( - "}", - ), - 5049..5050, - ), - ( - Token( - "/// Attempts to parse a given text into a `Num` number.\n", - ), - 5052..5108, - ), - ( - Token( - "#", - ), - 5108..5109, - ), - ( - Token( - "[", - ), - 5109..5110, - ), - ( - Token( - "allow_absurd_cast", - ), - 5110..5127, - ), - ( - Token( - "]", - ), - 5127..5128, - ), - ( - Token( - "pub", - ), - 5129..5132, - ), - ( - Token( - "fun", - ), - 5133..5136, - ), - ( - Token( - "parse_num", - ), - 5137..5146, - ), - ( - Token( - "(", - ), - 5146..5147, - ), - ( - Token( - "text", - ), - 5147..5151, - ), - ( - Token( - ":", - ), - 5151..5152, - ), - ( - Token( - "Text", - ), - 5153..5157, - ), - ( - Token( - ")", - ), - 5157..5158, - ), - ( - Token( - ":", - ), - 5158..5159, - ), - ( - Token( - "Num", - ), - 5160..5163, - ), - ( - Token( - "?", - ), - 5163..5164, - ), - ( - Token( - "{", - ), - 5165..5166, - ), - ( - Token( - "let", - ), - 5171..5174, - ), - ( - Token( - "re_int", - ), - 5175..5181, - ), - ( - Token( - "=", - ), - 5181..5182, - ), - ( - Token( - "\"", - ), - 5182..5183, - ), - ( - Token( - "^", - ), - 5183..5184, - ), - ( - Token( - "-", - ), - 5184..5185, - ), - ( - Token( - "?", - ), - 5185..5186, - ), - ( - Token( - "[", - ), - 5186..5187, - ), - ( - Token( - "0", - ), - 5187..5188, - ), - ( - Token( - "-", - ), - 5188..5189, - ), - ( - Token( - "9", - ), - 5189..5190, - ), - ( - Token( - "]", - ), - 5190..5191, - ), - ( - Token( - "+", - ), - 5191..5192, - ), - ( - Token( - "$", - ), - 5192..5193, - ), - ( - Token( - "\"", - ), - 5193..5194, - ), - ( - Token( - "let", - ), - 5199..5202, - ), - ( - Token( - "re_float", - ), - 5203..5211, - ), - ( - Token( - "=", - ), - 5211..5212, - ), - ( - Token( - "\"", - ), - 5212..5213, - ), - ( - Token( - "^", - ), - 5213..5214, - ), - ( - Token( - "-", - ), - 5214..5215, - ), - ( - Token( - "?", - ), - 5215..5216, - ), - ( - Token( - "[", - ), - 5216..5217, - ), - ( - Token( - "0", - ), - 5217..5218, - ), - ( - Token( - "-", - ), - 5218..5219, - ), - ( - Token( - "9", - ), - 5219..5220, - ), - ( - Token( - "]", - ), - 5220..5221, - ), - ( - Token( - "*", - ), - 5221..5222, - ), - ( - Token( - "\\", - ), - 5222..5223, - ), - ( - Token( - ".", - ), - 5223..5224, - ), - ( - Token( - "[", - ), - 5224..5225, - ), - ( - Token( - "0", - ), - 5225..5226, - ), - ( - Token( - "-", - ), - 5226..5227, - ), - ( - Token( - "9", - ), - 5227..5228, - ), - ( - Token( - "]", - ), - 5228..5229, - ), - ( - Token( - "+", - ), - 5229..5230, - ), - ( - Token( - "$", - ), - 5230..5231, - ), - ( - Token( - "\"", - ), - 5231..5232, - ), - ( - Token( - "$", - ), - 5238..5239, - ), - ( - Token( - "[", - ), - 5240..5241, - ), - ( - Token( - "[", - ), - 5241..5242, - ), - ( - Token( - "{", - ), - 5243..5244, - ), - ( - Token( - "text", - ), - 5244..5248, - ), - ( - Token( - "}", - ), - 5248..5249, - ), - ( - Token( - "=", - ), - 5250..5251, - ), - ( - Token( - "~", - ), - 5251..5252, - ), - ( - Token( - "{", - ), - 5253..5254, - ), - ( - Token( - "re_int", - ), - 5254..5260, - ), - ( - Token( - "}", - ), - 5260..5261, - ), - ( - Token( - "]", - ), - 5262..5263, - ), - ( - Token( - "]", - ), - 5263..5264, - ), - ( - Token( - "||", - ), - 5265..5267, - ), - ( - Token( - "[", - ), - 5268..5269, - ), - ( - Token( - "[", - ), - 5269..5270, - ), - ( 
- Token( - "{", - ), - 5271..5272, - ), - ( - Token( - "text", - ), - 5272..5276, - ), - ( - Token( - "}", - ), - 5276..5277, - ), - ( - Token( - "=", - ), - 5278..5279, - ), - ( - Token( - "~", - ), - 5279..5280, - ), - ( - Token( - "{", - ), - 5281..5282, + 5281..5282, ), ( Token( @@ -6785,15 +4979,9 @@ expression: tokens ), ( Token( - "]", - ), - 5292..5293, - ), - ( - Token( - "]", + " ]] ", ), - 5293..5294, + 5291..5295, ), ( Token( @@ -6949,343 +5137,163 @@ expression: tokens Token( "Text", ), - 5441..5445, - ), - ( - Token( - "]", - ), - 5445..5446, - ), - ( - Token( - "trust", - ), - 5451..5456, - ), - ( - Token( - "$", - ), - 5457..5458, - ), - ( - Token( - "for", - ), - 5459..5462, - ), - ( - Token( - "(", - ), - 5463..5464, - ), - ( - Token( - "(", - ), - 5464..5465, - ), - ( - Token( - "i", - ), - 5465..5466, - ), - ( - Token( - "=", - ), - 5466..5467, - ), - ( - Token( - "0", - ), - 5467..5468, - ), - ( - Token( - ";", - ), - 5468..5469, - ), - ( - Token( - "i", - ), - 5470..5471, - ), - ( - Token( - "<", - ), - 5471..5472, - ), - ( - Token( - "\\", - ), - 5472..5473, - ), - ( - Token( - "$", - ), - 5473..5474, - ), - ( - Token( - "\\", - ), - 5474..5475, - ), - ( - Token( - "{", - ), - 5475..5476, - ), - ( - Token( - "#", - ), - 5476..5477, - ), - ( - Token( - "{", - ), - 5477..5478, - ), - ( - Token( - "nameof", - ), - 5478..5484, - ), - ( - Token( - "text", - ), - 5485..5489, - ), - ( - Token( - "}", - ), - 5489..5490, - ), - ( - Token( - "}", - ), - 5490..5491, - ), - ( - Token( - ";", - ), - 5491..5492, - ), - ( - Token( - "i", - ), - 5493..5494, - ), - ( - Token( - "+", - ), - 5494..5495, - ), - ( - Token( - "+", - ), - 5495..5496, - ), - ( - Token( - ")", - ), - 5496..5497, - ), - ( - Token( - ")", - ), - 5497..5498, - ), - ( - Token( - ";", - ), - 5498..5499, + 5441..5445, ), ( Token( - "do", + "]", ), - 5500..5502, + 5445..5446, ), ( Token( - "{", + "trust", ), - 5511..5512, + 5451..5456, ), ( Token( - "nameof", + "$", ), - 5512..5518, + 5457..5458, ), ( Token( - "chars", + "for ((i=0; i<", ), - 5519..5524, + 5458..5472, ), ( Token( - "}", + "\\$", ), - 5524..5525, + 5472..5474, ), ( Token( - "+=", + "\\{", ), - 5525..5527, + 5474..5476, ), ( Token( - "(", + "#", ), - 5527..5528, + 5476..5477, ), ( Token( - "\"", + "{", ), - 5529..5530, + 5477..5478, ), ( Token( - "\\", + "nameof", ), - 5530..5531, + 5478..5484, ), ( Token( - "$", + "text", ), - 5531..5532, + 5485..5489, ), ( Token( - "\\", + "}", ), - 5532..5533, + 5489..5490, ), ( Token( - "{", + "}; i++)); do\n ", ), - 5533..5534, + 5490..5511, ), ( Token( "{", ), - 5534..5535, + 5511..5512, ), ( Token( "nameof", ), - 5535..5541, + 5512..5518, ), ( Token( - "text", + "chars", ), - 5542..5546, + 5519..5524, ), ( Token( "}", ), - 5546..5547, + 5524..5525, ), ( Token( - ":", + "+=( \"", ), - 5547..5548, + 5525..5530, ), ( Token( - "\\", + "\\$", ), - 5548..5549, + 5530..5532, ), ( Token( - "$", + "\\{", ), - 5549..5550, + 5532..5534, ), ( Token( - "i", + "{", ), - 5550..5551, + 5534..5535, ), ( Token( - ":", + "nameof", ), - 5551..5552, + 5535..5541, ), ( Token( - "1", + "text", ), - 5552..5553, + 5542..5546, ), ( Token( "}", ), - 5553..5554, - ), - ( - Token( - "\"", - ), - 5554..5555, + 5546..5547, ), ( Token( - ")", + ":", ), - 5556..5557, + 5547..5548, ), ( Token( - ";", + "\\$", ), - 5557..5558, + 5548..5550, ), ( Token( - "done", + "i:1}\" );\n done ", ), - 5563..5567, + 5550..5568, ), ( Token( @@ -7439,27 +5447,9 @@ expression: tokens ), ( Token( - "if", - ), - 5713..5715, - ), - ( - Token( - "[", - ), - 
5716..5717, - ), - ( - Token( - "[", - ), - 5717..5718, - ), - ( - Token( - "\"", + "if [[ \"", ), - 5719..5720, + 5712..5720, ), ( Token( @@ -7481,27 +5471,9 @@ expression: tokens ), ( Token( - "\"", - ), - 5728..5729, - ), - ( - Token( - "==", - ), - 5730..5732, - ), - ( - Token( - "*", - ), - 5733..5734, - ), - ( - Token( - "\"", + "\" == *\"", ), - 5734..5735, + 5728..5735, ), ( Token( @@ -7523,57 +5495,9 @@ expression: tokens ), ( Token( - "\"", - ), - 5743..5744, - ), - ( - Token( - "*", - ), - 5744..5745, - ), - ( - Token( - "]", - ), - 5746..5747, - ), - ( - Token( - "]", - ), - 5747..5748, - ), - ( - Token( - ";", - ), - 5748..5749, - ), - ( - Token( - "then", - ), - 5750..5754, - ), - ( - Token( - "echo", - ), - 5759..5763, - ), - ( - Token( - "1", - ), - 5764..5765, - ), - ( - Token( - "fi", + "\"* ]]; then\n echo 1\n fi ", ), - 5768..5770, + 5743..5771, ), ( Token( @@ -8345,15 +6269,9 @@ expression: tokens ), ( Token( - "\\", - ), - 6547..6548, - ), - ( - Token( - "/", + "\\/", ), - 6548..6549, + 6547..6549, ), ( Token( @@ -8501,15 +6419,9 @@ expression: tokens ), ( Token( - "\\", - ), - 6768..6769, - ), - ( - Token( - "\\", + "\\\\", ), - 6769..6770, + 6768..6770, ), ( Token( @@ -8537,27 +6449,15 @@ expression: tokens ), ( Token( - "\\", - ), - 6775..6776, - ), - ( - Token( - "\\", - ), - 6776..6777, - ), - ( - Token( - "\\", + "\\\\", ), - 6777..6778, + 6775..6777, ), ( Token( - "b", + "\\b", ), - 6778..6779, + 6777..6779, ), ( Token( @@ -8687,15 +6587,9 @@ expression: tokens ), ( Token( - "\\", + "\\b", ), - 7089..7090, - ), - ( - Token( - "b", - ), - 7090..7091, + 7089..7091, ), ( Token( @@ -8717,15 +6611,9 @@ expression: tokens ), ( Token( - "\\", - ), - 7095..7096, - ), - ( - Token( - "\\", + "\\\\", ), - 7096..7097, + 7095..7097, ), ( Token( @@ -8765,15 +6653,9 @@ expression: tokens ), ( Token( - "echo", - ), - 7128..7132, - ), - ( - Token( - "\"", + "echo \"", ), - 7133..7134, + 7127..7134, ), ( Token( @@ -8795,57 +6677,9 @@ expression: tokens ), ( Token( - "\"", - ), - 7142..7143, - ), - ( - Token( - "|", - ), - 7144..7145, - ), - ( - Token( - "sed", - ), - 7146..7149, - ), - ( - Token( - "-", - ), - 7150..7151, - ), - ( - Token( - "r", - ), - 7151..7152, - ), - ( - Token( - "-", - ), - 7153..7154, - ), - ( - Token( - "ne", - ), - 7154..7156, - ), - ( - Token( - "\"", - ), - 7157..7158, - ), - ( - Token( - "/", + "\" | sed -r -ne \"/", ), - 7158..7159, + 7142..7159, ), ( Token( @@ -8867,21 +6701,9 @@ expression: tokens ), ( Token( - "/", - ), - 7167..7168, - ), - ( - Token( - "p", - ), - 7168..7169, - ), - ( - Token( - "\"", + "/p\" ", ), - 7169..7170, + 7167..7171, ), ( Token( @@ -8911,103 +6733,49 @@ expression: tokens Token( "output", ), - 7210..7216, - ), - ( - Token( - "=", - ), - 7217..7218, - ), - ( - Token( - "$", - ), - 7219..7220, - ), - ( - Token( - "echo", - ), - 7221..7225, - ), - ( - Token( - "\"", - ), - 7226..7227, - ), - ( - Token( - "{", - ), - 7227..7228, - ), - ( - Token( - "source", - ), - 7228..7234, - ), - ( - Token( - "}", - ), - 7234..7235, - ), - ( - Token( - "\"", - ), - 7235..7236, - ), - ( - Token( - "|", - ), - 7237..7238, + 7210..7216, ), ( Token( - "sed", + "=", ), - 7239..7242, + 7217..7218, ), ( Token( - "-", + "$", ), - 7243..7244, + 7219..7220, ), ( Token( - "E", + "echo \"", ), - 7244..7245, + 7220..7227, ), ( Token( - "-", + "{", ), - 7246..7247, + 7227..7228, ), ( Token( - "ne", + "source", ), - 7247..7249, + 7228..7234, ), ( Token( - "\"", + "}", ), - 7250..7251, + 7234..7235, ), ( Token( - "/", + "\" | sed -E -ne \"/", ), - 
7251..7252, + 7235..7252, ), ( Token( @@ -9029,21 +6797,9 @@ expression: tokens ), ( Token( - "/", - ), - 7260..7261, - ), - ( - Token( - "p", - ), - 7261..7262, - ), - ( - Token( - "\"", + "/p\" ", ), - 7262..7263, + 7260..7264, ), ( Token( @@ -9179,15 +6935,9 @@ expression: tokens ), ( Token( - "\\", - ), - 7528..7529, - ), - ( - Token( - "|", + "\\|", ), - 7529..7530, + 7528..7530, ), ( Token( @@ -9251,15 +7001,9 @@ expression: tokens ), ( Token( - "echo", - ), - 7575..7579, - ), - ( - Token( - "\"", + "echo \"", ), - 7580..7581, + 7574..7581, ), ( Token( @@ -9281,45 +7025,9 @@ expression: tokens ), ( Token( - "\"", - ), - 7589..7590, - ), - ( - Token( - "|", - ), - 7591..7592, - ), - ( - Token( - "sed", - ), - 7593..7596, - ), - ( - Token( - "-", - ), - 7597..7598, - ), - ( - Token( - "ne", - ), - 7598..7600, - ), - ( - Token( - "\"", - ), - 7601..7602, - ), - ( - Token( - "/", + "\" | sed -ne \"/", ), - 7602..7603, + 7589..7603, ), ( Token( @@ -9341,21 +7049,9 @@ expression: tokens ), ( Token( - "/", - ), - 7611..7612, - ), - ( - Token( - "p", - ), - 7612..7613, - ), - ( - Token( - "\"", + "/p\" ", ), - 7613..7614, + 7611..7615, ), ( Token( @@ -9779,15 +7475,9 @@ expression: tokens ), ( Token( - "echo", - ), - 8059..8063, - ), - ( - Token( - "\"", + "echo \"", ), - 8064..8065, + 8058..8065, ), ( Token( @@ -9809,21 +7499,9 @@ expression: tokens ), ( Token( - "\"", - ), - 8071..8072, - ), - ( - Token( - "|", - ), - 8073..8074, - ), - ( - Token( - "rev", + "\" | rev ", ), - 8075..8078, + 8071..8079, ), ( Token( @@ -9965,27 +7643,9 @@ expression: tokens ), ( Token( - "if", - ), - 8201..8203, - ), - ( - Token( - "[", - ), - 8204..8205, - ), - ( - Token( - "[", - ), - 8205..8206, - ), - ( - Token( - "\"", + "if [[ \"", ), - 8207..8208, + 8200..8208, ), ( Token( @@ -10007,21 +7667,9 @@ expression: tokens ), ( Token( - "\"", - ), - 8214..8215, - ), - ( - Token( - "==", - ), - 8216..8218, - ), - ( - Token( - "\"", + "\" == \"", ), - 8219..8220, + 8214..8220, ), ( Token( @@ -10043,57 +7691,9 @@ expression: tokens ), ( Token( - "\"", - ), - 8228..8229, - ), - ( - Token( - "*", - ), - 8229..8230, - ), - ( - Token( - "]", - ), - 8231..8232, - ), - ( - Token( - "]", - ), - 8232..8233, - ), - ( - Token( - ";", - ), - 8233..8234, - ), - ( - Token( - "then", - ), - 8235..8239, - ), - ( - Token( - "echo", - ), - 8244..8248, - ), - ( - Token( - "1", - ), - 8249..8250, - ), - ( - Token( - "fi", + "\"* ]]; then\n echo 1\n fi ", ), - 8253..8255, + 8228..8256, ), ( Token( @@ -10271,27 +7871,9 @@ expression: tokens ), ( Token( - "if", - ), - 8398..8400, - ), - ( - Token( - "[", - ), - 8401..8402, - ), - ( - Token( - "[", - ), - 8402..8403, - ), - ( - Token( - "\"", + "if [[ \"", ), - 8404..8405, + 8397..8405, ), ( Token( @@ -10313,27 +7895,9 @@ expression: tokens ), ( Token( - "\"", - ), - 8411..8412, - ), - ( - Token( - "==", - ), - 8413..8415, - ), - ( - Token( - "*", - ), - 8416..8417, - ), - ( - Token( - "\"", + "\" == *\"", ), - 8417..8418, + 8411..8418, ), ( Token( @@ -10355,51 +7919,9 @@ expression: tokens ), ( Token( - "\"", - ), - 8426..8427, - ), - ( - Token( - "]", - ), - 8428..8429, - ), - ( - Token( - "]", - ), - 8429..8430, - ), - ( - Token( - ";", - ), - 8430..8431, - ), - ( - Token( - "then", - ), - 8432..8436, - ), - ( - Token( - "echo", - ), - 8441..8445, - ), - ( - Token( - "1", - ), - 8446..8447, - ), - ( - Token( - "fi", + "\" ]]; then\n echo 1\n fi ", ), - 8450..8452, + 8426..8453, ), ( Token( @@ -10747,103 +8269,55 @@ expression: tokens Token( "$", ), - 9010..9011, - ), - ( - Token( - 
"printf", - ), - 9012..9018, - ), - ( - Token( - "\"", - ), - 9019..9020, - ), - ( - Token( - "%", - ), - 9020..9021, - ), - ( - Token( - ".", - ), - 9021..9022, - ), - ( - Token( - "{", - ), - 9022..9023, - ), - ( - Token( - "length", - ), - 9023..9029, - ), - ( - Token( - "}", - ), - 9029..9030, - ), - ( - Token( - "s", - ), - 9030..9031, + 9010..9011, ), ( Token( - "\"", + "printf \"%.", ), - 9031..9032, + 9011..9022, ), ( Token( - "\"", + "{", ), - 9033..9034, + 9022..9023, ), ( Token( - "\\", + "length", ), - 9034..9035, + 9023..9029, ), ( Token( - "$", + "}", ), - 9035..9036, + 9029..9030, ), ( Token( - "\\", + "s\" \"", ), - 9036..9037, + 9030..9034, ), ( Token( - "{", + "\\$", ), - 9037..9038, + 9034..9036, ), ( Token( - "text", + "\\{", ), - 9038..9042, + 9036..9038, ), ( Token( - ":", + "text: ", ), - 9042..9043, + 9038..9044, ), ( Token( @@ -10865,15 +8339,9 @@ expression: tokens ), ( Token( - "}", - ), - 9051..9052, - ), - ( - Token( - "\"", + "}\" ", ), - 9052..9053, + 9051..9054, ), ( Token( @@ -11015,81 +8483,27 @@ expression: tokens ), ( Token( - "printf", - ), - 9316..9322, - ), - ( - Token( - "\"", - ), - 9323..9324, - ), - ( - Token( - "%", - ), - 9324..9325, - ), - ( - Token( - ".", - ), - 9325..9326, - ), - ( - Token( - "1s", - ), - 9326..9328, - ), - ( - Token( - "\"", - ), - 9328..9329, - ), - ( - Token( - "\"", - ), - 9330..9331, - ), - ( - Token( - "\\", - ), - 9331..9332, - ), - ( - Token( - "$", + "printf \"%.1s\" \"", ), - 9332..9333, + 9315..9331, ), ( Token( - "\\", + "\\$", ), - 9333..9334, + 9331..9333, ), ( Token( - "{", - ), - 9334..9335, - ), - ( - Token( - "text", + "\\{", ), - 9335..9339, + 9333..9335, ), ( Token( - ":", + "text: ", ), - 9339..9340, + 9335..9341, ), ( Token( @@ -11111,15 +8525,9 @@ expression: tokens ), ( Token( - "}", - ), - 9348..9349, - ), - ( - Token( - "\"", + "}\" ", ), - 9349..9350, + 9348..9351, ), ( Token( @@ -11141,15 +8549,9 @@ expression: tokens ), ( Token( - "#", - ), - 9409..9410, - ), - ( - Token( - "[", + "#[", ), - 9410..9411, + 9409..9411, ), ( Token( @@ -11375,39 +8777,21 @@ expression: tokens ), ( Token( - "echo", - ), - 9600..9604, - ), - ( - Token( - "\"", - ), - 9605..9606, - ), - ( - Token( - "\\", - ), - 9606..9607, - ), - ( - Token( - "$", + "echo \"", ), - 9607..9608, + 9599..9606, ), ( Token( - "\\", + "\\$", ), - 9608..9609, + 9606..9608, ), ( Token( - "{", + "\\{", ), - 9609..9610, + 9608..9610, ), ( Token( @@ -11435,21 +8819,9 @@ expression: tokens ), ( Token( - "^", - ), - 9623..9624, - ), - ( - Token( - "}", - ), - 9624..9625, - ), - ( - Token( - "\"", + "^}\" ", ), - 9625..9626, + 9623..9627, ), ( Token( @@ -11525,15 +8897,9 @@ expression: tokens ), ( Token( - "echo", - ), - 9741..9745, - ), - ( - Token( - "\"", + "echo \"", ), - 9746..9747, + 9740..9747, ), ( Token( @@ -11555,57 +8921,15 @@ expression: tokens ), ( Token( - "\"", - ), - 9753..9754, - ), - ( - Token( - "|", - ), - 9755..9756, - ), - ( - Token( - "sed", - ), - 9757..9760, - ), - ( - Token( - "\"", - ), - 9761..9762, - ), - ( - Token( - "s", - ), - 9762..9763, - ), - ( - Token( - "/", - ), - 9763..9764, - ), - ( - Token( - "^", - ), - 9764..9765, - ), - ( - Token( - "\\", + "\" | sed \"s/^", ), - 9765..9766, + 9753..9765, ), ( Token( - "(", + "\\(", ), - 9766..9767, + 9765..9767, ), ( Token( @@ -11615,15 +8939,9 @@ expression: tokens ), ( Token( - "\\", - ), - 9768..9769, - ), - ( - Token( - ")", + "\\)", ), - 9769..9770, + 9768..9770, ), ( Token( @@ -11633,39 +8951,21 @@ expression: tokens ), ( Token( - "\\", - ), - 9771..9772, - ), - ( - 
Token( - "U", - ), - 9772..9773, - ), - ( - Token( - "\\", - ), - 9773..9774, - ), - ( - Token( - "1", + "\\U", ), - 9774..9775, + 9771..9773, ), ( Token( - "/", + "\\1", ), - 9775..9776, + 9773..9775, ), ( Token( - "\"", + "/\" ", ), - 9776..9777, + 9775..9778, ), ( Token( @@ -12071,21 +9371,9 @@ expression: tokens ), ( Token( - "printf", - ), - 10146..10152, - ), - ( - Token( - "\"", - ), - 10153..10154, - ), - ( - Token( - "%", + "printf \"%", ), - 10154..10155, + 10145..10155, ), ( Token( @@ -12107,57 +9395,9 @@ expression: tokens ), ( Token( - "s", - ), - 10163..10164, - ), - ( - Token( - "\"", - ), - 10164..10165, - ), - ( - Token( - "\"", - ), - 10166..10167, - ), - ( - Token( - "\"", - ), - 10167..10168, - ), - ( - Token( - "|", - ), - 10169..10170, - ), - ( - Token( - "tr", - ), - 10171..10173, - ), - ( - Token( - "\"", - ), - 10174..10175, - ), - ( - Token( - "\"", - ), - 10176..10177, - ), - ( - Token( - "\"", + "s\" \"\" | tr \" \" \"", ), - 10178..10179, + 10163..10179, ), ( Token( @@ -12179,9 +9419,9 @@ expression: tokens ), ( Token( - "\"", + "\" ", ), - 10184..10185, + 10184..10186, ), ( Token( @@ -12473,21 +9713,9 @@ expression: tokens ), ( Token( - "printf", - ), - 10463..10469, - ), - ( - Token( - "\"", - ), - 10470..10471, - ), - ( - Token( - "%", + "printf \"%", ), - 10471..10472, + 10462..10472, ), ( Token( @@ -12509,57 +9737,9 @@ expression: tokens ), ( Token( - "s", - ), - 10480..10481, - ), - ( - Token( - "\"", - ), - 10481..10482, - ), - ( - Token( - "\"", - ), - 10483..10484, - ), - ( - Token( - "\"", - ), - 10484..10485, - ), - ( - Token( - "|", - ), - 10486..10487, - ), - ( - Token( - "tr", + "s\" \"\" | tr \" \" \"", ), - 10488..10490, - ), - ( - Token( - "\"", - ), - 10491..10492, - ), - ( - Token( - "\"", - ), - 10493..10494, - ), - ( - Token( - "\"", - ), - 10495..10496, + 10480..10496, ), ( Token( @@ -12581,9 +9761,9 @@ expression: tokens ), ( Token( - "\"", + "\" ", ), - 10501..10502, + 10501..10503, ), ( Token( diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha050__string_escapes.snap b/tests/grammar/snapshots/r#mod__grammar__alpha050__string_escapes.snap new file mode 100644 index 0000000..f0ef920 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha050__string_escapes.snap @@ -0,0 +1,60 @@ +--- +source: tests/grammar/alpha050.rs +expression: "compiler.tokenize(r#\"\"hello\\nworld\\t\\\"quote\\\"\"\"#)" +--- +[ + ( + Token( + "\"", + ), + 0..1, + ), + ( + Token( + "hello", + ), + 1..6, + ), + ( + Token( + "\\n", + ), + 6..8, + ), + ( + Token( + "world", + ), + 8..13, + ), + ( + Token( + "\\t", + ), + 13..15, + ), + ( + Token( + "\\\"", + ), + 15..17, + ), + ( + Token( + "quote", + ), + 17..22, + ), + ( + Token( + "\\\"", + ), + 22..24, + ), + ( + Token( + "\"", + ), + 24..25, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha050__string_multi_interpolation.snap b/tests/grammar/snapshots/r#mod__grammar__alpha050__string_multi_interpolation.snap new file mode 100644 index 0000000..0585318 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha050__string_multi_interpolation.snap @@ -0,0 +1,84 @@ +--- +source: tests/grammar/alpha050.rs +expression: "compiler.tokenize(r#\"\"{a} and {b} and {c}\"\"#)" +--- +[ + ( + Token( + "\"", + ), + 0..1, + ), + ( + Token( + "{", + ), + 1..2, + ), + ( + Token( + "a", + ), + 2..3, + ), + ( + Token( + "}", + ), + 3..4, + ), + ( + Token( + " and ", + ), + 4..9, + ), + ( + Token( + "{", + ), + 9..10, + ), + ( + Token( + "b", + ), + 10..11, + ), + ( + Token( + "}", + ), + 11..12, 
+ ), + ( + Token( + " and ", + ), + 12..17, + ), + ( + Token( + "{", + ), + 17..18, + ), + ( + Token( + "c", + ), + 18..19, + ), + ( + Token( + "}", + ), + 19..20, + ), + ( + Token( + "\"", + ), + 20..21, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha050__string_nested_braces.snap b/tests/grammar/snapshots/r#mod__grammar__alpha050__string_nested_braces.snap new file mode 100644 index 0000000..34b008f --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha050__string_nested_braces.snap @@ -0,0 +1,120 @@ +--- +source: tests/grammar/alpha050.rs +expression: "compiler.tokenize(r#\"\"text {if true { \"inner\" } else { \"other\" }} end\"\"#)" +--- +[ + ( + Token( + "\"", + ), + 0..1, + ), + ( + Token( + "text ", + ), + 1..6, + ), + ( + Token( + "{", + ), + 6..7, + ), + ( + Token( + "if", + ), + 7..9, + ), + ( + Token( + "true", + ), + 10..14, + ), + ( + Token( + "{", + ), + 15..16, + ), + ( + Token( + "\"", + ), + 17..18, + ), + ( + Token( + "inner", + ), + 18..23, + ), + ( + Token( + "\"", + ), + 23..24, + ), + ( + Token( + "}", + ), + 25..26, + ), + ( + Token( + "else", + ), + 27..31, + ), + ( + Token( + "{", + ), + 32..33, + ), + ( + Token( + "\"", + ), + 34..35, + ), + ( + Token( + "other", + ), + 35..40, + ), + ( + Token( + "\"", + ), + 40..41, + ), + ( + Token( + "}", + ), + 42..43, + ), + ( + Token( + "}", + ), + 43..44, + ), + ( + Token( + " end", + ), + 44..48, + ), + ( + Token( + "\"", + ), + 48..49, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha050__string_trailing_backslash.snap b/tests/grammar/snapshots/r#mod__grammar__alpha050__string_trailing_backslash.snap new file mode 100644 index 0000000..cfa2f33 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha050__string_trailing_backslash.snap @@ -0,0 +1,24 @@ +--- +source: tests/grammar/alpha050.rs +expression: "compiler.tokenize(r#\"\"test\\\"\"#)" +--- +[ + ( + Token( + "\"", + ), + 0..1, + ), + ( + Token( + "test", + ), + 1..5, + ), + ( + Token( + "\\\"", + ), + 5..7, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha050__triple_dollar.snap b/tests/grammar/snapshots/r#mod__grammar__alpha050__triple_dollar.snap new file mode 100644 index 0000000..ec87d23 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha050__triple_dollar.snap @@ -0,0 +1,24 @@ +--- +source: tests/grammar/alpha050.rs +expression: "compiler.tokenize(\"$$$\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "$", + ), + 1..2, + ), + ( + Token( + "$", + ), + 2..3, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha050__unclosed_command.snap b/tests/grammar/snapshots/r#mod__grammar__alpha050__unclosed_command.snap new file mode 100644 index 0000000..953a308 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha050__unclosed_command.snap @@ -0,0 +1,18 @@ +--- +source: tests/grammar/alpha050.rs +expression: "compiler.tokenize(\"$echo test\")" +--- +[ + ( + Token( + "$", + ), + 0..1, + ), + ( + Token( + "echo test", + ), + 1..10, + ), +] diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha050__unclosed_string.snap b/tests/grammar/snapshots/r#mod__grammar__alpha050__unclosed_string.snap new file mode 100644 index 0000000..58ee422 --- /dev/null +++ b/tests/grammar/snapshots/r#mod__grammar__alpha050__unclosed_string.snap @@ -0,0 +1,18 @@ +--- +source: tests/grammar/alpha050.rs +expression: "compiler.tokenize(r#\"\"unclosed\"#)" +--- +[ + ( + Token( + "\"", + ), + 0..1, + ), + ( + Token( + "unclosed", + ), + 1..9, + ), +] diff --git 
a/tests/grammar/snapshots/r#mod__grammar__alpha050__unfinished_function_call-2.snap b/tests/grammar/snapshots/r#mod__grammar__alpha050__unfinished_function_call-2.snap index 3bf4af5..a8d8792 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha050__unfinished_function_call-2.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha050__unfinished_function_call-2.snap @@ -242,29 +242,11 @@ expression: result ( Text( ( - "text", - 165..169, + "text.txt", + 165..173, ), ), - 165..169, - ), - ( - Text( - ( - ".", - 169..170, - ), - ), - 169..170, - ), - ( - Text( - ( - "txt", - 170..173, - ), - ), - 170..173, + 165..173, ), ], ), diff --git a/tests/grammar/snapshots/r#mod__grammar__alpha050__unfinished_function_call.snap b/tests/grammar/snapshots/r#mod__grammar__alpha050__unfinished_function_call.snap index e76b58b..86c1267 100644 --- a/tests/grammar/snapshots/r#mod__grammar__alpha050__unfinished_function_call.snap +++ b/tests/grammar/snapshots/r#mod__grammar__alpha050__unfinished_function_call.snap @@ -41,21 +41,9 @@ expression: tokens ), ( Token( - "std", + "std/array", ), - 37..40, - ), - ( - Token( - "/", - ), - 40..41, - ), - ( - Token( - "array", - ), - 41..46, + 37..46, ), ( Token( @@ -263,21 +251,9 @@ expression: tokens ), ( Token( - "text", - ), - 165..169, - ), - ( - Token( - ".", - ), - 169..170, - ), - ( - Token( - "txt", + "text.txt", ), - 170..173, + 165..173, ), ( Token(