[feat] add napi support

This commit is contained in:
HerringtonDarkholme
2022-08-28 23:22:10 -04:00
parent e6f7434420
commit 532f0557a6
70 changed files with 7486 additions and 3113 deletions
+15
View File
@@ -0,0 +1,15 @@
[target.aarch64-unknown-linux-gnu]
linker = "aarch64-linux-gnu-gcc"
[target.armv7-unknown-linux-gnueabihf]
linker = "arm-linux-gnueabihf-gcc"
[target.x86_64-unknown-linux-musl]
rustflags = [
"-C",
"target-feature=-crt-static",
]
[target.aarch64-unknown-linux-musl]
linker = "aarch64-linux-musl-gcc"
rustflags = ["-C", "target-feature=-crt-static"]
+15
View File
@@ -0,0 +1,15 @@
# EditorConfig helps developers define and maintain consistent
# coding styles between different editors or IDEs
# http://editorconfig.org
root = true
[*]
indent_style = space
indent_size = 2
end_of_line = lf
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
[*.md]
trim_trailing_whitespace = false
+510
View File
@@ -0,0 +1,510 @@
name: CI
env:
DEBUG: napi:*
APP_NAME: package-template
MACOSX_DEPLOYMENT_TARGET: '10.13'
'on':
push:
branches:
- main
tags-ignore:
- '**'
paths-ignore:
- '**/*.md'
- LICENSE
- '**/*.gitignore'
- .editorconfig
- docs/**
pull_request: null
jobs:
build:
if: "!contains(github.event.head_commit.message, 'skip ci')"
strategy:
fail-fast: false
matrix:
settings:
- host: macos-latest
target: x86_64-apple-darwin
build: |
yarn build
strip -x *.node
- host: windows-latest
build: yarn build
target: x86_64-pc-windows-msvc
- host: windows-latest
build: |
yarn build --target i686-pc-windows-msvc
yarn test
target: i686-pc-windows-msvc
- host: ubuntu-latest
target: x86_64-unknown-linux-gnu
docker: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-debian
build: |-
set -e &&
yarn build --target x86_64-unknown-linux-gnu &&
strip *.node
- host: ubuntu-latest
target: x86_64-unknown-linux-musl
docker: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-alpine
build: set -e && yarn build && strip *.node
- host: macos-latest
target: aarch64-apple-darwin
build: |
sudo rm -Rf /Library/Developer/CommandLineTools/SDKs/*;
export CC=$(xcrun -f clang);
export CXX=$(xcrun -f clang++);
SYSROOT=$(xcrun --sdk macosx --show-sdk-path);
export CFLAGS="-isysroot $SYSROOT -isystem $SYSROOT";
yarn build --target aarch64-apple-darwin
strip -x *.node
- host: ubuntu-latest
target: aarch64-unknown-linux-gnu
docker: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-debian-aarch64
build: |-
set -e &&
yarn build --target aarch64-unknown-linux-gnu &&
aarch64-unknown-linux-gnu-strip *.node
- host: ubuntu-latest
target: armv7-unknown-linux-gnueabihf
setup: |
sudo apt-get update
sudo apt-get install gcc-arm-linux-gnueabihf g++-arm-linux-gnueabihf -y
build: |
yarn build --target=armv7-unknown-linux-gnueabihf
arm-linux-gnueabihf-strip *.node
- host: ubuntu-latest
target: aarch64-linux-android
build: |
export CARGO_TARGET_AARCH64_LINUX_ANDROID_LINKER="${ANDROID_NDK_LATEST_HOME}/toolchains/llvm/prebuilt/linux-x86_64/bin/aarch64-linux-android24-clang"
export CC="${ANDROID_NDK_LATEST_HOME}/toolchains/llvm/prebuilt/linux-x86_64/bin/aarch64-linux-android24-clang"
export CXX="${ANDROID_NDK_LATEST_HOME}/toolchains/llvm/prebuilt/linux-x86_64/bin/aarch64-linux-android24-clang++"
export AR="${ANDROID_NDK_LATEST_HOME}/toolchains/llvm/prebuilt/linux-x86_64/bin/llvm-ar"
export PATH="${ANDROID_NDK_LATEST_HOME}/toolchains/llvm/prebuilt/linux-x86_64/bin:${PATH}"
yarn build --target aarch64-linux-android
${ANDROID_NDK_LATEST_HOME}/toolchains/llvm/prebuilt/linux-x86_64/bin/llvm-strip *.node
- host: ubuntu-latest
target: armv7-linux-androideabi
build: |
export CARGO_TARGET_ARMV7_LINUX_ANDROIDEABI_LINKER="${ANDROID_NDK_LATEST_HOME}/toolchains/llvm/prebuilt/linux-x86_64/bin/armv7a-linux-androideabi24-clang"
export CC="${ANDROID_NDK_LATEST_HOME}/toolchains/llvm/prebuilt/linux-x86_64/bin/armv7a-linux-androideabi24-clang"
export CXX="${ANDROID_NDK_LATEST_HOME}/toolchains/llvm/prebuilt/linux-x86_64/bin/armv7a-linux-androideabi24-clang++"
export AR="${ANDROID_NDK_LATEST_HOME}/toolchains/llvm/prebuilt/linux-x86_64/bin/llvm-ar"
export PATH="${ANDROID_NDK_LATEST_HOME}/toolchains/llvm/prebuilt/linux-x86_64/bin:${PATH}"
yarn build --target armv7-linux-androideabi
${ANDROID_NDK_LATEST_HOME}/toolchains/llvm/prebuilt/linux-x86_64/bin/llvm-strip *.node
- host: ubuntu-latest
target: aarch64-unknown-linux-musl
docker: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-alpine
build: |-
set -e &&
rustup target add aarch64-unknown-linux-musl &&
yarn build --target aarch64-unknown-linux-musl &&
/aarch64-linux-musl-cross/bin/aarch64-linux-musl-strip *.node
- host: windows-latest
target: aarch64-pc-windows-msvc
build: yarn build --target aarch64-pc-windows-msvc
name: stable - ${{ matrix.settings.target }} - node@16
runs-on: ${{ matrix.settings.host }}
steps:
- uses: actions/checkout@v3
- name: Setup node
uses: actions/setup-node@v3
if: ${{ !matrix.settings.docker }}
with:
node-version: 16
check-latest: true
cache: yarn
- name: Install
uses: actions-rs/toolchain@v1
if: ${{ !matrix.settings.docker }}
with:
profile: minimal
override: true
toolchain: stable
target: ${{ matrix.settings.target }}
- name: Cache cargo
uses: actions/cache@v3
with:
path: |
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
.cargo-cache
target/
key: ${{ matrix.settings.target }}-cargo-registry
- name: Cache NPM dependencies
uses: actions/cache@v3
with:
path: .yarn/cache
key: npm-cache-build-${{ matrix.settings.target }}-node@16
- name: Setup toolchain
run: ${{ matrix.settings.setup }}
if: ${{ matrix.settings.setup }}
shell: bash
- name: Setup node x86
if: matrix.settings.target == 'i686-pc-windows-msvc'
run: yarn config set supportedArchitectures.cpu "ia32"
shell: bash
- name: Install dependencies
run: yarn install
- name: Setup node x86
uses: actions/setup-node@v3
if: matrix.settings.target == 'i686-pc-windows-msvc'
with:
node-version: 16
check-latest: true
cache: yarn
architecture: x86
- name: Build in docker
uses: addnab/docker-run-action@v3
if: ${{ matrix.settings.docker }}
with:
image: ${{ matrix.settings.docker }}
options: --user 0:0 -v ${{ github.workspace }}/.cargo-cache/git/db:/usr/local/cargo/git/db -v ${{ github.workspace }}/.cargo/registry/cache:/usr/local/cargo/registry/cache -v ${{ github.workspace }}/.cargo/registry/index:/usr/local/cargo/registry/index -v ${{ github.workspace }}:/build -w /build
run: ${{ matrix.settings.build }}
- name: Build
run: ${{ matrix.settings.build }}
if: ${{ !matrix.settings.docker }}
shell: bash
- name: Upload artifact
uses: actions/upload-artifact@v3
with:
name: bindings-${{ matrix.settings.target }}
path: ${{ env.APP_NAME }}.*.node
if-no-files-found: error
build-freebsd:
runs-on: macos-12
name: Build FreeBSD
steps:
- uses: actions/checkout@v3
- name: Build
id: build
uses: vmactions/freebsd-vm@v0.2.3
env:
DEBUG: napi:*
RUSTUP_HOME: /usr/local/rustup
CARGO_HOME: /usr/local/cargo
RUSTUP_IO_THREADS: 1
with:
envs: DEBUG RUSTUP_HOME CARGO_HOME RUSTUP_IO_THREADS
usesh: true
mem: 3000
prepare: |
pkg install -y curl node16 python2
curl -qL https://www.npmjs.com/install.sh | sh
npm install --location=global --ignore-scripts yarn
curl https://sh.rustup.rs -sSf --output rustup.sh
sh rustup.sh -y --profile minimal --default-toolchain stable
export PATH="/usr/local/cargo/bin:$PATH"
echo "~~~~ rustc --version ~~~~"
rustc --version
echo "~~~~ node -v ~~~~"
node -v
echo "~~~~ yarn --version ~~~~"
yarn --version
run: |
export PATH="/usr/local/cargo/bin:$PATH"
pwd
ls -lah
whoami
env
freebsd-version
yarn install
yarn build
strip -x *.node
yarn test
rm -rf node_modules
rm -rf target
- name: Upload artifact
uses: actions/upload-artifact@v3
with:
name: bindings-freebsd
path: ${{ env.APP_NAME }}.*.node
if-no-files-found: error
test-macOS-windows-binding:
name: Test bindings on ${{ matrix.settings.target }} - node@${{ matrix.node }}
needs:
- build
strategy:
fail-fast: false
matrix:
settings:
- host: windows-latest
target: x86_64-pc-windows-msvc
node:
- '14'
- '16'
- '18'
runs-on: ${{ matrix.settings.host }}
steps:
- uses: actions/checkout@v3
- name: Setup node
uses: actions/setup-node@v3
with:
node-version: ${{ matrix.node }}
check-latest: true
cache: yarn
- name: Cache NPM dependencies
uses: actions/cache@v3
with:
path: .yarn/cache
key: npm-cache-test-${{ matrix.settings.target }}-${{ matrix.node }}
- name: Install dependencies
run: yarn install
- name: Download artifacts
uses: actions/download-artifact@v3
with:
name: bindings-${{ matrix.settings.target }}
path: .
- name: List packages
run: ls -R .
shell: bash
- name: Test bindings
run: yarn test
test-linux-x64-gnu-binding:
name: Test bindings on Linux-x64-gnu - node@${{ matrix.node }}
needs:
- build
strategy:
fail-fast: false
matrix:
node:
- '14'
- '16'
- '18'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Setup node
uses: actions/setup-node@v3
with:
node-version: ${{ matrix.node }}
check-latest: true
cache: yarn
- name: Cache NPM dependencies
uses: actions/cache@v3
with:
path: .yarn/cache
key: npm-cache-test-linux-x64-gnu-${{ matrix.node }}
- name: Install dependencies
run: yarn install
- name: Download artifacts
uses: actions/download-artifact@v3
with:
name: bindings-x86_64-unknown-linux-gnu
path: .
- name: List packages
run: ls -R .
shell: bash
- name: Test bindings
run: docker run --rm -v $(pwd):/build -w /build node:${{ matrix.node }}-slim yarn test
test-linux-x64-musl-binding:
name: Test bindings on x86_64-unknown-linux-musl - node@${{ matrix.node }}
needs:
- build
strategy:
fail-fast: false
matrix:
node:
- '14'
- '16'
- '18'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Setup node
uses: actions/setup-node@v3
with:
node-version: ${{ matrix.node }}
check-latest: true
cache: yarn
- name: Cache NPM dependencies
uses: actions/cache@v3
with:
path: .yarn/cache
key: npm-cache-test-x86_64-unknown-linux-musl-${{ matrix.node }}
- name: Install dependencies
run: |
yarn config set supportedArchitectures.libc "musl"
yarn install
- name: Download artifacts
uses: actions/download-artifact@v3
with:
name: bindings-x86_64-unknown-linux-musl
path: .
- name: List packages
run: ls -R .
shell: bash
- name: Test bindings
run: docker run --rm -v $(pwd):/build -w /build node:${{ matrix.node }}-alpine yarn test
test-linux-aarch64-gnu-binding:
name: Test bindings on aarch64-unknown-linux-gnu - node@${{ matrix.node }}
needs:
- build
strategy:
fail-fast: false
matrix:
node:
- '14'
- '16'
- '18'
runs-on: ubuntu-latest
steps:
- run: docker run --rm --privileged multiarch/qemu-user-static:register --reset
- uses: actions/checkout@v3
- name: Download artifacts
uses: actions/download-artifact@v3
with:
name: bindings-aarch64-unknown-linux-gnu
path: .
- name: List packages
run: ls -R .
shell: bash
- name: Cache NPM dependencies
uses: actions/cache@v3
with:
path: .yarn/cache
key: npm-cache-test-linux-aarch64-gnu-${{ matrix.node }}
- name: Install dependencies
run: |
yarn config set supportedArchitectures.cpu "arm64"
yarn config set supportedArchitectures.libc "glibc"
yarn install
- name: Setup and run tests
uses: addnab/docker-run-action@v3
with:
image: ghcr.io/napi-rs/napi-rs/nodejs:aarch64-${{ matrix.node }}
options: '-v ${{ github.workspace }}:/build -w /build'
run: |
set -e
yarn test
ls -la
test-linux-aarch64-musl-binding:
name: Test bindings on aarch64-unknown-linux-musl - node@${{ matrix.node }}
needs:
- build
runs-on: ubuntu-latest
steps:
- run: docker run --rm --privileged multiarch/qemu-user-static:register --reset
- uses: actions/checkout@v3
- name: Download artifacts
uses: actions/download-artifact@v3
with:
name: bindings-aarch64-unknown-linux-musl
path: .
- name: List packages
run: ls -R .
shell: bash
- name: Cache NPM dependencies
uses: actions/cache@v3
with:
path: .yarn/cache
key: npm-cache-test-linux-aarch64-musl-${{ matrix.node }}
- name: Install dependencies
run: |
yarn config set supportedArchitectures.cpu "arm64"
yarn config set supportedArchitectures.libc "musl"
yarn install
- name: Setup and run tests
uses: addnab/docker-run-action@v3
with:
image: multiarch/alpine:aarch64-latest-stable
options: '-v ${{ github.workspace }}:/build -w /build'
run: |
set -e
apk add nodejs npm yarn
yarn test
test-linux-arm-gnueabihf-binding:
name: Test bindings on armv7-unknown-linux-gnueabihf - node@${{ matrix.node }}
needs:
- build
strategy:
fail-fast: false
matrix:
node:
- '14'
- '16'
- '18'
runs-on: ubuntu-latest
steps:
- run: docker run --rm --privileged multiarch/qemu-user-static:register --reset
- uses: actions/checkout@v3
- name: Download artifacts
uses: actions/download-artifact@v3
with:
name: bindings-armv7-unknown-linux-gnueabihf
path: .
- name: List packages
run: ls -R .
shell: bash
- name: Cache NPM dependencies
uses: actions/cache@v3
with:
path: .yarn/cache
key: npm-cache-test-linux-arm-gnueabihf-${{ matrix.node }}
- name: Install dependencies
run: |
yarn config set supportedArchitectures.cpu "arm"
yarn install
- name: Setup and run tests
uses: addnab/docker-run-action@v3
with:
image: ghcr.io/napi-rs/napi-rs/nodejs:armhf-${{ matrix.node }}
options: '-v ${{ github.workspace }}:/build -w /build'
run: |
set -e
yarn test
ls -la
publish:
name: Publish
runs-on: ubuntu-latest
needs:
- build-freebsd
- test-macOS-windows-binding
- test-linux-x64-gnu-binding
- test-linux-x64-musl-binding
- test-linux-aarch64-gnu-binding
- test-linux-aarch64-musl-binding
- test-linux-arm-gnueabihf-binding
steps:
- uses: actions/checkout@v3
- name: Setup node
uses: actions/setup-node@v3
with:
node-version: 16
check-latest: true
cache: yarn
- name: Cache NPM dependencies
uses: actions/cache@v3
with:
path: .yarn/cache
key: npm-cache-ubuntu-latest-publish
restore-keys: |
npm-cache-
- name: Install dependencies
run: yarn install
- name: Download all artifacts
uses: actions/download-artifact@v3
with:
path: artifacts
- name: Move artifacts
run: yarn artifacts
- name: List packages
run: ls -R ./npm
shell: bash
- name: Publish
run: |
if git log -1 --pretty=%B | grep "^[0-9]\+\.[0-9]\+\.[0-9]\+$";
then
echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
npm publish --access public
elif git log -1 --pretty=%B | grep "^[0-9]\+\.[0-9]\+\.[0-9]\+";
then
echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
npm publish --tag next --access public
else
echo "Not a release, skipping publish"
fi
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
+4
View File
@@ -131,3 +131,7 @@ dist
Cargo.lock
*.node
crates/napi/index.js
crates/napi/index.d.ts
.yarn
+17 -17
View File
@@ -4,26 +4,26 @@ use ignore::WalkBuilder;
use std::fs::read_to_string;
pub fn find_config(config_path: Option<String>) -> RuleCollection<SupportLang> {
let config_path = config_path.unwrap_or_else(find_default_config);
let config_str = read_to_string(config_path).unwrap();
let sg_config = deserialize_sgconfig(&config_str).unwrap();
let mut configs = vec![];
for dir in sg_config.rule_dirs {
let walker = WalkBuilder::new(&dir).types(config_file_type()).build();
for dir in walker {
let config_file = dir.unwrap();
if !config_file.file_type().unwrap().is_file() {
continue;
}
let path = config_file.path();
let config_path = config_path.unwrap_or_else(find_default_config);
let config_str = read_to_string(config_path).unwrap();
let sg_config = deserialize_sgconfig(&config_str).unwrap();
let mut configs = vec![];
for dir in sg_config.rule_dirs {
let walker = WalkBuilder::new(&dir).types(config_file_type()).build();
for dir in walker {
let config_file = dir.unwrap();
if !config_file.file_type().unwrap().is_file() {
continue;
}
let path = config_file.path();
let yaml = read_to_string(path).unwrap();
configs.extend(from_yaml_string(&yaml).unwrap());
}
let yaml = read_to_string(path).unwrap();
configs.extend(from_yaml_string(&yaml).unwrap());
}
RuleCollection::new(configs)
}
RuleCollection::new(configs)
}
fn find_default_config() -> String {
"sgconfig.yml".to_string()
"sgconfig.yml".to_string()
}
+52 -52
View File
@@ -6,70 +6,70 @@ use std::sync::mpsc;
// https://github.com/console-rs/console/blob/be1c2879536c90ffc2b54938b5964084f5fef67d/src/common_term.rs#L56
/// clear screen
pub fn clear() {
print!("\r\x1b[2J\r\x1b[H");
print!("\r\x1b[2J\r\x1b[H");
}
pub fn prompt(prompt_text: &str, letters: &str, default: Option<char>) -> Result<char> {
loop {
let input = prompt_reply_stdout(prompt_text)?;
if let Some(default) = default && input.is_empty() {
loop {
let input = prompt_reply_stdout(prompt_text)?;
if let Some(default) = default && input.is_empty() {
return Ok(default);
}
if input.len() == 1 && letters.contains(&input) {
return Ok(input.chars().next().unwrap());
}
println!("Come again?")
if input.len() == 1 && letters.contains(&input) {
return Ok(input.chars().next().unwrap());
}
println!("Come again?")
}
}
pub fn run_walker(walker: WalkParallel, f: impl Fn(DirEntry) -> WalkState + Sync) {
walker.run(|| {
Box::new(|result| match result {
Ok(entry) => f(entry),
Err(err) => {
eprintln!("ERROR: {}", err);
WalkState::Continue
}
})
});
walker.run(|| {
Box::new(|result| match result {
Ok(entry) => f(entry),
Err(err) => {
eprintln!("ERROR: {}", err);
WalkState::Continue
}
})
});
}
pub fn run_walker_interactive<T: Send>(
walker: WalkParallel,
producer: impl Fn(DirEntry) -> Option<T> + Sync,
consumer: impl Fn(T) + Send,
walker: WalkParallel,
producer: impl Fn(DirEntry) -> Option<T> + Sync,
consumer: impl Fn(T) + Send,
) {
let (tx, rx) = mpsc::channel();
let producer = &producer;
crossbeam::scope(|s| {
s.spawn(move |_| {
walker.run(|| {
let tx = tx.clone();
Box::new(move |result| {
let entry = match result {
Ok(entry) => entry,
Err(err) => {
eprintln!("ERROR: {}", err);
return WalkState::Continue;
}
};
let result = match producer(entry) {
Some(ret) => ret,
None => return WalkState::Continue,
};
match tx.send(result) {
Ok(_) => WalkState::Continue,
Err(_) => WalkState::Quit,
}
})
})
});
s.spawn(move |_| {
while let Ok(ret) = rx.recv() {
clear();
consumer(ret);
let (tx, rx) = mpsc::channel();
let producer = &producer;
crossbeam::scope(|s| {
s.spawn(move |_| {
walker.run(|| {
let tx = tx.clone();
Box::new(move |result| {
let entry = match result {
Ok(entry) => entry,
Err(err) => {
eprintln!("ERROR: {}", err);
return WalkState::Continue;
}
});
})
.expect("Error occurred during spawning threads");
};
let result = match producer(entry) {
Some(ret) => ret,
None => return WalkState::Continue,
};
match tx.send(result) {
Ok(_) => WalkState::Continue,
Err(_) => WalkState::Quit,
}
})
})
});
s.spawn(move |_| {
while let Ok(ret) = rx.recv() {
clear();
consumer(ret);
}
});
})
.expect("Error occurred during spawning threads");
}
+155 -155
View File
@@ -16,15 +16,15 @@ use tree_sitter_typescript::{language_tsx, language_typescript};
pub use rust::Rust;
macro_rules! impl_lang {
($lang: ident, $func: ident) => {
#[derive(Clone, Copy)]
pub struct $lang;
impl Language for $lang {
fn get_ts_language(&self) -> TSLanguage {
$func().into()
}
}
};
($lang: ident, $func: ident) => {
#[derive(Clone, Copy)]
pub struct $lang;
impl Language for $lang {
fn get_ts_language(&self) -> TSLanguage {
$func().into()
}
}
};
}
impl_lang!(C, language_c);
@@ -47,191 +47,191 @@ use std::str::FromStr;
/// represents a dynamic language
#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum SupportLang {
C,
Go,
Html,
JavaScript,
Kotlin,
Lua,
Python,
Rust,
Swift,
Tsx,
TypeScript,
C,
Go,
Html,
JavaScript,
Kotlin,
Lua,
Python,
Rust,
Swift,
Tsx,
TypeScript,
}
#[derive(Debug)]
pub enum SupportLangErr {
LanguageNotSupported(String),
LanguageNotSupported(String),
}
impl Display for SupportLangErr {
fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> {
use SupportLangErr::*;
match self {
LanguageNotSupported(lang) => write!(f, "{} is not supported!", lang),
}
fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> {
use SupportLangErr::*;
match self {
LanguageNotSupported(lang) => write!(f, "{} is not supported!", lang),
}
}
}
impl std::error::Error for SupportLangErr {}
impl FromStr for SupportLang {
type Err = SupportLangErr;
fn from_str(s: &str) -> Result<Self, Self::Err> {
use SupportLang::*;
match s {
"c" => Ok(C),
"go" | "golang" => Ok(Go),
"html" => Ok(Html),
"js" | "jsx" => Ok(JavaScript),
"kt" | "ktm" | "kts" => Ok(Kotlin),
"lua" => Ok(Lua),
"py" | "python" => Ok(Python),
"rs" | "rust" => Ok(Rust),
"swift" => Ok(Swift),
"ts" => Ok(TypeScript),
"tsx" => Ok(Tsx),
_ => Err(SupportLangErr::LanguageNotSupported(s.to_string())),
}
type Err = SupportLangErr;
fn from_str(s: &str) -> Result<Self, Self::Err> {
use SupportLang::*;
match s {
"c" => Ok(C),
"go" | "golang" => Ok(Go),
"html" => Ok(Html),
"js" | "jsx" => Ok(JavaScript),
"kt" | "ktm" | "kts" => Ok(Kotlin),
"lua" => Ok(Lua),
"py" | "python" => Ok(Python),
"rs" | "rust" => Ok(Rust),
"swift" => Ok(Swift),
"ts" => Ok(TypeScript),
"tsx" => Ok(Tsx),
_ => Err(SupportLangErr::LanguageNotSupported(s.to_string())),
}
}
}
macro_rules! impl_lang_method {
($method: ident, $return_type: ty) => {
#[inline]
fn $method(&self) -> $return_type {
use SupportLang as S;
match self {
S::C => C.$method(),
S::Go => Go.$method(),
S::Html => Html.$method(),
S::JavaScript => JavaScript.$method(),
S::Kotlin => Kotlin.$method(),
S::Lua => Lua.$method(),
S::Python => Python.$method(),
S::Rust => Rust.$method(),
S::Swift => Swift.$method(),
S::Tsx => Tsx.$method(),
S::TypeScript => TypeScript.$method(),
}
}
};
($method: ident, $return_type: ty) => {
#[inline]
fn $method(&self) -> $return_type {
use SupportLang as S;
match self {
S::C => C.$method(),
S::Go => Go.$method(),
S::Html => Html.$method(),
S::JavaScript => JavaScript.$method(),
S::Kotlin => Kotlin.$method(),
S::Lua => Lua.$method(),
S::Python => Python.$method(),
S::Rust => Rust.$method(),
S::Swift => Swift.$method(),
S::Tsx => Tsx.$method(),
S::TypeScript => TypeScript.$method(),
}
}
};
}
// TODO: optimize this using macro
impl Language for SupportLang {
fn from_path<P: AsRef<Path>>(path: P) -> Option<Self> {
from_extension(path.as_ref())
}
fn from_path<P: AsRef<Path>>(path: P) -> Option<Self> {
from_extension(path.as_ref())
}
impl_lang_method!(get_ts_language, TSLanguage);
impl_lang_method!(meta_var_char, char);
impl_lang_method!(expando_char, char);
impl_lang_method!(get_ts_language, TSLanguage);
impl_lang_method!(meta_var_char, char);
impl_lang_method!(expando_char, char);
fn extract_meta_var(&self, source: &str) -> Option<MetaVariable> {
use SupportLang as S;
match self {
S::C => C.extract_meta_var(source),
S::Go => Go.extract_meta_var(source),
S::Html => Html.extract_meta_var(source),
S::JavaScript => JavaScript.extract_meta_var(source),
S::Kotlin => Kotlin.extract_meta_var(source),
S::Lua => Lua.extract_meta_var(source),
S::Python => Python.extract_meta_var(source),
S::Rust => Rust.extract_meta_var(source),
S::Swift => Swift.extract_meta_var(source),
S::Tsx => Tsx.extract_meta_var(source),
S::TypeScript => TypeScript.extract_meta_var(source),
}
fn extract_meta_var(&self, source: &str) -> Option<MetaVariable> {
use SupportLang as S;
match self {
S::C => C.extract_meta_var(source),
S::Go => Go.extract_meta_var(source),
S::Html => Html.extract_meta_var(source),
S::JavaScript => JavaScript.extract_meta_var(source),
S::Kotlin => Kotlin.extract_meta_var(source),
S::Lua => Lua.extract_meta_var(source),
S::Python => Python.extract_meta_var(source),
S::Rust => Rust.extract_meta_var(source),
S::Swift => Swift.extract_meta_var(source),
S::Tsx => Tsx.extract_meta_var(source),
S::TypeScript => TypeScript.extract_meta_var(source),
}
}
fn pre_process_pattern<'q>(&self, query: &'q str) -> Cow<'q, str> {
use SupportLang as S;
match self {
S::C => C.pre_process_pattern(query),
S::Go => Go.pre_process_pattern(query),
S::Html => Html.pre_process_pattern(query),
S::JavaScript => JavaScript.pre_process_pattern(query),
S::Kotlin => Kotlin.pre_process_pattern(query),
S::Lua => Lua.pre_process_pattern(query),
S::Python => Python.pre_process_pattern(query),
S::Rust => Rust.pre_process_pattern(query),
S::Swift => Swift.pre_process_pattern(query),
S::Tsx => Tsx.pre_process_pattern(query),
S::TypeScript => TypeScript.pre_process_pattern(query),
}
fn pre_process_pattern<'q>(&self, query: &'q str) -> Cow<'q, str> {
use SupportLang as S;
match self {
S::C => C.pre_process_pattern(query),
S::Go => Go.pre_process_pattern(query),
S::Html => Html.pre_process_pattern(query),
S::JavaScript => JavaScript.pre_process_pattern(query),
S::Kotlin => Kotlin.pre_process_pattern(query),
S::Lua => Lua.pre_process_pattern(query),
S::Python => Python.pre_process_pattern(query),
S::Rust => Rust.pre_process_pattern(query),
S::Swift => Swift.pre_process_pattern(query),
S::Tsx => Tsx.pre_process_pattern(query),
S::TypeScript => TypeScript.pre_process_pattern(query),
}
}
}
/// Guess which programming language a file is written in
/// Adapt from https://github.com/Wilfred/difftastic/blob/master/src/parse/guess_language.rs
pub fn from_extension(path: &Path) -> Option<SupportLang> {
use SupportLang::*;
match path.extension()?.to_str()? {
"c" | "h" => Some(C),
"go" => Some(Go),
"html" | "htm" | "xhtml" => Some(Html),
"cjs" | "js" | "mjs" | "jsx" => Some(JavaScript),
"kt" | "ktm" | "kts" => Some(Kotlin),
"lua" => Some(Lua),
"py" | "py3" | "pyi" | "bzl" => Some(Python),
"rs" => Some(Rust),
"swift" => Some(Swift),
"ts" => Some(TypeScript),
"tsx" => Some(Tsx),
_ => None,
}
use SupportLang::*;
match path.extension()?.to_str()? {
"c" | "h" => Some(C),
"go" => Some(Go),
"html" | "htm" | "xhtml" => Some(Html),
"cjs" | "js" | "mjs" | "jsx" => Some(JavaScript),
"kt" | "ktm" | "kts" => Some(Kotlin),
"lua" => Some(Lua),
"py" | "py3" | "pyi" | "bzl" => Some(Python),
"rs" => Some(Rust),
"swift" => Some(Swift),
"ts" => Some(TypeScript),
"tsx" => Some(Tsx),
_ => None,
}
}
pub fn file_types(lang: &SupportLang) -> Types {
use SupportLang as L;
let mut builder = TypesBuilder::new();
builder.add_defaults();
let builder = match lang {
L::C => builder.select("c"),
L::Go => builder.select("go"),
L::Html => builder.select("html"),
L::JavaScript => {
builder.add("myjs", "*.js").unwrap();
builder.add("myjs", "*.cjs").unwrap();
builder.add("myjs", "*.jsx").unwrap();
builder.add("myjs", "*.mjs").unwrap();
builder.select("myjs")
}
L::Kotlin => builder.select("kotlin"),
L::Lua => builder.select("lua"),
L::Python => builder.select("py"),
L::Rust => builder.select("rust"),
L::Swift => builder.select("swift"),
L::Tsx => {
builder.add("mytsx", "*.tsx").unwrap();
builder.select("mytsx")
}
L::TypeScript => {
builder.add("myts", "*.ts").unwrap();
builder.add("myts", "*.cts").unwrap();
builder.add("myts", "*.mts").unwrap();
builder.select("myts")
}
};
builder.build().unwrap()
use SupportLang as L;
let mut builder = TypesBuilder::new();
builder.add_defaults();
let builder = match lang {
L::C => builder.select("c"),
L::Go => builder.select("go"),
L::Html => builder.select("html"),
L::JavaScript => {
builder.add("myjs", "*.js").unwrap();
builder.add("myjs", "*.cjs").unwrap();
builder.add("myjs", "*.jsx").unwrap();
builder.add("myjs", "*.mjs").unwrap();
builder.select("myjs")
}
L::Kotlin => builder.select("kotlin"),
L::Lua => builder.select("lua"),
L::Python => builder.select("py"),
L::Rust => builder.select("rust"),
L::Swift => builder.select("swift"),
L::Tsx => {
builder.add("mytsx", "*.tsx").unwrap();
builder.select("mytsx")
}
L::TypeScript => {
builder.add("myts", "*.ts").unwrap();
builder.add("myts", "*.cts").unwrap();
builder.add("myts", "*.mts").unwrap();
builder.select("myts")
}
};
builder.build().unwrap()
}
pub fn config_file_type() -> Types {
let mut builder = TypesBuilder::new();
builder.add("yml", "*.yml").unwrap();
builder.add("yml", "*.yaml").unwrap();
builder.select("yml");
builder.build().unwrap()
let mut builder = TypesBuilder::new();
builder.add("yml", "*.yml").unwrap();
builder.add("yml", "*.yaml").unwrap();
builder.select("yml");
builder.build().unwrap()
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_guess_by_extension() {
let path = Path::new("foo.rs");
assert_eq!(from_extension(path), Some(SupportLang::Rust));
}
use super::*;
#[test]
fn test_guess_by_extension() {
let path = Path::new("foo.rs");
assert_eq!(from_extension(path), Some(SupportLang::Rust));
}
}
+54 -54
View File
@@ -6,51 +6,51 @@ use tree_sitter_rust::language as language_rust;
#[derive(Clone, Copy)]
pub struct Rust;
impl Language for Rust {
fn get_ts_language(&self) -> TSLanguage {
language_rust().into()
}
// we can use any char in unicode range [:XID_Start:]
// https://doc.rust-lang.org/reference/identifiers.html
fn expando_char(&self) -> char {
'µ'
}
fn pre_process_pattern<'q>(&self, query: &'q str) -> Cow<'q, str> {
// use stack buffer to reduce allocation
let mut buf = [0; 4];
let expando = self.expando_char().encode_utf8(&mut buf);
// TODO: use more precise replacement
let replaced = query.replace(self.meta_var_char(), expando);
Cow::Owned(replaced)
}
fn get_ts_language(&self) -> TSLanguage {
language_rust().into()
}
// we can use any char in unicode range [:XID_Start:]
// https://doc.rust-lang.org/reference/identifiers.html
fn expando_char(&self) -> char {
'µ'
}
fn pre_process_pattern<'q>(&self, query: &'q str) -> Cow<'q, str> {
// use stack buffer to reduce allocation
let mut buf = [0; 4];
let expando = self.expando_char().encode_utf8(&mut buf);
// TODO: use more precise replacement
let replaced = query.replace(self.meta_var_char(), expando);
Cow::Owned(replaced)
}
}
#[cfg(test)]
mod test {
use super::*;
use ast_grep_core::{Matcher, Pattern};
use super::*;
use ast_grep_core::{Matcher, Pattern};
fn test_match(s1: &str, s2: &str) {
let pattern = Pattern::new(s1, Rust);
let cand = Rust.ast_grep(s2);
assert!(
pattern.find_node(cand.root()).is_some(),
"goal: {}, candidate: {}",
pattern.root.root().to_sexp(),
cand.root().to_sexp(),
);
}
fn test_match(s1: &str, s2: &str) {
let pattern = Pattern::new(s1, Rust);
let cand = Rust.ast_grep(s2);
assert!(
pattern.find_node(cand.root()).is_some(),
"goal: {}, candidate: {}",
pattern.root.root().to_sexp(),
cand.root().to_sexp(),
);
}
#[test]
fn test_rust_pattern() {
// fix #6
test_match("Some($A)", "fn test() { Some(123) }");
test_match(
"
#[test]
fn test_rust_pattern() {
// fix #6
test_match("Some($A)", "fn test() { Some(123) }");
test_match(
"
match $A {
Some($B) => $B,
None => $C,
}",
r#"fn test() {
r#"fn test() {
patterns = match config.include.clone() {
Some(patterns) => patterns,
None => Vec::from([cwd
@@ -60,32 +60,32 @@ patterns = match config.include.clone() {
.into_owned()]),
};
}"#,
);
}
);
}
fn test_replace(src: &str, pattern: &str, replacer: &str) -> String {
let mut source = Rust.ast_grep(src);
let replacer = Pattern::new(replacer, Rust);
assert!(source.replace(pattern, replacer));
source.generate()
}
fn test_replace(src: &str, pattern: &str, replacer: &str) -> String {
let mut source = Rust.ast_grep(src);
let replacer = Pattern::new(replacer, Rust);
assert!(source.replace(pattern, replacer));
source.generate()
}
#[test]
fn test_rust_replace() {
let ret = test_replace("fn test() { Some(123) }", "Some($A)", "Ok($A)");
assert_eq!(ret, "fn test() { Ok(123) }");
let ret = test_replace(
r#"
#[test]
fn test_rust_replace() {
let ret = test_replace("fn test() { Some(123) }", "Some($A)", "Ok($A)");
assert_eq!(ret, "fn test() { Ok(123) }");
let ret = test_replace(
r#"
patterns = match config.include.clone() {
Some(patterns) => patterns,
None => 123,
}"#,
"match $A {
"match $A {
Some($B) => $B,
None => $C,
}",
"$A.unwrap_or($C)",
);
assert_eq!(ret, "\npatterns = config.include.clone().unwrap_or(123)")
}
"$A.unwrap_or($C)",
);
assert_eq!(ret, "\npatterns = config.include.clone().unwrap_or(123)")
}
}
+14 -14
View File
@@ -3,23 +3,23 @@ use ast_grep_lsp::{Backend, LspService, Server};
use std::io::Result;
async fn run_language_server_impl() {
// env_logger::init();
// env_logger::init();
let stdin = tokio::io::stdin();
let stdout = tokio::io::stdout();
let configs = find_config(None);
let stdin = tokio::io::stdin();
let stdout = tokio::io::stdout();
let configs = find_config(None);
let (service, socket) = LspService::build(|client| Backend::new(client, configs)).finish();
Server::new(stdin, stdout, socket).serve(service).await;
let (service, socket) = LspService::build(|client| Backend::new(client, configs)).finish();
Server::new(stdin, stdout, socket).serve(service).await;
}
pub fn run_language_server() -> Result<()> {
tokio::runtime::Builder::new_multi_thread()
.enable_all()
.build()
.unwrap()
.block_on(async {
run_language_server_impl().await;
});
Ok(())
tokio::runtime::Builder::new_multi_thread()
.enable_all()
.build()
.unwrap()
.block_on(async {
run_language_server_impl().await;
});
Ok(())
}
+38 -38
View File
@@ -21,56 +21,56 @@ use languages::SupportLang;
* sg -p ""
*/
pub struct Args {
#[clap(subcommand)]
command: Option<Commands>,
#[clap(subcommand)]
command: Option<Commands>,
/// AST pattern to match
#[clap(short, long, requires = "lang")]
pattern: Option<String>,
/// AST pattern to match
#[clap(short, long, requires = "lang")]
pattern: Option<String>,
/// String to replace the matched AST node
#[clap(short, long)]
rewrite: Option<String>,
/// String to replace the matched AST node
#[clap(short, long)]
rewrite: Option<String>,
/// Print query pattern's tree-sitter AST
#[clap(long, parse(from_flag))]
debug_query: bool,
/// Print query pattern's tree-sitter AST
#[clap(long, parse(from_flag))]
debug_query: bool,
/// The language of the pattern query
#[clap(short, long)]
lang: Option<SupportLang>,
/// The language of the pattern query
#[clap(short, long)]
lang: Option<SupportLang>,
#[clap(short, long, parse(from_flag))]
interactive: bool,
#[clap(short, long, parse(from_flag))]
interactive: bool,
/// The path whose descendent files are to be explored.
#[clap(value_parser, default_value = ".")]
path: String,
/// The path whose descendent files are to be explored.
#[clap(value_parser, default_value = ".")]
path: String,
/// Include hidden files in search
#[clap(short, long, parse(from_flag))]
hidden: bool,
/// Include hidden files in search
#[clap(short, long, parse(from_flag))]
hidden: bool,
}
#[derive(Subcommand)]
enum Commands {
/// Scan and rewrite code
Scan(ScanArg),
/// test ast-grep rule
Test(TestArg),
/// starts language server
Lsp,
/// Scan and rewrite code
Scan(ScanArg),
/// test ast-grep rule
Test(TestArg),
/// starts language server
Lsp,
}
fn main() -> Result<()> {
let mut args = Args::parse();
let command = args.command.take();
if command.is_none() {
return run_with_pattern(args);
}
match command.unwrap() {
Commands::Scan(arg) => run_with_config(arg),
Commands::Test(arg) => run_test_rule(arg),
Commands::Lsp => lsp::run_language_server(),
}
let mut args = Args::parse();
let command = args.command.take();
if command.is_none() {
return run_with_pattern(args);
}
match command.unwrap() {
Commands::Scan(arg) => run_with_config(arg),
Commands::Test(arg) => run_test_rule(arg),
Commands::Lsp => lsp::run_language_server(),
}
}
+151 -151
View File
@@ -17,8 +17,8 @@ pub use codespan_reporting::{files::SimpleFile, term::ColorArg};
use crate::languages::SupportLang;
pub struct ErrorReporter {
writer: StandardStream,
config: term::Config,
writer: StandardStream,
config: term::Config,
}
arg_enum! {
@@ -31,172 +31,172 @@ arg_enum! {
}
impl ErrorReporter {
pub fn new(color: ColorChoice, style: ReportStyle) -> Self {
let display_style = match style {
ReportStyle::Rich => DisplayStyle::Rich,
ReportStyle::Medium => DisplayStyle::Medium,
ReportStyle::Short => DisplayStyle::Short,
};
Self {
writer: StandardStream::stdout(color),
config: term::Config {
display_style,
..Default::default()
},
}
pub fn new(color: ColorChoice, style: ReportStyle) -> Self {
let display_style = match style {
ReportStyle::Rich => DisplayStyle::Rich,
ReportStyle::Medium => DisplayStyle::Medium,
ReportStyle::Short => DisplayStyle::Short,
};
Self {
writer: StandardStream::stdout(color),
config: term::Config {
display_style,
..Default::default()
},
}
}
pub fn print_rule<'a>(
&self,
matches: impl Iterator<Item = NodeMatch<'a, SupportLang>>,
file: SimpleFile<Cow<str>, &String>,
rule: &RuleConfig<SupportLang>,
) {
let config = &self.config;
let writer = &self.writer;
let serverity = match rule.severity {
Severity::Error => diagnostic::Severity::Error,
Severity::Warning => diagnostic::Severity::Warning,
Severity::Info => diagnostic::Severity::Note,
Severity::Hint => diagnostic::Severity::Help,
};
let lock = &mut writer.lock();
for m in matches {
let range = m.range();
let mut labels = vec![Label::primary((), range)];
if let Some(secondary_nodes) = m.get_env().get_labels("secondary") {
labels.extend(secondary_nodes.iter().map(|n| {
let range = n.range();
Label::secondary((), range)
}));
}
let diagnostic = Diagnostic::new(serverity)
.with_code(&rule.id)
.with_message(&rule.message)
.with_notes(rule.note.iter().cloned().collect())
.with_labels(labels);
term::emit(lock, config, &file, &diagnostic).unwrap();
}
pub fn print_rule<'a>(
&self,
matches: impl Iterator<Item = NodeMatch<'a, SupportLang>>,
file: SimpleFile<Cow<str>, &String>,
rule: &RuleConfig<SupportLang>,
) {
let config = &self.config;
let writer = &self.writer;
let serverity = match rule.severity {
Severity::Error => diagnostic::Severity::Error,
Severity::Warning => diagnostic::Severity::Warning,
Severity::Info => diagnostic::Severity::Note,
Severity::Hint => diagnostic::Severity::Help,
};
let lock = &mut writer.lock();
for m in matches {
let range = m.range();
let mut labels = vec![Label::primary((), range)];
if let Some(secondary_nodes) = m.get_env().get_labels("secondary") {
labels.extend(secondary_nodes.iter().map(|n| {
let range = n.range();
Label::secondary((), range)
}));
}
let diagnostic = Diagnostic::new(serverity)
.with_code(&rule.id)
.with_message(&rule.message)
.with_notes(rule.note.iter().cloned().collect())
.with_labels(labels);
term::emit(lock, config, &file, &diagnostic).unwrap();
}
}
}
pub fn print_matches<'a>(
matches: impl Iterator<Item = NodeMatch<'a, SupportLang>>,
path: &Path,
pattern: &impl Matcher<SupportLang>,
rewrite: &Option<Pattern<SupportLang>>,
matches: impl Iterator<Item = NodeMatch<'a, SupportLang>>,
path: &Path,
pattern: &impl Matcher<SupportLang>,
rewrite: &Option<Pattern<SupportLang>>,
) {
let lock = std::io::stdout().lock(); // lock stdout to avoid interleaving output
println!(
"{}",
Color::Cyan.italic().paint(format!("{}", path.display()))
);
if let Some(rewrite) = rewrite {
// TODO: actual matching happened in stdout lock, optimize it out
for e in matches {
let display = e.display_context(3);
let old_str = format!(
"{}{}{}\n",
display.leading, display.matched, display.trailing
);
let new_str = format!(
"{}{}{}\n",
display.leading,
e.replace(pattern, rewrite).unwrap().inserted_text,
display.trailing
);
let base_line = display.start_line;
print_diff(&old_str, &new_str, base_line);
}
} else {
for e in matches {
let display = e.display_context(0);
let leading = display.leading;
let trailing = display.trailing;
let matched = display.matched;
let highlighted = format!("{leading}{matched}{trailing}");
let lines = highlighted.lines().count();
let mut num = display.start_line;
let width = (lines + display.start_line).to_string().chars().count();
print!("{num:>width$}|"); // initial line num
print_highlight(leading.lines(), Style::new().dimmed(), width, &mut num);
print_highlight(matched.lines(), Style::new().bold(), width, &mut num);
print_highlight(trailing.lines(), Style::new().dimmed(), width, &mut num);
println!(); // end match new line
}
let lock = std::io::stdout().lock(); // lock stdout to avoid interleaving output
println!(
"{}",
Color::Cyan.italic().paint(format!("{}", path.display()))
);
if let Some(rewrite) = rewrite {
// TODO: actual matching happened in stdout lock, optimize it out
for e in matches {
let display = e.display_context(3);
let old_str = format!(
"{}{}{}\n",
display.leading, display.matched, display.trailing
);
let new_str = format!(
"{}{}{}\n",
display.leading,
e.replace(pattern, rewrite).unwrap().inserted_text,
display.trailing
);
let base_line = display.start_line;
print_diff(&old_str, &new_str, base_line);
}
drop(lock);
} else {
for e in matches {
let display = e.display_context(0);
let leading = display.leading;
let trailing = display.trailing;
let matched = display.matched;
let highlighted = format!("{leading}{matched}{trailing}");
let lines = highlighted.lines().count();
let mut num = display.start_line;
let width = (lines + display.start_line).to_string().chars().count();
print!("{num:>width$}|"); // initial line num
print_highlight(leading.lines(), Style::new().dimmed(), width, &mut num);
print_highlight(matched.lines(), Style::new().bold(), width, &mut num);
print_highlight(trailing.lines(), Style::new().dimmed(), width, &mut num);
println!(); // end match new line
}
}
drop(lock);
}
fn print_highlight<'a>(
mut lines: impl Iterator<Item = &'a str>,
style: Style,
width: usize,
num: &mut usize,
mut lines: impl Iterator<Item = &'a str>,
style: Style,
width: usize,
num: &mut usize,
) {
if let Some(line) = lines.next() {
let line = style.paint(line);
print!("{line}");
}
for line in lines {
println!();
*num += 1;
let line = style.paint(line);
print!("{num:>width$}|{line}");
}
if let Some(line) = lines.next() {
let line = style.paint(line);
print!("{line}");
}
for line in lines {
println!();
*num += 1;
let line = style.paint(line);
print!("{num:>width$}|{line}");
}
}
fn index_display(index: Option<usize>, style: Style, width: usize) -> impl Display {
let index_str = match index {
None => format!("{:width$}", ""),
Some(idx) => format!("{:<width$}", idx),
};
style.paint(index_str)
let index_str = match index {
None => format!("{:width$}", ""),
Some(idx) => format!("{:<width$}", idx),
};
style.paint(index_str)
}
fn print_diff(old: &str, new: &str, base_line: usize) {
static THISTLE1: Color = Color::Fixed(225);
static SEA_GREEN: Color = Color::Fixed(158);
static RED: Color = Color::Fixed(161);
static GREEN: Color = Color::Fixed(35);
let diff = TextDiff::from_lines(old, new);
let width = base_line.to_string().chars().count();
for (idx, group) in diff.grouped_ops(3).iter().enumerate() {
if idx > 0 {
println!("{:-^1$}", "-", 80);
}
for op in group {
for change in diff.iter_inline_changes(op) {
let (sign, s, bg) = match change.tag() {
ChangeTag::Delete => (
"-",
Style::new().fg(RED).on(THISTLE1),
Style::new().on(THISTLE1),
),
ChangeTag::Insert => (
"+",
Style::new().fg(GREEN).on(SEA_GREEN),
Style::new().on(SEA_GREEN),
),
ChangeTag::Equal => (" ", Style::new().dimmed(), Style::new()),
};
print!(
"{}{}|{}",
index_display(change.old_index().map(|i| i + base_line), s, width + 1),
index_display(change.new_index().map(|i| i + base_line), s, width),
s.paint(sign),
);
for (emphasized, value) in change.iter_strings_lossy() {
if emphasized {
print!("{}", s.bold().paint(value));
} else {
print!("{}", bg.paint(value));
}
}
if change.missing_newline() {
println!();
}
}
}
static THISTLE1: Color = Color::Fixed(225);
static SEA_GREEN: Color = Color::Fixed(158);
static RED: Color = Color::Fixed(161);
static GREEN: Color = Color::Fixed(35);
let diff = TextDiff::from_lines(old, new);
let width = base_line.to_string().chars().count();
for (idx, group) in diff.grouped_ops(3).iter().enumerate() {
if idx > 0 {
println!("{:-^1$}", "-", 80);
}
for op in group {
for change in diff.iter_inline_changes(op) {
let (sign, s, bg) = match change.tag() {
ChangeTag::Delete => (
"-",
Style::new().fg(RED).on(THISTLE1),
Style::new().on(THISTLE1),
),
ChangeTag::Insert => (
"+",
Style::new().fg(GREEN).on(SEA_GREEN),
Style::new().on(SEA_GREEN),
),
ChangeTag::Equal => (" ", Style::new().dimmed(), Style::new()),
};
print!(
"{}{}|{}",
index_display(change.old_index().map(|i| i + base_line), s, width + 1),
index_display(change.new_index().map(|i| i + base_line), s, width),
s.paint(sign),
);
for (emphasized, value) in change.iter_strings_lossy() {
if emphasized {
print!("{}", s.bold().paint(value));
} else {
print!("{}", bg.paint(value));
}
}
if change.missing_newline() {
println!();
}
}
}
}
}
+188 -188
View File
@@ -15,241 +15,241 @@ use crate::{interaction, Args as PatternArg};
#[derive(Args)]
pub struct ScanArg {
/// Path to ast-grep config, either YAML or folder of YAMLs
#[clap(short, long)]
config: Option<String>,
/// Path to ast-grep config, either YAML or folder of YAMLs
#[clap(short, long)]
config: Option<String>,
/// Include hidden files in search
#[clap(short, long, parse(from_flag))]
hidden: bool,
/// Include hidden files in search
#[clap(short, long, parse(from_flag))]
hidden: bool,
#[clap(short, long, parse(from_flag))]
interactive: bool,
#[clap(short, long, parse(from_flag))]
interactive: bool,
#[clap(long, default_value = "auto")]
color: ColorArg,
#[clap(long, default_value = "auto")]
color: ColorArg,
#[clap(long, default_value = "rich")]
report_style: ReportStyle,
#[clap(long, default_value = "rich")]
report_style: ReportStyle,
/// The path whose descendent files are to be explored.
#[clap(value_parser, default_value = ".")]
path: String,
/// The path whose descendent files are to be explored.
#[clap(value_parser, default_value = ".")]
path: String,
}
// Every run will include Search or Replace
// Search or Replace by arguments `pattern` and `rewrite` passed from CLI
pub fn run_with_pattern(args: PatternArg) -> Result<()> {
let pattern = args.pattern.unwrap();
let threads = num_cpus::get().min(12);
let lang = args.lang.unwrap();
let pattern = Pattern::new(&pattern, lang);
if args.debug_query {
println!("Pattern TreeSitter {:?}", pattern);
}
let walker = WalkBuilder::new(&args.path)
.hidden(args.hidden)
.threads(threads)
.types(file_types(&lang))
.build_parallel();
let rewrite = args.rewrite.map(|s| Pattern::new(s.as_ref(), lang));
if !args.interactive {
run_walker(walker, |path| {
match_one_file(path, lang, &pattern, &rewrite)
});
return Ok(());
}
run_walker_interactive(
walker,
|path| filter_file_interactive(path, lang, &pattern),
|(grep, path)| run_one_interaction(&path, &grep, &pattern, &rewrite),
);
Ok(())
let pattern = args.pattern.unwrap();
let threads = num_cpus::get().min(12);
let lang = args.lang.unwrap();
let pattern = Pattern::new(&pattern, lang);
if args.debug_query {
println!("Pattern TreeSitter {:?}", pattern);
}
let walker = WalkBuilder::new(&args.path)
.hidden(args.hidden)
.threads(threads)
.types(file_types(&lang))
.build_parallel();
let rewrite = args.rewrite.map(|s| Pattern::new(s.as_ref(), lang));
if !args.interactive {
run_walker(walker, |path| {
match_one_file(path, lang, &pattern, &rewrite)
});
return Ok(());
}
run_walker_interactive(
walker,
|path| filter_file_interactive(path, lang, &pattern),
|(grep, path)| run_one_interaction(&path, &grep, &pattern, &rewrite),
);
Ok(())
}
fn get_rules<'c>(
path: &Path,
configs: &'c RuleCollection<SupportLang>,
path: &Path,
configs: &'c RuleCollection<SupportLang>,
) -> Vec<&'c RuleConfig<SupportLang>> {
let lang = match SupportLang::from_path(path) {
Some(lang) => lang,
None => return vec![],
};
configs.get_rules_for_lang(&lang)
let lang = match SupportLang::from_path(path) {
Some(lang) => lang,
None => return vec![],
};
configs.get_rules_for_lang(&lang)
}
pub fn run_with_config(args: ScanArg) -> Result<()> {
let configs = find_config(args.config);
let threads = num_cpus::get().min(12);
let walker = WalkBuilder::new(&args.path)
.hidden(args.hidden)
.threads(threads)
.build_parallel();
let reporter = ErrorReporter::new(args.color.into(), args.report_style);
if !args.interactive {
run_walker(walker, |path| {
for config in get_rules(path, &configs) {
let lang = config.language;
if from_extension(path).filter(|&n| n == lang).is_none() {
continue;
}
match_rule_on_file(path, lang, config, &reporter)
}
});
} else {
run_walker_interactive(
walker,
|path| {
for config in get_rules(path, &configs) {
let lang = config.language;
let matcher = config.get_matcher();
if from_extension(path).filter(|&n| n == lang).is_none() {
continue;
}
let ret = filter_file_interactive(path, lang, &matcher);
if ret.is_some() {
return ret;
}
}
None
},
|(grep, path)| {
for config in get_rules(&path, &configs) {
if from_extension(&path)
.filter(|&n| n == config.language)
.is_none()
{
continue;
}
let matcher = config.get_matcher();
let fixer = config.get_fixer();
run_one_interaction(&path, &grep, matcher, &fixer);
}
},
);
}
Ok(())
let configs = find_config(args.config);
let threads = num_cpus::get().min(12);
let walker = WalkBuilder::new(&args.path)
.hidden(args.hidden)
.threads(threads)
.build_parallel();
let reporter = ErrorReporter::new(args.color.into(), args.report_style);
if !args.interactive {
run_walker(walker, |path| {
for config in get_rules(path, &configs) {
let lang = config.language;
if from_extension(path).filter(|&n| n == lang).is_none() {
continue;
}
match_rule_on_file(path, lang, config, &reporter)
}
});
} else {
run_walker_interactive(
walker,
|path| {
for config in get_rules(path, &configs) {
let lang = config.language;
let matcher = config.get_matcher();
if from_extension(path).filter(|&n| n == lang).is_none() {
continue;
}
let ret = filter_file_interactive(path, lang, &matcher);
if ret.is_some() {
return ret;
}
}
None
},
|(grep, path)| {
for config in get_rules(&path, &configs) {
if from_extension(&path)
.filter(|&n| n == config.language)
.is_none()
{
continue;
}
let matcher = config.get_matcher();
let fixer = config.get_fixer();
run_one_interaction(&path, &grep, matcher, &fixer);
}
},
);
}
Ok(())
}
fn run_one_interaction<M: Matcher<SupportLang>>(
path: &PathBuf,
grep: &AstGrep<SupportLang>,
matcher: M,
rewrite: &Option<Pattern<SupportLang>>,
path: &PathBuf,
grep: &AstGrep<SupportLang>,
matcher: M,
rewrite: &Option<Pattern<SupportLang>>,
) {
let mut matches = grep.root().find_all(&matcher).peekable();
if matches.peek().is_none() {
return;
let mut matches = grep.root().find_all(&matcher).peekable();
if matches.peek().is_none() {
return;
}
print_matches(matches, path, &matcher, rewrite);
let rewrite = match rewrite {
Some(r) => r,
None => {
interaction::prompt("Next", "", Some('\n')).unwrap();
return;
}
print_matches(matches, path, &matcher, rewrite);
let rewrite = match rewrite {
Some(r) => r,
None => {
interaction::prompt("Next", "", Some('\n')).unwrap();
return;
}
};
let response = interaction::prompt("Accept change? (Yes[y], No[n], All[a])", "yna", Some('y'))
.expect("Error happened during prompt");
match response {
'y' => {
let new_content = apply_rewrite(grep, &matcher, rewrite);
std::fs::write(&path, new_content).expect("write file content failed");
}
'a' => (),
_ => (),
};
let response = interaction::prompt("Accept change? (Yes[y], No[n], All[a])", "yna", Some('y'))
.expect("Error happened during prompt");
match response {
'y' => {
let new_content = apply_rewrite(grep, &matcher, rewrite);
std::fs::write(&path, new_content).expect("write file content failed");
}
'a' => (),
_ => (),
}
}
fn apply_rewrite<M: Matcher<SupportLang>>(
grep: &AstGrep<SupportLang>,
matcher: M,
rewrite: &Pattern<SupportLang>,
grep: &AstGrep<SupportLang>,
matcher: M,
rewrite: &Pattern<SupportLang>,
) -> String {
let root = grep.root();
let edits = root.replace_all(matcher, rewrite);
let mut new_content = String::new();
let mut start = 0;
for edit in edits {
new_content.push_str(&grep.source()[start..edit.position]);
new_content.push_str(&edit.inserted_text);
start = edit.position + edit.deleted_length;
}
new_content
let root = grep.root();
let edits = root.replace_all(matcher, rewrite);
let mut new_content = String::new();
let mut start = 0;
for edit in edits {
new_content.push_str(&grep.source()[start..edit.position]);
new_content.push_str(&edit.inserted_text);
start = edit.position + edit.deleted_length;
}
new_content
}
fn filter_file(entry: DirEntry) -> Option<DirEntry> {
entry.file_type()?.is_file().then_some(entry)
entry.file_type()?.is_file().then_some(entry)
}
fn run_walker(walker: WalkParallel, f: impl Fn(&Path) + Sync) {
interaction::run_walker(walker, |entry| {
if let Some(e) = filter_file(entry) {
f(e.path());
}
WalkState::Continue
});
interaction::run_walker(walker, |entry| {
if let Some(e) = filter_file(entry) {
f(e.path());
}
WalkState::Continue
});
}
fn run_walker_interactive<T: Send>(
walker: WalkParallel,
producer: impl Fn(&Path) -> Option<T> + Sync,
consumer: impl Fn(T) + Send,
walker: WalkParallel,
producer: impl Fn(&Path) -> Option<T> + Sync,
consumer: impl Fn(T) + Send,
) {
interaction::run_walker_interactive(
walker,
|entry| producer(filter_file(entry)?.path()),
consumer,
);
interaction::run_walker_interactive(
walker,
|entry| producer(filter_file(entry)?.path()),
consumer,
);
}
fn match_rule_on_file(
path: &Path,
lang: SupportLang,
rule: &RuleConfig<SupportLang>,
reporter: &ErrorReporter,
path: &Path,
lang: SupportLang,
rule: &RuleConfig<SupportLang>,
reporter: &ErrorReporter,
) {
let matcher = rule.get_matcher();
let file_content = match read_to_string(&path) {
Ok(content) => content,
_ => return,
};
let grep = lang.ast_grep(&file_content);
let mut matches = grep.root().find_all(matcher).peekable();
if matches.peek().is_none() {
return;
}
let file = SimpleFile::new(path.to_string_lossy(), &file_content);
reporter.print_rule(matches, file, rule);
let matcher = rule.get_matcher();
let file_content = match read_to_string(&path) {
Ok(content) => content,
_ => return,
};
let grep = lang.ast_grep(&file_content);
let mut matches = grep.root().find_all(matcher).peekable();
if matches.peek().is_none() {
return;
}
let file = SimpleFile::new(path.to_string_lossy(), &file_content);
reporter.print_rule(matches, file, rule);
}
fn match_one_file(
path: &Path,
lang: SupportLang,
pattern: &impl Matcher<SupportLang>,
rewrite: &Option<Pattern<SupportLang>>,
path: &Path,
lang: SupportLang,
pattern: &impl Matcher<SupportLang>,
rewrite: &Option<Pattern<SupportLang>>,
) {
let file_content = match read_to_string(&path) {
Ok(content) => content,
_ => return,
};
let grep = lang.ast_grep(file_content);
let mut matches = grep.root().find_all(pattern).peekable();
if matches.peek().is_none() {
return;
}
print_matches(matches, path, pattern, rewrite);
let file_content = match read_to_string(&path) {
Ok(content) => content,
_ => return,
};
let grep = lang.ast_grep(file_content);
let mut matches = grep.root().find_all(pattern).peekable();
if matches.peek().is_none() {
return;
}
print_matches(matches, path, pattern, rewrite);
}
fn filter_file_interactive(
path: &Path,
lang: SupportLang,
pattern: &impl Matcher<SupportLang>,
path: &Path,
lang: SupportLang,
pattern: &impl Matcher<SupportLang>,
) -> Option<(AstGrep<SupportLang>, PathBuf)> {
let file_content = read_to_string(path)
.map_err(|err| eprintln!("ERROR: {}", err))
.ok()?;
let grep = lang.ast_grep(file_content);
let has_match = grep.root().find(&pattern).is_some();
has_match.then_some((grep, path.to_path_buf()))
let file_content = read_to_string(path)
.map_err(|err| eprintln!("ERROR: {}", err))
.ok()?;
let grep = lang.ast_grep(file_content);
let has_match = grep.root().find(&pattern).is_some();
has_match.then_some((grep, path.to_path_buf()))
}
+1 -1
View File
@@ -7,5 +7,5 @@ use std::io::Result;
pub struct TestArg {}
pub fn run_test_rule(_arg: TestArg) -> Result<()> {
todo!("test sg rule is not implemented yet.")
todo!("test sg rule is not implemented yet.")
}
+38 -38
View File
@@ -11,61 +11,61 @@ use std::collections::HashMap;
#[derive(Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub enum SerializableMetaVarMatcher {
/// A regex to filter metavar based on its textual content.
Regex(String),
/// A pattern to filter matched metavar based on its AST tree shape.
Pattern(String),
/// A kind_id to filter matched metavar based on its ts-node kind
Kind(String),
/// A regex to filter metavar based on its textual content.
Regex(String),
/// A pattern to filter matched metavar based on its AST tree shape.
Pattern(String),
/// A kind_id to filter matched metavar based on its ts-node kind
Kind(String),
}
#[derive(Debug)]
pub enum SerializeError {
InvalidRegex(regex::Error),
// InvalidPattern,
InvalidRegex(regex::Error),
// InvalidPattern,
}
pub fn try_from_serializable<L: Language>(
meta_var: SerializableMetaVarMatcher,
lang: L,
meta_var: SerializableMetaVarMatcher,
lang: L,
) -> Result<MetaVarMatcher<L>, SerializeError> {
use SerializableMetaVarMatcher as S;
match meta_var {
S::Regex(s) => match Regex::new(&s) {
Ok(r) => Ok(MetaVarMatcher::Regex(r)),
Err(e) => Err(SerializeError::InvalidRegex(e)),
},
S::Pattern(p) => Ok(MetaVarMatcher::Pattern(Pattern::new(&p, lang))),
S::Kind(p) => Ok(MetaVarMatcher::Kind(KindMatcher::new(&p, lang))),
}
use SerializableMetaVarMatcher as S;
match meta_var {
S::Regex(s) => match Regex::new(&s) {
Ok(r) => Ok(MetaVarMatcher::Regex(r)),
Err(e) => Err(SerializeError::InvalidRegex(e)),
},
S::Pattern(p) => Ok(MetaVarMatcher::Pattern(Pattern::new(&p, lang))),
S::Kind(p) => Ok(MetaVarMatcher::Kind(KindMatcher::new(&p, lang))),
}
}
pub fn try_deserialize_matchers<L: Language>(
meta_vars: HashMap<String, SerializableMetaVarMatcher>,
lang: L,
meta_vars: HashMap<String, SerializableMetaVarMatcher>,
lang: L,
) -> Result<MetaVarMatchers<L>, SerializeError> {
let mut map = MetaVarMatchers::new();
for (key, matcher) in meta_vars {
map.insert(key, try_from_serializable(matcher, lang.clone())?);
}
Ok(map)
let mut map = MetaVarMatchers::new();
for (key, matcher) in meta_vars {
map.insert(key, try_from_serializable(matcher, lang.clone())?);
}
Ok(map)
}
pub struct RuleWithConstraint<L: Language> {
pub rule: Rule<L>,
pub matchers: MetaVarMatchers<L>,
pub rule: Rule<L>,
pub matchers: MetaVarMatchers<L>,
}
impl<L: Language> Matcher<L> for RuleWithConstraint<L> {
fn match_node_with_env<'tree>(
&self,
node: Node<'tree, L>,
env: &mut MetaVarEnv<'tree, L>,
) -> Option<Node<'tree, L>> {
self.rule.match_node_with_env(node, env)
}
fn match_node_with_env<'tree>(
&self,
node: Node<'tree, L>,
env: &mut MetaVarEnv<'tree, L>,
) -> Option<Node<'tree, L>> {
self.rule.match_node_with_env(node, env)
}
fn get_meta_var_env<'tree>(&self) -> MetaVarEnv<'tree, L> {
MetaVarEnv::from_matchers(self.matchers.clone())
}
fn get_meta_var_env<'tree>(&self) -> MetaVarEnv<'tree, L> {
MetaVarEnv::from_matchers(self.matchers.clone())
}
}
+93 -93
View File
@@ -7,65 +7,65 @@ use serde_yaml::Deserializer;
use ast_grep_core::language::Language;
pub use rule::{
try_from_serializable as deserialize_rule, Rule, RuleConfig, SerializableRule, Severity,
try_from_serializable as deserialize_rule, Rule, RuleConfig, SerializableRule, Severity,
};
pub use rule_collection::RuleCollection;
#[derive(Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct AstGrepConfig {
/// YAML rule directory
pub rule_dirs: Vec<String>,
/// overriding config for rules
pub rules: Option<Vec<()>>,
/// YAML rule directory
pub rule_dirs: Vec<String>,
/// overriding config for rules
pub rules: Option<Vec<()>>,
}
pub fn deserialize_sgconfig(source: &str) -> Result<AstGrepConfig, serde_yaml::Error> {
let yaml = Deserializer::from_str(source);
AstGrepConfig::deserialize(yaml)
let yaml = Deserializer::from_str(source);
AstGrepConfig::deserialize(yaml)
}
pub fn from_yaml_string<'a, L: Language + Deserialize<'a>>(
yamls: &'a str,
yamls: &'a str,
) -> Result<Vec<RuleConfig<L>>, serde_yaml::Error> {
let mut ret = vec![];
for yaml in Deserializer::from_str(yamls) {
let config = RuleConfig::deserialize(yaml)?;
ret.push(config);
}
Ok(ret)
let mut ret = vec![];
for yaml in Deserializer::from_str(yamls) {
let config = RuleConfig::deserialize(yaml)?;
ret.push(config);
}
Ok(ret)
}
#[cfg(test)]
mod test {
use super::*;
use ast_grep_core::language::TSLanguage;
#[derive(Clone, Deserialize, PartialEq, Eq)]
pub enum TypeScript {
Tsx,
}
impl Language for TypeScript {
fn get_ts_language(&self) -> TSLanguage {
tree_sitter_typescript::language_tsx().into()
}
use super::*;
use ast_grep_core::language::TSLanguage;
#[derive(Clone, Deserialize, PartialEq, Eq)]
pub enum TypeScript {
Tsx,
}
impl Language for TypeScript {
fn get_ts_language(&self) -> TSLanguage {
tree_sitter_typescript::language_tsx().into()
}
}
fn test_rule_match(yaml: &str, source: &str) {
let config = &from_yaml_string::<TypeScript>(yaml).expect("rule should parse")[0];
let grep = config.language.ast_grep(source);
assert!(grep.root().find(config.get_matcher()).is_some());
}
fn test_rule_match(yaml: &str, source: &str) {
let config = &from_yaml_string::<TypeScript>(yaml).expect("rule should parse")[0];
let grep = config.language.ast_grep(source);
assert!(grep.root().find(config.get_matcher()).is_some());
}
fn test_rule_unmatch(yaml: &str, source: &str) {
let config = &from_yaml_string::<TypeScript>(yaml).expect("rule should parse")[0];
let grep = config.language.ast_grep(source);
assert!(grep.root().find(config.get_matcher()).is_none());
}
fn test_rule_unmatch(yaml: &str, source: &str) {
let config = &from_yaml_string::<TypeScript>(yaml).expect("rule should parse")[0];
let grep = config.language.ast_grep(source);
assert!(grep.root().find(config.get_matcher()).is_none());
}
fn make_yaml(rule: &str) -> String {
format!(
r"
fn make_yaml(rule: &str) -> String {
format!(
r"
id: test
message: test rule
severity: info
@@ -73,82 +73,82 @@ language: Tsx
rule:
{rule}
"
)
}
)
}
#[test]
fn test_deserialize_rule_config() {
let yaml = &make_yaml(
"
#[test]
fn test_deserialize_rule_config() {
let yaml = &make_yaml(
"
pattern: let a = 123
",
);
test_rule_match(yaml, "let a = 123; let b = 33;");
test_rule_match(yaml, "class B { func() {let a = 123; }}");
test_rule_unmatch(yaml, "const a = 33");
}
);
test_rule_match(yaml, "let a = 123; let b = 33;");
test_rule_match(yaml, "class B { func() {let a = 123; }}");
test_rule_unmatch(yaml, "const a = 33");
}
#[test]
fn test_deserialize_nested() {
let yaml = &make_yaml(
"
#[test]
fn test_deserialize_nested() {
let yaml = &make_yaml(
"
all:
- pattern: let $A = 123
- pattern: let a = $B
",
);
test_rule_match(yaml, "let a = 123; let b = 33;");
test_rule_match(yaml, "class B { func() {let a = 123; }}");
test_rule_unmatch(yaml, "const a = 33");
test_rule_unmatch(yaml, "let a = 33");
}
);
test_rule_match(yaml, "let a = 123; let b = 33;");
test_rule_match(yaml, "class B { func() {let a = 123; }}");
test_rule_unmatch(yaml, "const a = 33");
test_rule_unmatch(yaml, "let a = 33");
}
#[test]
fn test_deserialize_kind() {
let yaml = &make_yaml(
"
#[test]
fn test_deserialize_kind() {
let yaml = &make_yaml(
"
kind: class_body
",
);
test_rule_match(yaml, "class B { func() {let a = 123; }}");
test_rule_unmatch(yaml, "const B = { func() {let a = 123; }}");
}
);
test_rule_match(yaml, "class B { func() {let a = 123; }}");
test_rule_unmatch(yaml, "const B = { func() {let a = 123; }}");
}
#[test]
fn test_deserialize_inside() {
let yaml = &make_yaml(
"
#[test]
fn test_deserialize_inside() {
let yaml = &make_yaml(
"
all:
- inside:
kind: class_body
- pattern: let a = 123
",
);
test_rule_unmatch(yaml, "let a = 123; let b = 33;");
test_rule_match(yaml, "class B { func() {let a = 123; }}");
test_rule_unmatch(yaml, "let a = 123");
}
);
test_rule_unmatch(yaml, "let a = 123; let b = 33;");
test_rule_match(yaml, "class B { func() {let a = 123; }}");
test_rule_unmatch(yaml, "let a = 123");
}
#[test]
fn test_deserialize_not_inside() {
let yaml = &make_yaml(
"
#[test]
fn test_deserialize_not_inside() {
let yaml = &make_yaml(
"
all:
- not:
inside:
kind: class_body
- pattern: let a = 123
",
);
test_rule_match(yaml, "let a = 123; let b = 33;");
test_rule_unmatch(yaml, "class B { func() {let a = 123; }}");
test_rule_unmatch(yaml, "let a = 13");
}
);
test_rule_match(yaml, "let a = 123; let b = 33;");
test_rule_unmatch(yaml, "class B { func() {let a = 123; }}");
test_rule_unmatch(yaml, "let a = 13");
}
#[test]
fn test_deserialize_meta_var() {
let yaml = &make_yaml(
"
#[test]
fn test_deserialize_meta_var() {
let yaml = &make_yaml(
"
all:
- inside:
any:
@@ -156,9 +156,9 @@ rule:
- pattern: let $A = ($$$) => $$$
- pattern: $A($$$)
",
);
test_rule_match(yaml, "function recursion() { recursion() }");
test_rule_match(yaml, "let recursion = () => { recursion() }");
test_rule_unmatch(yaml, "function callOther() { other() }");
}
);
test_rule_match(yaml, "function recursion() { recursion() }");
test_rule_match(yaml, "let recursion = () => { recursion() }");
test_rule_unmatch(yaml, "function callOther() { other() }");
}
}
+280 -275
View File
@@ -1,6 +1,6 @@
pub use crate::constraints::{
try_deserialize_matchers, try_from_serializable as deserialize_meta_var, RuleWithConstraint,
SerializableMetaVarMatcher,
try_deserialize_matchers, try_from_serializable as deserialize_meta_var, RuleWithConstraint,
SerializableMetaVarMatcher,
};
use ast_grep_core::language::Language;
use ast_grep_core::meta_var::MetaVarEnv;
@@ -16,381 +16,386 @@ use std::marker::PhantomData;
#[derive(Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub enum Severity {
Hint,
Info,
Warning,
Error,
Hint,
Info,
Warning,
Error,
}
#[derive(Serialize, Deserialize, Clone)]
pub struct RuleConfig<L: Language> {
/// Unique, descriptive identifier, e.g., no-unused-variable
pub id: String,
/// Main message highlighting why this rule fired. It should be single line and concise,
/// but specific enough to be understood without additional context.
pub message: String,
/// Additional notes to elaborate the message and provide potential fix to the issue.
pub note: Option<String>,
/// One of: Info, Warning, or Error
pub severity: Severity,
/// Specify the language to parse and the file extension to includ in matching.
pub language: L,
/// Pattern rules to find matching AST nodes
pub rule: SerializableRule,
/// A pattern to auto fix the issue. It can reference metavariables appeared in rule.
pub fix: Option<String>,
/// Addtional meta variables pattern to filter matching
pub constraints: Option<HashMap<String, SerializableMetaVarMatcher>>,
/// Glob patterns to specify that the rule only applies to matching files
pub files: Option<Vec<String>>,
/// Glob patterns that exclude rules from applying to files
pub ignores: Option<Vec<String>>,
/// Documentation link to this rule
pub url: Option<String>,
/// Extra information for the rule
pub metadata: Option<HashMap<String, String>>,
/// Unique, descriptive identifier, e.g., no-unused-variable
pub id: String,
/// Main message highlighting why this rule fired. It should be single line and concise,
/// but specific enough to be understood without additional context.
pub message: String,
/// Additional notes to elaborate the message and provide potential fix to the issue.
pub note: Option<String>,
/// One of: Info, Warning, or Error
pub severity: Severity,
/// Specify the language to parse and the file extension to includ in matching.
pub language: L,
/// Pattern rules to find matching AST nodes
pub rule: SerializableRule,
/// A pattern to auto fix the issue. It can reference metavariables appeared in rule.
pub fix: Option<String>,
/// Addtional meta variables pattern to filter matching
pub constraints: Option<HashMap<String, SerializableMetaVarMatcher>>,
/// Glob patterns to specify that the rule only applies to matching files
pub files: Option<Vec<String>>,
/// Glob patterns that exclude rules from applying to files
pub ignores: Option<Vec<String>>,
/// Documentation link to this rule
pub url: Option<String>,
/// Extra information for the rule
pub metadata: Option<HashMap<String, String>>,
}
impl<L: Language> RuleConfig<L> {
pub fn get_matcher(&self) -> RuleWithConstraint<L> {
let rule = self.get_rule();
let matchers = self.get_meta_var_matchers();
RuleWithConstraint { rule, matchers }
}
pub fn get_matcher(&self) -> RuleWithConstraint<L> {
let rule = self.get_rule();
let matchers = self.get_meta_var_matchers();
RuleWithConstraint { rule, matchers }
}
pub fn get_rule(&self) -> Rule<L> {
try_from_serializable(self.rule.clone(), self.language.clone()).unwrap()
}
pub fn get_rule(&self) -> Rule<L> {
try_from_serializable(self.rule.clone(), self.language.clone()).unwrap()
}
pub fn get_fixer(&self) -> Option<Pattern<L>> {
Some(Pattern::new(self.fix.as_ref()?, self.language.clone()))
}
pub fn get_fixer(&self) -> Option<Pattern<L>> {
Some(Pattern::new(self.fix.as_ref()?, self.language.clone()))
}
pub fn get_meta_var_matchers(&self) -> MetaVarMatchers<L> {
if let Some(constraints) = self.constraints.clone() {
try_deserialize_matchers(constraints, self.language.clone()).unwrap()
} else {
MetaVarMatchers::default()
}
pub fn get_meta_var_matchers(&self) -> MetaVarMatchers<L> {
if let Some(constraints) = self.constraints.clone() {
try_deserialize_matchers(constraints, self.language.clone()).unwrap()
} else {
MetaVarMatchers::default()
}
}
}
#[derive(Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub enum SerializableRule {
All(Vec<SerializableRule>),
Any(Vec<SerializableRule>),
Not(Box<SerializableRule>),
Inside(Box<RelationalRule>),
Has(Box<RelationalRule>),
Precedes(Box<RelationalRule>),
Follows(Box<RelationalRule>),
Pattern(PatternStyle),
Kind(String),
All(Vec<SerializableRule>),
Any(Vec<SerializableRule>),
Not(Box<SerializableRule>),
Inside(Box<RelationalRule>),
Has(Box<RelationalRule>),
Precedes(Box<RelationalRule>),
Follows(Box<RelationalRule>),
Pattern(PatternStyle),
Kind(String),
}
#[derive(Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct RelationalRule {
#[serde(flatten)]
rule: SerializableRule,
#[serde(default)]
until: Option<SerializableRule>,
#[serde(default)]
immediate: bool,
#[serde(flatten)]
rule: SerializableRule,
#[serde(default)]
until: Option<SerializableRule>,
#[serde(default)]
immediate: bool,
}
#[derive(Serialize, Deserialize, Clone)]
#[serde(untagged)]
pub enum PatternStyle {
Str(String),
Contextual { context: String, selector: String },
Str(String),
Contextual { context: String, selector: String },
}
pub enum Rule<L: Language> {
All(o::All<L, Rule<L>>),
Any(o::Any<L, Rule<L>>),
Not(Box<o::Not<L, Rule<L>>>),
Inside(Box<Inside<L>>),
Has(Box<Has<L>>),
Precedes(Box<Precedes<L>>),
Follows(Box<Follows<L>>),
Pattern(Pattern<L>),
Kind(KindMatcher<L>),
All(o::All<L, Rule<L>>),
Any(o::Any<L, Rule<L>>),
Not(Box<o::Not<L, Rule<L>>>),
Inside(Box<Inside<L>>),
Has(Box<Has<L>>),
Precedes(Box<Precedes<L>>),
Follows(Box<Follows<L>>),
Pattern(Pattern<L>),
Kind(KindMatcher<L>),
}
impl<L: Language> Matcher<L> for Rule<L> {
fn match_node_with_env<'tree>(
&self,
node: Node<'tree, L>,
env: &mut MetaVarEnv<'tree, L>,
) -> Option<Node<'tree, L>> {
use Rule::*;
match self {
All(all) => all.match_node_with_env(node, env),
Any(any) => any.match_node_with_env(node, env),
Not(not) => not.match_node_with_env(node, env),
Inside(parent) => match_and_add_label(&**parent, node, env),
Has(child) => match_and_add_label(&**child, node, env),
Precedes(latter) => match_and_add_label(&**latter, node, env),
Follows(former) => match_and_add_label(&**former, node, env),
Pattern(pattern) => pattern.match_node_with_env(node, env),
Kind(kind) => kind.match_node_with_env(node, env),
}
}
}
fn match_and_add_label<'tree, L: Language, M: Matcher<L>>(
inner: &M,
fn match_node_with_env<'tree>(
&self,
node: Node<'tree, L>,
env: &mut MetaVarEnv<'tree, L>,
) -> Option<Node<'tree, L>> {
use Rule::*;
match self {
All(all) => all.match_node_with_env(node, env),
Any(any) => any.match_node_with_env(node, env),
Not(not) => not.match_node_with_env(node, env),
Inside(parent) => match_and_add_label(&**parent, node, env),
Has(child) => match_and_add_label(&**child, node, env),
Precedes(latter) => match_and_add_label(&**latter, node, env),
Follows(former) => match_and_add_label(&**former, node, env),
Pattern(pattern) => pattern.match_node_with_env(node, env),
Kind(kind) => kind.match_node_with_env(node, env),
}
}
}
fn match_and_add_label<'tree, L: Language, M: Matcher<L>>(
inner: &M,
node: Node<'tree, L>,
env: &mut MetaVarEnv<'tree, L>,
) -> Option<Node<'tree, L>> {
let matched = inner.match_node_with_env(node, env)?;
env.add_label("secondary", matched.clone());
Some(matched)
let matched = inner.match_node_with_env(node, env)?;
env.add_label("secondary", matched.clone());
Some(matched)
}
fn until<L: Language>(pattern: &Option<Rule<L>>) -> impl Fn(&Node<L>) -> bool + '_ {
move |n| {
if let Some(m) = pattern {
m.match_node(n.clone()).is_none()
} else {
true
}
move |n| {
if let Some(m) = pattern {
m.match_node(n.clone()).is_none()
} else {
true
}
}
}
pub struct Inside<L: Language> {
outer: Rule<L>,
until: Option<Rule<L>>,
immediate: bool,
lang: PhantomData<L>,
outer: Rule<L>,
until: Option<Rule<L>>,
immediate: bool,
lang: PhantomData<L>,
}
impl<L: Language> Inside<L> {
fn try_new(relation: RelationalRule, lang: L) -> Result<Inside<L>, SerializeError> {
let util_node = if let Some(until) = relation.until {
Some(try_from_serializable(until, lang.clone())?)
} else {
None
};
Ok(Self {
outer: try_from_serializable(relation.rule, lang)?,
until: util_node,
immediate: relation.immediate,
lang: PhantomData,
})
}
fn try_new(relation: RelationalRule, lang: L) -> Result<Inside<L>, SerializeError> {
let util_node = if let Some(until) = relation.until {
Some(try_from_serializable(until, lang.clone())?)
} else {
None
};
Ok(Self {
outer: try_from_serializable(relation.rule, lang)?,
until: util_node,
immediate: relation.immediate,
lang: PhantomData,
})
}
}
impl<L: Language> Matcher<L> for Inside<L> {
fn match_node_with_env<'tree>(
&self,
node: Node<'tree, L>,
env: &mut MetaVarEnv<'tree, L>,
) -> Option<Node<'tree, L>> {
if self.immediate {
self.outer.match_node_with_env(node.parent()?, env)
} else {
node.ancestors()
.take_while(until(&self.until))
.find_map(|n| self.outer.match_node_with_env(n, env))
}
fn match_node_with_env<'tree>(
&self,
node: Node<'tree, L>,
env: &mut MetaVarEnv<'tree, L>,
) -> Option<Node<'tree, L>> {
if self.immediate {
self.outer.match_node_with_env(node.parent()?, env)
} else {
node
.ancestors()
.take_while(until(&self.until))
.find_map(|n| self.outer.match_node_with_env(n, env))
}
}
}
pub struct Has<L: Language> {
inner: Rule<L>,
until: Option<Rule<L>>,
immediate: bool,
lang: PhantomData<L>,
inner: Rule<L>,
until: Option<Rule<L>>,
immediate: bool,
lang: PhantomData<L>,
}
impl<L: Language> Has<L> {
fn try_new(relation: RelationalRule, lang: L) -> Result<Self, SerializeError> {
let util_node = if let Some(until) = relation.until {
Some(try_from_serializable(until, lang.clone())?)
} else {
None
};
Ok(Self {
inner: try_from_serializable(relation.rule, lang)?,
until: util_node,
immediate: relation.immediate,
lang: PhantomData,
})
}
fn try_new(relation: RelationalRule, lang: L) -> Result<Self, SerializeError> {
let util_node = if let Some(until) = relation.until {
Some(try_from_serializable(until, lang.clone())?)
} else {
None
};
Ok(Self {
inner: try_from_serializable(relation.rule, lang)?,
until: util_node,
immediate: relation.immediate,
lang: PhantomData,
})
}
}
impl<L: Language> Matcher<L> for Has<L> {
fn match_node_with_env<'tree>(
&self,
node: Node<'tree, L>,
env: &mut MetaVarEnv<'tree, L>,
) -> Option<Node<'tree, L>> {
if self.immediate {
node.children()
.find_map(|n| self.inner.match_node_with_env(n, env))
} else {
node.dfs()
.skip(1)
.take_while(until(&self.until))
.find_map(|n| self.inner.match_node_with_env(n, env))
}
fn match_node_with_env<'tree>(
&self,
node: Node<'tree, L>,
env: &mut MetaVarEnv<'tree, L>,
) -> Option<Node<'tree, L>> {
if self.immediate {
node
.children()
.find_map(|n| self.inner.match_node_with_env(n, env))
} else {
node
.dfs()
.skip(1)
.take_while(until(&self.until))
.find_map(|n| self.inner.match_node_with_env(n, env))
}
}
}
pub struct Precedes<L: Language> {
inner: Rule<L>,
until: Option<Rule<L>>,
immediate: bool,
lang: PhantomData<L>,
inner: Rule<L>,
until: Option<Rule<L>>,
immediate: bool,
lang: PhantomData<L>,
}
impl<L: Language> Precedes<L> {
fn try_new(relation: RelationalRule, lang: L) -> Result<Self, SerializeError> {
let util_node = if let Some(until) = relation.until {
Some(try_from_serializable(until, lang.clone())?)
} else {
None
};
Ok(Self {
inner: try_from_serializable(relation.rule, lang)?,
until: util_node,
immediate: relation.immediate,
lang: PhantomData,
})
}
fn try_new(relation: RelationalRule, lang: L) -> Result<Self, SerializeError> {
let util_node = if let Some(until) = relation.until {
Some(try_from_serializable(until, lang.clone())?)
} else {
None
};
Ok(Self {
inner: try_from_serializable(relation.rule, lang)?,
until: util_node,
immediate: relation.immediate,
lang: PhantomData,
})
}
}
impl<L: Language> Matcher<L> for Precedes<L> {
fn match_node_with_env<'tree>(
&self,
node: Node<'tree, L>,
env: &mut MetaVarEnv<'tree, L>,
) -> Option<Node<'tree, L>> {
if self.immediate {
self.inner.match_node_with_env(node.prev()?, env)
} else {
node.prev_all()
.take_while(until(&self.until))
.find_map(|n| self.inner.match_node_with_env(n, env))
}
fn match_node_with_env<'tree>(
&self,
node: Node<'tree, L>,
env: &mut MetaVarEnv<'tree, L>,
) -> Option<Node<'tree, L>> {
if self.immediate {
self.inner.match_node_with_env(node.prev()?, env)
} else {
node
.prev_all()
.take_while(until(&self.until))
.find_map(|n| self.inner.match_node_with_env(n, env))
}
}
}
pub struct Follows<L: Language> {
inner: Rule<L>,
until: Option<Rule<L>>,
immediate: bool,
lang: PhantomData<L>,
inner: Rule<L>,
until: Option<Rule<L>>,
immediate: bool,
lang: PhantomData<L>,
}
impl<L: Language> Follows<L> {
fn try_new(relation: RelationalRule, lang: L) -> Result<Self, SerializeError> {
let util_node = if let Some(until) = relation.until {
Some(try_from_serializable(until, lang.clone())?)
} else {
None
};
Ok(Self {
inner: try_from_serializable(relation.rule, lang)?,
until: util_node,
immediate: relation.immediate,
lang: PhantomData,
})
}
fn try_new(relation: RelationalRule, lang: L) -> Result<Self, SerializeError> {
let util_node = if let Some(until) = relation.until {
Some(try_from_serializable(until, lang.clone())?)
} else {
None
};
Ok(Self {
inner: try_from_serializable(relation.rule, lang)?,
until: util_node,
immediate: relation.immediate,
lang: PhantomData,
})
}
}
impl<L: Language> Matcher<L> for Follows<L> {
fn match_node_with_env<'tree>(
&self,
node: Node<'tree, L>,
env: &mut MetaVarEnv<'tree, L>,
) -> Option<Node<'tree, L>> {
if self.immediate {
self.inner.match_node_with_env(node.next()?, env)
} else {
node.next_all()
.take_while(until(&self.until))
.find_map(|n| self.inner.match_node_with_env(n, env))
}
fn match_node_with_env<'tree>(
&self,
node: Node<'tree, L>,
env: &mut MetaVarEnv<'tree, L>,
) -> Option<Node<'tree, L>> {
if self.immediate {
self.inner.match_node_with_env(node.next()?, env)
} else {
node
.next_all()
.take_while(until(&self.until))
.find_map(|n| self.inner.match_node_with_env(n, env))
}
}
}
#[derive(Debug)]
pub enum SerializeError {
MissPositiveMatcher,
MissPositiveMatcher,
}
impl std::error::Error for SerializeError {}
impl fmt::Display for SerializeError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::MissPositiveMatcher => write!(f, "missing positive matcher"),
}
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::MissPositiveMatcher => write!(f, "missing positive matcher"),
}
}
}
// TODO: implement positive/non positive
pub fn try_from_serializable<L: Language>(
serialized: SerializableRule,
lang: L,
serialized: SerializableRule,
lang: L,
) -> Result<Rule<L>, SerializeError> {
use Rule as R;
use SerializableRule as S;
let mapper = |s| try_from_serializable(s, lang.clone());
let convert_rules = |rules: Vec<SerializableRule>| {
let mut inner = Vec::with_capacity(rules.len());
for rule in rules {
inner.push(try_from_serializable(rule, lang.clone())?);
}
Ok(inner)
};
let ret = match serialized {
S::All(all) => R::All(o::All::new(convert_rules(all)?)),
S::Any(any) => R::Any(o::Any::new(convert_rules(any)?)),
S::Not(not) => R::Not(Box::new(o::Not::new(mapper(*not)?))),
S::Inside(inside) => R::Inside(Box::new(Inside::try_new(*inside, lang)?)),
S::Has(has) => R::Has(Box::new(Has::try_new(*has, lang)?)),
S::Precedes(precedes) => R::Precedes(Box::new(Precedes::try_new(*precedes, lang)?)),
S::Follows(follows) => R::Follows(Box::new(Follows::try_new(*follows, lang)?)),
S::Kind(kind) => R::Kind(KindMatcher::new(&kind, lang)),
S::Pattern(PatternStyle::Str(pattern)) => R::Pattern(Pattern::new(&pattern, lang)),
S::Pattern(PatternStyle::Contextual { context, selector }) => {
R::Pattern(Pattern::contextual(&context, &selector, lang))
}
};
Ok(ret)
use Rule as R;
use SerializableRule as S;
let mapper = |s| try_from_serializable(s, lang.clone());
let convert_rules = |rules: Vec<SerializableRule>| {
let mut inner = Vec::with_capacity(rules.len());
for rule in rules {
inner.push(try_from_serializable(rule, lang.clone())?);
}
Ok(inner)
};
let ret = match serialized {
S::All(all) => R::All(o::All::new(convert_rules(all)?)),
S::Any(any) => R::Any(o::Any::new(convert_rules(any)?)),
S::Not(not) => R::Not(Box::new(o::Not::new(mapper(*not)?))),
S::Inside(inside) => R::Inside(Box::new(Inside::try_new(*inside, lang)?)),
S::Has(has) => R::Has(Box::new(Has::try_new(*has, lang)?)),
S::Precedes(precedes) => R::Precedes(Box::new(Precedes::try_new(*precedes, lang)?)),
S::Follows(follows) => R::Follows(Box::new(Follows::try_new(*follows, lang)?)),
S::Kind(kind) => R::Kind(KindMatcher::new(&kind, lang)),
S::Pattern(PatternStyle::Str(pattern)) => R::Pattern(Pattern::new(&pattern, lang)),
S::Pattern(PatternStyle::Contextual { context, selector }) => {
R::Pattern(Pattern::contextual(&context, &selector, lang))
}
};
Ok(ret)
}
#[cfg(test)]
mod test {
use super::*;
use serde_yaml::from_str;
use PatternStyle::*;
use SerializableRule::*;
use super::*;
use serde_yaml::from_str;
use PatternStyle::*;
use SerializableRule::*;
#[test]
fn test_pattern() {
let src = r"
#[test]
fn test_pattern() {
let src = r"
pattern: Test
";
let rule: SerializableRule = from_str(src).expect("cannot parse rule");
assert!(matches!(rule, Pattern(Str(_))));
let src = r"
let rule: SerializableRule = from_str(src).expect("cannot parse rule");
assert!(matches!(rule, Pattern(Str(_))));
let src = r"
pattern:
context: class $C { set $B() {} }
selector: method_definition
";
let rule: SerializableRule = from_str(src).expect("cannot parse rule");
assert!(matches!(rule, Pattern(Contextual { .. })));
}
let rule: SerializableRule = from_str(src).expect("cannot parse rule");
assert!(matches!(rule, Pattern(Contextual { .. })));
}
#[test]
fn test_relational() {
let src = r"
#[test]
fn test_relational() {
let src = r"
inside:
pattern: class A {}
immediate: true
until:
pattern: function() {}
";
let rule: SerializableRule = from_str(src).expect("cannot parse rule");
match rule {
SerializableRule::Inside(rule) => assert!(rule.immediate),
_ => unreachable!(),
}
let rule: SerializableRule = from_str(src).expect("cannot parse rule");
match rule {
SerializableRule::Inside(rule) => assert!(rule.immediate),
_ => unreachable!(),
}
}
}
+47 -47
View File
@@ -2,68 +2,68 @@ use crate::RuleConfig;
use ast_grep_core::language::Language;
pub struct RuleBucket<L: Language> {
rules: Vec<RuleConfig<L>>,
lang: L,
rules: Vec<RuleConfig<L>>,
lang: L,
}
impl<L: Language> RuleBucket<L> {
fn new(lang: L) -> Self {
Self {
rules: vec![],
lang,
}
}
pub fn add(&mut self, rule: RuleConfig<L>) {
self.rules.push(rule);
fn new(lang: L) -> Self {
Self {
rules: vec![],
lang,
}
}
pub fn add(&mut self, rule: RuleConfig<L>) {
self.rules.push(rule);
}
}
/// A collection of rules to run one round of scanning.
/// Rules will be grouped together based on their language, path globbing and pattern rule.
pub struct RuleCollection<L: Language + Eq> {
// use vec since we don't have many languages
pub tenured: Vec<RuleBucket<L>>,
pub contingent: Vec<RuleConfig<L>>,
// use vec since we don't have many languages
pub tenured: Vec<RuleBucket<L>>,
pub contingent: Vec<RuleConfig<L>>,
}
impl<L: Language + Eq> RuleCollection<L> {
pub fn new(configs: Vec<RuleConfig<L>>) -> Self {
let mut tenured = vec![];
let mut contingent = vec![];
for config in configs {
if config.files.is_none() && config.ignores.is_none() {
Self::add_tenured_rule(&mut tenured, config);
} else {
contingent.push(config);
}
}
Self {
tenured,
contingent,
}
pub fn new(configs: Vec<RuleConfig<L>>) -> Self {
let mut tenured = vec![];
let mut contingent = vec![];
for config in configs {
if config.files.is_none() && config.ignores.is_none() {
Self::add_tenured_rule(&mut tenured, config);
} else {
contingent.push(config);
}
}
// TODO: get rules without allocation
pub fn get_rules_for_lang(&self, lang: &L) -> Vec<&RuleConfig<L>> {
// TODO: add contingent
for rule in &self.tenured {
if &rule.lang == lang {
return rule.rules.iter().collect();
}
}
vec![]
Self {
tenured,
contingent,
}
}
fn add_tenured_rule(tenured: &mut Vec<RuleBucket<L>>, rule: RuleConfig<L>) {
let lang = rule.language.clone();
for bucket in tenured.iter_mut() {
if bucket.lang == lang {
bucket.add(rule);
return;
}
}
let mut bucket = RuleBucket::new(lang);
// TODO: get rules without allocation
pub fn get_rules_for_lang(&self, lang: &L) -> Vec<&RuleConfig<L>> {
// TODO: add contingent
for rule in &self.tenured {
if &rule.lang == lang {
return rule.rules.iter().collect();
}
}
vec![]
}
fn add_tenured_rule(tenured: &mut Vec<RuleBucket<L>>, rule: RuleConfig<L>) {
let lang = rule.language.clone();
for bucket in tenured.iter_mut() {
if bucket.lang == lang {
bucket.add(rule);
tenured.push(bucket);
return;
}
}
let mut bucket = RuleBucket::new(lang);
bucket.add(rule);
tenured.push(bucket);
}
}
+51 -51
View File
@@ -9,71 +9,71 @@ pub use tree_sitter::Language as TSLanguage;
/// * if we need to use other char in meta var for parser at runtime
/// * pre process the Pattern code.
pub trait Language: Clone {
/// Return the file language from path. Return None if the file type is not supported.
fn from_path<P: AsRef<Path>>(_path: P) -> Option<Self> {
// TODO: throw panic here if not implemented properly?
None
}
/// Return the file language from path. Return None if the file type is not supported.
fn from_path<P: AsRef<Path>>(_path: P) -> Option<Self> {
// TODO: throw panic here if not implemented properly?
None
}
/// Create an [`AstGrep`] instance for the language
fn ast_grep<S: AsRef<str>>(&self, source: S) -> AstGrep<Self> {
AstGrep::new(source, self.clone())
}
/// Create an [`AstGrep`] instance for the language
fn ast_grep<S: AsRef<str>>(&self, source: S) -> AstGrep<Self> {
AstGrep::new(source, self.clone())
}
/// tree sitter language to parse the source
fn get_ts_language(&self) -> TSLanguage;
/// ignore trivial tokens in language matching
fn skippable_kind_ids(&self) -> &'static [u16] {
&[]
}
/// tree sitter language to parse the source
fn get_ts_language(&self) -> TSLanguage;
/// ignore trivial tokens in language matching
fn skippable_kind_ids(&self) -> &'static [u16] {
&[]
}
/// normalize pattern code before matching
/// e.g. remove expression_statement, or prefer parsing {} to object over block
fn pre_process_pattern<'q>(&self, query: &'q str) -> Cow<'q, str> {
Cow::Borrowed(query)
}
/// normalize pattern code before matching
/// e.g. remove expression_statement, or prefer parsing {} to object over block
fn pre_process_pattern<'q>(&self, query: &'q str) -> Cow<'q, str> {
Cow::Borrowed(query)
}
/// Configure meta variable special character
/// By default $ is the metavar char, but in PHP it can be #
#[inline]
fn meta_var_char(&self) -> char {
'$'
}
/// Configure meta variable special character
/// By default $ is the metavar char, but in PHP it can be #
#[inline]
fn meta_var_char(&self) -> char {
'$'
}
/// Some language does not accept $ as the leading char for identifiers.
/// We need to change $ to other char at run-time to make parser happy, thus the name expando.
/// By default this is the same as meta_var char so replacement is done at runtime.
#[inline]
fn expando_char(&self) -> char {
self.meta_var_char()
}
/// Some language does not accept $ as the leading char for identifiers.
/// We need to change $ to other char at run-time to make parser happy, thus the name expando.
/// By default this is the same as meta_var char so replacement is done at runtime.
#[inline]
fn expando_char(&self) -> char {
self.meta_var_char()
}
/// extract MetaVariable from a given source string
/// At runtime we need to use expand_char
fn extract_meta_var(&self, source: &str) -> Option<MetaVariable> {
extract_meta_var(source, self.expando_char())
}
/// extract MetaVariable from a given source string
/// At runtime we need to use expand_char
fn extract_meta_var(&self, source: &str) -> Option<MetaVariable> {
extract_meta_var(source, self.expando_char())
}
}
impl Language for TSLanguage {
fn get_ts_language(&self) -> TSLanguage {
self.clone()
}
fn get_ts_language(&self) -> TSLanguage {
self.clone()
}
}
#[cfg(test)]
mod test {
use super::*;
#[derive(Clone)]
pub struct Tsx;
impl Language for Tsx {
fn from_path<P: AsRef<Path>>(_path: P) -> Option<Self> {
Some(Tsx)
}
fn get_ts_language(&self) -> TSLanguage {
tree_sitter_typescript::language_tsx().into()
}
use super::*;
#[derive(Clone)]
pub struct Tsx;
impl Language for Tsx {
fn from_path<P: AsRef<Path>>(_path: P) -> Option<Self> {
Some(Tsx)
}
fn get_ts_language(&self) -> TSLanguage {
tree_sitter_typescript::language_tsx().into()
}
}
}
#[cfg(test)]
+58 -58
View File
@@ -21,79 +21,79 @@ use ts_parser::Edit;
#[derive(Clone)]
pub struct AstGrep<L: Language> {
inner: Root<L>,
inner: Root<L>,
}
impl<L: Language> AstGrep<L> {
pub fn new<S: AsRef<str>>(src: S, lang: L) -> Self {
Self {
inner: Root::new(src.as_ref(), lang),
}
pub fn new<S: AsRef<str>>(src: S, lang: L) -> Self {
Self {
inner: Root::new(src.as_ref(), lang),
}
}
pub fn source(&self) -> &str {
&self.inner.source
}
pub fn source(&self) -> &str {
&self.inner.source
}
pub fn root(&self) -> Node<L> {
self.inner.root()
}
pub fn root(&self) -> Node<L> {
self.inner.root()
}
pub fn edit(&mut self, edit: Edit) -> &mut Self {
self.inner.do_edit(edit);
self
}
pub fn edit(&mut self, edit: Edit) -> &mut Self {
self.inner.do_edit(edit);
self
}
pub fn replace<M: PositiveMatcher<L>, R: Replacer<L>>(
&mut self,
pattern: M,
replacer: R,
) -> bool {
if let Some(edit) = self.root().replace(pattern, replacer) {
self.edit(edit);
true
} else {
false
}
pub fn replace<M: PositiveMatcher<L>, R: Replacer<L>>(
&mut self,
pattern: M,
replacer: R,
) -> bool {
if let Some(edit) = self.root().replace(pattern, replacer) {
self.edit(edit);
true
} else {
false
}
}
pub fn generate(self) -> String {
self.inner.source
}
pub fn generate(self) -> String {
self.inner.source
}
}
#[cfg(test)]
mod test {
use super::*;
use language::Tsx;
#[test]
fn test_replace() {
let mut ast_grep = Tsx.ast_grep("var a = 1; let b = 2;");
ast_grep.replace("var $A = $B", "let $A = $B");
let source = ast_grep.generate();
assert_eq!(source, "let a = 1; let b = 2;"); // note the semicolon
}
use super::*;
use language::Tsx;
#[test]
fn test_replace() {
let mut ast_grep = Tsx.ast_grep("var a = 1; let b = 2;");
ast_grep.replace("var $A = $B", "let $A = $B");
let source = ast_grep.generate();
assert_eq!(source, "let a = 1; let b = 2;"); // note the semicolon
}
#[test]
fn test_replace_by_rule() {
let rule = Op::either("let a = 123").or("let b = 456");
let mut ast_grep = Tsx.ast_grep("let a = 123");
let replaced = ast_grep.replace(rule, "console.log('it works!')");
assert!(replaced);
let source = ast_grep.generate();
assert_eq!(source, "console.log('it works!')");
}
#[test]
fn test_replace_by_rule() {
let rule = Op::either("let a = 123").or("let b = 456");
let mut ast_grep = Tsx.ast_grep("let a = 123");
let replaced = ast_grep.replace(rule, "console.log('it works!')");
assert!(replaced);
let source = ast_grep.generate();
assert_eq!(source, "console.log('it works!')");
}
#[test]
fn test_replace_trivia() {
let mut ast_grep = Tsx.ast_grep("var a = 1 /*haha*/;");
ast_grep.replace("var $A = $B", "let $A = $B");
let source = ast_grep.generate();
assert_eq!(source, "let a = 1;"); // semicolon
#[test]
fn test_replace_trivia() {
let mut ast_grep = Tsx.ast_grep("var a = 1 /*haha*/;");
ast_grep.replace("var $A = $B", "let $A = $B");
let source = ast_grep.generate();
assert_eq!(source, "let a = 1;"); // semicolon
let mut ast_grep = Tsx.ast_grep("var a = 1; /*haha*/");
ast_grep.replace("var $A = $B", "let $A = $B");
let source = ast_grep.generate();
assert_eq!(source, "let a = 1; /*haha*/");
}
let mut ast_grep = Tsx.ast_grep("var a = 1; /*haha*/");
ast_grep.replace("var $A = $B", "let $A = $B");
let source = ast_grep.generate();
assert_eq!(source, "let a = 1; /*haha*/");
}
}
+282 -281
View File
@@ -3,324 +3,325 @@ use crate::Language;
use crate::Node;
fn match_leaf_meta_var<'goal, 'tree, L: Language>(
goal: &Node<'goal, L>,
candidate: Node<'tree, L>,
env: &mut MetaVarEnv<'tree, L>,
goal: &Node<'goal, L>,
candidate: Node<'tree, L>,
env: &mut MetaVarEnv<'tree, L>,
) -> Option<Node<'tree, L>> {
let extracted = extract_var_from_node(goal)?;
use MetaVariable as MV;
match extracted {
MV::Named(name) => {
env.insert(name, candidate.clone())?;
Some(candidate)
}
MV::Anonymous => Some(candidate),
// Ellipsis will be matched in parent level
MV::Ellipsis => Some(candidate),
MV::NamedEllipsis(name) => {
env.insert(name, candidate.clone())?;
Some(candidate)
}
let extracted = extract_var_from_node(goal)?;
use MetaVariable as MV;
match extracted {
MV::Named(name) => {
env.insert(name, candidate.clone())?;
Some(candidate)
}
MV::Anonymous => Some(candidate),
// Ellipsis will be matched in parent level
MV::Ellipsis => Some(candidate),
MV::NamedEllipsis(name) => {
env.insert(name, candidate.clone())?;
Some(candidate)
}
}
}
fn try_get_ellipsis_mode(node: &Node<impl Language>) -> Result<Option<String>, ()> {
match extract_var_from_node(node).ok_or(())? {
MetaVariable::Ellipsis => Ok(None),
MetaVariable::NamedEllipsis(n) => Ok(Some(n)),
_ => Err(()),
}
match extract_var_from_node(node).ok_or(())? {
MetaVariable::Ellipsis => Ok(None),
MetaVariable::NamedEllipsis(n) => Ok(Some(n)),
_ => Err(()),
}
}
fn update_ellipsis_env<'t, L: Language>(
optional_name: &Option<String>,
mut matched: Vec<Node<'t, L>>,
env: &mut MetaVarEnv<'t, L>,
cand_children: impl Iterator<Item = Node<'t, L>>,
skipped_anonymous: usize,
optional_name: &Option<String>,
mut matched: Vec<Node<'t, L>>,
env: &mut MetaVarEnv<'t, L>,
cand_children: impl Iterator<Item = Node<'t, L>>,
skipped_anonymous: usize,
) {
if let Some(name) = optional_name.as_ref() {
matched.extend(cand_children);
let skipped = matched.len().saturating_sub(skipped_anonymous);
drop(matched.drain(skipped..));
env.insert_multi(name.to_string(), matched);
}
if let Some(name) = optional_name.as_ref() {
matched.extend(cand_children);
let skipped = matched.len().saturating_sub(skipped_anonymous);
drop(matched.drain(skipped..));
env.insert_multi(name.to_string(), matched);
}
}
pub fn match_node_non_recursive<'goal, 'tree, L: Language>(
goal: &Node<'goal, L>,
candidate: Node<'tree, L>,
env: &mut MetaVarEnv<'tree, L>,
goal: &Node<'goal, L>,
candidate: Node<'tree, L>,
env: &mut MetaVarEnv<'tree, L>,
) -> Option<Node<'tree, L>> {
let is_leaf = goal.is_leaf();
if is_leaf {
if let Some(matched) = match_leaf_meta_var(goal, candidate.clone(), env) {
return Some(matched);
}
let is_leaf = goal.is_leaf();
if is_leaf {
if let Some(matched) = match_leaf_meta_var(goal, candidate.clone(), env) {
return Some(matched);
}
if goal.kind_id() != candidate.kind_id() {
return None;
}
if goal.kind_id() != candidate.kind_id() {
return None;
}
if is_leaf {
if extract_var_from_node(goal).is_some() {
return None;
}
if is_leaf {
if extract_var_from_node(goal).is_some() {
return None;
}
return if goal.text() == candidate.text() {
Some(candidate)
} else {
None
};
}
let mut goal_children = goal.children().peekable();
let mut cand_children = candidate.children().peekable();
cand_children.peek()?;
loop {
let curr_node = goal_children.peek().unwrap();
if let Ok(optional_name) = try_get_ellipsis_mode(curr_node) {
let mut matched = vec![];
goal_children.next();
// goal has all matched
if goal_children.peek().is_none() {
update_ellipsis_env(&optional_name, matched, env, cand_children, 0);
return Some(candidate);
}
let mut skipped_anonymous = 0;
while !goal_children.peek().unwrap().inner.is_named() {
goal_children.next();
skipped_anonymous += 1;
if goal_children.peek().is_none() {
update_ellipsis_env(
&optional_name,
matched,
env,
cand_children,
skipped_anonymous,
);
return Some(candidate);
}
}
// if next node is a Ellipsis, consume one candidate node
if try_get_ellipsis_mode(goal_children.peek().unwrap()).is_ok() {
matched.push(cand_children.next().unwrap());
cand_children.peek()?;
update_ellipsis_env(
&optional_name,
matched,
env,
std::iter::empty(),
skipped_anonymous,
);
continue;
}
loop {
if match_node_non_recursive(
goal_children.peek().unwrap(),
cand_children.peek().unwrap().clone(),
env,
)
.is_some()
{
// found match non Ellipsis,
update_ellipsis_env(
&optional_name,
matched,
env,
std::iter::empty(),
skipped_anonymous,
);
break;
}
matched.push(cand_children.next().unwrap());
cand_children.peek()?;
}
}
match_node_non_recursive(
goal_children.peek().unwrap(),
cand_children.peek().unwrap().clone(),
env,
)?;
return if goal.text() == candidate.text() {
Some(candidate)
} else {
None
};
}
let mut goal_children = goal.children().peekable();
let mut cand_children = candidate.children().peekable();
cand_children.peek()?;
loop {
let curr_node = goal_children.peek().unwrap();
if let Ok(optional_name) = try_get_ellipsis_mode(curr_node) {
let mut matched = vec![];
goal_children.next();
// goal has all matched
if goal_children.peek().is_none() {
update_ellipsis_env(&optional_name, matched, env, cand_children, 0);
return Some(candidate);
}
let mut skipped_anonymous = 0;
while !goal_children.peek().unwrap().inner.is_named() {
goal_children.next();
skipped_anonymous += 1;
if goal_children.peek().is_none() {
// all goal found, return
return Some(candidate.clone());
update_ellipsis_env(
&optional_name,
matched,
env,
cand_children,
skipped_anonymous,
);
return Some(candidate);
}
cand_children.next();
}
// if next node is a Ellipsis, consume one candidate node
if try_get_ellipsis_mode(goal_children.peek().unwrap()).is_ok() {
matched.push(cand_children.next().unwrap());
cand_children.peek()?;
update_ellipsis_env(
&optional_name,
matched,
env,
std::iter::empty(),
skipped_anonymous,
);
continue;
}
loop {
if match_node_non_recursive(
goal_children.peek().unwrap(),
cand_children.peek().unwrap().clone(),
env,
)
.is_some()
{
// found match non Ellipsis,
update_ellipsis_env(
&optional_name,
matched,
env,
std::iter::empty(),
skipped_anonymous,
);
break;
}
matched.push(cand_children.next().unwrap());
cand_children.peek()?;
}
}
match_node_non_recursive(
goal_children.peek().unwrap(),
cand_children.peek().unwrap().clone(),
env,
)?;
goal_children.next();
if goal_children.peek().is_none() {
// all goal found, return
return Some(candidate.clone());
}
cand_children.next();
cand_children.peek()?;
}
}
/// Returns true iff `goal` and `candidate` have identical tree structure:
/// same kind at every node and same text at every leaf.
pub fn does_node_match_exactly<L: Language>(goal: &Node<L>, candidate: Node<L>) -> bool {
  if goal.kind_id() != candidate.kind_id() {
    return false;
  }
  if goal.is_leaf() {
    return goal.text() == candidate.text();
  }
  let goal_children = goal.children();
  let cand_children = candidate.children();
  if goal_children.len() != cand_children.len() {
    return false;
  }
  goal_children
    .zip(cand_children)
    .all(|(g, c)| does_node_match_exactly(&g, c))
}
/// Parses the node's own text as a meta variable (e.g. `$A`, `$$$`),
/// delegating to the language-specific extractor.
fn extract_var_from_node<L: Language>(goal: &Node<L>) -> Option<MetaVariable> {
  let key = goal.text();
  goal.root.lang.extract_meta_var(&key)
}
#[cfg(test)]
mod test {
  use super::*;
  use crate::language::{Language, Tsx};
  use crate::ts_parser::parse as parse_base;
  use crate::Root;
  use std::collections::HashMap;

  fn parse(src: &str) -> tree_sitter::Tree {
    parse_base(src, None, Tsx.get_ts_language()).unwrap()
  }

  // recursive wrapper around the non-recursive matcher: try the node itself,
  // then search its descendants
  fn find_node_recursive<'goal, 'tree>(
    goal: &Node<'goal, Tsx>,
    node: Node<'tree, Tsx>,
    env: &mut MetaVarEnv<'tree, Tsx>,
  ) -> Option<Node<'tree, Tsx>> {
    match_node_non_recursive(goal, node.clone(), env).or_else(|| {
      node
        .children()
        .find_map(|sub| find_node_recursive(goal, sub, env))
    })
  }

  // asserts `s1` (pattern) matches somewhere in `s2` and returns the captures
  fn test_match(s1: &str, s2: &str) -> HashMap<String, String> {
    let goal = parse(s1);
    let goal = Node {
      inner: goal.root_node().child(0).unwrap(),
      root: &Root {
        inner: goal.clone(),
        source: s1.to_string(),
        lang: Tsx,
      },
    };
    let cand = parse(s2);
    let cand = Node {
      inner: cand.root_node(),
      root: &Root {
        inner: cand.clone(),
        source: s2.to_string(),
        lang: Tsx,
      },
    };
    let mut env = MetaVarEnv::new();
    let ret = find_node_recursive(&goal, cand.clone(), &mut env);
    assert!(
      ret.is_some(),
      "goal: {}, candidate: {}",
      goal.inner.to_sexp(),
      cand.inner.to_sexp(),
    );
    HashMap::from(env)
  }

  // asserts `s1` (pattern) matches nowhere in `s2`
  fn test_non_match(s1: &str, s2: &str) {
    let goal = parse(s1);
    let goal = Node {
      inner: goal.root_node().child(0).unwrap(),
      root: &Root {
        inner: goal.clone(),
        source: s1.to_string(),
        lang: Tsx,
      },
    };
    let cand = parse(s2);
    let cand = Node {
      inner: cand.root_node(),
      root: &Root {
        inner: cand.clone(),
        source: s2.to_string(),
        lang: Tsx,
      },
    };
    let mut env = MetaVarEnv::new();
    let ret = find_node_recursive(&goal, cand, &mut env);
    assert!(ret.is_none());
  }

  #[test]
  fn test_simple_match() {
    test_match("const a = 123", "const a=123");
    test_non_match("const a = 123", "var a = 123");
  }

  #[test]
  fn test_nested_match() {
    test_match("const a = 123", "function() {const a= 123;}");
    test_match("const a = 123", "class A { constructor() {const a= 123;}}");
    test_match(
      "const a = 123",
      "for (let a of []) while (true) { const a = 123;}",
    );
  }

  #[test]
  fn test_should_exactly_match() {
    test_match(
      "function foo() { let a = 123; }",
      "function foo() { let a = 123; }",
    );
    test_non_match(
      "function foo() { let a = 123; }",
      "function bar() { let a = 123; }",
    );
  }

  #[test]
  fn test_match_inner() {
    test_match(
      "function bar() { let a = 123; }",
      "function foo() { function bar() {let a = 123; }}",
    );
    test_non_match(
      "function foo() { let a = 123; }",
      "function foo() { function bar() {let a = 123; }}",
    );
  }

  #[test]
  fn test_single_ellipsis() {
    test_match("foo($$$)", "foo(a, b, c)");
    test_match("foo($$$)", "foo()");
  }

  #[test]
  fn test_named_ellipsis() {
    test_match("foo($$$A, c)", "foo(a, b, c)");
    test_match("foo($$$A, b, c)", "foo(a, b, c)");
    test_match("foo($$$A, a, b, c)", "foo(a, b, c)");
    test_non_match("foo($$$A, a, b, c)", "foo(b, c)");
  }

  #[test]
  fn test_leading_ellipsis() {
    test_match("foo($$$, c)", "foo(a, b, c)");
    test_match("foo($$$, b, c)", "foo(a, b, c)");
    test_match("foo($$$, a, b, c)", "foo(a, b, c)");
    test_non_match("foo($$$, a, b, c)", "foo(b, c)");
  }

  #[test]
  fn test_trailing_ellipsis() {
    test_match("foo(a, $$$)", "foo(a, b, c)");
    test_match("foo(a, b, $$$)", "foo(a, b, c)");
    // test_match("foo(a, b, c, $$$)", "foo(a, b, c)");
    test_non_match("foo(a, b, c, $$$)", "foo(b, c)");
  }

  #[test]
  fn test_meta_var_multiple_occurrence() {
    test_match("$A($$$)", "test(123)");
    test_match("$A($B)", "test(123)");
    test_non_match("$A($A)", "test(aaa)");
    test_non_match("$A($A)", "test(123)");
    test_non_match("$A($A, $A)", "test(123, 456)");
    test_match("$A($A)", "test(test)");
  }
}
+182 -180
View File
@@ -9,33 +9,33 @@ use std::ops::{Deref, DerefMut};
#[derive(Clone)]
pub struct KindMatcher<L: Language> {
kind: KindId,
lang: PhantomData<L>,
kind: KindId,
lang: PhantomData<L>,
}
impl<L: Language> KindMatcher<L> {
pub fn new(node_kind: &str, lang: L) -> Self {
Self {
kind: lang
.get_ts_language()
.id_for_node_kind(node_kind, /*named*/ true),
lang: PhantomData,
}
pub fn new(node_kind: &str, lang: L) -> Self {
Self {
kind: lang
.get_ts_language()
.id_for_node_kind(node_kind, /*named*/ true),
lang: PhantomData,
}
}
}
impl<L: Language> Matcher<L> for KindMatcher<L> {
fn match_node_with_env<'tree>(
&self,
node: Node<'tree, L>,
_env: &mut MetaVarEnv<'tree, L>,
) -> Option<Node<'tree, L>> {
if node.kind_id() == self.kind {
Some(node)
} else {
None
}
fn match_node_with_env<'tree>(
&self,
node: Node<'tree, L>,
_env: &mut MetaVarEnv<'tree, L>,
) -> Option<Node<'tree, L>> {
if node.kind_id() == self.kind {
Some(node)
} else {
None
}
}
}
impl<L: Language> PositiveMatcher<L> for KindMatcher<L> {}
@@ -43,105 +43,107 @@ impl<L: Language> PositiveMatcher<L> for KindMatcher<L> {}
* N.B. At least one positive term is required for matching
*/
pub trait Matcher<L: Language> {
/// Returns the node why the input is matched or None if not matched.
/// The return value is usually input node itself, but it can be different node.
/// For example `Has` matcher can return the child or descendant node.
fn match_node_with_env<'tree>(
&self,
_node: Node<'tree, L>,
_env: &mut MetaVarEnv<'tree, L>,
) -> Option<Node<'tree, L>>;
/// Returns the node why the input is matched or None if not matched.
/// The return value is usually input node itself, but it can be different node.
/// For example `Has` matcher can return the child or descendant node.
fn match_node_with_env<'tree>(
&self,
_node: Node<'tree, L>,
_env: &mut MetaVarEnv<'tree, L>,
) -> Option<Node<'tree, L>>;
fn match_node<'tree>(&self, node: Node<'tree, L>) -> Option<NodeMatch<'tree, L>> {
let mut env = self.get_meta_var_env();
let node = self.match_node_with_env(node, &mut env)?;
env.match_constraints().then_some(NodeMatch(node, env))
}
fn match_node<'tree>(&self, node: Node<'tree, L>) -> Option<NodeMatch<'tree, L>> {
let mut env = self.get_meta_var_env();
let node = self.match_node_with_env(node, &mut env)?;
env.match_constraints().then_some(NodeMatch(node, env))
}
fn get_meta_var_matchers(&self) -> MetaVarMatchers<L> {
MetaVarMatchers::new()
}
fn get_meta_var_matchers(&self) -> MetaVarMatchers<L> {
MetaVarMatchers::new()
}
fn get_meta_var_env<'tree>(&self) -> MetaVarEnv<'tree, L> {
MetaVarEnv::from_matchers(self.get_meta_var_matchers())
}
fn get_meta_var_env<'tree>(&self) -> MetaVarEnv<'tree, L> {
MetaVarEnv::from_matchers(self.get_meta_var_matchers())
}
fn find_node_with_env<'tree>(
&self,
node: Node<'tree, L>,
env: &mut MetaVarEnv<'tree, L>,
) -> Option<Node<'tree, L>> {
self.match_node_with_env(node.clone(), env).or_else(|| {
node.children()
.find_map(|sub| self.find_node_with_env(sub, env))
})
}
fn find_node_with_env<'tree>(
&self,
node: Node<'tree, L>,
env: &mut MetaVarEnv<'tree, L>,
) -> Option<Node<'tree, L>> {
self.match_node_with_env(node.clone(), env).or_else(|| {
node
.children()
.find_map(|sub| self.find_node_with_env(sub, env))
})
}
fn find_node<'tree>(&self, node: Node<'tree, L>) -> Option<NodeMatch<'tree, L>> {
self.match_node(node.clone())
.or_else(|| node.children().find_map(|sub| self.find_node(sub)))
}
fn find_node<'tree>(&self, node: Node<'tree, L>) -> Option<NodeMatch<'tree, L>> {
self
.match_node(node.clone())
.or_else(|| node.children().find_map(|sub| self.find_node(sub)))
}
fn find_all_nodes(self, node: Node<L>) -> FindAllNodes<L, Self>
where
Self: Sized,
{
FindAllNodes::new(self, node)
}
fn find_all_nodes(self, node: Node<L>) -> FindAllNodes<L, Self>
where
Self: Sized,
{
FindAllNodes::new(self, node)
}
}
/// A string is matched by parsing it into a [`Pattern`] on the fly.
impl<L: Language> Matcher<L> for str {
  fn match_node_with_env<'tree>(
    &self,
    node: Node<'tree, L>,
    env: &mut MetaVarEnv<'tree, L>,
  ) -> Option<Node<'tree, L>> {
    let pattern = Pattern::new(self, node.root.lang.clone());
    pattern.match_node_with_env(node, env)
  }
}
/// References to matchers are themselves matchers (delegation).
impl<L, T> Matcher<L> for &T
where
  L: Language,
  T: Matcher<L> + ?Sized,
{
  fn match_node_with_env<'tree>(
    &self,
    node: Node<'tree, L>,
    env: &mut MetaVarEnv<'tree, L>,
  ) -> Option<Node<'tree, L>> {
    (**self).match_node_with_env(node, env)
  }
}

impl<L: Language> PositiveMatcher<L> for str {}

impl<L, T> PositiveMatcher<L> for &T
where
  L: Language,
  T: PositiveMatcher<L> + ?Sized,
{
}
impl<L: Language> Matcher<L> for Box<dyn Matcher<L>> {
  fn match_node_with_env<'tree>(
    &self,
    node: Node<'tree, L>,
    env: &mut MetaVarEnv<'tree, L>,
  ) -> Option<Node<'tree, L>> {
    // NOTE: must double deref boxed value to avoid recursion
    (**self).match_node_with_env(node, env)
  }
}

impl<L: Language> Matcher<L> for Box<dyn PositiveMatcher<L>> {
  fn match_node_with_env<'tree>(
    &self,
    node: Node<'tree, L>,
    env: &mut MetaVarEnv<'tree, L>,
  ) -> Option<Node<'tree, L>> {
    // NOTE: must double deref boxed value to avoid recursion
    (**self).match_node_with_env(node, env)
  }
}

impl<L: Language> PositiveMatcher<L> for Box<dyn PositiveMatcher<L>> {}
@@ -151,51 +153,51 @@ impl<L: Language> PositiveMatcher<L> for Box<dyn PositiveMatcher<L>> {}
/// Marker trait for matchers that can match on their own; per the note
/// above, at least one positive term is required for matching.
pub trait PositiveMatcher<L: Language>: Matcher<L> {}
pub struct FindAllNodes<'tree, L: Language, M: Matcher<L>> {
dfs: Dfs<'tree, L>,
matcher: M,
dfs: Dfs<'tree, L>,
matcher: M,
}
impl<'tree, L: Language, M: Matcher<L>> FindAllNodes<'tree, L, M> {
fn new(matcher: M, node: Node<'tree, L>) -> Self {
Self {
dfs: node.dfs(),
matcher,
}
fn new(matcher: M, node: Node<'tree, L>) -> Self {
Self {
dfs: node.dfs(),
matcher,
}
}
}
impl<'tree, L: Language, M: Matcher<L>> Iterator for FindAllNodes<'tree, L, M> {
type Item = NodeMatch<'tree, L>;
fn next(&mut self) -> Option<Self::Item> {
for cand in self.dfs.by_ref() {
if let Some(matched) = self.matcher.match_node(cand) {
return Some(matched);
}
}
None
type Item = NodeMatch<'tree, L>;
fn next(&mut self) -> Option<Self::Item> {
for cand in self.dfs.by_ref() {
if let Some(matched) = self.matcher.match_node(cand) {
return Some(matched);
}
}
None
}
}
/// Matcher that accepts every node.
pub struct MatchAll;
impl<L: Language> Matcher<L> for MatchAll {
  fn match_node_with_env<'tree>(
    &self,
    node: Node<'tree, L>,
    _env: &mut MetaVarEnv<'tree, L>,
  ) -> Option<Node<'tree, L>> {
    Some(node)
  }
}

/// Matcher that rejects every node.
pub struct MatchNone;
impl<L: Language> Matcher<L> for MatchNone {
  fn match_node_with_env<'tree>(
    &self,
    _node: Node<'tree, L>,
    _env: &mut MetaVarEnv<'tree, L>,
  ) -> Option<Node<'tree, L>> {
    None
  }
}

impl<L: Language> PositiveMatcher<L> for MatchNone {}
@@ -203,81 +205,81 @@ impl<L: Language> PositiveMatcher<L> for MatchNone {}
/// A successful match: the matched node paired with its captured
/// meta-variable environment.
pub struct NodeMatch<'tree, L: Language>(Node<'tree, L>, MetaVarEnv<'tree, L>);
impl<'tree, L: Language> NodeMatch<'tree, L> {
pub fn get_env(&self) -> &MetaVarEnv<'tree, L> {
&self.1
}
pub fn get_env(&self) -> &MetaVarEnv<'tree, L> {
&self.1
}
}
impl<'tree, L: Language> From<NodeMatch<'tree, L>> for Node<'tree, L> {
fn from(node_match: NodeMatch<'tree, L>) -> Self {
node_match.0
}
fn from(node_match: NodeMatch<'tree, L>) -> Self {
node_match.0
}
}
impl<'tree, L: Language> Deref for NodeMatch<'tree, L> {
type Target = Node<'tree, L>;
fn deref(&self) -> &Self::Target {
&self.0
}
type Target = Node<'tree, L>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl<'tree, L: Language> DerefMut for NodeMatch<'tree, L> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl<'tree, L: Language> Borrow<Node<'tree, L>> for NodeMatch<'tree, L> {
fn borrow(&self) -> &Node<'tree, L> {
&self.0
}
fn borrow(&self) -> &Node<'tree, L> {
&self.0
}
}
impl<'tree, L: Language> BorrowMut<Node<'tree, L>> for NodeMatch<'tree, L> {
fn borrow_mut(&mut self) -> &mut Node<'tree, L> {
&mut self.0
}
fn borrow_mut(&mut self) -> &mut Node<'tree, L> {
&mut self.0
}
}
#[cfg(test)]
mod test {
  use super::*;
  use crate::language::Tsx;
  use crate::Root;

  fn pattern_node(s: &str) -> Root<Tsx> {
    Root::new(s, Tsx)
  }

  #[test]
  fn test_kind_match() {
    let kind = "public_field_definition";
    let cand = pattern_node("class A { a = 123 }");
    let cand = cand.root();
    let pattern = KindMatcher::new(kind, Tsx);
    assert!(
      pattern.find_node(cand.clone()).is_some(),
      "goal: {}, candidate: {}",
      kind,
      cand.inner.to_sexp(),
    );
  }

  #[test]
  fn test_kind_non_match() {
    let kind = "field_definition";
    let cand = pattern_node("const a = 123");
    let cand = cand.root();
    let pattern = KindMatcher::new(kind, Tsx);
    assert!(
      pattern.find_node(cand.clone()).is_none(),
      "goal: {}, candidate: {}",
      kind,
      cand.inner.to_sexp(),
    );
  }

  #[test]
  fn test_box_match() {
    let boxed: Box<dyn Matcher<Tsx>> = Box::new("const a = 123");
    let cand = pattern_node("const a = 123");
    let cand = cand.root();
    assert!(boxed.find_node(cand).is_some());
  }
}
+187 -186
View File
@@ -10,250 +10,251 @@ pub type MetaVariableID = String;
/// a dictionary that stores metavariable instantiation
/// const a = 123 matched with const a = $A will produce env: $A => 123
pub struct MetaVarEnv<'tree, L: Language> {
var_matchers: MetaVarMatchers<L>,
single_matched: HashMap<MetaVariableID, Node<'tree, L>>,
multi_matched: HashMap<MetaVariableID, Vec<Node<'tree, L>>>,
var_matchers: MetaVarMatchers<L>,
single_matched: HashMap<MetaVariableID, Node<'tree, L>>,
multi_matched: HashMap<MetaVariableID, Vec<Node<'tree, L>>>,
}
impl<'tree, L: Language> MetaVarEnv<'tree, L> {
pub fn new() -> Self {
Self::from_matchers(MetaVarMatchers::new())
}
pub fn new() -> Self {
Self::from_matchers(MetaVarMatchers::new())
}
pub fn from_matchers(var_matchers: MetaVarMatchers<L>) -> Self {
Self {
var_matchers,
single_matched: HashMap::new(),
multi_matched: HashMap::new(),
pub fn from_matchers(var_matchers: MetaVarMatchers<L>) -> Self {
Self {
var_matchers,
single_matched: HashMap::new(),
multi_matched: HashMap::new(),
}
}
pub fn insert(&mut self, id: MetaVariableID, ret: Node<'tree, L>) -> Option<&mut Self> {
if !self.match_variable(&id, ret.clone()) {
return None;
}
self.single_matched.insert(id, ret);
Some(self)
}
pub fn insert_multi(
&mut self,
id: MetaVariableID,
ret: Vec<Node<'tree, L>>,
) -> Option<&mut Self> {
self.multi_matched.insert(id, ret);
Some(self)
}
pub fn get(&self, var: &MetaVariable) -> Option<MatchResult<'_, 'tree, L>> {
match var {
MetaVariable::Named(n) => self.single_matched.get(n).map(MatchResult::Single),
MetaVariable::NamedEllipsis(n) => self.multi_matched.get(n).map(MatchResult::Multi),
_ => None,
}
}
pub fn add_label(&mut self, label: &str, node: Node<'tree, L>) {
self
.multi_matched
.entry(label.into())
.or_insert_with(Vec::new)
.push(node);
}
pub fn get_labels(&self, label: &str) -> Option<&Vec<Node<'tree, L>>> {
self.multi_matched.get(label)
}
pub fn match_constraints(&self) -> bool {
for (var_id, candidate) in &self.single_matched {
if let Some(m) = self.var_matchers.0.get(var_id) {
if !m.matches(candidate.clone()) {
return false;
}
}
}
true
}
pub fn insert(&mut self, id: MetaVariableID, ret: Node<'tree, L>) -> Option<&mut Self> {
if !self.match_variable(&id, ret.clone()) {
return None;
}
self.single_matched.insert(id, ret);
Some(self)
}
pub fn insert_multi(
&mut self,
id: MetaVariableID,
ret: Vec<Node<'tree, L>>,
) -> Option<&mut Self> {
self.multi_matched.insert(id, ret);
Some(self)
}
pub fn get(&self, var: &MetaVariable) -> Option<MatchResult<'_, 'tree, L>> {
match var {
MetaVariable::Named(n) => self.single_matched.get(n).map(MatchResult::Single),
MetaVariable::NamedEllipsis(n) => self.multi_matched.get(n).map(MatchResult::Multi),
_ => None,
}
}
pub fn add_label(&mut self, label: &str, node: Node<'tree, L>) {
self.multi_matched
.entry(label.into())
.or_insert_with(Vec::new)
.push(node);
}
pub fn get_labels(&self, label: &str) -> Option<&Vec<Node<'tree, L>>> {
self.multi_matched.get(label)
}
pub fn match_constraints(&self) -> bool {
for (var_id, candidate) in &self.single_matched {
if let Some(m) = self.var_matchers.0.get(var_id) {
if !m.matches(candidate.clone()) {
return false;
}
}
}
true
}
fn match_variable(&self, id: &MetaVariableID, candidate: Node<L>) -> bool {
if let Some(m) = self.single_matched.get(id) {
return does_node_match_exactly(m, candidate);
}
true
fn match_variable(&self, id: &MetaVariableID, candidate: Node<L>) -> bool {
if let Some(m) = self.single_matched.get(id) {
return does_node_match_exactly(m, candidate);
}
true
}
}
impl<'tree, L: Language> Default for MetaVarEnv<'tree, L> {
fn default() -> Self {
Self::new()
}
fn default() -> Self {
Self::new()
}
}
impl<'tree, L: Language> From<MetaVarEnv<'tree, L>> for HashMap<String, String> {
fn from(env: MetaVarEnv<'tree, L>) -> Self {
let mut ret = HashMap::new();
for (id, node) in env.single_matched {
ret.insert(id, node.text().into());
}
for (id, nodes) in env.multi_matched {
let s: Vec<_> = nodes.iter().map(|n| n.text()).collect();
let s = s.join(", ");
ret.insert(id, format!("[{s}]"));
}
ret
fn from(env: MetaVarEnv<'tree, L>) -> Self {
let mut ret = HashMap::new();
for (id, node) in env.single_matched {
ret.insert(id, node.text().into());
}
for (id, nodes) in env.multi_matched {
let s: Vec<_> = nodes.iter().map(|n| n.text()).collect();
let s = s.join(", ");
ret.insert(id, format!("[{s}]"));
}
ret
}
}
pub enum MatchResult<'a, 'tree, L: Language> {
  /// $A for captured meta var
  Single(&'a Node<'tree, L>),
  /// $$$A for captured ellipsis
  Multi(&'a Vec<Node<'tree, L>>),
}

#[derive(Debug, PartialEq, Eq)]
pub enum MetaVariable {
  /// $A for captured meta var
  Named(MetaVariableID),
  /// $_ for non-captured meta var
  Anonymous,
  /// $$$ for non-captured ellipsis
  Ellipsis,
  /// $$$A for captured ellipsis
  NamedEllipsis(MetaVariableID),
}
#[derive(Clone)]
pub struct MetaVarMatchers<L: Language>(HashMap<MetaVariableID, MetaVarMatcher<L>>);
impl<L: Language> MetaVarMatchers<L> {
pub fn new() -> Self {
Self(HashMap::new())
}
pub fn new() -> Self {
Self(HashMap::new())
}
pub fn insert(&mut self, var_id: MetaVariableID, matcher: MetaVarMatcher<L>) {
self.0.insert(var_id, matcher);
}
pub fn insert(&mut self, var_id: MetaVariableID, matcher: MetaVarMatcher<L>) {
self.0.insert(var_id, matcher);
}
}
impl<L: Language> Default for MetaVarMatchers<L> {
fn default() -> Self {
Self::new()
}
fn default() -> Self {
Self::new()
}
}
#[derive(Clone)]
pub enum MetaVarMatcher<L: Language> {
#[cfg(feature = "regex")]
/// A regex to filter matched metavar based on its textual content.
Regex(regex::Regex),
/// A pattern to filter matched metavar based on its AST tree shape.
Pattern(Pattern<L>),
/// A kind_id to filter matched metavar based on its ts-node kind
Kind(KindMatcher<L>),
#[cfg(feature = "regex")]
/// A regex to filter matched metavar based on its textual content.
Regex(regex::Regex),
/// A pattern to filter matched metavar based on its AST tree shape.
Pattern(Pattern<L>),
/// A kind_id to filter matched metavar based on its ts-node kind
Kind(KindMatcher<L>),
}
impl<L: Language> MetaVarMatcher<L> {
pub fn matches(&self, candidate: Node<L>) -> bool {
use crate::matcher::Matcher;
use MetaVarMatcher::*;
let mut env = MetaVarEnv::new();
match self {
#[cfg(feature = "regex")]
Regex(regexp) => regexp.is_match(&candidate.text()),
Pattern(p) => p.match_node_with_env(candidate, &mut env).is_some(),
Kind(k) => k.match_node_with_env(candidate, &mut env).is_some(),
}
pub fn matches(&self, candidate: Node<L>) -> bool {
use crate::matcher::Matcher;
use MetaVarMatcher::*;
let mut env = MetaVarEnv::new();
match self {
#[cfg(feature = "regex")]
Regex(regexp) => regexp.is_match(&candidate.text()),
Pattern(p) => p.match_node_with_env(candidate, &mut env).is_some(),
Kind(k) => k.match_node_with_env(candidate, &mut env).is_some(),
}
}
}
pub(crate) fn extract_meta_var(src: &str, meta_char: char) -> Option<MetaVariable> {
use MetaVariable::*;
let ellipsis: String = std::iter::repeat(meta_char).take(3).collect();
if src == ellipsis {
return Some(Ellipsis);
}
if let Some(trimmed) = src.strip_prefix(&ellipsis) {
if !trimmed.chars().all(is_valid_meta_var_char) {
return None;
}
if trimmed.starts_with('_') {
return Some(Ellipsis);
} else {
return Some(NamedEllipsis(trimmed.to_owned()));
}
}
if !src.starts_with(meta_char) {
return None;
}
let trimmed = &src[meta_char.len_utf8()..];
// $A or $_
use MetaVariable::*;
let ellipsis: String = std::iter::repeat(meta_char).take(3).collect();
if src == ellipsis {
return Some(Ellipsis);
}
if let Some(trimmed) = src.strip_prefix(&ellipsis) {
if !trimmed.chars().all(is_valid_meta_var_char) {
return None;
return None;
}
if trimmed.starts_with('_') {
Some(Anonymous)
return Some(Ellipsis);
} else {
Some(Named(trimmed.to_owned()))
return Some(NamedEllipsis(trimmed.to_owned()));
}
}
if !src.starts_with(meta_char) {
return None;
}
let trimmed = &src[meta_char.len_utf8()..];
// $A or $_
if !trimmed.chars().all(is_valid_meta_var_char) {
return None;
}
if trimmed.starts_with('_') {
Some(Anonymous)
} else {
Some(Named(trimmed.to_owned()))
}
}
/// Meta variable names may only contain uppercase ASCII letters and '_'.
fn is_valid_meta_var_char(c: char) -> bool {
  matches!(c, 'A'..='Z' | '_')
}
#[cfg(test)]
mod test {
use super::*;
use crate::language::Tsx;
use crate::Pattern;
use super::*;
use crate::language::Tsx;
use crate::Pattern;
fn extract_var(s: &str) -> Option<MetaVariable> {
extract_meta_var(s, '$')
}
#[test]
fn test_match_var() {
use MetaVariable::*;
assert_eq!(extract_var("$$$"), Some(Ellipsis));
assert_eq!(extract_var("$ABC"), Some(Named("ABC".into())));
assert_eq!(extract_var("$$$ABC"), Some(NamedEllipsis("ABC".into())));
assert_eq!(extract_var("$_"), Some(Anonymous));
assert_eq!(extract_var("abc"), None);
assert_eq!(extract_var("$abc"), None);
}
fn extract_var(s: &str) -> Option<MetaVariable> {
extract_meta_var(s, '$')
}
#[test]
fn test_match_var() {
use MetaVariable::*;
assert_eq!(extract_var("$$$"), Some(Ellipsis));
assert_eq!(extract_var("$ABC"), Some(Named("ABC".into())));
assert_eq!(extract_var("$$$ABC"), Some(NamedEllipsis("ABC".into())));
assert_eq!(extract_var("$_"), Some(Anonymous));
assert_eq!(extract_var("abc"), None);
assert_eq!(extract_var("$abc"), None);
}
fn match_constraints(pattern: &str, node: &str) -> bool {
let mut matchers = MetaVarMatchers(HashMap::new());
matchers.insert(
"A".to_string(),
MetaVarMatcher::Pattern(Pattern::new(pattern, Tsx)),
);
let mut env = MetaVarEnv::from_matchers(matchers);
let root = Tsx.ast_grep(node);
let node = root.root().child(0).unwrap().child(0).unwrap();
env.insert("A".to_string(), node);
env.match_constraints()
}
fn match_constraints(pattern: &str, node: &str) -> bool {
let mut matchers = MetaVarMatchers(HashMap::new());
matchers.insert(
"A".to_string(),
MetaVarMatcher::Pattern(Pattern::new(pattern, Tsx)),
);
let mut env = MetaVarEnv::from_matchers(matchers);
let root = Tsx.ast_grep(node);
let node = root.root().child(0).unwrap().child(0).unwrap();
env.insert("A".to_string(), node);
env.match_constraints()
}
#[test]
fn test_non_ascii_meta_var() {
  // The meta-variable sigil is configurable; exercise a non-ASCII one.
  let extract = |s| extract_meta_var(s, 'µ');
  use MetaVariable::*;
  assert_eq!(extract("µµµ"), Some(Ellipsis));
  assert_eq!(extract("µABC"), Some(Named("ABC".into())));
  assert_eq!(extract("µµµABC"), Some(NamedEllipsis("ABC".into())));
  assert_eq!(extract("µ_"), Some(Anonymous));
  assert_eq!(extract("abc"), None);
  assert_eq!(extract("µabc"), None);
}
#[test]
fn test_match_constraints() {
  // Identical pattern and node should satisfy the constraint.
  assert!(match_constraints("a + b", "a + b"));
}
#[test]
fn test_match_not_constraints() {
  // A different operator must fail the constraint.
  assert!(!match_constraints("a - b", "a + b"));
}
}
+367 -365
View File
@@ -9,448 +9,450 @@ use std::borrow::Cow;
/// Note: Root is generic against [`Language`](crate::language::Language)
#[derive(Clone)]
pub struct Root<L: Language> {
pub(crate) inner: tree_sitter::Tree,
pub(crate) source: String,
pub(crate) lang: L,
pub(crate) inner: tree_sitter::Tree,
pub(crate) source: String,
pub(crate) lang: L,
}
impl<L: Language> Root<L> {
pub fn new(src: &str, lang: L) -> Self {
Self {
inner: parse(src, None, lang.get_ts_language()).unwrap(),
source: src.into(),
lang,
}
}
// extract non generic implementation to reduce code size
pub fn do_edit(&mut self, edit: Edit) {
let input = unsafe { self.source.as_mut_vec() };
let input_edit = perform_edit(&mut self.inner, input, &edit);
self.inner.edit(&input_edit);
self.inner = parse(&self.source, Some(&self.inner), self.lang.get_ts_language()).unwrap();
pub fn new(src: &str, lang: L) -> Self {
Self {
inner: parse(src, None, lang.get_ts_language()).unwrap(),
source: src.into(),
lang,
}
}
// extract non generic implementation to reduce code size
pub fn do_edit(&mut self, edit: Edit) {
let input = unsafe { self.source.as_mut_vec() };
let input_edit = perform_edit(&mut self.inner, input, &edit);
self.inner.edit(&input_edit);
self.inner = parse(&self.source, Some(&self.inner), self.lang.get_ts_language()).unwrap();
}
pub fn root(&self) -> Node<L> {
Node {
inner: self.inner.root_node(),
root: self,
}
pub fn root(&self) -> Node<L> {
Node {
inner: self.inner.root_node(),
root: self,
}
}
}
// the lifetime r represents root
#[derive(Clone)]
pub struct Node<'r, L: Language> {
pub(crate) inner: tree_sitter::Node<'r>,
pub(crate) root: &'r Root<L>,
pub(crate) inner: tree_sitter::Node<'r>,
pub(crate) root: &'r Root<L>,
}
pub type KindId = u16;
struct NodeWalker<'tree, L: Language> {
cursor: tree_sitter::TreeCursor<'tree>,
root: &'tree Root<L>,
count: usize,
cursor: tree_sitter::TreeCursor<'tree>,
root: &'tree Root<L>,
count: usize,
}
impl<'tree, L: Language> Iterator for NodeWalker<'tree, L> {
type Item = Node<'tree, L>;
fn next(&mut self) -> Option<Self::Item> {
if self.count == 0 {
return None;
}
let ret = Some(Node {
inner: self.cursor.node(),
root: self.root,
});
self.cursor.goto_next_sibling();
self.count -= 1;
ret
type Item = Node<'tree, L>;
fn next(&mut self) -> Option<Self::Item> {
if self.count == 0 {
return None;
}
let ret = Some(Node {
inner: self.cursor.node(),
root: self.root,
});
self.cursor.goto_next_sibling();
self.count -= 1;
ret
}
}
impl<'tree, L: Language> ExactSizeIterator for NodeWalker<'tree, L> {
fn len(&self) -> usize {
self.count
}
fn len(&self) -> usize {
self.count
}
}
pub struct Dfs<'tree, L: Language> {
cursor: tree_sitter::TreeCursor<'tree>,
root: &'tree Root<L>,
// record the starting node, if we return back to starting point
// we should terminate the dfs.
start_id: Option<usize>,
cursor: tree_sitter::TreeCursor<'tree>,
root: &'tree Root<L>,
// record the starting node, if we return back to starting point
// we should terminate the dfs.
start_id: Option<usize>,
}
impl<'tree, L: Language> Dfs<'tree, L> {
fn new(node: &Node<'tree, L>) -> Self {
Self {
cursor: node.inner.walk(),
root: node.root,
start_id: Some(node.inner.id()),
}
fn new(node: &Node<'tree, L>) -> Self {
Self {
cursor: node.inner.walk(),
root: node.root,
start_id: Some(node.inner.id()),
}
}
}
impl<'tree, L: Language> Iterator for Dfs<'tree, L> {
type Item = Node<'tree, L>;
fn next(&mut self) -> Option<Self::Item> {
let start = self.start_id?;
let cursor = &mut self.cursor;
let inner = cursor.node();
let ret = Some(Node {
inner,
root: self.root,
});
if cursor.goto_first_child() {
return ret;
}
while cursor.node().id() != start {
if cursor.goto_next_sibling() {
return ret;
}
cursor.goto_parent();
}
self.start_id = None;
ret
type Item = Node<'tree, L>;
fn next(&mut self) -> Option<Self::Item> {
let start = self.start_id?;
let cursor = &mut self.cursor;
let inner = cursor.node();
let ret = Some(Node {
inner,
root: self.root,
});
if cursor.goto_first_child() {
return ret;
}
while cursor.node().id() != start {
if cursor.goto_next_sibling() {
return ret;
}
cursor.goto_parent();
}
self.start_id = None;
ret
}
}
// internal API
impl<'r, L: Language> Node<'r, L> {
pub fn is_leaf(&self) -> bool {
self.inner.child_count() == 0
}
pub fn kind(&self) -> Cow<str> {
self.inner.kind()
}
pub fn kind_id(&self) -> KindId {
self.inner.kind_id()
}
pub fn is_leaf(&self) -> bool {
self.inner.child_count() == 0
}
pub fn kind(&self) -> Cow<str> {
self.inner.kind()
}
pub fn kind_id(&self) -> KindId {
self.inner.kind_id()
}
pub fn range(&self) -> std::ops::Range<usize> {
(self.inner.start_byte() as usize)..(self.inner.end_byte() as usize)
}
pub fn start_pos(&self) -> (usize, usize) {
let pos = self.inner.start_position();
(pos.row() as usize, pos.column() as usize)
}
pub fn end_pos(&self) -> (usize, usize) {
let pos = self.inner.end_position();
(pos.row() as usize, pos.column() as usize)
}
pub fn text(&self) -> Cow<'r, str> {
self.inner
.utf8_text(self.root.source.as_bytes())
.expect("invalid source text encoding")
}
pub fn to_sexp(&self) -> Cow<'_, str> {
self.inner.to_sexp()
}
pub fn range(&self) -> std::ops::Range<usize> {
(self.inner.start_byte() as usize)..(self.inner.end_byte() as usize)
}
pub fn start_pos(&self) -> (usize, usize) {
let pos = self.inner.start_position();
(pos.row() as usize, pos.column() as usize)
}
pub fn end_pos(&self) -> (usize, usize) {
let pos = self.inner.end_position();
(pos.row() as usize, pos.column() as usize)
}
pub fn text(&self) -> Cow<'r, str> {
self
.inner
.utf8_text(self.root.source.as_bytes())
.expect("invalid source text encoding")
}
pub fn to_sexp(&self) -> Cow<'_, str> {
self.inner.to_sexp()
}
pub fn display_context(&self, context_lines: usize) -> DisplayContext<'r> {
let bytes = self.root.source.as_bytes();
let start = self.inner.start_byte() as usize;
let end = self.inner.end_byte() as usize;
let (mut leading, mut trailing) = (start, end);
let mut lines_before = context_lines + 1;
while leading > 0 {
if bytes[leading - 1] == b'\n' {
lines_before -= 1;
if lines_before == 0 {
break;
}
}
leading -= 1;
}
// tree-sitter will append line ending to source so trailing can be out of bound
trailing = trailing.min(bytes.len() - 1);
let mut lines_after = context_lines + 1;
while trailing < bytes.len() - 1 {
if bytes[trailing + 1] == b'\n' {
lines_after -= 1;
if lines_after == 0 {
break;
}
}
trailing += 1;
}
DisplayContext {
matched: self.text(),
leading: &self.root.source[leading..start],
trailing: &self.root.source[end..=trailing],
start_line: self.inner.start_position().row() as usize + 1,
pub fn display_context(&self, context_lines: usize) -> DisplayContext<'r> {
let bytes = self.root.source.as_bytes();
let start = self.inner.start_byte() as usize;
let end = self.inner.end_byte() as usize;
let (mut leading, mut trailing) = (start, end);
let mut lines_before = context_lines + 1;
while leading > 0 {
if bytes[leading - 1] == b'\n' {
lines_before -= 1;
if lines_before == 0 {
break;
}
}
leading -= 1;
}
// tree-sitter will append line ending to source so trailing can be out of bound
trailing = trailing.min(bytes.len() - 1);
let mut lines_after = context_lines + 1;
while trailing < bytes.len() - 1 {
if bytes[trailing + 1] == b'\n' {
lines_after -= 1;
if lines_after == 0 {
break;
}
}
trailing += 1;
}
DisplayContext {
matched: self.text(),
leading: &self.root.source[leading..start],
trailing: &self.root.source[end..=trailing],
start_line: self.inner.start_position().row() as usize + 1,
}
}
}
/**
 * Corresponds to inside/has/precedes/follows
 */
impl<'r, L: Language> Node<'r, L> {
  /// True when the node itself matches `m`.
  pub fn matches<M: Matcher<L>>(&self, m: M) -> bool {
    m.match_node(self.clone()).is_some()
  }
  /// True when any ancestor matches `m`.
  pub fn inside<M: Matcher<L>>(&self, m: M) -> bool {
    self.ancestors().find_map(|n| m.match_node(n)).is_some()
  }
  /// True when any descendant (excluding self) matches `m`.
  pub fn has<M: Matcher<L>>(&self, m: M) -> bool {
    self.dfs().skip(1).find_map(|n| m.match_node(n)).is_some()
  }
  /// True when any later sibling matches `m`.
  pub fn precedes<M: Matcher<L>>(&self, m: M) -> bool {
    self.next_all().find_map(|n| m.match_node(n)).is_some()
  }
  /// True when any earlier sibling matches `m`.
  pub fn follows<M: Matcher<L>>(&self, m: M) -> bool {
    self.prev_all().find_map(|n| m.match_node(n)).is_some()
  }
}
pub struct DisplayContext<'r> {
  /// content for the matched node
  pub matched: Cow<'r, str>,
  /// content before the matched node
  pub leading: &'r str,
  /// content after the matched node
  pub trailing: &'r str,
  /// start line of the matched node
  pub start_line: usize,
}
/// tree traversal API
impl<'r, L: Language> Node<'r, L> {
pub fn children<'s>(&'s self) -> impl ExactSizeIterator<Item = Node<'r, L>> + 's {
let mut cursor = self.inner.walk();
cursor.goto_first_child();
NodeWalker {
cursor,
root: self.root,
count: self.inner.child_count() as usize,
pub fn children<'s>(&'s self) -> impl ExactSizeIterator<Item = Node<'r, L>> + 's {
let mut cursor = self.inner.walk();
cursor.goto_first_child();
NodeWalker {
cursor,
root: self.root,
count: self.inner.child_count() as usize,
}
}
pub fn dfs<'s>(&'s self) -> Dfs<'r, L> {
Dfs::new(self)
}
#[must_use]
pub fn find<M: Matcher<L>>(&self, pat: M) -> Option<Node<'r, L>> {
pat.find_node(self.clone()).map(Node::from)
}
pub fn find_all<M: Matcher<L>>(&self, pat: M) -> impl Iterator<Item = NodeMatch<'r, L>> {
pat.find_all_nodes(self.clone())
}
pub fn field(&self, name: &str) -> Option<Self> {
let mut cursor = self.inner.walk();
let inner = self
.inner
.children_by_field_name(name, &mut cursor)
.next()?;
Some(Node {
inner,
root: self.root,
})
}
pub fn field_children(&self, name: &str) -> impl Iterator<Item = Node<'r, L>> {
let field_id = self
.root
.lang
.get_ts_language()
.field_id_for_name(name)
.unwrap_or(0);
let root = self.root;
let mut cursor = self.inner.walk();
cursor.goto_first_child();
let mut done = false;
std::iter::from_fn(move || {
if done {
return None;
}
while cursor.field_id() != Some(field_id) {
if !cursor.goto_next_sibling() {
return None;
}
}
}
let inner = cursor.node();
if !cursor.goto_next_sibling() {
done = true;
}
Some(Node { inner, root })
})
}
pub fn dfs<'s>(&'s self) -> Dfs<'r, L> {
Dfs::new(self)
}
#[must_use]
pub fn parent(&self) -> Option<Self> {
let inner = self.inner.parent()?;
Some(Node {
inner,
root: self.root,
})
}
#[must_use]
pub fn find<M: Matcher<L>>(&self, pat: M) -> Option<Node<'r, L>> {
pat.find_node(self.clone()).map(Node::from)
}
#[must_use]
pub fn child(&self, nth: usize) -> Option<Self> {
// TODO: support usize
let inner = self.inner.child(nth as u32)?;
Some(Node {
inner,
root: self.root,
})
}
pub fn find_all<M: Matcher<L>>(&self, pat: M) -> impl Iterator<Item = NodeMatch<'r, L>> {
pat.find_all_nodes(self.clone())
}
pub fn field(&self, name: &str) -> Option<Self> {
let mut cursor = self.inner.walk();
let inner = self
.inner
.children_by_field_name(name, &mut cursor)
.next()?;
pub fn ancestors(&self) -> impl Iterator<Item = Node<'r, L>> + '_ {
let mut parent = self.inner.parent();
std::iter::from_fn(move || {
let inner = parent.clone()?;
let ret = Some(Node {
inner: inner.clone(),
root: self.root,
});
parent = inner.parent();
ret
})
}
#[must_use]
pub fn next(&self) -> Option<Self> {
let inner = self.inner.next_sibling()?;
Some(Node {
inner,
root: self.root,
})
}
pub fn next_all(&self) -> impl Iterator<Item = Node<'r, L>> + '_ {
let mut cursor = self.inner.walk();
let root = self.root;
std::iter::from_fn(move || {
if cursor.goto_next_sibling() {
Some(Node {
inner,
root: self.root,
inner: cursor.node(),
root,
})
}
pub fn field_children(&self, name: &str) -> impl Iterator<Item = Node<'r, L>> {
let field_id = self
.root
.lang
.get_ts_language()
.field_id_for_name(name)
.unwrap_or(0);
let root = self.root;
let mut cursor = self.inner.walk();
cursor.goto_first_child();
let mut done = false;
std::iter::from_fn(move || {
if done {
return None;
}
while cursor.field_id() != Some(field_id) {
if !cursor.goto_next_sibling() {
return None;
}
}
let inner = cursor.node();
if !cursor.goto_next_sibling() {
done = true;
}
Some(Node { inner, root })
})
}
#[must_use]
pub fn parent(&self) -> Option<Self> {
let inner = self.inner.parent()?;
Some(Node {
inner,
root: self.root,
})
}
#[must_use]
pub fn child(&self, nth: usize) -> Option<Self> {
// TODO: support usize
let inner = self.inner.child(nth as u32)?;
Some(Node {
inner,
root: self.root,
})
}
pub fn ancestors(&self) -> impl Iterator<Item = Node<'r, L>> + '_ {
let mut parent = self.inner.parent();
std::iter::from_fn(move || {
let inner = parent.clone()?;
let ret = Some(Node {
inner: inner.clone(),
root: self.root,
});
parent = inner.parent();
ret
})
}
#[must_use]
pub fn next(&self) -> Option<Self> {
let inner = self.inner.next_sibling()?;
Some(Node {
inner,
root: self.root,
})
}
pub fn next_all(&self) -> impl Iterator<Item = Node<'r, L>> + '_ {
let mut cursor = self.inner.walk();
let root = self.root;
std::iter::from_fn(move || {
if cursor.goto_next_sibling() {
Some(Node {
inner: cursor.node(),
root,
})
} else {
None
}
})
}
#[must_use]
pub fn prev(&self) -> Option<Node<'r, L>> {
let inner = self.inner.prev_sibling()?;
Some(Node {
inner,
root: self.root,
})
}
pub fn prev_all(&self) -> impl Iterator<Item = Node<'r, L>> + '_ {
let root = self.root;
let mut inner = self.inner.clone();
std::iter::from_fn(move || {
let prev = inner.prev_sibling()?;
inner = prev.clone();
Some(Node {
inner: inner.clone(),
root,
})
})
}
} else {
None
}
})
}
#[must_use]
pub fn prev(&self) -> Option<Node<'r, L>> {
let inner = self.inner.prev_sibling()?;
Some(Node {
inner,
root: self.root,
})
}
pub fn prev_all(&self) -> impl Iterator<Item = Node<'r, L>> + '_ {
let root = self.root;
let mut inner = self.inner.clone();
std::iter::from_fn(move || {
let prev = inner.prev_sibling()?;
inner = prev.clone();
Some(Node {
inner: inner.clone(),
root,
})
})
}
}
/// Tree manipulation API
impl<'r, L: Language> Node<'r, L> {
pub fn replace<M: Matcher<L>, R: Replacer<L>>(&self, matcher: M, replacer: R) -> Option<Edit> {
let mut env = matcher.get_meta_var_env();
let node = matcher.find_node_with_env(self.clone(), &mut env)?;
let inner = node.inner;
pub fn replace<M: Matcher<L>, R: Replacer<L>>(&self, matcher: M, replacer: R) -> Option<Edit> {
let mut env = matcher.get_meta_var_env();
let node = matcher.find_node_with_env(self.clone(), &mut env)?;
let inner = node.inner;
let position = inner.start_byte();
// instead of using start_byte/end_byte, ignore trivia like semicolon ;
let named_cnt = inner.named_child_count();
let end = inner.named_child(named_cnt - 1).unwrap().end_byte();
let deleted_length = end - position;
let inserted_text = replacer.generate_replacement(&env, self.root.lang.clone());
Some(Edit {
position: position as usize,
deleted_length: deleted_length as usize,
inserted_text,
})
}
pub fn replace_all<M: Matcher<L>, R: Replacer<L>>(&self, matcher: M, replacer: R) -> Vec<Edit> {
self
.find_all(matcher)
.map(|matched| {
let env = matched.get_env();
let inner = &matched.inner;
let position = inner.start_byte();
// instead of using start_byte/end_byte, ignore trivia like semicolon ;
let named_cnt = inner.named_child_count();
let end = inner.named_child(named_cnt - 1).unwrap().end_byte();
let deleted_length = end - position;
let inserted_text = replacer.generate_replacement(&env, self.root.lang.clone());
Some(Edit {
position: position as usize,
deleted_length: deleted_length as usize,
inserted_text,
})
}
pub fn replace_all<M: Matcher<L>, R: Replacer<L>>(&self, matcher: M, replacer: R) -> Vec<Edit> {
self.find_all(matcher)
.map(|matched| {
let env = matched.get_env();
let inner = &matched.inner;
let position = inner.start_byte();
// instead of using start_byte/end_byte, ignore trivia like semicolon ;
let named_cnt = inner.named_child_count();
let end = inner.named_child(named_cnt - 1).unwrap().end_byte();
let deleted_length = end - position;
let inserted_text = replacer.generate_replacement(env, self.root.lang.clone());
Edit {
position: position as usize,
deleted_length: deleted_length as usize,
inserted_text,
}
})
.collect()
}
pub fn replace_by(&self) {
todo!()
}
pub fn after(&self) {
todo!()
}
pub fn before(&self) {
todo!()
}
pub fn append(&self) {
todo!()
}
pub fn prepend(&self) {
todo!()
}
pub fn empty(&self) {
todo!()
}
pub fn remove(&self) {
todo!()
}
let inserted_text = replacer.generate_replacement(env, self.root.lang.clone());
Edit {
position: position as usize,
deleted_length: deleted_length as usize,
inserted_text,
}
})
.collect()
}
pub fn replace_by(&self) {
todo!()
}
pub fn after(&self) {
todo!()
}
pub fn before(&self) {
todo!()
}
pub fn append(&self) {
todo!()
}
pub fn prepend(&self) {
todo!()
}
pub fn empty(&self) {
todo!()
}
pub fn remove(&self) {
todo!()
}
}
#[cfg(test)]
mod test {
  use crate::language::{Language, Tsx};
  #[test]
  fn test_is_leaf() {
    let root = Tsx.ast_grep("let a = 123");
    let node = root.root();
    assert!(!node.is_leaf());
  }
  #[test]
  fn test_children() {
    let root = Tsx.ast_grep("let a = 123");
    let node = root.root();
    let children: Vec<_> = node.children().collect();
    assert_eq!(children.len(), 1);
    let texts: Vec<_> = children[0]
      .children()
      .map(|c| c.text().to_string())
      .collect();
    assert_eq!(texts, vec!["let", "a = 123"]);
  }
  #[test]
  fn test_display_context() {
    // display context should not panic
    let s = "i()";
    assert_eq!(s.len(), 3);
    let root = Tsx.ast_grep(s);
    let node = root.root();
    assert_eq!(node.display_context(0).trailing.len(), 0);
  }
}
+262 -258
View File
@@ -5,58 +5,59 @@ use crate::Node;
use std::marker::PhantomData;
pub struct And<L: Language, P1: Matcher<L>, P2: Matcher<L>> {
pattern1: P1,
pattern2: P2,
lang: PhantomData<L>,
pattern1: P1,
pattern2: P2,
lang: PhantomData<L>,
}
impl<L: Language, P1, P2> PositiveMatcher<L> for And<L, P1, P2>
where
P1: PositiveMatcher<L>,
P2: Matcher<L>,
P1: PositiveMatcher<L>,
P2: Matcher<L>,
{
}
impl<L: Language, P1, P2> Matcher<L> for And<L, P1, P2>
where
P1: Matcher<L>,
P2: Matcher<L>,
P1: Matcher<L>,
P2: Matcher<L>,
{
fn match_node_with_env<'tree>(
&self,
node: Node<'tree, L>,
env: &mut MetaVarEnv<'tree, L>,
) -> Option<Node<'tree, L>> {
let node = self.pattern1.match_node_with_env(node, env)?;
self.pattern2.match_node_with_env(node, env)
}
fn match_node_with_env<'tree>(
&self,
node: Node<'tree, L>,
env: &mut MetaVarEnv<'tree, L>,
) -> Option<Node<'tree, L>> {
let node = self.pattern1.match_node_with_env(node, env)?;
self.pattern2.match_node_with_env(node, env)
}
}
pub struct All<L: Language, P: Matcher<L>> {
patterns: Vec<P>,
lang: PhantomData<L>,
patterns: Vec<P>,
lang: PhantomData<L>,
}
impl<L: Language, P: Matcher<L>> All<L, P> {
pub fn new<PS: IntoIterator<Item = P>>(patterns: PS) -> Self {
Self {
patterns: patterns.into_iter().collect(),
lang: PhantomData,
}
pub fn new<PS: IntoIterator<Item = P>>(patterns: PS) -> Self {
Self {
patterns: patterns.into_iter().collect(),
lang: PhantomData,
}
}
}
impl<L: Language, P: Matcher<L>> Matcher<L> for All<L, P> {
fn match_node_with_env<'tree>(
&self,
node: Node<'tree, L>,
env: &mut MetaVarEnv<'tree, L>,
) -> Option<Node<'tree, L>> {
self.patterns
.iter()
.all(|p| p.match_node_with_env(node.clone(), env).is_some())
.then_some(node)
}
fn match_node_with_env<'tree>(
&self,
node: Node<'tree, L>,
env: &mut MetaVarEnv<'tree, L>,
) -> Option<Node<'tree, L>> {
self
.patterns
.iter()
.all(|p| p.match_node_with_env(node.clone(), env).is_some())
.then_some(node)
}
}
// TODO: this is not correct. We don't need every sub pattern to be positive
@@ -64,320 +65,323 @@ impl<L: Language, P: Matcher<L>> Matcher<L> for All<L, P> {
impl<L: Language, M: Matcher<L>> PositiveMatcher<L> for All<L, M> {}
pub struct Any<L, P> {
patterns: Vec<P>,
lang: PhantomData<L>,
patterns: Vec<P>,
lang: PhantomData<L>,
}
impl<L: Language, P: Matcher<L>> Any<L, P> {
pub fn new<PS: IntoIterator<Item = P>>(patterns: PS) -> Self {
Self {
patterns: patterns.into_iter().collect(),
lang: PhantomData,
}
pub fn new<PS: IntoIterator<Item = P>>(patterns: PS) -> Self {
Self {
patterns: patterns.into_iter().collect(),
lang: PhantomData,
}
}
}
impl<L: Language, M: Matcher<L>> Matcher<L> for Any<L, M> {
fn match_node_with_env<'tree>(
&self,
node: Node<'tree, L>,
env: &mut MetaVarEnv<'tree, L>,
) -> Option<Node<'tree, L>> {
self.patterns
.iter()
.find_map(|p| p.match_node_with_env(node.clone(), env))
.map(|_| node)
}
fn match_node_with_env<'tree>(
&self,
node: Node<'tree, L>,
env: &mut MetaVarEnv<'tree, L>,
) -> Option<Node<'tree, L>> {
self
.patterns
.iter()
.find_map(|p| p.match_node_with_env(node.clone(), env))
.map(|_| node)
}
}
impl<L: Language, P: PositiveMatcher<L>> PositiveMatcher<L> for Any<L, P> {}
pub struct Or<L: Language, P1: Matcher<L>, P2: Matcher<L>> {
pattern1: P1,
pattern2: P2,
lang: PhantomData<L>,
pattern1: P1,
pattern2: P2,
lang: PhantomData<L>,
}
impl<L, P1, P2> Matcher<L> for Or<L, P1, P2>
where
L: Language,
P1: Matcher<L>,
P2: Matcher<L>,
L: Language,
P1: Matcher<L>,
P2: Matcher<L>,
{
fn match_node_with_env<'tree>(
&self,
node: Node<'tree, L>,
env: &mut MetaVarEnv<'tree, L>,
) -> Option<Node<'tree, L>> {
self.pattern1
.match_node_with_env(node.clone(), env)
.or_else(|| self.pattern2.match_node_with_env(node, env))
}
fn match_node_with_env<'tree>(
&self,
node: Node<'tree, L>,
env: &mut MetaVarEnv<'tree, L>,
) -> Option<Node<'tree, L>> {
self
.pattern1
.match_node_with_env(node.clone(), env)
.or_else(|| self.pattern2.match_node_with_env(node, env))
}
}
impl<L, P1, P2> PositiveMatcher<L> for Or<L, P1, P2>
where
L: Language,
P1: PositiveMatcher<L>,
P2: PositiveMatcher<L>,
L: Language,
P1: PositiveMatcher<L>,
P2: PositiveMatcher<L>,
{
}
pub struct Not<L: Language, M: Matcher<L>> {
not: M,
lang: PhantomData<L>,
not: M,
lang: PhantomData<L>,
}
impl<L: Language, M: Matcher<L>> Not<L, M> {
pub fn new(not: M) -> Self {
Self {
not,
lang: PhantomData,
}
pub fn new(not: M) -> Self {
Self {
not,
lang: PhantomData,
}
}
}
impl<L, P> Matcher<L> for Not<L, P>
where
L: Language,
P: Matcher<L>,
L: Language,
P: Matcher<L>,
{
fn match_node_with_env<'tree>(
&self,
node: Node<'tree, L>,
env: &mut MetaVarEnv<'tree, L>,
) -> Option<Node<'tree, L>> {
self.not
.match_node_with_env(node.clone(), env)
.xor(Some(node))
}
fn match_node_with_env<'tree>(
&self,
node: Node<'tree, L>,
env: &mut MetaVarEnv<'tree, L>,
) -> Option<Node<'tree, L>> {
self
.not
.match_node_with_env(node.clone(), env)
.xor(Some(node))
}
}
#[derive(Clone)]
pub struct Op<L: Language, M: Matcher<L>> {
inner: M,
meta_vars: MetaVarMatchers<L>,
inner: M,
meta_vars: MetaVarMatchers<L>,
}
impl<L, M> Matcher<L> for Op<L, M>
where
L: Language,
M: Matcher<L>,
L: Language,
M: Matcher<L>,
{
fn match_node_with_env<'tree>(
&self,
node: Node<'tree, L>,
env: &mut MetaVarEnv<'tree, L>,
) -> Option<Node<'tree, L>> {
self.inner.match_node_with_env(node, env)
}
fn match_node_with_env<'tree>(
&self,
node: Node<'tree, L>,
env: &mut MetaVarEnv<'tree, L>,
) -> Option<Node<'tree, L>> {
self.inner.match_node_with_env(node, env)
}
fn get_meta_var_matchers(&self) -> MetaVarMatchers<L> {
// TODO: avoid clone here
self.meta_vars.clone()
}
fn get_meta_var_matchers(&self) -> MetaVarMatchers<L> {
// TODO: avoid clone here
self.meta_vars.clone()
}
}
impl<L, P> PositiveMatcher<L> for Op<L, P>
where
L: Language,
P: PositiveMatcher<L>,
L: Language,
P: PositiveMatcher<L>,
{
}
pub struct Predicate<F> {
func: F,
func: F,
}
impl<L, F> Matcher<L> for Predicate<F>
where
L: Language,
F: for<'tree> Fn(Node<'tree, L>) -> bool,
L: Language,
F: for<'tree> Fn(Node<'tree, L>) -> bool,
{
fn match_node_with_env<'tree>(
&self,
node: Node<'tree, L>,
_env: &mut MetaVarEnv<'tree, L>,
) -> Option<Node<'tree, L>> {
(self.func)(node.clone()).then_some(node)
}
fn match_node_with_env<'tree>(
&self,
node: Node<'tree, L>,
_env: &mut MetaVarEnv<'tree, L>,
) -> Option<Node<'tree, L>> {
(self.func)(node.clone()).then_some(node)
}
}
impl<L: Language, M: Matcher<L>> Op<L, M> {
pub fn not(pattern: M) -> Not<L, M> {
Not {
not: pattern,
lang: PhantomData,
}
pub fn not(pattern: M) -> Not<L, M> {
Not {
not: pattern,
lang: PhantomData,
}
}
pub fn func<F>(func: F) -> Predicate<F>
where
F: for<'tree> Fn(Node<'tree, L>) -> bool,
{
Predicate { func }
}
pub fn func<F>(func: F) -> Predicate<F>
where
F: for<'tree> Fn(Node<'tree, L>) -> bool,
{
Predicate { func }
}
pub fn with_meta_var(&mut self, var_id: String, matcher: MetaVarMatcher<L>) -> &mut Self {
self.meta_vars.insert(var_id, matcher);
self
}
pub fn with_meta_var(&mut self, var_id: String, matcher: MetaVarMatcher<L>) -> &mut Self {
self.meta_vars.insert(var_id, matcher);
self
}
}
impl<L: Language, M: PositiveMatcher<L>> Op<L, M> {
pub fn every(pattern: M) -> Op<L, And<L, M, MatchAll>> {
Op {
inner: And {
pattern1: pattern,
pattern2: MatchAll,
lang: PhantomData,
},
meta_vars: MetaVarMatchers::new(),
}
pub fn every(pattern: M) -> Op<L, And<L, M, MatchAll>> {
Op {
inner: And {
pattern1: pattern,
pattern2: MatchAll,
lang: PhantomData,
},
meta_vars: MetaVarMatchers::new(),
}
pub fn either(pattern: M) -> Op<L, Or<L, M, MatchNone>> {
Op {
inner: Or {
pattern1: pattern,
pattern2: MatchNone,
lang: PhantomData,
},
meta_vars: MetaVarMatchers::new(),
}
}
pub fn either(pattern: M) -> Op<L, Or<L, M, MatchNone>> {
Op {
inner: Or {
pattern1: pattern,
pattern2: MatchNone,
lang: PhantomData,
},
meta_vars: MetaVarMatchers::new(),
}
}
pub fn new(matcher: M) -> Op<L, M> {
Self {
inner: matcher,
meta_vars: MetaVarMatchers::new(),
}
pub fn new(matcher: M) -> Op<L, M> {
Self {
inner: matcher,
meta_vars: MetaVarMatchers::new(),
}
}
}
type NestedAnd<L, M, N, O> = And<L, And<L, M, N>, O>;
impl<L: Language, M: Matcher<L>, N: Matcher<L>> Op<L, And<L, M, N>> {
pub fn and<O: Matcher<L>>(self, other: O) -> Op<L, NestedAnd<L, M, N, O>> {
Op {
inner: And {
pattern1: self.inner,
pattern2: other,
lang: PhantomData,
},
meta_vars: self.meta_vars,
}
pub fn and<O: Matcher<L>>(self, other: O) -> Op<L, NestedAnd<L, M, N, O>> {
Op {
inner: And {
pattern1: self.inner,
pattern2: other,
lang: PhantomData,
},
meta_vars: self.meta_vars,
}
}
}
type NestedOr<L, M, N, O> = Or<L, Or<L, M, N>, O>;
impl<L: Language, M: Matcher<L>, N: Matcher<L>> Op<L, Or<L, M, N>> {
pub fn or<O: Matcher<L>>(self, other: O) -> Op<L, NestedOr<L, M, N, O>> {
Op {
inner: Or {
pattern1: self.inner,
pattern2: other,
lang: PhantomData,
},
meta_vars: self.meta_vars,
}
pub fn or<O: Matcher<L>>(self, other: O) -> Op<L, NestedOr<L, M, N, O>> {
Op {
inner: Or {
pattern1: self.inner,
pattern2: other,
lang: PhantomData,
},
meta_vars: self.meta_vars,
}
}
}
#[cfg(test)]
mod test {
  use super::*;
  use crate::language::Tsx;
  use crate::Root;
  /// Assert that `matcher` matches somewhere in `code`.
  fn test_find(matcher: &impl Matcher<Tsx>, code: &str) {
    let node = Root::new(code, Tsx);
    assert!(matcher.find_node(node.root()).is_some());
  }
  /// Assert that `matcher` matches nowhere in `code`.
  fn test_not_find(matcher: &impl Matcher<Tsx>, code: &str) {
    let node = Root::new(code, Tsx);
    assert!(matcher.find_node(node.root()).is_none());
  }
  /// Collect the text of every match of `matcher` in `code`.
  fn find_all(matcher: impl Matcher<Tsx>, code: &str) -> Vec<String> {
    let node = Root::new(code, Tsx);
    matcher
      .find_all_nodes(node.root())
      .map(|n| n.text().to_string())
      .collect()
  }
  #[test]
  fn test_or() {
    let matcher = Or {
      pattern1: "let a = 1",
      pattern2: "const b = 2",
      lang: PhantomData,
    };
    test_find(&matcher, "let a = 1");
    test_find(&matcher, "const b = 2");
    test_not_find(&matcher, "let a = 2");
    test_not_find(&matcher, "const a = 1");
    test_not_find(&matcher, "let b = 2");
    test_not_find(&matcher, "const b = 1");
  }
  #[test]
  fn test_not() {
    let matcher = Not {
      not: "let a = 1",
      lang: PhantomData,
    };
    test_find(&matcher, "const b = 2");
  }
  #[test]
  fn test_and() {
    let matcher = And {
      pattern1: "let a = $_",
      pattern2: Not {
        not: "let a = 123",
        lang: PhantomData,
      },
      lang: PhantomData,
    };
    test_find(&matcher, "let a = 233");
    test_find(&matcher, "let a = 456");
    test_not_find(&matcher, "let a = 123");
  }
  #[test]
  fn test_api_and() {
    let matcher = Op::every("let a = $_").and(Op::not("let a = 123"));
    test_find(&matcher, "let a = 233");
    test_find(&matcher, "let a = 456");
    test_not_find(&matcher, "let a = 123");
  }
  #[test]
  fn test_api_or() {
    let matcher = Op::either("let a = 1").or("const b = 2");
    test_find(&matcher, "let a = 1");
    test_find(&matcher, "const b = 2");
    test_not_find(&matcher, "let a = 2");
    test_not_find(&matcher, "const a = 1");
    test_not_find(&matcher, "let b = 2");
    test_not_find(&matcher, "const b = 1");
  }
  #[test]
  fn test_multiple_match() {
    let sequential = find_all("$A + b", "let f = () => a + b; let ff = () => c + b");
    assert_eq!(sequential.len(), 2);
    let nested = find_all(
      "function $A() { $$$ }",
      "function a() { function b() { b } }",
    );
    assert_eq!(nested.len(), 2);
  }
  #[test]
  fn test_multiple_match_order() {
    let ret = find_all(
      "$A + b",
      "let f = () => () => () => a + b; let ff = () => c + b",
    );
    assert_eq!(ret, ["a + b", "c + b"], "should match source code order");
  }
  // TODO add test case for func
}
+144 -144
View File
@@ -5,179 +5,179 @@ use crate::{meta_var::MetaVarEnv, Node, Root};
#[derive(Clone)]
pub struct Pattern<L: Language> {
pub root: Root<L>,
selector: Option<KindMatcher<L>>,
pub root: Root<L>,
selector: Option<KindMatcher<L>>,
}
impl<L: Language> Pattern<L> {
pub fn new(src: &str, lang: L) -> Self {
let processed = lang.pre_process_pattern(src);
let root = Root::new(&processed, lang);
let goal = root.root();
if goal.inner.child_count() != 1 {
todo!("multi-children pattern is not supported yet.")
}
Self {
root,
selector: None,
}
pub fn new(src: &str, lang: L) -> Self {
let processed = lang.pre_process_pattern(src);
let root = Root::new(&processed, lang);
let goal = root.root();
if goal.inner.child_count() != 1 {
todo!("multi-children pattern is not supported yet.")
}
Self {
root,
selector: None,
}
}
pub fn contextual(context: &str, selector: &str, lang: L) -> Self {
let processed = lang.pre_process_pattern(context);
let root = Root::new(&processed, lang.clone());
let goal = root.root();
if goal.inner.child_count() != 1 {
todo!("multi-children pattern is not supported yet.")
}
let kind_matcher = KindMatcher::new(selector, lang);
if goal.find(&kind_matcher).is_none() {
todo!("use result to indicate failure");
}
Self {
root,
selector: Some(kind_matcher),
}
pub fn contextual(context: &str, selector: &str, lang: L) -> Self {
let processed = lang.pre_process_pattern(context);
let root = Root::new(&processed, lang.clone());
let goal = root.root();
if goal.inner.child_count() != 1 {
todo!("multi-children pattern is not supported yet.")
}
let kind_matcher = KindMatcher::new(selector, lang);
if goal.find(&kind_matcher).is_none() {
todo!("use result to indicate failure");
}
Self {
root,
selector: Some(kind_matcher),
}
}
// TODO: extract out matcher in recursion
fn matcher(&self) -> Node<L> {
let root = self.root.root();
if let Some(kind_matcher) = &self.selector {
return root
.find(kind_matcher)
.expect("contextual match should succeed");
}
let mut node = root.inner;
while node.child_count() == 1 {
node = node.child(0).unwrap();
}
Node {
inner: node,
root: &self.root,
}
// TODO: extract out matcher in recursion
fn matcher(&self) -> Node<L> {
let root = self.root.root();
if let Some(kind_matcher) = &self.selector {
return root
.find(kind_matcher)
.expect("contextual match should succeed");
}
let mut node = root.inner;
while node.child_count() == 1 {
node = node.child(0).unwrap();
}
Node {
inner: node,
root: &self.root,
}
}
}
impl<L: Language> Matcher<L> for Pattern<L> {
fn match_node_with_env<'tree>(
&self,
node: Node<'tree, L>,
env: &mut MetaVarEnv<'tree, L>,
) -> Option<Node<'tree, L>> {
match_node_non_recursive(&self.matcher(), node, env)
}
fn match_node_with_env<'tree>(
&self,
node: Node<'tree, L>,
env: &mut MetaVarEnv<'tree, L>,
) -> Option<Node<'tree, L>> {
match_node_non_recursive(&self.matcher(), node, env)
}
}
impl<L: Language> std::fmt::Debug for Pattern<L> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.matcher().inner.to_sexp())
}
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.matcher().inner.to_sexp())
}
}
impl<L: Language> PositiveMatcher<L> for Pattern<L> {}
#[cfg(test)]
mod test {
use super::*;
use crate::language::Tsx;
use std::collections::HashMap;
use super::*;
use crate::language::Tsx;
use std::collections::HashMap;
fn pattern_node(s: &str) -> Root<Tsx> {
Root::new(s, Tsx)
}
fn pattern_node(s: &str) -> Root<Tsx> {
Root::new(s, Tsx)
}
fn test_match(s1: &str, s2: &str) {
let pattern = Pattern::new(s1, Tsx);
let cand = pattern_node(s2);
let cand = cand.root();
assert!(
pattern.find_node(cand.clone()).is_some(),
"goal: {}, candidate: {}",
pattern.root.root().inner.to_sexp(),
cand.inner.to_sexp(),
);
}
fn test_non_match(s1: &str, s2: &str) {
let pattern = Pattern::new(s1, Tsx);
let cand = pattern_node(s2);
let cand = cand.root();
assert!(
pattern.find_node(cand.clone()).is_none(),
"goal: {}, candidate: {}",
pattern.root.root().inner.to_sexp(),
cand.inner.to_sexp(),
);
}
fn test_match(s1: &str, s2: &str) {
let pattern = Pattern::new(s1, Tsx);
let cand = pattern_node(s2);
let cand = cand.root();
assert!(
pattern.find_node(cand.clone()).is_some(),
"goal: {}, candidate: {}",
pattern.root.root().inner.to_sexp(),
cand.inner.to_sexp(),
);
}
fn test_non_match(s1: &str, s2: &str) {
let pattern = Pattern::new(s1, Tsx);
let cand = pattern_node(s2);
let cand = cand.root();
assert!(
pattern.find_node(cand.clone()).is_none(),
"goal: {}, candidate: {}",
pattern.root.root().inner.to_sexp(),
cand.inner.to_sexp(),
);
}
#[test]
fn test_meta_variable() {
test_match("const a = $VALUE", "const a = 123");
test_match("const $VARIABLE = $VALUE", "const a = 123");
test_match("const $VARIABLE = $VALUE", "const a = 123");
}
#[test]
fn test_meta_variable() {
test_match("const a = $VALUE", "const a = 123");
test_match("const $VARIABLE = $VALUE", "const a = 123");
test_match("const $VARIABLE = $VALUE", "const a = 123");
}
fn match_env(goal_str: &str, cand: &str) -> HashMap<String, String> {
let pattern = Pattern::new(goal_str, Tsx);
let cand = pattern_node(cand);
let cand = cand.root();
let mut env = MetaVarEnv::new();
pattern.find_node_with_env(cand, &mut env).unwrap();
HashMap::from(env)
}
fn match_env(goal_str: &str, cand: &str) -> HashMap<String, String> {
let pattern = Pattern::new(goal_str, Tsx);
let cand = pattern_node(cand);
let cand = cand.root();
let mut env = MetaVarEnv::new();
pattern.find_node_with_env(cand, &mut env).unwrap();
HashMap::from(env)
}
#[test]
fn test_meta_variable_env() {
let env = match_env("const a = $VALUE", "const a = 123");
assert_eq!(env["VALUE"], "123");
}
#[test]
fn test_meta_variable_env() {
let env = match_env("const a = $VALUE", "const a = 123");
assert_eq!(env["VALUE"], "123");
}
#[test]
fn test_match_non_atomic() {
let env = match_env("const a = $VALUE", "const a = 5 + 3");
assert_eq!(env["VALUE"], "5 + 3");
}
#[test]
fn test_match_non_atomic() {
let env = match_env("const a = $VALUE", "const a = 5 + 3");
assert_eq!(env["VALUE"], "5 + 3");
}
#[test]
fn test_class_assignment() {
test_match("class $C { $MEMBER = $VAL}", "class A {a = 123}");
test_non_match("class $C { $MEMBER = $VAL; b = 123; }", "class A {a = 123}");
// test_match("a = 123", "class A {a = 123}");
test_non_match("a = 123", "class B {b = 123}");
}
#[test]
fn test_class_assignment() {
test_match("class $C { $MEMBER = $VAL}", "class A {a = 123}");
test_non_match("class $C { $MEMBER = $VAL; b = 123; }", "class A {a = 123}");
// test_match("a = 123", "class A {a = 123}");
test_non_match("a = 123", "class B {b = 123}");
}
#[test]
fn test_return() {
test_match("$A($B)", "return test(123)");
}
#[test]
fn test_return() {
test_match("$A($B)", "return test(123)");
}
#[test]
fn test_contextual_pattern() {
let pattern = Pattern::contextual("class A { $F = $I }", "public_field_definition", Tsx);
let cand = pattern_node("class B { b = 123 }");
assert!(pattern.find_node(cand.root()).is_some());
let cand = pattern_node("let b = 123");
assert!(pattern.find_node(cand.root()).is_none());
}
#[test]
fn test_contextual_pattern() {
let pattern = Pattern::contextual("class A { $F = $I }", "public_field_definition", Tsx);
let cand = pattern_node("class B { b = 123 }");
assert!(pattern.find_node(cand.root()).is_some());
let cand = pattern_node("let b = 123");
assert!(pattern.find_node(cand.root()).is_none());
}
#[test]
fn test_contextual_match_with_env() {
let pattern = Pattern::contextual("class A { $F = $I }", "public_field_definition", Tsx);
let cand = pattern_node("class B { b = 123 }");
let mut env = MetaVarEnv::new();
assert!(pattern.find_node_with_env(cand.root(), &mut env).is_some());
let env = HashMap::from(env);
assert_eq!(env["F"], "b");
assert_eq!(env["I"], "123");
}
#[test]
fn test_contextual_match_with_env() {
let pattern = Pattern::contextual("class A { $F = $I }", "public_field_definition", Tsx);
let cand = pattern_node("class B { b = 123 }");
let mut env = MetaVarEnv::new();
assert!(pattern.find_node_with_env(cand.root(), &mut env).is_some());
let env = HashMap::from(env);
assert_eq!(env["F"], "b");
assert_eq!(env["I"], "123");
}
#[test]
fn test_contextual_unmatch_with_env() {
let pattern = Pattern::contextual("class A { $F = $I }", "public_field_definition", Tsx);
let cand = pattern_node("let b = 123");
let mut env = MetaVarEnv::new();
assert!(pattern.find_node_with_env(cand.root(), &mut env).is_none());
let env = HashMap::from(env);
assert!(env.is_empty());
}
#[test]
fn test_contextual_unmatch_with_env() {
let pattern = Pattern::contextual("class A { $F = $I }", "public_field_definition", Tsx);
let cand = pattern_node("let b = 123");
let mut env = MetaVarEnv::new();
assert!(pattern.find_node_with_env(cand.root(), &mut env).is_none());
let env = HashMap::from(env);
assert!(env.is_empty());
}
}
+179 -179
View File
@@ -6,224 +6,224 @@ use crate::{Node, Root};
/// Replace meta variable in the replacer string
pub trait Replacer<L: Language> {
fn generate_replacement(&self, env: &MetaVarEnv<L>, lang: L) -> String;
fn generate_replacement(&self, env: &MetaVarEnv<L>, lang: L) -> String;
}
impl<L: Language> Replacer<L> for str {
fn generate_replacement(&self, env: &MetaVarEnv<L>, lang: L) -> String {
let root = Root::new(self, lang.clone());
let edits = collect_edits(&root, env, lang);
merge_edits_to_string(edits, &root)
}
fn generate_replacement(&self, env: &MetaVarEnv<L>, lang: L) -> String {
let root = Root::new(self, lang.clone());
let edits = collect_edits(&root, env, lang);
merge_edits_to_string(edits, &root)
}
}
impl<L: Language> Replacer<L> for Pattern<L> {
fn generate_replacement(&self, env: &MetaVarEnv<L>, lang: L) -> String {
let edits = collect_edits(&self.root, env, lang);
merge_edits_to_string(edits, &self.root)
}
fn generate_replacement(&self, env: &MetaVarEnv<L>, lang: L) -> String {
let edits = collect_edits(&self.root, env, lang);
merge_edits_to_string(edits, &self.root)
}
}
impl<L, T> Replacer<L> for &T
where
L: Language,
T: Replacer<L> + ?Sized,
L: Language,
T: Replacer<L> + ?Sized,
{
fn generate_replacement(&self, env: &MetaVarEnv<L>, lang: L) -> String {
(**self).generate_replacement(env, lang)
}
fn generate_replacement(&self, env: &MetaVarEnv<L>, lang: L) -> String {
(**self).generate_replacement(env, lang)
}
}
fn collect_edits<L: Language>(root: &Root<L>, env: &MetaVarEnv<L>, lang: L) -> Vec<Edit> {
let mut node = root.root();
let root_id = node.inner.id();
let mut edits = vec![];
let mut node = root.root();
let root_id = node.inner.id();
let mut edits = vec![];
// this is a post-order DFS that stops traversal when the node matches
'outer: loop {
if let Some(text) = get_meta_var_replacement(&node, env, lang.clone()) {
let position = node.inner.start_byte();
let length = node.inner.end_byte() - position;
edits.push(Edit {
position: position as usize,
deleted_length: length as usize,
inserted_text: text,
});
} else if let Some(first_child) = node.child(0) {
// traverse down to child
node = first_child;
continue;
} else if node.inner.is_missing() {
// TODO: better handling missing node
if let Some(sibling) = node.next() {
node = sibling;
continue;
} else {
break;
}
}
// traverse up to parent until getting to root
loop {
// come back to the root node, terminating dfs
if node.inner.id() == root_id {
break 'outer;
}
if let Some(sibling) = node.next() {
node = sibling;
break;
}
node = node.parent().unwrap();
}
// this is a post-order DFS that stops traversal when the node matches
'outer: loop {
if let Some(text) = get_meta_var_replacement(&node, env, lang.clone()) {
let position = node.inner.start_byte();
let length = node.inner.end_byte() - position;
edits.push(Edit {
position: position as usize,
deleted_length: length as usize,
inserted_text: text,
});
} else if let Some(first_child) = node.child(0) {
// traverse down to child
node = first_child;
continue;
} else if node.inner.is_missing() {
// TODO: better handling missing node
if let Some(sibling) = node.next() {
node = sibling;
continue;
} else {
break;
}
}
// add the missing one
edits.push(Edit {
position: root.source.len(),
deleted_length: 0,
inserted_text: String::new(),
});
edits
// traverse up to parent until getting to root
loop {
// come back to the root node, terminating dfs
if node.inner.id() == root_id {
break 'outer;
}
if let Some(sibling) = node.next() {
node = sibling;
break;
}
node = node.parent().unwrap();
}
}
// add the missing one
edits.push(Edit {
position: root.source.len(),
deleted_length: 0,
inserted_text: String::new(),
});
edits
}
fn merge_edits_to_string<L: Language>(edits: Vec<Edit>, root: &Root<L>) -> String {
let mut ret = String::new();
let mut start = 0;
for edit in edits {
ret.push_str(&root.source[start..edit.position]);
ret.push_str(&edit.inserted_text);
start = edit.position + edit.deleted_length;
}
ret
let mut ret = String::new();
let mut start = 0;
for edit in edits {
ret.push_str(&root.source[start..edit.position]);
ret.push_str(&edit.inserted_text);
start = edit.position + edit.deleted_length;
}
ret
}
fn get_meta_var_replacement<L: Language>(
node: &Node<L>,
env: &MetaVarEnv<L>,
lang: L,
node: &Node<L>,
env: &MetaVarEnv<L>,
lang: L,
) -> Option<String> {
if !node.is_leaf() {
return None;
if !node.is_leaf() {
return None;
}
let meta_var = lang.extract_meta_var(&node.text())?;
let replaced = match env.get(&meta_var)? {
MatchResult::Single(replaced) => replaced.text().to_string(),
MatchResult::Multi(nodes) => {
if nodes.is_empty() {
String::new()
} else {
let start = nodes[0].inner.start_byte() as usize;
let end = nodes[nodes.len() - 1].inner.end_byte() as usize;
nodes[0].root.source[start..end].to_string()
}
}
let meta_var = lang.extract_meta_var(&node.text())?;
let replaced = match env.get(&meta_var)? {
MatchResult::Single(replaced) => replaced.text().to_string(),
MatchResult::Multi(nodes) => {
if nodes.is_empty() {
String::new()
} else {
let start = nodes[0].inner.start_byte() as usize;
let end = nodes[nodes.len() - 1].inner.end_byte() as usize;
nodes[0].root.source[start..end].to_string()
}
}
};
Some(replaced)
};
Some(replaced)
}
impl<'a, L: Language> Replacer<L> for Node<'a, L> {
fn generate_replacement(&self, _: &MetaVarEnv<L>, _: L) -> String {
self.text().to_string()
}
fn generate_replacement(&self, _: &MetaVarEnv<L>, _: L) -> String {
self.text().to_string()
}
}
#[cfg(test)]
mod test {
use super::*;
use crate::language::{Language, Tsx};
use std::collections::HashMap;
use super::*;
use crate::language::{Language, Tsx};
use std::collections::HashMap;
fn test_str_replace(replacer: &str, vars: &[(&str, &str)], expected: &str) {
let mut env = MetaVarEnv::new();
let roots: Vec<_> = vars
.iter()
.map(|(v, p)| (v, Tsx.ast_grep(p).inner))
.collect();
for (var, root) in &roots {
env.insert(var.to_string(), root.root());
}
let replaced = replacer.generate_replacement(&env, Tsx);
assert_eq!(
replaced,
expected,
"wrong replacement {replaced} {expected} {:?}",
HashMap::from(env)
);
fn test_str_replace(replacer: &str, vars: &[(&str, &str)], expected: &str) {
let mut env = MetaVarEnv::new();
let roots: Vec<_> = vars
.iter()
.map(|(v, p)| (v, Tsx.ast_grep(p).inner))
.collect();
for (var, root) in &roots {
env.insert(var.to_string(), root.root());
}
let replaced = replacer.generate_replacement(&env, Tsx);
assert_eq!(
replaced,
expected,
"wrong replacement {replaced} {expected} {:?}",
HashMap::from(env)
);
}
#[test]
fn test_no_env() {
test_str_replace("let a = 123", &[], "let a = 123");
test_str_replace(
"console.log('hello world'); let b = 123;",
&[],
"console.log('hello world'); let b = 123;",
);
}
#[test]
fn test_no_env() {
test_str_replace("let a = 123", &[], "let a = 123");
test_str_replace(
"console.log('hello world'); let b = 123;",
&[],
"console.log('hello world'); let b = 123;",
);
}
#[test]
fn test_single_env() {
test_str_replace("let a = $A", &[("A", "123")], "let a = 123");
test_str_replace(
"console.log($HW); let b = 123;",
&[("HW", "'hello world'")],
"console.log('hello world'); let b = 123;",
);
}
#[test]
fn test_single_env() {
test_str_replace("let a = $A", &[("A", "123")], "let a = 123");
test_str_replace(
"console.log($HW); let b = 123;",
&[("HW", "'hello world'")],
"console.log('hello world'); let b = 123;",
);
}
#[test]
fn test_multiple_env() {
test_str_replace("let $V = $A", &[("A", "123"), ("V", "a")], "let a = 123");
test_str_replace(
"console.log($HW); let $B = 123;",
&[("HW", "'hello world'"), ("B", "b")],
"console.log('hello world'); let b = 123;",
);
}
#[test]
fn test_multiple_env() {
test_str_replace("let $V = $A", &[("A", "123"), ("V", "a")], "let a = 123");
test_str_replace(
"console.log($HW); let $B = 123;",
&[("HW", "'hello world'"), ("B", "b")],
"console.log('hello world'); let b = 123;",
);
}
#[test]
fn test_multiple_occurrences() {
test_str_replace("let $A = $A", &[("A", "a")], "let a = a");
test_str_replace("var $A = () => $A", &[("A", "a")], "var a = () => a");
test_str_replace(
"const $A = () => { console.log($B); $A(); };",
&[("B", "'hello world'"), ("A", "a")],
"const a = () => { console.log('hello world'); a(); };",
);
}
#[test]
fn test_multiple_occurrences() {
test_str_replace("let $A = $A", &[("A", "a")], "let a = a");
test_str_replace("var $A = () => $A", &[("A", "a")], "var a = () => a");
test_str_replace(
"const $A = () => { console.log($B); $A(); };",
&[("B", "'hello world'"), ("A", "a")],
"const a = () => { console.log('hello world'); a(); };",
);
}
fn test_ellipsis_replace(replacer: &str, vars: &[(&str, &str)], expected: &str) {
let mut env = MetaVarEnv::new();
let roots: Vec<_> = vars
.iter()
.map(|(v, p)| (v, Tsx.ast_grep(p).inner))
.collect();
for (var, root) in &roots {
env.insert_multi(var.to_string(), root.root().children().collect());
}
let replaced = replacer.generate_replacement(&env, Tsx);
assert_eq!(
replaced,
expected,
"wrong replacement {replaced} {expected} {:?}",
HashMap::from(env)
);
fn test_ellipsis_replace(replacer: &str, vars: &[(&str, &str)], expected: &str) {
let mut env = MetaVarEnv::new();
let roots: Vec<_> = vars
.iter()
.map(|(v, p)| (v, Tsx.ast_grep(p).inner))
.collect();
for (var, root) in &roots {
env.insert_multi(var.to_string(), root.root().children().collect());
}
let replaced = replacer.generate_replacement(&env, Tsx);
assert_eq!(
replaced,
expected,
"wrong replacement {replaced} {expected} {:?}",
HashMap::from(env)
);
}
#[test]
fn test_ellipsis_meta_var() {
test_ellipsis_replace(
"let a = () => { $$$B }",
&[("B", "alert('works!')")],
"let a = () => { alert('works!') }",
);
test_ellipsis_replace(
"let a = () => { $$$B }",
&[("B", "alert('works!');console.log(123)")],
"let a = () => { alert('works!');console.log(123) }",
);
}
#[test]
fn test_ellipsis_meta_var() {
test_ellipsis_replace(
"let a = () => { $$$B }",
&[("B", "alert('works!')")],
"let a = () => { alert('works!') }",
);
test_ellipsis_replace(
"let a = () => { $$$B }",
&[("B", "alert('works!');console.log(123)")],
"let a = () => { alert('works!');console.log(123) }",
);
}
#[test]
fn test_nested_matching_replace() {
// TODO
}
#[test]
fn test_nested_matching_replace() {
// TODO
}
}
+99 -93
View File
@@ -4,127 +4,133 @@ pub use tree_sitter::{Language, Tree};
/// Represents tree-sitter related error
#[derive(Debug)]
pub enum TSParseError {
Parse(ParserError),
Language(LanguageError),
/// A general error when tree sitter fails to parse in time. It can be caused by
/// the following reasons but tree-sitter does not provide error detail.
/// * The timeout set with [Parser::set_timeout_micros] expired
/// * The cancellation flag set with [Parser::set_cancellation_flag] was flipped
/// * The parser has not yet had a language assigned with [Parser::set_language]
TreeUnavailable,
MultiRoot,
Parse(ParserError),
Language(LanguageError),
/// A general error when tree sitter fails to parse in time. It can be caused by
/// the following reasons but tree-sitter does not provide error detail.
/// * The timeout set with [Parser::set_timeout_micros] expired
/// * The cancellation flag set with [Parser::set_cancellation_flag] was flipped
/// * The parser has not yet had a language assigned with [Parser::set_language]
TreeUnavailable,
MultiRoot,
}
impl From<ParserError> for TSParseError {
fn from(e: ParserError) -> Self {
Self::Parse(e)
}
fn from(e: ParserError) -> Self {
Self::Parse(e)
}
}
impl From<LanguageError> for TSParseError {
fn from(e: LanguageError) -> Self {
Self::Language(e)
}
fn from(e: LanguageError) -> Self {
Self::Language(e)
}
}
pub fn parse(
source_code: &str,
old_tree: Option<&Tree>,
ts_lang: Language,
source_code: &str,
old_tree: Option<&Tree>,
ts_lang: Language,
) -> Result<Tree, TSParseError> {
let mut parser = Parser::new()?;
parser.set_language(&ts_lang)?;
if let Some(tree) = parser.parse(source_code, old_tree)? {
Ok(tree)
} else {
Err(TSParseError::TreeUnavailable)
}
let mut parser = Parser::new()?;
parser.set_language(&ts_lang)?;
if let Some(tree) = parser.parse(source_code, old_tree)? {
Ok(tree)
} else {
Err(TSParseError::TreeUnavailable)
}
}
// https://github.com/tree-sitter/tree-sitter/blob/e4e5ffe517ca2c668689b24cb17c51b8c6db0790/cli/src/parse.rs
#[derive(Debug)]
pub struct Edit {
pub position: usize,
pub deleted_length: usize,
pub inserted_text: String,
pub position: usize,
pub deleted_length: usize,
pub inserted_text: String,
}
fn position_for_offset(input: &[u8], offset: usize) -> Point {
let (mut row, mut col) = (0, 0);
for c in &input[0..offset] {
if *c as char == '\n' {
row += 1;
col = 0;
} else {
col += 1;
}
let (mut row, mut col) = (0, 0);
for c in &input[0..offset] {
if *c as char == '\n' {
row += 1;
col = 0;
} else {
col += 1;
}
Point::new(row, col)
}
Point::new(row, col)
}
pub fn perform_edit(tree: &mut Tree, input: &mut Vec<u8>, edit: &Edit) -> InputEdit {
let start_byte = edit.position;
let old_end_byte = edit.position + edit.deleted_length;
let new_end_byte = edit.position + edit.inserted_text.len();
let start_position = position_for_offset(input, start_byte);
let old_end_position = position_for_offset(input, old_end_byte);
input.splice(start_byte..old_end_byte, edit.inserted_text.bytes());
let new_end_position = position_for_offset(input, new_end_byte);
let edit = InputEdit::new(
start_byte as u32,
old_end_byte as u32,
new_end_byte as u32,
&start_position,
&old_end_position,
&new_end_position,
);
tree.edit(&edit);
edit
let start_byte = edit.position;
let old_end_byte = edit.position + edit.deleted_length;
let new_end_byte = edit.position + edit.inserted_text.len();
let start_position = position_for_offset(input, start_byte);
let old_end_position = position_for_offset(input, old_end_byte);
input.splice(start_byte..old_end_byte, edit.inserted_text.bytes());
let new_end_position = position_for_offset(input, new_end_byte);
let edit = InputEdit::new(
start_byte as u32,
old_end_byte as u32,
new_end_byte as u32,
&start_position,
&old_end_position,
&new_end_position,
);
tree.edit(&edit);
edit
}
#[cfg(test)]
mod test {
use super::{parse as parse_lang, *};
use crate::language::{Language, Tsx};
use super::{parse as parse_lang, *};
use crate::language::{Language, Tsx};
fn parse(src: &str) -> Tree {
parse_lang(src, None, Tsx.get_ts_language()).unwrap()
}
fn parse(src: &str) -> Tree {
parse_lang(src, None, Tsx.get_ts_language()).unwrap()
}
#[test]
fn test_tree_sitter() {
let tree = parse("var a = 1234");
let root_node = tree.root_node();
assert_eq!(root_node.kind(), "program");
assert_eq!(root_node.start_position().column(), 0);
assert_eq!(root_node.end_position().column(), 12);
assert_eq!(root_node.to_sexp(), "(program (variable_declaration (variable_declarator name: (identifier) value: (number))))");
}
#[test]
fn test_tree_sitter() {
let tree = parse("var a = 1234");
let root_node = tree.root_node();
assert_eq!(root_node.kind(), "program");
assert_eq!(root_node.start_position().column(), 0);
assert_eq!(root_node.end_position().column(), 12);
assert_eq!(
root_node.to_sexp(),
"(program (variable_declaration (variable_declarator name: (identifier) value: (number))))"
);
}
#[test]
fn test_object_literal() {
let tree = parse("{a: $X}");
let root_node = tree.root_node();
// wow this is not label. technically it is wrong but practically it is better LOL
assert_eq!(root_node.to_sexp(), "(program (expression_statement (object (pair key: (property_identifier) value: (identifier)))))");
}
#[test]
fn test_object_literal() {
let tree = parse("{a: $X}");
let root_node = tree.root_node();
// wow this is not label. technically it is wrong but practically it is better LOL
assert_eq!(root_node.to_sexp(), "(program (expression_statement (object (pair key: (property_identifier) value: (identifier)))))");
}
#[test]
fn test_edit() {
let mut src = "a + b".to_string();
let mut tree = parse(&src);
let edit = perform_edit(
&mut tree,
unsafe { src.as_mut_vec() },
&Edit {
position: 1,
deleted_length: 0,
inserted_text: " * b".into(),
},
);
tree.edit(&edit);
let tree2 = parse_lang(&src, Some(&tree), Tsx.get_ts_language()).unwrap();
assert_eq!(tree.root_node().to_sexp(), "(program (expression_statement (binary_expression left: (identifier) right: (identifier))))");
assert_eq!(tree2.root_node().to_sexp(), "(program (expression_statement (binary_expression left: (binary_expression left: (binary_expression left: (identifier) right: (identifier)) right: (identifier)) right: (identifier))))");
}
#[test]
fn test_edit() {
let mut src = "a + b".to_string();
let mut tree = parse(&src);
let edit = perform_edit(
&mut tree,
unsafe { src.as_mut_vec() },
&Edit {
position: 1,
deleted_length: 0,
inserted_text: " * b".into(),
},
);
tree.edit(&edit);
let tree2 = parse_lang(&src, Some(&tree), Tsx.get_ts_language()).unwrap();
assert_eq!(
tree.root_node().to_sexp(),
"(program (expression_statement (binary_expression left: (identifier) right: (identifier))))"
);
assert_eq!(tree2.root_node().to_sexp(), "(program (expression_statement (binary_expression left: (binary_expression left: (binary_expression left: (identifier) right: (identifier)) right: (identifier)) right: (identifier))))");
}
}
+166 -160
View File
@@ -15,192 +15,198 @@ pub trait LSPLang = Language + Eq + Send + Sync + 'static;
#[derive(Clone)]
struct VersionedAst<L: Language> {
version: i32,
root: AstGrep<L>,
version: i32,
root: AstGrep<L>,
}
pub struct Backend<L: LSPLang> {
client: Client,
map: DashMap<String, VersionedAst<L>>,
rules: RuleCollection<L>,
client: Client,
map: DashMap<String, VersionedAst<L>>,
rules: RuleCollection<L>,
}
#[tower_lsp::async_trait]
impl<L: LSPLang> LanguageServer for Backend<L> {
async fn initialize(&self, _: InitializeParams) -> Result<InitializeResult> {
Ok(InitializeResult {
server_info: Some(ServerInfo {
name: "ast-grep language server".to_string(),
version: None,
}),
capabilities: ServerCapabilities {
// TODO: change this to incremental
text_document_sync: Some(TextDocumentSyncCapability::Kind(
TextDocumentSyncKind::FULL,
)),
code_action_provider: None,
..ServerCapabilities::default()
},
})
}
async fn initialize(&self, _: InitializeParams) -> Result<InitializeResult> {
Ok(InitializeResult {
server_info: Some(ServerInfo {
name: "ast-grep language server".to_string(),
version: None,
}),
capabilities: ServerCapabilities {
// TODO: change this to incremental
text_document_sync: Some(TextDocumentSyncCapability::Kind(TextDocumentSyncKind::FULL)),
code_action_provider: None,
..ServerCapabilities::default()
},
})
}
async fn initialized(&self, _: InitializedParams) {
self.client
.log_message(MessageType::INFO, "server initialized!")
.await;
}
async fn initialized(&self, _: InitializedParams) {
self
.client
.log_message(MessageType::INFO, "server initialized!")
.await;
}
async fn shutdown(&self) -> Result<()> {
Ok(())
}
async fn shutdown(&self) -> Result<()> {
Ok(())
}
async fn did_change_workspace_folders(&self, _: DidChangeWorkspaceFoldersParams) {
self.client
.log_message(MessageType::INFO, "workspace folders changed!")
.await;
}
async fn did_change_workspace_folders(&self, _: DidChangeWorkspaceFoldersParams) {
self
.client
.log_message(MessageType::INFO, "workspace folders changed!")
.await;
}
async fn did_change_configuration(&self, _: DidChangeConfigurationParams) {
self.client
.log_message(MessageType::INFO, "configuration changed!")
.await;
}
async fn did_change_configuration(&self, _: DidChangeConfigurationParams) {
self
.client
.log_message(MessageType::INFO, "configuration changed!")
.await;
}
async fn did_change_watched_files(&self, _: DidChangeWatchedFilesParams) {
self.client
.log_message(MessageType::INFO, "watched files have changed!")
.await;
}
async fn did_open(&self, params: DidOpenTextDocumentParams) {
self.client
.log_message(MessageType::INFO, "file opened!")
.await;
self.on_open(params).await;
}
async fn did_change_watched_files(&self, _: DidChangeWatchedFilesParams) {
self
.client
.log_message(MessageType::INFO, "watched files have changed!")
.await;
}
async fn did_open(&self, params: DidOpenTextDocumentParams) {
self
.client
.log_message(MessageType::INFO, "file opened!")
.await;
self.on_open(params).await;
}
async fn did_change(&self, params: DidChangeTextDocumentParams) {
self.on_change(params).await;
}
async fn did_change(&self, params: DidChangeTextDocumentParams) {
self.on_change(params).await;
}
async fn did_save(&self, _: DidSaveTextDocumentParams) {
self.client
.log_message(MessageType::INFO, "file saved!")
.await;
}
async fn did_save(&self, _: DidSaveTextDocumentParams) {
self
.client
.log_message(MessageType::INFO, "file saved!")
.await;
}
async fn did_close(&self, params: DidCloseTextDocumentParams) {
self.on_close(params).await;
self.client
.log_message(MessageType::INFO, "file closed!")
.await;
}
async fn did_close(&self, params: DidCloseTextDocumentParams) {
self.on_close(params).await;
self
.client
.log_message(MessageType::INFO, "file closed!")
.await;
}
}
fn convert_node_match_to_range<L: Language>(node_match: NodeMatch<L>) -> Range {
let (start_row, start_col) = node_match.start_pos();
let (end_row, end_col) = node_match.end_pos();
Range {
start: Position {
line: start_row as u32,
character: start_col as u32,
},
end: Position {
line: end_row as u32,
character: end_col as u32,
},
}
let (start_row, start_col) = node_match.start_pos();
let (end_row, end_col) = node_match.end_pos();
Range {
start: Position {
line: start_row as u32,
character: start_col as u32,
},
end: Position {
line: end_row as u32,
character: end_col as u32,
},
}
}
fn url_to_code_description(url: &Option<String>) -> Option<CodeDescription> {
let href = Url::parse(url.as_ref()?).ok()?;
Some(CodeDescription { href })
let href = Url::parse(url.as_ref()?).ok()?;
Some(CodeDescription { href })
}
impl<L: LSPLang> Backend<L> {
pub fn new(client: Client, rules: RuleCollection<L>) -> Self {
Self {
client,
rules,
map: DashMap::new(),
}
pub fn new(client: Client, rules: RuleCollection<L>) -> Self {
Self {
client,
rules,
map: DashMap::new(),
}
async fn publish_diagnostics(&self, uri: Url, versioned: &VersionedAst<L>) -> Option<()> {
let mut diagnostics = vec![];
let lang = Self::infer_lang_from_uri(&uri)?;
let rules = self.rules.get_rules_for_lang(&lang);
for rule in rules {
let matcher = rule.get_matcher();
// TODO: don't run rules with unmatching language
diagnostics.extend(
versioned
.root
.root()
.find_all(&matcher)
.map(|m| Diagnostic {
range: convert_node_match_to_range(m),
code: Some(NumberOrString::String(rule.id.clone())),
code_description: url_to_code_description(&rule.url),
severity: Some(match rule.severity {
Severity::Error => DiagnosticSeverity::ERROR,
Severity::Warning => DiagnosticSeverity::WARNING,
Severity::Info => DiagnosticSeverity::INFORMATION,
Severity::Hint => DiagnosticSeverity::HINT,
}),
message: rule.message.clone(),
source: Some(String::from("ast-grep")),
tags: None,
related_information: None, // TODO: add labels
data: None,
}),
);
}
self.client
.publish_diagnostics(uri, diagnostics, Some(versioned.version))
.await;
Some(())
}
async fn publish_diagnostics(&self, uri: Url, versioned: &VersionedAst<L>) -> Option<()> {
let mut diagnostics = vec![];
let lang = Self::infer_lang_from_uri(&uri)?;
let rules = self.rules.get_rules_for_lang(&lang);
for rule in rules {
let matcher = rule.get_matcher();
// TODO: don't run rules with unmatching language
diagnostics.extend(
versioned
.root
.root()
.find_all(&matcher)
.map(|m| Diagnostic {
range: convert_node_match_to_range(m),
code: Some(NumberOrString::String(rule.id.clone())),
code_description: url_to_code_description(&rule.url),
severity: Some(match rule.severity {
Severity::Error => DiagnosticSeverity::ERROR,
Severity::Warning => DiagnosticSeverity::WARNING,
Severity::Info => DiagnosticSeverity::INFORMATION,
Severity::Hint => DiagnosticSeverity::HINT,
}),
message: rule.message.clone(),
source: Some(String::from("ast-grep")),
tags: None,
related_information: None, // TODO: add labels
data: None,
}),
);
}
async fn on_open(&self, params: DidOpenTextDocumentParams) -> Option<()> {
let text_doc = params.text_document;
let uri = text_doc.uri.as_str();
let text = text_doc.text;
let lang = Self::infer_lang_from_uri(&text_doc.uri)?;
let root = AstGrep::new(text, lang);
let versioned = VersionedAst {
version: text_doc.version,
root,
};
let copied = versioned.clone();
self.map.insert(uri.to_owned(), versioned); // don't lock dashmap
self.publish_diagnostics(text_doc.uri, &copied).await;
Some(())
}
async fn on_change(&self, params: DidChangeTextDocumentParams) -> Option<()> {
let text_doc = params.text_document;
let uri = text_doc.uri.as_str();
let text = &params.content_changes[0].text;
let lang = Self::infer_lang_from_uri(&text_doc.uri)?;
let root = AstGrep::new(text, lang);
let mut versioned = self.map.get_mut(uri)?;
// skip old version update
if versioned.version > text_doc.version {
return None;
}
*versioned = VersionedAst {
version: text_doc.version,
root,
};
let copied = versioned.clone();
drop(versioned); // don't lock dashmap
self.publish_diagnostics(text_doc.uri, &copied).await;
Some(())
}
async fn on_close(&self, params: DidCloseTextDocumentParams) {
self.map.remove(params.text_document.uri.as_str());
self
.client
.publish_diagnostics(uri, diagnostics, Some(versioned.version))
.await;
Some(())
}
async fn on_open(&self, params: DidOpenTextDocumentParams) -> Option<()> {
let text_doc = params.text_document;
let uri = text_doc.uri.as_str();
let text = text_doc.text;
let lang = Self::infer_lang_from_uri(&text_doc.uri)?;
let root = AstGrep::new(text, lang);
let versioned = VersionedAst {
version: text_doc.version,
root,
};
let copied = versioned.clone();
self.map.insert(uri.to_owned(), versioned); // don't lock dashmap
self.publish_diagnostics(text_doc.uri, &copied).await;
Some(())
}
async fn on_change(&self, params: DidChangeTextDocumentParams) -> Option<()> {
let text_doc = params.text_document;
let uri = text_doc.uri.as_str();
let text = &params.content_changes[0].text;
let lang = Self::infer_lang_from_uri(&text_doc.uri)?;
let root = AstGrep::new(text, lang);
let mut versioned = self.map.get_mut(uri)?;
// skip old version update
if versioned.version > text_doc.version {
return None;
}
*versioned = VersionedAst {
version: text_doc.version,
root,
};
let copied = versioned.clone();
drop(versioned); // don't lock dashmap
self.publish_diagnostics(text_doc.uri, &copied).await;
Some(())
}
async fn on_close(&self, params: DidCloseTextDocumentParams) {
self.map.remove(params.text_document.uri.as_str());
}
// TODO: support other urls besides file_scheme
fn infer_lang_from_uri(uri: &Url) -> Option<L> {
let path = uri.to_file_path().ok()?;
L::from_path(path)
}
// TODO: support other urls besides file_scheme
fn infer_lang_from_uri(uri: &Url) -> Option<L> {
let path = uri.to_file_path().ok()?;
L::from_path(path)
}
}
+14
View File
@@ -0,0 +1,14 @@
# Auto detect text files and perform LF normalization
* text=auto
*.ts text eol=lf merge=union
*.tsx text eol=lf merge=union
*.rs text eol=lf merge=union
*.js text eol=lf merge=union
*.json text eol=lf merge=union
*.debug text eol=lf merge=union
# Generated codes
index.js linguist-detectable=false
index.d.ts linguist-detectable=false
+783
View File
File diff suppressed because one or more lines are too long
+3
View File
@@ -0,0 +1,3 @@
nodeLinker: node-modules
yarnPath: .yarn/releases/yarn-3.2.3.cjs
+26
View File
@@ -0,0 +1,26 @@
[package]
name = "ast-grep-napi"
version = "0.1.1"
authors = ["HerringtonDarkholme <2883231+HerringtonDarkholme@users.noreply.github.com>"]
edition = "2018"
description = "Search and Rewrite code at large scale using precise AST pattern"
keywords = ["ast", "pattern", "codemod", "search", "rewrite"]
license = "MIT"
repository = "https://github.com/HerringtonDarkholme/ast-grep"
[dependencies]
napi = "2"
napi-derive = "2"
ast-grep-core = { version="0.1.1", path = "../core" }
ast-grep-config = { version="0.1.1", path = "../config" }
serde = { version = "1.0", features = ["derive"] }
tree-sitter-typescript="0.20.1"
[lib]
crate-type = ["cdylib"]
[build-dependencies]
napi-build = "2"
[profile.release]
lto = true
+21
View File
@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2020 N-API for Rust
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
+105
View File
@@ -0,0 +1,105 @@
# `@napi-rs/package-template`
![https://github.com/napi-rs/package-template/actions](https://github.com/napi-rs/package-template/workflows/CI/badge.svg)
> Template project for writing node packages with napi-rs.
# Usage
1. Click **Use this template**.
2. **Clone** your project.
3. Run `yarn install` to install dependencies.
4. Run `npx napi rename -n [name]` command under the project folder to rename your package.
## Install this test package
```
yarn add @napi-rs/package-template
```
## Support matrix
### Operating Systems
| | node14 | node16 | node18 |
| ---------------- | ------ | ------ | ------ |
| Windows x64 | ✓ | ✓ | ✓ |
| Windows x32 | ✓ | ✓ | ✓ |
| Windows arm64 | ✓ | ✓ | ✓ |
| macOS x64 | ✓ | ✓ | ✓ |
| macOS arm64 | ✓ | ✓ | ✓ |
| Linux x64 gnu | ✓ | ✓ | ✓ |
| Linux x64 musl | ✓ | ✓ | ✓ |
| Linux arm gnu | ✓ | ✓ | ✓ |
| Linux arm64 gnu | ✓ | ✓ | ✓ |
| Linux arm64 musl | ✓ | ✓ | ✓ |
| Android arm64 | ✓ | ✓ | ✓ |
| Android armv7 | ✓ | ✓ | ✓ |
| FreeBSD x64 | ✓ | ✓ | ✓ |
## Ability
### Build
After `yarn build/npm run build` command, you can see `package-template.[darwin|win32|linux].node` file in project root. This is the native addon built from [lib.rs](./src/lib.rs).
### Test
With [ava](https://github.com/avajs/ava), run `yarn test/npm run test` to test the native addon. You can also switch to another testing framework if you want.
### CI
With GitHub Actions, each commit and pull request will be built and tested automatically in the [`node@14`, `node@16`, `node@18`] x [`macOS`, `Linux`, `Windows`] matrix. You will never be afraid of the native addon being broken on these platforms.
### Release
Releasing a native package was very difficult in the old days. Native packages may require developers who use them to install a `build toolchain` like `gcc/llvm`, `node-gyp` or something more.
With `GitHub actions`, we can easily prebuild a `binary` for major platforms. And with `N-API`, we should never be afraid of **ABI compatibility**.
The other problem is how to deliver the prebuilt `binary` to users. Downloading it in a `postinstall` script is a common way that most packages do it right now. The problem with this solution is that it introduces many extra packages just to download a binary that is never used by the `runtime code`. Another problem is that some users may not be able to easily download the binary from `GitHub/CDN` if they are behind a private network (but in most cases, they have a private NPM mirror).
In this package, we choose a better way to solve this problem. We release different `npm packages` for different platforms. And add it to `optionalDependencies` before releasing the `Major` package to npm.
`NPM` will choose which native package should download from `registry` automatically. You can see [npm](./npm) dir for details. And you can also run `yarn add @napi-rs/package-template` to see how it works.
## Develop requirements
- Install the latest `Rust`
- Install `Node.js@10+` which fully supports `Node-API`
- Install `yarn@1.x`
## Test in local
- yarn
- yarn build
- yarn test
And you will see:
```bash
$ ava --verbose
✔ sync function from native code
✔ sleep function from native code (201ms)
2 tests passed
✨ Done in 1.12s.
```
## Release package
Ensure you have set your **NPM_TOKEN** in the `GitHub` project setting.
In `Settings -> Secrets`, add **NPM_TOKEN** into it.
When you want to release the package:
```
npm version [<newversion> | major | minor | patch | premajor | preminor | prepatch | prerelease [--preid=<prerelease-id>] | from-git]
git push
```
GitHub actions will do the rest job for you.
+18
View File
@@ -0,0 +1,18 @@
import test from 'ava'

import { findNodes } from '../index'

// Smoke test for the napi binding: matching the pattern `console` against
// `console.log(123)` should yield exactly one match whose span covers
// `console` — 0-based row/col start (0, 0) and end (0, 7).
test('sync function from native code', (t) => {
  t.deepEqual(findNodes('console.log(123)', 'console'), [
    {
      start: {
        row: 0,
        col: 0,
      },
      end: {
        row: 0,
        col: 7,
      },
    },
  ])
})
+5
View File
@@ -0,0 +1,5 @@
extern crate napi_build;

/// Build script: lets napi-rs emit the platform-specific linker flags
/// required to build this crate as a Node.js native addon.
fn main() {
  napi_build::setup();
}
@@ -0,0 +1,3 @@
# `@napi-rs/package-template-android-arm-eabi`
This is the **armv7-linux-androideabi** binary for `@napi-rs/package-template`
@@ -0,0 +1,25 @@
{
"name": "ast-grep-napi-android-arm-eabi",
"version": "1.0.0",
"os": [
"android"
],
"cpu": [
"arm"
],
"main": "ast-grep-napi.android-arm-eabi.node",
"files": [
"ast-grep-napi.android-arm-eabi.node"
],
"description": "Search and Rewrite code at large scale using precise AST pattern",
"keywords": ["ast", "pattern", "codemod", "search", "rewrite"],
"license": "MIT",
"engines": {
"node": ">= 10"
},
"publishConfig": {
"registry": "https://registry.npmjs.org/",
"access": "public"
},
"repository": "https://github.com/HerringtonDarkholme/ast-grep"
}
+3
View File
@@ -0,0 +1,3 @@
# `@napi-rs/package-template-android-arm64`
This is the **aarch64-linux-android** binary for `@napi-rs/package-template`
@@ -0,0 +1,25 @@
{
"name": "ast-grep-napi-android-arm64",
"version": "1.0.0",
"os": [
"android"
],
"cpu": [
"arm64"
],
"main": "ast-grep-napi.android-arm64.node",
"files": [
"ast-grep-napi.android-arm64.node"
],
"description": "Search and Rewrite code at large scale using precise AST pattern",
"keywords": ["ast", "pattern", "codemod", "search", "rewrite"],
"license": "MIT",
"engines": {
"node": ">= 10"
},
"publishConfig": {
"registry": "https://registry.npmjs.org/",
"access": "public"
},
"repository": "https://github.com/HerringtonDarkholme/ast-grep"
}
+3
View File
@@ -0,0 +1,3 @@
# `@napi-rs/package-template-darwin-arm64`
This is the **aarch64-apple-darwin** binary for `@napi-rs/package-template`
+25
View File
@@ -0,0 +1,25 @@
{
"name": "ast-grep-napi-darwin-arm64",
"version": "1.0.0",
"os": [
"darwin"
],
"cpu": [
"arm64"
],
"main": "ast-grep-napi.darwin-arm64.node",
"files": [
"ast-grep-napi.darwin-arm64.node"
],
"description": "Search and Rewrite code at large scale using precise AST pattern",
"keywords": ["ast", "pattern", "codemod", "search", "rewrite"],
"license": "MIT",
"engines": {
"node": ">= 10"
},
"publishConfig": {
"registry": "https://registry.npmjs.org/",
"access": "public"
},
"repository": "https://github.com/HerringtonDarkholme/ast-grep"
}
+3
View File
@@ -0,0 +1,3 @@
# `@napi-rs/package-template-darwin-x64`
This is the **x86_64-apple-darwin** binary for `@napi-rs/package-template`
+25
View File
@@ -0,0 +1,25 @@
{
"name": "ast-grep-napi-darwin-x64",
"version": "1.0.0",
"os": [
"darwin"
],
"cpu": [
"x64"
],
"main": "ast-grep-napi.darwin-x64.node",
"files": [
"ast-grep-napi.darwin-x64.node"
],
"description": "Search and Rewrite code at large scale using precise AST pattern",
"keywords": ["ast", "pattern", "codemod", "search", "rewrite"],
"license": "MIT",
"engines": {
"node": ">= 10"
},
"publishConfig": {
"registry": "https://registry.npmjs.org/",
"access": "public"
},
"repository": "https://github.com/HerringtonDarkholme/ast-grep"
}
+3
View File
@@ -0,0 +1,3 @@
# `@napi-rs/package-template-freebsd-x64`
This is the **x86_64-unknown-freebsd** binary for `@napi-rs/package-template`
+25
View File
@@ -0,0 +1,25 @@
{
"name": "ast-grep-napi-freebsd-x64",
"version": "1.0.0",
"os": [
"freebsd"
],
"cpu": [
"x64"
],
"main": "ast-grep-napi.freebsd-x64.node",
"files": [
"ast-grep-napi.freebsd-x64.node"
],
"description": "Search and Rewrite code at large scale using precise AST pattern",
"keywords": ["ast", "pattern", "codemod", "search", "rewrite"],
"license": "MIT",
"engines": {
"node": ">= 10"
},
"publishConfig": {
"registry": "https://registry.npmjs.org/",
"access": "public"
},
"repository": "https://github.com/HerringtonDarkholme/ast-grep"
}
@@ -0,0 +1,3 @@
# `@napi-rs/package-template-linux-arm-gnueabihf`
This is the **armv7-unknown-linux-gnueabihf** binary for `@napi-rs/package-template`
@@ -0,0 +1,25 @@
{
"name": "ast-grep-napi-linux-arm-gnueabihf",
"version": "1.0.0",
"os": [
"linux"
],
"cpu": [
"arm"
],
"main": "ast-grep-napi.linux-arm-gnueabihf.node",
"files": [
"ast-grep-napi.linux-arm-gnueabihf.node"
],
"description": "Search and Rewrite code at large scale using precise AST pattern",
"keywords": ["ast", "pattern", "codemod", "search", "rewrite"],
"license": "MIT",
"engines": {
"node": ">= 10"
},
"publishConfig": {
"registry": "https://registry.npmjs.org/",
"access": "public"
},
"repository": "https://github.com/HerringtonDarkholme/ast-grep"
}
@@ -0,0 +1,3 @@
# `@napi-rs/package-template-linux-arm64-gnu`
This is the **aarch64-unknown-linux-gnu** binary for `@napi-rs/package-template`
@@ -0,0 +1,28 @@
{
"name": "ast-grep-napi-linux-arm64-gnu",
"version": "1.0.0",
"os": [
"linux"
],
"cpu": [
"arm64"
],
"libc": [
"glibc"
],
"main": "ast-grep-napi.linux-arm64-gnu.node",
"files": [
"ast-grep-napi.linux-arm64-gnu.node"
],
"description": "Search and Rewrite code at large scale using precise AST pattern",
"keywords": ["ast", "pattern", "codemod", "search", "rewrite"],
"license": "MIT",
"engines": {
"node": ">= 10"
},
"publishConfig": {
"registry": "https://registry.npmjs.org/",
"access": "public"
},
"repository": "https://github.com/HerringtonDarkholme/ast-grep"
}
@@ -0,0 +1,3 @@
# `@napi-rs/package-template-linux-arm64-musl`
This is the **aarch64-unknown-linux-musl** binary for `@napi-rs/package-template`
@@ -0,0 +1,28 @@
{
"name": "ast-grep-napi-linux-arm64-musl",
"version": "1.0.0",
"os": [
"linux"
],
"cpu": [
"arm64"
],
"libc": [
"musl"
],
"main": "ast-grep-napi.linux-arm64-musl.node",
"files": [
"ast-grep-napi.linux-arm64-musl.node"
],
"description": "Search and Rewrite code at large scale using precise AST pattern",
"keywords": ["ast", "pattern", "codemod", "search", "rewrite"],
"license": "MIT",
"engines": {
"node": ">= 10"
},
"publishConfig": {
"registry": "https://registry.npmjs.org/",
"access": "public"
},
"repository": "https://github.com/HerringtonDarkholme/ast-grep"
}
+3
View File
@@ -0,0 +1,3 @@
# `@napi-rs/package-template-linux-x64-gnu`
This is the **x86_64-unknown-linux-gnu** binary for `@napi-rs/package-template`
@@ -0,0 +1,28 @@
{
"name": "ast-grep-napi-linux-x64-gnu",
"version": "1.0.0",
"os": [
"linux"
],
"cpu": [
"x64"
],
"libc": [
"glibc"
],
"main": "ast-grep-napi.linux-x64-gnu.node",
"files": [
"ast-grep-napi.linux-x64-gnu.node"
],
"description": "Search and Rewrite code at large scale using precise AST pattern",
"keywords": ["ast", "pattern", "codemod", "search", "rewrite"],
"license": "MIT",
"engines": {
"node": ">= 10"
},
"publishConfig": {
"registry": "https://registry.npmjs.org/",
"access": "public"
},
"repository": "https://github.com/HerringtonDarkholme/ast-grep"
}
+3
View File
@@ -0,0 +1,3 @@
# `@napi-rs/package-template-linux-x64-musl`
This is the **x86_64-unknown-linux-musl** binary for `@napi-rs/package-template`
@@ -0,0 +1,28 @@
{
"name": "ast-grep-napi-linux-x64-musl",
"version": "1.0.0",
"os": [
"linux"
],
"cpu": [
"x64"
],
"libc": [
"musl"
],
"main": "ast-grep-napi.linux-x64-musl.node",
"files": [
"ast-grep-napi.linux-x64-musl.node"
],
"description": "Search and Rewrite code at large scale using precise AST pattern",
"keywords": ["ast", "pattern", "codemod", "search", "rewrite"],
"license": "MIT",
"engines": {
"node": ">= 10"
},
"publishConfig": {
"registry": "https://registry.npmjs.org/",
"access": "public"
},
"repository": "https://github.com/HerringtonDarkholme/ast-grep"
}
@@ -0,0 +1,3 @@
# `@napi-rs/package-template-win32-arm64-msvc`
This is the **aarch64-pc-windows-msvc** binary for `@napi-rs/package-template`
@@ -0,0 +1,25 @@
{
"name": "ast-grep-napi-win32-arm64-msvc",
"version": "1.0.0",
"os": [
"win32"
],
"cpu": [
"arm64"
],
"main": "ast-grep-napi.win32-arm64-msvc.node",
"files": [
"ast-grep-napi.win32-arm64-msvc.node"
],
"description": "Search and Rewrite code at large scale using precise AST pattern",
"keywords": ["ast", "pattern", "codemod", "search", "rewrite"],
"license": "MIT",
"engines": {
"node": ">= 10"
},
"publishConfig": {
"registry": "https://registry.npmjs.org/",
"access": "public"
},
"repository": "https://github.com/HerringtonDarkholme/ast-grep"
}
@@ -0,0 +1,3 @@
# `@napi-rs/package-template-win32-ia32-msvc`
This is the **i686-pc-windows-msvc** binary for `@napi-rs/package-template`
@@ -0,0 +1,25 @@
{
"name": "ast-grep-napi-win32-ia32-msvc",
"version": "1.0.0",
"os": [
"win32"
],
"cpu": [
"ia32"
],
"main": "ast-grep-napi.win32-ia32-msvc.node",
"files": [
"ast-grep-napi.win32-ia32-msvc.node"
],
"description": "Search and Rewrite code at large scale using precise AST pattern",
"keywords": ["ast", "pattern", "codemod", "search", "rewrite"],
"license": "MIT",
"engines": {
"node": ">= 10"
},
"publishConfig": {
"registry": "https://registry.npmjs.org/",
"access": "public"
},
"repository": "https://github.com/HerringtonDarkholme/ast-grep"
}
+3
View File
@@ -0,0 +1,3 @@
# `@napi-rs/package-template-win32-x64-msvc`
This is the **x86_64-pc-windows-msvc** binary for `@napi-rs/package-template`
@@ -0,0 +1,25 @@
{
"name": "ast-grep-napi-win32-x64-msvc",
"version": "1.0.0",
"os": [
"win32"
],
"cpu": [
"x64"
],
"main": "ast-grep-napi.win32-x64-msvc.node",
"files": [
"ast-grep-napi.win32-x64-msvc.node"
],
"description": "Search and Rewrite code at large scale using precise AST pattern",
"keywords": ["ast", "pattern", "codemod", "search", "rewrite"],
"license": "MIT",
"engines": {
"node": ">= 10"
},
"publishConfig": {
"registry": "https://registry.npmjs.org/",
"access": "public"
},
"repository": "https://github.com/HerringtonDarkholme/ast-grep"
}
+73
View File
@@ -0,0 +1,73 @@
{
"name": "ast-grep-napi",
"version": "0.1.1",
"description": "Search and Rewrite code at large scale using precise AST pattern",
"main": "index.js",
"repository": "https://github.com/HerringtonDarkholme/ast-grep",
"license": "MIT",
"keywords": [
"ast",
"pattern",
"codemod",
"search",
"rewrite"
],
"files": [
"index.d.ts",
"index.js"
],
"napi": {
"name": "ast-grep-napi",
"triples": {
"defaults": true,
"additional": [
"x86_64-unknown-linux-musl",
"aarch64-unknown-linux-gnu",
"i686-pc-windows-msvc",
"armv7-unknown-linux-gnueabihf",
"aarch64-apple-darwin",
"aarch64-linux-android",
"x86_64-unknown-freebsd",
"aarch64-unknown-linux-musl",
"aarch64-pc-windows-msvc",
"armv7-linux-androideabi"
]
}
},
"engines": {
"node": ">= 10"
},
"publishConfig": {
"registry": "https://registry.npmjs.org/",
"access": "public"
},
"scripts": {
"artifacts": "napi artifacts",
"build": "napi build --platform --release",
"build:debug": "napi build --platform",
"prepublishOnly": "napi prepublish -t npm",
"test": "ava",
"version": "napi version"
},
"devDependencies": {
"@napi-rs/cli": "^2.11.4",
"@swc-node/register": "^1.5.1",
"ava": "^4.3.3",
"chalk": "^5.0.1",
"typescript": "^4.8.2"
},
"ava": {
"require": [
"@swc-node/register"
],
"extensions": [
"ts"
],
"timeout": "2m",
"workerThreads": false,
"environmentVariables": {
"TS_NODE_PROJECT": "./tsconfig.json"
}
},
"packageManager": "yarn@3.2.3"
}
+50
View File
@@ -0,0 +1,50 @@
#![deny(clippy::all)]
use ast_grep_core::language::{Language, TSLanguage};
use ast_grep_core::{NodeMatch, Pattern};
use napi_derive::napi;
/// Language marker for TypeScript's TSX dialect.
#[derive(Clone)]
pub struct Tsx;
impl Language for Tsx {
  // Supplies ast-grep with the tree-sitter TSX grammar.
  fn get_ts_language(&self) -> TSLanguage {
    tree_sitter_typescript::language_tsx().into()
  }
}
/// A 0-based (row, col) source position, exposed to JavaScript as a plain object.
#[napi(object)]
pub struct Pos {
  pub row: u32,
  pub col: u32,
}
/// Convert ast-grep's `(row, col)` tuple into the JS-facing `Pos` struct.
fn from_tuple(pos: (usize, usize)) -> Pos {
  let (row, col) = pos;
  Pos {
    row: row as u32,
    col: col as u32,
  }
}
/// Start and end positions of a single pattern match, returned to JavaScript.
#[napi(object)]
pub struct MatchResult {
  pub start: Pos,
  pub end: Pos,
}
/// Build a `MatchResult` by converting the matched node's span endpoints.
impl<L: Language> From<NodeMatch<'_, L>> for MatchResult {
  fn from(m: NodeMatch<L>) -> Self {
    let start = from_tuple(m.start_pos());
    let end = from_tuple(m.end_pos());
    Self { start, end }
  }
}
/// Parse `src` as TSX and return the start/end span of every node
/// matching `pattern`.
#[napi]
pub fn find_nodes(src: String, pattern: String) -> Vec<MatchResult> {
  let matcher = Pattern::new(&pattern, Tsx);
  let ast = Tsx.ast_grep(src);
  let mut results = Vec::new();
  for node_match in ast.root().find_all(matcher) {
    results.push(MatchResult::from(node_match));
  }
  results
}
+14
View File
@@ -0,0 +1,14 @@
{
"compilerOptions": {
"target": "ES2018",
"strict": true,
"moduleResolution": "node",
"module": "CommonJS",
"noUnusedLocals": true,
"noUnusedParameters": true,
"esModuleInterop": true,
"allowSyntheticDefaultImports": true
},
"include": ["."],
"exclude": ["node_modules"]
}
File diff suppressed because it is too large Load Diff
+27 -27
View File
@@ -12,44 +12,44 @@ static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT;
/// Start and end of one match result, serde-serializable across the wasm boundary.
#[derive(Serialize, Deserialize)]
pub struct MatchResult {
  pub start: usize,
  pub end: usize,
}
/// Load the tree-sitter grammar at `parser_path`, deserialize the rule
/// `config`, run it over `src`, and return all match spans debug-formatted
/// as a string.
#[wasm_bindgen]
pub async fn find_nodes(
  src: String,
  config: JsValue,
  parser_path: String,
) -> Result<String, JsError> {
  tree_sitter::TreeSitter::init().await?;
  let mut parser = tree_sitter::Parser::new()?;
  let lang = get_lang(parser_path).await?;
  parser.set_language(&lang).expect_throw("set lang");
  let config: SerializableRule = config.into_serde()?;
  let root = lang.ast_grep(src);
  let matcher = deserialize_rule(config, lang)?;
  let ret: Vec<_> = root
    .root()
    .find_all(matcher)
    .map(|n| {
      // each match flattens to [start_row, start_col, end_row, end_col]
      let start = n.start_pos();
      let end = n.end_pos();
      vec![start.0, start.1, end.0, end.1]
    })
    .collect();
  Ok(format!("{:?}", ret))
}
/// wasm32 path: asynchronously load a tree-sitter grammar from `parser_path`
/// through web-tree-sitter.
#[cfg(target_arch = "wasm32")]
async fn get_lang(parser_path: String) -> Result<tree_sitter::Language, JsError> {
  let lang = web_tree_sitter_sys::Language::load_path(&parser_path)
    .await
    .map_err(tree_sitter::LanguageError::from)?;
  Ok(tree_sitter::Language::from(lang))
}
/// Non-wasm stub kept only so the crate type-checks on host targets;
/// this path is never taken at runtime.
#[cfg(not(target_arch = "wasm32"))]
async fn get_lang(_path: String) -> Result<tree_sitter::Language, JsError> {
  unreachable!()
}
+7 -7
View File
@@ -1,10 +1,10 @@
#[cfg(feature = "console_error_panic_hook")]
pub fn set_panic_hook() {
  // When the `console_error_panic_hook` feature is enabled, we can call the
  // `set_panic_hook` function at least once during initialization, and then
  // we will get better error messages if our code ever panics.
  //
  // For more details see
  // https://github.com/rustwasm/console_error_panic_hook#readme
  console_error_panic_hook::set_once();
}
+1 -1
View File
@@ -8,5 +8,5 @@ wasm_bindgen_test_configure!(run_in_browser);
/// Sanity check that the wasm-bindgen test harness runs at all.
#[wasm_bindgen_test]
fn pass() {
  assert_eq!(1 + 1, 2);
}
+1
View File
@@ -0,0 +1 @@
tab_spaces = 2