mirror of
https://github.com/clockworklabs/SpacetimeDB.git
synced 2026-05-11 10:29:21 -04:00
3d3c99f8db
# Description of Changes This PR shrinks `JsWorkerRequest` so that it is (almost) as small as the call reducer request. To do that, a bunch of trivial changes had to be made to the auth code, mostly revolving around `String` -> `Box<str>`. This should help the auth code, but that is incidental. The main goal was to improve throughput through the request tx/rx channel for V8, which is taking quite a bit of time in flamegraphs. I also noticed while making this change that the wrong hash map was being used in a bunch of places, so I fixed all of those. A follow-up PR will shrink the reply side to fit within a cache line. Yet another follow-up PR will change the channel to replace flume with `fibre::spsc`. # API and ABI breaking changes None # Expected complexity level and risk 2, fairly trivial changes. # Testing Covered by existing tests.
654 lines
25 KiB
Rust
654 lines
25 KiB
Rust
use std::collections::BTreeMap;
|
|
use std::fs;
|
|
use std::io::{self, Write};
|
|
use std::path::{Path, PathBuf};
|
|
use std::process::Command;
|
|
use toml::Value;
|
|
|
|
fn main() {
|
|
let git_hash = find_git_hash();
|
|
println!("cargo:rustc-env=GIT_HASH={git_hash}");
|
|
|
|
generate_template_files();
|
|
}
|
|
|
|
/// Returns the git commit hash injected by the Nix build, if any.
///
/// Our flake.nix sets `SPACETIMEDB_NIX_BUILD_GIT_COMMIT` during the build.
/// This matters because git metadata is otherwise unavailable inside the
/// Nix build sandbox, and the `git` command-line tool is not installed there.
fn nix_injected_commit_hash() -> Option<String> {
    use std::env::VarError;
    match std::env::var("SPACETIMEDB_NIX_BUILD_GIT_COMMIT") {
        // Var is set: we are building under Nix.
        Ok(sha) => Some(sha),
        // Var is unset: a regular (non-Nix) build.
        Err(VarError::NotPresent) => None,
        // Var is set but not valid unicode: something is very wrong.
        Err(VarError::NotUnicode(gross)) => {
            panic!("Injected commit hash is not valid unicode: {gross:?}")
        }
    }
}
|
|
|
|
fn is_nix_build() -> bool {
|
|
nix_injected_commit_hash().is_some()
|
|
}
|
|
|
|
fn find_git_hash() -> String {
|
|
nix_injected_commit_hash().unwrap_or_else(|| {
|
|
// When we're *not* building in Nix, we can assume that git metadata is still present in the filesystem,
|
|
// and that the git command-line tool is installed.
|
|
let output = Command::new("git").args(["rev-parse", "HEAD"]).output().unwrap();
|
|
String::from_utf8(output.stdout).unwrap().trim().to_string()
|
|
})
|
|
}
|
|
|
|
/// Absolute path of this crate's directory, as reported by Cargo via the
/// `CARGO_MANIFEST_DIR` environment variable.
fn get_manifest_dir() -> PathBuf {
    std::env::var("CARGO_MANIFEST_DIR").map(PathBuf::from).unwrap()
}
|
|
|
|
// This method generates functions with data used in `spacetime init`:
|
|
//
|
|
// * `get_templates_json` - returns contents of the JSON file with the list of templates
|
|
// * `get_template_files` - returns a HashMap with templates contents based on the
|
|
// templates list at templates/templates-list.json
|
|
// * `get_ai_rules_base` - returns base AI rules for all languages
|
|
// * `get_ai_rules_typescript` - returns TypeScript-specific AI rules
|
|
// * `get_ai_rules_rust` - returns Rust-specific AI rules
|
|
// * `get_ai_rules_csharp` - returns C#-specific AI rules
|
|
fn generate_template_files() {
|
|
let manifest_dir = get_manifest_dir();
|
|
let repo_root = get_repo_root();
|
|
let templates_dir = repo_root.join("templates");
|
|
let out_dir = std::env::var("OUT_DIR").unwrap();
|
|
let dest_path = Path::new(&out_dir).join("embedded_templates.rs");
|
|
|
|
println!("cargo:rerun-if-changed=../../templates");
|
|
|
|
let discovered_templates = discover_templates(&templates_dir);
|
|
|
|
let mut generated_code = String::new();
|
|
generated_code.push_str("use spacetimedb_data_structures::map::HashMap;\n\n");
|
|
|
|
generated_code.push_str("pub fn get_templates_json() -> &'static str {\n");
|
|
generated_code.push_str(" r#\"");
|
|
generated_code.push_str(&generate_templates_json(&discovered_templates));
|
|
generated_code.push_str("\"#\n");
|
|
generated_code.push_str("}\n\n");
|
|
|
|
generated_code
|
|
.push_str("pub fn get_template_files() -> HashMap<&'static str, HashMap<&'static str, &'static str>> {\n");
|
|
generated_code.push_str(" let mut templates = HashMap::new();\n\n");
|
|
|
|
for template in &discovered_templates {
|
|
if let Some(ref server_source) = template.server_source {
|
|
let server_path = PathBuf::from(server_source);
|
|
generate_template_entry(&mut generated_code, &server_path, server_source, &manifest_dir);
|
|
}
|
|
|
|
if let Some(ref client_source) = template.client_source {
|
|
let client_path = PathBuf::from(client_source);
|
|
generate_template_entry(&mut generated_code, &client_path, client_source, &manifest_dir);
|
|
}
|
|
}
|
|
|
|
generated_code.push_str(" templates\n");
|
|
generated_code.push_str("}\n\n");
|
|
|
|
let repo_root = get_repo_root();
|
|
let workspace_cargo = repo_root.join("Cargo.toml");
|
|
println!("cargo:rerun-if-changed={}", workspace_cargo.display());
|
|
|
|
let (workspace_edition, workspace_versions) =
|
|
extract_workspace_metadata(&workspace_cargo).expect("Failed to extract workspace metadata");
|
|
|
|
let ts_bindings_package = repo_root.join("crates/bindings-typescript/package.json");
|
|
println!("cargo:rerun-if-changed={}", ts_bindings_package.display());
|
|
let ts_bindings_version =
|
|
extract_ts_bindings_version(&ts_bindings_package).expect("Failed to read TypeScript bindings version");
|
|
|
|
// Embed AI rules files from docs/static/ai-rules/
|
|
let ai_rules_dir = repo_root.join("docs/static/ai-rules");
|
|
|
|
// Base rules (all languages)
|
|
let base_rules_path = ai_rules_dir.join("spacetimedb.mdc");
|
|
if base_rules_path.exists() {
|
|
generated_code.push_str("pub fn get_ai_rules_base() -> &'static str {\n");
|
|
generated_code.push_str(&format!(
|
|
" include_str!(\"{}\")\n",
|
|
base_rules_path.to_str().unwrap().replace('\\', "\\\\")
|
|
));
|
|
generated_code.push_str("}\n\n");
|
|
println!("cargo:rerun-if-changed={}", base_rules_path.display());
|
|
} else {
|
|
panic!("Could not find \"docs/static/ai-rules/spacetimedb.mdc\" file.");
|
|
}
|
|
|
|
// TypeScript-specific rules
|
|
let ts_rules_path = ai_rules_dir.join("spacetimedb-typescript.mdc");
|
|
if ts_rules_path.exists() {
|
|
generated_code.push_str("pub fn get_ai_rules_typescript() -> &'static str {\n");
|
|
generated_code.push_str(&format!(
|
|
" include_str!(\"{}\")\n",
|
|
ts_rules_path.to_str().unwrap().replace('\\', "\\\\")
|
|
));
|
|
generated_code.push_str("}\n\n");
|
|
println!("cargo:rerun-if-changed={}", ts_rules_path.display());
|
|
} else {
|
|
panic!("Could not find \"docs/static/ai-rules/spacetimedb-typescript.mdc\" file.");
|
|
}
|
|
|
|
// Rust-specific rules
|
|
let rust_rules_path = ai_rules_dir.join("spacetimedb-rust.mdc");
|
|
if rust_rules_path.exists() {
|
|
generated_code.push_str("pub fn get_ai_rules_rust() -> &'static str {\n");
|
|
generated_code.push_str(&format!(
|
|
" include_str!(\"{}\")\n",
|
|
rust_rules_path.to_str().unwrap().replace('\\', "\\\\")
|
|
));
|
|
generated_code.push_str("}\n\n");
|
|
println!("cargo:rerun-if-changed={}", rust_rules_path.display());
|
|
} else {
|
|
panic!("Could not find \"docs/static/ai-rules/spacetimedb-rust.mdc\" file.");
|
|
}
|
|
|
|
// C#-specific rules
|
|
let csharp_rules_path = ai_rules_dir.join("spacetimedb-csharp.mdc");
|
|
if csharp_rules_path.exists() {
|
|
generated_code.push_str("pub fn get_ai_rules_csharp() -> &'static str {\n");
|
|
generated_code.push_str(&format!(
|
|
" include_str!(\"{}\")\n",
|
|
csharp_rules_path.to_str().unwrap().replace('\\', "\\\\")
|
|
));
|
|
generated_code.push_str("}\n\n");
|
|
println!("cargo:rerun-if-changed={}", csharp_rules_path.display());
|
|
} else {
|
|
panic!("Could not find \"docs/static/ai-rules/spacetimedb-csharp.mdc\" file.");
|
|
}
|
|
|
|
// Expose workspace metadata so `spacetime init` can rewrite template manifests without hardcoding versions.
|
|
generated_code.push_str("pub fn get_workspace_edition() -> &'static str {\n");
|
|
generated_code.push_str(&format!(" \"{}\"\n", workspace_edition.escape_default()));
|
|
generated_code.push_str("}\n\n");
|
|
|
|
generated_code.push_str("pub fn get_workspace_dependency_version(name: &str) -> Option<&'static str> {\n");
|
|
generated_code.push_str(" match name {\n");
|
|
for (name, version) in &workspace_versions {
|
|
generated_code.push_str(&format!(
|
|
" \"{}\" => Some(\"{}\"),\n",
|
|
name.escape_default(),
|
|
version.escape_default()
|
|
));
|
|
}
|
|
generated_code.push_str(" _ => None,\n");
|
|
generated_code.push_str(" }\n");
|
|
generated_code.push_str("}\n");
|
|
|
|
generated_code.push('\n');
|
|
generated_code.push_str("pub fn get_typescript_bindings_version() -> &'static str {\n");
|
|
generated_code.push_str(&format!(" \"{}\"\n", ts_bindings_version.escape_default()));
|
|
generated_code.push_str("}\n");
|
|
|
|
write_if_changed(&dest_path, generated_code.as_bytes()).expect("Failed to write embedded_templates.rs");
|
|
}
|
|
|
|
/// A template discovered under the repo's `templates/` directory,
/// built from the directory name and its `.template.json` metadata.
#[derive(Debug)]
struct TemplateInfo {
    /// Directory name of the template; doubles as its identifier.
    id: String,
    /// Human-readable description, copied from `.template.json`.
    description: String,
    /// Repo-relative source dir for the server module (`"{id}/spacetimedb"`);
    /// populated only when the metadata declares a `server_lang`.
    server_source: Option<String>,
    /// Repo-relative source dir for the client (the template dir itself);
    /// populated only when the metadata declares a `client_lang`.
    client_source: Option<String>,
    /// Server language string, copied verbatim from `.template.json`.
    server_lang: Option<String>,
    /// Client language string, copied verbatim from `.template.json`.
    client_lang: Option<String>,
}
|
|
|
|
/// Deserialized shape of a template's `.template.json` metadata file.
#[derive(serde::Deserialize)]
struct TemplateMetadata {
    /// Human-readable description of the template.
    description: String,
    /// Client language, present when the template ships a client part.
    client_lang: Option<String>,
    /// Server language, present when the template ships a server module.
    server_lang: Option<String>,
}
|
|
|
|
fn discover_templates(templates_dir: &Path) -> Vec<TemplateInfo> {
|
|
let mut templates = Vec::new();
|
|
|
|
let entries = match fs::read_dir(templates_dir) {
|
|
Ok(entries) => entries,
|
|
Err(_) => return templates,
|
|
};
|
|
|
|
for entry in entries.flatten() {
|
|
let path = entry.path();
|
|
if !path.is_dir() {
|
|
continue;
|
|
}
|
|
|
|
let template_name = match path.file_name().and_then(|n| n.to_str()) {
|
|
Some(name) => name,
|
|
None => continue,
|
|
};
|
|
|
|
let metadata_path = path.join(".template.json");
|
|
if !metadata_path.exists() {
|
|
continue;
|
|
}
|
|
|
|
let metadata_content = match fs::read_to_string(&metadata_path) {
|
|
Ok(content) => content,
|
|
Err(_) => continue,
|
|
};
|
|
|
|
let metadata: TemplateMetadata = match serde_json::from_str(&metadata_content) {
|
|
Ok(meta) => meta,
|
|
Err(_) => continue,
|
|
};
|
|
|
|
let server_source = if metadata.server_lang.is_some() {
|
|
Some(format!("{}/spacetimedb", template_name))
|
|
} else {
|
|
None
|
|
};
|
|
|
|
let client_source = if metadata.client_lang.is_some() {
|
|
Some(template_name.to_string())
|
|
} else {
|
|
None
|
|
};
|
|
|
|
if server_source.is_some() || client_source.is_some() {
|
|
templates.push(TemplateInfo {
|
|
id: template_name.to_string(),
|
|
description: metadata.description,
|
|
server_source,
|
|
client_source,
|
|
server_lang: metadata.server_lang,
|
|
client_lang: metadata.client_lang,
|
|
});
|
|
}
|
|
}
|
|
|
|
templates.sort_by(|a, b| a.id.cmp(&b.id));
|
|
templates
|
|
}
|
|
|
|
fn generate_templates_json(templates: &[TemplateInfo]) -> String {
|
|
let mut json = String::from("{\n \"highlights\": [\n");
|
|
|
|
for template in templates {
|
|
if template.id.contains("react") {
|
|
json.push_str(" { \"name\": \"React\", \"template_id\": \"");
|
|
json.push_str(&template.id);
|
|
json.push_str("\" }\n");
|
|
break;
|
|
}
|
|
}
|
|
|
|
json.push_str(" ],\n \"templates\": [\n");
|
|
|
|
for (i, template) in templates.iter().enumerate() {
|
|
json.push_str(" {\n");
|
|
json.push_str(&format!(" \"id\": \"{}\",\n", template.id));
|
|
json.push_str(&format!(" \"description\": \"{}\",\n", template.description));
|
|
|
|
if let Some(ref server_source) = template.server_source {
|
|
json.push_str(&format!(" \"server_source\": \"{}\",\n", server_source));
|
|
} else {
|
|
json.push_str(" \"server_source\": \"\",\n");
|
|
}
|
|
|
|
if let Some(ref client_source) = template.client_source {
|
|
json.push_str(&format!(" \"client_source\": \"{}\",\n", client_source));
|
|
} else {
|
|
json.push_str(" \"client_source\": \"\",\n");
|
|
}
|
|
|
|
if let Some(ref server_lang) = template.server_lang {
|
|
json.push_str(&format!(" \"server_lang\": \"{}\",\n", server_lang));
|
|
} else {
|
|
json.push_str(" \"server_lang\": \"\",\n");
|
|
}
|
|
|
|
if let Some(ref client_lang) = template.client_lang {
|
|
json.push_str(&format!(" \"client_lang\": \"{}\"", client_lang));
|
|
} else {
|
|
json.push_str(" \"client_lang\": \"\"");
|
|
}
|
|
|
|
json.push_str("\n }");
|
|
if i < templates.len() - 1 {
|
|
json.push(',');
|
|
}
|
|
json.push('\n');
|
|
}
|
|
|
|
json.push_str(" ]\n}");
|
|
json
|
|
}
|
|
|
|
fn generate_template_entry(code: &mut String, template_path: &Path, source: &str, manifest_dir: &Path) {
|
|
let (git_files, resolved_base) = get_git_tracked_files(template_path, manifest_dir);
|
|
|
|
if git_files.is_empty() {
|
|
panic!("Template '{}' has no git-tracked files! Check that the directory exists and contains files tracked by git.", source);
|
|
}
|
|
|
|
// Example: /Users/user/SpacetimeDB
|
|
let repo_root = get_repo_root();
|
|
let repo_root_canonical = std::fs::canonicalize(&repo_root).unwrap();
|
|
// Example: /Users/user/SpacetimeDB/crates/cli
|
|
let manifest_canonical = Path::new(manifest_dir).canonicalize().unwrap();
|
|
// Example: crates/cli
|
|
let manifest_rel = manifest_canonical.strip_prefix(&repo_root_canonical).unwrap();
|
|
|
|
// Example for inside crate: /Users/user/SpacetimeDB/crates/cli/templates/basic-rs/server
|
|
// Example for outside crate: /Users/user/SpacetimeDB/modules/chat-console-rs
|
|
let resolved_canonical = repo_root.join(&resolved_base).canonicalize().unwrap();
|
|
|
|
// If the files are outside of the cli crate we need to copy them to the crate directory,
|
|
// so they're included properly even when the crate is published
|
|
let local_copy_dir = if resolved_canonical.strip_prefix(&manifest_canonical).is_err() {
|
|
// Example source: "../../modules/quickstart-chat"
|
|
// Sanitized: "parent_parent_modules_quickstart-chat"
|
|
let sanitized_source = source.replace("/", "_").replace("\\", "_").replace("..", "parent");
|
|
// Example: /Users/user/SpacetimeDB/crates/cli/.templates/parent_parent_modules_quickstart-chat
|
|
let copy_dir = Path::new(manifest_dir).join(".templates").join(&sanitized_source);
|
|
fs::create_dir_all(©_dir).expect("Failed to create .templates directory");
|
|
|
|
Some(copy_dir)
|
|
} else {
|
|
None
|
|
};
|
|
|
|
code.push_str(" {\n");
|
|
code.push_str(" let mut files = HashMap::new();\n");
|
|
|
|
for file_path in git_files {
|
|
// Example file_path: modules/chat-console-rs/src/lib.rs (relative to repo root)
|
|
// Example resolved_base: modules/chat-console-rs
|
|
// Example relative_path: src/lib.rs
|
|
let relative_path = match file_path.strip_prefix(&resolved_base) {
|
|
Ok(p) => p,
|
|
Err(_) => {
|
|
eprintln!(
|
|
"Warning: Could not strip prefix '{}' from '{}' for source '{}'",
|
|
resolved_base.display(),
|
|
file_path.display(),
|
|
source
|
|
);
|
|
continue;
|
|
}
|
|
};
|
|
// Example: "src/lib.rs"
|
|
let relative_str = relative_path.to_str().unwrap().replace("\\", "/");
|
|
|
|
// Example: /Users/user/SpacetimeDB/modules/quickstart-chat/src/lib.rs
|
|
let full_path = repo_root.join(&file_path);
|
|
if full_path.exists() && full_path.is_file() {
|
|
let include_path = if let Some(ref copy_dir) = local_copy_dir {
|
|
// Outside crate: copy to .templates
|
|
// Example dest_file: /Users/user/SpacetimeDB/crates/cli/.templates/parent_parent_modules_chat-console-rs/src/lib.rs
|
|
let dest_file = copy_dir.join(relative_path);
|
|
fs::create_dir_all(dest_file.parent().unwrap()).expect("Failed to create parent directory");
|
|
copy_if_changed(&full_path, &dest_file)
|
|
.unwrap_or_else(|_| panic!("Failed to copy file {:?} to {:?}", full_path, dest_file));
|
|
|
|
// Example relative_to_manifest: .templates/parent_parent_modules_chat-console-rs/src/lib.rs
|
|
let relative_to_manifest = dest_file.strip_prefix(manifest_dir).unwrap();
|
|
let path_str = relative_to_manifest.to_str().unwrap().replace("\\", "/");
|
|
// Watch the original file for changes
|
|
// Example: modules/chat-console-rs/src/lib.rs
|
|
println!("cargo:rerun-if-changed={}", full_path.display());
|
|
path_str
|
|
} else {
|
|
// Inside crate: use path relative to CARGO_MANIFEST_DIR
|
|
// Example file_path: crates/cli/templates/basic-rs/server/src/lib.rs
|
|
// Example manifest_rel: crates/cli
|
|
// Result: templates/basic-rs/server/src/lib.rs
|
|
let relative_to_manifest = file_path.strip_prefix(manifest_rel).unwrap();
|
|
let path_str = relative_to_manifest.to_str().unwrap().replace("\\", "/");
|
|
// Example: crates/cli/templates/basic-rs/server/src/lib.rs
|
|
println!("cargo:rerun-if-changed={}", full_path.display());
|
|
path_str
|
|
};
|
|
|
|
// Example include_path (inside crate): "templates/basic-rs/server/src/lib.rs"
|
|
// Example include_path (outside crate): ".templates/parent_parent_modules_chat-console-rs/src/lib.rs"
|
|
// Example relative_str: "src/lib.rs"
|
|
code.push_str(&format!(
|
|
" files.insert(\"{}\", include_str!(concat!(env!(\"CARGO_MANIFEST_DIR\"), \"/{}\")));\n",
|
|
relative_str, include_path
|
|
));
|
|
}
|
|
}
|
|
|
|
code.push_str(&format!(" templates.insert(\"{}\", files);\n", source));
|
|
code.push_str(" }\n\n");
|
|
}
|
|
|
|
/// Get a list of files tracked by git from a given directory
|
|
fn get_git_tracked_files(path: &Path, manifest_dir: &Path) -> (Vec<PathBuf>, PathBuf) {
|
|
if is_nix_build() {
|
|
// When building in Nix, we already know that there are no untracked files in our source tree,
|
|
// so we just list all of the files.
|
|
list_all_files(path, manifest_dir)
|
|
} else {
|
|
// When building outside of Nix, we invoke `git` to list all the tracked files.
|
|
get_git_tracked_files_via_cli(path, manifest_dir)
|
|
}
|
|
}
|
|
|
|
fn list_all_files(path: &Path, manifest_dir: &Path) -> (Vec<PathBuf>, PathBuf) {
|
|
let manifest_dir = manifest_dir.canonicalize().unwrap_or_else(|err| {
|
|
panic!(
|
|
"Failed to canonicalize manifest_dir path {}: {err:#?}",
|
|
manifest_dir.display()
|
|
)
|
|
});
|
|
|
|
let template_root_absolute = get_full_path_within_manifest_dir(path, &manifest_dir);
|
|
|
|
let repo_root = get_repo_root();
|
|
|
|
let mut files = Vec::new();
|
|
ls_recursively(&template_root_absolute, &repo_root, &mut files);
|
|
|
|
(files, make_repo_root_relative(&template_root_absolute, &repo_root))
|
|
}
|
|
|
|
/// Get all the paths of files within `root_dir`,
|
|
/// transform them into paths relative to `repo_root`,
|
|
/// and insert them into `out`.
|
|
fn ls_recursively(root_dir: &Path, repo_root: &Path, out: &mut Vec<PathBuf>) {
|
|
for dir_ent in std::fs::read_dir(root_dir).unwrap_or_else(|err| {
|
|
panic!(
|
|
"Failed to read_dir from template directory {}: {err:#?}",
|
|
root_dir.display()
|
|
)
|
|
}) {
|
|
let dir_ent = dir_ent.unwrap_or_else(|err| {
|
|
panic!(
|
|
"Got error during read_dir from template directory {}: {err:#?}",
|
|
root_dir.display(),
|
|
)
|
|
});
|
|
let file_path = dir_ent.path();
|
|
let file_type = dir_ent.file_type().unwrap_or_else(|err| {
|
|
panic!(
|
|
"Failed to get file_type for template file {}: {err:#?}",
|
|
file_path.display(),
|
|
)
|
|
});
|
|
if file_type.is_dir() {
|
|
ls_recursively(&file_path, repo_root, out);
|
|
} else {
|
|
out.push(make_repo_root_relative(&file_path, repo_root));
|
|
}
|
|
}
|
|
}
|
|
|
|
/// Treat `relative_path` as a relative path within the repo root's templates directory
|
|
/// and transform it into an absolute, canonical path.
|
|
fn get_full_path_within_manifest_dir(relative_path: &Path, _manifest_dir: &Path) -> PathBuf {
|
|
let repo_root = get_repo_root();
|
|
let full_path = repo_root.join("templates").join(relative_path);
|
|
|
|
full_path.canonicalize().unwrap_or_else(|e| {
|
|
panic!("Failed to canonicalize path {}: {}", full_path.display(), e);
|
|
})
|
|
}
|
|
|
|
/// Transform `full_path` into a relative path within `repo_root`.
///
/// `full_path` and `repo_root` should both be canonical paths, as by [`Path::canonicalize`].
fn make_repo_root_relative(full_path: &Path, repo_root: &Path) -> PathBuf {
    match full_path.strip_prefix(repo_root) {
        Ok(rel) => rel.to_path_buf(),
        Err(_) => panic!(
            "Path {} is outside repo root {}",
            full_path.display(),
            repo_root.display()
        ),
    }
}
|
|
|
|
fn get_git_tracked_files_via_cli(path: &Path, manifest_dir: &Path) -> (Vec<PathBuf>, PathBuf) {
|
|
let repo_root = get_repo_root();
|
|
let repo_root = repo_root.canonicalize().unwrap_or_else(|err| {
|
|
panic!(
|
|
"Failed to canonicalize repo_root path {}: {err:#?}",
|
|
repo_root.display(),
|
|
)
|
|
});
|
|
|
|
let resolved_path = make_repo_root_relative(&get_full_path_within_manifest_dir(path, manifest_dir), &repo_root);
|
|
|
|
let output = Command::new("git")
|
|
.args(["ls-files", resolved_path.to_str().unwrap()])
|
|
.current_dir(repo_root)
|
|
.output()
|
|
.expect("Failed to execute git ls-files");
|
|
|
|
if !output.status.success() {
|
|
return (Vec::new(), resolved_path);
|
|
}
|
|
|
|
let stdout = String::from_utf8(output.stdout).unwrap();
|
|
let files: Vec<PathBuf> = stdout
|
|
.lines()
|
|
.filter(|line| !line.is_empty())
|
|
.map(PathBuf::from)
|
|
.collect();
|
|
|
|
(files, resolved_path)
|
|
}
|
|
|
|
fn get_repo_root() -> PathBuf {
|
|
let manifest_dir = get_manifest_dir();
|
|
// Cargo doesn't expose a way to get the workspace root, AFAICT (pgoldman 2025-10-31).
|
|
// We don't want to query git metadata for this, as that will break in Nix builds.
|
|
// We happen to know our own directory structure, so we can just walk the tree to get to the root.
|
|
let repo_root = manifest_dir.join("..").join("..");
|
|
repo_root.canonicalize().unwrap_or_else(|err| {
|
|
panic!(
|
|
"Failed to canonicalize repo_root path {}: {err:#?}",
|
|
repo_root.display()
|
|
)
|
|
})
|
|
}
|
|
|
|
fn extract_workspace_metadata(path: &Path) -> io::Result<(String, BTreeMap<String, String>)> {
|
|
let content = fs::read_to_string(path)?;
|
|
let parsed: Value = content
|
|
.parse()
|
|
.map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
|
|
|
|
let table = parsed
|
|
.as_table()
|
|
.ok_or_else(|| io::Error::new(io::ErrorKind::InvalidData, "workspace manifest is not a table"))?;
|
|
|
|
let workspace = table
|
|
.get("workspace")
|
|
.and_then(Value::as_table)
|
|
.ok_or_else(|| io::Error::new(io::ErrorKind::InvalidData, "workspace section missing"))?;
|
|
|
|
let edition = workspace
|
|
.get("package")
|
|
.and_then(Value::as_table)
|
|
.and_then(|pkg| pkg.get("edition"))
|
|
.and_then(Value::as_str)
|
|
.unwrap_or("2021")
|
|
.to_string();
|
|
|
|
let mut versions = BTreeMap::new();
|
|
if let Some(deps) = workspace.get("dependencies").and_then(Value::as_table) {
|
|
for (name, value) in deps {
|
|
let version_opt = match value {
|
|
Value::String(s) => Some(normalize_version(s)),
|
|
Value::Table(table) => table.get("version").and_then(Value::as_str).map(normalize_version),
|
|
_ => None,
|
|
};
|
|
|
|
if let Some(version) = version_opt {
|
|
versions.insert(name.clone(), version);
|
|
}
|
|
}
|
|
}
|
|
|
|
Ok((edition, versions))
|
|
}
|
|
|
|
fn extract_ts_bindings_version(path: &Path) -> io::Result<String> {
|
|
let content = fs::read_to_string(path)?;
|
|
let parsed: serde_json::Value =
|
|
serde_json::from_str(&content).map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
|
|
parsed
|
|
.get("version")
|
|
.and_then(serde_json::Value::as_str)
|
|
.map(|s| s.to_string())
|
|
.ok_or_else(|| {
|
|
io::Error::new(
|
|
io::ErrorKind::InvalidData,
|
|
"Missing \"version\" field in TypeScript bindings package.json",
|
|
)
|
|
})
|
|
}
|
|
|
|
/// Strip surrounding whitespace and any leading `=` (exact-version pins)
/// from a dependency version requirement.
fn normalize_version(version: &str) -> String {
    let trimmed = version.trim();
    trimmed.trim_start_matches('=').to_string()
}
|
|
|
|
/// Write `contents` to `path`, skipping the write entirely when the file
/// already holds exactly those bytes (keeps mtimes stable so Cargo doesn't
/// trigger needless rebuilds). Parent directories are created as needed.
fn write_if_changed(path: &Path, contents: &[u8]) -> io::Result<()> {
    // An unreadable or missing file counts as "changed" and falls through.
    if fs::read(path).map_or(false, |existing| existing == contents) {
        return Ok(());
    }

    if let Some(parent) = path.parent() {
        fs::create_dir_all(parent)?;
    }
    fs::File::create(path)?.write_all(contents)
}
|
|
|
|
/// Copy `src` to `dst` unless `dst` already has identical contents
/// (avoids dirtying mtimes and triggering needless rebuilds). Parent
/// directories of `dst` are created as needed.
fn copy_if_changed(src: &Path, dst: &Path) -> io::Result<()> {
    let src_bytes = fs::read(src)?;

    // An identical destination means there is nothing to do.
    if fs::read(dst).map_or(false, |existing| existing == src_bytes) {
        return Ok(());
    }

    if let Some(parent) = dst.parent() {
        fs::create_dir_all(parent)?;
    }
    fs::File::create(dst)?.write_all(&src_bytes)
}
|