Skip to content

Sync rewatch 1.0.9 #7010

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 2 commits into from
Sep 5, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@
- Removed empty `default` case from `switch` statement in the generated code
- Optimised the Type Extension runtime code and removed trailing `/1` from `RE_EXN_ID` https://github.com/rescript-lang/rescript-compiler/pull/6958
- Compact output for anonymous functions. https://github.com/rescript-lang/rescript-compiler/pull/6945
- Rewatch 1.0.9. https://github.com/rescript-lang/rescript-compiler/pull/7010

#### :bug: Bug Fix
- Fix issue where long layout break added a trailing comma in partial application `...`. https://github.com/rescript-lang/rescript-compiler/pull/6949
Expand Down
2 changes: 1 addition & 1 deletion rewatch/Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion rewatch/Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[package]
name = "rewatch"
version = "1.0.6"
version = "1.0.9"
edition = "2021"

[dependencies]
Expand Down
33 changes: 29 additions & 4 deletions rewatch/src/build.rs
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ use console::style;
use indicatif::{ProgressBar, ProgressStyle};
use serde::Serialize;
use std::fmt;
use std::fs::File;
use std::io::{stdout, Write};
use std::path::PathBuf;
use std::time::{Duration, Instant};
Expand Down Expand Up @@ -52,7 +53,7 @@ pub struct CompilerArgs {
pub parser_args: Vec<String>,
}

pub fn get_compiler_args(path: &str, rescript_version: Option<String>) -> String {
pub fn get_compiler_args(path: &str, rescript_version: Option<String>, bsc_path: Option<String>) -> String {
let filename = &helpers::get_abs_path(path);
let package_root = helpers::get_abs_path(
&helpers::get_nearest_bsconfig(&std::path::PathBuf::from(path)).expect("Couldn't find package root"),
Expand All @@ -64,7 +65,10 @@ pub fn get_compiler_args(path: &str, rescript_version: Option<String>) -> String
let rescript_version = if let Some(rescript_version) = rescript_version {
rescript_version
} else {
let bsc_path = helpers::get_bsc(&package_root, workspace_root.to_owned());
let bsc_path = match bsc_path {
Some(bsc_path) => bsc_path,
None => helpers::get_bsc(&package_root, workspace_root.to_owned()),
};
helpers::get_rescript_version(&bsc_path)
};
// make PathBuf from package root and get the relative path for filename
Expand Down Expand Up @@ -134,10 +138,14 @@ pub fn initialize_build(
default_timing: Option<Duration>,
filter: &Option<regex::Regex>,
path: &str,
bsc_path: Option<String>,
) -> Result<BuildState, InitializeBuildError> {
let project_root = helpers::get_abs_path(path);
let workspace_root = helpers::get_workspace_root(&project_root);
let bsc_path = helpers::get_bsc(&project_root, workspace_root.to_owned());
let bsc_path = match bsc_path {
Some(bsc_path) => bsc_path,
None => helpers::get_bsc(&project_root, workspace_root.to_owned()),
};
let root_config_name = packages::get_package_name(&project_root);
let rescript_version = helpers::get_rescript_version(&bsc_path);

Expand Down Expand Up @@ -407,11 +415,26 @@ impl fmt::Display for BuildError {
}
}

pub fn write_build_ninja(build_state: &BuildState) {
// write build.ninja files in the packages after a non-incremental build
// this is necessary to bust the editor tooling cache. The editor tooling
// is watching this file.
// we don't need to do this in an incremental build because there are no file
// changes (deletes / additions)
for package in build_state.packages.values() {
// write empty file:
let mut f = File::create(std::path::Path::new(&package.get_bs_build_path()).join("build.ninja"))
.expect("Unable to write file");
f.write_all(b"").expect("unable to write to ninja file");
}
}

pub fn build(
filter: &Option<regex::Regex>,
path: &str,
no_timing: bool,
create_sourcedirs: bool,
bsc_path: Option<String>,
) -> Result<BuildState, BuildError> {
let default_timing: Option<std::time::Duration> = if no_timing {
Some(std::time::Duration::new(0.0 as u64, 0.0 as u32))
Expand All @@ -420,7 +443,7 @@ pub fn build(
};
let timing_total = Instant::now();
let mut build_state =
initialize_build(default_timing, filter, path).map_err(BuildError::InitializeBuild)?;
initialize_build(default_timing, filter, path, bsc_path).map_err(BuildError::InitializeBuild)?;

match incremental_build(&mut build_state, default_timing, true, false, create_sourcedirs) {
Ok(_) => {
Expand All @@ -432,10 +455,12 @@ pub fn build(
default_timing.unwrap_or(timing_total_elapsed).as_secs_f64()
);
clean::cleanup_after_build(&build_state);
write_build_ninja(&build_state);
Ok(build_state)
}
Err(e) => {
clean::cleanup_after_build(&build_state);
write_build_ninja(&build_state);
Err(BuildError::IncrementalBuild(e))
}
}
Expand Down
15 changes: 10 additions & 5 deletions rewatch/src/build/clean.rs
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ fn remove_compile_asset(package: &packages::Package, source_file: &str, extensio

pub fn remove_compile_assets(package: &packages::Package, source_file: &str) {
// optimization
// only issue cmti if htere is an interfacce file
// only issue cmti if there is an interface file
for extension in &["cmj", "cmi", "cmt", "cmti"] {
remove_compile_asset(package, source_file, extension);
}
Expand Down Expand Up @@ -237,10 +237,11 @@ pub fn cleanup_previous_build(
.map(|module_name| {
// if the module is a namespace, we need to mark the whole namespace as dirty when a module has been deleted
if let Some(namespace) = helpers::get_namespace_from_module_name(module_name) {
return namespace;
return vec![namespace, module_name.to_string()];
}
module_name.to_string()
vec![module_name.to_string()]
})
.flatten()
.collect::<AHashSet<String>>();

build_state.deleted_modules = deleted_module_names;
Expand Down Expand Up @@ -318,12 +319,16 @@ pub fn cleanup_after_build(build_state: &BuildState) {
});
}

pub fn clean(path: &str) {
pub fn clean(path: &str, bsc_path: Option<String>) {
let project_root = helpers::get_abs_path(path);
let workspace_root = helpers::get_workspace_root(&project_root);
let packages = packages::make(&None, &project_root, &workspace_root);
let root_config_name = packages::get_package_name(&project_root);
let bsc_path = helpers::get_bsc(&project_root, workspace_root.to_owned());
let bsc_path = match bsc_path {
Some(bsc_path) => bsc_path,
None => helpers::get_bsc(&project_root, workspace_root.to_owned()),
};

let rescript_version = helpers::get_rescript_version(&bsc_path);

let timing_clean_compiler_assets = Instant::now();
Expand Down
8 changes: 4 additions & 4 deletions rewatch/src/build/compile.rs
Original file line number Diff line number Diff line change
Expand Up @@ -46,10 +46,10 @@ pub fn compile(
// for sure clean modules -- after checking the hash of the cmi
let mut clean_modules = AHashSet::<String>::new();

// TODO: calculate the real dirty modules from the orginal dirty modules in each iteration
// TODO: calculate the real dirty modules from the original dirty modules in each iteration
// taken into account the modules that we know are clean, so they don't propagate through the
// deps graph
// create a hashset of all clean modules form the file-hashes
// create a hashset of all clean modules from the file-hashes
let mut loop_count = 0;
let mut files_total_count = compiled_modules.len();
let mut files_current_loop_count;
Expand Down Expand Up @@ -575,7 +575,7 @@ fn compile_file(
// because editor tooling doesn't support namespace entries yet
// we just remove the @ for now. This makes sure the editor support
// doesn't break
.join(module_name.to_owned().replace('@', "") + ".cmi"),
.join(module_name.to_owned() + ".cmi"),
);
let _ = std::fs::copy(
build_path_abs.to_string() + "/" + &module_name + ".cmj",
Expand All @@ -590,7 +590,7 @@ fn compile_file(
// because editor tooling doesn't support namespace entries yet
// we just remove the @ for now. This makes sure the editor support
// doesn't break
.join(module_name.to_owned().replace('@', "") + ".cmt"),
.join(module_name.to_owned() + ".cmt"),
);
} else {
let _ = std::fs::copy(
Expand Down
3 changes: 2 additions & 1 deletion rewatch/src/build/deps.rs
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,8 @@ fn get_dep_modules(
_ => dep_first,
};
let namespaced_name = dep.to_owned() + "-" + namespace;
if package_modules.contains(&namespaced_name) {
if package_modules.contains(&namespaced_name) || valid_modules.contains(&namespaced_name)
{
namespaced_name
} else {
dep.to_string()
Expand Down
2 changes: 1 addition & 1 deletion rewatch/src/build/namespaces.rs
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ pub fn gen_mlmap(
let path = build_path_abs.to_string() + "/" + namespace + ".mlmap";
let mut file = File::create(&path).expect("Unable to create mlmap");

file.write_all(b"randjbuildsystem\n" as &[u8])
file.write_all(b"randjbuildsystem\n")
.expect("Unable to write mlmap");

let mut modules = Vec::from_iter(depending_modules.to_owned());
Expand Down
6 changes: 3 additions & 3 deletions rewatch/src/build/packages.rs
Original file line number Diff line number Diff line change
Expand Up @@ -180,7 +180,7 @@ pub fn read_folders(
/// Given a projects' root folder and a `bsconfig::Source`, this recursively creates all the
/// sources in a flat list. In the process, it removes the children, as they are being resolved
/// because of the recursiveness. So you get a flat list of files back, retaining the type_ and
/// wether it needs to recurse into all structures
/// whether it needs to recurse into all structures
fn get_source_dirs(source: bsconfig::Source, sub_path: Option<PathBuf>) -> AHashSet<bsconfig::PackageSource> {
let mut source_folders: AHashSet<bsconfig::PackageSource> = AHashSet::new();

Expand Down Expand Up @@ -276,7 +276,7 @@ pub fn read_dependency(

/// # Make Package

/// Given a bsconfig, reqursively finds all dependencies.
/// Given a bsconfig, recursively finds all dependencies.
/// 1. It starts with registering dependencies and
/// prevents the operation for the ones which are already
/// registered for the parent packages. Especially relevant for peerDependencies.
Expand Down Expand Up @@ -430,7 +430,7 @@ fn read_packages(project_root: &str, workspace_root: Option<String>) -> AHashMap
/// data from the config and pushes it forwards. Another thing is the 'type_', some files / folders
/// can be marked with the type 'dev'. Which means that they may not be around in the distributed
/// NPM package. The file reader allows for this, just warns when this happens.
/// TODO -> Check wether we actually need the `fs::Metadata`
/// TODO -> Check whether we actually need the `fs::Metadata`
pub fn get_source_files(
package_dir: &Path,
filter: &Option<regex::Regex>,
Expand Down
38 changes: 37 additions & 1 deletion rewatch/src/build/parse.rs
Original file line number Diff line number Diff line change
Expand Up @@ -203,6 +203,29 @@ pub fn generate_asts(
namespaces::compile_mlmap(package, module_name, &build_state.bsc_path);
let mlmap_hash_after = helpers::compute_file_hash(&compile_path);

let suffix = package
.namespace
.to_suffix()
.expect("namespace should be set for mlmap module");
// copy the mlmap to the bs build path for editor tooling
let base_build_path = package.get_build_path() + "/" + &suffix;
let base_bs_build_path = package.get_bs_build_path() + "/" + &suffix;
let _ = std::fs::copy(
base_build_path.to_string() + ".cmi",
base_bs_build_path.to_string() + ".cmi",
);
let _ = std::fs::copy(
base_build_path.to_string() + ".cmt",
base_bs_build_path.to_string() + ".cmt",
);
let _ = std::fs::copy(
base_build_path.to_string() + ".cmj",
base_bs_build_path.to_string() + ".cmj",
);
let _ = std::fs::copy(
base_build_path.to_string() + ".mlmap",
base_bs_build_path.to_string() + ".mlmap",
);
match (mlmap_hash, mlmap_hash_after) {
(Some(digest), Some(digest_after)) => !digest.eq(&digest_after),
_ => true,
Expand Down Expand Up @@ -299,7 +322,7 @@ fn generate_ast(
);

/* Create .ast */
if let Some(res_to_ast) = Some(
let result = if let Some(res_to_ast) = Some(
Command::new(bsc_path)
.current_dir(&build_path_abs)
.args(parser_args)
Expand All @@ -322,7 +345,20 @@ fn generate_ast(
"Could not find canonicalize_string_path for file {} in package {}",
filename, package.name
))
};
match &result {
Ok((ast_path, _)) => {
let dir = std::path::Path::new(filename).parent().unwrap();
let _ = std::fs::copy(
build_path_abs.to_string() + "/" + ast_path,
std::path::Path::new(&package.get_bs_build_path())
.join(dir)
.join(ast_path),
);
}
Err(_) => (),
}
result
}

fn path_to_ast_extension(path: &Path) -> &str {
Expand Down
15 changes: 13 additions & 2 deletions rewatch/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -45,11 +45,18 @@ struct Args {
#[arg(short, long)]
create_sourcedirs: Option<bool>,

/// This prints the compiler arguments. It expects the path to a rescript.json file.
/// This also requires --bsc-path and --rescript-version to be present
#[arg(long)]
compiler_args: Option<String>,

/// To be used in conjunction with compiler_args
#[arg(long)]
rescript_version: Option<String>,

/// A custom path to bsc
#[arg(long)]
bsc_path: Option<String>,
}

fn main() {
Expand All @@ -65,7 +72,10 @@ fn main() {
match args.compiler_args {
None => (),
Some(path) => {
println!("{}", build::get_compiler_args(&path, args.rescript_version));
println!(
"{}",
build::get_compiler_args(&path, args.rescript_version, args.bsc_path)
);
std::process::exit(0);
}
}
Expand All @@ -76,13 +86,14 @@ fn main() {
std::process::exit(1)
}
lock::Lock::Aquired(_) => match command {
Command::Clean => build::clean::clean(&folder),
Command::Clean => build::clean::clean(&folder, args.bsc_path),
Command::Build => {
match build::build(
&filter,
&folder,
args.no_timing.unwrap_or(false),
args.create_sourcedirs.unwrap_or(false),
args.bsc_path,
) {
Err(e) => {
eprintln!("Error Building: {e}");
Expand Down
8 changes: 6 additions & 2 deletions rewatch/src/watcher.rs
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@ async fn async_watch(
after_build: Option<String>,
create_sourcedirs: bool,
) -> notify::Result<()> {
let mut build_state = build::initialize_build(None, filter, path).expect("Can't initialize build");
let mut build_state = build::initialize_build(None, filter, path, None).expect("Can't initialize build");
let mut needs_compile_type = CompileType::Incremental;
// create a mutex to capture if ctrl-c was pressed
let ctrlc_pressed = Arc::new(Mutex::new(false));
Expand Down Expand Up @@ -205,12 +205,16 @@ async fn async_watch(
}
CompileType::Full => {
let timing_total = Instant::now();
build_state = build::initialize_build(None, filter, path).expect("Can't initialize build");
build_state =
build::initialize_build(None, filter, path, None).expect("Can't initialize build");
let _ =
build::incremental_build(&mut build_state, None, initial_build, false, create_sourcedirs);
if let Some(a) = after_build.clone() {
cmd::run(a)
}

build::write_build_ninja(&build_state);

let timing_total_elapsed = timing_total.elapsed();
println!(
"\n{}{}Finished compilation in {:.2}s\n",
Expand Down
Loading