feat: lsp complete for unimported schemas (#1757)
* feat: LSP completion for unimported schemas. Apply the new `load_all_files_under_paths` API to the LSP, complete schemas from packages that are not yet imported, and complete the corresponding `import` statement for the schema's package (see the sketch after the change summary below)

Signed-off-by: he1pa <[email protected]>

* Remove debug code

Signed-off-by: he1pa <[email protected]>

* Merge duplicate completion items

Signed-off-by: he1pa <[email protected]>

* Only complete in the root scope

Signed-off-by: he1pa <[email protected]>

* Move some test data files

Signed-off-by: he1pa <[email protected]>

* Filter schemas in the resolver instead of the advanced resolver, so that unimported packages are not analyzed in the AdvancedResolver and no extra performance cost is incurred

Signed-off-by: he1pa <[email protected]>

* Add unit tests

Signed-off-by: he1pa <[email protected]>

* Fix unit tests on Windows

Signed-off-by: he1pa <[email protected]>

---------

Signed-off-by: he1pa <[email protected]>
He1pa authored Nov 28, 2024
1 parent 0d34de4 commit 6bbb967
Showing 46 changed files with 727 additions and 415 deletions.
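As a rough, self-contained illustration of the intended LSP behavior described in the commit message above (not the actual kclvm-tools completion code; every type and name below is a simplified placeholder): schemas from packages that were parsed but never imported become completion candidates, and each candidate carries the `import` statement that would have to be added alongside it.

use std::collections::{HashMap, HashSet};

/// Simplified placeholder for an LSP completion item; the real code builds
/// `lsp_types::CompletionItem` values with an additional text edit.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct SchemaCompletion {
    /// Label shown in the completion list, e.g. "sub.Person".
    label: String,
    /// Import statement to insert alongside the completion, e.g. "import sub".
    import_stmt: String,
}

/// Offer schemas from packages that are not imported by the current module.
/// `schemas_by_pkg` would be derived from `Program::pkgs_not_imported`.
fn complete_unimported_schemas(
    schemas_by_pkg: &HashMap<String, Vec<String>>,
    imported_pkgs: &HashSet<String>,
) -> Vec<SchemaCompletion> {
    // A set merges duplicate completion items, mirroring the
    // "Merge duplicate completion items" change in this commit.
    let mut items: HashSet<SchemaCompletion> = HashSet::new();
    for (pkg, schemas) in schemas_by_pkg {
        if imported_pkgs.contains(pkg) {
            continue;
        }
        for schema in schemas {
            items.insert(SchemaCompletion {
                label: format!("{pkg}.{schema}"),
                import_stmt: format!("import {pkg}"),
            });
        }
    }
    items.into_iter().collect()
}

fn main() {
    let mut schemas_by_pkg = HashMap::new();
    schemas_by_pkg.insert("sub".to_string(), vec!["Person".to_string()]);
    let items = complete_unimported_schemas(&schemas_by_pkg, &HashSet::new());
    // Accepting "sub.Person" would also insert "import sub" at the top of the file.
    println!("{items:?}");
}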
25 changes: 21 additions & 4 deletions kclvm/ast/src/ast.rs
@@ -413,7 +413,9 @@ impl Into<SerializeProgram> for Program {
pub struct Program {
pub root: String,
pub pkgs: HashMap<String, Vec<String>>,
pub pkgs_not_imported: HashMap<String, Vec<String>>,
pub modules: HashMap<String, Arc<RwLock<Module>>>,
pub modules_not_imported: HashMap<String, Arc<RwLock<Module>>>,
}

impl Program {
@@ -453,7 +455,11 @@ impl Program {
&self,
module_path: &str,
) -> anyhow::Result<Option<RwLockReadGuard<'_, Module>>> {
match self.modules.get(module_path) {
match self
.modules
.get(module_path)
.or(self.modules_not_imported.get(module_path))
{
Some(module_ref) => match module_ref.read() {
Ok(m) => Ok(Some(m)),
Err(_) => Err(anyhow::anyhow!("Failed to acquire module lock")),
@@ -466,7 +472,11 @@ impl Program {
&self,
module_path: &str,
) -> anyhow::Result<Option<RwLockWriteGuard<'_, Module>>> {
match self.modules.get(module_path) {
match self
.modules
.get(module_path)
.or(self.modules_not_imported.get(module_path))
{
Some(module_ref) => match module_ref.write() {
Ok(m) => Ok(Some(m)),
Err(_) => Err(anyhow::anyhow!("Failed to acquire module lock")),
@@ -476,12 +486,19 @@ impl Program {
}

pub fn get_module_ref(&self, module_path: &str) -> Option<Arc<RwLock<Module>>> {
self.modules.get(module_path).cloned()
self.modules
.get(module_path)
.cloned()
.or(self.modules_not_imported.get(module_path).cloned())
}

pub fn get_modules_for_pkg(&self, pkg_name: &str) -> Vec<Arc<RwLock<Module>>> {
let mut result = Vec::new();
if let Some(module_names) = self.pkgs.get(pkg_name) {
if let Some(module_names) = self
.pkgs
.get(pkg_name)
.or(self.pkgs_not_imported.get(pkg_name))
{
for module_name in module_names {
if let Some(module) = self.get_module_ref(module_name) {
result.push(module);
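For readers skimming the ast.rs diff: the lookup change above can be reduced to the following self-contained sketch (with a stand-in `Module` type, not the real AST node). Imported modules are consulted first and the `*_not_imported` maps only act as a fallback, so existing callers of `get_module_ref` keep working unchanged.

use std::collections::HashMap;
use std::sync::{Arc, RwLock};

/// Stand-in for `ast::Module`; the real struct carries the parsed AST.
#[derive(Debug)]
struct Module {
    filename: String,
}

struct Program {
    modules: HashMap<String, Arc<RwLock<Module>>>,
    modules_not_imported: HashMap<String, Arc<RwLock<Module>>>,
}

impl Program {
    /// Same fallback shape as the diff above: try imported modules first,
    /// then modules that were parsed but never imported.
    fn get_module_ref(&self, module_path: &str) -> Option<Arc<RwLock<Module>>> {
        self.modules
            .get(module_path)
            .cloned()
            .or(self.modules_not_imported.get(module_path).cloned())
    }
}

fn main() {
    let mut program = Program {
        modules: HashMap::new(),
        modules_not_imported: HashMap::new(),
    };
    // A file discovered under the workspace but not imported by the entry file.
    program.modules_not_imported.insert(
        "sub/main.k".to_string(),
        Arc::new(RwLock::new(Module {
            filename: "sub/main.k".to_string(),
        })),
    );
    assert!(program.get_module_ref("sub/main.k").is_some());
    println!("fallback lookup works");
}

Keeping the fallback inside `Program`'s accessors means downstream consumers such as the resolver and the LSP do not need to know which map a module came from.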
6 changes: 2 additions & 4 deletions kclvm/driver/src/lib.rs
@@ -146,14 +146,12 @@ pub fn lookup_compile_workspaces(
}
}
WorkSpaceKind::Folder(folder) => {
let mut load_opt = kclvm_parser::LoadProgramOptions::default();
let metadata =
fill_pkg_maps_for_k_file(tool, path.into(), &mut load_opt).unwrap_or(None);
let load_opt = kclvm_parser::LoadProgramOptions::default();
let metadata = None;

if load_pkg {
if folder.is_dir() {
if let Ok(files) = get_kcl_files(folder.clone(), false) {
// return (files, Some(load_opt), metadata);
workspaces.insert(workspace, (files, Some(load_opt), metadata));
return (workspaces, None);
}
141 changes: 77 additions & 64 deletions kclvm/parser/src/lib.rs
@@ -926,7 +926,10 @@ pub fn parse_program(
opts: &LoadProgramOptions,
) -> Result<LoadProgramResult> {
let compile_entries = get_compile_entries_from_paths(&paths, &opts)?;
let workdir = compile_entries.get_root_path().to_string();
let workdir = compile_entries
.get_root_path()
.to_string()
.adjust_canonicalization();
let mut pkgs: HashMap<String, Vec<String>> = HashMap::new();
let mut new_files = HashSet::new();
for entry in compile_entries.iter() {
@@ -1004,11 +1007,12 @@ pub fn parse_program(
}
}
}

let program = ast::Program {
root: workdir,
pkgs,
pkgs_not_imported: HashMap::new(),
modules,
modules_not_imported: HashMap::new(),
};

Ok(LoadProgramResult {
@@ -1038,9 +1042,13 @@ pub fn load_all_files_under_paths(
&loader.opts,
) {
Ok(res) => {
let diag = sess.1.read().diagnostics.clone();
let mut res = res.clone();
let k_files_from_import = res.paths.clone();
let (k_files_under_path, pkgmap) = get_files_from_path(paths, opts)?;
let mut paths = paths.to_vec();
paths.push(&res.program.root);
let (k_files_under_path, pkgmap) =
get_files_from_path(&res.program.root, &paths, opts)?;
loader.pkgmap.extend(pkgmap);

// Filter unparsed file
@@ -1054,33 +1062,69 @@ pub fn load_all_files_under_paths(
}
}

let module_cache = module_cache.unwrap_or_default();
let pkgs = &mut res.program.pkgs;
let module_cache = loader.module_cache.clone();
let pkgs_not_imported = &mut res.program.pkgs_not_imported;

let mut new_files = HashSet::new();

// Bfs unparsed and import files
loader.parsed_file.extend(unparsed_file.clone());
while let Some(file) = unparsed_file.pop_front() {
new_files.insert(file.clone());
let deps = parse_file(
sess.clone(),
file,
None,
module_cache.clone(),
pkgs,
&mut loader.pkgmap,
loader.file_graph.clone(),
&loader.opts,
)?;
for dep in deps {
if loader.parsed_file.insert(dep.clone()) {
unparsed_file.push_back(dep.clone());
}
let module_cache_read = module_cache.read();
match &module_cache_read {
Ok(m_cache) => match m_cache.ast_cache.get(file.get_path()) {
Some(_) => continue,
None => {
drop(module_cache_read);
let deps = parse_file(
sess.clone(),
file.clone(),
None,
module_cache.clone(),
pkgs_not_imported,
&mut loader.pkgmap,
loader.file_graph.clone(),
&loader.opts,
)?;

let m_ref = match module_cache.read() {
Ok(module_cache) => module_cache
.ast_cache
.get(file.get_path())
.expect(&format!(
"Module not found in module: {:?}",
file.get_path()
))
.clone(),
Err(e) => {
return Err(anyhow::anyhow!("Parse program failed: {e}"))
}
};

let pkg = loader.pkgmap.get(&file).expect("file not in pkgmap");
let mut m = m_ref.write().unwrap();
fix_rel_import_path_with_file(
&pkg.pkg_root,
&mut m,
&file,
&loader.pkgmap,
&loader.opts,
sess.clone(),
);

for dep in deps {
if loader.parsed_file.insert(dep.clone()) {
unparsed_file.push_back(dep.clone());
}
}
}
},
Err(e) => return Err(anyhow::anyhow!("Parse entry failed: {e}")),
}
}

// Merge unparsed module into res
let modules = &mut res.program.modules;
let modules_not_imported = &mut res.program.modules_not_imported;
for file in &new_files {
let filename = file.get_path().to_str().unwrap().to_string();
let m_ref = match module_cache.read() {
@@ -1094,52 +1138,17 @@ pub fn load_all_files_under_paths(
.clone(),
Err(e) => return Err(anyhow::anyhow!("Parse program failed: {e}")),
};
modules.insert(filename.clone(), m_ref);
match pkgs.get_mut(&file.pkg_path) {
modules_not_imported.insert(filename.clone(), m_ref);
match pkgs_not_imported.get_mut(&file.pkg_path) {
Some(pkg_modules) => {
pkg_modules.push(filename.clone());
}
None => {
pkgs.insert(file.pkg_path.clone(), vec![filename]);
pkgs_not_imported.insert(file.pkg_path.clone(), vec![filename]);
}
}
}

// Generate new paths
let files = match loader.file_graph.read() {
Ok(file_graph) => {
let files = match file_graph.toposort() {
Ok(files) => files,
Err(_) => file_graph.paths(),
};

let file_path_graph = file_graph.file_path_graph().0;
if let Err(cycle) = toposort(&file_path_graph) {
let formatted_cycle = cycle
.iter()
.map(|file| format!("- {}\n", file.to_string_lossy()))
.collect::<String>();

sess.1.write().add_error(
ErrorKind::RecursiveLoad,
&[Message {
range: (Position::dummy_pos(), Position::dummy_pos()),
style: Style::Line,
message: format!(
"Could not compiles due to cyclic import statements\n{}",
formatted_cycle.trim_end()
),
note: None,
suggested_replacement: None,
}],
);
}
files
}
Err(e) => return Err(anyhow::anyhow!("Parse program failed: {e}")),
};

res.paths = files.iter().map(|file| file.get_path().clone()).collect();
sess.1.write().diagnostics = diag;
return Ok(res);
}
e => return e,
@@ -1149,6 +1158,7 @@

/// Get all kcl files under path and dependencies from opts, regardless of whether they are imported or not
pub fn get_files_from_path(
root: &str,
paths: &[&str],
opts: Option<LoadProgramOptions>,
) -> Result<(HashMap<String, Vec<PathBuf>>, HashMap<PkgFile, Pkg>)> {
@@ -1175,11 +1185,15 @@
.to_str()
.unwrap()
.to_string();
let fix_path = fix_path
let mut fix_path = fix_path
.replace(['/', '\\'], ".")
.trim_end_matches('.')
.to_string();

if fix_path.is_empty() {
fix_path = MAIN_PKG.to_string();
}

let pkgfile = PkgFile::new(p.clone(), fix_path.clone());
pkgmap.insert(
pkgfile,
@@ -1203,12 +1217,11 @@
if path_buf.is_dir() {
let all_k_files_under_path = get_kcl_files(path, true)?;
for f in &all_k_files_under_path {
let p = PathBuf::from(f);

let p = PathBuf::from(f.adjust_canonicalization());
let fix_path = p
.parent()
.unwrap()
.strip_prefix(path_buf.clone())
.strip_prefix(root)
.unwrap()
.to_str()
.unwrap()
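The new `root` parameter of `get_files_from_path` changes how a package path is derived for files that are not imported anywhere: the file's directory is made relative to the program root (rather than to the searched folder), separators become dots, and an empty result falls back to the main package. A minimal, self-contained sketch of that normalization follows; the value "__main__" for `MAIN_PKG` is assumed here, the real constant lives in the kclvm AST crate.

use std::path::Path;

/// Assumed value; the real constant is defined in the kclvm AST crate.
const MAIN_PKG: &str = "__main__";

/// Turn a file path into a dotted package path relative to the workspace root,
/// e.g. "/ws/sub/sub1/a.k" with root "/ws" becomes "sub.sub1".
fn pkg_path_for_file(root: &str, file: &str) -> String {
    let rel = Path::new(file)
        .parent()
        .unwrap_or_else(|| Path::new(""))
        .strip_prefix(root)
        .unwrap_or_else(|_| Path::new(""))
        .to_string_lossy()
        .to_string();

    let fix_path = rel
        .replace(['/', '\\'], ".")
        .trim_end_matches('.')
        .to_string();

    // Files sitting directly in the root belong to the main package.
    if fix_path.is_empty() {
        MAIN_PKG.to_string()
    } else {
        fix_path
    }
}

fn main() {
    assert_eq!(pkg_path_for_file("/ws", "/ws/main.k"), "__main__");
    assert_eq!(pkg_path_for_file("/ws", "/ws/sub/sub1/a.k"), "sub.sub1");
    println!("package paths normalized");
}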
6 changes: 4 additions & 2 deletions kclvm/parser/src/tests.rs
@@ -865,6 +865,8 @@ fn parse_all_file_under_path() {

let res = load_all_files_under_paths(sess.clone(), &[main], Some(opt), None).unwrap();

assert_eq!(res.program.pkgs.keys().len(), 4);
assert_eq!(res.paths.len(), 4);
assert_eq!(res.program.pkgs.keys().len(), 1);
assert_eq!(res.program.pkgs_not_imported.keys().len(), 3);

assert_eq!(res.paths.len(), 1);
}
