914 changes: 894 additions & 20 deletions Cargo.lock

Large diffs are not rendered by default.

2 changes: 2 additions & 0 deletions Cargo.toml
@@ -11,6 +11,7 @@ emmylua_code_analysis = { path = "crates/emmylua_code_analysis", version = "0.20
emmylua_parser = { path = "crates/emmylua_parser", version = "0.23.0" }
emmylua_parser_desc = { path = "crates/emmylua_parser_desc", version = "0.23.0" }
emmylua_diagnostic_macro = { path = "crates/emmylua_diagnostic_macro", version = "0.5.0" }
schema_to_emmylua = { path = "crates/schema_to_emmylua", version = "0.1.0" }

# external
lsp-server = "0.7.9"
@@ -54,6 +55,7 @@ mimalloc = { version = "0.1.48", features = ["v3"] }
googletest = "0.14.2"
unicode-general-category = "1.0.0"
luars = { version = "0.1.0", features = ["serde"] }
reqwest = "0.13.1"

# Lint configuration for the entire workspace
[workspace.lints.clippy]
2 changes: 2 additions & 0 deletions crates/emmylua_code_analysis/Cargo.toml
@@ -50,6 +50,8 @@ include_dir.workspace = true
emmylua_codestyle.workspace = true
itertools.workspace = true
luars.workspace = true
reqwest.workspace = true
schema_to_emmylua.workspace = true

[package.metadata.i18n]
available-locales = ["en", "zh_CN", "zh_HK"]
@@ -469,8 +469,8 @@ pub fn analyze_doc_tag_schema(analyzer: &mut DocAnalyzer, tag: LuaDocTagSchema)

     let schema_index = analyzer.db.get_json_schema_index_mut();
     if let Some(schema_file) = schema_index.get_schema_file(&url) {
-        if let JsonSchemaFile::Resolved(file_id) = schema_file {
-            let types = vec![LuaType::ModuleRef(*file_id)];
+        if let JsonSchemaFile::Resolved(type_id) = schema_file {
+            let types = vec![LuaType::Ref(type_id.clone())];
             bind_type_to_owner(analyzer, &tag, &types, None);
         }
     } else {
Expand Down
8 changes: 8 additions & 0 deletions crates/emmylua_code_analysis/src/db_index/schema/mod.rs
@@ -49,6 +49,14 @@ impl JsonSchemaIndex {
            })
            .collect()
    }

    pub fn reset_rest_schemas(&mut self) {
        for schema_file in self.schema_files.values_mut() {
            if let JsonSchemaFile::NeedResolve = schema_file {
                *schema_file = JsonSchemaFile::BadUrl;
            }
        }
    }
}

impl LuaIndex for JsonSchemaIndex {
35 changes: 33 additions & 2 deletions crates/emmylua_code_analysis/src/db_index/schema/schema_file.rs
@@ -1,7 +1,38 @@
-use crate::FileId;
+use url::Url;
+
+use crate::LuaTypeDeclId;
 
 #[derive(Debug, Clone)]
 pub enum JsonSchemaFile {
     NeedResolve,
-    Resolved(FileId),
+    BadUrl,
+    Resolved(LuaTypeDeclId),
 }
+
+pub fn get_schema_short_name(url: &Url) -> String {
+    const MAX_LEN: usize = 64;
+
+    let url_str = url.as_str();
+    let mut new_name = String::new();
+    for c in url_str.chars().rev() {
+        if new_name.len() >= MAX_LEN {
+            break;
+        }
+
+        if c.is_alphanumeric() || c == '-' || c == '_' || c == '.' {
+            new_name.push(c);
+        } else if !c.is_control() && c != ' ' {
+            new_name.push('_');
+        }
+    }
+
+    let mut result: String = new_name.chars().rev().collect();
+
+    result = result.trim_matches(|c| c == '_' || c == '.').to_string();
+
+    if result.is_empty() {
+        return "schema".to_string();
+    }
+
+    result
+}
82 changes: 82 additions & 0 deletions crates/emmylua_code_analysis/src/lib.rs
@@ -30,7 +30,9 @@ pub use profile::Profile;
pub use resources::get_best_resources_dir;
pub use resources::load_resource_from_include_dir;
use resources::load_resource_std;
use schema_to_emmylua::SchemaConverter;
pub use semantic::*;
use std::collections::HashMap;
use std::{collections::HashSet, path::PathBuf, sync::Arc};
pub use test_lib::VirtualWorkspace;
use tokio_util::sync::CancellationToken;
@@ -260,6 +262,86 @@ impl EmmyLuaAnalysis {
            self.remove_file_by_uri(&uri);
        }
    }

    pub fn check_schema_update(&self) -> bool {
        self.compilation
            .get_db()
            .get_json_schema_index()
            .has_need_resolve_schemas()
    }

    pub async fn update_schema(&mut self) {
        let urls = self
            .compilation
            .get_db()
            .get_json_schema_index()
            .get_need_resolve_schemas();
        let mut url_contents = HashMap::new();
        for url in urls {
            if url.scheme() == "file" {
                if let Ok(path) = url.to_file_path() {
                    if path.exists() {
                        let result = read_file_with_encoding(&path, "utf-8");
                        if let Some(content) = result {
                            url_contents.insert(url.clone(), content);
                        } else {
                            log::error!("Failed to read schema file: {:?}", url);
                        }
                    }
                }
            } else {
                let result = reqwest::get(url.as_str()).await;
                if let Ok(response) = result {
                    if let Ok(content) = response.text().await {
                        url_contents.insert(url.clone(), content);
                    } else {
                        log::error!("Failed to read schema content from URL: {:?}", url);
                    }
                } else {
                    log::error!("Failed to fetch schema from URL: {:?}", url);
                }
Review comment on lines +294 to +302 (severity: high):

The current implementation doesn't check the HTTP status code of the response from reqwest. This means it might try to parse an error page (e.g., for a 404 Not Found) as a JSON schema, which will fail later during conversion. It's important to check if the request was successful (i.e., has a 2xx status code) before attempting to process the response body.

Suggested change:

                if let Ok(response) = result {
                    if response.status().is_success() {
                        if let Ok(content) = response.text().await {
                            url_contents.insert(url.clone(), content);
                        } else {
                            log::error!("Failed to read schema content from URL: {:?}", url);
                        }
                    } else {
                        log::error!(
                            "Failed to fetch schema from URL: {:?}, status: {}",
                            url,
                            response.status()
                        );
                    }
                } else {
                    log::error!("Failed to fetch schema from URL: {:?}", url);
                }

            }
        }

        if url_contents.is_empty() {
            return;
        }

        let work_dir = std::env::current_dir().unwrap_or_else(|_| PathBuf::from("."));
        let converter = SchemaConverter::new(true);
        for (url, json_content) in url_contents {
            let short_name = get_schema_short_name(&url);
            match converter.convert_from_str(&json_content) {
                Ok(convert_result) => {
                    let path = work_dir.join(short_name);
                    let Some(file_id) =
                        self.update_file_by_path(&path, Some(convert_result.annotation_text))
                    else {
                        continue;
                    };
Review comment on lines +310 to +321 (severity: high):

Using std::env::current_dir() to determine the path for generated schema files is unreliable and can lead to issues. The current working directory might not be writable, or it could be the user's project root, which would result in creating unwanted files in their workspace. A more robust approach is to use a custom URI scheme for these virtual files, ensuring they are handled in-memory without affecting the file system. This also makes file identity unique and avoids potential path collisions.

Suggested change:

        let converter = SchemaConverter::new(true);
        for (url, json_content) in url_contents {
            match converter.convert_from_str(&json_content) {
                Ok(convert_result) => {
                    let Ok(uri) = Uri::parse(&format!("emmylua-schema:{}", url.as_str())) else {
                        log::error!("Failed to create schema URI for {}", url);
                        continue;
                    };
                    let Some(file_id) =
                        self.update_file_by_uri(&uri, Some(convert_result.annotation_text))
                    else {
                        continue;
                    };

                    if let Some(f) = self
                        .compilation
                        .get_db_mut()
                        .get_json_schema_index_mut()
                        .get_schema_file_mut(&url)
                    {
                        *f = JsonSchemaFile::Resolved(LuaTypeDeclId::local(
                            file_id,
                            &convert_result.root_type_name,
                        ));
                    }
                }
                Err(e) => {
                    log::error!("Failed to convert schema from URL {:?}: {}", url, e);
                }
            }
        }

        self.compilation
            .get_db_mut()
            .get_json_schema_index_mut()
            .reset_rest_schemas();
    }
}

impl Default for EmmyLuaAnalysis {
@@ -568,6 +568,10 @@ fn infer_union_member(
        }
    }

    if member_types.iter().all(|t| t.is_nil()) {
        return Err(InferFailReason::FieldNotFound);
    }

    Ok(LuaType::from_vec(member_types))
}

23 changes: 12 additions & 11 deletions crates/emmylua_code_analysis/src/semantic/infer/infer_table.rs
@@ -185,7 +185,6 @@ pub fn infer_table_field_value_should_be(
         .get_parent::<LuaTableExpr>()
         .ok_or(InferFailReason::None)?;
     let parent_table_expr_type = infer_table_should_be(db, cache, parnet_table_expr)?;
-
     let index = LuaIndexMemberExpr::TableField(table_field.clone());
     let reason = match infer_member_by_member_key(
         db,
@@ -294,18 +293,20 @@
 ) -> InferResult {
     let member_id = LuaMemberId::new(field.get_syntax_id(), cache.get_file_id());
     if let Some(type_cache) = db.get_type_index().get_type_cache(&member_id.into()) {
-        let typ = type_cache.as_type();
-        match typ {
-            LuaType::TableConst(_) => {}
-            LuaType::Tuple(tuple) => {
-                let types = tuple.get_types();
-                // In this case the cached type may be imprecise
-                if tuple.is_infer_resolve() && types.len() == 1 && types[0].is_unknown() {
-                } else {
-                    return Ok(typ.clone());
+        if type_cache.is_doc() {
+            let typ = type_cache.as_type();
+            match typ {
+                LuaType::TableConst(_) => {}
+                LuaType::Tuple(tuple) => {
+                    let types = tuple.get_types();
+                    // In this case the cached type may be imprecise
+                    if tuple.is_infer_resolve() && types.len() == 1 && types[0].is_unknown() {
+                    } else {
+                        return Ok(typ.clone());
+                    }
                 }
+                typ => return Ok(typ.clone()),
             }
-            typ => return Ok(typ.clone()),
         }
     } else if field.is_value_field() {
         return infer_table_field_value_should_be(db, cache, field);
9 changes: 9 additions & 0 deletions crates/emmylua_ls/src/context/workspace_manager.rs
@@ -261,6 +261,15 @@ impl WorkspaceManager {

        is_workspace_file
    }

    pub async fn check_schema_update(&self) {
        let read_analysis = self.analysis.read().await;
        if read_analysis.check_schema_update() {
            drop(read_analysis);
            let mut write_analysis = self.analysis.write().await;
            write_analysis.update_schema().await;
        }
Review comment on lines +266 to +271 (severity: medium):

The current pattern of dropping a read lock to acquire a write lock can introduce a race condition. Another thread could perform the schema update in the small window between the read lock being released and the write lock being acquired, leading to redundant work. To prevent this, it's a good practice to re-check the condition after acquiring the write lock.

Suggested change:

        if self.analysis.read().await.check_schema_update() {
            let mut write_analysis = self.analysis.write().await;
            // Re-check after acquiring write lock to avoid race condition.
            if write_analysis.check_schema_update() {
                write_analysis.update_schema().await;
            }
        }

    }
}

pub fn load_emmy_config(config_root: Option<PathBuf>, client_config: ClientConfig) -> Arc<Emmyrc> {
@@ -21,6 +21,7 @@ pub fn add_completion(builder: &mut CompletionBuilder) -> Option<()> {
    for typ in &types {
        dispatch_type(builder, typ.clone(), &InferGuard::new());
    }

    if !types.is_empty() && !builder.is_invoked() {
        builder.stop_here();
    }
@@ -124,6 +125,10 @@ fn get_token_should_type(builder: &mut CompletionBuilder) -> Option<Vec<LuaType>
            }
        }
        LuaAst::LuaTableField(table_field) => {
            if table_field.is_value_field() {
                return None;
            }

            let typ = infer_table_field_value_should_be(
                builder.semantic_model.get_db(),
                &mut builder.semantic_model.get_cache().borrow_mut(),
@@ -1,6 +1,9 @@
 use std::collections::HashSet;
 
-use emmylua_code_analysis::{InferGuard, LuaMemberInfo, LuaMemberKey, LuaType, get_real_type};
+use emmylua_code_analysis::{
+    InferGuard, LuaMemberInfo, LuaMemberKey, LuaType, get_real_type,
+    infer_table_field_value_should_be,
+};
 use emmylua_parser::{LuaAst, LuaAstNode, LuaKind, LuaTableExpr, LuaTableField, LuaTokenKind};
 use lsp_types::{CompletionItem, InsertTextFormat, InsertTextMode};
 use rowan::NodeOrToken;
@@ -210,29 +213,45 @@ fn add_table_field_value_completion(builder: &mut CompletionBuilder) -> Option<(
         return None;
     }
     // Only trigger when the value is empty
-    let parent = builder.trigger_token.prev_token()?.parent()?;
-    let node = LuaAst::cast(parent)?;
-    match node {
-        LuaAst::LuaTableField(field) => {
-            let table_expr = field.get_parent::<LuaTableExpr>()?;
-            let table_type = builder
-                .semantic_model
-                .infer_table_should_be(table_expr.clone())?;
-            let key = builder
-                .semantic_model
-                .get_member_key(&field.get_field_key()?)?;
-            let member_infos = builder.semantic_model.get_member_infos(&table_type)?;
-            let member_info = member_infos.iter().find(|m| m.key == key)?;
+    let mut parent = if builder.trigger_token.kind() == LuaTokenKind::TkWhitespace.into() {
+        builder.trigger_token.prev_token()?.parent()?
+    } else {
+        builder.trigger_token.parent()?
+    };
+    for _ in 0..3 {
+        match LuaAst::cast(parent.clone())? {
+            LuaAst::LuaTableField(field) => {
+                if field.is_assign_field() {
+                    let table_expr = field.get_parent::<LuaTableExpr>()?;
+                    let table_type = builder
+                        .semantic_model
+                        .infer_table_should_be(table_expr.clone())?;
+                    let key = builder
+                        .semantic_model
+                        .get_member_key(&field.get_field_key()?)?;
+                    let member_infos = builder.semantic_model.get_member_infos(&table_type)?;
+                    let member_info = member_infos.iter().find(|m| m.key == key)?;
 
-            if add_field_value_completion(builder, member_info.clone()).is_some() {
-                // If a completion item was added, stop here
-                builder.stop_here();
+                    if add_field_value_completion(builder, member_info.clone()).is_some() {
+                        // If a completion item was added, stop here
+                        builder.stop_here();
+                    }
+                } else {
+                    let table_field_should = infer_table_field_value_should_be(
+                        builder.semantic_model.get_db(),
+                        &mut builder.semantic_model.get_cache().borrow_mut(),
+                        field,
+                    )
+                    .ok()?;
+                    dispatch_type(builder, table_field_should, &InferGuard::new())?;
+                }
+                return Some(());
             }
-
-            Some(())
+            _ => parent = parent.parent()?,
         }
-        _ => None,
     }
+
+    Some(())
 }

fn add_field_value_completion(
4 changes: 4 additions & 0 deletions crates/emmylua_ls/src/handlers/initialized/mod.rs
@@ -201,6 +201,10 @@ pub async fn init_analysis(
Some("Indexing complete".to_string()),
);

if mut_analysis.check_schema_update() {
mut_analysis.update_schema().await;
}

drop(mut_analysis);

if !lsp_features.supports_workspace_diagnostic() {
@@ -74,6 +74,7 @@ pub async fn on_did_save_text_document(
            .await;
        let workspace_manager = context.workspace_manager().write().await;
        workspace_manager.update_workspace_version(WorkspaceDiagnosticLevel::Slow, true);
        workspace_manager.check_schema_update().await;
    }

    return Some(());
@@ -88,6 +89,7 @@ pub async fn on_did_save_text_document(
    workspace
        .reindex_workspace(Duration::from_millis(duration))
        .await;
    workspace.check_schema_update().await;
    Some(())
}

4 changes: 2 additions & 2 deletions crates/schema_to_emmylua/src/converter.rs
@@ -64,10 +64,10 @@ impl SchemaConverter {
             emitter.blank_line();
         }
 
-        let mut root_type_name = None;
+        let mut root_type_name = "schema.root".to_string();
         // Emit the root schema as a class
         if let Some(title) = walker.root_title() {
-            root_type_name = Some(format!("{}{}", self.type_prefix, title));
+            root_type_name = format!("{}{}", self.type_prefix, title);
             let root = walker.root_schema();
             if root.get("properties").is_some() {
                 let prefixed = format!("{}{}", self.type_prefix, title);
2 changes: 1 addition & 1 deletion crates/schema_to_emmylua/src/lib.rs
@@ -7,5 +7,5 @@ pub use converter::SchemaConverter;

 pub struct ConvertResult {
     pub annotation_text: String,
-    pub root_type_name: Option<String>,
+    pub root_type_name: String,
 }