Skip to content

Commit

Permalink
Support validating standalone operations without a known schema (#631)
Browse files Browse the repository at this point in the history
* fix(compiler): validate name collisions in executable documents

The implementation for name collisions validation was only running on
the type system, not on executable documents. Now the type system
validation only checks type system definitions, while the executable
validation only checks executable definitions.

* feat(compiler): sort diagnostics by location

This improves readability for end users, and makes our output deterministic even if we
reorder validations internally.

* feat(compiler): add validation function for standalone operations without schema

* remove stray comment

* Add failing test for undefined fragment

* Propagate schema-have-ness down operation validation functions
  • Loading branch information
goto-bus-stop authored Aug 31, 2023
1 parent 3626dca commit 777bba6
Show file tree
Hide file tree
Showing 26 changed files with 996 additions and 753 deletions.
20 changes: 20 additions & 0 deletions crates/apollo-compiler/CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,26 @@ This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.htm
## Maintenance
## Documentation -->
# [x.x.x] (unreleased) - 2021-mm-dd

## Features
- Add `validate_standalone_executable` function to validate an executable document without access to a schema, by [goto-bus-stop] in [pull/631], [issue/629]

This runs just those validations that can be done on operations without knowing the schema's type definitions.
```rust
let compiler = ApolloCompiler::new();
let file_id = compiler.add_executable(r#"
{
user { ...userData }
}
"#, "query.graphql");
let diagnostics = compiler.db.validate_standalone_executable(file_id);
// Complains about `userData` fragment not existing, but does not complain about `user` being an unknown query.
```

[goto-bus-stop]: https://github.com/goto-bus-stop
[pull/631]: https://github.com/apollographql/apollo-rs/pull/631
[issue/629]: https://github.com/apollographql/apollo-rs/issues/629

# [0.11.1](https://crates.io/crates/apollo-compiler/0.11.1) - 2023-08-24

Expand Down
3 changes: 2 additions & 1 deletion crates/apollo-compiler/src/tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -30,8 +30,9 @@ fn compiler_tests() {
let file_id = compiler.add_document(text, path.file_name().unwrap());

let errors = compiler.validate();
let ast = compiler.db.ast(file_id);
assert_diagnostics_are_absent(&errors, path);

let ast = compiler.db.ast(file_id);
format!("{ast:?}")
});

Expand Down
92 changes: 48 additions & 44 deletions crates/apollo-compiler/src/validation/field.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,15 +6,17 @@ use crate::{
validation::ValidationDatabase,
};

use super::operation::OperationValidationConfig;

pub fn validate_field(
db: &dyn ValidationDatabase,
field: Arc<hir::Field>,
var_defs: Arc<Vec<hir::VariableDefinition>>,
context: OperationValidationConfig,
) -> Vec<ApolloDiagnostic> {
let mut diagnostics = db.validate_directives(
field.directives().to_vec(),
hir::DirectiveLocation::Field,
var_defs.clone(),
context.variables.clone(),
);

if let Some(field_definition) = field.field_definition(db.upcast()) {
Expand All @@ -29,13 +31,17 @@ pub fn validate_field(
.cloned();
if let Some(input_val) = input_val {
if let Some(diag) = db
.validate_variable_usage(input_val.clone(), var_defs.clone(), arg.clone())
.validate_variable_usage(
input_val.clone(),
context.variables.clone(),
arg.clone(),
)
.err()
{
diagnostics.push(diag)
} else {
let value_of_correct_type =
db.validate_values(input_val.ty(), arg, var_defs.clone());
db.validate_values(input_val.ty(), arg, context.variables.clone());

if let Err(diag) = value_of_correct_type {
diagnostics.extend(diag);
Expand Down Expand Up @@ -94,47 +100,45 @@ pub fn validate_field(
}
}

let field_type = field.ty(db.upcast());
if let Some(field_type) = field_type {
match db.validate_leaf_field_selection(field.clone(), field_type) {
Err(diag) => diagnostics.push(diag),
Ok(_) => diagnostics
.extend(db.validate_selection_set(field.selection_set().clone(), var_defs)),
}
} else {
let help = format!(
"`{}` is not defined on `{}` type",
field.name(),
field.parent_obj.as_ref().unwrap(),
);
let fname = field.name();
let diagnostic = ApolloDiagnostic::new(
db,
field.loc().into(),
DiagnosticData::UndefinedField {
field: fname.into(),
ty: field.parent_obj.clone().unwrap(),
},
)
.label(Label::new(
field.loc(),
format!("`{fname}` field is not defined"),
))
.help(help);
if context.has_schema {
let field_type = field.ty(db.upcast());
if let Some(field_type) = field_type {
match db.validate_leaf_field_selection(field.clone(), field_type) {
Err(diag) => diagnostics.push(diag),
Ok(_) => diagnostics
.extend(db.validate_selection_set(field.selection_set().clone(), context)),
}
} else if let Some(parent_obj) = field.parent_obj.as_ref() {
let help = format!("`{}` is not defined on `{}` type", field.name(), parent_obj,);
let fname = field.name();
let diagnostic = ApolloDiagnostic::new(
db,
field.loc().into(),
DiagnosticData::UndefinedField {
field: fname.into(),
ty: field.parent_obj.clone().unwrap(),
},
)
.label(Label::new(
field.loc(),
format!("`{fname}` field is not defined"),
))
.help(help);

let parent_type_loc = db
.find_type_definition_by_name(field.parent_obj.clone().unwrap())
.map(|type_def| type_def.loc());
let parent_type_loc = db
.find_type_definition_by_name(field.parent_obj.clone().unwrap())
.map(|type_def| type_def.loc());

let diagnostic = if let Some(parent_type_loc) = parent_type_loc {
diagnostic.label(Label::new(
parent_type_loc,
format!("`{}` declared here", field.parent_obj.as_ref().unwrap()),
))
} else {
diagnostic
};
diagnostics.push(diagnostic);
let diagnostic = if let Some(parent_type_loc) = parent_type_loc {
diagnostic.label(Label::new(
parent_type_loc,
format!("`{}` declared here", field.parent_obj.as_ref().unwrap()),
))
} else {
diagnostic
};
diagnostics.push(diagnostic);
}
}

diagnostics
Expand All @@ -148,7 +152,7 @@ pub fn validate_field_definition(
field.directives().to_vec(),
hir::DirectiveLocation::FieldDefinition,
// field definitions don't have variables
Arc::new(Vec::new()),
Default::default(),
);

diagnostics.extend(db.validate_arguments_definition(
Expand Down
48 changes: 30 additions & 18 deletions crates/apollo-compiler/src/validation/fragment.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,8 @@ use crate::{
};
use std::{collections::HashSet, sync::Arc};

use super::operation::OperationValidationConfig;

/// Given a type definition, find all the types that can be used for fragment spreading.
///
/// Spec: https://spec.graphql.org/October2021/#GetPossibleTypes()
Expand Down Expand Up @@ -152,28 +154,33 @@ pub fn validate_fragment_selection(
pub fn validate_inline_fragment(
db: &dyn ValidationDatabase,
inline: Arc<hir::InlineFragment>,
var_defs: Arc<Vec<hir::VariableDefinition>>,
context: OperationValidationConfig,
) -> Vec<ApolloDiagnostic> {
let mut diagnostics = Vec::new();

diagnostics.extend(db.validate_directives(
inline.directives().to_vec(),
hir::DirectiveLocation::InlineFragment,
var_defs.clone(),
context.variables.clone(),
));

let type_cond_diagnostics = if let Some(t) = inline.type_condition() {
db.validate_fragment_type_condition(t.to_string(), inline.loc())
let has_type_error = if context.has_schema {
let type_cond_diagnostics = if let Some(t) = inline.type_condition() {
db.validate_fragment_type_condition(t.to_string(), inline.loc())
} else {
Default::default()
};
let has_type_error = !type_cond_diagnostics.is_empty();
diagnostics.extend(type_cond_diagnostics);
has_type_error
} else {
Default::default()
false
};
let has_type_error = !type_cond_diagnostics.is_empty();
diagnostics.extend(type_cond_diagnostics);

// If there was an error with the type condition, it makes no sense to validate the selection,
// as every field would be an error.
if !has_type_error {
diagnostics.extend(db.validate_selection_set(inline.selection_set.clone(), var_defs));
diagnostics.extend(db.validate_selection_set(inline.selection_set.clone(), context));
diagnostics
.extend(db.validate_fragment_selection(hir::FragmentSelection::InlineFragment(inline)));
}
Expand All @@ -184,22 +191,22 @@ pub fn validate_inline_fragment(
pub fn validate_fragment_spread(
db: &dyn ValidationDatabase,
spread: Arc<hir::FragmentSpread>,
var_defs: Arc<Vec<hir::VariableDefinition>>,
context: OperationValidationConfig,
) -> Vec<ApolloDiagnostic> {
let mut diagnostics = Vec::new();

diagnostics.extend(db.validate_directives(
spread.directives().to_vec(),
hir::DirectiveLocation::FragmentSpread,
var_defs.clone(),
context.variables.clone(),
));

match spread.fragment(db.upcast()) {
Some(def) => {
diagnostics.extend(
db.validate_fragment_selection(hir::FragmentSelection::FragmentSpread(spread)),
);
diagnostics.extend(db.validate_fragment_definition(def, var_defs));
diagnostics.extend(db.validate_fragment_definition(def, context));
}
None => {
diagnostics.push(
Expand All @@ -224,26 +231,31 @@ pub fn validate_fragment_spread(
pub fn validate_fragment_definition(
db: &dyn ValidationDatabase,
def: Arc<hir::FragmentDefinition>,
var_defs: Arc<Vec<hir::VariableDefinition>>,
context: OperationValidationConfig,
) -> Vec<ApolloDiagnostic> {
let mut diagnostics = Vec::new();
diagnostics.extend(db.validate_directives(
def.directives().to_vec(),
hir::DirectiveLocation::FragmentDefinition,
var_defs.clone(),
context.variables.clone(),
));

let type_cond_diagnostics =
db.validate_fragment_type_condition(def.type_condition().to_string(), def.loc());
let has_type_error = !type_cond_diagnostics.is_empty();
diagnostics.extend(type_cond_diagnostics);
let has_type_error = if context.has_schema {
let type_cond_diagnostics =
db.validate_fragment_type_condition(def.type_condition().to_string(), def.loc());
let has_type_error = !type_cond_diagnostics.is_empty();
diagnostics.extend(type_cond_diagnostics);
has_type_error
} else {
false
};

let fragment_cycles_diagnostics = db.validate_fragment_cycles(def.clone());
let has_cycles = !fragment_cycles_diagnostics.is_empty();
diagnostics.extend(fragment_cycles_diagnostics);

if !has_type_error && !has_cycles {
diagnostics.extend(db.validate_selection_set(def.selection_set().clone(), var_defs));
diagnostics.extend(db.validate_selection_set(def.selection_set().clone(), context));
}

diagnostics
Expand Down
53 changes: 36 additions & 17 deletions crates/apollo-compiler/src/validation/operation.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,40 +5,42 @@ use crate::{
hir, FileId, ValidationDatabase,
};

pub fn validate_operation_definitions(
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct OperationValidationConfig {
/// When false, rules that require a schema to validate are disabled.
pub has_schema: bool,
/// The variables defined for this operation.
pub variables: Arc<Vec<hir::VariableDefinition>>,
}

pub(crate) fn validate_operation_definitions_inner(
db: &dyn ValidationDatabase,
file_id: FileId,
has_schema: bool,
) -> Vec<ApolloDiagnostic> {
let mut diagnostics = Vec::new();

let operations = db.operations(file_id);
for def in operations.iter() {
let config = OperationValidationConfig {
has_schema,
variables: def.variables.clone(),
};

diagnostics.extend(db.validate_directives(
def.directives().to_vec(),
def.operation_ty().into(),
// assumption here is that operation definition's own directives can
// use already defined variables
def.variables.clone(),
));
diagnostics.extend(db.validate_variable_definitions(def.variables.as_ref().clone()));
diagnostics.extend(db.validate_variable_definitions(def.variables.clone(), has_schema));

// Validate the Selection Set recursively
// Check that the root type exists
if def.object_type(db.upcast()).is_some() {
diagnostics.extend(
db.validate_selection_set(def.selection_set().clone(), def.variables.clone()),
);
}
diagnostics.extend(db.validate_unused_variable(def.clone()));
}

let subscription_operations = db.upcast().subscription_operations(file_id);
let query_operations = db.upcast().query_operations(file_id);
let mutation_operations = db.upcast().mutation_operations(file_id);

diagnostics.extend(db.validate_subscription_operations(subscription_operations));
diagnostics.extend(db.validate_query_operations(query_operations));
diagnostics.extend(db.validate_mutation_operations(mutation_operations));
// Validate the Selection Set recursively
diagnostics.extend(db.validate_selection_set(def.selection_set().clone(), config));
}

// It is possible to have an unnamed (anonymous) operation definition only
// if there is **one** operation definition.
Expand All @@ -63,9 +65,26 @@ pub fn validate_operation_definitions(
diagnostics.extend(missing_ident);
}

if has_schema {
let subscription_operations = db.upcast().subscription_operations(file_id);
let query_operations = db.upcast().query_operations(file_id);
let mutation_operations = db.upcast().mutation_operations(file_id);

diagnostics.extend(db.validate_subscription_operations(subscription_operations));
diagnostics.extend(db.validate_query_operations(query_operations));
diagnostics.extend(db.validate_mutation_operations(mutation_operations));
}

diagnostics
}

pub fn validate_operation_definitions(
db: &dyn ValidationDatabase,
file_id: FileId,
) -> Vec<ApolloDiagnostic> {
validate_operation_definitions_inner(db, file_id, false)
}

pub fn validate_subscription_operations(
db: &dyn ValidationDatabase,
subscriptions: Arc<Vec<Arc<hir::OperationDefinition>>>,
Expand Down
Loading

0 comments on commit 777bba6

Please sign in to comment.