From e25c0995844c208aaf88c4e7e907595e7a8c159d Mon Sep 17 00:00:00 2001 From: Aly Thobani Date: Wed, 19 Mar 2025 19:58:28 -0700 Subject: [PATCH 01/11] feat: add `all_fields_required` option --- pydantic2ts/cli/script.py | 32 ++++++++++++++++++++++++++++---- 1 file changed, 28 insertions(+), 4 deletions(-) diff --git a/pydantic2ts/cli/script.py b/pydantic2ts/cli/script.py index 76fc9a9..3ada3aa 100644 --- a/pydantic2ts/cli/script.py +++ b/pydantic2ts/cli/script.py @@ -28,6 +28,7 @@ if TYPE_CHECKING: # pragma: no cover from pydantic.config import ConfigDict + from pydantic.fields import FieldInfo from pydantic.v1.config import BaseConfig from pydantic.v1.fields import ModelField @@ -158,7 +159,9 @@ def _extract_pydantic_models(module: ModuleType) -> List[type]: return models -def _clean_json_schema(schema: Dict[str, Any], model: Any = None) -> None: +def _clean_json_schema( + schema: Dict[str, Any], model: Any = None, all_fields_required: bool = False +) -> None: """ Clean up the resulting JSON schemas via the following steps: @@ -198,6 +201,13 @@ def _clean_json_schema(schema: Dict[str, Any], model: Any = None) -> None: exc_info=True, ) + if _is_v2_model(model) and all_fields_required: + required_properties = schema.setdefault("required", []) + fields_v2: dict[str, FieldInfo] = model.model_fields + for field_name in fields_v2: + if field_name not in required_properties: + required_properties.append(field_name) + def _clean_output_file(output_filename: str) -> None: """ @@ -266,7 +276,7 @@ def _schema_generation_overrides( setattr(config, key, value) -def _generate_json_schema(models: List[type]) -> str: +def _generate_json_schema(models: List[type], all_fields_required: bool = False) -> str: """ Create a top-level '_Master_' model with references to each of the actual models. Generate the schema for this model, which will include the schemas for all the @@ -291,7 +301,9 @@ def _generate_json_schema(models: List[type]) -> str: defs: Dict[str, Any] = master_schema.get(defs_key, {}) for name, schema in defs.items(): - _clean_json_schema(schema, models_by_name.get(name)) + _clean_json_schema( + schema, models_by_name.get(name), all_fields_required=all_fields_required + ) return json.dumps(master_schema, indent=2) @@ -301,6 +313,7 @@ def generate_typescript_defs( output: str, exclude: Tuple[str, ...] = (), json2ts_cmd: str = "json2ts", + all_fields_required: bool = False, ) -> None: """ Convert the pydantic models in a python module into typescript interfaces. @@ -313,6 +326,9 @@ def generate_typescript_defs( :param json2ts_cmd: optional, the command that will execute json2ts. Provide this if the executable is not discoverable or if it's locally installed (ex: 'yarn json2ts'). + :param all_fields_required: optional, treat all v2 model fields (including + those with defaults) as required in generated + TypeScript definitions. 
""" if " " not in json2ts_cmd and not shutil.which(json2ts_cmd): raise Exception( @@ -335,7 +351,7 @@ def generate_typescript_defs( LOG.info("Generating JSON schema from pydantic models...") - schema = _generate_json_schema(models) + schema = _generate_json_schema(models, all_fields_required=all_fields_required) schema_dir = mkdtemp() schema_file_path = os.path.join(schema_dir, "schema.json") @@ -392,6 +408,13 @@ def parse_cli_args(args: Optional[List[str]] = None) -> argparse.Namespace: "Provide this if it's not discoverable or if it's only installed locally (example: 'yarn json2ts').\n" "(default: json2ts)", ) + parser.add_argument( + "--all-fields-required", + action="store_true", + default=False, + help="Treat all fields (including those with defaults) as required in generated TypeScript definitions.\n" + "(Currently supported only for Pydantic V2 models.)", + ) return parser.parse_args(args) @@ -406,6 +429,7 @@ def main() -> None: args.output, tuple(args.exclude), args.json2ts_cmd, + all_fields_required=args.all_fields_required, ) From 2408ba730149f083e2174d8637458c54c39f8a32 Mon Sep 17 00:00:00 2001 From: Aly Thobani Date: Thu, 20 Mar 2025 10:23:02 -0700 Subject: [PATCH 02/11] docs: update README for new `--all-fields-required` flag --- README.md | 95 +++++++++++++++++++++++++++++++++++++++++++++++-------- 1 file changed, 81 insertions(+), 14 deletions(-) diff --git a/README.md b/README.md index 02e3e57..f507d4f 100644 --- a/README.md +++ b/README.md @@ -12,36 +12,37 @@ Useful for any scenario in which python and javascript applications are interact This tool requires that you have the lovely json2ts CLI utility installed. Instructions can be found here: https://www.npmjs.com/package/json-schema-to-typescript -### Installation +## Installation ```bash -$ pip install pydantic-to-typescript +pip install pydantic-to-typescript ``` -### Pydantic V2 support +## Pydantic V2 support If you are encountering issues with `pydantic>2`, it is most likely because you're using an old version of `pydantic-to-typescript`. Run `pip install 'pydantic-to-typescript>2'` and/or add `pydantic-to-typescript>=2` to your project requirements. -### CI/CD +## CI/CD You can now use `pydantic-to-typescript` to automatically validate and/or update typescript definitions as part of your CI/CD pipeline. The github action can be found here: https://github.com/marketplace/actions/pydantic-to-typescript. The available inputs are documented here: https://github.com/phillipdupuis/pydantic-to-typescript/blob/master/action.yml. -### CLI +## CLI -| Prop | Description | -| :------------------------------ | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| ‑‑module | name or filepath of the python module you would like to convert. All the pydantic models within it will be converted to typescript interfaces. Discoverable submodules will also be checked. | -| ‑‑output | name of the file the typescript definitions should be written to. Ex: './frontend/apiTypes.ts' | -| ‑‑exclude | name of a pydantic model which should be omitted from the resulting typescript definitions. This option can be defined multiple times, ex: `--exclude Foo --exclude Bar` to exclude both the Foo and Bar models from the output. | -| ‑‑json2ts‑cmd | optional, the command used to invoke json2ts. The default is 'json2ts'. 
Specify this if you have it installed locally (ex: 'yarn json2ts') or if the exact path to the executable is required (ex: /myproject/node_modules/bin/json2ts) | +| Prop | Description | +| :-------------------------------------------- | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| ‑‑module | name or filepath of the python module you would like to convert. All the pydantic models within it will be converted to typescript interfaces. Discoverable submodules will also be checked. | +| ‑‑output | name of the file the typescript definitions should be written to. Ex: './frontend/apiTypes.ts' | +| ‑‑exclude | name of a pydantic model which should be omitted from the resulting typescript definitions. This option can be defined multiple times, ex: `--exclude Foo --exclude Bar` to exclude both the Foo and Bar models from the output. | +| ‑‑json2ts‑cmd | optional, the command used to invoke json2ts. The default is 'json2ts'. Specify this if you have it installed locally (ex: 'yarn json2ts') or if the exact path to the executable is required (ex: /myproject/node_modules/bin/json2ts) | +| ‑‑all‑fields‑required | optional. Treats all fields (even those with defaults) as required in the generated TypeScript interfaces. (Pydantic v2 only) | --- -### Usage +## Usage Define your pydantic models (ex: /backend/api.py): @@ -74,13 +75,13 @@ def login(body: LoginCredentials): Execute the command for converting these models into typescript definitions, via: ```bash -$ pydantic2ts --module backend.api --output ./frontend/apiTypes.ts +pydantic2ts --module backend.api --output ./frontend/apiTypes.ts ``` or: ```bash -$ pydantic2ts --module ./backend/api.py --output ./frontend/apiTypes.ts +pydantic2ts --module ./backend/api.py --output ./frontend/apiTypes.ts ``` or: @@ -138,3 +139,69 @@ async function login( } } ``` + +### Treating all fields as required + +If you are using pydantic v2 and would like to treat all fields as required in the generated TypeScript interfaces, you can use the `--all-fields-required` flag. + +This is useful if you know that all fields will be present on the TypeScript side; for example, when representing a response from your Python backend API (since Pydantic will populate any missing fields with defaults before the response is sent to the client). 
+ +#### Example (pydantic v2) + +```python +from pydantic import BaseModel, Field +from typing import Annotated, Literal, Optional + +class ExampleModel(BaseModel): + a: Annotated[int, Field(default=2)] + b: Annotated[list[int], Field(default_factory=list)] + c: Literal["c"] = "c" + d: int = 1 + e: Optional[int] + f: Optional[int] = None + g: Optional[int] = 3 +``` + +Executing with `--all-fields-required`: + +```bash +pydantic2ts --module backend.api --output ./frontend/apiTypes.ts --all-fields-required +``` + +Generated TypeScript interface: + +```ts +export interface ExampleModel { + a: number; + b: number[]; + c: "c"; + d: number; + e: number | null; + f: number | null; + g: number | null; +} +``` + +Executing without `--all-fields-required`: + +```bash +pydantic2ts --module backend.api --output ./frontend/apiTypes.ts +``` + +Generated TypeScript interface: + +```ts +export interface ExampleModel { + a?: number; + b?: number[]; + c?: "c"; + d?: number; + e: number | null; + f?: number | null; + g?: number | null; +} +``` + +> [!NOTE] +> Field `e` is required (not marked as optional) in the generated interface, even without the `--all-fields-required` flag. This is because, in Pydantic v2, fields annotated as `Optional[...]` or `Any` are no longer given an implicit default of `None`. See [Pydantic docs](https://docs.pydantic.dev/latest/concepts/models/#required-fields): +> > [in Pydantic V2] there are no longer any type annotations that will result in a field having an implicit default value. From 1e01b6b2b2c787ee92b30fc5ebf528394a98197a Mon Sep 17 00:00:00 2001 From: Aly Thobani Date: Thu, 20 Mar 2025 13:27:04 -0700 Subject: [PATCH 03/11] feat: account for serialization aliases --- pydantic2ts/cli/script.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/pydantic2ts/cli/script.py b/pydantic2ts/cli/script.py index 3ada3aa..735b83b 100644 --- a/pydantic2ts/cli/script.py +++ b/pydantic2ts/cli/script.py @@ -27,8 +27,8 @@ import pydantic2ts.pydantic_v2 as v2 if TYPE_CHECKING: # pragma: no cover + from pydantic import BaseModel as V2BaseModel from pydantic.config import ConfigDict - from pydantic.fields import FieldInfo from pydantic.v1.config import BaseConfig from pydantic.v1.fields import ModelField @@ -202,11 +202,15 @@ def _clean_json_schema( ) if _is_v2_model(model) and all_fields_required: - required_properties = schema.setdefault("required", []) - fields_v2: dict[str, FieldInfo] = model.model_fields - for field_name in fields_v2: - if field_name not in required_properties: - required_properties.append(field_name) + _treat_all_fields_as_required(schema, model) + + +def _treat_all_fields_as_required(schema: Dict[str, Any], model: "V2BaseModel") -> None: + required_properties = schema.setdefault("required", []) + for field_name, field_info in model.model_fields.items(): + serialization_field_name = field_info.serialization_alias or field_info.alias or field_name + if serialization_field_name not in required_properties: + required_properties.append(serialization_field_name) def _clean_output_file(output_filename: str) -> None: From d9ff500a2cdd45e09792d0e5e5db80585d569142 Mon Sep 17 00:00:00 2001 From: Aly Thobani Date: Wed, 9 Apr 2025 10:41:02 -0700 Subject: [PATCH 04/11] fix: ensure all reachable models are processed under --all-fields-required Previously, some Pydantic models that were only indirectly referenced (e.g. buried inside an Annotated[Union[...]] field) weren't processed before final schema generation. 
As a result, their default-valued fields would be optional in the TypeScript output, even when all_fields_required was set. We now recursively walk and collect all reachable models before calling `_clean_json_schema`, ensuring all schemas are properly processed. --- pydantic2ts/cli/script.py | 87 ++++++++++++++++++++++++++++++++------- 1 file changed, 72 insertions(+), 15 deletions(-) diff --git a/pydantic2ts/cli/script.py b/pydantic2ts/cli/script.py index 735b83b..b048ac1 100644 --- a/pydantic2ts/cli/script.py +++ b/pydantic2ts/cli/script.py @@ -12,6 +12,7 @@ from types import ModuleType from typing import ( TYPE_CHECKING, + Annotated, Any, Dict, Generator, @@ -20,6 +21,8 @@ Tuple, Type, Union, + get_args, + get_origin, ) from uuid import uuid4 @@ -280,38 +283,91 @@ def _schema_generation_overrides( setattr(config, key, value) -def _generate_json_schema(models: List[type], all_fields_required: bool = False) -> str: +def _generate_json_schema(all_models: list[type], root_models: list[type], all_fields_required: bool = False) -> str: """ Create a top-level '_Master_' model with references to each of the actual models. Generate the schema for this model, which will include the schemas for all the nested models. Then clean up the schema. """ with ExitStack() as stack: - models_by_name: Dict[str, type] = {} - models_as_fields: Dict[str, Tuple[type, Any]] = {} + all_models_by_full_qualname: Dict[str, type] = {} + root_models_as_fields: Dict[str, Tuple[type, Any]] = {} - for model in models: + for model in all_models: stack.enter_context(_schema_generation_overrides(model)) name = model.__name__ - models_by_name[name] = model - models_as_fields[name] = (model, ...) + full_qualname = f"{model.__module__}.{model.__name__}" + all_models_by_full_qualname[full_qualname] = model + if model in root_models: # Only top-level models get put into _Master_ + root_models_as_fields[name] = (model, ...) - use_v1_tools = any(issubclass(m, v1.BaseModel) for m in models) + use_v1_tools = any(issubclass(m, v1.BaseModel) for m in root_models) create_model = v1.create_model if use_v1_tools else v2.create_model # type: ignore - master_model = create_model("_Master_", **models_as_fields) # type: ignore + master_model = create_model("_Master_", **root_models_as_fields) # type: ignore master_schema = _get_model_json_schema(master_model) # type: ignore defs_key = "$defs" if "$defs" in master_schema else "definitions" defs: Dict[str, Any] = master_schema.get(defs_key, {}) for name, schema in defs.items(): - _clean_json_schema( - schema, models_by_name.get(name), all_fields_required=all_fields_required + # Match the schema definition name back to the model class using its full qualified name + matched_model: type | None = next( + (m for full_qn, m in all_models_by_full_qualname.items() if full_qn.endswith(f".{name}")), + None, ) + _clean_json_schema(schema, matched_model, all_fields_required=all_fields_required) return json.dumps(master_schema, indent=2) +def _collect_all_models(root_models: List[type]) -> List[type]: + """ + Given a list of root Pydantic models, walk all referenced model fields recursively + to collect all concrete model classes (BaseModel subclasses). 
+ """ + seen = set[type]() + result = list[type[v1.BaseModel] | type[v2.BaseModel]]() + + def walk(type_: Any) -> None: + if type_ in seen: + return + seen.add(type_) + + # Always unwrap and walk inner types — whether it's a model or not + for inner in _unwrap_type(type_): + walk(inner) + + if inspect.isclass(type_) and issubclass(type_, (v1.BaseModel, v2.BaseModel)): + result.append(type_) + for field in getattr(type_, "model_fields", {}).values(): + for inner in _unwrap_type(field.annotation): + walk(inner) + + for m in root_models: + walk(m) + + return result + + +def _unwrap_type(type_: Any) -> List[type]: + """ + Recursively extract all types from nested containers (List[T], Dict[K, V], Annotated, etc.) + """ + origin = get_origin(type_) + + if origin is Annotated: + base_type, *_annotations = get_args(type_) + return _unwrap_type(base_type) + + elif origin is Union: + return [t for arg in get_args(type_) for t in _unwrap_type(arg)] + + elif hasattr(type_, "__args__"): # Handle generics like List[...] + return [t for arg in get_args(type_) for t in _unwrap_type(arg)] + + return [type_] + + def generate_typescript_defs( module: str, output: str, @@ -342,20 +398,21 @@ def generate_typescript_defs( LOG.info("Finding pydantic models...") - models = _extract_pydantic_models(_import_module(module)) + root_models = _extract_pydantic_models(_import_module(module)) + all_models = _collect_all_models(root_models) if exclude: - models = [ - m for m in models if (m.__name__ not in exclude and m.__qualname__ not in exclude) + all_models = [ + m for m in all_models if (m.__name__ not in exclude and m.__qualname__ not in exclude) ] - if not models: + if not all_models: LOG.info("No pydantic models found, exiting.") return LOG.info("Generating JSON schema from pydantic models...") - schema = _generate_json_schema(models, all_fields_required=all_fields_required) + schema = _generate_json_schema(all_models=all_models, root_models=root_models, all_fields_required=all_fields_required) schema_dir = mkdtemp() schema_file_path = os.path.join(schema_dir, "schema.json") From a939205378e23eed6387407cb1453e5333916f93 Mon Sep 17 00:00:00 2001 From: Aly Thobani Date: Wed, 9 Apr 2025 11:40:18 -0700 Subject: [PATCH 05/11] fix: use List[str] instead of list[str] for 3.8 compatibility just in case --- pydantic2ts/cli/script.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pydantic2ts/cli/script.py b/pydantic2ts/cli/script.py index b048ac1..00c2556 100644 --- a/pydantic2ts/cli/script.py +++ b/pydantic2ts/cli/script.py @@ -32,6 +32,7 @@ if TYPE_CHECKING: # pragma: no cover from pydantic import BaseModel as V2BaseModel from pydantic.config import ConfigDict + from pydantic.v1 import BaseModel as V1BaseModel from pydantic.v1.config import BaseConfig from pydantic.v1.fields import ModelField @@ -283,7 +284,7 @@ def _schema_generation_overrides( setattr(config, key, value) -def _generate_json_schema(all_models: list[type], root_models: list[type], all_fields_required: bool = False) -> str: +def _generate_json_schema(all_models: List[type], root_models: List[type], all_fields_required: bool = False) -> str: """ Create a top-level '_Master_' model with references to each of the actual models. Generate the schema for this model, which will include the schemas for all the @@ -326,8 +327,7 @@ def _collect_all_models(root_models: List[type]) -> List[type]: to collect all concrete model classes (BaseModel subclasses). 
""" seen = set[type]() - result = list[type[v1.BaseModel] | type[v2.BaseModel]]() - + result: List[Type[Union["V1BaseModel", "V2BaseModel"]]] = [] def walk(type_: Any) -> None: if type_ in seen: return From 33f538a7d40d91284fc75f3d1190816e5960ba07 Mon Sep 17 00:00:00 2001 From: Aly Thobani Date: Wed, 9 Apr 2025 14:09:34 -0700 Subject: [PATCH 06/11] fix: import Annotated, get_args, and get_origin from typing_extensions for python 3.8 compatibility --- pydantic2ts/cli/script.py | 20 ++++++++++++++------ 1 file changed, 14 insertions(+), 6 deletions(-) diff --git a/pydantic2ts/cli/script.py b/pydantic2ts/cli/script.py index 00c2556..f082040 100644 --- a/pydantic2ts/cli/script.py +++ b/pydantic2ts/cli/script.py @@ -12,7 +12,6 @@ from types import ModuleType from typing import ( TYPE_CHECKING, - Annotated, Any, Dict, Generator, @@ -21,11 +20,11 @@ Tuple, Type, Union, - get_args, - get_origin, ) from uuid import uuid4 +from typing_extensions import Annotated, get_args, get_origin + import pydantic2ts.pydantic_v1 as v1 import pydantic2ts.pydantic_v2 as v2 @@ -284,7 +283,9 @@ def _schema_generation_overrides( setattr(config, key, value) -def _generate_json_schema(all_models: List[type], root_models: List[type], all_fields_required: bool = False) -> str: +def _generate_json_schema( + all_models: List[type], root_models: List[type], all_fields_required: bool = False +) -> str: """ Create a top-level '_Master_' model with references to each of the actual models. Generate the schema for this model, which will include the schemas for all the @@ -313,7 +314,11 @@ def _generate_json_schema(all_models: List[type], root_models: List[type], all_f for name, schema in defs.items(): # Match the schema definition name back to the model class using its full qualified name matched_model: type | None = next( - (m for full_qn, m in all_models_by_full_qualname.items() if full_qn.endswith(f".{name}")), + ( + m + for full_qn, m in all_models_by_full_qualname.items() + if full_qn.endswith(f".{name}") + ), None, ) _clean_json_schema(schema, matched_model, all_fields_required=all_fields_required) @@ -328,6 +333,7 @@ def _collect_all_models(root_models: List[type]) -> List[type]: """ seen = set[type]() result: List[Type[Union["V1BaseModel", "V2BaseModel"]]] = [] + def walk(type_: Any) -> None: if type_ in seen: return @@ -412,7 +418,9 @@ def generate_typescript_defs( LOG.info("Generating JSON schema from pydantic models...") - schema = _generate_json_schema(all_models=all_models, root_models=root_models, all_fields_required=all_fields_required) + schema = _generate_json_schema( + all_models=all_models, root_models=root_models, all_fields_required=all_fields_required + ) schema_dir = mkdtemp() schema_file_path = os.path.join(schema_dir, "schema.json") From 68d00551856da756d5daa6283396d2cb54ac5e43 Mon Sep 17 00:00:00 2001 From: Aly Thobani Date: Thu, 10 Apr 2025 14:12:53 -0700 Subject: [PATCH 07/11] feat: mark all fields as required in a simpler way (now works for both v1 and v2 models) - revert last 4 commits (serialization aliases and walking all models) - just add any property names from the schema directly into `required` instead of working with the model fields directly and needing to account for aliases - walking models no longer needed for all-fields-required to apply to all models used; even if `model` is None we still handle the schema --- pydantic2ts/cli/script.py | 109 ++++++++------------------------------ 1 file changed, 21 insertions(+), 88 deletions(-) diff --git a/pydantic2ts/cli/script.py 
b/pydantic2ts/cli/script.py index f082040..42cbfc4 100644 --- a/pydantic2ts/cli/script.py +++ b/pydantic2ts/cli/script.py @@ -23,15 +23,11 @@ ) from uuid import uuid4 -from typing_extensions import Annotated, get_args, get_origin - import pydantic2ts.pydantic_v1 as v1 import pydantic2ts.pydantic_v2 as v2 if TYPE_CHECKING: # pragma: no cover - from pydantic import BaseModel as V2BaseModel from pydantic.config import ConfigDict - from pydantic.v1 import BaseModel as V1BaseModel from pydantic.v1.config import BaseConfig from pydantic.v1.fields import ModelField @@ -204,16 +200,15 @@ def _clean_json_schema( exc_info=True, ) - if _is_v2_model(model) and all_fields_required: - _treat_all_fields_as_required(schema, model) + if all_fields_required: + _treat_all_fields_as_required(schema) -def _treat_all_fields_as_required(schema: Dict[str, Any], model: "V2BaseModel") -> None: +def _treat_all_fields_as_required(schema: Dict[str, Any]) -> None: required_properties = schema.setdefault("required", []) - for field_name, field_info in model.model_fields.items(): - serialization_field_name = field_info.serialization_alias or field_info.alias or field_name - if serialization_field_name not in required_properties: - required_properties.append(serialization_field_name) + for prop_name in schema.get("properties", {}).keys(): + if prop_name not in required_properties: + required_properties.append(prop_name) def _clean_output_file(output_filename: str) -> None: @@ -283,97 +278,38 @@ def _schema_generation_overrides( setattr(config, key, value) -def _generate_json_schema( - all_models: List[type], root_models: List[type], all_fields_required: bool = False -) -> str: +def _generate_json_schema(models: List[type], all_fields_required: bool = False) -> str: """ Create a top-level '_Master_' model with references to each of the actual models. Generate the schema for this model, which will include the schemas for all the nested models. Then clean up the schema. """ with ExitStack() as stack: - all_models_by_full_qualname: Dict[str, type] = {} - root_models_as_fields: Dict[str, Tuple[type, Any]] = {} + models_by_name: Dict[str, type] = {} + models_as_fields: Dict[str, Tuple[type, Any]] = {} - for model in all_models: + for model in models: stack.enter_context(_schema_generation_overrides(model)) name = model.__name__ - full_qualname = f"{model.__module__}.{model.__name__}" - all_models_by_full_qualname[full_qualname] = model - if model in root_models: # Only top-level models get put into _Master_ - root_models_as_fields[name] = (model, ...) + models_by_name[name] = model + models_as_fields[name] = (model, ...) 
- use_v1_tools = any(issubclass(m, v1.BaseModel) for m in root_models) + use_v1_tools = any(issubclass(m, v1.BaseModel) for m in models) create_model = v1.create_model if use_v1_tools else v2.create_model # type: ignore - master_model = create_model("_Master_", **root_models_as_fields) # type: ignore + master_model = create_model("_Master_", **models_as_fields) # type: ignore master_schema = _get_model_json_schema(master_model) # type: ignore defs_key = "$defs" if "$defs" in master_schema else "definitions" defs: Dict[str, Any] = master_schema.get(defs_key, {}) for name, schema in defs.items(): - # Match the schema definition name back to the model class using its full qualified name - matched_model: type | None = next( - ( - m - for full_qn, m in all_models_by_full_qualname.items() - if full_qn.endswith(f".{name}") - ), - None, + _clean_json_schema( + schema, models_by_name.get(name), all_fields_required=all_fields_required ) - _clean_json_schema(schema, matched_model, all_fields_required=all_fields_required) return json.dumps(master_schema, indent=2) -def _collect_all_models(root_models: List[type]) -> List[type]: - """ - Given a list of root Pydantic models, walk all referenced model fields recursively - to collect all concrete model classes (BaseModel subclasses). - """ - seen = set[type]() - result: List[Type[Union["V1BaseModel", "V2BaseModel"]]] = [] - - def walk(type_: Any) -> None: - if type_ in seen: - return - seen.add(type_) - - # Always unwrap and walk inner types — whether it's a model or not - for inner in _unwrap_type(type_): - walk(inner) - - if inspect.isclass(type_) and issubclass(type_, (v1.BaseModel, v2.BaseModel)): - result.append(type_) - for field in getattr(type_, "model_fields", {}).values(): - for inner in _unwrap_type(field.annotation): - walk(inner) - - for m in root_models: - walk(m) - - return result - - -def _unwrap_type(type_: Any) -> List[type]: - """ - Recursively extract all types from nested containers (List[T], Dict[K, V], Annotated, etc.) - """ - origin = get_origin(type_) - - if origin is Annotated: - base_type, *_annotations = get_args(type_) - return _unwrap_type(base_type) - - elif origin is Union: - return [t for arg in get_args(type_) for t in _unwrap_type(arg)] - - elif hasattr(type_, "__args__"): # Handle generics like List[...] 
- return [t for arg in get_args(type_) for t in _unwrap_type(arg)] - - return [type_] - - def generate_typescript_defs( module: str, output: str, @@ -404,23 +340,20 @@ def generate_typescript_defs( LOG.info("Finding pydantic models...") - root_models = _extract_pydantic_models(_import_module(module)) - all_models = _collect_all_models(root_models) + models = _extract_pydantic_models(_import_module(module)) if exclude: - all_models = [ - m for m in all_models if (m.__name__ not in exclude and m.__qualname__ not in exclude) + models = [ + m for m in models if (m.__name__ not in exclude and m.__qualname__ not in exclude) ] - if not all_models: + if not models: LOG.info("No pydantic models found, exiting.") return LOG.info("Generating JSON schema from pydantic models...") - schema = _generate_json_schema( - all_models=all_models, root_models=root_models, all_fields_required=all_fields_required - ) + schema = _generate_json_schema(models, all_fields_required=all_fields_required) schema_dir = mkdtemp() schema_file_path = os.path.join(schema_dir, "schema.json") From beca44456e401fcdcdcd3ccd7eb28913847fae75 Mon Sep 17 00:00:00 2001 From: Aly Thobani Date: Thu, 10 Apr 2025 16:01:06 -0700 Subject: [PATCH 08/11] docs: update README now that the flag works for both v1 and v2 --- README.md | 16 ++++------------ 1 file changed, 4 insertions(+), 12 deletions(-) diff --git a/README.md b/README.md index f507d4f..6008eed 100644 --- a/README.md +++ b/README.md @@ -38,7 +38,7 @@ The available inputs are documented here: https://github.com/phillipdupuis/pydan | ‑‑output | name of the file the typescript definitions should be written to. Ex: './frontend/apiTypes.ts' | | ‑‑exclude | name of a pydantic model which should be omitted from the resulting typescript definitions. This option can be defined multiple times, ex: `--exclude Foo --exclude Bar` to exclude both the Foo and Bar models from the output. | | ‑‑json2ts‑cmd | optional, the command used to invoke json2ts. The default is 'json2ts'. Specify this if you have it installed locally (ex: 'yarn json2ts') or if the exact path to the executable is required (ex: /myproject/node_modules/bin/json2ts) | -| ‑‑all‑fields‑required | optional. Treats all fields (even those with defaults) as required in the generated TypeScript interfaces. (Pydantic v2 only) | +| ‑‑all‑fields‑required | optional (off by default). Treats all fields as required (present) in the generated TypeScript interfaces. | --- @@ -142,11 +142,11 @@ async function login( ### Treating all fields as required -If you are using pydantic v2 and would like to treat all fields as required in the generated TypeScript interfaces, you can use the `--all-fields-required` flag. +If you would like to treat all fields as required in the generated TypeScript interfaces, you can use the `--all-fields-required` flag. -This is useful if you know that all fields will be present on the TypeScript side; for example, when representing a response from your Python backend API (since Pydantic will populate any missing fields with defaults before the response is sent to the client). +This is useful, for example, when representing a response from your Python backend API—since Pydantic will populate any missing fields with defaults before sending the response. 
-#### Example (pydantic v2) +#### Example ```python from pydantic import BaseModel, Field @@ -168,8 +168,6 @@ Executing with `--all-fields-required`: pydantic2ts --module backend.api --output ./frontend/apiTypes.ts --all-fields-required ``` -Generated TypeScript interface: - ```ts export interface ExampleModel { a: number; @@ -188,8 +186,6 @@ Executing without `--all-fields-required`: pydantic2ts --module backend.api --output ./frontend/apiTypes.ts ``` -Generated TypeScript interface: - ```ts export interface ExampleModel { a?: number; @@ -201,7 +197,3 @@ export interface ExampleModel { g?: number | null; } ``` - -> [!NOTE] -> Field `e` is required (not marked as optional) in the generated interface, even without the `--all-fields-required` flag. This is because, in Pydantic v2, fields annotated as `Optional[...]` or `Any` are no longer given an implicit default of `None`. See [Pydantic docs](https://docs.pydantic.dev/latest/concepts/models/#required-fields): -> > [in Pydantic V2] there are no longer any type annotations that will result in a field having an implicit default value. From fc92d521cef8b3a932dcde0cc8babe3c2e118201 Mon Sep 17 00:00:00 2001 From: Aly Thobani Date: Thu, 10 Apr 2025 17:22:56 -0700 Subject: [PATCH 09/11] docs: update readme with better named example fields --- README.md | 46 +++++++++++++++++++++++++--------------------- 1 file changed, 25 insertions(+), 21 deletions(-) diff --git a/README.md b/README.md index 6008eed..95df953 100644 --- a/README.md +++ b/README.md @@ -153,13 +153,13 @@ from pydantic import BaseModel, Field from typing import Annotated, Literal, Optional class ExampleModel(BaseModel): - a: Annotated[int, Field(default=2)] - b: Annotated[list[int], Field(default_factory=list)] - c: Literal["c"] = "c" - d: int = 1 - e: Optional[int] - f: Optional[int] = None - g: Optional[int] = 3 + literal_str_with_default: Literal["c"] = "c" + int_with_default: int = 1 + int_with_pydantic_default: Annotated[int, Field(default=2)] + int_list_with_default_factory: Annotated[list[int], Field(default_factory=list)] + nullable_int: Optional[int] + nullable_int_with_default: Optional[int] = 3 + nullable_int_with_null_default: Optional[int] = None ``` Executing with `--all-fields-required`: @@ -170,13 +170,13 @@ pydantic2ts --module backend.api --output ./frontend/apiTypes.ts --all-fields-re ```ts export interface ExampleModel { - a: number; - b: number[]; - c: "c"; - d: number; - e: number | null; - f: number | null; - g: number | null; + literal_str_with_default: "c"; + int_with_default: number; + int_with_pydantic_default: number; + int_list_with_default_factory: number[]; + nullable_int: number | null; + nullable_int_with_default: number | null; + nullable_int_with_null_default: number | null; } ``` @@ -188,12 +188,16 @@ pydantic2ts --module backend.api --output ./frontend/apiTypes.ts ```ts export interface ExampleModel { - a?: number; - b?: number[]; - c?: "c"; - d?: number; - e: number | null; - f?: number | null; - g?: number | null; + literal_str_with_default?: "c"; + int_with_default?: number; + int_with_pydantic_default?: number; + int_list_with_default_factory?: number[]; + nullable_int: number | null; // optional if Pydantic V1 + nullable_int_with_default?: number | null; + nullable_int_with_null_default?: number | null; } ``` + +> [!NOTE] +> If you're using Pydantic V1, `nullable_int` will also be optional (`nullable_int?: number | null`) when executing without `--all-fields-required`. 
See [Pydantic docs](https://docs.pydantic.dev/2.10/concepts/models/#required-fields): +> > In Pydantic V1, fields annotated with `Optional` or `Any` would be given an implicit default of `None` even if no default was explicitly specified. This behavior has changed in Pydantic V2, and there are no longer any type annotations that will result in a field having an implicit default value. From f826cf45c21bc99f7ee92477348a510fcb4b34a3 Mon Sep 17 00:00:00 2001 From: Aly Thobani Date: Thu, 10 Apr 2025 22:35:19 -0700 Subject: [PATCH 10/11] docs: update _clean_json_schema docstring to mention --all-fields-required addition --- pydantic2ts/cli/script.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pydantic2ts/cli/script.py b/pydantic2ts/cli/script.py index 42cbfc4..c84e5ca 100644 --- a/pydantic2ts/cli/script.py +++ b/pydantic2ts/cli/script.py @@ -172,6 +172,9 @@ def _clean_json_schema( resulting typescript file (which is a LOT of unnecessary noise). 3) If it's a V1 model, ensure that nullability is properly represented. https://github.com/pydantic/pydantic/issues/1270 + 4) If all_fields_required is True, ensure that all properties are included in the + "required" list of the schema, so they don't get marked as optional in the + resulting typescript definitions. """ description = schema.get("description") From a0aee273ffd78810f6ec1150b1ea28c6efd8797f Mon Sep 17 00:00:00 2001 From: Aly Thobani Date: Sun, 27 Apr 2025 20:55:36 -0700 Subject: [PATCH 11/11] chore: remove references to all-fields-required only applying to V2 models --- pydantic2ts/cli/script.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/pydantic2ts/cli/script.py b/pydantic2ts/cli/script.py index c84e5ca..27b116d 100644 --- a/pydantic2ts/cli/script.py +++ b/pydantic2ts/cli/script.py @@ -331,7 +331,7 @@ def generate_typescript_defs( :param json2ts_cmd: optional, the command that will execute json2ts. Provide this if the executable is not discoverable or if it's locally installed (ex: 'yarn json2ts'). - :param all_fields_required: optional, treat all v2 model fields (including + :param all_fields_required: optional, treat all model fields (including those with defaults) as required in generated TypeScript definitions. """ @@ -417,8 +417,7 @@ def parse_cli_args(args: Optional[List[str]] = None) -> argparse.Namespace: "--all-fields-required", action="store_true", default=False, - help="Treat all fields (including those with defaults) as required in generated TypeScript definitions.\n" - "(Currently supported only for Pydantic V2 models.)", + help="Treat all fields (including those with defaults) as required in generated TypeScript definitions.", ) return parser.parse_args(args)
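
For reference, the approach the series converges on in PATCH 07 can be exercised in isolation. The sketch below copies the final `_treat_all_fields_as_required` helper from the diff and applies it to a hand-written JSON-schema fragment (the fragment is illustrative only, not captured pydantic output). It shows why the helper needs no alias handling and works for both Pydantic v1 and v2 models: it only ever sees property names that are already present in the generated schema.

```python
# Minimal standalone sketch (not part of the patch series).
# The helper is copied from the final diff; the `schema` dict is a
# hand-written illustrative fragment, not real pydantic output.
from typing import Any, Dict


def _treat_all_fields_as_required(schema: Dict[str, Any]) -> None:
    # Append every property name in the schema to its "required" list,
    # so json2ts does not mark any field as optional.
    required_properties = schema.setdefault("required", [])
    for prop_name in schema.get("properties", {}).keys():
        if prop_name not in required_properties:
            required_properties.append(prop_name)


schema = {
    "title": "ExampleModel",
    "type": "object",
    "properties": {
        "int_with_default": {"type": "integer", "default": 1},
        "nullable_int": {"anyOf": [{"type": "integer"}, {"type": "null"}]},
    },
    "required": ["nullable_int"],
}

_treat_all_fields_as_required(schema)
print(schema["required"])  # -> ['nullable_int', 'int_with_default']
```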