|
1 | | -import unittest |
| 1 | +# import unittest |
| 2 | +import pytest |
2 | 3 | from typing import Any, Dict, List, Union |
3 | 4 |
|
| 5 | +# TODO: import ontology_generator through the package path instead of this ad-hoc top-level import |
| 6 | +from ontology_generator import Ontology, Tool, Classification, Option |
| 7 | + |
4 | 8 |
|
5 | 9 | def sample_ontology() -> Dict[str, Any]: |
6 | 10 | return { |
@@ -30,44 +34,132 @@ def sample_ontology() -> Dict[str, Any]: |
30 | 34 | }] |
31 | 35 | } |
32 | 36 |
|
| 37 | +# TODO: base-case tests for each individual Tool, Classification, and Option |
| 38 | +# TODO: tests for nested objects inside each (see the hedged sketch below) |
| 39 | +# TODO: decide whether to cover colors, bools, etc. |
| 40 | +# TODO: cover instance methods such as asdict/fromdict |
| 41 | +# TODO: cover Ontology.from_project |
| 42 | +# TODO: cover Ontology.build |
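| | + |
| | +# Hedged sketch for the nested-object TODO above. It assumes Tool accepts a |
| | +# `classifications` keyword and Classification accepts an `options` keyword |
| | +# (both lists); neither signature is confirmed in ontology_generator yet, so |
| | +# treat this as an illustration rather than a committed test. |
| | +def test_nested_classification_and_option_sketch() -> None: |
| | +    option = Option(value="first") |
| | +    nested = Classification(class_type=Classification.Type.RADIO, |
| | +                            instructions="nested radio", |
| | +                            options=[option])  # assumed keyword |
| | +    t = Tool(tool=Tool.Type.BBOX, name="box tool", |
| | +             classifications=[nested])  # assumed keyword |
| | +    assert t.classifications[0].options[0].value == "first" |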
| 43 | +""" |
| 44 | +Tool tests |
| 45 | +""" |
| 46 | +# def test_create_tool(client, project) -> None: |
| 47 | +def test_create_bbox_tool() -> None: |
| 48 | +    t = Tool(tool=Tool.Type.BBOX, name="box tool") |
| 49 | +    assert t.tool == Tool.Type.BBOX |
| 50 | +    assert t.name == "box tool" |
| 51 | + |
| 52 | +def test_create_point_tool() -> None: |
| 53 | +    t = Tool(tool=Tool.Type.POINT, name="point tool") |
| 54 | +    assert t.tool == Tool.Type.POINT |
| 55 | +    assert t.name == "point tool" |
| 56 | + |
| 57 | +def test_create_polygon_tool() -> None: |
| 58 | +    t = Tool(tool=Tool.Type.POLYGON, name="polygon tool") |
| 59 | +    assert t.tool == Tool.Type.POLYGON |
| 60 | +    assert t.name == "polygon tool" |
| 61 | + |
| 62 | +def test_create_ner_tool() -> None: |
| 63 | +    t = Tool(tool=Tool.Type.NER, name="ner tool") |
| 64 | +    assert t.tool == Tool.Type.NER |
| 65 | +    assert t.name == "ner tool" |
| 66 | + |
| 67 | +def test_create_segment_tool() -> None: |
| 68 | +    t = Tool(tool=Tool.Type.SEGMENTATION, name="segment tool") |
| 69 | +    assert t.tool == Tool.Type.SEGMENTATION |
| 70 | +    assert t.name == "segment tool" |
| 71 | + |
| 72 | +def test_create_line_tool() -> None: |
| 73 | +    t = Tool(tool=Tool.Type.LINE, name="line tool") |
| 74 | +    assert t.tool == Tool.Type.LINE |
| 75 | +    assert t.name == "line tool" |
| 76 | + |
| 77 | +""" |
| 78 | +Classification tests |
| 79 | +""" |
| 80 | +def test_create_text_classification() -> None: |
| 81 | +    c = Classification(class_type=Classification.Type.TEXT, instructions="text") |
| 82 | +    assert c.class_type == Classification.Type.TEXT |
| 83 | +    assert c.instructions == "text" |
| 84 | +    assert c.class_type not in c._REQUIRES_OPTIONS |
| 85 | + |
| 86 | +def test_create_radio_classification() -> None: |
| 87 | +    c = Classification(class_type=Classification.Type.RADIO, instructions="radio") |
| 88 | +    assert c.class_type == Classification.Type.RADIO |
| 89 | +    assert c.instructions == "radio" |
| 90 | +    assert c.class_type in c._REQUIRES_OPTIONS |
| 91 | + |
| 92 | +def test_create_checklist_classification() -> None: |
| 93 | +    c = Classification(class_type=Classification.Type.CHECKLIST, instructions="checklist") |
| 94 | +    assert c.class_type == Classification.Type.CHECKLIST |
| 95 | +    assert c.instructions == "checklist" |
| 96 | +    assert c.class_type in c._REQUIRES_OPTIONS |
| 97 | + |
| 98 | +def test_create_dropdown_classification() -> None: |
| 99 | +    c = Classification(class_type=Classification.Type.DROPDOWN, instructions="dropdown") |
| 100 | +    assert c.class_type == Classification.Type.DROPDOWN |
| 101 | +    assert c.instructions == "dropdown" |
| 102 | +    assert c.class_type in c._REQUIRES_OPTIONS |
| 103 | + |
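| | +# Hedged sketch for validating _REQUIRES_OPTIONS: it assumes asdict() (listed |
| | +# in the TODOs above) rejects an options-requiring classification that has no |
| | +# options; the exact exception type is unconfirmed, so a broad Exception is used. |
| | +def test_radio_without_options_rejected_sketch() -> None: |
| | +    c = Classification(class_type=Classification.Type.RADIO, instructions="radio") |
| | +    with pytest.raises(Exception):  # assumed validation behavior |
| | +        c.asdict() |
| | + |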
| 104 | +""" |
| 105 | +Option tests |
| 106 | +""" |
| 107 | +def test_create_int_option() -> None: |
| 108 | +    o = Option(value=3) |
| 109 | +    assert o.value == 3 |
| 110 | +    assert isinstance(o.value, int) |
| 111 | + |
| 112 | +def test_create_string_option() -> None: |
| 113 | +    o = Option(value="3") |
| 114 | +    assert o.value == "3" |
| 115 | +    assert isinstance(o.value, str) |
| 116 | + |
| 117 | +""" |
| 118 | +Ontology tests |
| 119 | +""" |
| 120 | +def test_create_ontology() -> None: |
| 121 | +    o = Ontology() |
| 122 | +    assert o.tools == [] |
| 123 | +    assert o.classifications == [] |
| 124 | + |
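| | +# Hedged sketch toward the Ontology.build TODO: it relies only on tools and |
| | +# classifications being plain lists (as the empty-list assertions above show); |
| | +# any build/serialize step is left out because its signature is unconfirmed. |
| | +def test_ontology_collects_tools_and_classifications_sketch() -> None: |
| | +    o = Ontology() |
| | +    o.tools.append(Tool(tool=Tool.Type.BBOX, name="box tool")) |
| | +    o.classifications.append( |
| | +        Classification(class_type=Classification.Type.TEXT, instructions="text")) |
| | +    assert len(o.tools) == 1 |
| | +    assert len(o.classifications) == 1 |
| | + |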
| 125 | + |
| 126 | +# def test_create_ontology(client, project) -> None: |
| 127 | +# """ Tests that the ontology that a project was set up with can be grabbed.""" |
| 128 | +# frontend = list(client.get_labeling_frontends())[0] |
| 129 | +# project.setup(frontend, sample_ontology()) |
| 130 | +# normalized_ontology = project.ontology().normalized |
| 131 | + |
| 132 | +# def _remove_schema_ids( |
| 133 | +# ontology_part: Union[List, Dict[str, Any]]) -> Dict[str, Any]: |
| 134 | +# """ Recursively scrub the normalized ontology of any schema information.""" |
| 135 | +# removals = {'featureSchemaId', 'schemaNodeId'} |
| 136 | + |
| 137 | +# if isinstance(ontology_part, list): |
| 138 | +# return [_remove_schema_ids(part) for part in ontology_part] |
| 139 | +# if isinstance(ontology_part, dict): |
| 140 | +# return { |
| 141 | +# key: _remove_schema_ids(value) |
| 142 | +# for key, value in ontology_part.items() |
| 143 | +# if key not in removals |
| 144 | +# } |
| 145 | +# return ontology_part |
| 146 | + |
| 147 | +# removed = _remove_schema_ids(normalized_ontology) |
| 148 | +# assert removed == sample_ontology() |
| 149 | + |
| 150 | +# ontology = project.ontology() |
| 151 | + |
| 152 | +# tools = ontology.tools() |
| 153 | +# assert tools |
| 154 | +# for tool in tools: |
| 155 | +# assert tool.feature_schema_id |
| 156 | +# assert tool.schema_node_id |
33 | 157 |
|
34 | | -def test_create_ontology(client, project) -> None: |
35 | | - """ Tests that the ontology that a project was set up with can be grabbed.""" |
36 | | - frontend = list(client.get_labeling_frontends())[0] |
37 | | - project.setup(frontend, sample_ontology()) |
38 | | - normalized_ontology = project.ontology().normalized |
39 | | - |
40 | | - def _remove_schema_ids( |
41 | | - ontology_part: Union[List, Dict[str, Any]]) -> Dict[str, Any]: |
42 | | - """ Recursively scrub the normalized ontology of any schema information.""" |
43 | | - removals = {'featureSchemaId', 'schemaNodeId'} |
44 | | - |
45 | | - if isinstance(ontology_part, list): |
46 | | - return [_remove_schema_ids(part) for part in ontology_part] |
47 | | - if isinstance(ontology_part, dict): |
48 | | - return { |
49 | | - key: _remove_schema_ids(value) |
50 | | - for key, value in ontology_part.items() |
51 | | - if key not in removals |
52 | | - } |
53 | | - return ontology_part |
54 | | - |
55 | | - removed = _remove_schema_ids(normalized_ontology) |
56 | | - assert removed == sample_ontology() |
57 | | - |
58 | | - ontology = project.ontology() |
59 | | - |
60 | | - tools = ontology.tools() |
61 | | - assert tools |
62 | | - for tool in tools: |
63 | | - assert tool.feature_schema_id |
64 | | - assert tool.schema_node_id |
65 | | - |
66 | | - classifications = ontology.classifications() |
67 | | - assert classifications |
68 | | - for classification in classifications: |
69 | | - assert classification.feature_schema_id |
70 | | - assert classification.schema_node_id |
71 | | - for option in classification.options: |
72 | | - assert option.feature_schema_id |
73 | | - assert option.schema_node_id |
| 158 | +# classifications = ontology.classifications() |
| 159 | +# assert classifications |
| 160 | +# for classification in classifications: |
| 161 | +# assert classification.feature_schema_id |
| 162 | +# assert classification.schema_node_id |
| 163 | +# for option in classification.options: |
| 164 | +# assert option.feature_schema_id |
| 165 | +# assert option.schema_node_id |