|
220 | 220 | "id": "portable-grenada", |
221 | 221 | "metadata": {}, |
222 | 222 | "source": [ |
223 | | - "#### Grab featureSchemaIds" |
| 223 | + "* <b>Grab featureSchemaIds</b>\n", |
| 224 | + "* This enables us to assign model inferences to a specific class in labelbox\n", |
| 225 | + "* Checkout the ndjson section to see where this is used" |
224 | 226 | ] |
225 | 227 | }, |
226 | 228 | { |
|
239 | 241 | ], |
240 | 242 | "source": [ |
241 | 243 | "# When we created a project with the ontology defined above, all of the ids were assigned.\n", |
242 | | - "# So lets reconstruct the ontology builder with all of the ids.\n", |
| 244 | + "# Let's reconstruct the ontology builder with all of the ids.\n", |
243 | 245 | "ontology = ontology_builder.from_project(project)\n", |
244 | 246 | "# We want all of the feature schemas to be easily accessible by name.\n", |
245 | 247 | "schema_lookup = {tool.name: tool.feature_schema_id for tool in ontology.tools}\n", |
|
252 | 254 | "metadata": {}, |
253 | 255 | "source": [ |
254 | 256 | "### Create import objects\n", |
255 | | - "* We want to create a json payload that matches this: https://docs.labelbox.com/en/model-assisted-labeling/create-import-file#images-7961\n", |
256 | | - "* Here we will run inferences on all of our data (only one image this time)\n", |
257 | | - "* See other notebook on supported objects. Everything is the same except coords are in lat lng" |
| 257 | + "* We want to create a polygon json payload that matches the format documented here : https://docs.labelbox.com/data-model/en/index-en#polygon" |
258 | 258 | ] |
259 | 259 | }, |
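For reference, a single polygon in the documented payload is just a list of `x`/`y` points. Below is a minimal sketch of that shape with made-up values standing in for the model output produced later in the notebook; which axis maps to latitude versus longitude here is an assumption.

```python
# Hypothetical example of the polygon shape from the linked docs.
# The coordinates are placeholders; for this geospatial workflow the points
# are given in lat/lng rather than pixels.
example_polygon = {
    "polygon": [
        {"x": -122.4185, "y": 37.7759},
        {"x": -122.4172, "y": 37.7751},
        {"x": -122.4190, "y": 37.7740},
    ]
}
```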
260 | 260 | { |
261 | 261 | "cell_type": "code", |
262 | | - "execution_count": 11, |
| 262 | + "execution_count": null, |
263 | 263 | "id": "advisory-preserve", |
264 | 264 | "metadata": {}, |
265 | 265 | "outputs": [], |
266 | 266 | "source": [ |
| 267 | + "# Create helper functions\n", |
| 268 | + "\n", |
267 | 269 | "def tile_generator(bounds: GeoBoundary, zoom : int):\n", |
268 | 270 | " # Returns all tiles that intersect with the provided bounds\n", |
269 | 271 | " bds = box(bounds.top_right.lng, bounds.top_right.lat, bounds.bottom_left.lng, bounds.bottom_left.lat).bounds\n", |
|
296 | 298 | " max_x -= tile_bounds[0]\n", |
297 | 299 | " min_y -= tile_bounds[1]\n", |
298 | 300 | " max_y -= tile_bounds[1]\n", |
299 | | - " return [int(x) for x in [min_x, min_y, max_x, max_y]], intersection\n" |
300 | | - ] |
301 | | - }, |
302 | | - { |
303 | | - "cell_type": "code", |
304 | | - "execution_count": 12, |
305 | | - "id": "according-sixth", |
306 | | - "metadata": {}, |
307 | | - "outputs": [], |
308 | | - "source": [ |
| 301 | + " return [int(x) for x in [min_x, min_y, max_x, max_y]], intersection\n", |
309 | 302 | "\n", |
310 | 303 | "def get_image(tile : Tile, url : str) -> np.ndarray:\n", |
311 | 304 | " # Downloads a tile\n", |
|
320 | 313 | "\n" |
321 | 314 | ] |
322 | 315 | }, |
| 316 | + { |
| 317 | + "cell_type": "markdown", |
| 318 | + "id": "informative-happiness", |
| 319 | + "metadata": {}, |
| 320 | + "source": [ |
| 321 | + "* <b>Visualize tile and labeling region</b>\n", |
| 322 | + "* The tile is one of the tiles that overlapped with the `GeoBoundary` that we provided\n", |
| 323 | + "* The red box, `GeoBoundary`, is the area where the labelers will label" |
| 324 | + ] |
| 325 | + }, |
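A minimal sketch of one way to draw the labeling region as a red box on a tile image, assuming PIL's `ImageDraw`; the `tile_image` array and pixel-space `region_bounds` names are placeholders for values produced by the helper functions above, and the notebook's actual cell may differ.

```python
from PIL import Image, ImageDraw

# Sketch: overlay the labeling region (the GeoBoundary) as a red box on a tile.
# `tile_image` is an RGB numpy array; `region_bounds` is the pixel-space
# [min_x, min_y, max_x, max_y] box returned by the cropping helper above.
def draw_labeling_region(tile_image, region_bounds):
    img = Image.fromarray(tile_image)
    draw = ImageDraw.Draw(img)
    min_x, min_y, max_x, max_y = region_bounds
    draw.rectangle([min_x, min_y, max_x, max_y], outline=(255, 0, 0), width=3)
    return img
```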
323 | 326 | { |
324 | 327 | "cell_type": "code", |
325 | 328 | "execution_count": 13, |
|
372 | 375 | "Image.fromarray(crop)" |
373 | 376 | ] |
374 | 377 | }, |
| 378 | + { |
| 379 | + "cell_type": "markdown", |
| 380 | + "id": "distant-breast", |
| 381 | + "metadata": {}, |
| 382 | + "source": [ |
| 383 | + "* <b>Run Model</b>\n", |
| 384 | + "* This is where users can run model inferences.\n", |
| 385 | + "* For this example we are detecting water with some simple image processing" |
| 386 | + ] |
| 387 | + }, |
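A minimal sketch of the kind of simple image processing meant here, assuming OpenCV and an RGB crop; the HSV thresholds are illustrative guesses, not the notebook's actual values.

```python
import cv2
import numpy as np

# Sketch: flag blue-ish "water" pixels with an HSV colour threshold,
# then clean the mask with a morphological opening. The bounds below are
# rough placeholders and would need tuning for real imagery.
def detect_water(crop_rgb: np.ndarray) -> np.ndarray:
    hsv = cv2.cvtColor(crop_rgb, cv2.COLOR_RGB2HSV)
    lower = np.array([90, 30, 30])
    upper = np.array([140, 255, 255])
    mask = cv2.inRange(hsv, lower, upper)  # uint8 mask of 0s and 255s
    return cv2.morphologyEx(mask, cv2.MORPH_OPEN, np.ones((5, 5), np.uint8))
```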
375 | 388 | { |
376 | 389 | "cell_type": "code", |
377 | 390 | "execution_count": 15, |
|
411 | 424 | "Image.fromarray(mask)" |
412 | 425 | ] |
413 | 426 | }, |
| 427 | + { |
| 428 | + "cell_type": "markdown", |
| 429 | + "id": "earned-correspondence", |
| 430 | + "metadata": {}, |
| 431 | + "source": [ |
| 432 | + "* <b>Convert Mask to WGS84 Polygons<b>" |
| 433 | + ] |
| 434 | + }, |
414 | 435 | { |
415 | 436 | "cell_type": "code", |
416 | 437 | "execution_count": 17, |
|
419 | 440 | "outputs": [], |
420 | 441 | "source": [ |
421 | 442 | "def extract_polygons(mask: np.ndarray) -> Polygon:\n", |
| 443 | + " # Turns a segementation mask into shapely polygons\n", |
422 | 444 | " contours, _ = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE)\n", |
423 | 445 | " polygons = []\n", |
424 | 446 | "\n", |
|
431 | 453 | " polygons = [Polygon(poly) for poly in polygons]\n", |
432 | 454 | " return [poly.simplify(3) for poly in polygons if poly.area > 50]\n", |
433 | 455 | "\n", |
434 | | - "def convert_to_epsg4326(poly : Polygon, intersection: Polygon) -> List[GeoPoint]:\n", |
| 456 | + "def convert_to_wgs84(poly : Polygon, intersection: Polygon) -> List[GeoPoint]:\n", |
| 457 | + " # Turns pixel polygons into wgs84 polygons\n", |
435 | 458 | " result = []\n", |
436 | 459 | " for coord in poly.__geo_interface__['coordinates'][0]:\n", |
437 | 460 | " x = intersection.bounds[1] + coord[0]\n", |
|
478 | 501 | "metadata": {}, |
479 | 502 | "outputs": [], |
480 | 503 | "source": [ |
481 | | - "geo_polygons = [convert_to_epsg4326(poly, intersection) for poly in pixel_polygons]" |
| 504 | + "geo_polygons = [convert_to_wgs84(poly, intersection) for poly in pixel_polygons]" |
482 | 505 | ] |
483 | 506 | }, |
484 | 507 | { |
|
508 | 531 | "geo_polygons" |
509 | 532 | ] |
510 | 533 | }, |
| 534 | + { |
| 535 | + "cell_type": "markdown", |
| 536 | + "id": "industrial-tuner", |
| 537 | + "metadata": {}, |
| 538 | + "source": [ |
| 539 | + "* <b>Create NDJson From Geo Polygons</b>" |
| 540 | + ] |
| 541 | + }, |
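A hedged sketch of what this step produces: each WGS84 polygon is wrapped in an NDJSON row for model-assisted labeling, keyed to a feature schema via `schema_lookup`. The tool name `"Water"`, the `data_row` handle, and the `point.lng` / `point.lat` attributes are assumptions, not copied from the notebook.

```python
from uuid import uuid4

# Sketch: one NDJSON annotation per polygon. `geo_polygons` and `schema_lookup`
# come from earlier cells; `data_row` is an assumed handle to the data row being
# annotated, and "Water" is an assumed tool name from the ontology.
annotations = []
for geo_polygon in geo_polygons:
    annotations.append({
        "uuid": str(uuid4()),
        "schemaId": schema_lookup["Water"],
        "dataRow": {"id": data_row.uid},
        "polygon": [{"x": point.lng, "y": point.lat} for point in geo_polygon],
    })
```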
511 | 542 | { |
512 | 543 | "cell_type": "code", |
513 | 544 | "execution_count": 27, |
|
532 | 563 | " })" |
533 | 564 | ] |
534 | 565 | }, |
| 566 | + { |
| 567 | + "cell_type": "markdown", |
| 568 | + "id": "former-skiing", |
| 569 | + "metadata": {}, |
| 570 | + "source": [ |
| 571 | + "* <b>Upload to Labelbox</b>" |
| 572 | + ] |
| 573 | + }, |
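A sketch of the upload step, assuming the Labelbox Python SDK's bulk-import (model-assisted labeling) flow; the job name is arbitrary and the notebook's actual cell may call the API differently.

```python
# Sketch: start a model-assisted-labeling import and block until it completes.
# `project` and `annotations` come from earlier cells.
upload_job = project.upload_annotations(
    name="geospatial_mal_demo",
    annotations=annotations,
)
upload_job.wait_until_done()
print(upload_job.state)  # e.g. FINISHED or FAILED
```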
535 | 574 | { |
536 | 575 | "cell_type": "code", |
537 | 576 | "execution_count": 28, |
|