
Commit 19878ce

Author: Andy Barrett
Commit message: removed dependencies on segtrax code
1 parent 1dd42e1 commit 19878ce

File tree: 2 files changed (+80 / -167 lines)


dataset-diagram.png (54.8 KB)

xarray_examples.ipynb
Lines changed: 80 additions & 167 deletions
@@ -24,12 +24,9 @@
 "import numpy as np\n",
 "import glob\n",
 "import sys\n",
+"import pyproj\n",
 "\n",
-"from scipy.interpolate import interp2d\n",
-"\n",
-"# Our modules\n",
-"sys.path.append('/home/jovyan/segtrax/source')\n",
-"from utilities import transform_coord\n"
+"import scipy.interpolate as scinterp\n"
 ]
 },
 {
@@ -444,7 +441,7 @@
 {
 "data": {
 "text/plain": [
-"<matplotlib.collections.QuadMesh at 0x7f68644f0c88>"
+"<matplotlib.collections.QuadMesh at 0x7f6e74668b38>"
 ]
 },
 "execution_count": 13,
@@ -483,6 +480,33 @@
 "cell_type": "code",
 "execution_count": 14,
 "metadata": {},
+"outputs": [],
+"source": [
+"def transform_coord(from_epsg, to_epsg, x, y):\n",
+"    \"\"\"Transform coordinates from proj1 to proj2 (EPSG num).\n",
+"    \n",
+"    from_epsg - EPSG code for from_proj\n",
+"    to_epsg - EPSG code for to_proj\n",
+"    x - x-coordinate to convert\n",
+"    y - y-coordinate to convert\n",
+"    \n",
+"    Useful EPSG:\n",
+"    4326 - WGS84\n",
+"    3408 - EASE North (https://spatialreference.org/ref/epsg/3408/)\n",
+"    \"\"\"\n",
+"    \n",
+"    # Set full EPSG projection strings\n",
+"    from_proj = pyproj.Proj(\"+init=EPSG:\"+str(from_epsg))\n",
+"    to_proj = pyproj.Proj(\"+init=EPSG:\"+str(to_epsg))\n",
+"    \n",
+"    # Convert coordinates\n",
+"    return pyproj.transform(from_proj, to_proj, x, y)"
+]
+},
+{
+"cell_type": "code",
+"execution_count": 15,
+"metadata": {},
 "outputs": [
 {
 "name": "stdout",
@@ -508,7 +532,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 15,
+"execution_count": 16,
 "metadata": {},
 "outputs": [
 {
@@ -542,7 +566,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 16,
+"execution_count": 17,
 "metadata": {},
 "outputs": [
 {
@@ -578,129 +602,50 @@
 },
 {
 "cell_type": "code",
-"execution_count": 27,
+"execution_count": 18,
 "metadata": {},
 "outputs": [
 {
 "name": "stdout",
 "output_type": "stream",
 "text": [
-"<xarray.DataArray 'u' (time: 1, i: 3)>\n",
-"dask.array<shape=(1, 3), dtype=float32, chunksize=(1, 3)>\n",
-"Coordinates:\n",
-" * time (time) object 2018-11-05 12:00:00\n",
-" x (i) float64 0.0 3.876e+04 5.792e+04\n",
-" y (i) float64 -5.558e+05 -4.43e+05 -3.285e+05\n",
-"Dimensions without coordinates: i\n",
-"Attributes:\n",
-" short_name: U\n",
-" long_name: sea_ice_x_velocity\n",
-" standard_name: sea_ice_x_velocity\n",
-" units: cm / s\n",
-" valid_min: -200.0\n",
-" valid_max: 200.0\n",
-" grid_mapping: crs\n",
-" comment: U is the along-x component of the ice motion. It is *not... <xarray.DataArray 'u' (time: 1, i: 3)>\n",
-"dask.array<shape=(1, 3), dtype=float32, chunksize=(1, 3)>\n",
-"Coordinates:\n",
-" * time (time) object 2018-11-05 12:00:00\n",
-" x (i) float64 0.0 3.876e+04 5.792e+04\n",
-" y (i) float64 -5.558e+05 -4.43e+05 -3.285e+05\n",
-"Dimensions without coordinates: i\n",
-"Attributes:\n",
-" short_name: U\n",
-" long_name: sea_ice_x_velocity\n",
-" standard_name: sea_ice_x_velocity\n",
-" units: cm / s\n",
-" valid_min: -200.0\n",
-" valid_max: 200.0\n",
-" grid_mapping: crs\n",
-" comment: U is the along-x component of the ice motion. It is *not...\n"
+"[[-1.94814917 -4.22065543 -4.93430168]] [[-6.13456718 -5.56727877 -4.18583824]]\n"
 ]
 }
 ],
 "source": [
 "ui = ds.sel(time='2018-11-05').u.interp(x=ind_x, y=ind_y)\n",
-"vi = ds.sel(time='2018-11-05').u.interp(x=ind_x, y=ind_y)\n",
-"print (ui, vi)"
+"vi = ds.sel(time='2018-11-05').v.interp(x=ind_x, y=ind_y)\n",
+"print (ui.values, vi.values)"
 ]
 },
 {
 "cell_type": "markdown",
 "metadata": {},
 "source": [
-"Johan's approach"
+"### Johan's approach"
 ]
 },
 {
-"cell_type": "code",
-"execution_count": 36,
+"cell_type": "markdown",
 "metadata": {},
-"outputs": [
-{
-"data": {
-"text/plain": [
-"<xarray.DataArray 'u' (y: 361, x: 361)>\n",
-"dask.array<shape=(361, 361), dtype=float32, chunksize=(361, 361)>\n",
-"Coordinates:\n",
-" * x (x) float64 -4.512e+06 -4.487e+06 ... 4.487e+06 4.512e+06\n",
-" * y (y) float64 -4.512e+06 -4.487e+06 ... 4.487e+06 4.512e+06\n",
-" time object 2018-11-05 12:00:00\n",
-"Attributes:\n",
-" short_name: U\n",
-" long_name: sea_ice_x_velocity\n",
-" standard_name: sea_ice_x_velocity\n",
-" units: cm / s\n",
-" valid_min: -200.0\n",
-" valid_max: 200.0\n",
-" grid_mapping: crs\n",
-" comment: U is the along-x component of the ice motion. It is *not..."
-]
-},
-"execution_count": 36,
-"metadata": {},
-"output_type": "execute_result"
-}
-],
 "source": [
-"data = ds.sel(time='2018-11-05').u.squeeze()\n",
-"data"
-]
-},
-{
-"cell_type": "code",
-"execution_count": 57,
-"metadata": {},
-"outputs": [
-{
-"data": {
-"text/plain": [
-"1"
-]
-},
-"execution_count": 57,
-"metadata": {},
-"output_type": "execute_result"
-}
-],
-"source": [
-"ds.x.values.ndim"
+"I've modified Johan's code to allow coordinate arrays to be 1D or 2D. \n",
+"\n",
+"I've also added some checks to make sure the input arrays are numpy arrays. This is what the `assert` statements do. I also check that the sizes of input coordinate and data arrays are compatable."
 ]
 },
 {
 "cell_type": "code",
-"execution_count": 146,
+"execution_count": 19,
 "metadata": {},
 "outputs": [
 {
-"data": {
-"text/plain": [
-"array([-1.9481492, -4.2206554, -4.934302 ], dtype=float32)"
-]
-},
-"execution_count": 146,
-"metadata": {},
-"output_type": "execute_result"
+"name": "stdout",
+"output_type": "stream",
+"text": [
+"[-1.9481492 -4.2206554 -4.934302 ] [-6.1345673 -5.567279 -4.185838 ]\n"
+]
 }
 ],
 "source": [
@@ -725,8 +670,14 @@
 "    assert isinstance(xq, np.ndarray), 'xp must be a numpy array'\n",
 "    assert isinstance(yq, np.ndarray), 'yp must be a numpy array'\n",
 "\n",
-"    assert x.shape == y.shape == z.shape, \\\n",
-"        \"input array must have same size and number of dimensions\"\n",
+"    if (x.ndim == y.ndim == 2):\n",
+"        assert x.shape == y.shape == z.shape, \\\n",
+"            \"input array must have same size and number of dimensions\"\n",
+"    elif (x.ndim == y.ndim == 1):\n",
+"        assert (*x.shape, *y.shape) == z.shape, \\\n",
+"            \"input coordinate dimensions must be same size as input data\"\n",
+"    else:\n",
+"        raise \n",
 "    assert xq.shape == yq.shape, 'Output coordinate must have same shape'\n",
 " \n",
 "    # If input coordinates are 2D, get single components\n",
@@ -763,84 +714,37 @@
 "    return zq\n",
 "\n",
 "xx, yy = np.meshgrid(ds.x.values, ds.y.values)\n",
-"zz = ds.sel(time='2018-11-05').u.squeeze().values\n",
-"#zz = np.where(~np.isnan(zz), zz, -99.9)\n",
-"uj = interp2d(xx, yy, zz, np.array(x), np.array(y), order=1)\n",
-"uj\n"
-]
-},
-{
-"cell_type": "code",
-"execution_count": 138,
-"metadata": {},
-"outputs": [
-{
-"name": "stdout",
-"output_type": "stream",
-"text": [
-"[ 2 1 0 -1 -2]\n",
-"[[16 17 18 19]\n",
-" [12 13 14 15]\n",
-" [ 8 9 10 11]\n",
-" [ 4 5 6 7]\n",
-" [ 0 1 2 3]]\n",
-"[-2 -1 0 1 2]\n",
-"[[ 0 1 2 3]\n",
-" [ 4 5 6 7]\n",
-" [ 8 9 10 11]\n",
-" [12 13 14 15]\n",
-" [16 17 18 19]]\n"
-]
-}
-],
-"source": [
-"reverse = True\n",
-"x1 = np.arange(3)-1\n",
-"y1 = np.arange(5)-2\n",
-"\n",
-"x2, y2 = np.meshgrid(x1, y1)\n",
-"data = np.arange(4*5).reshape(5,4)\n",
-"\n",
-"if reverse:\n",
-"    y1 = np.flip(y1)\n",
-"    data = np.flipud(data)\n",
-" \n",
-"print (y1)\n",
-"print (data)\n",
-"\n",
-"if y1[-1] < y1[0]:\n",
-"    y1 = np.flip(y1)\n",
-"    data = np.flipud(data)\n",
-" \n",
-"print (y1)\n",
-"print (data)\n",
-"\n"
+"zu = ds.sel(time='2018-11-05').u.squeeze().values\n",
+"zv = ds.sel(time='2018-11-05').v.squeeze().values\n",
+"uj = interp2d(xx, yy, zu, np.array(x), np.array(y), order=1)\n",
+"vj = interp2d(xx, yy, zv, np.array(x), np.array(y), order=1)\n",
+"print (uj, vj)\n"
 ]
 },
 {
 "cell_type": "markdown",
 "metadata": {},
 "source": [
-"My approach"
+"## My approach"
 ]
 },
 {
 "cell_type": "code",
-"execution_count": 19,
+"execution_count": 20,
 "metadata": {},
 "outputs": [],
 "source": [
 "u = ds.sel(time='2018-11-05').u.values\n",
 "u[np.isnan(u)] = -99.9\n",
 "v = ds.sel(time='2018-11-05').v.values\n",
 "v[np.isnan(v)] = -99.9\n",
-"fu = interp2d(ds.x, ds.y, u, kind='linear')\n",
-"fv = interp2d(ds.x, ds.y, v, kind='linear')"
+"fu = scinterp.interp2d(ds.x, ds.y, u, kind='linear')\n",
+"fv = scinterp.interp2d(ds.x, ds.y, v, kind='linear')"
 ]
 },
 {
 "cell_type": "code",
-"execution_count": 20,
+"execution_count": 21,
 "metadata": {},
 "outputs": [
 {
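The scinterp.interp2d calls introduced above use scipy.interpolate.interp2d, which is deprecated as of SciPy 1.10 and removed in 1.14. On a newer environment, a rough equivalent of the "My approach" cell could use RegularGridInterpolator; this is only a sketch, assuming ds.x and ds.y are 1D ascending axes, the grid is laid out (y, x), and reusing the commit's -99.9 fill value:

    import numpy as np
    import scipy.interpolate as scinterp

    u = ds.sel(time='2018-11-05').u.squeeze().values      # gridded u, assumed shape (len(ds.y), len(ds.x))
    u = np.where(np.isfinite(u), u, -99.9)                # same sentinel fill as the commit

    # Evaluates directly at (y, x) point pairs, so no np.diag step is needed afterwards
    fu = scinterp.RegularGridInterpolator((ds.y.values, ds.x.values), u, method='linear')
    ui = fu(np.column_stack([np.asarray(y), np.asarray(x)]))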
@@ -853,7 +757,7 @@
 }
 ],
 "source": [
-"print ('u = ', np.diag(fu(x, y)))\n",
+"print ('u = ', np.diag(fu(x, y))) # This returns a 2D array so use np.diag to get the unique values\n",
 "print ('v = ', np.diag(fv(x, y)))"
 ]
 },
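About the np.diag comment added above: calling an interp2d object with coordinate arrays evaluates every y against every x, so fu(x, y) returns a (len(y), len(x)) grid and the diagonal picks out the matched (x[i], y[i]) values. This works here because the query coordinates are already in ascending order (interp2d sorts its inputs before evaluating). A small illustration, not part of the commit:

    grid = fu(x, y)                     # full outer grid, shape (len(y), len(x))
    u_pts = np.diag(grid)               # matched (x[i], y[i]) pairs

    # Equivalent pointwise evaluation without building the full grid
    u_pts = np.array([fu(x_i, y_i).item() for x_i, y_i in zip(x, y)])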
@@ -866,7 +770,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": null,
+"execution_count": 22,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -877,8 +781,8 @@
 "    um = np.where(~np.isnan(u), u, -99.9)\n",
 "    vm = np.where(~np.isnan(v), v, -99.9)\n",
 " \n",
-"    fu = interp2d(x, y, um, kind='linear')\n",
-"    fv = interp2d(x, y, vm, kind='linear')\n",
+"    fu = scinterp.interp2d(x, y, um, kind='linear')\n",
+"    fv = scinterp.interp2d(x, y, vm, kind='linear')\n",
 " \n",
 "    ui = np.array([fu(x_i, y_i) for x_i, y_i in zip(xi, yi)])\n",
 "    vi = np.array([fv(x_i, y_i) for x_i, y_i in zip(xi, yi)])\n",
@@ -896,9 +800,18 @@
 },
 {
 "cell_type": "code",
-"execution_count": null,
+"execution_count": 23,
 "metadata": {},
-"outputs": [],
+"outputs": [
+{
+"name": "stdout",
+"output_type": "stream",
+"text": [
+"[-1.94814917 -4.22065543 -4.93430168]\n",
+"[-6.13456718 -5.56727877 -4.18583824]\n"
+]
+}
+],
 "source": [
 "ui, vi = interp_uv(ds.x, ds.y, \n",
 "                   ds.sel(time='2018-11-05').u.values, \n",
