diff --git a/kaska.sublime-project b/kaska.sublime-project new file mode 100644 index 0000000..24db303 --- /dev/null +++ b/kaska.sublime-project @@ -0,0 +1,8 @@ +{ + "folders": + [ + { + "path": "." + } + ] +} diff --git a/kaska.sublime-workspace b/kaska.sublime-workspace new file mode 100644 index 0000000..bd40232 --- /dev/null +++ b/kaska.sublime-workspace @@ -0,0 +1,1239 @@ +{ + "auto_complete": + { + "selected_items": + [ + [ + "box", + "boxplot_sm_area" + ], + [ + "tick", + "tick_params" + ], + [ + "insit_all", + "insitu_all_years" + ], + [ + "spat", + "plot_spatial" + ], + [ + "unc", + "unc_19" + ], + [ + "time_con", + "time_contrainst" + ], + [ + "state", + "state_mask_3" + ], + [ + "sat", + "state_mask_3" + ], + [ + "ndwi", + "ndwi1_cos_maize" + ], + [ + "exept", + "except" + ], + [ + "vv", + "vv_version" + ], + [ + "set", + "set_xlim" + ], + [ + "v", + "vwc_sentinel_2" + ], + [ + "rmse_", + "rmse_prediction(predictions, targets)" + ], + [ + "rm", + "rmse_prediction(predictions, targets)" + ], + [ + "plot", + "plot_dir" + ], + [ + "sentinel", + "df_sentinel2" + ], + [ + "to", + "to_datetime" + ], + [ + "height", + "height_field" + ], + [ + "pyplo", + "pyplot as plt" + ], + [ + "op", + "opt_mod" + ], + [ + "canopy", + "canopy_list" + ], + [ + "_", + "_Field_buffer_30" + ], + [ + "_buff", + "_buffer_100" + ], + [ + "_buffer", + "_buffer_50" + ], + [ + "x", + "x_44" + ], + [ + "lookup", + "lookup_sm" + ], + [ + "se", + "sentinel_vv" + ], + [ + "df_vali", + "df_vali_statistic" + ], + [ + "df_va", + "df_vali_statistic" + ], + [ + "la", + "lai_mean" + ], + [ + "bbch", + "bbch_508" + ], + [ + "index", + "indexing3\tint ⟠" + ], + [ + "b", + "bulk" + ], + [ + "surface", + "surface_list" + ], + [ + "markercolor", + "markerColor" + ], + [ + "marker", + "markerSize" + ], + [ + "pat", + "patch5\tunknown ⟠" + ], + [ + "path", + "patch4\tunknown ⟠" + ], + [ + "patch", + "patch2\tunknown ⟠" + ], + [ + "sea", + "seaborn as sns\tmodule ⟠" + ], + [ + "vwc", + "vwcpro_field" + 
], + [ + "Water", + "WaterCloud" + ], + [ + "vv_model", + "vv_model_used" + ], + [ + "vv_s1", + "vv_s1_used" + ], + [ + "Data", + "DataFrame" + ], + [ + "rmse", + "rmse_prediction" + ], + [ + "read", + "read_csv" + ], + [ + "df_", + "df_auswertung" + ], + [ + "pol", + "pol_value" + ], + [ + "field", + "field_data" + ], + [ + "y", + "set_ylim" + ], + [ + "vv_po", + "vv_point_old" + ], + [ + "poin", + "df_point_17" + ], + [ + "vv_poin", + "vv_point_11" + ], + [ + "vv_p", + "vv_point_old" + ] + ] + }, + "buffers": + [ + { + "contents": "\n REPO: /media/tweiss/Work/GIT/GitHub/UCL/kaska\n [a]ll: true\n\n● c826075 (HEAD -> sar_quick, McWhity/sar_quick) move to UniRo ​ 5 minutes ago, Thomas Weiß\n● ce17c5f version v3 ​ Dec 8 2021, Thomas Weiß\n● 3477616 maybe good solution ​ Nov 25 2021, Thomas Weiß\n● dd7630b start again ​ Nov 18 2021, Thomas Weiß\n● 5f2353a (McWhity/sar_quick_lmu, sar_quick_lmu) change of hard coded file names,.. ​ Dec 2 2019, Thomas Weiß\n● 01776ba (origin/sar_quick) Update watercloudmodel.py ​ Nov 22 2019, MarcYin\n● 191358b SAR optical coupling ​ Nov 22 2019, MarcYin\n● 98d9d5a updated entry to take prior ​ Nov 21 2019, José Gómez-Dans\n● bea78a7 Added missing retrieval bit ​ Nov 21 2019, José Gómez-Dans\n● 67e65f6 Quickly updated SAR files ​ Nov 21 2019, José Gómez-Dans\n| ● 2dc1165 (McWhity/lintfix2) Fixed pylint complaint about logging. ​ Dec 2 2019, Matthew Gillman\n| ● c84128d Added basic docstrings and remnoved unused variables from kaska_sar.py ​ Dec 2 2019, Matthew Gillman\n| ● 6348129 Fixed constant names in __main__ function of kaska_sar.py ​ Dec 2 2019, Matthew Gillman\n| ● 4737298 Fixed pylint import errors in kaska_sar.py ​ Dec 2 2019, Matthew Gillman\n| ● 0eec714 Fixed indentation erros in kaska_sar.py ​ Dec 2 2019, Matthew Gillman\n| ● 533f047 Changed variable names to keep pylint happy. ​ Dec 2 2019, Matthew Gillman\n| ● 6cc2044 Fixed _txtbar() function for pylint. 
​ Dec 2 2019, Matthew Gillman\n| ● 597609e Removed superfluous imports and renamed some variables. ​ Dec 2 2019, Matthew Gillman\n| ● 892054e Fixed the remaining pylint errors that I can do at this stage. ​ Dec 2 2019, Matthew Gillman\n| ● 87745fa Fix pylint argument passing errors. ​ Dec 2 2019, Matthew Gillman\n| ● 7972daf Change constants in __main__ to UPPER_CASE naming style ​ Dec 2 2019, Matthew Gillman\n| ● d52cb48 Fix indentation errors in s2_observations.py ​ Dec 2 2019, Matthew Gillman\n| ● ef9040d Changed variable names etc to satisfy pylint. ​ Dec 2 2019, Matthew Gillman\n| ● d70a40f Fixed some variable names. ​ Dec 2 2019, Matthew Gillman\n| ● f5067a3 Remove extraneous brackets and fixed logger strings. ​ Dec 2 2019, Matthew Gillman\n| ● aeff1fa Fixed indentation errors. ​ Dec 2 2019, Matthew Gillman\n| ● b773b21 Changing variable names to snake_case naming style in watercloudmodel.. ​ Dec 2 2019, Matthew Gillman\n| ● 6953d57 Fixed some unused variable etc pylint errors. ​ Dec 2 2019, Matthew Gillman\n| ● e1121a2 Fixed pylint indentation errors in watercloudmodel.py ​ Dec 2 2019, Matthew Gillman\n| ● 82a4971 Fix weird pytest seg fault ​ Dec 2 2019, Matthew Gillman\n| ● 698813f Fixed some more pylint complaints in utils.py ​ Dec 2 2019, Matthew Gillman\n| ● f19062e Fixed some pylint complaints in utils.py ​ Nov 28 2019, Matthew Gillman\n| ● 3e28bc2 Fixed some pylint errors in inference_runner.py ​ Nov 28 2019, Matthew Gillman\n| ● 7ea8e6b Fix pylint complaints about logger.py ​ Nov 28 2019, Matthew Gillman\n| ● 8599f8d Fixing pylint complaints for entry.py ​ Nov 28 2019, Matthew Gillman\n| ● cab32cc (McWhity/master) Merge pull request #103 from UCL/config ​ Nov 27 2019, mattgillucl\n| ● 29b9cca Merge branch 'master' into config ​ Nov 27 2019, mattgillucl\n| |\\ \n| | ● c15c702 Merge pull request #102 from UCL/lintfix ​ Nov 26 2019, mattgillucl\n| | ● 22743a9 Fix typo ​ Nov 26 2019, Matthew Gillman\n| | ● 77a4139 Fix typos inadvertently introduced when 
fixing lint errors. ​ Nov 26 2019, Matthew Gillman\n| | ● f2af683 Fixing some typos etc in test code. ​ Nov 26 2019, Matthew Gillman\n| | ● dac6f6a Incorporating Tim's smoothn3 changes into lintfix. ​ Nov 26 2019, Matthew Gillman\n| | ● df4c285 Revert 9cae73d ​ Nov 26 2019, Matthew Gillman\n| | ● c2dce5b Fixing more pycodestyle errors in smoothn.py ​ Nov 26 2019, Matthew Gillman\n| | ● 1a9909a Fixed most remaining pycodestyle errors in watercloudmodel.py ​ Nov 26 2019, Matthew Gillman\n| | ● d3cc47c Fixed most of the remaining pycodestyle errors. ​ Nov 26 2019, Matthew Gillman\n| | ● e7ff9e2 Fixed remaining pycodestyle errors in /tests ​ Nov 26 2019, Matthew Gillman\n| | ● f31ca18 Fixed pycodestyle errors in test_smoothn.py ​ Nov 26 2019, Matthew Gillman\n| | ● 0b44fed Fixed most pycodestyle errors in test_reproject.py ​ Nov 26 2019, Matthew Gillman\n| | ● 139faac Fixed most pycodestyle errors in test_nn_library.py ​ Nov 26 2019, Matthew Gillman\n| | ● 772b5df Fixed utils.py syntax error ​ Nov 26 2019, Matthew Gillman\n| | ● 021cbf7 Fixed pycodestyle errors in test_interp.py ​ Nov 26 2019, Matthew Gillman\n| | ● 62d8bc8 Fixed pycodestyle errors in test_wcm.py ​ Nov 26 2019, Matthew Gillman\n| | ● c5647fc Fixed pycodestyle errors in /inverters ​ Nov 26 2019, Matthew Gillman\n| | ● 99d9996 Fixed most pycodestyle errors in two_nn.py ​ Nov 26 2019, Matthew Gillman\n| | ● 6afc976 Fixed pycodestyle errors in NNParameterInversion.py ​ Nov 26 2019, Matthew Gillman\n| | ● de89d48 Fixed most of the pycodestyle errors in watercloudmodel.py ​ Nov 26 2019, Matthew Gillman\n| | ● 79271f8 Fixed some pycodestyle errors in utils.py and smoothn.py ​ Nov 26 2019, Matthew Gillman\n| | ● 8f48aff Fixed more of the pycodestyle errors in smoothn.py ​ Nov 25 2019, Matthew Gillman\n| | ● 9cae73d Fixed pycodestyle indentation complaints for smoothn.py ​ Nov 25 2019, Matthew Gillman\n| | ● f0febbc Fixed pycodestyle errors in s2_observations.py ​ Nov 25 2019, Matthew Gillman\n| | ● e686518 
Fixed pycodestyle errors in s1_observations.py ​ Nov 25 2019, Matthew Gillman\n| | ● 9c31bd2 (origin/lintfix) Fixed pycodestyle E113 errors in kaska_sar.py ​ Nov 25 2019, Matthew Gillman\n| | ● 2212d73 Fixed pycodestyle errors in parmap.py. ​ Nov 25 2019, Matthew Gillman\n| | ● b92a9ff Fixed pycodestyle errors in logger.py ​ Nov 25 2019, Matthew Gillman\n| | ● 4dcde97 Fixed pycodestyle errors (except E113) in kaska_sar.py ​ Nov 25 2019, Matthew Gillman\n| | ● 205ec2c Fixed pycodestyle errors in kaska_cost.py ​ Nov 25 2019, Matthew Gillman\n| | ● 8bb3fae Fixed some of the pylint errors for kaska.py ​ Nov 25 2019, Matthew Gillman\n| | ● 58eb932 Fixed pycodestyle errors in kaska.py ​ Nov 25 2019, Matthew Gillman\n| | ● a8a2b94 Fixed pycodestyle errors in interp_fix.py ​ Nov 25 2019, Matthew Gillman\n| | ● 33c9c2a Fixed pycodestyle errors in inference_runner.py ​ Nov 25 2019, Matthew Gillman\n| | ● dfce48d Fixed pycodestyle errors in inference_runner.py ​ Nov 25 2019, Matthew Gillman\n| | ● 10c1b8a Fixed some (not all) pylint complaints for setup.py ​ Nov 25 2019, Matthew Gillman\n| | | ● 21daa87 (McWhity/config) Hopefully really fixing it this time ​ Nov 27 2019, Matthew Gillman\n| | |/ \n| |/| \n| ● | b05dd16 Fixing an args error in kaska.py ​ Nov 27 2019, Matthew Gillman\n| | | ● 5b17e07 (McWhity/smoothn3) Some more linting ​ Nov 26 2019, Tim Spain\n| | | ● 57b5b1e Comment typo ​ Nov 26 2019, Tim Spain\n| | | ● 7de2819 Remove the unused peaks() function ​ Nov 26 2019, Tim Spain\n| | | ● 4e878d9 Get rid of the unknown error() call, replaced by ValueError. ​ Nov 26 2019, Tim Spain\n| | | ● 879a2be Take selected formatting changes from Black. ​ Nov 26 2019, Tim Spain\n| | | ● e665bad Pylinting ​ Nov 26 2019, Tim Spain\n| | | ● c0af82c DRY error message ​ Nov 25 2019, Tim Spain\n| | | ● 67b5516 Remove the redundant H() function. ​ Nov 25 2019, Tim Spain\n| | | ● 00c85e5 (origin/smoothn3) Not needing sys.path.append, we don't need to i.. 
​ Nov 25 2019, Tim Spain\n| | | ● e76882b Merge pull request #97 from UCL/smoothn3_parametrizes ​ Nov 25 2019, Tim Spain\n| | | |\\ \n| | | | ● a728312 tests functions can't have default values - or so it seems! ​ Nov 22 2019, David Perez-Suarez\n| | | | ● e821350 :pencil2: fixing parametrize typo name and argument of function ​ Nov 22 2019, David Perez-Suarez\n| | | | ● 79b9012 :pencil2: missing commas ​ Nov 22 2019, David Perez-Suarez\n| | | | ● 31c8e84 (origin/testsmoothn_more, McWhity/testsmoothn_more) parametrize.. ​ Nov 22 2019, David Perez-Suarez\n| | | | ● 4573228 parametrize general robustness ​ Nov 22 2019, David Perez-Suarez\n| | | ● | 2816be7 Make mostly PEP8 compliant ​ Nov 22 2019, Tim Spain\n| | | ● | abbb5a5 Whitespace ​ Nov 22 2019, Tim Spain\n| | | ● | 81cee19 dct and idct are imported in the preamble ​ Nov 22 2019, Tim Spain\n| | | |/ \n| | | ● 847373f How much more does Travis need the test constraints loosening? ​ Nov 22 2019, Tim Spain\n| | | ● 73f525c Merge branch 'smoothn3' of https://github.com/UCL/kaska into sm.. ​ Nov 22 2019, Tim Spain\n| | | |\\ \n| | | | ● 64a4fe9 xrange() -> range() in python 3 ​ Nov 22 2019, Tim Spain\n| | | | ● dd4211e Make numpy members explicit. ​ Nov 22 2019, Tim Spain\n| | | | ● 430bf70 Remove redundant imports. 
​ Nov 22 2019, Tim Spain\n| | | | ● 7e174f5 Revert \"Remove redundant imports\" ​ Nov 22 2019, Tim Spain\n| | | | ● 035e140 Remove redundant imports ​ Nov 22 2019, Tim Spain\n| | | | ● 3679807 Indentation ​ Nov 22 2019, Tim Spain\n| | | | ● 240308e Factor out the initialization of xpost ​ Jul 26 2019, Tim Spain\n| | | | ● 2530003 Factor out the initialization of z ​ Jul 26 2019, Tim Spain\n| | | | ● 804ffc8 Factor out calculation of the smoothness bounds ​ Jul 26 2019, Tim Spain\n| | | | ● 8ef894d Break out the definition of the lambda tensor ​ Jul 26 2019, Tim Spain\n| | | | ● 1ad3bb8 Remove 'MATLAB' from the error messages ​ Jul 26 2019, Tim Spain\n| | | | ● 2dce68d Make things more PEP 8 compliant ​ Jul 26 2019, Tim Spain\n| | | | ● 01a87c9 Tidy up exit cases, remove magic numbers ​ Jul 26 2019, Tim Spain\n| | | | ● 5e4a20c Remove semicolons; ​ Jul 25 2019, Tim Spain\n| | | | ● fafbb15 Preprocessing of data moved to a function ​ Jul 25 2019, Tim Spain\n| | | | ● 8c1f460 Refactor weight prep into functions ​ Jul 25 2019, Tim Spain\n| | | | ● a6f9bb2 Comment spelling ​ Jul 11 2019, Tim Spain\n| | | | ● 54648bc Factor out the conversion of standard deviations to weights ​ Jul 11 2019, Tim Spain\n| | | | ● d3096fc Really loosen constraints for the tests to pass in Travis. 
​ Nov 21 2019, Tim Spain\n| | | | ● 35fd810 Revert \"Loosen constraints for Travis\" ​ Nov 21 2019, Tim Spain\n| | | | ● 92c717f Loosen constraints for Travis ​ Nov 21 2019, Tim Spain\n| | | | ● 84abe40 Revert \"Loosen constraints for Travis\" ​ Nov 21 2019, Tim Spain\n| | | | ● b9217b8 Loosen constraints for Travis ​ Nov 21 2019, Tim Spain\n| | | | ● bab68c0 Precalculate the values to be compared ​ Nov 21 2019, Tim Spain\n| | | | ● 03b7a37 Revert \"Loosen test criteria until Travis passes\" ​ Nov 21 2019, Tim Spain\n| | | | ● 724ed23 Loosen test criteria until Travis passes ​ Nov 21 2019, Tim Spain\n| | | | ● 2f08fab Perform the addition before the assert ​ Nov 21 2019, Tim Spain\n| | | | ● eb86b7f Add tests for masked data arrays, and the cauchy and talworth w.. ​ Nov 21 2019, Tim Spain\n| | | | ● fcb68e7 Test for sd versus weights. Copied from smoothn branch. ​ Nov 21 2019, Tim Spain\n| | | | ● 534d65d Explain magic numbers in the test ​ Nov 20 2019, Tim Spain\n| | | | ● b237c2b Placating PEP8 speaks ​ Nov 19 2019, Tim Spain\n| | | | ● ba1f968 Test smoothn with fixed smoothing order ​ Nov 19 2019, Tim Spain\n| | | ● | 0d464f7 xrange() -> range() in python 3 ​ Nov 22 2019, Tim Spain\n| | | ● | d321969 Make numpy members explicit. ​ Nov 22 2019, Tim Spain\n| | | ● | d33fe33 Remove redundant imports. 
​ Nov 22 2019, Tim Spain\n| | | ● | e3644de Revert \"Remove redundant imports\" ​ Nov 22 2019, Tim Spain\n| | | ● | 8417f10 Remove redundant imports ​ Nov 22 2019, Tim Spain\n| | | ● | f9fc58a Indentation ​ Nov 22 2019, Tim Spain\n| | | ● | e572e1d Factor out the initialization of xpost ​ Jul 26 2019, Tim Spain\n| | | ● | 60d0ad0 Factor out the initialization of z ​ Jul 26 2019, Tim Spain\n| | | ● | 91d37b3 Factor out calculation of the smoothness bounds ​ Jul 26 2019, Tim Spain\n| | | ● | 11a6f2a Break out the definition of the lambda tensor ​ Jul 26 2019, Tim Spain\n| | | ● | 443a923 Remove 'MATLAB' from the error messages ​ Jul 26 2019, Tim Spain\n| | | ● | f3a3a47 Make things more PEP 8 compliant ​ Jul 26 2019, Tim Spain\n| | | ● | 50a8429 Tidy up exit cases, remove magic numbers ​ Jul 26 2019, Tim Spain\n| | | ● | 2e70ed6 Remove semicolons; ​ Jul 25 2019, Tim Spain\n| | | ● | 55a3188 Preprocessing of data moved to a function ​ Jul 25 2019, Tim Spain\n| | | ● | 1a74dc7 Refactor weight prep into functions ​ Jul 25 2019, Tim Spain\n| | | ● | 9dd8487 Comment spelling ​ Jul 11 2019, Tim Spain\n| | | ● | e4a7c1c Factor out the conversion of standard deviations to weights ​ Jul 11 2019, Tim Spain\n| | | ● | 8ac1afb Really loosen constraints for the tests to pass in Travis. 
​ Nov 21 2019, Tim Spain\n| | | ● | 59efc61 Revert \"Loosen constraints for Travis\" ​ Nov 21 2019, Tim Spain\n| | | ● | 02dd29b Loosen constraints for Travis ​ Nov 21 2019, Tim Spain\n| | | ● | 7098796 Revert \"Loosen constraints for Travis\" ​ Nov 21 2019, Tim Spain\n| | | ● | 9e5b5ed Loosen constraints for Travis ​ Nov 21 2019, Tim Spain\n| | | ● | 0f4ec37 Precalculate the values to be compared ​ Nov 21 2019, Tim Spain\n| | | ● | f22be22 Revert \"Loosen test criteria until Travis passes\" ​ Nov 21 2019, Tim Spain\n| | | ● | 71153b9 Loosen test criteria until Travis passes ​ Nov 21 2019, Tim Spain\n| | | ● | 6cf7534 Perform the addition before the assert ​ Nov 21 2019, Tim Spain\n| | | ● | 38349f6 Add tests for masked data arrays, and the cauchy and talworth w.. ​ Nov 21 2019, Tim Spain\n| | | ● | 90562e2 Test for sd versus weights. Copied from smoothn branch. ​ Nov 21 2019, Tim Spain\n| | | ● | cf88a81 Explain magic numbers in the test ​ Nov 20 2019, Tim Spain\n| | | ● | f64a79f Placating PEP8 speaks ​ Nov 19 2019, Tim Spain\n| | | ● | 1427503 Test smoothn with fixed smoothing order ​ Nov 19 2019, Tim Spain\n| | | | | ● 1fc89e9 (origin/rtd, McWhity/rtd) Adds numpydoc extension and depende.. 
​ Nov 22 2019, David Perez-Suarez\n| | | | | ● 7f2f03e Adds readthedocs config and fix setuptools installation ​ Nov 22 2019, David Perez-Suarez\n| | | |_|/ \n| | |/| | \n| | ● | | 4e43706 (origin/master, master) Merge pull request #74 from UCL/s2s1combo ​ Nov 22 2019, David Pérez-Suárez\n| | ● | | 2ed1584 :pencil2: typo on path ​ Nov 22 2019, David Perez-Suarez\n| | ● | | c49f032 Merge branch 'master' into s2s1combo ​ Nov 22 2019, David Pérez-Suárez\n| | |\\| | \n| | | ● | e12cd39 Merge pull request #83 from UCL/installation_instructions ​ Nov 22 2019, David Pérez-Suárez\n| | | |\\ \\ \n| | | | ● | 7ccd799 Adds gdal dependency to the installation instructions ​ Nov 19 2019, David Perez-Suarez\n| | | | ● | 5decc1e Pins gdal version for the creation of environments ​ Nov 19 2019, David Perez-Suarez\n| | | | ● | 7d6cf94 Adds uptodate installation to documentation ​ Nov 4 2019, David Perez-Suarez\n| | | | ● | 32a5623 Changes tox and Travis files to keep all within tox config ​ Nov 2 2019, David Perez-Suarez\n| | | | ● | 5a46be3 adds warning message if gdal is no available ​ Nov 1 2019, David Perez-Suarez\n| | | | ● | 576174e Sorts metadata and set dependencies based on use cases ​ Nov 1 2019, David Perez-Suarez\n| | | | ● | dd20880 Updates metadata and dependencies of package ​ Nov 1 2019, David Perez-Suarez\n| | | | |/ \n| | | ● | 3143718 Merge pull request #93 from UCL/config ​ Nov 21 2019, mattgillucl\n| | | |\\ \\ \n| | |_|/ / \n| |/| | / \n| | | |/ \n| ● | | b3535ec Removing PEP8 complaints ​ Nov 21 2019, Matthew Gillman\n| ● | | d9bfd8e Added doc strings ​ Nov 21 2019, Matthew Gillman\n| ● | | 9e7c425 Fixed handling of start and stop date args ​ Nov 21 2019, Matthew Gillman\n| ● | | 9b0f307 Fixed faulty handling of --disable_debug_log option. ​ Nov 21 2019, Matthew Gillman\n| ● | | d3a5f0a We have now removed the option of using command-line args. ​ Nov 20 2019, Matthew Gillman\n| ● | | e368dc9 Dealing with special case (optional) of block_size. 
​ Nov 20 2019, Matthew Gillman\n| ● | | bddce79 Adding checks for more params in config file. ​ Nov 19 2019, Matthew Gillman\n| ● | | 7333ba9 Using config dictionary as non-global ​ Nov 19 2019, Matthew Gillman\n| ● | | 785e9f3 Started adding robust checking for keys being present ​ Nov 19 2019, Matthew Gillman\n| ● | | bb70a82 Initial version with config file ​ Nov 19 2019, Matthew Gillman\n| | | ● 639606e (origin/smoothnorder, McWhity/smoothnorder) Merge pull request #9.. ​ Nov 18 2019, Tim Spain\n| | | ● d2d66dc Add a maximum version of GDAL ​ Nov 15 2019, Tim Spain\n| | |/ \n| |/| \n| | ● 86e80f7 slashes using pathlib ​ Nov 22 2019, David Pérez-Suárez\n| |/ \n|/| \n● | 9d5d159 Comment on the assignment of bands as a list. ​ Nov 20 2019, Tim Spain\n● | 952d651 No, that slash needs to be in the quotes ​ Nov 19 2019, Tim Spain\n● | 87eaf4a Changes based on David's comments ​ Nov 19 2019, Tim Spain\n● | cc3a4a0 Add a maximum version of GDAL ​ Nov 15 2019, Tim Spain\n● | 4be8075 Dictionary translation between the two sets of names ​ Nov 15 2019, Tim Spain\n● | 7c92bf3 Merge pull request #86 from UCL/massage_interp_fix ​ Nov 14 2019, Tim Spain\n|\\ \\ \n| ● | a42051e Use boolean functions, rather than casting and summing. ​ Nov 14 2019, Tim Spain\n| ● | 8f1e58a Use np.full() to fill the array ​ Nov 14 2019, Tim Spain\n| ● | 4c58ee6 Revert \"Avoid multiplying by NaN and automatically extract the y ar.. ​ Nov 14 2019, Tim Spain\n| ● | 25d94c6 Avoid multiplying by NaN and automatically extract the y array. ​ Nov 14 2019, Tim Spain\n| |/ \n● | 4a714d8 Replace the extract_parameters function with a list comprehension. ​ Nov 14 2019, Tim Spain\n● | 2b170fe Rationalize if statements and format strings. ​ Nov 14 2019, Tim Spain\n● | 9ff8fad Get the array dimensions from the projection or state mask. 
​ Nov 13 2019, Tim Spain\n● | 3e189d0 Revert \"Add ROI width and height to the output definition\" ​ Nov 13 2019, Tim Spain\n● | 68a76af Add ROI width and height to the output definition ​ Nov 13 2019, Tim Spain\n● | 6b1ab19 Handle the S1 time grid correctly ​ Nov 13 2019, Tim Spain\n● | fa7bd4d The end of the time series can be valid, too ​ Nov 13 2019, Tim Spain\n● | 6eaaa7b A slightly more informative message for the assert ​ Nov 13 2019, Tim Spain\n● | c52cb04 Named tuples as ad-hoc objects for the S2 results ​ Nov 8 2019, Tim Spain\n● | e9bcbb1 Function name typo ​ Nov 8 2019, Tim Spain\n● | cdeeb96 Change the output of the S2 retrieval ​ Nov 8 2019, Tim Spain\n● | fcfdf9e Match the form of the imports to that used by every other module ​ Nov 8 2019, Tim Spain\n● | ff711b2 Move the assert out of the loops. ​ Nov 6 2019, Tim Spain\n● | a31aa6d Swap the order of the if statement clauses to be more logical ​ Nov 6 2019, Tim Spain\n● | 28bef15 Rename tests and remove redundant assignment ​ Nov 6 2019, Tim Spain\n● | d64a083 Refactor the output path creation ​ Nov 6 2019, Tim Spain\n● | 7eee297 (multiply-org/s2s1combo) Add the S1 inversion and output the parameters ​ Oct 30 2019, Tim Spain\n● | 4a75870 Merge branch 's2s1combo' of https://github.com/UCL/kaska into s2s1c.. ​ Oct 29 2019, Tim Spain\n|\\ \\ \n| ● | 4d123cc Make PEP8 happy ​ Oct 28 2019, Tim Spain\n| ● | bc405c1 Get Sentinel 1 observations into the process_tile config ​ Oct 28 2019, Tim Spain\n● | | fc7e48e Make PEP8 happy ​ Oct 28 2019, Tim Spain\n● | | aa77947 Get Sentinel 1 observations into the process_tile config ​ Oct 28 2019, Tim Spain\n| |/ \n|/| \n● | a76261c (tag: v0.9, multiply-org/master) Merge pull request #81 from UCL/te.. 
​ Oct 28 2019, José Gómez-Dans\n|\\ \\ \n| ● | 63f0619 Moves interp_fix tests to tests folder ​ Oct 28 2019, José Gómez-Dans\n● | | 5b3bb31 Merge pull request #79 from UCL/issue78 ​ Oct 28 2019, José Gómez-Dans\n|/ / \n● | c4193af Added missing docstring ​ Oct 28 2019, José Gómez-Dans\n● | 5b9932e Process single chunks ​ Oct 28 2019, José Gómez-Dans\n| | ● f59fb7a (origin/remove-mains, McWhity/remove-mains) More review comments, a.. ​ Nov 22 2019, Ilektra Christidi\n| | ● ffb9283 Fix a bunch of code review comments. ​ Nov 12 2019, Ilektra Christidi\n| | ● d1f0614 Simplify file existence checks per Jose's requirements. Add tests f.. ​ Nov 11 2019, Ilektra Christidi\n| | ● 946b364 Uncomment if condition commented out for debugging. Fix PEP8 whinni.. ​ Nov 8 2019, Ilektra Christidi\n| | ● 55f2137 Check for file existence in read_granule ​ Nov 8 2019, Ilektra Christidi\n| | ● 04f6f3f Simplify folder navigation in read_granule. ​ Nov 6 2019, Ilektra Christidi\n| | ● 5128a5d Simplify self.date_data. ​ Nov 4 2019, Ilektra Christidi\n| | ● 5b91d36 Pack all date, folder and file paths in self.date_data dictionary. ​ Nov 4 2019, Ilektra Christidi\n| | ● 81c75cd (multiply-org/remove-mains) use filenames from the xml, instead of .. ​ Oct 28 2019, Ilektra Christidi\n| | ● 6553ddf Satisfy PEP8. Remove __main__ with hardcoded paths. ​ Oct 28 2019, Ilektra Christidi\n| | ● aa1e2de Add tests and reference data for read_granule. Remove a bunch of te.. ​ Oct 28 2019, Ilektra Christidi\n| | ● 78b22f5 Add angle test data, read_granule call in test now runs. ​ Oct 25 2019, Ilektra Christidi\n| | ● 3123a5a Add more test data files and adjust tests. ​ Oct 25 2019, Ilektra Christidi\n| | ● 0d53943 More beautifying and documentation. ​ Oct 24 2019, Ilektra Christidi\n| | ● 8f4c5ce Add Jose's reading of dates and files from xml, and relevant test d.. ​ Oct 24 2019, Ilektra Christidi\n| | ● 3d2f116 Fix pep8 whinning. 
​ Oct 24 2019, Ilektra Christidi\n| | ● ff4cb75 First s2_observations unit test and relevant test data. ​ Oct 23 2019, Ilektra Christidi\n| | ● ca55e98 Re-arrange and document part of s2_observations. ​ Oct 23 2019, Ilektra Christidi\n| |/ \n| | ● 845f4f1 (origin/kaska_sar_class, multiply-org/kaska_sar_class, McWhity/kaska_sar_class) .. ​ Oct 29 2019, José Gómez-Dans\n| | ● 0764afb Expose S1 object to main module ​ Oct 29 2019, José Gómez-Dans\n| | ● 3282361 Added main inversion loop ​ Oct 29 2019, José Gómez-Dans\n| | ● f74b304 Updated master ​ Oct 28 2019, José Gómez-Dans\n| | |\\ \n| |_|/ \n|/| | \n● | | 641542a Merge pull request #77 from UCL/master20191028 ​ Oct 28 2019, Tim Spain\n● | | 0a24fce Fix one final conflict ​ Oct 28 2019, Tim Spain\n● | | aa40767 Merge branch 'temporal' into master20191028 ​ Oct 28 2019, Tim Spain\n|\\ \\ \\ \n| |/ / \n|/| | \n| | ● d254fdb Moving kaska_sar to its own class 1/1 ​ Oct 28 2019, José Gómez-Dans\n| |/ \n|/| \n| | ● 8dd02d0 (origin/temporal, multiply-org/temporal, McWhity/temporal) Added te.. ​ Oct 28 2019, José Gómez-Dans\n| | ● 6435370 Updated from master ​ Oct 28 2019, José Gómez-Dans\n| | ● ebbb76b Update from master ​ Oct 28 2019, José Gómez-Dans\n| |/ \n| ● f094588 Changed ordering of interpolation/smoothing ​ Oct 1 2019, José Gómez-Dans\n| ● 5fcc351 Updated cloud threshold to 10 ​ Oct 1 2019, José Gómez-Dans\n| ● a9e3af1 Changed cloud threshold removed trace ​ Sep 30 2019, José Gómez-Dans\n| ● 3b066fa Mask issue fix ​ Sep 30 2019, José Gómez-Dans\n| ● 0ffd3e7 blocksize and clean up of temporary files ​ Sep 30 2019, José Gómez-Dans\n| ● b21f1e3 Expose dask and blocksize ​ Sep 30 2019, José Gómez-Dans\n| ● 9697e13 Minor cosmetic changes ​ Sep 30 2019, José Gómez-Dans\n| ● 8e22d72 Cleaned up a bit esp. 
cloud mask ​ Sep 30 2019, José Gómez-Dans\n| ● d3cd175 Changed smoothness ​ Sep 27 2019, José Gómez-Dans\n| ● 8b5a45d Ported issue 57 ​ Sep 24 2019, José Gómez-Dans\n| ● e2d7120 Pathlib typo ​ Sep 24 2019, Jose Gomez-Dans\n| ● 0758150 Folder for saving fixed images, otherwise complications in stitch ​ Sep 18 2019, Jose Gomez-Dans\n| ● 6aaf614 Fixed small typo ​ Sep 13 2019, Jose Gomez-Dans\n| ● 7445fb0 Fixes interpolation of nans ​ Sep 12 2019, José Gómez-Dans\n| ● e934c99 fixed conflict ​ Sep 12 2019, José Gómez-Dans\n| |\\ \n| | ● 17f541b Merge pull request #56 from UCL/issue51 ​ Sep 12 2019, José Gómez-Dans\n| | |\\ \n| ● | | ada4cf1 Not using uncertainty, just fix it, saves reading data in ​ Sep 12 2019, José Gómez-Dans\n| ● | | f18e08c This is issue51 ​ Sep 12 2019, José Gómez-Dans\n| ● | | 00324b0 Updating from issue51 ​ Sep 12 2019, José Gómez-Dans\n| ● | | d2f45fc Get the right dates to save single observations ​ Sep 12 2019, José Gómez-Dans\n| |/ / \n| ● | 1fb9902 Fixes interpolation of 0s ​ Sep 12 2019, José Gómez-Dans\n| ● | 1458f67 dask fix ​ Sep 12 2019, José Gómez-Dans\n| ● | 1fa1caa Fixes to save temporary inversion ​ Sep 12 2019, José Gómez-Dans\n| | | ● b1750a1 (origin/yapf, multiply-org/yapf, McWhity/yapf) Add the current li.. ​ Oct 24 2019, Tim Spain\n| | | ● eae7e9e Make changes flagged by smoothn ​ Oct 15 2019, Tim Spain\n| | | ● 20867c2 Revert \"Make the changes flagged by PEP8Speaks.\" ​ Oct 15 2019, Tim Spain\n| | | ● b4c9afd Make the changes flagged by PEP8Speaks. ​ Oct 15 2019, Tim Spain\n| | | ● f549dfb Linted watercloudmodel.py with yapf. ​ Oct 15 2019, Tim Spain\n| | | ● 446a527 Linted utils.py with yapf. ​ Oct 15 2019, Tim Spain\n| | | ● e17f28a Linted smoothn.py with yapf. ​ Oct 15 2019, Tim Spain\n| | | ● 35cf9cb Linted s2_observations.py with yapf. ​ Oct 15 2019, Tim Spain\n| | | ● 431118c Linted s1_observations.py with yapf. ​ Oct 15 2019, Tim Spain\n| | | ● e20487b Linted parmap.py with yapf. 
​ Oct 15 2019, Tim Spain\n| | | ● 486209a Linted logger.py with yapf. ​ Oct 15 2019, Tim Spain\n| | | ● 7273e9f Linted kaska_sar.py with yapf ​ Oct 15 2019, Tim Spain\n| | | ● 11f5c52 Linted kaska.py with yapf ​ Oct 15 2019, Tim Spain\n| | | ● 87ec580 Linted kaska.py with yapf ​ Oct 15 2019, Tim Spain\n| | | ● 27c82f2 Linted inference_runner.py with yapf ​ Oct 15 2019, Tim Spain\n| | | ● 2cdbe70 Linted entry.py with yapf ​ Oct 15 2019, Tim Spain\n| |_|/ \n|/| | \n● | | 1909887 Merge pull request #65 from UCL/issue64 ​ Oct 11 2019, David Pérez-Suárez\n● | | e85a29c Merge branch 'master' into issue64 ​ Oct 11 2019, David Pérez-Suárez\n|\\ \\ \\ \n| ● | | bd5dca7 Merge pull request #67 from UCL/tidy-up-entry ​ Oct 11 2019, David Pérez-Suárez\n| ● | | 5c78a7a Add docstrings to run_process, fix some PEP8. ​ Oct 3 2019, Ilektra Christidi\n| ● | | f989600 Re-instate entry.py, because a run_process function is needed aft.. ​ Oct 3 2019, Ilektra Christidi\n| ● | | 625cbeb Remove duplicated function define_temporal_grid from kaska.py. ​ Oct 3 2019, Ilektra Christidi\n| ● | | b4a3285 Use kaska's create_logger, adjust input arguments accordingly. ​ Oct 3 2019, Ilektra Christidi\n| ● | | 89257e1 Expose dask_client argument to run_kaska, but not all the way to .. ​ Oct 1 2019, Ilektra Christidi\n| ● | | fc679e4 Remove temporal_grid creation from run_kaska: it's not used. ​ Oct 1 2019, Ilektra Christidi\n| ● | | 92598c4 Clean up logger setup mess. ​ Sep 30 2019, Ilektra Christidi\n| ● | | 5e459bd Merge entry.py functionality into run_kaska script, and delete en.. 
​ Sep 30 2019, Ilektra Christidi\n| ● | | 9bd1227 Remove duplicated run_kaska.py ​ Sep 30 2019, Ilektra Christidi\n● | | | 46c64fa Added block size indication again ​ Sep 27 2019, Jose Gomez-Dans\n● | | | 99be931 Removed some unnecessary code and imports ​ Sep 27 2019, Jose Gomez-Dans\n● | | | f98900d Updated `run_process` to take dask client ​ Sep 27 2019, Jose Gomez-Dans\n● | | | 3d42614 Removes unnecessary files ​ Sep 27 2019, Jose Gomez-Dans\n|/ / / \n● | | c23e69c Merge pull request #62 from UCL/issue61 ​ Sep 27 2019, José Gómez-Dans\n● | | 80d090e Expose block_size to main runner ​ Sep 27 2019, Jose Gomez-Dans\n● | | 04c7660 Merge pull request #58 from UCL/issue57 ​ Sep 25 2019, José Gómez-Dans\n● | | a4ea033 Merge branch 'master' into issue57 ​ Sep 24 2019, José Gómez-Dans\n|\\ \\ \\ \n| ● \\ \\ 34433c6 Merge pull request #52 from UCL/issue51 ​ Sep 13 2019, José Gómez-Dans\n| |\\ \\ \\ \n| | ● | | f77e550 Fixed typo ​ Sep 13 2019, José Gómez-Dans\n| | | |/ \n| | |/| \n| | ● | e2c3fee Linted ​ Sep 10 2019, Jose Gomez-Dans\n| | ● | 64abfd5 Deals with tiles where not masked pixels exist ​ Sep 10 2019, Jose Gomez-Dans\n| | ● | fc5f561 Check the number of unmasked pixels in chunk ​ Sep 10 2019, Jose Gomez-Dans\n| ● | | 1869b87 Dask map no longer takes iterators (#55) ​ Sep 13 2019, David Pérez-Suárez\n| ● | | 9fe5c12 Bizarre copy/paste bug removed ​ Sep 12 2019, José Gómez-Dans\n| ● | | 6441e38 Dask map no longer takes iterators ​ Sep 12 2019, José Gómez-Dans\n| | |/ \n| |/| \n● | | 2fddc36 Some further edits to track master ​ Sep 24 2019, José Gómez-Dans\n● | | e52d40a Ensure inversion acknowledges state mask ​ Sep 24 2019, José Gómez-Dans\n|/ / \n| | ● e140b1e (origin/issue37, multiply-org/issue37, McWhity/issue37) Low hanging.. 
​ Sep 11 2019, José Gómez-Dans\n| |/ \n|/| \n● | 520ab88 Sensible logging (#53) ​ Sep 11 2019, David Pérez-Suárez\n● | b267a0b Update kaska/entry.py ​ Sep 11 2019, José Gómez-Dans\n● | f12e170 Added logging infrastructure ​ Sep 10 2019, José Gómez-Dans\n● | 2ea12df Typo ​ Sep 10 2019, José Gómez-Dans\n● | 0f23386 Added common logger. Needs testing ​ Sep 10 2019, Jose Gomez-Dans\n● | 5aa7112 Updated logger ​ Sep 10 2019, Jose Gomez-Dans\n● | 1310b37 Merge branch 'master' into sensible_logging ​ Sep 10 2019, Jose Gomez-Dans\n|\\| \n| ● e9fbc2e Merge pull request #50 from UCL/gitignore_new ​ Sep 10 2019, José Gómez-Dans\n| |\\ \n| | ● 02edfa3 Added Eclipse, macos, linux and windows ​ Sep 10 2019, Jose Gomez-Dans\n| | ● 9f76ba4 Added vim and emacs ​ Sep 9 2019, Jose Gomez-Dans\n| | ● afc2422 Improved gitignore ​ Sep 9 2019, Jose Gomez-Dans\n| ● | f675133 Merge pull request #28 from UCL/entry_point ​ Sep 9 2019, José Gómez-Dans\n| ● | a723973 Fixed typo ​ Sep 9 2019, Jose Gomez-Dans\n| ● | f5486f7 Need to save outputs ​ Sep 9 2019, Jose Gomez-Dans\n| ● | ea3185d Merge branch 'master' into entry_point ​ Sep 9 2019, José Gómez-Dans\n| |\\ \\ \n| | ● \\ 07cae78 Merge pull request #44 from UCL/nn_librarydocs ​ Sep 9 2019, José Gómez-Dans\n| | |\\ \\ \n| | | |/ \n| | |/| \n| | | ● b1c8938 Added documentation and linting ​ Sep 5 2019, Jose Gomez-Dans\n| | ● | 10a44d4 Merge pull request #45 from UCL/issue43 ​ Sep 9 2019, José Gómez-Dans\n| | |\\ \\ \n| | | ● | 7d0aee5 Clearer naming, documentation and linting ​ Sep 6 2019, Jose Gomez-Dans\n| | | |/ \n| | ● | 1ce8236 Merge pull request #47 from UCL/rasterise_vector ​ Sep 9 2019, José Gómez-Dans\n| | |\\ \\ \n| | | ● | 7decd3c Linting of tests and changes to documentation ​ Sep 9 2019, Jose Gomez-Dans\n| | | ● | 2443930 Passes tests. 
Addresses #46 ​ Sep 8 2019, José Gómez-Dans\n| | | ● | 663c517 Added vector rasteriser ​ Sep 8 2019, Jose Gomez-Dans\n| | | |/ \n| | ● | 5dd2866 Merge pull request #49 from UCL/fix_nn_loader ​ Sep 9 2019, José Gómez-Dans\n| | ● | 7097a32 Fixes npz loader issues ​ Sep 8 2019, Jose Gomez-Dans\n| | |/ \n| ● | 4ef3a65 Little debugging stuff - kaska now runs end-to-end, but takes forev.. ​ Aug 22 2019, Ilektra Christidi\n| ● | 6a75ce0 Input arguments added to entry script. ​ Aug 21 2019, Ilektra Christidi\n| ● | 32c3db3 Move hardcoded dates and paths out of run_process and into the entr.. ​ Aug 20 2019, Ilektra Christidi\n| ● | 235eb9c Restore reading data files from package, that was lost in the mergi.. ​ Aug 20 2019, Ilektra Christidi\n| ● | e13697e Rename script and add python definition in the beginning. ​ Aug 20 2019, Ilektra Christidi\n| ● | e8119de Merge branch 'master' into entry_point ​ Aug 20 2019, Ilektra Christidi\n| |\\ \\ \n| ● | | 7ddb2b5 Moves kaska main to its own entry point ​ Aug 19 2019, David Perez-Suarez\n● | | | 0e0307c Logger functionality ​ Sep 6 2019, Jose Gomez-Dans\n| |_|/ \n|/| | \n● | | 5619288 Merge pull request #30 from UCL/sar_wcm ​ Sep 3 2019, Ilektra Christidi\n● | | aec66d6 Merge branch 'master' into sar_wcm ​ Sep 3 2019, Ilektra Christidi\n|\\ \\ \\ \n| ● | | f242c1d Merge pull request #31 from UCL/develop ​ Sep 3 2019, Ilektra Christidi\n| ● | | da5bb32 Merge branch 'master' into develop ​ Sep 3 2019, Tim Spain\n| |\\ \\ \\ \n| | | |/ \n| | |/| \n| ● | | 31a5871 Add file existence check and info output back into NNParameterInv.. ​ Sep 2 2019, Ilektra Christidi\n| ● | | f0d3ba8 Now tested ​ Aug 16 2019, José Gómez-Dans\n| ● | | 386aa11 Set up problem interface ​ Aug 16 2019, José Gómez-Dans\n| ● | | 7499fa0 Removed commented out stuff ​ Aug 16 2019, Jose Gomez-Dans\n| ● | | dbec108 Made sort of working function. 
Needs remote testing ​ Aug 16 2019, Jose Gomez-Dans\n| ● | | 3a8d818 updated ​ Aug 16 2019, José Gómez-Dans\n| ● | | 11d0622 process tile function ​ Aug 16 2019, José Gómez-Dans\n| ● | | 6391aba added as system going down ​ Aug 16 2019, José Gómez-Dans\n| ● | | c32701a Can be imported into something sensible ​ Aug 15 2019, José Gómez-Dans\n| ● | | 22a5b58 added tests ​ Aug 15 2019, José Gómez-Dans\n| ● | | fdedb1b Added get_chunks ​ Aug 15 2019, José Gómez-Dans\n| ● | | 2d39339 Fixed imports ​ Aug 15 2019, José Gómez-Dans\n| ● | | 79adc11 Changed setup.cfg to deal with installed packages ​ Aug 15 2019, José Gómez-Dans\n| ● | | 19c1b92 Added some small NN emulator/inverter library ​ Aug 15 2019, José Gómez-Dans\n| ● | | c8a6333 Removed kaska_out ​ Aug 13 2019, José Gómez-Dans\n| ● | | fab1430 Changed import to follow python3 convention ​ Aug 13 2019, José Gómez-Dans\n| ● | | 01e5749 Merge branch 'master' into develop ​ Aug 13 2019, José Gómez-Dans\n| |\\ \\ \\ \n| | | |/ \n| | |/| \n| ● | | 8725a1c Add to previous commit! 
​ Aug 12 2019, José Gómez-Dans\n| ● | | 324233c Dump S2 outputs on GeoTIFF files ​ Aug 12 2019, José Gómez-Dans\n| ● | | de0a910 Added some documentation ​ Aug 7 2019, José Gómez-Dans\n| ● | | a00a6a0 Fixed file listing bug ​ Jul 25 2019, José Gómez-Dans\n● | | | a10fd99 Merge branch 'master' into sar_wcm ​ Sep 3 2019, Tim Spain\n|\\ \\ \\ \\ \n| | |_|/ \n| |/| | \n| ● | | c22842d Merge pull request #19 from UCL/neural_nets ​ Aug 20 2019, Ilektra Christidi\n| ● | | 3e415f6 Merge branch 'master' into neural_nets ​ Aug 19 2019, Ilektra Christidi\n| |\\ \\ \\ \n| | | |/ \n| | |/| \n| | ● | 6613316 Moving tests inside library (#26) ​ Aug 13 2019, David Pérez-Suárez\n| | ● | 742776b Updates tests to use relative imports ​ Aug 12 2019, David Perez-Suarez\n| | ● | d8944a4 tests moved under project folder ​ Aug 12 2019, David Perez-Suarez\n| | ● | 39d3a81 Merge pull request #24 from UCL/gitignore ​ Aug 6 2019, José Gómez-Dans\n| | |\\ \\ \n| | | ● | c17fe8a Changed .gitignore so file tracking is cleaner under vscode ​ Aug 6 2019, José Gómez-Dans\n| | ● | | 426f845 Merge pull request #21 from UCL/testdata ​ Aug 5 2019, José Gómez-Dans\n| | |\\ \\ \\ \n| | | |/ / \n| | |/| | \n| | | ● | 2d496d0 Fixed typo ​ Jul 24 2019, José Gómez-Dans\n| | | ● | d7950b6 Change module search path ​ Jul 24 2019, José Gómez-Dans\n| | | ● | 26a0adc Added NN test file ​ Jul 16 2019, José Gómez-Dans\n| | | ● | b92e401 Now with relative paths ​ Jul 16 2019, José Gómez-Dans\n| | | ● | 8a891f7 reproject_data test works locally ​ Jul 16 2019, José Gómez-Dans\n| | | ● | c2879d5 Renamed test function ​ Jul 16 2019, José Gómez-Dans\n| | | ● | fd77783 Added simple reproject test ​ Jul 16 2019, José Gómez-Dans\n| | | ● | 92a4a00 Added some sample files for testing ​ Jul 16 2019, José Gómez-Dans\n| | ● | | 015e25b Merge pull request #22 from timspainUCL/condapep2 ​ Jul 19 2019, José Gómez-Dans\n| | ● | | 702ff09 Add dots to relative paths for local subpackages ​ Jul 19 2019, Tim Spain\n| | ● | | b6d0f86 
Hard coding the packages means cond build finds them. ​ Jul 19 2019, Tim Spain\n| | ● | | 7504fda Conda build files ​ Jul 19 2019, Tim Spain\n| | ● | | 1905340 Add setup.py for package creation. ​ Jul 12 2019, Tim Spain\n| | |/ / \n| ● | | 2b8f2dc This can now install using setup.cfg ​ Aug 15 2019, José Gómez-Dans\n● | | | c436fd8 This is needed to runt ests ​ Aug 13 2019, José Gómez-Dans\n● | | | aaf5d59 Moved tests to new test folder location ​ Aug 13 2019, José Gómez-Dans\n● | | | e171417 Trying to move tests to new format ​ Aug 13 2019, José Gómez-Dans\n● | | | a9bf6b0 Removed LAI*cab to just LAI ​ Aug 8 2019, José Gómez-Dans\n● | | | 6d25c16 Operation on images ​ Aug 8 2019, José Gómez-Dans\n● | | | 8695149 Working pixel by pixel ​ Aug 7 2019, José Gómez-Dans\n● | | | bf3366e Line wrapper ​ Aug 7 2019, José Gómez-Dans\n● | | | 8efbfb2 More ROI spelunking ​ Aug 7 2019, José Gómez-Dans\n● | | | b37af71 Merged from develop ​ Aug 7 2019, José Gómez-Dans\n● | | | d61c6e6 Allow selecting a ROI in the S1 observations ​ Aug 7 2019, José Gómez-Dans\n● | | | 1f262c7 Added SAR processing example testbed ​ Aug 7 2019, José Gómez-Dans\n● | | | 1fb5a81 kaska/kaska.py ​ Aug 7 2019, José Gómez-Dans\n● | | | 266995d Some minor changes. 
Canot get cost function to compile with numba ​ Aug 7 2019, José Gómez-Dans\n● | | | ac64f2b Try numba.jit on the WCM stuff ​ Aug 6 2019, José Gómez-Dans\n● | | | 9014bbe Fixed missing srcSRS parameter ​ Aug 6 2019, José Gómez-Dans\n● | | | 841cf2e Clean up git tracking ​ Aug 6 2019, José Gómez-Dans\n● | | | 25b9bee Reader for Sentinel 1 LMU netCDF files ​ Aug 6 2019, José Gómez-Dans\n● | | | 0c20cab Moved define temporal grid into utils ​ Aug 6 2019, José Gómez-Dans\n● | | | 08ab1ed Added tests for WCM and cost function ​ Aug 5 2019, José Gómez-Dans\n● | | | 52bbf0b Added WCM codes ​ Aug 5 2019, José Gómez-Dans\n● | | | 8d71c2b Changed running period and save results for testing ​ Jul 25 2019, José Gómez-Dans\n● | | | bac444e Bug in reading in files ​ Jul 25 2019, José Gómez-Dans\n|/ / / \n● | | 35446ce Updated setup.cfg ​ Jul 25 2019, José Gómez-Dans\n|\\ \\ \\ \n| ● | | 61adf9e Correct syntax for package_data. ​ Jul 16 2019, Ilektra Christidi\n| | |/ \n| |/| \n● | | 1bddd9a Using NNets provided by package ​ Jul 25 2019, José Gómez-Dans\n● | | da000ee Updated from master to install package ​ Jul 25 2019, José Gómez-Dans\n|/ / \n● | f018c03 Added prosail 2layer NN ​ Jul 16 2019, José Gómez-Dans\n● | 227d8f2 Added new NN emulator ​ Jul 16 2019, José Gómez-Dans\n● | f4157f9 Added inverter NN config file to package. Addresses #3 ​ Jul 16 2019, José Gómez-Dans\n|/ \n● 955b63f Updated Jose's local develop to track remote ​ Jul 15 2019, José Gómez-Dans\n|\\ \n| ● 9846191 Merge pull request #16 from timspainUCL/testsmoothn ​ Jul 11 2019, José Gómez-Dans\n● | f805953 Output saving module ​ Jul 15 2019, José Gómez-Dans\n● | 472d045 Added parallel map ​ Jul 15 2019, José Gómez-Dans\n● | 27f72f9 Added some TODO comments ​ Jul 10 2019, José Gómez-Dans\n● | 3f5af8b First dodgy pass code version ​ Jul 10 2019, José Gómez-Dans\n| | ● 38d09ad (origin/smoothn, McWhity/smoothn) Factor out the initialization of .. 
​ Jul 26 2019, Tim Spain\n| | ● 36f7e3b Factor out the initialization of z ​ Jul 26 2019, Tim Spain\n| | ● 8af4017 Factor out calculation of the smoothness bounds ​ Jul 26 2019, Tim Spain\n| | ● 7ae86d9 Break out the definition of the lambda tensor ​ Jul 26 2019, Tim Spain\n| | ● 5804391 Remove 'MATLAB' from the error messages ​ Jul 26 2019, Tim Spain\n| | ● a948562 Make things more PEP 8 compliant ​ Jul 26 2019, Tim Spain\n| | ● 6edda81 Tidy up exit cases, remove magic numbers ​ Jul 26 2019, Tim Spain\n| | ● dc30062 Remove semicolons; ​ Jul 25 2019, Tim Spain\n| | ● 4903de8 Preprocessing of data moved to a function ​ Jul 25 2019, Tim Spain\n| | ● 3900450 Refactor weight prep into functions ​ Jul 25 2019, Tim Spain\n| | ● b3fa65e Add tests for masked data arrays, and the cauchy and talworth weigh.. ​ Jul 25 2019, Tim Spain\n| | ● b11cb20 Add masked array testing ​ Jul 11 2019, Tim Spain\n| | ● 374e889 Comment spelling ​ Jul 11 2019, Tim Spain\n| | ● 76f88b9 Factor out the conversion of standard deviations to weights ​ Jul 11 2019, Tim Spain\n| | ● fdd5258 Test for sd versus weights ​ Jul 11 2019, Tim Spain\n| |/ \n| ● f2446f1 Update test_smoothn.py ​ Jul 11 2019, Tim Spain\n| ● cbe2a9e test_smoothn.py bounds ​ Jul 11 2019, Tim Spain\n| ● 8844b63 pytest, not py.test ​ Jul 11 2019, Tim Spain\n| ● f6d2566 Run pytest as a module ​ Jul 11 2019, Tim Spain\n| ● f19f6cd Add pytest to conda ​ Jul 10 2019, Tim Spain\n| ● a7774b0 Run pytest through conda ​ Jul 10 2019, Tim Spain\n| ● ee28e04 Add dependencies to conda ​ Jul 10 2019, Tim Spain\n| ● 70564d7 Remove gdal ​ Jul 10 2019, Tim Spain\n| ● 74c714b Conda installation ​ Jul 10 2019, Tim Spain\n| ● 4aef1b9 Use conda ​ Jul 10 2019, Tim Spain\n| ● 6cfd322 Add gdal to dependencies ​ Jul 10 2019, Tim Spain\n| ● 81e72d7 Add kaska to PYTHONPATH ​ Jul 10 2019, Tim Spain\n| ● 07d1a96 Update pyproject.toml ​ Jul 10 2019, Tim Spain\n| ● c6a9364 Update setuptools ​ Jul 10 2019, Tim Spain\n| ● addba81 Add setuptools to requirements ​ 
Jul 10 2019, Tim Spain\n| ● 91c22d4 Update tox.ini ​ Jul 10 2019, Tim Spain\n| ● 898ee3b Update tox.ini ​ Jul 10 2019, Tim Spain\n| ● 458785d Update tox.ini ​ Jul 10 2019, Tim Spain\n| ● 28ba652 Update tox.ini to 3.7 ​ Jul 10 2019, Tim Spain\n| ● cff3c6f Update travis.yml ​ Jul 10 2019, Tim Spain\n| ● 2447c56 Update Travis for newer pip ​ Jul 10 2019, Tim Spain\n| ● a544060 Merge pull request #2 from jgomezdans/develop ​ Jul 10 2019, Tim Spain\n| |\\ \n| |/ \n|/| \n● | de3e749 Moved NNParameterInversion into its own submodule ​ Jul 5 2019, José Gómez-Dans\n● | 118f84b Deleted file ​ Jul 5 2019, José Gómez-Dans\n● | 4a68f88 Merge pull request #15 from timspainUCL/pep517mk2 ​ Jul 5 2019, José Gómez-Dans\n|\\ \\ \n| ● | f496dad Delete the pre-PEP517/8 setup.py ​ Jul 5 2019, Tim Spain\n| ● | 919b487 Add install requirements to the setup file. ​ Jul 3 2019, Tim Spain\n| ● | 0295135 Attempt to use declarative setup.cfg ​ Jul 3 2019, Tim Spain\n| ● | 7f4e95c Merge pull request #1 from jgomezdans/develop ​ Jul 5 2019, Tim Spain\n● | | 34fb6ac Cleaner logs and less warnings ​ Jul 5 2019, José Gómez-Dans\n● | | eae46c8 Added logging code at top ​ Jul 5 2019, José Gómez-Dans\n● | | 4da0f02 Added some logging code at the top ​ Jul 5 2019, José Gómez-Dans\n|/ / \n● | 6e3cb53 Update pip install on travis ​ Jul 5 2019, José Gómez-Dans\n● | a5774a3 Fixed a couple of import bugs and typos ​ Jul 4 2019, José Gómez-Dans\n● | 247fe98 Changes docs, logs and so on ​ Jul 4 2019, José Gómez-Dans\n● | 9d98821 Changed imports ​ Jul 4 2019, José Gómez-Dans\n● | 898e55a Added logging to ANN inversion ​ Jul 4 2019, José Gómez-Dans\n● | fdc9904 Moved common funcs to utils ​ Jul 4 2019, José Gómez-Dans\n● | 55e1584 Moved NNParameterInversion to its own file. ​ Jul 4 2019, José Gómez-Dans\n| ● 622951b Test vaguely simulating the real useage of smoothn. 
​ Jul 9 2019, Tim Spain\n| ● 902f3c7 First smoothn test, from the MatLab examples ​ Jul 5 2019, Tim Spain\n| ● e75e197 Ignore Mac and Eclipse hidden files ​ Jul 3 2019, Tim Spain\n|/ \n● fbfc1cf Move reproject_data to function. Addresses #6 ​ Jul 2 2019, José Gómez-Dans\n● 9b94fe1 First pass inversion working ​ Jul 2 2019, José Gómez-Dans\n● 69f668e Pre processing mostly done ​ Jun 12 2019, José Gómez-Dans\n● 4d1e4fe Added smoothn code ​ Jun 12 2019, José Gómez-Dans\n● 8ad8560 Deal with versioning ​ Jun 12 2019, José Gómez-Dans\n● c0c47c1 Added some requirement packages... ​ May 30 2019, José Gómez-Dans\n● f7b6d50 Added cost function to this file ​ May 30 2019, José Gómez-Dans\n● 4903954 A bit more updating.. ​ May 30 2019, José Gómez-Dans\n● 586befc Can subset observation scanning if passed a time grid ​ May 29 2019, José Gómez-Dans\n● dba7264 Updated ​ May 29 2019, Jose Gomez-Dans\n● a8c6578 Updates to pass on code to UCL system ​ May 29 2019, Jose Gomez-Dans\n● 4722549 Wrong location of SIAC angle files ​ May 10 2019, Jose Gomez-Dans\n● 131f577 clean ups ​ May 10 2019, Jose Gomez-Dans\n● 49292ff Cosmetic changes ​ May 10 2019, Jose Gomez-Dans\n● 098bcf0 Reads a time series of S2 files ​ May 10 2019, Jose Gomez-Dans\n● c798e3e Added observations reader ​ May 10 2019, Jose Gomez-Dans\n● 5da22ee Added Feng's TwoNN library ​ May 10 2019, Jose Gomez-Dans\n● 6c3ee43 Only consider py36, probably later 3.7 ​ May 10 2019, Jose Gomez-Dans\n● 177ccff First commit from cookiecutter ​ May 10 2019, Jose Gomez-Dans\n\n", + "settings": + { + "buffer_size": 59009, + "line_ending": "Unix", + "name": "GRAPH", + "read_only": true, + "scratch": true + } + }, + { + "contents": "\n ROOT: /media/tweiss/Work/GIT/GitHub/UCL/kaska\n\n BRANCH: On branch `sar_quick` tracking `origin/sar_quick`.\n You're ahead by 5.\n HEAD: c826075 move to UniRo\n\n LOCAL:\n 4e43706 master (origin/master)\n ▸ c826075 sar_quick (origin/sar_quick, ahead 5)\n 5f2353a sar_quick_lmu\n\n ** Press [e] to toggle 
display of remote branches. **\n\n #############\n ## ACTIONS ##\n #############\n\n [c] checkout [p] push selected to remote\n [b] create from selected branch [P] push all branches to remote\n [d] delete [h] fetch remote branches\n [D] delete (force) [m] merge selected into active branch\n [R] rename (local) [M] fetch and merge into active branch\n [t] configure tracking\n\n [f] diff against active [l] show branch log\n [H] diff history against active [g] show branch log graph\n [E] edit branch description\n\n [e] toggle display of remote branches\n [tab] transition to next dashboard\n [SHIFT-tab] transition to previous dashboard\n [r] refresh\n [?] toggle this help menu\n\n-\n\n", + "settings": + { + "buffer_size": 1208, + "line_ending": "Unix", + "name": "BRANCHES: kaska", + "read_only": true, + "scratch": true + }, + "undo_stack": + [ + [ + 4, + 1, + "gs_new_content_and_regions", + { + "content": "\n ROOT: /media/tweiss/Work/GIT/GitHub/UCL/kaska\n\n BRANCH: On branch `sar_quick` tracking `origin/sar_quick`.\n You're ahead by 5.\n HEAD: c826075 move to UniRo\n\n LOCAL:\n 4e43706 master (origin/master)\n ▸ c826075 sar_quick (origin/sar_quick, ahead 5)\n 5f2353a sar_quick_lmu\n\n ** Press [e] to toggle display of remote branches. **\n\n #############\n ## ACTIONS ##\n #############\n\n [c] checkout [p] push selected to remote\n [b] create from selected branch [P] push all branches to remote\n [d] delete [h] fetch remote branches\n [D] delete (force) [m] merge selected into active branch\n [R] rename (local) [M] fetch and merge into active branch\n [t] configure tracking\n\n [f] diff against active [l] show branch log\n [H] diff history against active [g] show branch log graph\n [E] edit branch description\n\n [e] toggle display of remote branches\n [tab] transition to next dashboard\n [SHIFT-tab] transition to previous dashboard\n [r] refresh\n [?] 
toggle this help menu\n\n-\n\n", + "regions": + { + "branch_list": + [ + 188, + 298 + ], + "branch_status": + [ + 64, + 144 + ], + "git_root": + [ + 12, + 51 + ], + "head": + [ + 156, + 177 + ], + "help": + [ + 356, + 1207 + ], + "remotes": + [ + 298, + 356 + ] + } + }, + "AQAAAAAAAAAAAAAAuAQAAAAAAAAAAAAA", + "AQAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAPC/" + ] + ] + } + ], + "build_system": "Packages/User/python_repl.sublime-build", + "build_system_choices": + [ + [ + [ + [ + "Packages/Makefile/Make.sublime-build", + "" + ], + [ + "Packages/Makefile/Make.sublime-build", + "Clean" + ], + [ + "Packages/Python/Python.sublime-build", + "" + ], + [ + "Packages/Python/Python.sublime-build", + "Syntax Check" + ], + [ + "Packages/SublimeREPL/sublimerepl_build_system_hack.sublime-build", + "" + ], + [ + "Packages/User/python_repl.sublime-build", + "" + ], + [ + "Packages/User/python_repl.sublime-build", + "python2.7" + ], + [ + "Packages/User/python_repl.sublime-build", + "python2.7_extra_window" + ], + [ + "Packages/User/python_repl.sublime-build", + "python3.6" + ], + [ + "Packages/User/python_repl.sublime-build", + "python3.6_new" + ], + [ + "Packages/User/python_repl.sublime-build", + "python3.6_new_old" + ], + [ + "Packages/User/python_repl.sublime-build", + "python3.6_extra_window" + ], + [ + "Packages/User/python_repl.sublime-build", + "ipython3.6" + ] + ], + [ + "Packages/User/python_repl.sublime-build", + "python3.6" + ] + ], + [ + [ + [ + "Packages/Makefile/Make.sublime-build", + "" + ], + [ + "Packages/Makefile/Make.sublime-build", + "Clean" + ], + [ + "Packages/SublimeREPL/sublimerepl_build_system_hack.sublime-build", + "" + ], + [ + "Packages/User/python_repl.sublime-build", + "" + ], + [ + "Packages/User/python_repl.sublime-build", + "python2.7" + ], + [ + "Packages/User/python_repl.sublime-build", + "python2.7_extra_window" + ], + [ + "Packages/User/python_repl.sublime-build", + "python3.6" + ], + [ + "Packages/User/python_repl.sublime-build", + 
"python3.6_new" + ], + [ + "Packages/User/python_repl.sublime-build", + "python3.6_new_old" + ], + [ + "Packages/User/python_repl.sublime-build", + "python3.6_extra_window" + ], + [ + "Packages/User/python_repl.sublime-build", + "ipython3.6" + ] + ], + [ + "Packages/User/python_repl.sublime-build", + "python3.6" + ] + ], + [ + [ + [ + "Packages/Python/Python.sublime-build", + "" + ], + [ + "Packages/Python/Python.sublime-build", + "Syntax Check" + ], + [ + "Packages/SublimeREPL/sublimerepl_build_system_hack.sublime-build", + "" + ], + [ + "Packages/User/python_repl.sublime-build", + "" + ], + [ + "Packages/User/python_repl.sublime-build", + "python2.7" + ], + [ + "Packages/User/python_repl.sublime-build", + "python2.7_extra_window" + ], + [ + "Packages/User/python_repl.sublime-build", + "python3.6" + ], + [ + "Packages/User/python_repl.sublime-build", + "python3.6_extra_window" + ], + [ + "Packages/User/python_repl.sublime-build", + "ipython3.6" + ] + ], + [ + "Packages/User/python_repl.sublime-build", + "python3.6_extra_window" + ] + ], + [ + [ + [ + "Packages/Python/Python.sublime-build", + "" + ], + [ + "Packages/Python/Python.sublime-build", + "Syntax Check" + ], + [ + "Packages/SublimeREPL/sublimerepl_build_system_hack.sublime-build", + "" + ], + [ + "Packages/User/python_repl.sublime-build", + "" + ], + [ + "Packages/User/python_repl.sublime-build", + "python2.7" + ], + [ + "Packages/User/python_repl.sublime-build", + "python2.7_extra_window" + ], + [ + "Packages/User/python_repl.sublime-build", + "python3.6" + ], + [ + "Packages/User/python_repl.sublime-build", + "python3.6_new" + ], + [ + "Packages/User/python_repl.sublime-build", + "python3.6_extra_window" + ], + [ + "Packages/User/python_repl.sublime-build", + "ipython3.6" + ] + ], + [ + "Packages/User/python_repl.sublime-build", + "python3.6_new" + ] + ], + [ + [ + [ + "Packages/Python/Python.sublime-build", + "" + ], + [ + "Packages/Python/Python.sublime-build", + "Syntax Check" + ], + [ + 
"Packages/SublimeREPL/sublimerepl_build_system_hack.sublime-build", + "" + ], + [ + "Packages/User/python_repl.sublime-build", + "" + ], + [ + "Packages/User/python_repl.sublime-build", + "python2.7" + ], + [ + "Packages/User/python_repl.sublime-build", + "python2.7_extra_window" + ], + [ + "Packages/User/python_repl.sublime-build", + "python3.6" + ], + [ + "Packages/User/python_repl.sublime-build", + "python3.6_new" + ], + [ + "Packages/User/python_repl.sublime-build", + "python3.6_new_old" + ], + [ + "Packages/User/python_repl.sublime-build", + "python3.6_extra_window" + ], + [ + "Packages/User/python_repl.sublime-build", + "ipython3.6" + ] + ], + [ + "Packages/User/python_repl.sublime-build", + "python3.6" + ] + ] + ], + "build_varint": "", + "command_palette": + { + "height": 0.0, + "last_filter": "", + "selected_items": + [ + [ + "git stat", + "git: status" + ], + [ + "git tag", + "git: tag" + ], + [ + "package install", + "Package Control: Install Package" + ], + [ + "status", + "git: status" + ], + [ + "push", + "git: push to branch name" + ], + [ + "install", + "Package Control: Install Package" + ], + [ + "install ", + "Package Control: Install Package" + ] + ], + "width": 0.0 + }, + "console": + { + "height": 150.0, + "history": + [ + "import urllib.request,os,hashlib; h = '6f4c264a24d933ce70df5dedcf1dcaee' + 'ebe013ee18cced0ef93d5f746d80ef60'; pf = 'Package Control.sublime-package'; ipp = sublime.installed_packages_path(); urllib.request.install_opener( urllib.request.build_opener( urllib.request.ProxyHandler()) ); by = urllib.request.urlopen( 'http://packagecontrol.io/' + pf.replace(' ', '%20')).read(); dh = hashlib.sha256(by).hexdigest(); print('Error validating download (got %s instead of %s), please try manual install' % (dh, h)) if dh != h else open(os.path.join( ipp, pf), 'wb' ).write(by) " + ] + }, + "distraction_free": + { + "menu_visible": true, + "show_minimap": false, + "show_open_files": false, + "show_tabs": false, + "side_bar_visible": 
false, + "status_bar_visible": false + }, + "expanded_folders": + [ + "/media/tweiss/Work/GIT/GitHub/UCL/kaska", + "/media/tweiss/Work/GIT/GitHub/UCL/kaska/kaska", + "/media/tweiss/Work/GIT/GitHub/UCL/kaska/scripts" + ], + "file_history": + [ + "/media/tweiss/Work/GIT/GitHub/UCL/kaska/kaska/paper3_plot_scatter.py", + "/media/tweiss/Work/GIT/GitHub/UCL/kaska/kaska/paper3_plotting.py", + "/media/tweiss/Work/GIT/GitHub/UCL/kaska/kaska/paper3_mask.py", + "/media/tweiss/Work/GIT/GitHub/UCL/kaska/kaska/paper3_plot_esu.py", + "/media/tweiss/Work/GIT/GitHub/UCL/kaska/kaska/paper3_run_area_calculations.py", + "/media/tweiss/Work/GIT/GitHub/UCL/kaska/kaska/watercloudmodel_vwc_rms.py", + "/media/tweiss/Work/GIT/GitHub/UCL/kaska/kaska/watercloudmodel.py", + "/media/tweiss/Work/GIT/GitHub/UCL/kaska/kaska/kaska_jose_again.py", + "/media/tweiss/Work/GIT/GitHub/UCL/kaska/kaska/z_helper.py", + "/media/tweiss/Work/GIT/GitHub/UCL/kaska/kaska/kaska_sar_ssrt_vwc.py", + "/media/tweiss/Work/GIT/GitHub/UCL/kaska/kaska/kaska_ssrt_tau_rms.py", + "/media/tweiss/Work/GIT/GitHub/UCL/kaska/kaska/kaska_ssrt_tau_rms_maize.py", + "/media/tweiss/Work/GIT/GitHub/UCL/kaska/kaska/entry.py", + "/media/tweiss/Work/GIT/GitHub/UCL/kaska/kaska/paper3_plot_old.py", + "/media/tweiss/Work/GIT/GitHub/UCL/kaska/kaska/paper3_inspection.py", + "/media/tweiss/Work/GIT/GitHub/UCL/kaska/kaska/paper3_reproject.py", + "/media/tweiss/Work/GIT/GitHub/UCL/kaska/kaska/paper3_scatterplot.py", + "/media/tweiss/Work/GIT/GitHub/UCL/kaska/kaska/paper3_inspection_esu.py", + "/media/tweiss/Work/GIT/GitHub/UCL/kaska/kaska/kaska_sar.py", + "/media/tweiss/Work/GIT/GitHub/UCL/kaska/kaska/utils.py", + "/media/tweiss/Work/GIT/GitHub/UCL/kaska/kaska/kaska_sar_ssrt.py", + "/media/tweiss/Work/GIT/GitHub/UCL/kaska/kaska/load_npy.py", + "/media/tweiss/Work/GIT/GitHub/UCL/kaska/kaska/kaska_sar_ssrt_jose.py", + "/media/tweiss/Work/GIT/GitHub/UCL/kaska/kaska/kaska_sar_ssrt_new_version.py", + 
"/media/tweiss/Work/GIT/GitHub/UCL/kaska/kaska/kaska_ssrt_again.py", + "/media/tweiss/Work/GIT/GitHub/UCL/kaska/kaska/kaska_ssrt_tau.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/combine_extract_sentinel_and_field_data_running_kafka_ucl.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/z_helper.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/z_rt_model_calculations.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/z_dense_s1_time_series_plot_2.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/z_dense_s1_time_series_plot.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/z_optimization_vwc.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/z_dense_s1_time_series_paper3_vwc.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/z_rt_model_calculations_paper_3_vwc_final.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/sense_sensitivity.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/z_rt_model_plot_time_series.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/z_rt_model_calculations_paper_3_vwc.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/z_dense_s1_time_series_plot_1.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/z_optimization.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/plot_entropy_anisotorpy.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/sense/surface/oh2004.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/sense/core.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/sense/canopy.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/sense/dielectric/__init__.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/sense/dielectric/dobson85.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/sense/dielectric/epsmodel.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/sense/util.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/sense/surface/scatter.py", + 
"/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/sense/scatterer.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/sense/model.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/sense/__init__.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/sense/surface/__init__.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/sense/soil.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/setup.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/sense/surface/dubois95.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/sense/surface/oh1992.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/plot_sensitivity.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/z_dense_sensitivity_plot.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/try.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/doc/fig10-10.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/feng_backsactter_input.py", + "/media/tweiss/Daten/combine_extract_sentinel_and_field_data_running_kafka_2.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/test_output2.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/z_dense_s1_time_series.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/combine_extract_sentinel_and_field_data_running_kafka_ucl_puh.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/versuch.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/z_dense_s1_time_series_plot_3.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/z_dense_s1_time_series_plot_ratio.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/plot_ratio.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/emulator_ssrt_S2.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/feng.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/rho_diff .py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/run_sense_jose.py", + 
"/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/try_sense_module_backscatter_input.py", + "/media/tweiss/Daten/combine_extract_sentinel_and_field_data_running_kafka_consortium.py", + "/media/tweiss/Work/GIT/GitHub/UCL/kaska/test_output.py", + "/media/tweiss/Work/validity_range.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/z_dense_s1_time_series_sensitivity.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/z_dense_sensitivity_plot_residium.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/try_sense_module.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/z_rt_model_review_plot.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/z_rt_model_vali_boxplot_taylor.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/z_dense_s1_time_series_sensitivity_old.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/z_rt_2018.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/z_rt_model_statistic_problem.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/sense/surface/i2em.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/versuch_losschen.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/zz_delete_not_needed_anymore_paper_plot_vallidation.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/paper_plot_ben.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/check_incidence_angle.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/z_EGU2018_poster.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/validation_plots.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/versuch_oh-dubois.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/paper_plot_final_mano.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/sense/surface/watercloud.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/EGU_poster.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/paper_plot.py", + 
"/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/sense/version.py", + "/media/tweiss/Work/GIT/GitHub/PMarzahn/pygeo/sense/paper_plot_scatterplot.py", + "/home/tweiss/.config/sublime-text-3/Packages/SublimeREPL/config/Python/Main.sublime-menu", + "/media/tweiss/Daten/combine_extract_sentinel_and_field_data_running_kafka.py_consortium" + ], + "find": + { + "height": 34.0 + }, + "find_in_files": + { + "height": 116.0, + "where_history": + [ + "" + ] + }, + "find_state": + { + "case_sensitive": false, + "find_history": + [ + "cost_function_vwc", + "vwcb", + "spatial", + "mask", + "scatterplot", + "taylor", + "spatial", + "boxplot_sm_area", + "precip", + "0.38", + "taylor", + "figsize", + "plt.savefig", + "esu", + "taylor", + "pdb", + "plot2", + "mean_all_bias", + "scat", + "pdb", + "fontsize", + "-16", + "pdb", + "field", + "taylor", + "boxplot", + "spatial", + "plot_taylor", + "taylor_plot", + "ccoef", + "plot_taylor", + "plot_scat", + "rms", + "f, ax", + "self.plot(", + "self.plot", + "spatial", + "spatial_cal", + "spatial", + "yy", + "spatial", + "eich", + "Eich", + "eich", + "eichen", + "pdb", + "sm_std", + "pdb", + "unc", + "extract", + "rgb", + "ax", + "axis(1", + "boxplot", + "sm_api", + "boxplot", + "'b_0515", + "pdb.set_trace(", + "boxplot", + "vv", + "b_0515", + "omega", + "VV [dB]", + "'Date", + "Date", + "plt.title('ome", + "plt.title", + "plt.title(omega", + "spatial_calculation", + "scatterplot", + "z_helper", + "esu_size_tiff", + "mask_fields", + "time_contrainst", + "plot", + "cmap", + "m_", + "_m", + "m_", + "analysis", + "date", + "scatter", + "times", + "ndwi1_mag", + "mag", + "save", + "Paper3_down", + "ndwi", + "version", + "pdb", + "ticklabs", + "pdb", + "version", + "versions", + "years", + "cmap", + "pdb", + "year", + "pdb.set", + "pdb.set_", + "pol", + "sar_inference_data", + "pdb", + "version", + "sar_inference_data", + "vwc_data", + "inference_prepro", + "vwc_data", + "read_vwc", + "ndwi", + "vwc", + "state_mask", + "mask", + "raster", + 
"rasterize", + "mask", + "py", + "ndwi", + "ndwi1", + "inference_pre", + "interp1d", + "local", + "s2_lai", + "norm_multi", + "reproject", + "save_to_tif", + ".tif", + "reproject" + ], + "highlight": true, + "in_selection": false, + "preserve_case": false, + "regex": false, + "replace_history": + [ + "analysis_m" + ], + "reverse": false, + "scrollbar_highlights": true, + "show_context": true, + "use_buffer2": true, + "use_gitignore": true, + "whole_word": false, + "wrap": true + }, + "groups": + [ + { + "sheets": + [ + { + "buffer": 0, + "semi_transient": false, + "settings": + { + "buffer_size": 59009, + "regions": + { + }, + "selection": + [ + [ + 66, + 66 + ] + ], + "settings": + { + "bracket_highlighter.busy": false, + "bracket_highlighter.locations": + { + "close": + { + }, + "icon": + { + }, + "open": + { + }, + "unmatched": + { + } + }, + "bracket_highlighter.regions": + [ + "bh_default", + "bh_default_center", + "bh_default_open", + "bh_default_close", + "bh_default_content", + "bh_round", + "bh_round_center", + "bh_round_open", + "bh_round_close", + "bh_round_content", + "bh_double_quote", + "bh_double_quote_center", + "bh_double_quote_open", + "bh_double_quote_close", + "bh_double_quote_content", + "bh_c_define", + "bh_c_define_center", + "bh_c_define_open", + "bh_c_define_close", + "bh_c_define_content", + "bh_regex", + "bh_regex_center", + "bh_regex_open", + "bh_regex_close", + "bh_regex_content", + "bh_curly", + "bh_curly_center", + "bh_curly_open", + "bh_curly_close", + "bh_curly_content", + "bh_square", + "bh_square_center", + "bh_square_open", + "bh_square_close", + "bh_square_content", + "bh_angle", + "bh_angle_center", + "bh_angle_open", + "bh_angle_close", + "bh_angle_content", + "bh_unmatched", + "bh_unmatched_center", + "bh_unmatched_open", + "bh_unmatched_close", + "bh_unmatched_content", + "bh_single_quote", + "bh_single_quote_center", + "bh_single_quote_open", + "bh_single_quote_close", + "bh_single_quote_content", + "bh_tag", + 
"bh_tag_center", + "bh_tag_open", + "bh_tag_close", + "bh_tag_content" + ], + "color_scheme": "Packages/User/GitSavvy/GitSavvy.log_graph_view.color_scheme.hidden-tmTheme", + "default_dir": "/media/tweiss/Work/GIT/GitHub/UCL/kaska/kaska", + "git_savvy.log_graph_view": true, + "git_savvy.log_graph_view.all_branches": true, + "git_savvy.log_graph_view.apply_filters": "", + "git_savvy.log_graph_view.branches": + [ + ], + "git_savvy.log_graph_view.decoration": "sparse", + "git_savvy.log_graph_view.filter_by_author": "", + "git_savvy.log_graph_view.filters": "", + "git_savvy.log_graph_view.follow": "HEAD", + "git_savvy.log_graph_view.paths": + [ + ], + "git_savvy.log_graph_view.show_commit_info_panel": false, + "git_savvy.log_graph_view.show_tags": true, + "git_savvy.repo_path": "/media/tweiss/Work/GIT/GitHub/UCL/kaska", + "syntax": "Packages/GitSavvy/syntax/graph.sublime-syntax" + }, + "translation.x": 0.0, + "translation.y": 0.0, + "zoom_level": 1.0 + }, + "stack_index": 1, + "stack_multiselect": false, + "type": "text" + }, + { + "buffer": 1, + "selected": true, + "semi_transient": false, + "settings": + { + "buffer_size": 1208, + "regions": + { + }, + "selection": + [ + [ + 227, + 227 + ] + ], + "settings": + { + "__vi_external_disable": false, + "bracket_highlighter.busy": false, + "bracket_highlighter.locations": + { + "close": + { + }, + "icon": + { + }, + "open": + { + }, + "unmatched": + { + } + }, + "bracket_highlighter.regions": + [ + "bh_default", + "bh_default_center", + "bh_default_open", + "bh_default_close", + "bh_default_content", + "bh_round", + "bh_round_center", + "bh_round_open", + "bh_round_close", + "bh_round_content", + "bh_double_quote", + "bh_double_quote_center", + "bh_double_quote_open", + "bh_double_quote_close", + "bh_double_quote_content", + "bh_c_define", + "bh_c_define_center", + "bh_c_define_open", + "bh_c_define_close", + "bh_c_define_content", + "bh_regex", + "bh_regex_center", + "bh_regex_open", + "bh_regex_close", + 
"bh_regex_content", + "bh_curly", + "bh_curly_center", + "bh_curly_open", + "bh_curly_close", + "bh_curly_content", + "bh_square", + "bh_square_center", + "bh_square_open", + "bh_square_close", + "bh_square_content", + "bh_angle", + "bh_angle_center", + "bh_angle_open", + "bh_angle_close", + "bh_angle_content", + "bh_unmatched", + "bh_unmatched_center", + "bh_unmatched_open", + "bh_unmatched_close", + "bh_unmatched_content", + "bh_single_quote", + "bh_single_quote_center", + "bh_single_quote_open", + "bh_single_quote_close", + "bh_single_quote_content", + "bh_tag", + "bh_tag_center", + "bh_tag_open", + "bh_tag_close", + "bh_tag_content" + ], + "default_dir": "/media/tweiss/Work/GIT/GitHub/UCL/kaska/kaska", + "git_gutter_is_enabled": false, + "git_savvy.branch_view": true, + "git_savvy.help_hidden": false, + "git_savvy.interface": "branch", + "git_savvy.repo_path": "/media/tweiss/Work/GIT/GitHub/UCL/kaska", + "git_savvy.tabbable": true, + "syntax": "Packages/GitSavvy/syntax/branch.sublime-syntax" + }, + "translation.x": 0.0, + "translation.y": 0.0, + "zoom_level": 1.0 + }, + "stack_index": 0, + "stack_multiselect": false, + "type": "text" + } + ] + }, + { + "sheets": + [ + ] + } + ], + "incremental_find": + { + "height": 34.0 + }, + "input": + { + "height": 45.0 + }, + "layout": + { + "cells": + [ + [ + 0, + 0, + 1, + 1 + ], + [ + 1, + 0, + 2, + 1 + ] + ], + "cols": + [ + 0.0, + 0.593728927849, + 1.0 + ], + "rows": + [ + 0.0, + 1.0 + ] + }, + "menu_visible": true, + "output.GitSavvy": + { + "height": 144.0 + }, + "output.find_results": + { + "height": 0.0 + }, + "output.mdpopups": + { + "height": 0.0 + }, + "output.show_commit_info": + { + "height": 144.0 + }, + "output.unsaved_changes": + { + "height": 144.0 + }, + "pinned_build_system": "", + "project": "kaska.sublime-project", + "replace": + { + "height": 64.0 + }, + "save_all_on_build": true, + "select_file": + { + "height": 0.0, + "last_filter": "", + "selected_items": + [ + ], + "width": 0.0 + }, + 
"select_project": + { + "height": 500.0, + "last_filter": "", + "selected_items": + [ + ], + "width": 380.0 + }, + "select_symbol": + { + "height": 0.0, + "last_filter": "", + "selected_items": + [ + ], + "width": 0.0 + }, + "selected_group": 1, + "settings": + { + }, + "show_minimap": true, + "show_open_files": false, + "show_tabs": true, + "side_bar_visible": true, + "side_bar_width": 437.0, + "status_bar_visible": true, + "template_settings": + { + } +} diff --git a/kaska/__init__.py b/kaska/__init__.py index fc42e77..0d0fc1d 100644 --- a/kaska/__init__.py +++ b/kaska/__init__.py @@ -12,5 +12,6 @@ from .inverters import get_inverters, get_inverter from .kaska import KaSKA from .s2_observations import Sentinel2Observations -from .utils import get_chunks, define_temporal_grid +from .utils import get_chunks, define_temporal_grid, reproject_data from .inference_runner import kaska_runner +from .watercloudmodel import cost_function diff --git a/kaska/agv_plot_input_output.py b/kaska/agv_plot_input_output.py new file mode 100644 index 0000000..f631576 --- /dev/null +++ b/kaska/agv_plot_input_output.py @@ -0,0 +1,334 @@ +import numpy as np +import pdb +from osgeo import gdal +import matplotlib.pyplot as plt +from z_helper import * +import datetime +import seaborn as sns +from matplotlib.colors import ListedColormap +from pandas.plotting import register_matplotlib_converters +from osgeo.osr import SpatialReference, CoordinateTransformation +import pyproj + +class plot_input_output(object): + + def __init__(self, path, passes, year): + + """ + time_contrainst = ['no'] + """ + plot_folder = 'inputoutput' + if not os.path.exists(os.path.join(path,passes,plot_folder)): + os.makedirs(os.path.join(path,passes,plot_folder)) + self.load_data(path,passes,year) + + plt.rcParams["figure.figsize"] = (20,15) + + # self.plot_model_param(years,esus,passes,time_contrainst) + + self.plot(path,passes,plot_folder,self.var_sm,self.time,name='var_sm',vmin=0.05,vmax=0.5) + self.plot(path, 
passes, plot_folder, self.var_sm_api, self.time, name='var_sm_api') + self.plot(path, passes, plot_folder, self.var_vwc_input, self.time, name='var_vwc_input',vmin=0,vmax=4) + self.plot(path, passes, plot_folder, self.var_vwc_output, self.time, name='var_vwc_output') + self.plot(path, passes, plot_folder, self.var_b, self.time, name='var_b') + self.plot(path, passes, plot_folder, self.var_rms, self.time, name='var_rms') + self.plot(path, passes, plot_folder, 10*np.log10(self.var_vv_input), self.time, name='var_vv_input') + self.plot(path, passes, plot_folder, self.var_theta_input, self.time, name='var_theta_input') + self.plot(path, passes, plot_folder, self.var_ndwi_input, self.time, name='var_ndwi_input') + + def load_data(self,path,passes,year): + self.var_sm = np.load(os.path.join(path,passes,year + '_multi_' + 'sm' + '.npy')) + self.var_sm_api = np.load(os.path.join(path,passes,year + '_multi_' + 'input_sm_api' + '.npy')) + self.var_vwc_input = np.load(os.path.join(path,passes,year + '_multi_' + 'input_vwc' + '.npy')) + self.var_vwc_output = np.load(os.path.join(path,passes,year + '_multi_' + 'vwc' + '.npy')) + self.var_b = np.load(os.path.join(path,passes,year + '_multi_' + 'b' + '.npy')) + self.var_rms = np.load(os.path.join(path,passes,year + '_multi_' + 'rms' + '.npy')) + self.var_vv_input = np.load(os.path.join(path,passes,year + '_multi_' + 'input_vv' + '.npy')) + self.var_theta_input = np.load(os.path.join(path,passes,year + '_multi_' + 'input_theta' + '.npy')) + self.var_ndwi_input = np.load(os.path.join(path,passes,year + '_multi_' + 'input_ndwi' + '.npy')) + self.time = np.load(os.path.join(path,passes,year + '_multi_times.npy'), allow_pickle=True) + + + def plot_model_param(self,years,esus,passes,time_contrainst): + """ + plot model output sm, vwc, b, rms + plot model input vv, sm_api, vwc + + """ + + param = ['sm', 'vwc', 'b', 'rms', 'input_vv', 'input_sm_api', 'input_vwc'] + ymin_mean = [0.2, 0, 0, 0.005, -5, 0.23, 0] + ymax_mean = [0.3, 5, 0.6, 
0.03, -16, 0.27, 5] + ymin_std = [0.0, 0, 0.1, 0.0, None, 0.0, 0] + ymax_std = [0.25, 3, 0.25, 1e-16, None, 0.25, 3] + ymin_var = [0, None, None, None, None, 0, None] + ymax_var = [0.4, None, None, None, None, 0.8, None] + + for i, par in enumerate(param): + + for year in years: + + if year == '2017': + fields = [0,301,319,542,508,515] + if year == '2018': + fields = [0,317,410,525,508] + + for time_con in time_contrainst: + + for field in fields: + + + + g = gdal.Open('/media/tweiss/Work/Paper3_down/GIS/'+year+'_esu_field_buffer_30.tif') + state_mask = g.ReadAsArray().astype(np.int) + g = gdal.Open('/media/tweiss/Work/Paper3_down/GIS/clc_class2.tif') + state_mask_2 = g.ReadAsArray().astype(np.int) + + var_multi = np.load('/media/tweiss/Work/Paper3_down/'+passes+'/'+year+'_multi_'+par+'.npy') + + time = np.load('/media/tweiss/Work/Paper3_down/'+passes+'/'+year+'_multi_times.npy',allow_pickle=True) + + if time_con == 'yes': + m = time < datetime.datetime(int(year),7,15) + var_multi = var_multi[m] + time = time[m] + name_ex = year+'0715' + else: + name_ex = '' + pass + + if field > 0.: + var_multi = self.mask_fields(var_multi,field,state_mask) + # for t, tt in enumerate(time): + # if par == 'input_vv': + # self.plot(10*np.log10(var_multi[t]), vmin=ymin_mean[i], vmax=ymax_mean[i], name='field/'+par+'_'+str(field)+'_'+str(tt)[:10], mask=state_mask_2,par=par, passes=passes) + # else: + # self.plot(var_multi[t], vmin=ymin_mean[i], vmax=ymax_mean[i], name='field/'+par+'_'+str(field)+'_'+str(tt)[:10], mask=state_mask_2,par=par, passes=passes) + + if par == 'sm': + file = '/media/tweiss/Work/z_final_mni_data_2017/new_in_situ_s1multi_buffer_100_'+year+'_paper3.csv' + + data = pd.read_csv(file,header=[0,1],index_col=1) + + data_field = data.filter(like=str(field)).filter(like='SM') + data_field.index = pd.to_datetime(data_field.index) + sm_insitu = data_field.mean(axis=1).values.flatten() + + date = data_field.index + + time2 = pd.to_datetime(time) + time2 = 
time2.strftime('%Y-%m-%d') + date2 = date.strftime('%Y-%m-%d') + mask_time = np.isin(time2,date2) + times = pd.to_datetime(date2) + + var_api = np.load('/media/tweiss/Work/Paper3_down/'+passes+'/'+year+'_multi_'+'input_sm_api'+'.npy') + var_api = self.mask_fields(var_api,field,state_mask) + + sm = self.extraction_xxx(var_multi,state_mask,mask_time) + sm_api = self.extraction_xxx(var_api,state_mask,mask_time) + + if year == '2017': + meteo = pd.read_csv('/media/tweiss/Work/Paper3_down/GIS/Eichenried_01012017_31122017_hourly.csv', sep=';', decimal=',') + meteo2 = meteo.stack().str.replace(',','.').unstack() + meteo2['date'] = pd.to_datetime(meteo2['Tag']+' '+meteo2['Stunde']) + meteo2['SUM']= pd.to_numeric(meteo2['SUM_NN050'],errors='coerce') + s = meteo2.resample('d', on='date')['SUM'].sum() + else: + s = None + + + + self.boxplot2(sm,par,field,year,times,passes,sm_api,sm_insitu,s) + else: + self.boxplot(var_multi,par,field,year,time,passes) + else: + pass + + value_mean, value_std, value_var = calc_pix(var_multi) + + if par == 'input_vv': + value_mean = 10*np.log10(value_mean) + value_std = 10*np.log10(value_std) + value_var = 10*np.log10(value_var) + + self.plot_rgb(var_multi[1],var_multi[20],var_multi[40],mask=state_mask_2,name='rgb/rgb_'+year+'_'+str(field),passes=passes) + + self.plot_rgb(var_multi[0],var_multi[int(len(var_multi)/2.)],var_multi[-1],mask=state_mask_2,name='rgb/rgb_bme'+year+'_'+str(field),passes=passes) + + self.plot_rgb(var_multi[1],var_multi[45],var_multi[85],mask=state_mask_2,name='rgb/rgb_0323_0530_0729'+year+'_'+str(field),passes=passes) + self.plot_rgb(var_multi[1],var_multi[45],var_multi[75],mask=state_mask_2,name='rgb/rgb_0323_0530_0715'+year+'_'+str(field),passes=passes) + self.plot_rgb(var_multi[45],var_multi[75],var_multi[85],mask=state_mask_2,name='rgb/rgb_0530_0715_0729'+year+'_'+str(field),passes=passes) + 
self.plot_rgb(var_multi[55],var_multi[92],var_multi[-1],mask=state_mask_2,name='rgb/rgb_0615_0809_0928'+year+'_'+str(field),passes=passes) + + + self.plot(value_mean, vmin=ymin_mean[i], vmax=ymax_mean[i], name='spatial_calculations/'+par+year+'value_mean'+name_ex+'_'+str(field), mask=state_mask_2, par=par, passes=passes, year=year) + self.plot(value_std, vmin=ymin_std[i], vmax=ymax_std[i], name='spatial_calculations/'+par+year+'value_std'+name_ex+'_'+str(field), mask=state_mask_2, par=par, passes=passes, year=year) + self.plot(value_var, vmin=ymin_var[i], vmax=ymax_var[i], name='spatial_calculations/'+par+year+'value_var'+name_ex+'_'+str(field), mask=state_mask_2, par=par, passes=passes, year=year) + self.plot(value_var, name='spatial_calculations/'+par+year+'value_var2'+name_ex+'_'+str(field), par=par, passes=passes, year=year) + + def extraction_xxx(self,var,state_mask,mask_time): + + xxx = np.copy(var) + xxx = xxx[mask_time,:] + return xxx + + + def plot(self,path,passes,plot_folder,input,time,vmin=None,vmax=None,name=None,mask=None,par=None,year=None): + + if not os.path.exists(os.path.join(path,passes,plot_folder,name)): + os.makedirs(os.path.join(path,passes,plot_folder,name)) + + if vmin == None: + vmin = np.nanmin(input) + + if vmax == None: + vmax = np.nanmax(input) + + for i, ii in enumerate(time): + dataset = input[i] + + f, ax = plt.subplots(1,1) + + try: + dataset = np.ma.masked_where(mask == 0.,dataset) + except IndexError: + pass + + if par == 'input_vv': + cmap = plt.cm.Greys_r + label = 'VV [dB]' + elif par == 'vwc': + cmap = plt.cm.YlGn + label = 'VWC [kg/m$^2$]' + elif par == 'input_vwc': + cmap = plt.cm.RdYlGn + label = 'kg/m$^2$' + else: + cmap = plt.cm.viridis_r + # label = 'Soil Moisture [m$^3$/m$^3$]' + label = '' + cmap.set_bad(color='white') + plt.rcParams['axes.labelsize'] = 20 + + im1 = ax.imshow(dataset,vmin=vmin, vmax=vmax, cmap=cmap, aspect='auto') + # ax.set_title(name, fontsize=20) + f.subplots_adjust(right=0.85) + cbar_ax = 
f.add_axes([0.8, 0.15, 0.04, 0.7]) + ticklabs = cbar_ax.get_yticklabels() + cbar_ax.set_yticklabels(ticklabs, fontsize=20) + f.colorbar(im1, cax=cbar_ax, label=label) + + ax.xaxis.set_tick_params(labelsize=20) + ax.yaxis.set_tick_params(labelsize=20) + # plt.setp(ax, xticks=[22*6.1, 2*22*6.1, 3*22*6.1, 4*22*6.1, 5*22*6.1, 6*22*6.1], xticklabels=['11.64°E', '11.66°E', '11.68°E', '11.70°E', '11.72°E', '11.74°E'], yticks=[22*6.12, (22+39)*6.12, (22+39*2)*6.12, (22+39*3)*6.12], yticklabels=['48.30°N', '48.28°N', '48.26°N', '48.24°N']) + ax.set_ylim(len(dataset),0) + plt.savefig(os.path.join(path,passes,plot_folder,name,ii.strftime('%Y%m%d')+'_'+name+str(dataset.mean())[0:5]+'.png'), bbox_inches='tight') + plt.close() + + + def plot_rgb(self,rrr,ggg,bbb,mask=None,name=None,passes=None): + + + rrr = 10*np.log10(rrr) + ggg = 10*np.log10(ggg) + bbb = 10*np.log10(bbb) + + try: + rrr = np.ma.masked_where(mask == 0.,rrr) + ggg = np.ma.masked_where(mask == 0.,ggg) + bbb = np.ma.masked_where(mask == 0.,bbb) + except IndexError: + pass + + OldMin = -20 + OldMax = -5 + NewMin = 0 + NewMax = 255 + + OldRange = (OldMax - OldMin) + NewRange = (NewMax - NewMin) + rrr2 = ((((rrr - OldMin) * NewRange) / OldRange) + NewMin).astype(int) + ggg2 = ((((ggg - OldMin) * NewRange) / OldRange) + NewMin).astype(int) + bbb2 = ((((bbb - OldMin) * NewRange) / OldRange) + NewMin).astype(int) + rgb = np.dstack((rrr2,ggg2,bbb2)) + plt.imshow(rgb) + plt.savefig('/media/tweiss/Work/Paper3_down/'+passes+'/'+name, bbox_inches='tight') + plt.close() + + def boxplot(self,var_multi,par,field,year,time,passes): + xx = var_multi.reshape(var_multi.shape[0], (var_multi.shape[1]*var_multi.shape[2])) + if par == 'input_vv': + sns.boxplot(np.repeat(np.arange(len(time)), len(xx[0])), 10*np.log10(xx.flatten())) + else: + sns.boxplot(np.repeat(np.arange(len(time)), len(xx[0])), xx.flatten()) + ind = list(range(1,len(time)+1)) + time2 = [i.strftime('%d-%m') for i in time] + plt.xticks(ind,time2, rotation=45) + 
plt.savefig('/media/tweiss/Work/Paper3_down/'+passes+'/boxplot/'+par+str(field)+'_'+str(year), bbox_inches='tight') + plt.close() + + def boxplot2(self,var_multi,par,field,year,time,passes,sm_api,sm_insitu,meteo=None): + f, ax = plt.subplots(1,1) + + xx = var_multi.reshape(var_multi.shape[0], (var_multi.shape[1]*var_multi.shape[2])) + + sns.boxplot(np.repeat(np.arange(len(time)), len(xx[0])), xx.flatten(), color='skyblue') + + sm_api2 = np.nanmean(sm_api,axis=(1,2)) + ax.plot(sm_api2,'r-o',linewidth=4, label='SM Api') + ax.plot(sm_insitu,'b-o',linewidth=4, label = 'SM insitu') + ind = list(range(1,len(time)+1)) + time2 = [i.strftime('%d-%m') for i in time] + plt.xticks(ind,time2, rotation=45) + ax.set_ylabel('SM') + plt.legend() + if year == '2017': + ax2 = ax.twinx() + mask_time2 = np.isin(meteo.index,time) + + ax2.bar(np.arange(len(meteo[mask_time2])),meteo[mask_time2]) + ax2.set_ylim(0,150) + ax2.set_xticks([]) + ax2.set_ylabel('Precipitation') + plt.xticks(ind,time2, rotation=45) + plt.savefig('/media/tweiss/Work/Paper3_down/'+passes+'/boxplot/'+par+str(field)+'_'+str(year), bbox_inches='tight') + plt.close() + + + def mask_fields(self,data,field,state_mask): + if field == 301: + mask_value = 87 + elif field == 319: + mask_value = 67 + elif field == 542: + mask_value = 8 + elif field == 508: + mask_value = 27 + elif field == 515: + mask_value = 4 + elif field == 317: + mask_value = 65 + elif field == 410: + mask_value = 113 + elif field == 525: + mask_value = 30 + else: + print("field not found") + + mask = state_mask == mask_value + xxx = np.copy(data) + xxx[:,~mask]=np.nan + + pos = np.argwhere(np.isfinite(xxx[0])) + x1 = np.min(pos[:,0]) + x2 = np.max(pos[:,0]) + y1 = np.min(pos[:,1]) + y2 = np.max(pos[:,1]) + + field_data = xxx[:,x1:x2,y1:y2] + return field_data + + + + diff --git a/kaska/inference_runner.py b/kaska/inference_runner.py index 26a1fe3..ebcc0ee 100644 --- a/kaska/inference_runner.py +++ b/kaska/inference_runner.py @@ -21,7 +21,7 @@ from 
.s2_observations import Sentinel2Observations from .kaska import KaSKA from .s1_observations import Sentinel1Observations -from .kaska_sar import sar_inversion, save_s1_output +from .kaska_sar import save_output, KaSKASAR Config = namedtuple( "Config", "s2_obs s1_obs temporal_grid state_mask" + \ @@ -166,14 +166,14 @@ def process_tile(the_chunk, config): s2_obs.apply_roi(ulx, uly, lrx, lry) chunk_mask = s2_obs.state_mask.ReadAsArray() n_unmasked_pxls = np.sum(chunk_mask) - - + + if n_unmasked_pxls == 0: LOG.info(f"No pixels in chunk {hex(chunk_no):s}") return None else: # Define KaSKA object with windowed observations. - + LOG.info(f"Unmasked pixels in {hex(chunk_no):s}: {n_unmasked_pxls:d}") kaska = KaSKA( s2_obs, @@ -194,8 +194,8 @@ def process_tile(the_chunk, config): s1_obs = copy.copy(config.s1_obs) s1_obs.apply_roi(ulx, uly, lrx, lry) - - s1_inversion = KasKASAR(config.temporal_grid, config.state_mask, + print(config.s1_obs) + s1_inversion = KaSKASAR(config.temporal_grid, config.state_mask, s2_parameter_data, config.prior, chunk=hex(chunk)) s1_inversion.sentinel1_inversion() @@ -308,9 +308,9 @@ def kaska_runner( else: # Do the splitting LOG.info(f"Doing chunk {chunk:d}") - the_chunk = [the_chunk + the_chunk = [the_chunk for the_chunk in get_chunks( - nx, ny, block_size=block_size) + nx, ny, block_size=block_size) if the_chunk[-1] == chunk] LOG.info("Single chunk!") wrapper(the_chunk[0]) diff --git a/kaska/kaska_jose_again.py b/kaska/kaska_jose_again.py new file mode 100644 index 0000000..1f984b2 --- /dev/null +++ b/kaska/kaska_jose_again.py @@ -0,0 +1,908 @@ + +import os +import pandas as pd +import matplotlib.pyplot as plt +import matplotlib.dates as mdates +from matplotlib.dates import MonthLocator +# import matplotlib.ticker +import numpy as np +# from sense.canopy import OneLayer +# from sense.soil import Soil +# from sense import model +import scipy.stats +from scipy.optimize import minimize +import pdb +from z_helper import * +# from z_optimization 
import * +import datetime +from matplotlib import gridspec +import datetime +from matplotlib.lines import Line2D +import copy +import os +import osr +import gdal +import datetime +import numpy as np +from netCDF4 import Dataset +from scipy.ndimage import label +from utils import reproject_data +from skimage.filters import sobel +from collections import namedtuple +from scipy.optimize import minimize +from scipy.interpolate import interp1d +from watercloudmodel import cost_function +from watercloudmodel import cost_function2 +from scipy.ndimage.filters import gaussian_filter1d +import pdb +from z_helper import * + +def smooth(y, box_pts): + box = np.ones(box_pts)/box_pts + y_smooth = np.convolve(y, box, mode='same') + return y_smooth + +def fresnel(eps, theta): + theta = np.deg2rad(theta) + num = (eps-1)*(np.sin(theta)**2 - eps*(1+np.sin(theta)**2)) + den = eps*np.cos(theta) + np.sqrt(eps - np.sin(theta)**2) + den = den**2 + return np.abs(num/den) + +def mv2eps(a, b, c, mv): + eps = a + b * mv + c * mv**2 + return eps + +def quad_approx_solver(a, b, c, theta, alphas): + x = np.arange(0.01, 0.5, 0.01) + p = np.polyfit(x, fresnel(mv2eps(a, b, c, x),theta.mean()), 2) + # 2nd order polynomial + #solve + solutions = [np.roots([p[0], p[1], p[2]-aa]) for aa in alphas] + return solutions + + +def do_one_pixel_field(sar_inference_data, vv, vh, lai, theta, time, sm, sm_std, sr, sr_std, orbits, unc=1.): + + + lais = [] + srs = [] + alphas = [] + sms = [] + ps = [] + times = [] + uorbits = np.unique(orbits) + for orbit in uorbits: + orbit_mask = orbits == orbit + ovv, ovh, olai, otheta, otime = vv[orbit_mask], vh[orbit_mask], lai[orbit_mask], theta[orbit_mask], time[orbit_mask] + osm, osm_std, osro, osro_std = sm[orbit_mask], sm_std[orbit_mask], sr[orbit_mask], sr_std[orbit_mask] + + olai_std = np.ones_like(olai)*0.05 + + alpha = fresnel(mv2eps(1.99, 38.9, 11.5, osm), otheta) + alpha_std = np.ones_like(alpha)*0.2 + + soil_sigma_mask = olai < 1 + sigma_soil_vv_mu = 
np.mean(ovv[soil_sigma_mask]) + sigma_soil_vh_mu = np.mean(ovh[soil_sigma_mask]) + + xvv = np.array([1, 0.5, sigma_soil_vv_mu]) + xvh = np.array([1, 0.5, sigma_soil_vh_mu]) + + prior_mean = np.concatenate([[0, ]*6, alpha, osro, olai]) + prior_unc = np.concatenate([[10., ]*6, alpha_std, osro_std, olai_std]) + + x0 = np.concatenate([xvv, xvh, alpha, osro, olai]) + + bounds = ( + [[None, None]] * 6 + + [[0.1, 3.3]] * olai.shape[0] + + [[0, .03]] * olai.shape[0] + + [[0, 8]] * olai.shape[0] + ) + + gamma = [1000, 1000] + retval = minimize(cost_function, + x0, + args=(ovh, ovv, otheta, gamma, prior_mean, prior_unc, unc), + jac=True, + bounds = bounds, + options={"disp": False},) + + posterious_lai = retval.x[6+2*len(olai) : ] + posterious_sr = retval.x[6+len(olai) : 6+2*len(olai)] + posterious_alpha = retval.x[6 : 6+len(olai)] + sols = np.array(quad_approx_solver(1.99, 38.9, 11.5, otheta, posterious_alpha)).min(axis=1) + lais.append(posterious_lai) + srs.append(posterious_sr) + sms.append(sols) + times.append(otime) + ps.append(retval.x[:6]) + + order = np.argsort(np.hstack(times)) + times = np.hstack(times )[order] + lais = np.hstack(lais )[order] + srs = np.hstack(srs )[order] + sms = np.hstack(sms )[order].real + return times, lais, srs, sms, np.array(ps) + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +# def do_one_pixel_field(data_field, vv, vh, lai, theta, time, sm, sm_std, sr, sr_std, height, orbits, unc): + +# lais = [] +# coefs = [] +# sms = [] +# times = [] + +# uorbits = np.unique(orbits) +# # uorbits = np.array([44]) +# for orbit in uorbits: +# orbit_mask = orbits == orbit +# # orbit_mask = (orbits == 44) | (orbits == 168) +# # orbit_mask = (orbits == 44) | (orbits == 95) | (orbits == 117) | (orbits == 168) +# # orbit_mask = (orbits == 95) +# ovv, ovh, olai, otheta, otime = vv[orbit_mask], vh[orbit_mask], lai[orbit_mask], theta[orbit_mask], time[orbit_mask] +# osm, osm_std, oscoef, oscoef_std = sm[orbit_mask], sm_std[orbit_mask], 
coef[orbit_mask], coef_std[orbit_mask] + +# oheight = height[orbit_mask] + +# olai_std = np.ones_like(olai)*0.05 + +# alpha = _calc_eps(osm) +# alpha = osm +# alpha_std = np.ones_like(alpha)*10 +# alpha_std = osm_std +# # pdb.set_trace() +# prior_mean = np.concatenate([alpha,oscoef]) +# prior_unc = np.concatenate([alpha_std,oscoef_std]) +# x0 = np.concatenate([alpha,oscoef]) +# data = np.concatenate([oheight,olai]) +# bounds = ( +# # [[2.5, 30]] * olai.shape[0] +# [[0.01, 0.5]] * olai.shape[0] +# + [[0.01, 3]] * olai.shape[0] +# ) + +# gamma = [500, 500] + +# retval = minimize(cost_function2, +# x0, +# args=(ovh, ovv, otheta, gamma, prior_mean, prior_unc, data, unc), +# jac=True, +# bounds = bounds, +# options={"disp": True},) + +# # posterious_lai = retval.x[2*len(olai) : 3*len(olai)] +# posterious_coef = retval.x[len(olai) : +2*len(olai)] +# posterious_mv = retval.x[ : +len(olai)] +# # lais.append(posterious_lai) +# coefs.append(posterious_coef) +# # x = np.arange(0.01, 0.5, 0.001) +# # xx = _calc_eps(x) +# # sols=[] +# # for i in posterious_mv: +# # p, pp = find_nearest(xx,i) +# # sols.append(x[pp]) +# # sols = np.array(sols) + +# sms.append(posterious_mv) +# # sms.append(sols) +# times.append(otime) + +# order = np.argsort(np.hstack(times)) +# times = np.hstack(times )[order] +# # lais = np.hstack(lais )[order] +# lais=0 +# coefs = np.hstack(coefs )[order] +# # coefs=0 +# sms = np.hstack(sms )[order].real + +# return times, lais, coefs, sms, orbit_mask + + + +# def _simple_ew(): +# """ +# eq. 4.69 +# simplistic approach with T=23°C, bulk density = 1.7 g/cm3 +# """ +# f0 = 18.64 # relaxation frequency [GHz] +# f = 5.405 +# hlp = f/f0 +# e1 = 4.9 + (74.1)/(1.+hlp**2.) +# # e2 =(74.1*hlp)/(1.+hlp**2.) + 6.46 * self.sigma/self.f +# # return e1 + 1.j * e2 +# return e1 + +# def _calc_eps(mv): +# """ +# calculate dielectric permittivity +# Eq. 
4.66 (Ulaby et al., 2014) +# """ +# clay = 0.0738 +# sand = 0.2408 +# bulk = 1.45 +# alpha = 0.65 +# beta1 = 1.27-0.519*sand - 0.152*clay +# beta2 = 2.06 - 0.928*sand -0.255*clay +# sigma = -1.645 + 1.939*bulk - 2.256*sand + 1.594*clay + + +# e1 = (1.+0.66*bulk+mv**beta1*_simple_ew()**alpha - mv)**(1./alpha) +# # e2 = np.imag(self.ew)*self.mv**self.beta2 +# # return e1 + 1.j*e2 +# return e1 + +# def quad_approx_solver(alphas): +# x = np.arange(0.01, 0.5, 0.01) +# p = np.polyfit(x, _calc_eps(x), 2) +# # 2nd order polynomial +# #solve +# solutions = [np.roots([p[0], p[1], p[2]-aa]) for aa in alphas] +# return solutions + +def find_nearest(array, value): + array = np.asarray(array) + idx = (np.abs(array - value)).argmin() + return array[idx], idx + + + + + + + + +### Data preparation df_agro!!!! ### +#----------------------------------------------------------------- +# storage information +path = '/media/tweiss/Work/z_final_mni_data_2017' +file_name = 'in_situ_s1_buffer_50' # theta needs to be changed to for norm multi +extension = '.csv' + +path_agro = '/media/nas_data/2017_MNI_campaign/field_data/meteodata/agrarmeteorological_station' +path_agro = '/media/tweiss/Work/Paper/in_progress/RT_model_comparison/images' +file_name_agro = 'Daily_Freising' +extension_agro = '.csv' + +field = '508_high' +pol = 'vv' + +df, df_agro, field_data, field_data_orbit, theta_field, sm_field, height_field, lai_field, vwc_field, pol_field, vv_field, vh_field, relativeorbit, vwcpro_field = read_data(path, file_name, extension, field, path_agro, file_name_agro, extension_agro, pol) + +aggregation = ['','_buffer_30','_buffer_50','_buffer_100','_Field_buffer_30'] +pre_processing = ['multi', 'norm_multi'] +aggregation = ['_buffer_50','_Field_buffer_30'] +pre_processing = ['multi'] +# aggregation = ['_buffer_100'] +# aggregation = ['_Field_buffer_30'] +surface_list = ['Oh92', 'Oh04', 'Dubois95', 'WaterCloud', 'I2EM'] +canopy_list = ['turbid_isotropic', 'water_cloud'] + +surface_list = ['Oh92', 
'I2EM'] +canopy_list = ['turbid_isotropic'] + +surface_list = ['Oh04'] +# surface_list = ['Oh92'] +# canopy_list = ['water_cloud'] +field = ['508_high'] +# field = ['508_low'] +# field = ['508_med'] +# field = ['301_high'] +field = ['301_low'] +# field = ['301_med'] +# field = ['542_high'] +# field = ['542_low'] +# field = ['542_med'] + +### option for time invariant or variant calibration of parameter +#------------------------------- +opt_mod = ['time_variant'] +#--------------------------- + + +for p in pre_processing: + + for pp in aggregation: + + # versions = ['everything','','44_117','95_168','44_168','117_95','44_95','117_168','44_117_95','44_117_168','44_95_168','117_95_168'] + # ver = ['','','44','95','44','117','44','117','44','44','44','117'] + # ver2 = ['','','117','168','168','95','95','168','117','117','95','95'] + # ver3 = ['','','','','','','','','95','168','168','168'] + + versions = ['','everything'] + ver = ['',''] + ver2 = ['',''] + ver3 = ['',''] + + # versions = ['44_168'] + # ver = ['44'] + # ver2 = ['168'] + # ver3 = [''] + + for i, ii in enumerate(versions): + + if ii == 'everything': + orbit_list = [None] + orbit1=None + orbit2=None + orbit3=None + orbit4=None + plot_output_path = '/media/tweiss/Work/paper2/z_dense_s1_time_series_n7'+p+pp+'_all'+'/' + csv_output_path = plot_output_path+'csv/None_' + elif ii == '': + orbit_list = [44,117,95,168] + orbit2=None + orbit3=None + orbit4=None + plot_output_path = '/media/tweiss/Work/paper2/z_dense_s1_time_series_n7'+p+pp+'/' + csv_output_path = plot_output_path+'csv/' + else: + plot_output_path = '/media/tweiss/Work/paper2/z_dense_s1_time_series_n7'+p+pp+'_'+ii+'/' + csv_output_path = plot_output_path+'csv/'+ver[i]+'_'+ver[i]+'_' + orbit_list = [int(ver[i])] + orbit2 = int(ver2[i]) + if ver3[i] == '': + orbit3 = None + else: + orbit3 = int(ver3[i]) + + + data = pd.read_csv(csv_output_path+'all_50.csv',header=[0,1,2,3,4,5],index_col=0) + + + + + + + # fig, ax = plt.subplots(figsize=(17, 13)) + # 
gs = gridspec.GridSpec(5, 1, height_ratios=[14, 3, 3, 3, 3]) + # ax = plt.subplot(gs[0]) + + # plt.ylabel('Backscatter [dB]', fontsize=18) + # plt.xlabel('Date', fontsize=18) + # plt.tick_params(labelsize=17) + + # ax.set_ylim([-21.5,-8.5]) + + + # colormaps = ['Greens', 'Purples', 'Blues', 'Oranges', 'Reds', 'Greys', 'pink', 'bone', 'Blues', 'Blues', 'Blues'] + # r = 0 + + # colormap = plt.get_cmap(colormaps[r]) + # colors = [colormap(rr) for rr in np.linspace(0.35, 1., 3)] + + for kkk in opt_mod: + for kkkk in field: + for k in surface_list: + for kk in canopy_list: + + if k == 'Oh92': + hm = 'Oh92' + colors = 'b' + elif k == 'Oh04': + hm = 'Oh04' + colors = 'r' + elif k == 'Dubois95': + hm='Dubois95' + colors = 'y' + elif k == 'WaterCloud': + hm = 'WCM' + colors = 'm' + elif k == 'I2EM': + hm = 'IEM_B' + colors = 'g' + + data_field = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk) + data_field.index = pd.to_datetime(data_field.index) + date = data_field.index + + vv = data_field.filter(like='S1_vv').values.flatten() + vv = 10*np.log10(vv) + vh = data_field.filter(like='S1_vh').values.flatten() + vh = 10*np.log10(vh) + lai = data_field.filter(like='LAI_insitu').values.flatten() + lai = lai + theta = data_field.filter(like='theta').values.flatten() + theta = np.rad2deg(theta) + time = date + time2 = np.array(time) + for jj in range(len(time)): + time2[jj] = time[jj].replace(microsecond=0).replace(second=0).replace(minute=0) + time2 = pd.to_datetime(time2) + + + s2_data = pd.read_csv('/media/tweiss/Daten/data_AGU/S2_'+kkkk+pp+'.csv',header=[0],index_col=0) + s2_data.index = pd.to_datetime(s2_data.index).floor('Min').floor('H') + s2_lai = s2_data.loc[time2]['lai'].values.flatten() + s2_cab = s2_data.loc[time2]['cab'].values.flatten() + s2_cbrown = s2_data.loc[time2]['cbrown'].values.flatten() + # lai = s2_lai + sm_insitu = data_field.filter(like='SM_insitu').values.flatten() + api_data = 
pd.read_csv('/media/tweiss/Daten/data_AGU/api_sm.csv',header=[0],index_col=0) + api_data.index = pd.to_datetime(api_data.index) + api_sm = api_data.loc[time2].values.flatten() + sm = data_field.filter(like='SM_insitu').values.flatten() + # sm = smooth(sm,2) + sm[:] = 0.15 + # sm = api_sm + sm_std = data_field.filter(like='SM_insitu').values.flatten() + # ooo = np.abs(sm[1:]-sm[:-1])*20 + # sm_std[0] = ooo[-1] + # sm_std[1:] = ooo + sm_std[:] = 5.0 + + # out_shape = date + # lai_outputs = np.zeros(out_shape ) + # sm_outputs = np.zeros(out_shape ) + # sr_outputs = np.zeros(out_shape ) + + # ps_shape = (len(uorbits),) + lai + + # Avv_outputs = np.zeros(ps_shape) + # Bvv_outputs = np.zeros(ps_shape) + # Cvv_outputs = np.zeros(ps_shape) + + # Avh_outputs = np.zeros(ps_shape) + # Bvh_outputs = np.zeros(ps_shape) + # Cvh_outputs = np.zeros(ps_shape) + + + sr = lai*1. + sr[:] = 0.3 + sr_std = lai*1. + sr_std[:] = 2 + + vv = np.maximum(vv, 0.0001) + vv = 10 * np.log10(vv) + vh = np.maximum(vh, 0.0001) + vh = 10 * np.log10(vh) + + unc = 10.8 + orbits = data_field.filter(like='relativeorbit').values.flatten() + + times, lais, srs, sms, ps = do_one_pixel_field(data_field, vv, vh, lai, theta, time, sm, sm_std, sr, sr_std, orbits, unc=unc) + + # plt.plot(sm_insitu) + # plt.plot(sm) + # plt.plot(sms) + + # pdb.set_trace() + + # unc_array = np.arange(0,2,0.1) + # coef_array = np.arange(0,2,0.1) + # sm_array = np.arange(0,2,0.1) + + # hm = {} + # for r in unc_array: + # for rr in coef_array: + # for rrr in sm_array: + # unc = r + # sr_std[:] = rr + # sm_std[:] = rrr + # times, lais, srs, sms, ps = do_one_pixel_field(data_field, vv, vh, lai, theta, time, sm, sm_std, sr, sr_std, orbits, unc=unc) + # rmse_vv = rmse_prediction(sm_insitu,sms) + # bias_vv = bias_prediction(sm_insitu,sms) + # ubrmse_vv = ubrmse_prediction(rmse_vv,bias_vv) + # hm[(r,rr,rrr)] = ubrmse_vv + + # pdb.set_trace() + # min(hm, key=hm.get) + # hm[min(hm, key=hm.get)] + + # times, lais, coefs, sms, orbit_mask = 
do_one_pixel_field(data_field, vv, vh, lai, theta, time, sm, sm_std, coef, coef_std, height, orbits,unc=unc) + + plt.rcParams["figure.figsize"] = (10,7) + plt.plot(time,sm_insitu) + rmse_vv = rmse_prediction(sm_insitu,sm) + bias_vv = bias_prediction(sm_insitu,sm) + ubrmse_vv = ubrmse_prediction(rmse_vv,bias_vv) + plt.plot(time,sm, label='prior RMSE:'+str(rmse_vv)[0:6]+' ubRMSE:'+str(ubrmse_vv)[0:6]) + rmse_vv = rmse_prediction(sm_insitu,sms) + bias_vv = bias_prediction(sm_insitu,sms) + ubrmse_vv = ubrmse_prediction(rmse_vv,bias_vv) + plt.plot(times,sms, label='model RMSE:'+str(rmse_vv)[0:6]+' ubRMSE:'+str(ubrmse_vv)[0:6]) + + # #orbit_mask + # plt.plot(time[orbit_mask],sm_insitu[orbit_mask]) + # rmse_vv = rmse_prediction(sm_insitu[orbit_mask],sm[orbit_mask]) + # bias_vv = bias_prediction(sm_insitu[orbit_mask],sm[orbit_mask]) + # ubrmse_vv = ubrmse_prediction(rmse_vv,bias_vv) + # plt.plot(time[orbit_mask],sm[orbit_mask], label='prior RMSE:'+str(rmse_vv)[0:6]+' ubRMSE:'+str(ubrmse_vv)[0:6]) + # rmse_vv = rmse_prediction(sm_insitu[orbit_mask],sms) + # bias_vv = bias_prediction(sm_insitu[orbit_mask],sms) + # ubrmse_vv = ubrmse_prediction(rmse_vv,bias_vv) + # plt.plot(times,sms, label='model RMSE:'+str(rmse_vv)[0:6]+' ubRMSE:'+str(ubrmse_vv)[0:6]) + + + plt.legend() + plt.grid() + plt.ylabel('Soil Moisture') + plt.xlabel('Time') + plt.savefig('/media/tweiss/Daten/data_AGU/test_kaska/jose_unc08', bbox_inches = 'tight') + plt.show() + plt.close() + pdb.set_trace() + # plt.plot(time[orbit_mask],coef) + # plt.plot(times,coefs) + # plt.show() + pdb.set_trace() + pdb.set_trace() + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + if kkk == 'time invariant': + if kk == 'turbid_isotropic': + ax.plot(date, 10*np.log10(data_field.filter(like='vv_model')), color=colors, marker='s', linestyle='dashed', label = hm+ ' + ' + 'SSRT') + else: + ax.plot(date, 10*np.log10(data_field.filter(like='vv_model')), color=colors, marker='s', label = hm+ ' + ' + 'WCM') + 
else: + if kk == 'turbid_isotropic': + ax.plot(date, 10*np.log10(data_field.filter(like='vv_model')), color=colors, marker='s', linestyle='dashed', label = hm+ ' + ' + 'SSRT') + else: + ax.plot(date, 10*np.log10(data_field.filter(like='vv_model')), color=colors, marker='s', label = hm+ ' + ' + 'WCM') + a = 0 + b = 0 + c = 0 + d = 0 + + relativeorbit = data_field.filter(like='relativeorbit') + for j in range(len(relativeorbit)): + relativeorbit.index[j] + x = relativeorbit.index[j] - datetime.timedelta(days=0.4) + xx = relativeorbit.index[j] + datetime.timedelta(days=0.4) + if relativeorbit.values.flatten()[j] == 95: + if a == 0: + ax.axvspan(x,xx, color='red', alpha=0.2, label = 'Incidence angle 43°, Descending track') + a += 1 + else: + ax.axvspan(x,xx, color='red', alpha=0.2) + elif relativeorbit.values.flatten()[j] == 117: + if b == 0: + ax.axvspan(x,xx, color='blue', alpha=0.2, label = 'Incidence angle 45°, Ascending track') + b += 1 + else: + ax.axvspan(x,xx, color='blue', alpha=0.2) + elif relativeorbit.values.flatten()[j] == 168: + if c == 0: + ax.axvspan(x,xx, color='orange', alpha=0.2, label = 'Incidence angle 35°, Descending track') + c += 1 + else: + ax.axvspan(x,xx, color='orange', alpha=0.2) + elif relativeorbit.values.flatten()[j] == 44: + if d == 0: + ax.axvspan(x,xx, color='green', alpha=0.2, label = 'Incidence angle 36°, Ascending track') + d += 1 + else: + ax.axvspan(x,xx, color='green', alpha=0.2) + else: + pass + + ax.plot(date,10*np.log10(data_field.filter(like='S1_vv')), '-', color='black', label='Sentinel-1', linewidth=3, marker='s') + + ax.set_xlim([datetime.date(2017, 3, 22), datetime.date(2017, 7, 18)]) + plt.legend(prop={'size': 14}, loc=3) + + plt.grid(linestyle='dotted') + + plt.setp(ax.get_xticklabels(), visible=False) + + ax0 = plt.subplot(gs[1]) + plt.tick_params(labelsize=17) + for kkkk in field: + for k in surface_list: + for kk in canopy_list: + + if k == 'Oh92': + hm = 'Oh92' + colors = 'b' + elif k == 'Oh04': + hm = 'Oh04' + 
colors = 'r' + elif k == 'Dubois95': + hm='Dubois95' + colors = 'y' + elif k == 'WaterCloud': + hm = 'WCM' + colors = 'm' + elif k == 'I2EM': + hm = 'IEM_B' + colors = 'g' + + + ground = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk).filter(like='part_g') + ground = ground[ground.columns[0]] + + lai = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk).filter(like='LAI_insitu').values + theta = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk).filter(like='theta').values + + if kk == 'turbid_isotropic': + coef = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk).filter(like='coef').values + d = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk).filter(like='height').values + T = np.exp(-coef*np.sqrt(lai)*d/np.cos(theta)) + T=T**2 + ax0.plot(date,T.flatten(), color=colors, marker='s', linestyle='dashed') + else: + B_vv = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk).filter(like='B_vv').values + T = np.exp(-2*B_vv*lai/np.cos(theta)) + ax0.plot(date,T.flatten(), color=colors, marker='s') + + a = 0 + b = 0 + c = 0 + d = 0 + + relativeorbit = data_field.filter(like='relativeorbit') + for j in range(len(relativeorbit)): + relativeorbit.index[j] + x = relativeorbit.index[j] - datetime.timedelta(days=0.4) + xx = relativeorbit.index[j] + datetime.timedelta(days=0.4) + if relativeorbit.values.flatten()[j] == 95: + if a == 0: + ax0.axvspan(x,xx, color='red', alpha=0.2, label = 'descending 43°') + a += 1 + else: + ax0.axvspan(x,xx, color='red', alpha=0.2) + elif relativeorbit.values.flatten()[j] == 117: + if b == 0: + ax0.axvspan(x,xx, color='blue', alpha=0.2, label = 'ascending 43°') + b += 1 + else: + ax0.axvspan(x,xx, color='blue', alpha=0.2) + elif relativeorbit.values.flatten()[j] == 168: + if c == 0: + ax0.axvspan(x,xx, color='orange', alpha=0.2, label = 'descending 35°') + c += 1 + else: + ax0.axvspan(x,xx, color='orange', alpha=0.2) + elif 
relativeorbit.values.flatten()[j] == 44: + if d == 0: + ax0.axvspan(x,xx, color='green', alpha=0.2, label = 'ascending 36°') + d += 1 + else: + ax0.axvspan(x,xx, color='green', alpha=0.2) + else: + pass + plt.ylabel('Transmissivity\nT', fontsize=18) + ax0.set_xlim([datetime.date(2017, 3, 22), datetime.date(2017, 7, 18)]) + ax0.set_ylim(-0.2,1.1) + plt.grid(linestyle='dotted') + plt.setp(ax0.get_xticklabels(), visible=False) + + + ax1 = plt.subplot(gs[2], sharex = ax) + plt.tick_params(labelsize=17) + # remove vertical gap between subplots + plt.subplots_adjust(hspace=.0) + plt.grid(linestyle='dotted') + plt.setp(ax1.get_xticklabels(), visible=False) + + lai_field = data_field.filter(like='LAI_insitu') + height_field = data_field.filter(like='height') + + + ax1.plot(date,lai_field,color='green',linewidth=2,label='LAI') + ax2 = ax1.twinx() + plt.tick_params(labelsize=17) + ax2.plot(date,height_field,color='black', linewidth=2, label='Height') + ax1.set_ylabel('LAI', fontsize=16) + ax2.set_ylabel('Height\n[m]', fontsize=16) + + + # add std for LAI and height for field 508 (data from field measurements) + lai_old = copy.deepcopy(lai_field) + height_old = copy.deepcopy(height_field) + if field == '508_high': + lai_field[lai_field.index>'2017-03-28'] = 0.2218 + lai_field[lai_field.index>'2017-04-05'] = 0.1367 + lai_field[lai_field.index>'2017-04-10'] = 0.4054 + lai_field[lai_field.index>'2017-04-21'] = 0.3247 + lai_field[lai_field.index>'2017-05-02'] = 0.5546 + lai_field[lai_field.index>'2017-05-10'] = 0.5852 + lai_field[lai_field.index>'2017-05-16'] = 0.3058 + lai_field[lai_field.index>'2017-05-26'] = 0.5373 + lai_field[lai_field.index>'2017-05-29'] = 0.332 + lai_field[lai_field.index>'2017-06-02'] = 0.2856 + lai_field[lai_field.index>'2017-06-13'] = 0.4717 + lai_field[lai_field.index>'2017-06-26'] = 0.2982 + lai_field[lai_field.index>'2017-07-06'] = 0.253 + + height_field[height_field.index>'2017-03-28'] = 0.005774 + height_field[height_field.index>'2017-04-05'] = 
0.015275 + height_field[height_field.index>'2017-04-10'] = 0.026458 + height_field[height_field.index>'2017-04-21'] = 0.049329 + height_field[height_field.index>'2017-05-02'] = 0.01 + height_field[height_field.index>'2017-05-10'] = 0.01 + height_field[height_field.index>'2017-05-26'] = 0.028868 + height_field[height_field.index>'2017-05-29'] = 0.028868 + height_field[height_field.index>'2017-06-02'] = 0.028868 + height_field[height_field.index>'2017-06-13'] = 0.020817 + height_field[height_field.index>'2017-06-26'] = 0.025166 + height_field[height_field.index>'2017-07-06'] = 0.015275 + + ax1.fill_between(lai_field.index,lai_old.values.flatten()-lai_field.values.flatten(), lai_old.values.flatten()+lai_field.values.flatten(), color='green', alpha=0.2, label='Standard Deviation') + ax2.fill_between(height_field.index,height_old.values.flatten()-height_field.values.flatten(), height_old.values.flatten()+height_field.values.flatten(), color='black', alpha=0.2, label='Standard Deviation') + + ax2.legend(bbox_to_anchor=(.965, 0.45), prop={'size': 14}) + ax1.legend(loc=2, prop={'size': 14}) + + # ax1.set_xticks([]) + ax1.set_ylim(0,6.7) + ax2.set_ylim(0,1) + start, end = ax1.get_ylim() + ax1.yaxis.set_ticks(np.arange(start, end, 2)) + + # soil moisture and rainfall + ax3 = plt.subplot(gs[3], sharex = ax) + plt.tick_params(labelsize=17) + # remove vertical gap between subplots + plt.subplots_adjust(hspace=.0) + plt.grid(linestyle='dotted') + ax3.plot(date,data_field.filter(like='SM_insitu'),color='blue', linewidth=2, label='Soil Moisture') + ax3.set_ylabel('Soil Moisture\n$[cm^3/cm^3]$', fontsize=16) + ax5 = ax3.twinx() + date_agro = pd.to_datetime(df_agro['date'], format='%d.%m.%Y') + agro_sum = df_agro['SUM_NN050'][87:192] + ax5.bar(agro_sum.index, agro_sum, width=0.8, label='Precipitation') + ax3.legend(loc=2, prop={'size': 14}) + ax5.legend(loc=1, prop={'size': 14}) + ax5.set_ylabel('Precipita-\ntion [mm]', fontsize=16) + ax5.set_ylim(0,39) + ax3.set_ylim(0.17,0.38) + 
plt.setp(ax3.get_xticklabels(), visible=False) + plt.tick_params(labelsize=17) + + ax4 = plt.subplot(gs[4], sharex = ax) + plt.tick_params(labelsize=17) + # remove vertical gap between subplots + plt.subplots_adjust(hspace=.0) + plt.grid(linestyle='dotted') + bbch = pd.read_csv('/media/tweiss/Work/z_final_mni_data_2017/bbch_2017.csv',header=[0,1]) + bbch = bbch.set_index(pd.to_datetime(bbch['None']['None'], format='%Y-%m-%d')) + bbch.index = pd.to_datetime(bbch.index) + + lai_field['bbch'] = 0 + + bbch_new = bbch.filter(like=kkkk[0:3]) + for t, tt in enumerate(bbch.index): + if t == 0: + start_date = '2017-03-29' + else: + start_date = bbch.index[t] + try: + end_date = bbch.index[t+1] + except IndexError: + start_date = bbch.index[t] + end_date = '2017-07-30' + mask = (lai_field.index > start_date) & (lai_field.index <= end_date) + + bbbb = lai_field['bbch'].where(~mask, other=2) + if bbch.index[t] < datetime.datetime.strptime('2017-03-29', '%Y-%m-%d'): + pass + else: + if bbch_new.values[t] < 30 and bbch_new.values[t] >= 20: + # lai_field['bbch'] = lai_field['bbch'].where(~mask, other=2) + n2 = max(lai_field['bbch'][mask].index) + elif bbch_new.values[t] < 40 and bbch_new.values[t] >= 30: + # lai_field['bbch'] = lai_field['bbch'].where(~mask, other=3) + n3 = max(lai_field['bbch'][mask].index) + elif bbch_new.values[t] < 50 and bbch_new.values[t] >= 40: + # lai_field['bbch'] = lai_field['bbch'].where(~mask, other=4) + n4 = max(lai_field['bbch'][mask].index) + elif bbch_new.values[t] < 60 and bbch_new.values[t] >= 50: + # lai_field['bbch'] = lai_field['bbch'].where(~mask, other=5) + n5 = max(lai_field['bbch'][mask].index) + elif bbch_new.values[t] < 70 and bbch_new.values[t] >= 60: + # lai_field['bbch'] = lai_field['bbch'].where(~mask, other=6) + n6 = max(lai_field['bbch'][mask].index) + elif bbch_new.values[t] < 80 and bbch_new.values[t] >= 70: + # lai_field['bbch'] = lai_field['bbch'].where(~mask, other=7) + n7 = max(lai_field['bbch'][mask].index) + elif 
bbch_new.values[t] < 90 and bbch_new.values[t] >= 80: + # lai_field['bbch'] = lai_field['bbch'].where(~mask, other=8) + n8 = max(lai_field['bbch'][mask].index) + elif bbch_new.values[t] < 100 and bbch_new.values[t] >= 90: + # lai_field['bbch'] = lai_field['bbch'].where(~mask, other=9) + n9 = max(lai_field['bbch'][mask].index) + # bbch_ = lai_field['bbch'].value_counts().sort_index().values + bbch_ = [n2-datetime.datetime.strptime('2017-03-22', '%Y-%m-%d'),n3-n2,n4-n3,n5-n4,n6-n5,n7-n6,n8-n7,n9-n8] + + #Plot BBCH + hm = lai_field.filter(like='bbch') + label = ['','BBCH',''] + width = 0.3 + legend_items = ['Tillering','Stem elongation','Booting','Heading','Flowering','Fruit development','Ripening', 'Senescence'] + + a_508 = 0 + + aa_508 = mdates.date2num(lai_field['bbch'].index[0]) + + for xxxx, kkkkk in enumerate(bbch_): + a_508 = a_508 + bbch_[xxxx].total_seconds() /60/60/24 + ax4.barh(label,[0,a_508,0],width, label=legend_items[xxxx], left=[0,aa_508,0]) + + aa_508 = mdates.date2num(lai_field['bbch'].index[0]) + a_508 + + + xmin, xmax = ax4.get_xlim() + + ax4.barh(label,[0,200,0],width, left=[0,xmax-1,0], color='white') + ax4.set_ylim(0,1.7) + plt.legend(bbox_to_anchor=(.935, 0.4),ncol=8) + + plt.text(0.98, 0.05, "(a)", transform=ax.transAxes, fontsize=20, horizontalalignment='center', verticalalignment='center') + plt.text(0.98, 0.2, "(b)", transform=ax0.transAxes, fontsize=20, horizontalalignment='center', verticalalignment='center') + plt.text(0.98, 0.2, "(c)", transform=ax2.transAxes, fontsize=20, horizontalalignment='center', verticalalignment='center') + plt.text(0.98, 0.2, "(d)", transform=ax3.transAxes, fontsize=20, horizontalalignment='center', verticalalignment='center') + plt.text(0.98, 0.2, "(e)", transform=ax4.transAxes, fontsize=20, horizontalalignment='center', verticalalignment='center') + + + plt.savefig(plot_output_path+pol+'_all_'+kkk+kkkk, bbox_inches = 'tight') + + plt.close() + diff --git a/kaska/kaska_sar.py b/kaska/kaska_sar.py index 
4271c89..791ee2d 100644 --- a/kaska/kaska_sar.py +++ b/kaska/kaska_sar.py @@ -14,6 +14,7 @@ from scipy.interpolate import interp1d from watercloudmodel import cost_function from scipy.ndimage.filters import gaussian_filter1d +import pdb def save_to_tif(fname, Array, GeoT): if os.path.exists(fname): @@ -28,7 +29,6 @@ def save_to_tif(fname, Array, GeoT): ds.FlushCache() return fname - def get_sar(s1_nc_file): s1_data = namedtuple('s1_data', 'time lat lon satellite relorbit orbitdirection ang_name vv_name, vh_name') data = Dataset(s1_nc_file) @@ -39,26 +39,25 @@ def get_sar(s1_nc_file): time = data['time'][:] lat = data['lat'][:] lon = data['lon'][:] - + vv_name = s1_nc_file.replace('.nc', '_vv.tif') vh_name = s1_nc_file.replace('.nc', '_vh.tif') ang_name = s1_nc_file.replace('.nc', '_ang.tif') - if not os.path.exists(vv_name): - gg = gdal.Open('NETCDF:"%s":sigma0_vv_norm_multi_db'%s1_nc_file) + gg = gdal.Open('NETCDF:"%s":sigma0_vv_multi'%s1_nc_file) geo = gg.GetGeoTransform() - sigma0_vv_norm_multi_db = data['sigma0_vv_norm_multi_db'][:] - save_to_tif(vv_name, sigma0_vv_norm_multi_db, geo) - + sigma0_vv_norm_multi = data['sigma0_vv_multi'][:] + save_to_tif(vv_name, sigma0_vv_norm_multi, geo) + if not os.path.exists(vh_name): - gg = gdal.Open('NETCDF:"%s":sigma0_vh_norm_multi_db'%s1_nc_file) - geo = gg.GetGeoTransform() - sigma0_vh_norm_multi_db = data['sigma0_vh_norm_multi_db'][:] - save_to_tif(vh_name, sigma0_vh_norm_multi_db, geo) + gg = gdal.Open('NETCDF:"%s":sigma0_vh_multi'%s1_nc_file) + geo = gg.GetGeoTransform() + sigma0_vh_norm_multi = data['sigma0_vh_multi'][:] + save_to_tif(vh_name, sigma0_vh_norm_multi, geo) if not os.path.exists(ang_name): gg = gdal.Open('NETCDF:"%s":localIncidenceAngle'%s1_nc_file) - geo = gg.GetGeoTransform() + geo = gg.GetGeoTransform() localIncidenceAngle = data['localIncidenceAngle'][:] save_to_tif(ang_name, localIncidenceAngle, geo) @@ -70,7 +69,7 @@ def read_sar(sar_data, state_mask): ang = reproject_data(sar_data.ang_name, 
output_format="MEM", target_img=state_mask) vv = reproject_data(sar_data.vv_name, output_format="MEM", target_img=state_mask) vh = reproject_data(sar_data.vh_name, output_format="MEM", target_img=state_mask) - + time = [datetime.datetime(1970,1,1) + datetime.timedelta(days=float(i)) for i in sar_data.time] return s1_data(time, sar_data.lat, sar_data.lon, sar_data.satellite, sar_data.relorbit, sar_data.orbitdirection, ang, vv, vh) @@ -87,19 +86,28 @@ def read_s2_lai(s2_lai, s2_cab, s2_cbrown, state_mask): cbrown = reproject_data(s2_cbrown, output_format="MEM", target_img=state_mask) return s2_data(time, lai, cab, cbrown) -def inference_preprocessing(s1_data, s2_data): +def inference_preprocessing(s1_data, s2_data, state_mask, orbit1=None, orbit2=None): """Resample S2 smoothed output to match S1 observations times""" # Move everything to DoY to simplify interpolation - + sar_inference_data = namedtuple('sar_inference_data', 'time lat lon satellite relorbit orbitdirection ang vv vh lai cab cbrown time_mask fields') - + s2_doys = np.array([ int(i.strftime('%j')) for i in s2_data.time]) s1_doys = np.array([ int(i.strftime('%j')) for i in s1_data.time]) - + time_mask = (s1_doys >= s2_doys.min()) & (s1_doys <= s2_doys.max()) - + if orbit1 != None: + rel_orbit1 = s1_data.relorbit==orbit1 + if orbit2 != None: + rel_orbit2 = s1_data.relorbit==orbit2 + xxx = np.logical_and(rel_orbit1,time_mask) + yyy = np.logical_and(rel_orbit2,time_mask) + time_mask = np.logical_or(xxx,yyy) + + + pdb.set_trace() f = interp1d(s2_doys, s2_data.lai.ReadAsArray(), axis=0, bounds_error=False) lai_s1 = f(s1_doys) f = interp1d(s2_doys, s2_data.cab.ReadAsArray(), axis=0, bounds_error=False) @@ -110,9 +118,13 @@ def inference_preprocessing(s1_data, s2_data): lai_max = np.nanmax(s2_data.lai.ReadAsArray(), axis=0) patches = sobel(lai_max)>0.001 fields = label(patches)[0] - sar_inference_data = sar_inference_data(s1_data.time, s1_data.lat, s1_data.lon, - s1_data.satellite, s1_data.relorbit, - 
s1_data.orbitdirection, s1_data.ang, + g = gdal.Open(state_mask) + gg = g.GetRasterBand(1) + ggg = gg.ReadAsArray() + fields[ggg==0]=0 + sar_inference_data = sar_inference_data(s1_data.time, s1_data.lat, s1_data.lon, + s1_data.satellite, s1_data.relorbit, + s1_data.orbitdirection, s1_data.ang, s1_data.vv, s1_data.vh, lai_s1, cab_s1, cbrown_s1, time_mask, fields) return sar_inference_data @@ -123,7 +135,7 @@ def get_prior(s1_data, soilMoisture, soilMoisture_std, soilRoughness, soilRoughn # and the soil roughness prior using the satemask # the assumption of inputs are daily data in geotifs prior = namedtuple('prior', 'time sm_prior sm_std sr_prior sr_std') - + g = gdal.Open(soilMoisture) time = [] for i in range(g.RasterCount): @@ -134,16 +146,16 @@ def get_prior(s1_data, soilMoisture, soilMoisture_std, soilRoughness, soilRoughn sm_std = reproject_data(soilMoisture_std, output_format="MEM", target_img=state_mask) sr_prior = reproject_data(soilRoughness, output_format="MEM", target_img=state_mask) sr_std = reproject_data(soilRoughness_std,output_format="MEM", target_img=state_mask) - + prior_doy = np.array([ int(i.strftime('%j')) for i in time]) s1_doys = np.array([ int(i.strftime('%j')) for i in s1_data.time]) - + f = interp1d(prior_doy, sm_prior.ReadAsArray(), axis=0, bounds_error=False) - + sm_s1 = f(s1_doys) f = interp1d(prior_doy, sm_std.ReadAsArray(), axis=0, bounds_error=False) sm_std_s1 = f(s1_doys) - + f = interp1d(prior_doy, sr_prior.ReadAsArray(), axis=0, bounds_error=False) sr_s1 = f(s1_doys) f = interp1d(prior_doy, sr_std.ReadAsArray(), axis=0, bounds_error=False) @@ -172,9 +184,9 @@ def quad_approx_solver(a, b, c, theta, alphas): def do_one_pixel_field(sar_inference_data, vv, vh, lai, theta, time, sm, sm_std, sr, sr_std): - + orbits = sar_inference_data.relorbit[sar_inference_data.time_mask] - + lais = [] srs = [] alphas = [] @@ -186,9 +198,9 @@ def do_one_pixel_field(sar_inference_data, vv, vh, lai, theta, time, sm, sm_std, orbit_mask = orbits == orbit 
ovv, ovh, olai, otheta, otime = vv[orbit_mask], vh[orbit_mask], lai[orbit_mask], theta[orbit_mask], time[orbit_mask] osm, osm_std, osro, osro_std = sm[orbit_mask], sm_std[orbit_mask], sr[orbit_mask], sr_std[orbit_mask] - + olai_std = np.ones_like(olai)*0.05 - + alpha = fresnel(mv2eps(1.99, 38.9, 11.5, osm), otheta) alpha_std = np.ones_like(alpha)*0.2 @@ -203,19 +215,19 @@ def do_one_pixel_field(sar_inference_data, vv, vh, lai, theta, time, sm, sm_std, prior_unc = np.concatenate([[10., ]*6, alpha_std, osro_std, olai_std]) x0 = np.concatenate([xvv, xvh, alpha, osro, olai]) - + bounds = ( [[None, None]] * 6 + [[0.1, 3.3]] * olai.shape[0] + [[0, .03]] * olai.shape[0] + [[0, 8]] * olai.shape[0] - ) - - gamma = [1000, 1000] - retval = minimize(cost_function, - x0, - args=(ovh, ovv, otheta, gamma, prior_mean, prior_unc), - jac=True, + ) + # pdb.set_trace() + gamma = [500, 500] + retval = minimize(cost_function, + x0, + args=(ovh, ovv, otheta, gamma, prior_mean, prior_unc), + jac=True, bounds = bounds, options={"disp": False},) @@ -228,46 +240,46 @@ def do_one_pixel_field(sar_inference_data, vv, vh, lai, theta, time, sm, sm_std, sms.append(sols) times.append(otime) ps.append(retval.x[:6]) - + order = np.argsort(np.hstack(times)) times = np.hstack(times )[order] lais = np.hstack(lais )[order] srs = np.hstack(srs )[order] - sms = np.hstack(sms )[order].real + sms = np.hstack(sms )[order].real return times, lais, srs, sms, np.array(ps) - + def do_inversion(sar_inference_data, prior, state_mask, segment=False): - + orbits = sar_inference_data.relorbit[sar_inference_data.time_mask] uorbits = np.unique(orbits) - if segment: + if segment: out_shape = sar_inference_data.lai[sar_inference_data.time_mask].shape lai_outputs = np.zeros(out_shape ) sm_outputs = np.zeros(out_shape ) sr_outputs = np.zeros(out_shape ) - - ps_shape = (len(uorbits),) + sar_inference_data.lai.shape[1:] - + + ps_shape = (len(uorbits),) + sar_inference_data.lai.shape[1:] + Avv_outputs = np.zeros(ps_shape) 
Bvv_outputs = np.zeros(ps_shape) Cvv_outputs = np.zeros(ps_shape) - + Avh_outputs = np.zeros(ps_shape) Bvh_outputs = np.zeros(ps_shape) Cvh_outputs = np.zeros(ps_shape) fields = np.unique(sar_inference_data.fields)[1:] for field in fields: - # get per field data + # get per field data # with time mask as well field_mask = sar_inference_data.fields == field time = np.array(sar_inference_data.time)[sar_inference_data.time_mask] - + lai = sar_inference_data.lai[sar_inference_data.time_mask][:, field_mask] sm = prior.sm_prior[sar_inference_data.time_mask][:, field_mask] sm_std= prior.sm_std [sar_inference_data.time_mask][:, field_mask] - + sm[np.isnan(sm)] = 0.2 sm_std[sm_std==0] = 0.5 sm_std[np.isnan(sm_std)] = 0.5 @@ -276,17 +288,17 @@ def do_inversion(sar_inference_data, prior, state_mask, segment=False): sr_std= prior.sr_std [sar_inference_data.time_mask][:, field_mask] sr[np.isnan(sr)] = 0.03 sr_std[np.isnan(sr_std)] = 1 - + vv = sar_inference_data.vv.ReadAsArray()[sar_inference_data.time_mask][:, field_mask] vh = sar_inference_data.vh.ReadAsArray()[sar_inference_data.time_mask][:, field_mask] theta = sar_inference_data.ang.ReadAsArray()[sar_inference_data.time_mask][:, field_mask] - + lai = np.nanmean(lai, axis=1) vv = np.nanmean(vv, axis=1) vh = np.nanmean(vh, axis=1) theta = np.nanmean(theta, axis=1) - + sm = np.nanmean(sm, axis=1) sm_std = np.nanmean(sm_std, axis=1) @@ -297,13 +309,13 @@ def do_inversion(sar_inference_data, prior, state_mask, segment=False): vv = 10 * np.log10(vv) vh = np.maximum(vh, 0.0001) vh = 10 * np.log10(vh) - + times, lais, srs, sms, ps = do_one_pixel_field(sar_inference_data, vv, vh, lai, theta, time, sm, sm_std, sr, sr_std) - + lai_outputs[:, field_mask] = lais[...,None] sr_outputs[:, field_mask] = srs [...,None] sm_outputs[:, field_mask] = sms [...,None] - + for i in range(len(uorbits)): Avv_outputs[i, field_mask] = ps[i,0] Bvv_outputs[i, field_mask] = ps[i,1] @@ -312,35 +324,35 @@ def do_inversion(sar_inference_data, prior, 
state_mask, segment=False): Bvh_outputs[i, field_mask] = ps[i,4] Cvh_outputs[i, field_mask] = ps[i,5] else: - mask = gdal.Open(state_mask).ReadAsArray() + mask = gdal.Open(state_mask).ReadAsArray() xs, ys = np.where(mask) - + out_shape = sar_inference_data.lai[sar_inference_data.time_mask].shape lai_outputs = np.zeros(out_shape ) sm_outputs = np.zeros(out_shape ) sr_outputs = np.zeros(out_shape ) - - ps_shape = (len(uorbits),) + sar_inference_data.lai.shape[1:] - + + ps_shape = (len(uorbits),) + sar_inference_data.lai.shape[1:] + Avv_outputs = np.zeros(ps_shape) Bvv_outputs = np.zeros(ps_shape) Cvv_outputs = np.zeros(ps_shape) - + Avh_outputs = np.zeros(ps_shape) Bvh_outputs = np.zeros(ps_shape) Cvh_outputs = np.zeros(ps_shape) - + for i in range(len(xs)): indx, indy = xs[i], ys[i] - + # field_mask = slice(None, None), slice(indx, indx+1), slice(indy, indy+1) time = np.array(sar_inference_data.time)[sar_inference_data.time_mask] lai = sar_inference_data.lai[sar_inference_data.time_mask][:, indx, indy ] sm = prior.sm_prior[sar_inference_data.time_mask][:, indx, indy ] sm_std= prior.sm_std [sar_inference_data.time_mask][:, indx, indy ] - + sm[np.isnan(sm)] = 0.2 sm_std[sm_std==0] = 0.5 sm_std[np.isnan(sm_std)] = 0.5 @@ -349,7 +361,7 @@ def do_inversion(sar_inference_data, prior, state_mask, segment=False): sr_std= prior.sr_std [sar_inference_data.time_mask][:, indx, indy ] sr[np.isnan(sr)] = 0.03 sr_std[np.isnan(sr_std)] = 1 - + vv = sar_inference_data.vv.ReadAsArray()[sar_inference_data.time_mask][:, indx, indy ] vh = sar_inference_data.vh.ReadAsArray()[sar_inference_data.time_mask][:, indx, indy ] theta = sar_inference_data.ang.ReadAsArray()[sar_inference_data.time_mask][:, indx, indy ] @@ -358,13 +370,13 @@ def do_inversion(sar_inference_data, prior, state_mask, segment=False): vv = 10 * np.log10(vv) vh = np.maximum(vh, 0.0001) vh = 10 * np.log10(vh) - + times, lais, srs, sms, ps = do_one_pixel_field(sar_inference_data, vv, vh, lai, theta, time, sm, sm_std, sr, 
sr_std) - + lai_outputs[:, indx, indy] = lais - sr_outputs[:, indx, indy] = srs - sm_outputs[:, indx, indy] = sms - + sr_outputs[:, indx, indy] = srs + sm_outputs[:, indx, indy] = sms + for i in range(len(uorbits)): Avv_outputs[i, indx, indy] = ps[i,0] Bvv_outputs[i, indx, indy] = ps[i,1] @@ -401,10 +413,10 @@ def save_ps_output(fname, Array, GeoT, projction, orbit): class KaSKASAR(object): - """A class to process Sentinel 1 SAR data using S2 data as + """A class to process Sentinel 1 SAR data using S2 data as an input""" - def __init__(self, s1_ncfile, state_mask, s2_lai, s2_cab, s2_cbrown, sm_prior, sm_std, sr_prior ,sr_std): + def __init__(self, s1_ncfile, state_mask, s2_lai, s2_cab, s2_cbrown, sm_prior, sm_std, sr_prior ,sr_std,orbit1=None,orbit2=None): self.s1_ncfile = s1_ncfile self.state_mask = state_mask self.s2_lai = s2_lai @@ -414,36 +426,43 @@ def __init__(self, s1_ncfile, state_mask, s2_lai, s2_cab, s2_cbrown, sm_prior, self.sm_std = sm_std self.sr_prior = sr_prior self.sr_std = sr_std - + self.orbit1 = None + self.orbit2 = None + if orbit1 != None: + self.orbit1 = orbit1 + if orbit2 != None: + self.orbit2 = orbit2 + def sentinel1_inversion(self, segment=False): - sar = get_sar(s1_ncfile) + sar = get_sar(s1_ncfile) s1_data = read_sar(sar, self.state_mask) - s2_data = s2_data = read_s2_lai(self.s2_lai, self.s2_cab, self.s2_cbrown, self.state_mask) + s2_data = read_s2_lai(self.s2_lai, self.s2_cab, self.s2_cbrown, self.state_mask) prior = get_prior(s1_data, self.sm_prior, self.sm_std, self.sr_prior, self.sr_std, self.state_mask) - sar_inference_data = inference_preprocessing(s1_data, s2_data) + sar_inference_data = inference_preprocessing(s1_data, s2_data, self.state_mask,self.orbit1,self.orbit2) + lai_outputs, sr_outputs, sm_outputs, \ Avv_outputs, Bvv_outputs, Cvv_outputs, \ Avh_outputs, Bvh_outputs, Cvh_outputs, uorbits = do_inversion(sar_inference_data, prior, self.state_mask, segment) - - gg = 
gdal.Open('NETCDF:"%s":sigma0_vh_norm_multi_db'%self.s1_ncfile) - geo = gg.GetGeoTransform() + + gg = gdal.Open('NETCDF:"%s":sigma0_vv_multi'%self.s1_ncfile) + geo = gg.GetGeoTransform() projction = 'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]' - + time = [i.strftime('%Y-%m-%d') for i in np.array(sar_inference_data.time)[sar_inference_data.time_mask]] - + sm_name = self.s1_ncfile.replace('.nc', '_sar_sm.tif') sr_name = self.s1_ncfile.replace('.nc', '_sar_sr.tif') lai_name = self.s1_ncfile.replace('.nc', '_sar_lai.tif') save_output(sm_name, sm_outputs, geo, projction, time) - save_output(sr_name, sr_outputs, geo, projction, time) - save_output(lai_name, lai_outputs, geo, projction, time) + save_output(sr_name, sr_outputs, geo, projction, time) + save_output(lai_name, lai_outputs, geo, projction, time) Avv_name = self.s1_ncfile.replace('.nc', '_Avv.tif') Bvv_name = self.s1_ncfile.replace('.nc', '_Bvv.tif') Cvv_name = self.s1_ncfile.replace('.nc', '_Cvv.tif') - + Avh_name = self.s1_ncfile.replace('.nc', '_Avh.tif') Bvh_name = self.s1_ncfile.replace('.nc', '_Bvh.tif') Cvh_name = self.s1_ncfile.replace('.nc', '_Cvh.tif') @@ -457,18 +476,42 @@ def sentinel1_inversion(self, segment=False): if __name__ == '__main__': - s1_ncfile = '/data/nemesis/kaska-sar_quick/S1_LMU_site_2017_new.nc' - state_mask = "/home/ucfajlg/Data/python/KaFKA_Validation/LMU/carto/ESU.tif" - s2_folder = "/home/ucfajlg/Data/python/KaFKA_Validation/LMU/s2_obs/" - s2_lai = f"{s2_folder:s}/outputs/lai.tif" - s2_cab = f"{s2_folder:s}/outputs/cab.tif" - s2_cbrown = f"{s2_folder:s}/outputs/cbrown.tif" - - sm_prior = '/data/nemesis/kaska-sar_quick/sm_prior.tif' - sm_std = '/data/nemesis/kaska-sar_quick/sm_std.tif' - sr_prior = '/data/nemesis/kaska-sar_quick/sr_prior.tif' - sr_std = 
'/data/nemesis/kaska-sar_quick/sr_std.tif' - sarsar = KaSKASAR(s1_ncfile, state_mask, s2_lai, s2_cab, s2_cbrown, sm_prior, sm_std, sr_prior ,sr_std) + # s1_ncfile = '/data/nemesis/kaska-sar_quick/S1_LMU_site_2017_new.nc' + # state_mask = "/home/ucfajlg/Data/python/KaFKA_Validation/LMU/carto/ESU.tif" + # s2_folder = "/home/ucfajlg/Data/python/KaFKA_Validation/LMU/s2_obs/" + # s2_lai = f"{s2_folder:s}/outputs/lai.tif" + # s2_cab = f"{s2_folder:s}/outputs/cab.tif" + # s2_cbrown = f"{s2_folder:s}/outputs/cbrown.tif" + + # sm_prior = '/data/nemesis/kaska-sar_quick/sm_prior.tif' + # sm_std = '/data/nemesis/kaska-sar_quick/sm_std.tif' + # sr_prior = '/data/nemesis/kaska-sar_quick/sr_prior.tif' + # sr_std = '/data/nemesis/kaska-sar_quick/sr_std.tif' + # sarsar = KaSKASAR(s1_ncfile, state_mask, s2_lai, s2_cab, s2_cbrown, sm_prior, sm_std, sr_prior ,sr_std) + + # s1_ncfile = '/media/nas_data/Thomas/S1/processed/MNI_2017/MNI_2017.nc' + + aggregation = '_point' + # aggregation = '_Field_buffer_30' + # aggregation = '_buffer_100' + # aggregation = '_buffer_50' + aggregation = '_buffer_30' + + s1_ncfile = '/media/tweiss/Daten/data_AGU/'+aggregation+'/MNI_2017_new_final.nc' + state_mask = '/media/tweiss/Work/z_final_mni_data_2017/ESU'+aggregation+'.tif' + s2_folder = "/media/tweiss/Daten/test_kaska/data/" + s2_lai = f"{s2_folder:s}/lai.tif" + s2_cab = f"{s2_folder:s}/cab.tif" + s2_cbrown = f"{s2_folder:s}/cbrown.tif" + + sm_prior = f'{s2_folder:s}/sm_prior.tif' + sm_std = f'{s2_folder:s}/sm_std.tif' + sr_prior = f'{s2_folder:s}/sr_prior.tif' + sr_std = f'{s2_folder:s}/sr_std.tif' + sarsar = KaSKASAR(s1_ncfile, state_mask, s2_lai, s2_cab, s2_cbrown, sm_prior, sm_std, sr_prior ,sr_std) + + + sarsar.sentinel1_inversion(True) diff --git a/kaska/kaska_sar_ssrt.py b/kaska/kaska_sar_ssrt.py new file mode 100644 index 0000000..36c8e78 --- /dev/null +++ b/kaska/kaska_sar_ssrt.py @@ -0,0 +1,918 @@ +#!/usr/bin/env python + +import os +import osr +import gdal +import datetime +import numpy 
as np +from netCDF4 import Dataset +from scipy.ndimage import label +from utils import reproject_data +from skimage.filters import sobel +from collections import namedtuple +from scipy.optimize import minimize +from scipy.interpolate import interp1d +# from watercloudmodel import cost_function +from watercloudmodel import cost_function2 +from scipy.ndimage.filters import gaussian_filter1d +import pdb +from z_helper import * +import matplotlib.pyplot as plt + +def save_to_tif(fname, Array, GeoT): + if os.path.exists(fname): + os.remove(fname) + ds = gdal.GetDriverByName('GTiff').Create(fname, Array.shape[2], Array.shape[1], Array.shape[0], gdal.GDT_Float32) + ds.SetGeoTransform(GeoT) + wkt = 'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]' + ds.SetProjection(wkt) + for i, image in enumerate(Array): + # ds.GetRasterBand(i+1).SetMetadata({'date': prior_time[i]}) + ds.GetRasterBand(i+1).WriteArray( image ) + ds.FlushCache() + return fname + +def get_sar(s1_nc_file): + s1_data = namedtuple('s1_data', 'time lat lon satellite relorbit orbitdirection ang_name vv_name, vh_name') + data = Dataset(s1_nc_file) + relorbit = data['relorbit'][:] + localIncidenceAngle = data['localIncidenceAngle'][:] + satellite = data['satellite'][:] + orbitdirection = data['orbitdirection'][:] + time = data['time'][:] + lat = data['lat'][:] + lon = data['lon'][:] + + vv_name = s1_nc_file.replace('.nc', '_vv.tif') + vh_name = s1_nc_file.replace('.nc', '_vh.tif') + ang_name = s1_nc_file.replace('.nc', '_ang.tif') + if not os.path.exists(vv_name): + gg = gdal.Open('NETCDF:"%s":sigma0_vv_multi'%s1_nc_file) + geo = gg.GetGeoTransform() + sigma0_vv_norm_multi = data['sigma0_vv_multi'][:] + save_to_tif(vv_name, sigma0_vv_norm_multi, geo) + + if not os.path.exists(vh_name): + gg = 
gdal.Open('NETCDF:"%s":sigma0_vh_multi'%s1_nc_file) + geo = gg.GetGeoTransform() + sigma0_vh_norm_multi = data['sigma0_vh_multi'][:] + save_to_tif(vh_name, sigma0_vh_norm_multi, geo) + + if not os.path.exists(ang_name): + gg = gdal.Open('NETCDF:"%s":localIncidenceAngle'%s1_nc_file) + geo = gg.GetGeoTransform() + localIncidenceAngle = data['localIncidenceAngle'][:] + save_to_tif(ang_name, localIncidenceAngle, geo) + + return s1_data(time, lat, lon, satellite, relorbit, orbitdirection, ang_name, vv_name, vh_name) + +def read_sar(sar_data, state_mask): + s1_data = namedtuple('s1_data', 'time lat lon satellite relorbit orbitdirection ang vv vh') + + ang = reproject_data(sar_data.ang_name, output_format="MEM", target_img=state_mask) + vv = reproject_data(sar_data.vv_name, output_format="MEM", target_img=state_mask) + vh = reproject_data(sar_data.vh_name, output_format="MEM", target_img=state_mask) + + time = [datetime.datetime(1970,1,1) + datetime.timedelta(days=float(i)) for i in sar_data.time] + return s1_data(time, sar_data.lat, sar_data.lon, sar_data.satellite, sar_data.relorbit, sar_data.orbitdirection, ang, vv, vh) + +def read_s2_lai(s2_lai, s2_cab, s2_cbrown, state_mask): + s2_data = namedtuple('s2_lai', 'time lai cab cbrown') + g = gdal.Open(s2_lai) + time = [] + for i in range(g.RasterCount): + gg = g.GetRasterBand(i+1) + meta = gg.GetMetadata() + time.append(datetime.datetime.strptime(meta['DoY'], '%Y%j')) + lai = reproject_data(s2_lai, output_format="MEM", target_img=state_mask) + cab = reproject_data(s2_cab, output_format="MEM", target_img=state_mask) + cbrown = reproject_data(s2_cbrown, output_format="MEM", target_img=state_mask) + return s2_data(time, lai, cab, cbrown) + +def inference_preprocessing(s1_data, s2_data, state_mask, orbit1=None, orbit2=None): + """Resample S2 smoothed output to match S1 observations + times""" + # Move everything to DoY to simplify interpolation + + sar_inference_data = namedtuple('sar_inference_data', 'time lat lon satellite 
relorbit orbitdirection ang vv vh lai cab cbrown time_mask fields') + + + s2_doys = np.array([ int(i.strftime('%j')) for i in s2_data.time]) + s1_doys = np.array([ int(i.strftime('%j')) for i in s1_data.time]) + # s1_doys = s1_doys[:112] + # pdb.set_trace() + + time_mask = (s1_doys >= s2_doys.min()) & (s1_doys <= s2_doys.max()) + if orbit1 != None: + rel_orbit1 = s1_data.relorbit==orbit1 + if orbit2 != None: + rel_orbit2 = s1_data.relorbit==orbit2 + xxx = np.logical_and(rel_orbit1,time_mask) + yyy = np.logical_and(rel_orbit2,time_mask) + time_mask = np.logical_or(xxx,yyy) + + f = interp1d(s2_doys, s2_data.lai.ReadAsArray(), axis=0, bounds_error=False) + lai_s1 = f(s1_doys) + f = interp1d(s2_doys, s2_data.cab.ReadAsArray(), axis=0, bounds_error=False) + cab_s1 = f(s1_doys) + f = interp1d(s2_doys, s2_data.cbrown.ReadAsArray(), axis=0, bounds_error=False) + cbrown_s1 = f(s1_doys) + # segmentation + lai_max = np.nanmax(s2_data.lai.ReadAsArray(), axis=0) + patches = sobel(lai_max)>0.001 + fields = label(patches)[0] + + + g = gdal.Open(state_mask) + gg = g.GetRasterBand(1) + ggg = gg.ReadAsArray() + fields[ggg==0]=0 + sar_inference_data = sar_inference_data(s1_data.time, s1_data.lat, s1_data.lon, + s1_data.satellite, s1_data.relorbit, + s1_data.orbitdirection, s1_data.ang, + s1_data.vv, s1_data.vh, lai_s1, cab_s1, cbrown_s1, time_mask, fields) + + return sar_inference_data + + +def get_prior(s1_data, soilMoisture, soilMoisture_std, soilRoughness, soilRoughness_std, state_mask): + # this is the function to reading the soil moisture prior + # and the soil roughness prior using the satemask + # the assumption of inputs are daily data in geotifs + prior = namedtuple('prior', 'time sm_prior sm_std sr_prior sr_std') + + g = gdal.Open(soilMoisture) + time = [] + for i in range(g.RasterCount): + gg = g.GetRasterBand(i+1) + meta = gg.GetMetadata() + time.append(datetime.datetime.strptime(meta['date'], '%Y-%m-%d')) + sm_prior = reproject_data(soilMoisture, output_format="MEM", 
target_img=state_mask) + sm_std = reproject_data(soilMoisture_std, output_format="MEM", target_img=state_mask) + sr_prior = reproject_data(soilRoughness, output_format="MEM", target_img=state_mask) + sr_std = reproject_data(soilRoughness_std,output_format="MEM", target_img=state_mask) + + prior_doy = np.array([ int(i.strftime('%j')) for i in time]) + s1_doys = np.array([ int(i.strftime('%j')) for i in s1_data.time]) + + f = interp1d(prior_doy, sm_prior.ReadAsArray(), axis=0, bounds_error=False) + + sm_s1 = f(s1_doys) + f = interp1d(prior_doy, sm_std.ReadAsArray(), axis=0, bounds_error=False) + sm_std_s1 = f(s1_doys) + + f = interp1d(prior_doy, sr_prior.ReadAsArray(), axis=0, bounds_error=False) + sr_s1 = f(s1_doys) + f = interp1d(prior_doy, sr_std.ReadAsArray(), axis=0, bounds_error=False) + sr_std_s1 = f(s1_doys) + + return prior(time, sm_s1, sm_std_s1, sr_s1, sr_std_s1) + +# def fresnel(eps, theta): +# theta = np.deg2rad(theta) +# num = (eps-1)*(np.sin(theta)**2 - eps*(1+np.sin(theta)**2)) +# den = eps*np.cos(theta) + np.sqrt(eps - np.sin(theta)**2) +# den = den**2 +# return np.abs(num/den) + +# def mv2eps(a, b, c, mv): +# eps = a + b * mv + c * mv**2 +# return eps + +# def quad_approx_solver(a, b, c, theta, alphas): +# x = np.arange(0.01, 0.5, 0.01) +# p = np.polyfit(x, fresnel(mv2eps(a, b, c, x),theta.mean()), 2) +# # 2nd order polynomial +# #solve +# solutions = [np.roots([p[0], p[1], p[2]-aa]) for aa in alphas] +# return solutions + + +# def do_one_pixel_field(sar_inference_data, vv, vh, lai, theta, time, sm, sm_std, sr, sr_std, height): + +# orbits = sar_inference_data.relorbit[sar_inference_data.time_mask] + +# lais = [] +# coefs = [] +# alphas = [] +# sms = [] +# ps = [] +# times = [] +# uorbits = np.unique(orbits) +# segmentation_by_orbit = 1 + +# if segmentation_by_orbit == 1: +# for orbit in uorbits: +# orbit_mask = orbits == orbit +# ovv, ovh, olai, otheta, otime = vv[orbit_mask], vh[orbit_mask], lai[orbit_mask], theta[orbit_mask], time[orbit_mask] +# 
osm, osm_std, osro, osro_std = sm[orbit_mask], sm_std[orbit_mask], sr[orbit_mask], sr_std[orbit_mask] + +# oheight = height[orbit_mask] + +# olai_std = np.ones_like(olai)*0.05 + +# alpha = osm +# alpha_std = osm_std +# mv = alpha * 1 +# coef = osro +# # coef[:] = 0.5 + +# # prior_mean = np.concatenate([alpha, coef, olai, oheight]) +# # prior_unc = np.concatenate([alpha_std, osro_std, olai_std, oheight]) +# # x0 = np.concatenate([mv, coef, olai, oheight]) + +# # bounds = ( +# # [[0.01, 0.5]] * olai.shape[0] +# # + [[0.01, 1.5]] * olai.shape[0] +# # + [[0, 8]] * olai.shape[0] +# # + [[0, 1]] * olai.shape[0] +# # ) + +# prior_mean = np.concatenate([alpha,coef]) +# prior_unc = np.concatenate([alpha_std,osro_std]) +# x0 = np.concatenate([mv,coef]) +# data = np.concatenate([oheight,olai]) +# bounds = ( +# [[0.1, 0.5]] * olai.shape[0] +# + [[0.01, 1.5]] * olai.shape[0] +# ) + +# gamma = [500, 500] + +# retval = minimize(cost_function2, +# x0, +# args=(ovh, ovv, otheta, gamma, prior_mean, prior_unc, data), +# jac=True, +# bounds = bounds, +# options={"disp": False},) + +# # posterious_lai = retval.x[2*len(olai) : 3*len(olai)] +# posterious_coef = retval.x[len(olai) : +2*len(olai)] +# posterious_mv = retval.x[ : +len(olai)] +# # lais.append(posterious_lai) +# coefs.append(posterious_coef) +# sms.append(posterious_mv) + +# times.append(otime) + +# order = np.argsort(np.hstack(times)) +# times = np.hstack(times )[order] +# # lais = np.hstack(lais )[order] +# lais=0 +# coefs = np.hstack(coefs )[order] +# # coefs=0 +# sms = np.hstack(sms )[order].real +# else: +# ovv, ovh, olai, otheta, otime = vv, vh, lai, theta, time +# osm, osm_std, osro, osro_std = sm, sm_std, sr, sr_std + +# oheight = height + +# olai_std = np.ones_like(olai)*0.05 + +# alpha = osm +# alpha_std = osm_std +# mv = alpha * 1 +# coef = osro + +# prior_mean = np.concatenate([alpha,coef]) +# prior_unc = np.concatenate([alpha_std,osro_std]) +# x0 = np.concatenate([mv,coef]) +# data = np.concatenate([oheight,olai]) 
+# bounds = ( +# [[0.1, 0.5]] * olai.shape[0] +# + [[0.01, 1.5]] * olai.shape[0] +# ) + +# gamma = [500, 500] + +# retval = minimize(cost_function2, +# x0, +# args=(ovh, ovv, otheta, gamma, prior_mean, prior_unc, data), +# jac=True, +# bounds = bounds, +# options={"disp": False},) + +# # posterious_lai = retval.x[2*len(olai) : 3*len(olai)] +# posterious_coef = retval.x[len(olai) : +2*len(olai)] +# posterious_mv = retval.x[ : +len(olai)] +# # lais.append(posterious_lai) +# coefs.append(posterious_coef) +# sms.append(posterious_mv) + +# times.append(otime) + + +# order = np.argsort(np.hstack(times)) +# times = np.hstack(times )[order] +# # lais = np.hstack(lais )[order] +# lais=0 +# coefs = np.hstack(coefs )[order] +# # coefs=0 +# sms = np.hstack(sms )[order].real + +# return times, lais, coefs, sms + +def do_one_pixel_field(data_field, vv, vh, lai, theta, time, sm, sm_std, coef, coef_std, height, orbits, unc): + + lais = [] + coefs = [] + sms = [] + times = [] + + uorbits = np.unique(orbits) + uorbits = np.array([95]) + for orbit in uorbits: + # for jj in range(len(vv)): + # pdb.set_trace() + # orbit_mask = orbits == orbit + # orbit_mask = (orbits == 95) | (orbits == 117) + orbit_mask = (orbits == 44) | (orbits == 95) | (orbits == 117) | (orbits == 168) + # orbit_mask = (orbits == 95) + # orbit_mask = (orbits == 44) | (orbits == 95) | (orbits == 117) + ovv, ovh, olai, otheta, otime = vv[orbit_mask], vh[orbit_mask], lai[orbit_mask], theta[orbit_mask], time[orbit_mask] + osm, osm_std, oscoef, oscoef_std = sm[orbit_mask], sm_std[orbit_mask], coef[orbit_mask], coef_std[orbit_mask] + + oheight = height[orbit_mask] + + # ovv, ovh, olai, otheta, otime = np.array([vv[jj]]), np.array([vh[jj]]), np.array([lai[jj]]), np.array([theta[jj]]), np.array([time[jj]]) + # osm, osm_std, oscoef, oscoef_std = np.array([sm[jj]]), np.array([sm_std[jj]]), np.array([coef[jj]]), np.array([coef_std[jj]]) + + # oheight = np.array([height[jj]]) + + + + # pdb.set_trace() + olai_std = 
np.ones_like(olai)*0.05 + + alpha = osm + alpha_std = np.ones_like(alpha)*10 + alpha_std = osm_std + # pdb.set_trace() + prior_mean = np.concatenate([alpha,oscoef]) + prior_unc = np.concatenate([alpha_std,oscoef_std]) + x0 = np.concatenate([alpha,oscoef]) + data = np.concatenate([oheight,olai]) + bounds = ( + # [[2.5, 30]] * olai.shape[0] + [[0.01, 0.5]] * olai.shape[0] + + [[0.0000001, 3]] * olai.shape[0] + ) + + gamma = [500, 500] + + retval = minimize(cost_function2, + x0, + args=(ovh, ovv, otheta, gamma, prior_mean, prior_unc, data, unc), + jac=True, + bounds = bounds, + options={"disp": True},) + + # posterious_lai = retval.x[2*len(olai) : 3*len(olai)] + posterious_coef = retval.x[len(olai) : +2*len(olai)] + posterious_mv = retval.x[ : +len(olai)] + # lais.append(posterious_lai) + coefs.append(posterious_coef) + # x = np.arange(0.01, 0.5, 0.001) + # xx = _calc_eps(x) + # sols=[] + # for i in posterious_mv: + # p, pp = find_nearest(xx,i) + # sols.append(x[pp]) + # sols = np.array(sols) + + sms.append(posterious_mv) + # sms.append(sols) + times.append(otime) + + order = np.argsort(np.hstack(times)) + times = np.hstack(times )[order] + # lais = np.hstack(lais )[order] + lais=0 + coefs = np.hstack(coefs )[order] + # coefs=0 + sms = np.hstack(sms )[order].real + orbit_mask = (orbits == 44) | (orbits == 95) | (orbits == 117) | (orbits == 168) + return times, lais, coefs, sms, orbit_mask + + +def do_inversion(sar_inference_data, prior, state_mask, segment=False): + + orbits = sar_inference_data.relorbit[sar_inference_data.time_mask] + uorbits = np.unique(orbits) + if segment: + out_shape = sar_inference_data.lai[sar_inference_data.time_mask].shape + lai_outputs = np.zeros(out_shape ) + sm_outputs = np.zeros(out_shape ) + coef_outputs = np.zeros(out_shape ) + + fields = np.unique(sar_inference_data.fields)[1:] + # pdb.set_trace() + pixel = ['_Field_buffer_30','','_buffer_30','_buffer_50','_buffer_100'] + pixel = ['_Field_buffer_30'] + fields = ['301','508','542'] + 
fields = ['301'] + # ESU names + esus = ['high', 'low', 'med', 'mean'] + esus = ['mean'] + for pixels in pixel: + print(pixels) + path_ESU = '/media/tweiss/Work/z_final_mni_data_2017/' + name_shp = 'ESU'+pixels+'.shp' + name_ESU = 'ESU'+pixels+'.tif' + + for esu in esus: + for field in fields: + field2 = field + '_' + esu + g = gdal.Open(os.path.join(path_ESU, name_ESU)) + state_mask = g.ReadAsArray().astype(np.int) + + if pixels == '_Field_buffer_30': + if field == '515': + mask_value = 4 + state_mask = state_mask==mask_value + elif field == '508': + mask_value = 27 + state_mask = state_mask==mask_value + elif field == '542': + mask_value = 8 + state_mask = state_mask==mask_value + elif field == '319': + mask_value = 67 + state_mask = state_mask==mask_value + elif field == '301': + mask_value = 87 + state_mask = state_mask==mask_value + else: + if field == '515' and esu == 'high': + mask_value = 1 + state_mask = state_mask==mask_value + elif field == '515' and esu == 'med': + mask_value = 2 + state_mask = state_mask==mask_value + elif field == '515' and esu == 'low': + mask_value = 3 + state_mask = state_mask==mask_value + elif field == '508' and esu == 'high': + mask_value = 4 + state_mask = state_mask==mask_value + elif field == '508' and esu == 'med': + mask_value = 5 + state_mask = state_mask==mask_value + elif field == '508' and esu == 'low': + mask_value = 6 + state_mask = state_mask==mask_value + elif field == '542' and esu == 'high': + mask_value = 7 + state_mask = state_mask==mask_value + elif field == '542' and esu == 'med': + mask_value = 8 + state_mask = state_mask==mask_value + elif field == '542' and esu == 'low': + mask_value = 9 + state_mask = state_mask==mask_value + elif field == '319' and esu == 'high': + mask_value = 10 + state_mask = state_mask==mask_value + elif field == '319' and esu == 'med': + mask_value = 11 + state_mask = state_mask==mask_value + elif field == '319' and esu == 'low': + mask_value = 12 + state_mask = 
state_mask==mask_value + elif field == '301' and esu == 'high': + mask_value = 13 + state_mask = state_mask==mask_value + elif field == '301' and esu == 'med': + mask_value = 14 + state_mask = state_mask==mask_value + elif field == '301' and esu == 'low': + mask_value = 15 + state_mask = state_mask==mask_value + elif field == '515' and esu == 'mean': + m = np.ma.array(state_mask,mask=((state_mask==1) | (state_mask==2) | (state_mask==3))) + state_mask = m.mask + elif field == '508' and esu == 'mean': + m = np.ma.array(state_mask,mask=((state_mask==4) | (state_mask==5) | (state_mask==6))) + state_mask = m.mask + elif field == '542' and esu == 'mean': + m = np.ma.array(state_mask,mask=((state_mask==7) | (state_mask==8) | (state_mask==9))) + state_mask = m.mask + elif field == '319' and esu == 'mean': + m = np.ma.array(state_mask,mask=((state_mask==10) | (state_mask==11) | (state_mask==12))) + state_mask = m.mask + elif field == '301' and esu == 'mean': + m = np.ma.array(state_mask,mask=((state_mask==13) | (state_mask==14) | (state_mask==15))) + state_mask = m.mask + + + # get per field data + # with time mask as well + # field_mask2 = sar_inference_data.fields == field + field_mask = state_mask + + pre_processing = ['multi'] + aggregation = ['_buffer_100'] + canopy_list = ['turbid_isotropic'] + surface_list = ['Oh04'] + opt_mod = ['time_variant'] + + for p in pre_processing: + + for pp in aggregation: + + versions = ['','everything'] + ver = ['',''] + ver2 = ['',''] + ver3 = ['',''] + + for i, ii in enumerate(versions): + + if ii == 'everything': + orbit_list = [None] + orbit1=None + orbit2=None + orbit3=None + orbit4=None + plot_output_path = '/media/tweiss/Work/paper2/z_dense_s1_time_series_n7'+p+pp+'_all'+'/' + csv_output_path = plot_output_path+'csv/None_' + elif ii == '': + orbit_list = [44,117,95,168] + orbit2=None + orbit3=None + orbit4=None + plot_output_path = '/media/tweiss/Work/paper2/z_dense_s1_time_series_n7'+p+pp+'/' + csv_output_path = 
plot_output_path+'csv/' + else: + plot_output_path = '/media/tweiss/Work/paper2/z_dense_s1_time_series_n7'+p+pp+'_'+ii+'/' + csv_output_path = plot_output_path+'csv/'+ver[i]+'_'+ver[i]+'_' + orbit_list = [int(ver[i])] + orbit2 = int(ver2[i]) + if ver3[i] == '': + orbit3 = None + else: + orbit3 = int(ver3[i]) + + + data = pd.read_csv(csv_output_path+'all_50.csv',header=[0,1,2,3,4,5],index_col=0) + for kkk in opt_mod: + for k in surface_list: + for kk in canopy_list: + + data_field =data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=field2) + data_field.index = pd.to_datetime(data_field.index) + date = data_field.index + time = date + time2 = np.array(time) + for jj in range(len(time)): + time2[jj] = time[jj].replace(microsecond=0).replace(second=0).replace(minute=0) + time2 = pd.to_datetime(time2) + + start_date = date[0].to_pydatetime() + end_date = date[-1].to_pydatetime() + drop_milli = sar_inference_data.time + for t in range(len(sar_inference_data.time)): + sar_inference_data.time[t] = sar_inference_data.time[t].replace(microsecond=0).replace(second=0).replace(minute=0) + index1 = sar_inference_data.time.index(start_date.replace(second=0).replace(minute=0)) + index2 = sar_inference_data.time.index(end_date.replace(second=0).replace(minute=0)) + + sar_inference_data.time_mask[:] = False + sar_inference_data.time_mask[index1:index2+1] = True + + api_data = pd.read_csv('/media/tweiss/Daten/data_AGU/api_sm.csv',header=[0],index_col=0) + api_data.index = pd.to_datetime(api_data.index) + api_sm = api_data.loc[time2].values.flatten() + sm = api_sm + sm_std = data_field.filter(like='SM_insitu').values.flatten() + sm_std[:] = 0.21 + time_s1 = np.array(sar_inference_data.time)[sar_inference_data.time_mask] + times1_2 = pd.to_datetime(time) + + + lai_all = sar_inference_data.lai[sar_inference_data.time_mask] + vv_all = sar_inference_data.vv.ReadAsArray()[sar_inference_data.time_mask] + vh_all = 
sar_inference_data.vh.ReadAsArray()[sar_inference_data.time_mask] + theta_all = sar_inference_data.ang.ReadAsArray()[sar_inference_data.time_mask] + + height = data_field.filter(like='height').values.flatten() + coef = data_field.filter(like='coef').values.flatten() + + coef_std = data_field.filter(like='SM_insitu').values.flatten() + coef_std[:] = 0.01 + + orbits = data_field.filter(like='relativeorbit').values.flatten() + unc = 1.5 + + sm_retrieved = lai_all * np.nan + + for z in range(len(state_mask)): + for zz in range(len(state_mask[0])): + if state_mask[z,zz] == False: + pass + else: + vv = vv_all[:,z,zz] + vh = vh_all[:,z,zz] + lai = lai_all[:,z,zz] + theta = theta_all[:,z,zz] + + times, lais, coefs, sms, orbit_mask = do_one_pixel_field(data_field, vv, vh, lai, theta, time, sm, sm_std, coef, coef_std, height, orbits,unc=unc) + sm_retrieved[:,z,zz] = sms + + + + for u in range(len(sm_retrieved)): + + fig = plt.gcf() + ax = fig.add_subplot(111) + + + # quadmesh = ax.imshow(sm_retrieved[u,0:100,200:250]) + quadmesh = ax.imshow(sm_retrieved[u,650:750,400:500]) + # quadmesh = ax.imshow(sm_retrieved[u,250:350,580:630]) + plt.colorbar(quadmesh) + quadmesh.set_clim(vmin=0.05, vmax=0.5) + + plt.savefig('/media/tweiss/Daten/data_AGU/test_kaska/down/508_'+str(time[u])[:10], bbox_inches = 'tight') + plt.close() + + pdb.set_trace() + + + pdb.set_trace() + + + + + + # sm_prior + # coef + # height + # height_insitu = np.full([len(state_mask),len(state_mask[0])], np.nan) + + pdb.set_trace() + lai = np.nanmean(lai, axis=1) + cab = sar_inference_data.cab[sar_inference_data.time_mask][:, field_mask] + cab = np.nanmean(cab, axis=1) + cbrown = sar_inference_data.cbrown[sar_inference_data.time_mask][:, field_mask] + cbrown = np.nanmean(cbrown, axis=1) + + data = {'lai':lai, 'cab':cab, 'cbrown':cbrown} + + df = pd.DataFrame(data, index=time2) + df.to_csv('/media/tweiss/Daten/data_AGU/S2_'+field2+pixels+'.csv') + # pdb.set_trace() + + + + + + + + + pdb.set_trace() + # sm = 
prior.sm_prior[sar_inference_data.time_mask][:, field_mask] + # sm_std= prior.sm_std [sar_inference_data.time_mask][:, field_mask] + + # sm[np.isnan(sm)] = 0.2 + # sm_std[sm_std==0] = 0.5 + # sm_std[np.isnan(sm_std)] = 0.5 + + # coef = prior.sr_prior[sar_inference_data.time_mask][:, field_mask] + # coef_std= prior.sr_std [sar_inference_data.time_mask][:, field_mask] + + # height = prior.sm_prior[sar_inference_data.time_mask][:, field_mask] + # height[:] = 0.1 + + # # coef[:] = 0.2 + # coef_std[:] = 0.5 + + # coef[np.isnan(coef)] = 0.1 + # coef_std[np.isnan(coef_std)] = 0.5 + + # vv = sar_inference_data.vv.ReadAsArray()[sar_inference_data.time_mask][:, field_mask] + # vh = sar_inference_data.vh.ReadAsArray()[sar_inference_data.time_mask][:, field_mask] + # theta = sar_inference_data.ang.ReadAsArray()[sar_inference_data.time_mask][:, field_mask] + + + # for jj in range(len(time)): + # time[jj] = time[jj].replace(microsecond=0).replace(second=0).replace(minute=0).replace(hour=0) + + # start_date = pd.to_datetime(add_data.index)[0].to_pydatetime().replace(microsecond=0).replace(second=0).replace(minute=0).replace(hour=0) + # end_date = pd.to_datetime(add_data.index)[-1].to_pydatetime().replace(microsecond=0).replace(second=0).replace(minute=0).replace(hour=0) + # if field == 1: + # add_lai = add_data.filter(like='LAI_insitu').filter(like='301_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + # add_coef = add_data.filter(like='coef').filter(like='301_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + # add_height = add_data.filter(like='height').filter(like='301_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + # elif field == 4: + # add_lai = add_data.filter(like='LAI_insitu').filter(like='542_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + # add_coef = 
add_data.filter(like='coef').filter(like='542_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + # add_height = add_data.filter(like='height').filter(like='542_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + # elif field == 5: + # add_lai = add_data.filter(like='LAI_insitu').filter(like='508_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + # add_coef = add_data.filter(like='coef').filter(like='508_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + # add_height = add_data.filter(like='height').filter(like='508_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + # else: + # pass + # # elif field == 3: + # # add_lai = add_data.filter(like='LAI_insitu').filter(like='515_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + # # add_coef = add_data.filter(like='coef').filter(like='515_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + # # else: + # # add_lai = add_data.filter(like='LAI_insitu').filter(like='319_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + # # add_coef = add_data.filter(like='coef').filter(like='319_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + + + # lai = np.nanmean(lai, axis=1) + # lai[(start_date <= time) & (end_date >= time)] = add_lai + # vv = np.nanmean(vv, axis=1) + # vh = np.nanmean(vh, axis=1) + # theta = np.nanmean(theta, axis=1) + + # sm = np.nanmean(sm, axis=1) + # sm_std = np.nanmean(sm_std, axis=1) + + # coef = np.nanmean(coef, axis=1) + # coef[(start_date <= time) & (end_date >= time)] = add_coef + + # coef_std = np.nanmean(coef_std, axis=1) + + # height = coef + 1 + # height[(start_date <= time) & (end_date >= time)] = add_height + + # vv = np.maximum(vv, 0.0001) + # vv = 10 * np.log10(vv) + # vh = np.maximum(vh, 0.0001) + # vh = 10 * np.log10(vh) + + # times, lais, coefs, sms = 
do_one_pixel_field(sar_inference_data, vv, vh, lai, theta, time, sm, sm_std, coef, coef_std, height) + + # lai_outputs[:, field_mask] = sms[...,None] + + # coef_outputs[:, field_mask] = coefs [...,None] + # sm_outputs[:, field_mask] = sms [...,None] + + else: + mask = gdal.Open(state_mask).ReadAsArray() + xs, ys = np.where(mask) + + out_shape = sar_inference_data.lai[sar_inference_data.time_mask].shape + time = np.array(sar_inference_data.time)[sar_inference_data.time_mask] + lai_outputs = np.zeros(out_shape ) + sm_outputs = np.zeros(out_shape ) + coef_outputs = np.zeros(out_shape ) + + for i in range(len(xs)): + indx, indy = xs[i], ys[i] + + # field_mask = slice(None, None), slice(indx, indx+1), slice(indy, indy+1) + time = np.array(sar_inference_data.time)[sar_inference_data.time_mask] + lai = sar_inference_data.lai[sar_inference_data.time_mask][:, indx, indy ] + + sm = prior.sm_prior[sar_inference_data.time_mask][:, indx, indy ] + sm_std= prior.sm_std [sar_inference_data.time_mask][:, indx, indy ] + + sm[np.isnan(sm)] = 0.2 + sm_std[sm_std==0] = 0.5 + sm_std[np.isnan(sm_std)] = 0.5 + + coef = prior.sr_prior[sar_inference_data.time_mask][:, indx, indy ] + coef_std= prior.sr_std [sar_inference_data.time_mask][:, indx, indy ] + sr[np.isnan(sr)] = 0.1 + sr_std[np.isnan(sr_std)] = 0.5 + + height = prior.sr_prior[sar_inference_data.time_mask][:, indx, indy ] + height[:] = 0.1 + + vv = sar_inference_data.vv.ReadAsArray()[sar_inference_data.time_mask][:, indx, indy ] + vh = sar_inference_data.vh.ReadAsArray()[sar_inference_data.time_mask][:, indx, indy ] + theta = sar_inference_data.ang.ReadAsArray()[sar_inference_data.time_mask][:, indx, indy ] + + + vv = np.maximum(vv, 0.0001) + vv = 10 * np.log10(vv) + vh = np.maximum(vh, 0.0001) + vh = 10 * np.log10(vh) + + times, lais, coefs, sms = do_one_pixel_field(sar_inference_data, vv, vh, lai, theta, time, sm, sm_std, coef, coef_std, height) + + lai_outputs[:, indx, indy] = lais + coef_outputs[:, indx, indy] = coefs + 
sm_outputs[:, indx, indy] = sms + + return lai_outputs, coef_outputs, sm_outputs, uorbits + +def save_output(fname, Array, GeoT, projction, time): + if os.path.exists(fname): + os.remove(fname) + ds = gdal.GetDriverByName('GTiff').Create(fname, Array.shape[2], Array.shape[1], Array.shape[0], gdal.GDT_Float32) + ds.SetGeoTransform(GeoT) + ds.SetProjection(projction) + for i, image in enumerate(Array): + ds.GetRasterBand(i+1).SetMetadata({'date': time[i]}) + ds.GetRasterBand(i+1).WriteArray( image ) + ds.FlushCache() + return fname + +def save_ps_output(fname, Array, GeoT, projction, orbit): + if os.path.exists(fname): + os.remove(fname) + ds = gdal.GetDriverByName('GTiff').Create(fname, Array.shape[2], Array.shape[1], Array.shape[0], gdal.GDT_Float32) + ds.SetGeoTransform(GeoT) + ds.SetProjection(projction) + for i, image in enumerate(Array): + ds.GetRasterBand(i+1).SetMetadata({'orbit': str(int(orbit[i]))}) + ds.GetRasterBand(i+1).WriteArray( image ) + ds.FlushCache() + return fname + + + +class KaSKASAR(object): + """A class to process Sentinel 1 SAR data using S2 data as + an input""" + + def __init__(self, s1_ncfile, state_mask, s2_lai, s2_cab, s2_cbrown, sm_prior, sm_std, sr_prior ,sr_std,orbit1=None,orbit2=None): + self.s1_ncfile = s1_ncfile + self.state_mask = state_mask + self.s2_lai = s2_lai + self.s2_cab = s2_cab + self.s2_cbrown = s2_cbrown + self.sm_prior = sm_prior + self.sm_std = sm_std + self.sr_prior = sr_prior + self.sr_std = sr_std + self.orbit1 = None + self.orbit2 = None + if orbit1 != None: + self.orbit1 = orbit1 + if orbit2 != None: + self.orbit2 = orbit2 + + def sentinel1_inversion(self, segment=False): + sar = get_sar(s1_ncfile) + s1_data = read_sar(sar, self.state_mask) + s2_data = read_s2_lai(self.s2_lai, self.s2_cab, self.s2_cbrown, self.state_mask) + prior = get_prior(s1_data, self.sm_prior, self.sm_std, self.sr_prior, self.sr_std, self.state_mask) + sar_inference_data = inference_preprocessing(s1_data, s2_data, 
self.state_mask,self.orbit1,self.orbit2) + + lai_outputs, sr_outputs, sm_outputs, uorbits = do_inversion(sar_inference_data, prior, self.state_mask, segment) + + gg = gdal.Open('NETCDF:"%s":sigma0_vv_multi'%self.s1_ncfile) + geo = gg.GetGeoTransform() + + projction = 'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]' + + time = [i.strftime('%Y-%m-%d') for i in np.array(sar_inference_data.time)[sar_inference_data.time_mask]] + + sm_name = self.s1_ncfile.replace('.nc', '_sar_sm.tif') + sr_name = self.s1_ncfile.replace('.nc', '_sar_sr.tif') + lai_name = self.s1_ncfile.replace('.nc', '_sar_lai.tif') + + save_output(sm_name, sm_outputs, geo, projction, time) + save_output(sr_name, sr_outputs, geo, projction, time) + save_output(lai_name, lai_outputs, geo, projction, time) + + + + +if __name__ == '__main__': + # s1_ncfile = '/data/nemesis/kaska-sar_quick/S1_LMU_site_2017_new.nc' + # state_mask = "/home/ucfajlg/Data/python/KaFKA_Validation/LMU/carto/ESU.tif" + # s2_folder = "/home/ucfajlg/Data/python/KaFKA_Validation/LMU/s2_obs/" + # s2_lai = f"{s2_folder:s}/outputs/lai.tif" + # s2_cab = f"{s2_folder:s}/outputs/cab.tif" + # s2_cbrown = f"{s2_folder:s}/outputs/cbrown.tif" + + # sm_prior = '/data/nemesis/kaska-sar_quick/sm_prior.tif' + # sm_std = '/data/nemesis/kaska-sar_quick/sm_std.tif' + # sr_prior = '/data/nemesis/kaska-sar_quick/sr_prior.tif' + # sr_std = '/data/nemesis/kaska-sar_quick/sr_std.tif' + # sarsar = KaSKASAR(s1_ncfile, state_mask, s2_lai, s2_cab, s2_cbrown, sm_prior, sm_std, sr_prior ,sr_std) + + # s1_ncfile = '/media/nas_data/Thomas/S1/processed/MNI_2017/MNI_2017.nc' + + # aggregation = '_point' + aggregation = '_Field_buffer_30' + # aggregation = '_buffer_100' + aggregation = '_buffer_50' + + + s1_ncfile = 
'/media/tweiss/Daten/data_AGU/'+aggregation+'/MNI_2017_new_final.nc' + state_mask = '/media/tweiss/Work/z_final_mni_data_2017/ESU'+aggregation+'.tif' + s2_folder = "/media/tweiss/Daten/test_kaska/data/" + s2_lai = f"{s2_folder:s}/lai.tif" + s2_cab = f"{s2_folder:s}/cab.tif" + s2_cbrown = f"{s2_folder:s}/cbrown.tif" + + sm_prior = f'{s2_folder:s}/sm_prior.tif' + sm_std = f'{s2_folder:s}/sm_std.tif' + sr_prior = f'{s2_folder:s}/sr_prior.tif' + sr_std = f'{s2_folder:s}/sr_std.tif' + + sarsar = KaSKASAR(s1_ncfile, state_mask, s2_lai, s2_cab, s2_cbrown, sm_prior, sm_std, sr_prior ,sr_std) + + csv_output_path = '/media/tweiss/Work/paper2/z_dense_s1_time_series_n7multi_Field_buffer_30/csv/' + + add_data = pd.read_csv(csv_output_path+'all_50.csv',header=[0,1,2,3,4,5],index_col=0) + + sarsar.sentinel1_inversion(True) + diff --git a/kaska/kaska_sar_ssrt_jose.py b/kaska/kaska_sar_ssrt_jose.py new file mode 100644 index 0000000..db4024b --- /dev/null +++ b/kaska/kaska_sar_ssrt_jose.py @@ -0,0 +1,873 @@ +#!/usr/bin/env python + +import os +import osr +import gdal +import datetime +import numpy as np +from netCDF4 import Dataset +from scipy.ndimage import label +from utils import reproject_data +from skimage.filters import sobel +from collections import namedtuple +from scipy.optimize import minimize +from scipy.interpolate import interp1d +from watercloudmodel import cost_function +from watercloudmodel import cost_function2 +from scipy.ndimage.filters import gaussian_filter1d +import pdb +from z_helper import * +import matplotlib.pyplot as plt + +def save_to_tif(fname, Array, GeoT): + if os.path.exists(fname): + os.remove(fname) + ds = gdal.GetDriverByName('GTiff').Create(fname, Array.shape[2], Array.shape[1], Array.shape[0], gdal.GDT_Float32) + ds.SetGeoTransform(GeoT) + wkt = 'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 
84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]' + ds.SetProjection(wkt) + for i, image in enumerate(Array): + # ds.GetRasterBand(i+1).SetMetadata({'date': prior_time[i]}) + ds.GetRasterBand(i+1).WriteArray( image ) + ds.FlushCache() + return fname + +def get_sar(s1_nc_file): + s1_data = namedtuple('s1_data', 'time lat lon satellite relorbit orbitdirection ang_name vv_name, vh_name') + data = Dataset(s1_nc_file) + relorbit = data['relorbit'][:] + localIncidenceAngle = data['localIncidenceAngle'][:] + satellite = data['satellite'][:] + orbitdirection = data['orbitdirection'][:] + time = data['time'][:] + lat = data['lat'][:] + lon = data['lon'][:] + + vv_name = s1_nc_file.replace('.nc', '_vv.tif') + vh_name = s1_nc_file.replace('.nc', '_vh.tif') + ang_name = s1_nc_file.replace('.nc', '_ang.tif') + if not os.path.exists(vv_name): + gg = gdal.Open('NETCDF:"%s":sigma0_vv_multi'%s1_nc_file) + geo = gg.GetGeoTransform() + sigma0_vv_norm_multi = data['sigma0_vv_multi'][:] + save_to_tif(vv_name, sigma0_vv_norm_multi, geo) + + if not os.path.exists(vh_name): + gg = gdal.Open('NETCDF:"%s":sigma0_vh_multi'%s1_nc_file) + geo = gg.GetGeoTransform() + sigma0_vh_norm_multi = data['sigma0_vh_multi'][:] + save_to_tif(vh_name, sigma0_vh_norm_multi, geo) + + if not os.path.exists(ang_name): + gg = gdal.Open('NETCDF:"%s":localIncidenceAngle'%s1_nc_file) + geo = gg.GetGeoTransform() + localIncidenceAngle = data['localIncidenceAngle'][:] + save_to_tif(ang_name, localIncidenceAngle, geo) + + return s1_data(time, lat, lon, satellite, relorbit, orbitdirection, ang_name, vv_name, vh_name) + +def read_sar(sar_data, state_mask): + s1_data = namedtuple('s1_data', 'time lat lon satellite relorbit orbitdirection ang vv vh') + + ang = reproject_data(sar_data.ang_name, output_format="MEM", target_img=state_mask) + vv = 
reproject_data(sar_data.vv_name, output_format="MEM", target_img=state_mask) + vh = reproject_data(sar_data.vh_name, output_format="MEM", target_img=state_mask) + + time = [datetime.datetime(1970,1,1) + datetime.timedelta(days=float(i)) for i in sar_data.time] + return s1_data(time, sar_data.lat, sar_data.lon, sar_data.satellite, sar_data.relorbit, sar_data.orbitdirection, ang, vv, vh) + +def read_s2_lai(s2_lai, s2_cab, s2_cbrown, state_mask): + s2_data = namedtuple('s2_lai', 'time lai cab cbrown') + g = gdal.Open(s2_lai) + time = [] + for i in range(g.RasterCount): + gg = g.GetRasterBand(i+1) + meta = gg.GetMetadata() + time.append(datetime.datetime.strptime(meta['DoY'], '%Y%j')) + lai = reproject_data(s2_lai, output_format="MEM", target_img=state_mask) + cab = reproject_data(s2_cab, output_format="MEM", target_img=state_mask) + cbrown = reproject_data(s2_cbrown, output_format="MEM", target_img=state_mask) + return s2_data(time, lai, cab, cbrown) + +def inference_preprocessing(s1_data, s2_data, state_mask, orbit1=None, orbit2=None): + """Resample S2 smoothed output to match S1 observations + times""" + # Move everything to DoY to simplify interpolation + + sar_inference_data = namedtuple('sar_inference_data', 'time lat lon satellite relorbit orbitdirection ang vv vh lai cab cbrown time_mask fields') + + + s2_doys = np.array([ int(i.strftime('%j')) for i in s2_data.time]) + s1_doys = np.array([ int(i.strftime('%j')) for i in s1_data.time]) + # s1_doys = s1_doys[:112] + # pdb.set_trace() + + time_mask = (s1_doys >= s2_doys.min()) & (s1_doys <= s2_doys.max()) + if orbit1 != None: + rel_orbit1 = s1_data.relorbit==orbit1 + if orbit2 != None: + rel_orbit2 = s1_data.relorbit==orbit2 + xxx = np.logical_and(rel_orbit1,time_mask) + yyy = np.logical_and(rel_orbit2,time_mask) + time_mask = np.logical_or(xxx,yyy) + + f = interp1d(s2_doys, s2_data.lai.ReadAsArray(), axis=0, bounds_error=False) + lai_s1 = f(s1_doys) + f = interp1d(s2_doys, s2_data.cab.ReadAsArray(), axis=0, 
bounds_error=False) + cab_s1 = f(s1_doys) + f = interp1d(s2_doys, s2_data.cbrown.ReadAsArray(), axis=0, bounds_error=False) + cbrown_s1 = f(s1_doys) + # segmentation + lai_max = np.nanmax(s2_data.lai.ReadAsArray(), axis=0) + patches = sobel(lai_max)>0.001 + fields = label(patches)[0] + + + g = gdal.Open(state_mask) + gg = g.GetRasterBand(1) + ggg = gg.ReadAsArray() + fields[ggg==0]=0 + sar_inference_data = sar_inference_data(s1_data.time, s1_data.lat, s1_data.lon, + s1_data.satellite, s1_data.relorbit, + s1_data.orbitdirection, s1_data.ang, + s1_data.vv, s1_data.vh, lai_s1, cab_s1, cbrown_s1, time_mask, fields) + + return sar_inference_data + + +def get_prior(s1_data, soilMoisture, soilMoisture_std, soilRoughness, soilRoughness_std, state_mask): + # this is the function to reading the soil moisture prior + # and the soil roughness prior using the satemask + # the assumption of inputs are daily data in geotifs + prior = namedtuple('prior', 'time sm_prior sm_std sr_prior sr_std') + + g = gdal.Open(soilMoisture) + time = [] + for i in range(g.RasterCount): + gg = g.GetRasterBand(i+1) + meta = gg.GetMetadata() + time.append(datetime.datetime.strptime(meta['date'], '%Y-%m-%d')) + sm_prior = reproject_data(soilMoisture, output_format="MEM", target_img=state_mask) + sm_std = reproject_data(soilMoisture_std, output_format="MEM", target_img=state_mask) + sr_prior = reproject_data(soilRoughness, output_format="MEM", target_img=state_mask) + sr_std = reproject_data(soilRoughness_std,output_format="MEM", target_img=state_mask) + + prior_doy = np.array([ int(i.strftime('%j')) for i in time]) + s1_doys = np.array([ int(i.strftime('%j')) for i in s1_data.time]) + + f = interp1d(prior_doy, sm_prior.ReadAsArray(), axis=0, bounds_error=False) + + sm_s1 = f(s1_doys) + f = interp1d(prior_doy, sm_std.ReadAsArray(), axis=0, bounds_error=False) + sm_std_s1 = f(s1_doys) + + f = interp1d(prior_doy, sr_prior.ReadAsArray(), axis=0, bounds_error=False) + sr_s1 = f(s1_doys) + f = 
interp1d(prior_doy, sr_std.ReadAsArray(), axis=0, bounds_error=False) + sr_std_s1 = f(s1_doys) + + return prior(time, sm_s1, sm_std_s1, sr_s1, sr_std_s1) + +def smooth(y, box_pts): + box = np.ones(box_pts)/box_pts + y_smooth = np.convolve(y, box, mode='same') + return y_smooth + +def fresnel(eps, theta): + theta = np.deg2rad(theta) + num = (eps-1)*(np.sin(theta)**2 - eps*(1+np.sin(theta)**2)) + den = eps*np.cos(theta) + np.sqrt(eps - np.sin(theta)**2) + den = den**2 + return np.abs(num/den) + +def mv2eps(a, b, c, mv): + eps = a + b * mv + c * mv**2 + return eps + +def quad_approx_solver(a, b, c, theta, alphas): + x = np.arange(0.01, 0.5, 0.01) + p = np.polyfit(x, fresnel(mv2eps(a, b, c, x),theta.mean()), 2) + # 2nd order polynomial + #solve + solutions = [np.roots([p[0], p[1], p[2]-aa]) for aa in alphas] + return solutions + + +def do_one_pixel_field(sar_inference_data, vv, vh, lai, theta, time, sm, sm_std, sr, sr_std, orbits, unc=1.): + + + lais = [] + srs = [] + alphas = [] + sms = [] + ps = [] + times = [] + uorbits = np.unique(orbits) + for orbit in uorbits: + orbit_mask = orbits == orbit + ovv, ovh, olai, otheta, otime = vv[orbit_mask], vh[orbit_mask], lai[orbit_mask], theta[orbit_mask], time[orbit_mask] + osm, osm_std, osro, osro_std = sm[orbit_mask], sm_std[orbit_mask], sr[orbit_mask], sr_std[orbit_mask] + + olai_std = np.ones_like(olai)*0.05 + + alpha = fresnel(mv2eps(1.99, 38.9, 11.5, osm), otheta) + alpha_std = np.ones_like(alpha)*0.2 + + soil_sigma_mask = olai < 1 + sigma_soil_vv_mu = np.mean(ovv[soil_sigma_mask]) + sigma_soil_vh_mu = np.mean(ovh[soil_sigma_mask]) + + xvv = np.array([1, 0.5, sigma_soil_vv_mu]) + xvh = np.array([1, 0.5, sigma_soil_vh_mu]) + + prior_mean = np.concatenate([[0, ]*6, alpha, osro, olai]) + prior_unc = np.concatenate([[10., ]*6, alpha_std, osro_std, olai_std]) + + x0 = np.concatenate([xvv, xvh, alpha, osro, olai]) + + bounds = ( + [[None, None]] * 6 + + [[0.1, 3.3]] * olai.shape[0] + + [[0, .03]] * olai.shape[0] + + [[0, 8]] * 
olai.shape[0] + ) + + gamma = [1000, 1000] + retval = minimize(cost_function, + x0, + args=(ovh, ovv, otheta, gamma, prior_mean, prior_unc, unc), + jac=True, + bounds = bounds, + options={"disp": False},) + + posterious_lai = retval.x[6+2*len(olai) : ] + posterious_sr = retval.x[6+len(olai) : 6+2*len(olai)] + posterious_alpha = retval.x[6 : 6+len(olai)] + sols = np.array(quad_approx_solver(1.99, 38.9, 11.5, otheta, posterious_alpha)).min(axis=1) + lais.append(posterious_lai) + srs.append(posterious_sr) + sms.append(sols) + times.append(otime) + ps.append(retval.x[:6]) + + order = np.argsort(np.hstack(times)) + times = np.hstack(times )[order] + lais = np.hstack(lais )[order] + srs = np.hstack(srs )[order] + sms = np.hstack(sms )[order].real + return times, lais, srs, sms, np.array(ps) + +# def do_one_pixel_field(data_field, vv, vh, lai, theta, time, sm, sm_std, coef, coef_std, height, orbits, unc): + +# lais = [] +# coefs = [] +# sms = [] +# times = [] + +# uorbits = np.unique(orbits) +# uorbits = np.array([95]) +# for orbit in uorbits: +# # for jj in range(len(vv)): +# # pdb.set_trace() +# # orbit_mask = orbits == orbit +# # orbit_mask = (orbits == 95) | (orbits == 117) +# orbit_mask = (orbits == 44) | (orbits == 95) | (orbits == 117) | (orbits == 168) +# # orbit_mask = (orbits == 95) +# # orbit_mask = (orbits == 44) | (orbits == 95) | (orbits == 117) +# ovv, ovh, olai, otheta, otime = vv[orbit_mask], vh[orbit_mask], lai[orbit_mask], theta[orbit_mask], time[orbit_mask] +# osm, osm_std, oscoef, oscoef_std = sm[orbit_mask], sm_std[orbit_mask], coef[orbit_mask], coef_std[orbit_mask] + +# oheight = height[orbit_mask] + +# # ovv, ovh, olai, otheta, otime = np.array([vv[jj]]), np.array([vh[jj]]), np.array([lai[jj]]), np.array([theta[jj]]), np.array([time[jj]]) +# # osm, osm_std, oscoef, oscoef_std = np.array([sm[jj]]), np.array([sm_std[jj]]), np.array([coef[jj]]), np.array([coef_std[jj]]) + +# # oheight = np.array([height[jj]]) + + + +# # pdb.set_trace() +# olai_std = 
np.ones_like(olai)*0.05 + +# alpha = osm +# alpha_std = np.ones_like(alpha)*10 +# alpha_std = osm_std +# # pdb.set_trace() +# prior_mean = np.concatenate([alpha,oscoef]) +# prior_unc = np.concatenate([alpha_std,oscoef_std]) +# x0 = np.concatenate([alpha,oscoef]) +# data = np.concatenate([oheight,olai]) +# bounds = ( +# # [[2.5, 30]] * olai.shape[0] +# [[0.01, 0.5]] * olai.shape[0] +# + [[0.0000001, 3]] * olai.shape[0] +# ) + +# gamma = [500, 500] + +# retval = minimize(cost_function2, +# x0, +# args=(ovh, ovv, otheta, gamma, prior_mean, prior_unc, data, unc), +# jac=True, +# bounds = bounds, +# options={"disp": True},) + +# # posterious_lai = retval.x[2*len(olai) : 3*len(olai)] +# posterious_coef = retval.x[len(olai) : +2*len(olai)] +# posterious_mv = retval.x[ : +len(olai)] +# # lais.append(posterious_lai) +# coefs.append(posterious_coef) +# # x = np.arange(0.01, 0.5, 0.001) +# # xx = _calc_eps(x) +# # sols=[] +# # for i in posterious_mv: +# # p, pp = find_nearest(xx,i) +# # sols.append(x[pp]) +# # sols = np.array(sols) + +# sms.append(posterious_mv) +# # sms.append(sols) +# times.append(otime) + +# order = np.argsort(np.hstack(times)) +# times = np.hstack(times )[order] +# # lais = np.hstack(lais )[order] +# lais=0 +# coefs = np.hstack(coefs )[order] +# # coefs=0 +# sms = np.hstack(sms )[order].real +# orbit_mask = (orbits == 44) | (orbits == 95) | (orbits == 117) | (orbits == 168) +# return times, lais, coefs, sms, orbit_mask + + +def do_inversion(sar_inference_data, prior, state_mask, segment=False): + + orbits = sar_inference_data.relorbit[sar_inference_data.time_mask] + uorbits = np.unique(orbits) + if segment: + out_shape = sar_inference_data.lai[sar_inference_data.time_mask].shape + lai_outputs = np.zeros(out_shape ) + sm_outputs = np.zeros(out_shape ) + coef_outputs = np.zeros(out_shape ) + + fields = np.unique(sar_inference_data.fields)[1:] + # pdb.set_trace() + pixel = ['_Field_buffer_30','','_buffer_30','_buffer_50','_buffer_100'] + pixel = 
['_Field_buffer_30'] + fields = ['301','508','542'] + fields = ['301'] + # ESU names + esus = ['high', 'low', 'med', 'mean'] + esus = ['mean'] + for pixels in pixel: + print(pixels) + path_ESU = '/media/tweiss/Work/z_final_mni_data_2017/' + name_shp = 'ESU'+pixels+'.shp' + name_ESU = 'ESU'+pixels+'.tif' + + for esu in esus: + for field in fields: + field2 = field + '_' + esu + g = gdal.Open(os.path.join(path_ESU, name_ESU)) + state_mask = g.ReadAsArray().astype(np.int) + + if pixels == '_Field_buffer_30': + if field == '515': + mask_value = 4 + state_mask = state_mask==mask_value + elif field == '508': + mask_value = 27 + state_mask = state_mask==mask_value + elif field == '542': + mask_value = 8 + state_mask = state_mask==mask_value + elif field == '319': + mask_value = 67 + state_mask = state_mask==mask_value + elif field == '301': + mask_value = 87 + state_mask = state_mask==mask_value + else: + if field == '515' and esu == 'high': + mask_value = 1 + state_mask = state_mask==mask_value + elif field == '515' and esu == 'med': + mask_value = 2 + state_mask = state_mask==mask_value + elif field == '515' and esu == 'low': + mask_value = 3 + state_mask = state_mask==mask_value + elif field == '508' and esu == 'high': + mask_value = 4 + state_mask = state_mask==mask_value + elif field == '508' and esu == 'med': + mask_value = 5 + state_mask = state_mask==mask_value + elif field == '508' and esu == 'low': + mask_value = 6 + state_mask = state_mask==mask_value + elif field == '542' and esu == 'high': + mask_value = 7 + state_mask = state_mask==mask_value + elif field == '542' and esu == 'med': + mask_value = 8 + state_mask = state_mask==mask_value + elif field == '542' and esu == 'low': + mask_value = 9 + state_mask = state_mask==mask_value + elif field == '319' and esu == 'high': + mask_value = 10 + state_mask = state_mask==mask_value + elif field == '319' and esu == 'med': + mask_value = 11 + state_mask = state_mask==mask_value + elif field == '319' and esu == 'low': 
+ mask_value = 12 + state_mask = state_mask==mask_value + elif field == '301' and esu == 'high': + mask_value = 13 + state_mask = state_mask==mask_value + elif field == '301' and esu == 'med': + mask_value = 14 + state_mask = state_mask==mask_value + elif field == '301' and esu == 'low': + mask_value = 15 + state_mask = state_mask==mask_value + elif field == '515' and esu == 'mean': + m = np.ma.array(state_mask,mask=((state_mask==1) | (state_mask==2) | (state_mask==3))) + state_mask = m.mask + elif field == '508' and esu == 'mean': + m = np.ma.array(state_mask,mask=((state_mask==4) | (state_mask==5) | (state_mask==6))) + state_mask = m.mask + elif field == '542' and esu == 'mean': + m = np.ma.array(state_mask,mask=((state_mask==7) | (state_mask==8) | (state_mask==9))) + state_mask = m.mask + elif field == '319' and esu == 'mean': + m = np.ma.array(state_mask,mask=((state_mask==10) | (state_mask==11) | (state_mask==12))) + state_mask = m.mask + elif field == '301' and esu == 'mean': + m = np.ma.array(state_mask,mask=((state_mask==13) | (state_mask==14) | (state_mask==15))) + state_mask = m.mask + + + # get per field data + # with time mask as well + # field_mask2 = sar_inference_data.fields == field + field_mask = state_mask + + pre_processing = ['multi'] + aggregation = ['_buffer_100'] + canopy_list = ['turbid_isotropic'] + surface_list = ['Oh04'] + opt_mod = ['time_variant'] + + for p in pre_processing: + + for pp in aggregation: + + versions = ['','everything'] + ver = ['',''] + ver2 = ['',''] + ver3 = ['',''] + + for i, ii in enumerate(versions): + + if ii == 'everything': + orbit_list = [None] + orbit1=None + orbit2=None + orbit3=None + orbit4=None + plot_output_path = '/media/tweiss/Work/paper2/z_dense_s1_time_series_n7'+p+pp+'_all'+'/' + csv_output_path = plot_output_path+'csv/None_' + elif ii == '': + orbit_list = [44,117,95,168] + orbit2=None + orbit3=None + orbit4=None + plot_output_path = '/media/tweiss/Work/paper2/z_dense_s1_time_series_n7'+p+pp+'/' + 
csv_output_path = plot_output_path+'csv/' + else: + plot_output_path = '/media/tweiss/Work/paper2/z_dense_s1_time_series_n7'+p+pp+'_'+ii+'/' + csv_output_path = plot_output_path+'csv/'+ver[i]+'_'+ver[i]+'_' + orbit_list = [int(ver[i])] + orbit2 = int(ver2[i]) + if ver3[i] == '': + orbit3 = None + else: + orbit3 = int(ver3[i]) + + + data = pd.read_csv(csv_output_path+'all_50.csv',header=[0,1,2,3,4,5],index_col=0) + for kkk in opt_mod: + for k in surface_list: + for kk in canopy_list: + + data_field =data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=field2) + data_field.index = pd.to_datetime(data_field.index) + date = data_field.index + time = date + time2 = np.array(time) + for jj in range(len(time)): + time2[jj] = time[jj].replace(microsecond=0).replace(second=0).replace(minute=0) + time2 = pd.to_datetime(time2) + + start_date = date[0].to_pydatetime() + end_date = date[-1].to_pydatetime() + drop_milli = sar_inference_data.time + for t in range(len(sar_inference_data.time)): + sar_inference_data.time[t] = sar_inference_data.time[t].replace(microsecond=0).replace(second=0).replace(minute=0) + index1 = sar_inference_data.time.index(start_date.replace(second=0).replace(minute=0)) + index2 = sar_inference_data.time.index(end_date.replace(second=0).replace(minute=0)) + + sar_inference_data.time_mask[:] = False + sar_inference_data.time_mask[index1:index2+1] = True + + api_data = pd.read_csv('/media/tweiss/Daten/data_AGU/api_sm.csv',header=[0],index_col=0) + api_data.index = pd.to_datetime(api_data.index) + api_sm = api_data.loc[time2].values.flatten() + sm = api_sm + sm_std = data_field.filter(like='SM_insitu').values.flatten() + sm_std[:] = 10.71 + time_s1 = np.array(sar_inference_data.time)[sar_inference_data.time_mask] + times1_2 = pd.to_datetime(time) + + + lai_all = sar_inference_data.lai[sar_inference_data.time_mask] + vv_all = sar_inference_data.vv.ReadAsArray()[sar_inference_data.time_mask] + vh_all = 
sar_inference_data.vh.ReadAsArray()[sar_inference_data.time_mask] + theta_all = sar_inference_data.ang.ReadAsArray()[sar_inference_data.time_mask] + + height = data_field.filter(like='height').values.flatten() + coef = data_field.filter(like='coef').values.flatten() + + coef_std = data_field.filter(like='SM_insitu').values.flatten() + coef_std[:] = 0.01 + + orbits = data_field.filter(like='relativeorbit').values.flatten() + unc = 1.5 + + sm_retrieved = lai_all * np.nan + + for z in range(len(state_mask)): + for zz in range(len(state_mask[0])): + if state_mask[z,zz] == False: + pass + else: + vv = vv_all[:,z,zz] + vh = vh_all[:,z,zz] + lai = lai_all[:,z,zz] + theta = theta_all[:,z,zz] + + sr = lai*1. + sr[:] = 0.3 + sr_std = lai*1. + sr_std[:] = 2 + + vv = np.maximum(vv, 0.0001) + vv = 10 * np.log10(vv) + vh = np.maximum(vh, 0.0001) + vh = 10 * np.log10(vh) + times, lais, srs, sms, ps = do_one_pixel_field(data_field, vv, vh, lai, theta, time, sm, sm_std, sr, sr_std, orbits, unc=unc) + + # times, lais, coefs, sms, orbit_mask = do_one_pixel_field(data_field, vv, vh, lai, theta, time, sm, sm_std, coef, coef_std, height, orbits,unc=unc) + sm_retrieved[:,z,zz] = sms + + + + for u in range(len(sm_retrieved)): + + fig = plt.gcf() + ax = fig.add_subplot(111) + + + quadmesh = ax.imshow(sm_retrieved[u,0:100,200:250]) + # quadmesh = ax.imshow(sm_retrieved[u,650:750,400:500]) + # quadmesh = ax.imshow(sm_retrieved[u,250:350,580:630]) + plt.colorbar(quadmesh) + quadmesh.set_clim(vmin=0.05, vmax=0.5) + + plt.savefig('/media/tweiss/Daten/data_AGU/test_kaska/down/Jose_301_'+str(u), bbox_inches = 'tight') + plt.close() + + pdb.set_trace() + + + pdb.set_trace() + + + + + + # sm_prior + # coef + # height + # height_insitu = np.full([len(state_mask),len(state_mask[0])], np.nan) + + pdb.set_trace() + lai = np.nanmean(lai, axis=1) + cab = sar_inference_data.cab[sar_inference_data.time_mask][:, field_mask] + cab = np.nanmean(cab, axis=1) + cbrown = 
sar_inference_data.cbrown[sar_inference_data.time_mask][:, field_mask] + cbrown = np.nanmean(cbrown, axis=1) + + data = {'lai':lai, 'cab':cab, 'cbrown':cbrown} + + df = pd.DataFrame(data, index=time2) + df.to_csv('/media/tweiss/Daten/data_AGU/S2_'+field2+pixels+'.csv') + # pdb.set_trace() + + + + + + + + + pdb.set_trace() + # sm = prior.sm_prior[sar_inference_data.time_mask][:, field_mask] + # sm_std= prior.sm_std [sar_inference_data.time_mask][:, field_mask] + + # sm[np.isnan(sm)] = 0.2 + # sm_std[sm_std==0] = 0.5 + # sm_std[np.isnan(sm_std)] = 0.5 + + # coef = prior.sr_prior[sar_inference_data.time_mask][:, field_mask] + # coef_std= prior.sr_std [sar_inference_data.time_mask][:, field_mask] + + # height = prior.sm_prior[sar_inference_data.time_mask][:, field_mask] + # height[:] = 0.1 + + # # coef[:] = 0.2 + # coef_std[:] = 0.5 + + # coef[np.isnan(coef)] = 0.1 + # coef_std[np.isnan(coef_std)] = 0.5 + + # vv = sar_inference_data.vv.ReadAsArray()[sar_inference_data.time_mask][:, field_mask] + # vh = sar_inference_data.vh.ReadAsArray()[sar_inference_data.time_mask][:, field_mask] + # theta = sar_inference_data.ang.ReadAsArray()[sar_inference_data.time_mask][:, field_mask] + + + # for jj in range(len(time)): + # time[jj] = time[jj].replace(microsecond=0).replace(second=0).replace(minute=0).replace(hour=0) + + # start_date = pd.to_datetime(add_data.index)[0].to_pydatetime().replace(microsecond=0).replace(second=0).replace(minute=0).replace(hour=0) + # end_date = pd.to_datetime(add_data.index)[-1].to_pydatetime().replace(microsecond=0).replace(second=0).replace(minute=0).replace(hour=0) + # if field == 1: + # add_lai = add_data.filter(like='LAI_insitu').filter(like='301_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + # add_coef = add_data.filter(like='coef').filter(like='301_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + # add_height = 
add_data.filter(like='height').filter(like='301_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + # elif field == 4: + # add_lai = add_data.filter(like='LAI_insitu').filter(like='542_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + # add_coef = add_data.filter(like='coef').filter(like='542_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + # add_height = add_data.filter(like='height').filter(like='542_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + # elif field == 5: + # add_lai = add_data.filter(like='LAI_insitu').filter(like='508_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + # add_coef = add_data.filter(like='coef').filter(like='508_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + # add_height = add_data.filter(like='height').filter(like='508_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + # else: + # pass + # # elif field == 3: + # # add_lai = add_data.filter(like='LAI_insitu').filter(like='515_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + # # add_coef = add_data.filter(like='coef').filter(like='515_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + # # else: + # # add_lai = add_data.filter(like='LAI_insitu').filter(like='319_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + # # add_coef = add_data.filter(like='coef').filter(like='319_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + + + # lai = np.nanmean(lai, axis=1) + # lai[(start_date <= time) & (end_date >= time)] = add_lai + # vv = np.nanmean(vv, axis=1) + # vh = np.nanmean(vh, axis=1) + # theta = np.nanmean(theta, axis=1) + + # sm = np.nanmean(sm, axis=1) + # sm_std = np.nanmean(sm_std, axis=1) + + # coef = np.nanmean(coef, axis=1) + # coef[(start_date <= time) & (end_date >= time)] = 
add_coef + + # coef_std = np.nanmean(coef_std, axis=1) + + # height = coef + 1 + # height[(start_date <= time) & (end_date >= time)] = add_height + + # vv = np.maximum(vv, 0.0001) + # vv = 10 * np.log10(vv) + # vh = np.maximum(vh, 0.0001) + # vh = 10 * np.log10(vh) + + # times, lais, coefs, sms = do_one_pixel_field(sar_inference_data, vv, vh, lai, theta, time, sm, sm_std, coef, coef_std, height) + + # lai_outputs[:, field_mask] = sms[...,None] + + # coef_outputs[:, field_mask] = coefs [...,None] + # sm_outputs[:, field_mask] = sms [...,None] + + else: + mask = gdal.Open(state_mask).ReadAsArray() + xs, ys = np.where(mask) + + out_shape = sar_inference_data.lai[sar_inference_data.time_mask].shape + time = np.array(sar_inference_data.time)[sar_inference_data.time_mask] + lai_outputs = np.zeros(out_shape ) + sm_outputs = np.zeros(out_shape ) + coef_outputs = np.zeros(out_shape ) + + for i in range(len(xs)): + indx, indy = xs[i], ys[i] + + # field_mask = slice(None, None), slice(indx, indx+1), slice(indy, indy+1) + time = np.array(sar_inference_data.time)[sar_inference_data.time_mask] + lai = sar_inference_data.lai[sar_inference_data.time_mask][:, indx, indy ] + + sm = prior.sm_prior[sar_inference_data.time_mask][:, indx, indy ] + sm_std= prior.sm_std [sar_inference_data.time_mask][:, indx, indy ] + + sm[np.isnan(sm)] = 0.2 + sm_std[sm_std==0] = 0.5 + sm_std[np.isnan(sm_std)] = 0.5 + + coef = prior.sr_prior[sar_inference_data.time_mask][:, indx, indy ] + coef_std= prior.sr_std [sar_inference_data.time_mask][:, indx, indy ] + sr[np.isnan(sr)] = 0.1 + sr_std[np.isnan(sr_std)] = 0.5 + + height = prior.sr_prior[sar_inference_data.time_mask][:, indx, indy ] + height[:] = 0.1 + + vv = sar_inference_data.vv.ReadAsArray()[sar_inference_data.time_mask][:, indx, indy ] + vh = sar_inference_data.vh.ReadAsArray()[sar_inference_data.time_mask][:, indx, indy ] + theta = sar_inference_data.ang.ReadAsArray()[sar_inference_data.time_mask][:, indx, indy ] + + + vv = np.maximum(vv, 
0.0001) + vv = 10 * np.log10(vv) + vh = np.maximum(vh, 0.0001) + vh = 10 * np.log10(vh) + + times, lais, coefs, sms = do_one_pixel_field(sar_inference_data, vv, vh, lai, theta, time, sm, sm_std, coef, coef_std, height) + + lai_outputs[:, indx, indy] = lais + coef_outputs[:, indx, indy] = coefs + sm_outputs[:, indx, indy] = sms + + return lai_outputs, coef_outputs, sm_outputs, uorbits + +def save_output(fname, Array, GeoT, projction, time): + if os.path.exists(fname): + os.remove(fname) + ds = gdal.GetDriverByName('GTiff').Create(fname, Array.shape[2], Array.shape[1], Array.shape[0], gdal.GDT_Float32) + ds.SetGeoTransform(GeoT) + ds.SetProjection(projction) + for i, image in enumerate(Array): + ds.GetRasterBand(i+1).SetMetadata({'date': time[i]}) + ds.GetRasterBand(i+1).WriteArray( image ) + ds.FlushCache() + return fname + +def save_ps_output(fname, Array, GeoT, projction, orbit): + if os.path.exists(fname): + os.remove(fname) + ds = gdal.GetDriverByName('GTiff').Create(fname, Array.shape[2], Array.shape[1], Array.shape[0], gdal.GDT_Float32) + ds.SetGeoTransform(GeoT) + ds.SetProjection(projction) + for i, image in enumerate(Array): + ds.GetRasterBand(i+1).SetMetadata({'orbit': str(int(orbit[i]))}) + ds.GetRasterBand(i+1).WriteArray( image ) + ds.FlushCache() + return fname + + + +class KaSKASAR(object): + """A class to process Sentinel 1 SAR data using S2 data as + an input""" + + def __init__(self, s1_ncfile, state_mask, s2_lai, s2_cab, s2_cbrown, sm_prior, sm_std, sr_prior ,sr_std,orbit1=None,orbit2=None): + self.s1_ncfile = s1_ncfile + self.state_mask = state_mask + self.s2_lai = s2_lai + self.s2_cab = s2_cab + self.s2_cbrown = s2_cbrown + self.sm_prior = sm_prior + self.sm_std = sm_std + self.sr_prior = sr_prior + self.sr_std = sr_std + self.orbit1 = None + self.orbit2 = None + if orbit1 != None: + self.orbit1 = orbit1 + if orbit2 != None: + self.orbit2 = orbit2 + + def sentinel1_inversion(self, segment=False): + sar = get_sar(s1_ncfile) + s1_data = 
read_sar(sar, self.state_mask) + s2_data = read_s2_lai(self.s2_lai, self.s2_cab, self.s2_cbrown, self.state_mask) + prior = get_prior(s1_data, self.sm_prior, self.sm_std, self.sr_prior, self.sr_std, self.state_mask) + sar_inference_data = inference_preprocessing(s1_data, s2_data, self.state_mask,self.orbit1,self.orbit2) + + lai_outputs, sr_outputs, sm_outputs, uorbits = do_inversion(sar_inference_data, prior, self.state_mask, segment) + + gg = gdal.Open('NETCDF:"%s":sigma0_vv_multi'%self.s1_ncfile) + geo = gg.GetGeoTransform() + + projction = 'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]' + + time = [i.strftime('%Y-%m-%d') for i in np.array(sar_inference_data.time)[sar_inference_data.time_mask]] + + sm_name = self.s1_ncfile.replace('.nc', '_sar_sm.tif') + sr_name = self.s1_ncfile.replace('.nc', '_sar_sr.tif') + lai_name = self.s1_ncfile.replace('.nc', '_sar_lai.tif') + + save_output(sm_name, sm_outputs, geo, projction, time) + save_output(sr_name, sr_outputs, geo, projction, time) + save_output(lai_name, lai_outputs, geo, projction, time) + + + + +if __name__ == '__main__': + # s1_ncfile = '/data/nemesis/kaska-sar_quick/S1_LMU_site_2017_new.nc' + # state_mask = "/home/ucfajlg/Data/python/KaFKA_Validation/LMU/carto/ESU.tif" + # s2_folder = "/home/ucfajlg/Data/python/KaFKA_Validation/LMU/s2_obs/" + # s2_lai = f"{s2_folder:s}/outputs/lai.tif" + # s2_cab = f"{s2_folder:s}/outputs/cab.tif" + # s2_cbrown = f"{s2_folder:s}/outputs/cbrown.tif" + + # sm_prior = '/data/nemesis/kaska-sar_quick/sm_prior.tif' + # sm_std = '/data/nemesis/kaska-sar_quick/sm_std.tif' + # sr_prior = '/data/nemesis/kaska-sar_quick/sr_prior.tif' + # sr_std = '/data/nemesis/kaska-sar_quick/sr_std.tif' + # sarsar = KaSKASAR(s1_ncfile, state_mask, s2_lai, s2_cab, s2_cbrown, sm_prior, sm_std, 
sr_prior ,sr_std) + + # s1_ncfile = '/media/nas_data/Thomas/S1/processed/MNI_2017/MNI_2017.nc' + + # aggregation = '_point' + aggregation = '_Field_buffer_30' + # aggregation = '_buffer_100' + aggregation = '_buffer_50' + + + s1_ncfile = '/media/tweiss/Daten/data_AGU/'+aggregation+'/MNI_2017_new_final.nc' + state_mask = '/media/tweiss/Work/z_final_mni_data_2017/ESU'+aggregation+'.tif' + s2_folder = "/media/tweiss/Daten/test_kaska/data/" + s2_lai = f"{s2_folder:s}/lai.tif" + s2_cab = f"{s2_folder:s}/cab.tif" + s2_cbrown = f"{s2_folder:s}/cbrown.tif" + + sm_prior = f'{s2_folder:s}/sm_prior.tif' + sm_std = f'{s2_folder:s}/sm_std.tif' + sr_prior = f'{s2_folder:s}/sr_prior.tif' + sr_std = f'{s2_folder:s}/sr_std.tif' + + sarsar = KaSKASAR(s1_ncfile, state_mask, s2_lai, s2_cab, s2_cbrown, sm_prior, sm_std, sr_prior ,sr_std) + + csv_output_path = '/media/tweiss/Work/paper2/z_dense_s1_time_series_n7multi_Field_buffer_30/csv/' + + add_data = pd.read_csv(csv_output_path+'all_50.csv',header=[0,1,2,3,4,5],index_col=0) + + sarsar.sentinel1_inversion(True) + diff --git a/kaska/kaska_sar_ssrt_new_version.py b/kaska/kaska_sar_ssrt_new_version.py new file mode 100644 index 0000000..35e3ee2 --- /dev/null +++ b/kaska/kaska_sar_ssrt_new_version.py @@ -0,0 +1,417 @@ +#!/usr/bin/env python + +import os +import osr +import gdal +import datetime +import numpy as np +from netCDF4 import Dataset +from scipy.ndimage import label +from utils import reproject_data +from skimage.filters import sobel +from collections import namedtuple +from scipy.optimize import minimize +from scipy.interpolate import interp1d +# from watercloudmodel import cost_function +from watercloudmodel import cost_function2 +from scipy.ndimage.filters import gaussian_filter1d +import pdb +from z_helper import * + + + + + + + +def do_one_pixel_field(sar_inference_data, vv, vh, lai, theta, time, sm, sm_std, sr, sr_std, height): + + orbits = sar_inference_data.relorbit[sar_inference_data.time_mask] + + lais = [] + coefs = [] 
+ alphas = [] + sms = [] + ps = [] + times = [] + uorbits = np.unique(orbits) + segmentation_by_orbit = 1 + + if segmentation_by_orbit == 1: + for orbit in uorbits: + orbit_mask = orbits == orbit + ovv, ovh, olai, otheta, otime = vv[orbit_mask], vh[orbit_mask], lai[orbit_mask], theta[orbit_mask], time[orbit_mask] + osm, osm_std, osro, osro_std = sm[orbit_mask], sm_std[orbit_mask], sr[orbit_mask], sr_std[orbit_mask] + + oheight = height[orbit_mask] + + olai_std = np.ones_like(olai)*0.05 + + alpha = osm + alpha_std = osm_std + mv = alpha * 1 + coef = osro + # coef[:] = 0.5 + + # prior_mean = np.concatenate([alpha, coef, olai, oheight]) + # prior_unc = np.concatenate([alpha_std, osro_std, olai_std, oheight]) + # x0 = np.concatenate([mv, coef, olai, oheight]) + + # bounds = ( + # [[0.01, 0.5]] * olai.shape[0] + # + [[0.01, 1.5]] * olai.shape[0] + # + [[0, 8]] * olai.shape[0] + # + [[0, 1]] * olai.shape[0] + # ) + + prior_mean = np.concatenate([alpha,coef]) + prior_unc = np.concatenate([alpha_std,osro_std]) + x0 = np.concatenate([mv,coef]) + data = np.concatenate([oheight,olai]) + bounds = ( + [[0.1, 0.5]] * olai.shape[0] + + [[0.01, 1.5]] * olai.shape[0] + ) + + gamma = [500, 500] + + retval = minimize(cost_function2, + x0, + args=(ovh, ovv, otheta, gamma, prior_mean, prior_unc, data), + jac=True, + bounds = bounds, + options={"disp": False},) + + # posterious_lai = retval.x[2*len(olai) : 3*len(olai)] + posterious_coef = retval.x[len(olai) : +2*len(olai)] + posterious_mv = retval.x[ : +len(olai)] + # lais.append(posterious_lai) + coefs.append(posterious_coef) + sms.append(posterious_mv) + + times.append(otime) + + order = np.argsort(np.hstack(times)) + times = np.hstack(times )[order] + # lais = np.hstack(lais )[order] + lais=0 + coefs = np.hstack(coefs )[order] + # coefs=0 + sms = np.hstack(sms )[order].real + else: + ovv, ovh, olai, otheta, otime = vv, vh, lai, theta, time + osm, osm_std, osro, osro_std = sm, sm_std, sr, sr_std + + oheight = height + + olai_std = 
np.ones_like(olai)*0.05 + + alpha = osm + alpha_std = osm_std + mv = alpha * 1 + coef = osro + + prior_mean = np.concatenate([alpha,coef]) + prior_unc = np.concatenate([alpha_std,osro_std]) + x0 = np.concatenate([mv,coef]) + data = np.concatenate([oheight,olai]) + bounds = ( + [[0.1, 0.5]] * olai.shape[0] + + [[0.01, 1.5]] * olai.shape[0] + ) + + gamma = [500, 500] + + retval = minimize(cost_function2, + x0, + args=(ovh, ovv, otheta, gamma, prior_mean, prior_unc, data), + jac=True, + bounds = bounds, + options={"disp": False},) + + # posterious_lai = retval.x[2*len(olai) : 3*len(olai)] + posterious_coef = retval.x[len(olai) : +2*len(olai)] + posterious_mv = retval.x[ : +len(olai)] + # lais.append(posterious_lai) + coefs.append(posterious_coef) + sms.append(posterious_mv) + + times.append(otime) + + + order = np.argsort(np.hstack(times)) + times = np.hstack(times )[order] + # lais = np.hstack(lais )[order] + lais=0 + coefs = np.hstack(coefs )[order] + # coefs=0 + sms = np.hstack(sms )[order].real + + return times, lais, coefs, sms + +def do_inversion(sar_inference_data, prior, state_mask, segment=False): + + orbits = sar_inference_data.relorbit[sar_inference_data.time_mask] + uorbits = np.unique(orbits) + if segment: + out_shape = sar_inference_data.lai[sar_inference_data.time_mask].shape + lai_outputs = np.zeros(out_shape ) + sm_outputs = np.zeros(out_shape ) + coef_outputs = np.zeros(out_shape ) + + fields = np.unique(sar_inference_data.fields)[1:] + for field in fields: + + # get per field data + # with time mask as well + field_mask = sar_inference_data.fields == field + time = np.array(sar_inference_data.time)[sar_inference_data.time_mask] + + lai = sar_inference_data.lai[sar_inference_data.time_mask][:, field_mask] + + sm = prior.sm_prior[sar_inference_data.time_mask][:, field_mask] + sm_std= prior.sm_std [sar_inference_data.time_mask][:, field_mask] + + sm[np.isnan(sm)] = 0.2 + sm_std[sm_std==0] = 0.5 + sm_std[np.isnan(sm_std)] = 0.5 + + coef = 
prior.sr_prior[sar_inference_data.time_mask][:, field_mask] + coef_std= prior.sr_std [sar_inference_data.time_mask][:, field_mask] + + height = prior.sm_prior[sar_inference_data.time_mask][:, field_mask] + height[:] = 0.1 + + # coef[:] = 0.2 + coef_std[:] = 0.5 + + coef[np.isnan(coef)] = 0.1 + coef_std[np.isnan(coef_std)] = 0.5 + + vv = sar_inference_data.vv.ReadAsArray()[sar_inference_data.time_mask][:, field_mask] + vh = sar_inference_data.vh.ReadAsArray()[sar_inference_data.time_mask][:, field_mask] + theta = sar_inference_data.ang.ReadAsArray()[sar_inference_data.time_mask][:, field_mask] + + + for jj in range(len(time)): + time[jj] = time[jj].replace(microsecond=0).replace(second=0).replace(minute=0).replace(hour=0) + + start_date = pd.to_datetime(add_data.index)[0].to_pydatetime().replace(microsecond=0).replace(second=0).replace(minute=0).replace(hour=0) + end_date = pd.to_datetime(add_data.index)[-1].to_pydatetime().replace(microsecond=0).replace(second=0).replace(minute=0).replace(hour=0) + if field == 1: + add_lai = add_data.filter(like='LAI_insitu').filter(like='301_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + add_coef = add_data.filter(like='coef').filter(like='301_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + add_height = add_data.filter(like='height').filter(like='301_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + elif field == 4: + add_lai = add_data.filter(like='LAI_insitu').filter(like='542_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + add_coef = add_data.filter(like='coef').filter(like='542_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + add_height = add_data.filter(like='height').filter(like='542_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + elif field == 5: + add_lai = 
add_data.filter(like='LAI_insitu').filter(like='508_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + add_coef = add_data.filter(like='coef').filter(like='508_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + add_height = add_data.filter(like='height').filter(like='508_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + else: + pass + # elif field == 3: + # add_lai = add_data.filter(like='LAI_insitu').filter(like='515_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + # add_coef = add_data.filter(like='coef').filter(like='515_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + # else: + # add_lai = add_data.filter(like='LAI_insitu').filter(like='319_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + # add_coef = add_data.filter(like='coef').filter(like='319_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + + + lai = np.nanmean(lai, axis=1) + lai[(start_date <= time) & (end_date >= time)] = add_lai + vv = np.nanmean(vv, axis=1) + vh = np.nanmean(vh, axis=1) + theta = np.nanmean(theta, axis=1) + + sm = np.nanmean(sm, axis=1) + sm_std = np.nanmean(sm_std, axis=1) + + coef = np.nanmean(coef, axis=1) + coef[(start_date <= time) & (end_date >= time)] = add_coef + + coef_std = np.nanmean(coef_std, axis=1) + + height = coef + 1 + height[(start_date <= time) & (end_date >= time)] = add_height + + vv = np.maximum(vv, 0.0001) + vv = 10 * np.log10(vv) + vh = np.maximum(vh, 0.0001) + vh = 10 * np.log10(vh) + + times, lais, coefs, sms = do_one_pixel_field(sar_inference_data, vv, vh, lai, theta, time, sm, sm_std, coef, coef_std, height) + + lai_outputs[:, field_mask] = sms[...,None] + + coef_outputs[:, field_mask] = coefs [...,None] + sm_outputs[:, field_mask] = sms [...,None] + + else: + mask = gdal.Open(state_mask).ReadAsArray() + xs, ys = np.where(mask) + + out_shape = 
sar_inference_data.lai[sar_inference_data.time_mask].shape + time = np.array(sar_inference_data.time)[sar_inference_data.time_mask] + lai_outputs = np.zeros(out_shape ) + sm_outputs = np.zeros(out_shape ) + coef_outputs = np.zeros(out_shape ) + + for i in range(len(xs)): + indx, indy = xs[i], ys[i] + + # field_mask = slice(None, None), slice(indx, indx+1), slice(indy, indy+1) + time = np.array(sar_inference_data.time)[sar_inference_data.time_mask] + lai = sar_inference_data.lai[sar_inference_data.time_mask][:, indx, indy ] + + sm = prior.sm_prior[sar_inference_data.time_mask][:, indx, indy ] + sm_std= prior.sm_std [sar_inference_data.time_mask][:, indx, indy ] + + sm[np.isnan(sm)] = 0.2 + sm_std[sm_std==0] = 0.5 + sm_std[np.isnan(sm_std)] = 0.5 + + coef = prior.sr_prior[sar_inference_data.time_mask][:, indx, indy ] + coef_std= prior.sr_std [sar_inference_data.time_mask][:, indx, indy ] + sr[np.isnan(sr)] = 0.1 + sr_std[np.isnan(sr_std)] = 0.5 + + height = prior.sr_prior[sar_inference_data.time_mask][:, indx, indy ] + height[:] = 0.1 + + vv = sar_inference_data.vv.ReadAsArray()[sar_inference_data.time_mask][:, indx, indy ] + vh = sar_inference_data.vh.ReadAsArray()[sar_inference_data.time_mask][:, indx, indy ] + theta = sar_inference_data.ang.ReadAsArray()[sar_inference_data.time_mask][:, indx, indy ] + + + vv = np.maximum(vv, 0.0001) + vv = 10 * np.log10(vv) + vh = np.maximum(vh, 0.0001) + vh = 10 * np.log10(vh) + + times, lais, coefs, sms = do_one_pixel_field(sar_inference_data, vv, vh, lai, theta, time, sm, sm_std, coef, coef_std, height) + + lai_outputs[:, indx, indy] = lais + coef_outputs[:, indx, indy] = coefs + sm_outputs[:, indx, indy] = sms + + return lai_outputs, coef_outputs, sm_outputs, uorbits + +def save_output(fname, Array, GeoT, projction, time): + if os.path.exists(fname): + os.remove(fname) + ds = gdal.GetDriverByName('GTiff').Create(fname, Array.shape[2], Array.shape[1], Array.shape[0], gdal.GDT_Float32) + ds.SetGeoTransform(GeoT) + 
ds.SetProjection(projction) + for i, image in enumerate(Array): + ds.GetRasterBand(i+1).SetMetadata({'date': time[i]}) + ds.GetRasterBand(i+1).WriteArray( image ) + ds.FlushCache() + return fname + +def save_ps_output(fname, Array, GeoT, projction, orbit): + if os.path.exists(fname): + os.remove(fname) + ds = gdal.GetDriverByName('GTiff').Create(fname, Array.shape[2], Array.shape[1], Array.shape[0], gdal.GDT_Float32) + ds.SetGeoTransform(GeoT) + ds.SetProjection(projction) + for i, image in enumerate(Array): + ds.GetRasterBand(i+1).SetMetadata({'orbit': str(int(orbit[i]))}) + ds.GetRasterBand(i+1).WriteArray( image ) + ds.FlushCache() + return fname + + + +class KaSKASAR(object): + """A class to process Sentinel 1 SAR data using S2 data as + an input""" + + def __init__(self, s1_ncfile, state_mask, s2_lai, s2_cab, s2_cbrown, sm_prior, sm_std, sr_prior ,sr_std,orbit1=None,orbit2=None): + self.s1_ncfile = s1_ncfile + self.state_mask = state_mask + self.s2_lai = s2_lai + self.s2_cab = s2_cab + self.s2_cbrown = s2_cbrown + self.sm_prior = sm_prior + self.sm_std = sm_std + self.sr_prior = sr_prior + self.sr_std = sr_std + self.orbit1 = None + self.orbit2 = None + if orbit1 != None: + self.orbit1 = orbit1 + if orbit2 != None: + self.orbit2 = orbit2 + + def sentinel1_inversion(self, segment=False): + sar = get_sar(s1_ncfile) + s1_data = read_sar(sar, self.state_mask) + s2_data = read_s2_lai(self.s2_lai, self.s2_cab, self.s2_cbrown, self.state_mask) + prior = get_prior(s1_data, self.sm_prior, self.sm_std, self.sr_prior, self.sr_std, self.state_mask) + sar_inference_data = inference_preprocessing(s1_data, s2_data, self.state_mask,self.orbit1,self.orbit2) + + lai_outputs, sr_outputs, sm_outputs, uorbits = do_inversion(sar_inference_data, prior, self.state_mask, segment) + + gg = gdal.Open('NETCDF:"%s":sigma0_vv_multi'%self.s1_ncfile) + geo = gg.GetGeoTransform() + + projction = 'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 
84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]' + + time = [i.strftime('%Y-%m-%d') for i in np.array(sar_inference_data.time)[sar_inference_data.time_mask]] + + sm_name = self.s1_ncfile.replace('.nc', '_sar_sm.tif') + sr_name = self.s1_ncfile.replace('.nc', '_sar_sr.tif') + lai_name = self.s1_ncfile.replace('.nc', '_sar_lai.tif') + + save_output(sm_name, sm_outputs, geo, projction, time) + save_output(sr_name, sr_outputs, geo, projction, time) + save_output(lai_name, lai_outputs, geo, projction, time) + + + + +if __name__ == '__main__': + # s1_ncfile = '/data/nemesis/kaska-sar_quick/S1_LMU_site_2017_new.nc' + # state_mask = "/home/ucfajlg/Data/python/KaFKA_Validation/LMU/carto/ESU.tif" + # s2_folder = "/home/ucfajlg/Data/python/KaFKA_Validation/LMU/s2_obs/" + # s2_lai = f"{s2_folder:s}/outputs/lai.tif" + # s2_cab = f"{s2_folder:s}/outputs/cab.tif" + # s2_cbrown = f"{s2_folder:s}/outputs/cbrown.tif" + + # sm_prior = '/data/nemesis/kaska-sar_quick/sm_prior.tif' + # sm_std = '/data/nemesis/kaska-sar_quick/sm_std.tif' + # sr_prior = '/data/nemesis/kaska-sar_quick/sr_prior.tif' + # sr_std = '/data/nemesis/kaska-sar_quick/sr_std.tif' + # sarsar = KaSKASAR(s1_ncfile, state_mask, s2_lai, s2_cab, s2_cbrown, sm_prior, sm_std, sr_prior ,sr_std) + + # s1_ncfile = '/media/nas_data/Thomas/S1/processed/MNI_2017/MNI_2017.nc' + + # aggregation = '_point' + aggregation = '_Field_buffer_30' + # aggregation = '_buffer_100' + aggregation = '_buffer_50' + + + s1_ncfile = '/media/tweiss/Daten/data_AGU/'+aggregation+'/MNI_2017_new_final.nc' + state_mask = '/media/tweiss/Work/z_final_mni_data_2017/ESU'+aggregation+'.tif' + s2_folder = "/media/tweiss/Daten/test_kaska/data/" + s2_lai = f"{s2_folder:s}/lai.tif" + s2_cab = f"{s2_folder:s}/cab.tif" + s2_cbrown = f"{s2_folder:s}/cbrown.tif" + + sm_prior = f'{s2_folder:s}/sm_prior.tif' 
+ sm_std = f'{s2_folder:s}/sm_std.tif' + sr_prior = f'{s2_folder:s}/sr_prior.tif' + sr_std = f'{s2_folder:s}/sr_std.tif' + + sarsar = KaSKASAR(s1_ncfile, state_mask, s2_lai, s2_cab, s2_cbrown, sm_prior, sm_std, sr_prior ,sr_std) + + csv_output_path = '/media/tweiss/Work/paper2/z_dense_s1_time_series_n7multi_Field_buffer_30/csv/' + + add_data = pd.read_csv(csv_output_path+'all_50.csv',header=[0,1,2,3,4,5],index_col=0) + + sarsar.sentinel1_inversion(True) + diff --git a/kaska/kaska_sar_ssrt_vwc.py b/kaska/kaska_sar_ssrt_vwc.py new file mode 100644 index 0000000..4ad9dd7 --- /dev/null +++ b/kaska/kaska_sar_ssrt_vwc.py @@ -0,0 +1,918 @@ +#!/usr/bin/env python + +import os +import osr +import gdal +import datetime +import numpy as np +from netCDF4 import Dataset +from scipy.ndimage import label +from utils import reproject_data +from skimage.filters import sobel +from collections import namedtuple +from scipy.optimize import minimize +from scipy.interpolate import interp1d +# from watercloudmodel import cost_function +from watercloudmodel_vwc import cost_function_vwc +from scipy.ndimage.filters import gaussian_filter1d +import pdb +from z_helper import * +import matplotlib.pyplot as plt + +def save_to_tif(fname, Array, GeoT): + if os.path.exists(fname): + os.remove(fname) + ds = gdal.GetDriverByName('GTiff').Create(fname, Array.shape[2], Array.shape[1], Array.shape[0], gdal.GDT_Float32) + ds.SetGeoTransform(GeoT) + wkt = 'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]' + ds.SetProjection(wkt) + for i, image in enumerate(Array): + # ds.GetRasterBand(i+1).SetMetadata({'date': prior_time[i]}) + ds.GetRasterBand(i+1).WriteArray( image ) + ds.FlushCache() + return fname + +def get_sar(s1_nc_file): + s1_data = namedtuple('s1_data', 'time lat lon satellite relorbit 
orbitdirection ang_name vv_name, vh_name') + data = Dataset(s1_nc_file) + relorbit = data['relorbit'][:] + localIncidenceAngle = data['localIncidenceAngle'][:] + satellite = data['satellite'][:] + orbitdirection = data['orbitdirection'][:] + time = data['time'][:] + lat = data['lat'][:] + lon = data['lon'][:] + + vv_name = s1_nc_file.replace('.nc', '_vv.tif') + vh_name = s1_nc_file.replace('.nc', '_vh.tif') + ang_name = s1_nc_file.replace('.nc', '_ang.tif') + if not os.path.exists(vv_name): + gg = gdal.Open('NETCDF:"%s":sigma0_vv_multi'%s1_nc_file) + geo = gg.GetGeoTransform() + sigma0_vv_norm_multi = data['sigma0_vv_multi'][:] + save_to_tif(vv_name, sigma0_vv_norm_multi, geo) + + if not os.path.exists(vh_name): + gg = gdal.Open('NETCDF:"%s":sigma0_vh_multi'%s1_nc_file) + geo = gg.GetGeoTransform() + sigma0_vh_norm_multi = data['sigma0_vh_multi'][:] + save_to_tif(vh_name, sigma0_vh_norm_multi, geo) + + if not os.path.exists(ang_name): + gg = gdal.Open('NETCDF:"%s":localIncidenceAngle'%s1_nc_file) + geo = gg.GetGeoTransform() + localIncidenceAngle = data['localIncidenceAngle'][:] + save_to_tif(ang_name, localIncidenceAngle, geo) + + return s1_data(time, lat, lon, satellite, relorbit, orbitdirection, ang_name, vv_name, vh_name) + +def read_sar(sar_data, state_mask): + s1_data = namedtuple('s1_data', 'time lat lon satellite relorbit orbitdirection ang vv vh') + + ang = reproject_data(sar_data.ang_name, output_format="MEM", target_img=state_mask) + vv = reproject_data(sar_data.vv_name, output_format="MEM", target_img=state_mask) + vh = reproject_data(sar_data.vh_name, output_format="MEM", target_img=state_mask) + + time = [datetime.datetime(1970,1,1) + datetime.timedelta(days=float(i)) for i in sar_data.time] + return s1_data(time, sar_data.lat, sar_data.lon, sar_data.satellite, sar_data.relorbit, sar_data.orbitdirection, ang, vv, vh) + +def read_s2_lai(s2_lai, s2_cab, s2_cbrown, state_mask): + s2_data = namedtuple('s2_lai', 'time lai cab cbrown') + g = 
gdal.Open(s2_lai) + time = [] + for i in range(g.RasterCount): + gg = g.GetRasterBand(i+1) + meta = gg.GetMetadata() + time.append(datetime.datetime.strptime(meta['DoY'], '%Y%j')) + lai = reproject_data(s2_lai, output_format="MEM", target_img=state_mask) + cab = reproject_data(s2_cab, output_format="MEM", target_img=state_mask) + cbrown = reproject_data(s2_cbrown, output_format="MEM", target_img=state_mask) + return s2_data(time, lai, cab, cbrown) + +def inference_preprocessing(s1_data, s2_data, state_mask, orbit1=None, orbit2=None): + """Resample S2 smoothed output to match S1 observations + times""" + # Move everything to DoY to simplify interpolation + + sar_inference_data = namedtuple('sar_inference_data', 'time lat lon satellite relorbit orbitdirection ang vv vh lai cab cbrown time_mask fields') + + + s2_doys = np.array([ int(i.strftime('%j')) for i in s2_data.time]) + s1_doys = np.array([ int(i.strftime('%j')) for i in s1_data.time]) + # s1_doys = s1_doys[:112] + # pdb.set_trace() + + time_mask = (s1_doys >= s2_doys.min()) & (s1_doys <= s2_doys.max()) + if orbit1 != None: + rel_orbit1 = s1_data.relorbit==orbit1 + if orbit2 != None: + rel_orbit2 = s1_data.relorbit==orbit2 + xxx = np.logical_and(rel_orbit1,time_mask) + yyy = np.logical_and(rel_orbit2,time_mask) + time_mask = np.logical_or(xxx,yyy) + + f = interp1d(s2_doys, s2_data.lai.ReadAsArray(), axis=0, bounds_error=False) + lai_s1 = f(s1_doys) + f = interp1d(s2_doys, s2_data.cab.ReadAsArray(), axis=0, bounds_error=False) + cab_s1 = f(s1_doys) + f = interp1d(s2_doys, s2_data.cbrown.ReadAsArray(), axis=0, bounds_error=False) + cbrown_s1 = f(s1_doys) + # segmentation + lai_max = np.nanmax(s2_data.lai.ReadAsArray(), axis=0) + patches = sobel(lai_max)>0.001 + fields = label(patches)[0] + + + g = gdal.Open(state_mask) + gg = g.GetRasterBand(1) + ggg = gg.ReadAsArray() + fields[ggg==0]=0 + sar_inference_data = sar_inference_data(s1_data.time, s1_data.lat, s1_data.lon, + s1_data.satellite, s1_data.relorbit, + 
s1_data.orbitdirection, s1_data.ang, + s1_data.vv, s1_data.vh, lai_s1, cab_s1, cbrown_s1, time_mask, fields) + + return sar_inference_data + + +def get_prior(s1_data, soilMoisture, soilMoisture_std, soilRoughness, soilRoughness_std, state_mask): + # this is the function to reading the soil moisture prior + # and the soil roughness prior using the satemask + # the assumption of inputs are daily data in geotifs + prior = namedtuple('prior', 'time sm_prior sm_std sr_prior sr_std') + + g = gdal.Open(soilMoisture) + time = [] + for i in range(g.RasterCount): + gg = g.GetRasterBand(i+1) + meta = gg.GetMetadata() + time.append(datetime.datetime.strptime(meta['date'], '%Y-%m-%d')) + sm_prior = reproject_data(soilMoisture, output_format="MEM", target_img=state_mask) + sm_std = reproject_data(soilMoisture_std, output_format="MEM", target_img=state_mask) + sr_prior = reproject_data(soilRoughness, output_format="MEM", target_img=state_mask) + sr_std = reproject_data(soilRoughness_std,output_format="MEM", target_img=state_mask) + + prior_doy = np.array([ int(i.strftime('%j')) for i in time]) + s1_doys = np.array([ int(i.strftime('%j')) for i in s1_data.time]) + + f = interp1d(prior_doy, sm_prior.ReadAsArray(), axis=0, bounds_error=False) + + sm_s1 = f(s1_doys) + f = interp1d(prior_doy, sm_std.ReadAsArray(), axis=0, bounds_error=False) + sm_std_s1 = f(s1_doys) + + f = interp1d(prior_doy, sr_prior.ReadAsArray(), axis=0, bounds_error=False) + sr_s1 = f(s1_doys) + f = interp1d(prior_doy, sr_std.ReadAsArray(), axis=0, bounds_error=False) + sr_std_s1 = f(s1_doys) + + return prior(time, sm_s1, sm_std_s1, sr_s1, sr_std_s1) + +# def fresnel(eps, theta): +# theta = np.deg2rad(theta) +# num = (eps-1)*(np.sin(theta)**2 - eps*(1+np.sin(theta)**2)) +# den = eps*np.cos(theta) + np.sqrt(eps - np.sin(theta)**2) +# den = den**2 +# return np.abs(num/den) + +# def mv2eps(a, b, c, mv): +# eps = a + b * mv + c * mv**2 +# return eps + +# def quad_approx_solver(a, b, c, theta, alphas): +# x = 
np.arange(0.01, 0.5, 0.01) +# p = np.polyfit(x, fresnel(mv2eps(a, b, c, x),theta.mean()), 2) +# # 2nd order polynomial +# #solve +# solutions = [np.roots([p[0], p[1], p[2]-aa]) for aa in alphas] +# return solutions + + +# def do_one_pixel_field(sar_inference_data, vv, vh, lai, theta, time, sm, sm_std, sr, sr_std, height): + +# orbits = sar_inference_data.relorbit[sar_inference_data.time_mask] + +# lais = [] +# coefs = [] +# alphas = [] +# sms = [] +# ps = [] +# times = [] +# uorbits = np.unique(orbits) +# segmentation_by_orbit = 1 + +# if segmentation_by_orbit == 1: +# for orbit in uorbits: +# orbit_mask = orbits == orbit +# ovv, ovh, olai, otheta, otime = vv[orbit_mask], vh[orbit_mask], lai[orbit_mask], theta[orbit_mask], time[orbit_mask] +# osm, osm_std, osro, osro_std = sm[orbit_mask], sm_std[orbit_mask], sr[orbit_mask], sr_std[orbit_mask] + +# oheight = height[orbit_mask] + +# olai_std = np.ones_like(olai)*0.05 + +# alpha = osm +# alpha_std = osm_std +# mv = alpha * 1 +# coef = osro +# # coef[:] = 0.5 + +# # prior_mean = np.concatenate([alpha, coef, olai, oheight]) +# # prior_unc = np.concatenate([alpha_std, osro_std, olai_std, oheight]) +# # x0 = np.concatenate([mv, coef, olai, oheight]) + +# # bounds = ( +# # [[0.01, 0.5]] * olai.shape[0] +# # + [[0.01, 1.5]] * olai.shape[0] +# # + [[0, 8]] * olai.shape[0] +# # + [[0, 1]] * olai.shape[0] +# # ) + +# prior_mean = np.concatenate([alpha,coef]) +# prior_unc = np.concatenate([alpha_std,osro_std]) +# x0 = np.concatenate([mv,coef]) +# data = np.concatenate([oheight,olai]) +# bounds = ( +# [[0.1, 0.5]] * olai.shape[0] +# + [[0.01, 1.5]] * olai.shape[0] +# ) + +# gamma = [500, 500] + +# retval = minimize(cost_function2, +# x0, +# args=(ovh, ovv, otheta, gamma, prior_mean, prior_unc, data), +# jac=True, +# bounds = bounds, +# options={"disp": False},) + +# # posterious_lai = retval.x[2*len(olai) : 3*len(olai)] +# posterious_coef = retval.x[len(olai) : +2*len(olai)] +# posterious_mv = retval.x[ : +len(olai)] +# # 
lais.append(posterious_lai) +# coefs.append(posterious_coef) +# sms.append(posterious_mv) + +# times.append(otime) + +# order = np.argsort(np.hstack(times)) +# times = np.hstack(times )[order] +# # lais = np.hstack(lais )[order] +# lais=0 +# coefs = np.hstack(coefs )[order] +# # coefs=0 +# sms = np.hstack(sms )[order].real +# else: +# ovv, ovh, olai, otheta, otime = vv, vh, lai, theta, time +# osm, osm_std, osro, osro_std = sm, sm_std, sr, sr_std + +# oheight = height + +# olai_std = np.ones_like(olai)*0.05 + +# alpha = osm +# alpha_std = osm_std +# mv = alpha * 1 +# coef = osro + +# prior_mean = np.concatenate([alpha,coef]) +# prior_unc = np.concatenate([alpha_std,osro_std]) +# x0 = np.concatenate([mv,coef]) +# data = np.concatenate([oheight,olai]) +# bounds = ( +# [[0.1, 0.5]] * olai.shape[0] +# + [[0.01, 1.5]] * olai.shape[0] +# ) + +# gamma = [500, 500] + +# retval = minimize(cost_function2, +# x0, +# args=(ovh, ovv, otheta, gamma, prior_mean, prior_unc, data), +# jac=True, +# bounds = bounds, +# options={"disp": False},) + +# # posterious_lai = retval.x[2*len(olai) : 3*len(olai)] +# posterious_coef = retval.x[len(olai) : +2*len(olai)] +# posterious_mv = retval.x[ : +len(olai)] +# # lais.append(posterious_lai) +# coefs.append(posterious_coef) +# sms.append(posterious_mv) + +# times.append(otime) + + +# order = np.argsort(np.hstack(times)) +# times = np.hstack(times )[order] +# # lais = np.hstack(lais )[order] +# lais=0 +# coefs = np.hstack(coefs )[order] +# # coefs=0 +# sms = np.hstack(sms )[order].real + +# return times, lais, coefs, sms + +def do_one_pixel_field(data_field, vv, vh, vwc, theta, time, sm, sm_std, b, b_std, omega, rms, orbits, unc): + + ps = [] + vwcs = [] + bs = [] + sms = [] + times = [] + + uorbits = np.unique(orbits) + uorbits = np.array([95]) + for orbit in uorbits: + # for jj in range(len(vv)): + # pdb.set_trace() + # orbit_mask = orbits == orbit + # orbit_mask = (orbits == 44) | (orbits == 168) + orbit_mask = (orbits == 44) | (orbits == 
95) | (orbits == 117) | (orbits == 168) + # orbit_mask = (orbits == 168) + # orbit_mask = (orbits == 44) | (orbits == 95) | (orbits == 117) + ovv, ovh, ovwc, otheta, otime = vv[orbit_mask], vh[orbit_mask], vwc[orbit_mask], theta[orbit_mask], time[orbit_mask] + osm, osm_std, osb, osb_std = sm[orbit_mask], sm_std[orbit_mask], b[orbit_mask], b_std[orbit_mask] + + + ovwc_std = np.ones_like(osb)*0.5 + + # alpha = _calc_eps(osm) + # alpha = osm + # alpha_std = np.ones_like(alpha)*10 + # alpha_std = osm_std + # pdb.set_trace() + + prior_mean = np.concatenate([[0, ]*2, osm, ovwc, osb]) + prior_unc = np.concatenate([[10., ]*2, osm_std, ovwc_std, osb_std]) + + xvv = np.array([rms, omega]) + + + x0 = np.concatenate([xvv, osm, ovwc, osb]) + + bounds = ( + [[0.013, 0.013]] # s + + [[0.0107, 0.0107]] # omega + + [[0.01, 0.7]] * osb.shape[0] # mv + + [[0, 7.5]] * osb.shape[0] # vwc + + [[0.01, 0.6]] * osb.shape[0] # b + ) + + + gamma = [500, 500] + + retval = minimize(cost_function_vwc, + x0, + args=(ovh, ovv, otheta, gamma, prior_mean, prior_unc, unc), + jac=True, + bounds = bounds, + options={"disp": False},) + + + posterious_sm = retval.x[2 : 2+len(osb)] + posterious_vwc = retval.x[2+len(osb) : 2+2*len(osb)] + posterious_b = retval.x[2+2*len(osb) : 2+3*len(osb)] + + sms.append(posterious_sm) + vwcs.append(posterious_vwc) + bs.append(posterious_b) + times.append(otime) + ps.append(retval.x[:2]) + + order = np.argsort(np.hstack(times)) + times = np.hstack(times )[order] + vwcs = np.hstack(vwcs )[order] + bs = np.hstack(bs )[order] + sms = np.hstack(sms )[order].real + return times, vwcs, bs, sms, np.array(ps), orbit_mask + + +def do_inversion(sar_inference_data, prior, state_mask, segment=False): + + orbits = sar_inference_data.relorbit[sar_inference_data.time_mask] + uorbits = np.unique(orbits) + if segment: + out_shape = sar_inference_data.lai[sar_inference_data.time_mask].shape + lai_outputs = np.zeros(out_shape ) + sm_outputs = np.zeros(out_shape ) + coef_outputs = 
np.zeros(out_shape ) + + fields = np.unique(sar_inference_data.fields)[1:] + # pdb.set_trace() + pixel = ['_Field_buffer_30','','_buffer_30','_buffer_50','_buffer_100'] + pixel = ['_Field_buffer_30'] + fields = ['301','508','542'] + fields = ['542'] + # ESU names + esus = ['high', 'low', 'med', 'mean'] + esus = ['mean'] + for pixels in pixel: + print(pixels) + path_ESU = '/media/tweiss/Work/z_final_mni_data_2017/' + name_shp = 'ESU'+pixels+'.shp' + name_ESU = 'ESU'+pixels+'.tif' + + for esu in esus: + for field in fields: + field2 = field + '_' + esu + g = gdal.Open(os.path.join(path_ESU, name_ESU)) + state_mask = g.ReadAsArray().astype(np.int) + + if pixels == '_Field_buffer_30': + if field == '515': + mask_value = 4 + state_mask = state_mask==mask_value + elif field == '508': + mask_value = 27 + state_mask = state_mask==mask_value + elif field == '542': + mask_value = 8 + state_mask = state_mask==mask_value + elif field == '319': + mask_value = 67 + state_mask = state_mask==mask_value + elif field == '301': + mask_value = 87 + state_mask = state_mask==mask_value + else: + if field == '515' and esu == 'high': + mask_value = 1 + state_mask = state_mask==mask_value + elif field == '515' and esu == 'med': + mask_value = 2 + state_mask = state_mask==mask_value + elif field == '515' and esu == 'low': + mask_value = 3 + state_mask = state_mask==mask_value + elif field == '508' and esu == 'high': + mask_value = 4 + state_mask = state_mask==mask_value + elif field == '508' and esu == 'med': + mask_value = 5 + state_mask = state_mask==mask_value + elif field == '508' and esu == 'low': + mask_value = 6 + state_mask = state_mask==mask_value + elif field == '542' and esu == 'high': + mask_value = 7 + state_mask = state_mask==mask_value + elif field == '542' and esu == 'med': + mask_value = 8 + state_mask = state_mask==mask_value + elif field == '542' and esu == 'low': + mask_value = 9 + state_mask = state_mask==mask_value + elif field == '319' and esu == 'high': + mask_value 
= 10 + state_mask = state_mask==mask_value + elif field == '319' and esu == 'med': + mask_value = 11 + state_mask = state_mask==mask_value + elif field == '319' and esu == 'low': + mask_value = 12 + state_mask = state_mask==mask_value + elif field == '301' and esu == 'high': + mask_value = 13 + state_mask = state_mask==mask_value + elif field == '301' and esu == 'med': + mask_value = 14 + state_mask = state_mask==mask_value + elif field == '301' and esu == 'low': + mask_value = 15 + state_mask = state_mask==mask_value + elif field == '515' and esu == 'mean': + m = np.ma.array(state_mask,mask=((state_mask==1) | (state_mask==2) | (state_mask==3))) + state_mask = m.mask + elif field == '508' and esu == 'mean': + m = np.ma.array(state_mask,mask=((state_mask==4) | (state_mask==5) | (state_mask==6))) + state_mask = m.mask + elif field == '542' and esu == 'mean': + m = np.ma.array(state_mask,mask=((state_mask==7) | (state_mask==8) | (state_mask==9))) + state_mask = m.mask + elif field == '319' and esu == 'mean': + m = np.ma.array(state_mask,mask=((state_mask==10) | (state_mask==11) | (state_mask==12))) + state_mask = m.mask + elif field == '301' and esu == 'mean': + m = np.ma.array(state_mask,mask=((state_mask==13) | (state_mask==14) | (state_mask==15))) + state_mask = m.mask + + + # get per field data + # with time mask as well + # field_mask2 = sar_inference_data.fields == field + field_mask = state_mask + + pre_processing = ['multi'] + aggregation = ['_buffer_100'] + canopy_list = ['turbid_isotropic'] + surface_list = ['Oh04'] + opt_mod = ['time_variant'] + + for p in pre_processing: + + for pp in aggregation: + + versions = ['','everything'] + ver = ['',''] + ver2 = ['',''] + ver3 = ['',''] + + for i, ii in enumerate(versions): + + if ii == 'everything': + orbit_list = [None] + orbit1=None + orbit2=None + orbit3=None + orbit4=None + plot_output_path = '/media/tweiss/Work/paper3/z_dense_s1_time_series_n1'+p+pp+'_all'+'/' + csv_output_path = plot_output_path+'csv/None_' 
+ elif ii == '': + orbit_list = [44,117,95,168] + orbit2=None + orbit3=None + orbit4=None + plot_output_path = '/media/tweiss/Work/paper3/z_dense_s1_time_series_n1'+p+pp+'/' + csv_output_path = plot_output_path+'csv/' + else: + plot_output_path = '/media/tweiss/Work/paper3/z_dense_s1_time_series_n1'+p+pp+'_'+ii+'/' + csv_output_path = plot_output_path+'csv/'+ver[i]+'_'+ver[i]+'_' + orbit_list = [int(ver[i])] + orbit2 = int(ver2[i]) + if ver3[i] == '': + orbit3 = None + else: + orbit3 = int(ver3[i]) + + + data = pd.read_csv(csv_output_path+'all_50.csv',header=[0,1,2,3,4,5],index_col=0) + for kkk in opt_mod: + for k in surface_list: + for kk in canopy_list: + ################### + field3 = field2[0:4]+'high' + data_field =data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=field3) + ######################### + data_field.index = pd.to_datetime(data_field.index) + date = data_field.index + time = date + time2 = np.array(time) + for jj in range(len(time)): + time2[jj] = time[jj].replace(microsecond=0).replace(second=0).replace(minute=0) + time2 = pd.to_datetime(time2) + + start_date = date[0].to_pydatetime() + end_date = date[-1].to_pydatetime() + drop_milli = sar_inference_data.time + for t in range(len(sar_inference_data.time)): + sar_inference_data.time[t] = sar_inference_data.time[t].replace(microsecond=0).replace(second=0).replace(minute=0) + index1 = sar_inference_data.time.index(start_date.replace(second=0).replace(minute=0)) + index2 = sar_inference_data.time.index(end_date.replace(second=0).replace(minute=0)) + + sar_inference_data.time_mask[:] = False + sar_inference_data.time_mask[index1:index2+1] = True + + api_data = pd.read_csv('/media/tweiss/Daten/data_AGU/api_sm.csv',header=[0],index_col=0) + api_data.index = pd.to_datetime(api_data.index) + api_sm = api_data.loc[time2].values.flatten() + sm = api_sm + sm_std = data_field.filter(like='SM_insitu').values.flatten() + sm_std[:] = 0.21 + time_s1 = 
np.array(sar_inference_data.time)[sar_inference_data.time_mask] + times1_2 = pd.to_datetime(time) + + + lai_all = sar_inference_data.lai[sar_inference_data.time_mask] + vv_all = sar_inference_data.vv.ReadAsArray()[sar_inference_data.time_mask] + vh_all = sar_inference_data.vh.ReadAsArray()[sar_inference_data.time_mask] + theta_all = sar_inference_data.ang.ReadAsArray()[sar_inference_data.time_mask] + + height = data_field.filter(like='height').values.flatten() + coef = data_field.filter(like='coef').values.flatten() + + coef_std = data_field.filter(like='SM_insitu').values.flatten() + coef_std[:] = 0.01 + + orbits = data_field.filter(like='relativeorbit').values.flatten() + + vwc = data_field.filter(like='VWC').values.flatten() + + unc = 1.5 + omega = 0.0107 + rms = 0.013 + + sm_retrieved = lai_all * np.nan + + for z in range(len(state_mask)): + for zz in range(len(state_mask[0])): + if state_mask[z,zz] == False: + pass + else: + vv = vv_all[:,z,zz] + vh = vh_all[:,z,zz] + # lai = lai_all[:,z,zz] + theta = theta_all[:,z,zz] + + # times, lais, coefs, sms, orbit_mask = do_one_pixel_field(data_field, vv, vh, lai, theta, time, sm, sm_std, coef, coef_std, height, orbits,unc=unc) + times, vwcs, bs, sms, ps, orbit_mask = do_one_pixel_field(data_field, vv, vh, vwc, theta, time, sm, sm_std, coef, coef_std, omega, rms, orbits,unc=unc) + sm_retrieved[:,z,zz] = sms + + + + for u in range(len(sm_retrieved)): + + fig = plt.gcf() + ax = fig.add_subplot(111) + + + # quadmesh = ax.imshow(sm_retrieved[u,0:100,200:250]) + # quadmesh = ax.imshow(sm_retrieved[u,650:750,400:500]) + quadmesh = ax.imshow(sm_retrieved[u,250:350,580:630]) + plt.colorbar(quadmesh) + quadmesh.set_clim(vmin=0.15, vmax=0.35) + + plt.savefig('/media/tweiss/Daten/data_AGU/test_kaska/down2/542_'+str(time[u])[:10], bbox_inches = 'tight') + plt.close() + + pdb.set_trace() + + + pdb.set_trace() + + + + + + # sm_prior + # coef + # height + # height_insitu = np.full([len(state_mask),len(state_mask[0])], np.nan) + + 
pdb.set_trace() + lai = np.nanmean(lai, axis=1) + cab = sar_inference_data.cab[sar_inference_data.time_mask][:, field_mask] + cab = np.nanmean(cab, axis=1) + cbrown = sar_inference_data.cbrown[sar_inference_data.time_mask][:, field_mask] + cbrown = np.nanmean(cbrown, axis=1) + + data = {'lai':lai, 'cab':cab, 'cbrown':cbrown} + + df = pd.DataFrame(data, index=time2) + df.to_csv('/media/tweiss/Daten/data_AGU/S2_'+field2+pixels+'.csv') + # pdb.set_trace() + + + + + + + + + pdb.set_trace() + # sm = prior.sm_prior[sar_inference_data.time_mask][:, field_mask] + # sm_std= prior.sm_std [sar_inference_data.time_mask][:, field_mask] + + # sm[np.isnan(sm)] = 0.2 + # sm_std[sm_std==0] = 0.5 + # sm_std[np.isnan(sm_std)] = 0.5 + + # coef = prior.sr_prior[sar_inference_data.time_mask][:, field_mask] + # coef_std= prior.sr_std [sar_inference_data.time_mask][:, field_mask] + + # height = prior.sm_prior[sar_inference_data.time_mask][:, field_mask] + # height[:] = 0.1 + + # # coef[:] = 0.2 + # coef_std[:] = 0.5 + + # coef[np.isnan(coef)] = 0.1 + # coef_std[np.isnan(coef_std)] = 0.5 + + # vv = sar_inference_data.vv.ReadAsArray()[sar_inference_data.time_mask][:, field_mask] + # vh = sar_inference_data.vh.ReadAsArray()[sar_inference_data.time_mask][:, field_mask] + # theta = sar_inference_data.ang.ReadAsArray()[sar_inference_data.time_mask][:, field_mask] + + + # for jj in range(len(time)): + # time[jj] = time[jj].replace(microsecond=0).replace(second=0).replace(minute=0).replace(hour=0) + + # start_date = pd.to_datetime(add_data.index)[0].to_pydatetime().replace(microsecond=0).replace(second=0).replace(minute=0).replace(hour=0) + # end_date = pd.to_datetime(add_data.index)[-1].to_pydatetime().replace(microsecond=0).replace(second=0).replace(minute=0).replace(hour=0) + # if field == 1: + # add_lai = add_data.filter(like='LAI_insitu').filter(like='301_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + # add_coef = 
add_data.filter(like='coef').filter(like='301_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + # add_height = add_data.filter(like='height').filter(like='301_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + # elif field == 4: + # add_lai = add_data.filter(like='LAI_insitu').filter(like='542_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + # add_coef = add_data.filter(like='coef').filter(like='542_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + # add_height = add_data.filter(like='height').filter(like='542_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + # elif field == 5: + # add_lai = add_data.filter(like='LAI_insitu').filter(like='508_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + # add_coef = add_data.filter(like='coef').filter(like='508_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + # add_height = add_data.filter(like='height').filter(like='508_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + # else: + # pass + # # elif field == 3: + # # add_lai = add_data.filter(like='LAI_insitu').filter(like='515_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + # # add_coef = add_data.filter(like='coef').filter(like='515_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + # # else: + # # add_lai = add_data.filter(like='LAI_insitu').filter(like='319_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + # # add_coef = add_data.filter(like='coef').filter(like='319_high').filter(like='Oh04').filter(like='turbid_isotropic').values.flatten() + + + # lai = np.nanmean(lai, axis=1) + # lai[(start_date <= time) & (end_date >= time)] = add_lai + # vv = np.nanmean(vv, axis=1) + # vh = np.nanmean(vh, axis=1) + # theta = np.nanmean(theta, axis=1) + + # sm = np.nanmean(sm, axis=1) 
+ # sm_std = np.nanmean(sm_std, axis=1) + + # coef = np.nanmean(coef, axis=1) + # coef[(start_date <= time) & (end_date >= time)] = add_coef + + # coef_std = np.nanmean(coef_std, axis=1) + + # height = coef + 1 + # height[(start_date <= time) & (end_date >= time)] = add_height + + # vv = np.maximum(vv, 0.0001) + # vv = 10 * np.log10(vv) + # vh = np.maximum(vh, 0.0001) + # vh = 10 * np.log10(vh) + + # times, lais, coefs, sms = do_one_pixel_field(sar_inference_data, vv, vh, lai, theta, time, sm, sm_std, coef, coef_std, height) + + # lai_outputs[:, field_mask] = sms[...,None] + + # coef_outputs[:, field_mask] = coefs [...,None] + # sm_outputs[:, field_mask] = sms [...,None] + + else: + mask = gdal.Open(state_mask).ReadAsArray() + xs, ys = np.where(mask) + + out_shape = sar_inference_data.lai[sar_inference_data.time_mask].shape + time = np.array(sar_inference_data.time)[sar_inference_data.time_mask] + lai_outputs = np.zeros(out_shape ) + sm_outputs = np.zeros(out_shape ) + coef_outputs = np.zeros(out_shape ) + + for i in range(len(xs)): + indx, indy = xs[i], ys[i] + + # field_mask = slice(None, None), slice(indx, indx+1), slice(indy, indy+1) + time = np.array(sar_inference_data.time)[sar_inference_data.time_mask] + lai = sar_inference_data.lai[sar_inference_data.time_mask][:, indx, indy ] + + sm = prior.sm_prior[sar_inference_data.time_mask][:, indx, indy ] + sm_std= prior.sm_std [sar_inference_data.time_mask][:, indx, indy ] + + sm[np.isnan(sm)] = 0.2 + sm_std[sm_std==0] = 0.5 + sm_std[np.isnan(sm_std)] = 0.5 + + coef = prior.sr_prior[sar_inference_data.time_mask][:, indx, indy ] + coef_std= prior.sr_std [sar_inference_data.time_mask][:, indx, indy ] + sr[np.isnan(sr)] = 0.1 + sr_std[np.isnan(sr_std)] = 0.5 + + height = prior.sr_prior[sar_inference_data.time_mask][:, indx, indy ] + height[:] = 0.1 + + vv = sar_inference_data.vv.ReadAsArray()[sar_inference_data.time_mask][:, indx, indy ] + vh = sar_inference_data.vh.ReadAsArray()[sar_inference_data.time_mask][:, indx, 
indy ] + theta = sar_inference_data.ang.ReadAsArray()[sar_inference_data.time_mask][:, indx, indy ] + + + vv = np.maximum(vv, 0.0001) + vv = 10 * np.log10(vv) + vh = np.maximum(vh, 0.0001) + vh = 10 * np.log10(vh) + + times, lais, coefs, sms = do_one_pixel_field(sar_inference_data, vv, vh, lai, theta, time, sm, sm_std, coef, coef_std, height) + + lai_outputs[:, indx, indy] = lais + coef_outputs[:, indx, indy] = coefs + sm_outputs[:, indx, indy] = sms + + return lai_outputs, coef_outputs, sm_outputs, uorbits + +def save_output(fname, Array, GeoT, projction, time): + if os.path.exists(fname): + os.remove(fname) + ds = gdal.GetDriverByName('GTiff').Create(fname, Array.shape[2], Array.shape[1], Array.shape[0], gdal.GDT_Float32) + ds.SetGeoTransform(GeoT) + ds.SetProjection(projction) + for i, image in enumerate(Array): + ds.GetRasterBand(i+1).SetMetadata({'date': time[i]}) + ds.GetRasterBand(i+1).WriteArray( image ) + ds.FlushCache() + return fname + +def save_ps_output(fname, Array, GeoT, projction, orbit): + if os.path.exists(fname): + os.remove(fname) + ds = gdal.GetDriverByName('GTiff').Create(fname, Array.shape[2], Array.shape[1], Array.shape[0], gdal.GDT_Float32) + ds.SetGeoTransform(GeoT) + ds.SetProjection(projction) + for i, image in enumerate(Array): + ds.GetRasterBand(i+1).SetMetadata({'orbit': str(int(orbit[i]))}) + ds.GetRasterBand(i+1).WriteArray( image ) + ds.FlushCache() + return fname + + + +class KaSKASAR(object): + """A class to process Sentinel 1 SAR data using S2 data as + an input""" + + def __init__(self, s1_ncfile, state_mask, s2_lai, s2_cab, s2_cbrown, sm_prior, sm_std, sr_prior ,sr_std,orbit1=None,orbit2=None): + self.s1_ncfile = s1_ncfile + self.state_mask = state_mask + self.s2_lai = s2_lai + self.s2_cab = s2_cab + self.s2_cbrown = s2_cbrown + self.sm_prior = sm_prior + self.sm_std = sm_std + self.sr_prior = sr_prior + self.sr_std = sr_std + self.orbit1 = None + self.orbit2 = None + if orbit1 != None: + self.orbit1 = orbit1 + if orbit2 != 
None: + self.orbit2 = orbit2 + + def sentinel1_inversion(self, segment=False): + sar = get_sar(s1_ncfile) + s1_data = read_sar(sar, self.state_mask) + s2_data = read_s2_lai(self.s2_lai, self.s2_cab, self.s2_cbrown, self.state_mask) + prior = get_prior(s1_data, self.sm_prior, self.sm_std, self.sr_prior, self.sr_std, self.state_mask) + sar_inference_data = inference_preprocessing(s1_data, s2_data, self.state_mask,self.orbit1,self.orbit2) + + lai_outputs, sr_outputs, sm_outputs, uorbits = do_inversion(sar_inference_data, prior, self.state_mask, segment) + + gg = gdal.Open('NETCDF:"%s":sigma0_vv_multi'%self.s1_ncfile) + geo = gg.GetGeoTransform() + + projction = 'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]' + + time = [i.strftime('%Y-%m-%d') for i in np.array(sar_inference_data.time)[sar_inference_data.time_mask]] + + sm_name = self.s1_ncfile.replace('.nc', '_sar_sm.tif') + sr_name = self.s1_ncfile.replace('.nc', '_sar_sr.tif') + lai_name = self.s1_ncfile.replace('.nc', '_sar_lai.tif') + + save_output(sm_name, sm_outputs, geo, projction, time) + save_output(sr_name, sr_outputs, geo, projction, time) + save_output(lai_name, lai_outputs, geo, projction, time) + + + + +if __name__ == '__main__': + # s1_ncfile = '/data/nemesis/kaska-sar_quick/S1_LMU_site_2017_new.nc' + # state_mask = "/home/ucfajlg/Data/python/KaFKA_Validation/LMU/carto/ESU.tif" + # s2_folder = "/home/ucfajlg/Data/python/KaFKA_Validation/LMU/s2_obs/" + # s2_lai = f"{s2_folder:s}/outputs/lai.tif" + # s2_cab = f"{s2_folder:s}/outputs/cab.tif" + # s2_cbrown = f"{s2_folder:s}/outputs/cbrown.tif" + + # sm_prior = '/data/nemesis/kaska-sar_quick/sm_prior.tif' + # sm_std = '/data/nemesis/kaska-sar_quick/sm_std.tif' + # sr_prior = '/data/nemesis/kaska-sar_quick/sr_prior.tif' + # sr_std = 
'/data/nemesis/kaska-sar_quick/sr_std.tif' + # sarsar = KaSKASAR(s1_ncfile, state_mask, s2_lai, s2_cab, s2_cbrown, sm_prior, sm_std, sr_prior ,sr_std) + + # s1_ncfile = '/media/nas_data/Thomas/S1/processed/MNI_2017/MNI_2017.nc' + + # aggregation = '_point' + aggregation = '_Field_buffer_30' + # aggregation = '_buffer_100' + aggregation = '_buffer_50' + + + s1_ncfile = '/media/tweiss/Daten/data_AGU/'+aggregation+'/MNI_2017_new_final.nc' + state_mask = '/media/tweiss/Work/z_final_mni_data_2017/ESU'+aggregation+'.tif' + s2_folder = "/media/tweiss/Daten/test_kaska/data/" + s2_lai = f"{s2_folder:s}/lai.tif" + s2_cab = f"{s2_folder:s}/cab.tif" + s2_cbrown = f"{s2_folder:s}/cbrown.tif" + + sm_prior = f'{s2_folder:s}/sm_prior.tif' + sm_std = f'{s2_folder:s}/sm_std.tif' + sr_prior = f'{s2_folder:s}/sr_prior.tif' + sr_std = f'{s2_folder:s}/sr_std.tif' + + sarsar = KaSKASAR(s1_ncfile, state_mask, s2_lai, s2_cab, s2_cbrown, sm_prior, sm_std, sr_prior ,sr_std) + + csv_output_path = '/media/tweiss/Work/paper2/z_dense_s1_time_series_n7multi_Field_buffer_30/csv/' + + add_data = pd.read_csv(csv_output_path+'all_50.csv',header=[0,1,2,3,4,5],index_col=0) + + sarsar.sentinel1_inversion(True) + diff --git a/kaska/kaska_sar_tau_rms_area.py b/kaska/kaska_sar_tau_rms_area.py new file mode 100644 index 0000000..a456e5f --- /dev/null +++ b/kaska/kaska_sar_tau_rms_area.py @@ -0,0 +1,791 @@ +#!/usr/bin/env python + +import os +import osr +import gdal +import datetime +import numpy as np +from netCDF4 import Dataset +from scipy.ndimage import label +from utils import reproject_data +from skimage.filters import sobel +from collections import namedtuple +from scipy.optimize import minimize +from scipy.interpolate import interp1d +# from watercloudmodel import cost_function +from watercloudmodel_vwc_rms import cost_function_vwc, ssrt_jac_vwc, ssrt_vwc +from scipy.ndimage.filters import gaussian_filter1d +import pdb +from z_helper import * +import matplotlib.pyplot as plt +from netCDF4 import 
date2num +import glob + +def ndwi1_mag(ndwi1): + vwc = 13.2*ndwi1**2+1.62*ndwi1 + return vwc + +def ndwi1_cos_maize(ndwi1): + vwc = 9.39*ndwi1+1.26 + return vwc + +def save_to_tif(fname, Array, GeoT): + if os.path.exists(fname): + os.remove(fname) + ds = gdal.GetDriverByName('GTiff').Create(fname, Array.shape[2], Array.shape[1], Array.shape[0], gdal.GDT_Float32) + ds.SetGeoTransform(GeoT) + wkt = 'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]' + ds.SetProjection(wkt) + for i, image in enumerate(Array): + # ds.GetRasterBand(i+1).SetMetadata({'date': prior_time[i]}) + ds.GetRasterBand(i+1).WriteArray( image ) + ds.FlushCache() + return fname + +def get_sar(s1_nc_file, version): + s1_data = namedtuple('s1_data', 'time lat lon satellite relorbit orbitdirection ang_name vv_name, vh_name') + data = Dataset(s1_nc_file) + relorbit = data['relorbit'][:] + localIncidenceAngle = data['theta'][:] + satellite = data['satellite'][:] + orbitdirection = data['orbitdirection'][:] + time = data['time'][:] + lat = data['lat'][:] + lon = data['lon'][:] + + vv_name = s1_nc_file.replace('.nc', '_vv'+version+'.tif') + vh_name = s1_nc_file.replace('.nc', '_vh'+version+'.tif') + ang_name = s1_nc_file.replace('.nc', '_ang'+version+'.tif') + + if not os.path.exists(vv_name): + gg = gdal.Open('NETCDF:"%s":sigma0_vv"%s"'%(s1_nc_file,version)) + geo = gg.GetGeoTransform() + sigma0_vv = data['sigma0_vv'+version][:] + save_to_tif(vv_name, sigma0_vv, geo) + + if not os.path.exists(vh_name): + gg = gdal.Open('NETCDF:"%s":sigma0_vh"%s"'%(s1_nc_file,version)) + geo = gg.GetGeoTransform() + sigma0_vh = data['sigma0_vh'+version][:] + save_to_tif(vh_name, sigma0_vh, geo) + + if not os.path.exists(ang_name): + gg = gdal.Open('NETCDF:"%s":theta'%s1_nc_file) + geo = gg.GetGeoTransform() + 
localIncidenceAngle = data['theta'][:] + save_to_tif(ang_name, localIncidenceAngle, geo) + + return s1_data(time, lat, lon, satellite, relorbit, orbitdirection, ang_name, vv_name, vh_name) + +# def get_vwc(vwc_nc_file): +# s1_data = namedtuple('vwc_data', 'time lat lon vwc') +# data = Dataset(vwc_nc_file) + +# time = data['time'][:] +# lat = data['lat'][:] +# lon = data['lon'][:] + +# vwc_name = vwc_nc_file.replace('.nc', '_vwc.tif') + +# if not os.path.exists(vwc_name): +# gg = gdal.Open('NETCDF:"%s":newBand'%vwc_nc_file) +# geo = gg.GetGeoTransform() +# save_to_tif(vwc_name, data['newBand'][:,:,:], geo) + +# return s1_data(time, lat, lon, vwc_name) + +def get_api(api_nc_file,year): + api_data = namedtuple('api_data', 'time lat lon api') + data = Dataset(api_nc_file) + + xxx = date2num(datetime.datetime.strptime(year+'0201', '%Y%m%d'), units ='hours since 2000-01-01 00:00:00', calendar='gregorian') + yyy = date2num(datetime.datetime.strptime(year+'1001', '%Y%m%d'), units ='hours since 2000-01-01 00:00:00', calendar='gregorian') + + time = data['time'][np.where(data['time'][:]==xxx)[0][0]:np.where(data['time'][:]==yyy)[0][0]] + lat = data['lat'][:] + lon = data['lon'][:] + + api_name = api_nc_file.replace('.nc', '_api'+year+'.tif') + + if not os.path.exists(api_name): + gg = gdal.Open('NETCDF:"%s":api'%api_nc_file) + geo = gg.GetGeoTransform() + save_to_tif(api_name, data['api'][np.where(data['time'][:]==xxx)[0][0]:np.where(data['time'][:]==yyy)[0][0],:,:], geo) + + return api_data(time, lat, lon, api_name) + +def read_sar(sar_data, state_mask): + s1_data = namedtuple('s1_data', 'time lat lon satellite relorbit orbitdirection ang vv vh') + ang = reproject_data(sar_data.ang_name, output_format="MEM", target_img=state_mask) + vv = reproject_data(sar_data.vv_name, output_format="MEM", target_img=state_mask) + vh = reproject_data(sar_data.vh_name, output_format="MEM", target_img=state_mask) + + time = [datetime.datetime(1970,1,1) + datetime.timedelta(days=float(i)) for 
i in sar_data.time] + + return s1_data(time, sar_data.lat, sar_data.lon, sar_data.satellite, sar_data.relorbit, sar_data.orbitdirection, ang, vv, vh) + +def read_vwc(vwc_data, state_mask): + s2_data = namedtuple('s2_vwc', 'time vwc ndwi') + filelist = glob.glob(vwc_data+'*.tif') + filelist.sort() + time = [] + vwc = [] + ndwi = [] + for file in filelist: + g = gdal.Open(file) + ndwi_array = reproject_data(file, output_format="MEM", target_img=state_mask) + ndwi_array = ndwi_array.ReadAsArray() + vwc_array = ndwi1_mag(ndwi_array) + time.append(datetime.datetime.strptime(file.split('/')[-1][14:22], '%Y%m%d')) + vwc.append(vwc_array) + ndwi.append(ndwi_array) + + return s2_data(time, vwc, ndwi) + +def read_api(api_data, state_mask): + s1_data = namedtuple('api_data', 'time lat lon api') + + api = reproject_data(api_data.api, output_format="MEM", target_img=state_mask) + time = [datetime.datetime(2000,1,1) + datetime.timedelta(hours=float(i)) for i in api_data.time] + + return s1_data(time, api_data.lat, api_data.lon, api) + + +def inference_preprocessing(s1_data, vwc_data, api_data, state_mask, orbit1=None, orbit2=None): + """Resample S2 smoothed output to match S1 observations + times""" + # Move everything to DoY to simplify interpolation + + sar_inference_data = namedtuple('sar_inference_data', 'time lat lon satellite relorbit orbitdirection ang vv vh vwc api time_mask ndwi') + + + vwc_doys = np.array([ int(i.strftime('%j')) for i in vwc_data.time]) + s1_doys = np.array([ int(i.strftime('%j')) for i in s1_data.time]) + + + time = np.array(s1_data.time) + for jj in range(len(s1_data.time)): + time[jj] = s1_data.time[jj].replace(microsecond=0).replace(second=0).replace(minute=0) + + index=[] + xxx = np.array(api_data.time) + for jj in range(len(time)): + oje = np.where(xxx==time[jj]) + try: + ojet = oje[0][0] + index.append(ojet) + except IndexError: + pass + api_doys = np.array([ int(i.strftime('%j')) for i in np.array(api_data.time)[index]]) + + f = 
interp1d(vwc_doys, np.array(vwc_data.vwc), axis=0, bounds_error=False) + vwc_s1 = f(s1_doys) + + f = interp1d(vwc_doys, np.array(vwc_data.ndwi), axis=0, bounds_error=False) + ndwi_s1 = f(s1_doys) + + api_s1 = api_data.api.ReadAsArray()[index] + f = interp1d(api_doys, api_s1, axis=0, bounds_error=False) + api_s1 = f(s1_doys) + + if s1_data.time[0].year == 2017: + time_mask = (s1_doys >= 80) & (s1_doys <= 273) + elif s1_data.time[0].year == 2018: + time_mask = (s1_doys >= 80) & (s1_doys <= 273) + else: + print('no time mask') + + if orbit1 != None: + rel_orbit1 = s1_data.relorbit==orbit1 + if orbit2 != None: + rel_orbit2 = s1_data.relorbit==orbit2 + xxx = np.logical_and(rel_orbit1,time_mask) + yyy = np.logical_and(rel_orbit2,time_mask) + time_mask = np.logical_or(xxx,yyy) + + sar_inference_data = sar_inference_data(s1_data.time, s1_data.lat, s1_data.lon, + s1_data.satellite, s1_data.relorbit, + s1_data.orbitdirection, s1_data.ang, + s1_data.vv, s1_data.vh, vwc_s1, api_s1, time_mask, ndwi_s1) + + return sar_inference_data + + +# def get_prior(s1_data, soilMoisture, soilMoisture_std, soilRoughness, soilRoughness_std, state_mask): +# # this is the function to reading the soil moisture prior +# # and the soil roughness prior using the satemask +# # the assumption of inputs are daily data in geotifs +# prior = namedtuple('prior', 'time sm_prior sm_std sr_prior sr_std') + +# g = gdal.Open(soilMoisture) +# time = [] +# for i in range(g.RasterCount): +# gg = g.GetRasterBand(i+1) +# meta = gg.GetMetadata() +# time.append(datetime.datetime.strptime(meta['date'], '%Y-%m-%d')) +# sm_prior = reproject_data(soilMoisture, output_format="MEM", target_img=state_mask) +# sm_std = reproject_data(soilMoisture_std, output_format="MEM", target_img=state_mask) +# sr_prior = reproject_data(soilRoughness, output_format="MEM", target_img=state_mask) +# sr_std = reproject_data(soilRoughness_std,output_format="MEM", target_img=state_mask) + +# prior_doy = np.array([ int(i.strftime('%j')) for i 
in time]) +# s1_doys = np.array([ int(i.strftime('%j')) for i in s1_data.time]) + +# f = interp1d(prior_doy, sm_prior.ReadAsArray(), axis=0, bounds_error=False) + +# sm_s1 = f(s1_doys) +# f = interp1d(prior_doy, sm_std.ReadAsArray(), axis=0, bounds_error=False) +# sm_std_s1 = f(s1_doys) + +# f = interp1d(prior_doy, sr_prior.ReadAsArray(), axis=0, bounds_error=False) +# sr_s1 = f(s1_doys) +# f = interp1d(prior_doy, sr_std.ReadAsArray(), axis=0, bounds_error=False) +# sr_std_s1 = f(s1_doys) + +# return prior(time, sm_s1, sm_std_s1, sr_s1, sr_std_s1) + + + +# def do_one_pixel_field(data_field, vv, vh, vwc, theta, time, sm, sm_std, b, b_std, omega, rms, orbits, unc): + +# ps = [] +# vwcs = [] +# bs = [] +# sms = [] +# times = [] + +# uorbits = np.unique(orbits) +# uorbits = np.array([95]) +# for orbit in uorbits: +# # for jj in range(len(vv)): +# # orbit_mask = orbits == orbit +# # orbit_mask = (orbits == 44) | (orbits == 168) +# orbit_mask = (orbits == 44) | (orbits == 95) | (orbits == 117) | (orbits == 168) +# # orbit_mask = (orbits == 168) +# # orbit_mask = (orbits == 44) | (orbits == 95) | (orbits == 117) +# ovv, ovh, ovwc, otheta, otime = vv[orbit_mask], vh[orbit_mask], vwc[orbit_mask], theta[orbit_mask], time[orbit_mask] +# osm, osm_std, osb, osb_std = sm[orbit_mask], sm_std[orbit_mask], b[orbit_mask], b_std[orbit_mask] + + +# ovwc_std = np.ones_like(osb)*0.5 + +# # alpha = _calc_eps(osm) +# # alpha = osm +# # alpha_std = np.ones_like(alpha)*10 +# # alpha_std = osm_std + + +# prior_mean = np.concatenate([[0, ]*2, osm, ovwc, osb]) +# prior_unc = np.concatenate([[10., ]*2, osm_std, ovwc_std, osb_std]) + +# xvv = np.array([rms, omega]) + + +# x0 = np.concatenate([xvv, osm, ovwc, osb]) + +# bounds = ( +# [[0.013, 0.013]] # s +# + [[0.0107, 0.0107]] # omega +# + [[0.01, 0.7]] * osb.shape[0] # mv +# + [[0, 7.5]] * osb.shape[0] # vwc +# + [[0.01, 0.6]] * osb.shape[0] # b +# ) + + +# gamma = [500, 500] + +# retval = minimize(cost_function_vwc, +# x0, +# args=(ovh, ovv, 
otheta, gamma, prior_mean, prior_unc, unc), +# jac=True, +# bounds = bounds, +# options={"disp": False},) + + +# posterious_sm = retval.x[2 : 2+len(osb)] +# posterious_vwc = retval.x[2+len(osb) : 2+2*len(osb)] +# posterious_b = retval.x[2+2*len(osb) : 2+3*len(osb)] + +# sms.append(posterious_sm) +# vwcs.append(posterious_vwc) +# bs.append(posterious_b) +# times.append(otime) +# ps.append(retval.x[:2]) + +# order = np.argsort(np.hstack(times)) +# times = np.hstack(times )[order] +# vwcs = np.hstack(vwcs )[order] +# bs = np.hstack(bs )[order] +# sms = np.hstack(sms )[order].real +# return times, vwcs, bs, sms, np.array(ps), orbit_mask + +def do_one_pixel_field(vv, vh, vwc, vwc_std, theta, time, sm, sm_std, b, b_std, omega, rms, rms_std, orbits, unc): + + ps = [] + vwcs = [] + bs = [] + sms = [] + srms = [] + times = [] + + uorbits = np.unique(orbits) + uorbits = np.array([95]) + for orbit in uorbits: + # for jj in range(len(vv)): + # orbit_mask = orbits == orbit + # orbit_mask = (orbits == 44) | (orbits == 168) + # orbit_mask = (orbits == 95) | (orbits == 117) + orbit_mask = (orbits == 44) | (orbits == 95) | (orbits == 117) | (orbits == 168) + # orbit_mask = (orbits == 168) + # orbit_mask = (orbits == 44) | (orbits == 95) | (orbits == 117) + ovv, ovh, ovwc, ovwc_std, otheta, otime = vv[orbit_mask], vh[orbit_mask], vwc[orbit_mask], vwc_std[orbit_mask], theta[orbit_mask], time[orbit_mask] + osm, osm_std, osb, osb_std = sm[orbit_mask], sm_std[orbit_mask], b[orbit_mask], b_std[orbit_mask] + + + + + # alpha = _calc_eps(osm) + # alpha = osm + # alpha_std = np.ones_like(alpha)*10 + # alpha_std = osm_std + + # prior_mean = np.concatenate([[0, ]*2, osm, ovwc, osb]) + # prior_unc = np.concatenate([[10., ]*2, osm_std, ovwc_std, osb_std]) + + # xvv = np.array([rms, omega]) + + + # x0 = np.concatenate([xvv, osm, ovwc, osb]) + + # bounds = ( + # [[0.013, 0.013]] # s + # + [[0.0107, 0.0107]] # omega + # + [[0.01, 0.7]] * osb.shape[0] # mv + # + [[0, 7.5]] * osb.shape[0] # vwc + # + 
[[0.01, 0.6]] * osb.shape[0] # b + # ) + + + prior_mean = np.concatenate([[0, ], [rms], osm, ovwc, osb]) + prior_unc = np.concatenate([[10., ], [rms_std], osm_std, ovwc_std, osb_std]) + + + x0 = np.concatenate([np.array([omega]), np.array([rms]), osm, ovwc, osb]) + + xxx = [] + for jj in osb: + if jj <= 0.2: + xxx.append([0.01,osb[0]+0.2]) + else: + xxx.append([osb[0]-0.2,osb[0]+0.2]) + + bounds = ( + [[0.027, 0.027]] # omega + + [[0.005, 0.03]] # s=rms + + [[0.01, 0.7]] * osb.shape[0] # mv + + [[0, 7.5]] * osb.shape[0] # vwc + + xxx #[[0.01, 0.6]] * osb.shape[0] # b + ) + + data = osb + + gamma = [10, 10] + + retval = minimize(cost_function_vwc, + x0, + args=(ovh, ovv, otheta, gamma, prior_mean, prior_unc, unc, data), + jac=True, + bounds = bounds, + options={"disp": False}) + + posterious_rms = retval.x[1] + posterious_sm = retval.x[2 : 2+len(osb)] + posterious_vwc = retval.x[2+len(osb) : 2+2*len(osb)] + posterious_b = retval.x[2+2*len(osb) : 2+3*len(osb)] + + srms.append(posterious_rms) + sms.append(posterious_sm) + vwcs.append(posterious_vwc) + bs.append(posterious_b) + times.append(otime) + ps.append(retval.x[:1]) + + order = np.argsort(np.hstack(times)) + times = np.hstack(times )[order] + vwcs = np.hstack(vwcs )[order] + bs = np.hstack(bs )[order] + sms = np.hstack(sms )[order].real + # srms = np.hstack(srms)[order] + return times, vwcs, bs, sms, np.array(srms), np.array(ps), orbit_mask + + + + +def do_inversion(sar_inference_data, state_mask, segment=False, year=None, version=None): + + orbits = sar_inference_data.relorbit[sar_inference_data.time_mask] + uorbits = np.unique(orbits) + if segment: + + out_shape = sar_inference_data.vwc[sar_inference_data.time_mask].shape + vwc_outputs = np.zeros(out_shape ) + sm_outputs = np.zeros(out_shape ) + b_outputs = np.zeros(out_shape ) + rms_outputs = np.zeros(out_shape ) + + pixel = ['_Field_buffer_30','','_buffer_30','_buffer_50','_buffer_100'] + pixel = ['_Field_buffer_30'] + fields = ['301','508','542'] + fields = 
['all'] + # ESU names + esus = ['high', 'low', 'med', 'mean'] + esus = ['mean'] + for pixels in pixel: + print(pixels) + path_ESU = '/media/tweiss/Work/z_final_mni_data_2017/' + name_shp = 'ESU'+pixels+'.shp' + name_ESU = 'ESU'+pixels+'.tif' + + for esu in esus: + for field in fields: + field2 = field + '_' + esu + + if field == 'all': + g = gdal.Open(state_mask) + state_mask = g.ReadAsArray().astype(np.int) + else: + g = gdal.Open(os.path.join(path_ESU, name_ESU)) + state_mask = g.ReadAsArray().astype(np.int) + + if pixels == '_Field_buffer_30': + if field == '515': + mask_value = 4 + state_mask = state_mask==mask_value + elif field == '508': + mask_value = 27 + state_mask = state_mask==mask_value + elif field == '542': + mask_value = 8 + state_mask = state_mask==mask_value + elif field == '319': + mask_value = 67 + state_mask = state_mask==mask_value + elif field == '301': + mask_value = 87 + state_mask = state_mask==mask_value + elif field == 'all': + mask_value = 0 + state_mask = state_mask > mask_value + else: + pass + + field_mask = state_mask + pdb.set_trace() + vv_all = sar_inference_data.vv.ReadAsArray()[sar_inference_data.time_mask] + vh_all = sar_inference_data.vh.ReadAsArray()[sar_inference_data.time_mask] + theta_all = sar_inference_data.ang.ReadAsArray()[sar_inference_data.time_mask] + time_all = np.array(sar_inference_data.time)[sar_inference_data.time_mask] + + vwc_all = sar_inference_data.vwc[sar_inference_data.time_mask] + ndwi_all = sar_inference_data.ndwi[sar_inference_data.time_mask] + ### vwc needs to be changed!!!! NDWI1!!! + vwc_std = vwc_all[:,0,0] + vwc_std[:] = 0.1 + sm_all = sar_inference_data.api[sar_inference_data.time_mask] + sm_all = sm_all / 100. 
+ sm_std = sm_all[:,0,0] + sm_std[:] = 0.2 + + b = sm_all[:,0,0] + b[:] = 0 + b_std = sm_all[:,0,0] + b_std[:] = 0.5 # not used anyway + rms = sm_all[:,0,0] + rms = 0.2 + rms_std = 0.1 # not used anyway + + unc = 1.9 + omega = 0.027 + + sm_retrieved = sm_all * np.nan + + np.save('/media/tweiss/Work/Paper3_plot/b_input_vv'+year+version+'.npy', vv_all) + np.save('/media/tweiss/Work/Paper3_plot/b_input_vwc'+year+version+'.npy', vwc_all) + np.save('/media/tweiss/Work/Paper3_plot/b_input_sm_api'+year+version+'.npy', sm_all) + np.save('/media/tweiss/Work/Paper3_plot/b_input_ndwi'+year+version+'.npy', ndwi_all) + + for z in range(len(state_mask)): + print(z) + for zz in range(len(state_mask[0])): + if state_mask[z,zz] == False: + pass + # elif z < 232: + # pass + else: + vv = vv_all[:,z,zz] + vh = vh_all[:,z,zz] + theta = theta_all[:,z,zz] + vwc = vwc_all[:,z,zz] + vwc[vwc < 0.01] = 0.02 + + orbits95 = orbits==95 + orbits168 = orbits==168 + orbits44 = orbits==44 + orbits117 = orbits==117 + # orbits44_168 = (orbits == 44) | (orbits == 168) + # b[:] = 0.4 + b[orbits95] = 0.4 + b[orbits117] = 0.4 + b[orbits44] = 0.6 + b[orbits168] = 0.6 + + orbits95[0:np.argmax(vwc)] = False + orbits117[0:np.argmax(vwc)] = False + orbits44[0:np.argmax(vwc)] = False + orbits168[0:np.argmax(vwc)] = False + + b[orbits95] = 0.1 + b[orbits117] = 0.1 + b[orbits44] = 0.2 + b[orbits168] = 0.2 + + + sm = sm_all[:,z,zz] + + + times, svwc, sb, sms, srms, ps, orbit_mask = do_one_pixel_field(vv, vh, vwc, vwc_std, theta, time_all, sm, sm_std, b, b_std, omega, rms, rms_std, orbits,unc=unc) + + vwc_outputs[:,z,zz] = svwc + sm_outputs[:,z,zz] = sms + b_outputs[:,z,zz] = sb + rms_outputs[:,z,zz] = srms + + # np.save('/media/tweiss/Work/Paper3_plot/npy/2017/sm'+str(int(z))+'_'+str(int(zz))+'.npy', sms) + # np.save('/media/tweiss/Work/Paper3_plot/npy/2017/vwc'+str(int(z))+'_'+str(int(zz))+'.npy', svwc) + # np.save('/media/tweiss/Work/Paper3_plot/npy/2017/b'+str(int(z))+'_'+str(int(zz))+'.npy', sb) + # 
np.save('/media/tweiss/Work/Paper3_plot/npy/2017/rms'+str(int(z))+'_'+str(int(zz))+'.npy', srms) + + np.save('/media/tweiss/Work/Paper3_plot/b_'+field+year+version+'_sm'+'.npy', sm_outputs) + np.save('/media/tweiss/Work/Paper3_plot/b_'+field+year+version+'_vwc'+'.npy', vwc_outputs) + np.save('/media/tweiss/Work/Paper3_plot/b_'+field+year+version+'_b'+'.npy', b_outputs) + np.save('/media/tweiss/Work/Paper3_plot/b_'+field+year+version+'_rms'+'.npy', rms_outputs) + np.save('/media/tweiss/Work/Paper3_down/2017/b_'+year+version+'times.npy',times) + + for u in range(len(sm_retrieved)): + + fig, ax = plt.subplots(figsize=(15, 10)) + + + quadmesh = ax.imshow(sm_outputs[u]) + plt.colorbar(quadmesh) + quadmesh.set_clim(vmin=0.15, vmax=0.35) + plt.savefig('/media/tweiss/Work/Paper3_plot/npy/sm_2017/'+year+version+field+'_'+times[u].strftime("%Y%m%d"), bbox_inches = 'tight') + plt.close() + + fig, ax = plt.subplots(figsize=(15, 10)) + + + quadmesh = ax.imshow(b_outputs[u]) + plt.colorbar(quadmesh) + quadmesh.set_clim(vmin=0.01, vmax=0.7) + plt.savefig('/media/tweiss/Work/Paper3_plot/npy/b_2017/'+year+version+field+'_'+times[u].strftime("%Y%m%d"), bbox_inches = 'tight') + plt.close() + + fig, ax = plt.subplots(figsize=(15, 10)) + + + quadmesh = ax.imshow(vwc_outputs[u]) + plt.colorbar(quadmesh) + quadmesh.set_clim(vmin=0.1, vmax=6) + plt.savefig('/media/tweiss/Work/Paper3_plot/npy/vwc_2017/'+year+version+field+'_'+times[u].strftime("%Y%m%d"), bbox_inches = 'tight') + plt.close() + + fig, ax = plt.subplots(figsize=(15, 10)) + + + quadmesh = ax.imshow(rms_outputs[u]) + plt.colorbar(quadmesh) + quadmesh.set_clim(vmin=0.005, vmax=0.03) + plt.savefig('/media/tweiss/Work/Paper3_plot/npy/rms_2017/'+year+version+field+'_'+times[u].strftime("%Y%m%d"), bbox_inches = 'tight') + plt.close() + + + + + + + + # if field == '508': + # quadmesh = ax.imshow(sm_outputs[u,650:750,400:500]) + # elif field == '301': + # quadmesh = ax.imshow(sm_outputs[u,0:100,200:250]) + # elif field == '542': + # 
quadmesh = ax.imshow(sm_outputs[u,250:350,580:630]) + # else: + # pass + # plt.colorbar(quadmesh) + # quadmesh.set_clim(vmin=0.15, vmax=0.35) + + # plt.savefig('/media/tweiss/Daten/data_AGU/test_kaska/down3/'+field+'_'+times[u].strftime("%Y%m%d"), bbox_inches = 'tight') + # plt.close() + + + + + + else: + mask = gdal.Open(state_mask).ReadAsArray() + xs, ys = np.where(mask) + + out_shape = sar_inference_data.vwc[sar_inference_data.time_mask].shape + time = np.array(sar_inference_data.time)[sar_inference_data.time_mask] + vwc_outputs = np.zeros(out_shape ) + sm_outputs = np.zeros(out_shape ) + b_outputs = np.zeros(out_shape ) + + for i in range(len(xs)): + indx, indy = xs[i], ys[i] + + # field_mask = slice(None, None), slice(indx, indx+1), slice(indy, indy+1) + time = np.array(sar_inference_data.time)[sar_inference_data.time_mask] + vwc = sar_inference_data.vwc[sar_inference_data.time_mask][:, indx, indy ] + api = sar_inference_data.api[sar_inference_data.time_mask][:, indx, indy ] + api_std = sar_inference_data.api[sar_inference_data.time_mask][:, indx, indy ] + pdb.set_trace() + api_std[:] = 0.2 + + # b = + # b_std = + + # rms = + # rms_std = + # sm = prior.sm_prior[sar_inference_data.time_mask][:, indx, indy ] + # sm_std= prior.sm_std [sar_inference_data.time_mask][:, indx, indy ] + + sm[np.isnan(sm)] = 0.2 + sm_std[sm_std==0] = 0.5 + sm_std[np.isnan(sm_std)] = 0.5 + + # coef = prior.sr_prior[sar_inference_data.time_mask][:, indx, indy ] + # coef_std= prior.sr_std [sar_inference_data.time_mask][:, indx, indy ] + # sr[np.isnan(sr)] = 0.1 + # sr_std[np.isnan(sr_std)] = 0.5 + + # height = prior.sr_prior[sar_inference_data.time_mask][:, indx, indy ] + # height[:] = 0.1 + + vv = sar_inference_data.vv.ReadAsArray()[sar_inference_data.time_mask][:, indx, indy ] + vh = sar_inference_data.vh.ReadAsArray()[sar_inference_data.time_mask][:, indx, indy ] + theta = sar_inference_data.ang.ReadAsArray()[sar_inference_data.time_mask][:, indx, indy ] + + + vv = np.maximum(vv, 
0.0001) + vv = 10 * np.log10(vv) + vh = np.maximum(vh, 0.0001) + vh = 10 * np.log10(vh) + + times, lais, coefs, sms = do_one_pixel_field(sar_inference_data, vv, vh, lai, theta, time, sm, sm_std, coef, coef_std, height) + times, vwcs, bs, sms, srms, ps, orbit_mask = do_one_pixel_field(data_field, vv, vh, vwc, theta, time, sm, sm_std, b, b_std, omega, rms, rms_std, orbits,unc=unc) + + lai_outputs[:, indx, indy] = lais + coef_outputs[:, indx, indy] = coefs + sm_outputs[:, indx, indy] = sms + + return 'done' + +def save_output(fname, Array, GeoT, projction, time): + if os.path.exists(fname): + os.remove(fname) + ds = gdal.GetDriverByName('GTiff').Create(fname, Array.shape[2], Array.shape[1], Array.shape[0], gdal.GDT_Float32) + ds.SetGeoTransform(GeoT) + ds.SetProjection(projction) + for i, image in enumerate(Array): + ds.GetRasterBand(i+1).SetMetadata({'date': time[i]}) + ds.GetRasterBand(i+1).WriteArray( image ) + ds.FlushCache() + return fname + +def save_ps_output(fname, Array, GeoT, projction, orbit): + if os.path.exists(fname): + os.remove(fname) + ds = gdal.GetDriverByName('GTiff').Create(fname, Array.shape[2], Array.shape[1], Array.shape[0], gdal.GDT_Float32) + ds.SetGeoTransform(GeoT) + ds.SetProjection(projction) + for i, image in enumerate(Array): + ds.GetRasterBand(i+1).SetMetadata({'orbit': str(int(orbit[i]))}) + ds.GetRasterBand(i+1).WriteArray( image ) + ds.FlushCache() + return fname + + + +class KaSKASAR(object): + """A class to process Sentinel 1 SAR data using S2 data as + an input""" + + def __init__(self, s1_ncfile, state_mask, s2_wvc, rad_api, year, vv_version, orbit1=None,orbit2=None): + self.s1_ncfile = s1_ncfile + self.state_mask = state_mask + self.s2_wvc = s2_vwc + self.rad_api = rad_api + self.year = year + self.version = version + + self.orbit1 = None + self.orbit2 = None + if orbit1 != None: + self.orbit1 = orbit1 + if orbit2 != None: + self.orbit2 = orbit2 + + def sentinel1_inversion(self, segment=False): + sar = get_sar(s1_ncfile, 
version) + s1_data = read_sar(sar, self.state_mask) + + vwc_data = read_vwc(s2_vwc, self.state_mask) + + api = get_api(rad_api,year) + api_data = read_api(api, self.state_mask) + + sar_inference_data = inference_preprocessing(s1_data, vwc_data, api_data, self.state_mask,self.orbit1,self.orbit2) + + + xxx = do_inversion(sar_inference_data, self.state_mask, segment, year, version) + + # gg = gdal.Open('NETCDF:"%s":sigma0_vv_multi'%self.s1_ncfile) + # geo = gg.GetGeoTransform() + + # projction = 'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]' + + # time = [i.strftime('%Y-%m-%d') for i in np.array(sar_inference_data.time)[sar_inference_data.time_mask]] + + # sm_name = self.s1_ncfile.replace('.nc', '_sar_sm.tif') + # sr_name = self.s1_ncfile.replace('.nc', '_sar_sr.tif') + # lai_name = self.s1_ncfile.replace('.nc', '_sar_lai.tif') + + # save_output(sm_name, sm_outputs, geo, projction, time) + # save_output(sr_name, sr_outputs, geo, projction, time) + # save_output(lai_name, lai_outputs, geo, projction, time) + + + + +if __name__ == '__main__': + + + years = ['2017','2018'] + # years = ['2018'] + versions = ['_multi', '_single'] + versions = ['_multi'] + for year in years: + for version in versions: + s1_ncfile = '/media/tweiss/Work/Paper3_down/data/MNI_'+year+'_new_final_paper3.nc' + state_mask = '/media/tweiss/Work/Paper3_down/GIS/clc_class2.tif' + rad_api = '/media/tweiss/Work/Paper3_down/data/RADOLAN_API_v1.0.0.nc' + + s2_vwc = '/media/tweiss/Work/Paper3_down/data/'+year+'/tif1/' + + sarsar = KaSKASAR(s1_ncfile, state_mask, s2_vwc, rad_api, year, version) + + sarsar.sentinel1_inversion(True) + diff --git a/kaska/kaska_sar_tau_rms_area_b_fields.py b/kaska/kaska_sar_tau_rms_area_b_fields.py new file mode 100644 index 0000000..69d5712 --- /dev/null +++ 
b/kaska/kaska_sar_tau_rms_area_b_fields.py @@ -0,0 +1,487 @@ +#!/usr/bin/env python + +import os +import osr +import gdal +import datetime +import numpy as np +from netCDF4 import Dataset +from scipy.ndimage import label +from utils import reproject_data +from skimage.filters import sobel +from collections import namedtuple +from scipy.optimize import minimize +from scipy.interpolate import interp1d +# from watercloudmodel import cost_function +from watercloudmodel_vwc_rms import cost_function_vwc, ssrt_jac_vwc, ssrt_vwc +from scipy.ndimage.filters import gaussian_filter1d +import pdb +from z_helper import * +import matplotlib.pyplot as plt +from netCDF4 import date2num +import glob + +def ndwi1_mag(ndwi1): + vwc = 13.2*ndwi1**2+1.62*ndwi1 + return vwc + +def ndwi1_cos_maize(ndwi1): + vwc = 9.39*ndwi1+1.26 + return vwc + +def save_to_tif(fname, Array, GeoT): + if os.path.exists(fname): + os.remove(fname) + ds = gdal.GetDriverByName('GTiff').Create(fname, Array.shape[2], Array.shape[1], Array.shape[0], gdal.GDT_Float32) + ds.SetGeoTransform(GeoT) + wkt = 'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]' + ds.SetProjection(wkt) + for i, image in enumerate(Array): + # ds.GetRasterBand(i+1).SetMetadata({'date': prior_time[i]}) + ds.GetRasterBand(i+1).WriteArray( image ) + ds.FlushCache() + return fname + +def get_sar(s1_nc_file, version): + s1_data = namedtuple('s1_data', 'time lat lon satellite relorbit orbitdirection ang_name vv_name, vh_name') + data = Dataset(s1_nc_file) + relorbit = data['relorbit'][:] + localIncidenceAngle = data['theta'][:] + satellite = data['satellite'][:] + orbitdirection = data['orbitdirection'][:] + time = data['time'][:] + lat = data['lat'][:] + lon = data['lon'][:] + + vv_name = s1_nc_file.replace('.nc', '_vv'+version+'.tif') + 
vh_name = s1_nc_file.replace('.nc', '_vh'+version+'.tif') + ang_name = s1_nc_file.replace('.nc', '_ang'+version+'.tif') + + if not os.path.exists(vv_name): + gg = gdal.Open('NETCDF:"%s":sigma0_vv"%s"'%(s1_nc_file,version)) + geo = gg.GetGeoTransform() + sigma0_vv = data['sigma0_vv'+version][:] + save_to_tif(vv_name, sigma0_vv, geo) + + if not os.path.exists(vh_name): + gg = gdal.Open('NETCDF:"%s":sigma0_vh"%s"'%(s1_nc_file,version)) + geo = gg.GetGeoTransform() + sigma0_vh = data['sigma0_vh'+version][:] + save_to_tif(vh_name, sigma0_vh, geo) + + if not os.path.exists(ang_name): + gg = gdal.Open('NETCDF:"%s":theta'%s1_nc_file) + geo = gg.GetGeoTransform() + localIncidenceAngle = data['theta'][:] + save_to_tif(ang_name, localIncidenceAngle, geo) + + return s1_data(time, lat, lon, satellite, relorbit, orbitdirection, ang_name, vv_name, vh_name) + +def get_api(api_nc_file,year): + api_data = namedtuple('api_data', 'time lat lon api') + data = Dataset(api_nc_file) + + xxx = date2num(datetime.datetime.strptime(year+'0201', '%Y%m%d'), units ='hours since 2000-01-01 00:00:00', calendar='gregorian') + yyy = date2num(datetime.datetime.strptime(year+'1001', '%Y%m%d'), units ='hours since 2000-01-01 00:00:00', calendar='gregorian') + + time = data['time'][np.where(data['time'][:]==xxx)[0][0]:np.where(data['time'][:]==yyy)[0][0]] + lat = data['lat'][:] + lon = data['lon'][:] + + api_name = api_nc_file.replace('.nc', '_api'+year+'.tif') + + if not os.path.exists(api_name): + gg = gdal.Open('NETCDF:"%s":api'%api_nc_file) + geo = gg.GetGeoTransform() + save_to_tif(api_name, data['api'][np.where(data['time'][:]==xxx)[0][0]:np.where(data['time'][:]==yyy)[0][0],:,:], geo) + + return api_data(time, lat, lon, api_name) + +def read_sar(sar_data, state_mask): + s1_data = namedtuple('s1_data', 'time lat lon satellite relorbit orbitdirection ang vv vh') + ang = reproject_data(sar_data.ang_name, output_format="MEM", target_img=state_mask) + vv = reproject_data(sar_data.vv_name, 
output_format="MEM", target_img=state_mask) + vh = reproject_data(sar_data.vh_name, output_format="MEM", target_img=state_mask) + + time = [datetime.datetime(1970,1,1) + datetime.timedelta(days=float(i)) for i in sar_data.time] + + return s1_data(time, sar_data.lat, sar_data.lon, sar_data.satellite, sar_data.relorbit, sar_data.orbitdirection, ang, vv, vh) + +def read_vwc(vwc_data, state_mask): + s2_data = namedtuple('s2_vwc', 'time vwc ndwi') + filelist = glob.glob(vwc_data+'*.tif') + filelist.sort() + time = [] + vwc = [] + ndwi = [] + for file in filelist: + g = gdal.Open(file) + ndwi_array = reproject_data(file, output_format="MEM", target_img=state_mask) + ndwi_array = ndwi_array.ReadAsArray() + vwc_array = ndwi1_mag(ndwi_array) + time.append(datetime.datetime.strptime(file.split('/')[-1][14:22], '%Y%m%d')) + vwc.append(vwc_array) + ndwi.append(ndwi_array) + + return s2_data(time, vwc, ndwi) + +def read_api(api_data, state_mask): + s1_data = namedtuple('api_data', 'time lat lon api') + + api = reproject_data(api_data.api, output_format="MEM", target_img=state_mask) + time = [datetime.datetime(2000,1,1) + datetime.timedelta(hours=float(i)) for i in api_data.time] + + return s1_data(time, api_data.lat, api_data.lon, api) + + +def inference_preprocessing(s1_data, vwc_data, api_data, state_mask, orbit1=None, orbit2=None): + """Resample S2 smoothed output to match S1 observations + times""" + # Move everything to DoY to simplify interpolation + + sar_inference_data = namedtuple('sar_inference_data', 'time lat lon satellite relorbit orbitdirection ang vv vh vwc api time_mask ndwi') + + + vwc_doys = np.array([ int(i.strftime('%j')) for i in vwc_data.time]) + s1_doys = np.array([ int(i.strftime('%j')) for i in s1_data.time]) + + + time = np.array(s1_data.time) + for jj in range(len(s1_data.time)): + time[jj] = s1_data.time[jj].replace(microsecond=0).replace(second=0).replace(minute=0) + + index=[] + xxx = np.array(api_data.time) + for jj in range(len(time)): + oje = 
np.where(xxx==time[jj]) + try: + ojet = oje[0][0] + index.append(ojet) + except IndexError: + pass + api_doys = np.array([ int(i.strftime('%j')) for i in np.array(api_data.time)[index]]) + + f = interp1d(vwc_doys, np.array(vwc_data.vwc), axis=0, bounds_error=False) + vwc_s1 = f(s1_doys) + + f = interp1d(vwc_doys, np.array(vwc_data.ndwi), axis=0, bounds_error=False) + ndwi_s1 = f(s1_doys) + + api_s1 = api_data.api.ReadAsArray()[index] + f = interp1d(api_doys, api_s1, axis=0, bounds_error=False) + api_s1 = f(s1_doys) + + if s1_data.time[0].year == 2017: + time_mask = (s1_doys >= 80) & (s1_doys <= 273) + elif s1_data.time[0].year == 2018: + time_mask = (s1_doys >= 80) & (s1_doys <= 273) + else: + print('no time mask') + + if orbit1 != None: + rel_orbit1 = s1_data.relorbit==orbit1 + if orbit2 != None: + rel_orbit2 = s1_data.relorbit==orbit2 + xxx = np.logical_and(rel_orbit1,time_mask) + yyy = np.logical_and(rel_orbit2,time_mask) + time_mask = np.logical_or(xxx,yyy) + + sar_inference_data = sar_inference_data(s1_data.time, s1_data.lat, s1_data.lon, + s1_data.satellite, s1_data.relorbit, + s1_data.orbitdirection, s1_data.ang, + s1_data.vv, s1_data.vh, vwc_s1, api_s1, time_mask, ndwi_s1) + + return sar_inference_data + + +def do_one_pixel_field(vv, vh, vwc, vwc_std, theta, time, sm, sm_std, b, b_std, omega, rms, rms_std, orbits, unc): + + ps = [] + vwcs = [] + bs = [] + sms = [] + srms = [] + times = [] + + uorbits = np.unique(orbits) + uorbits = np.array([95]) + for orbit in uorbits: + # for jj in range(len(vv)): + # orbit_mask = orbits == orbit + # orbit_mask = (orbits == 44) | (orbits == 168) + # orbit_mask = (orbits == 95) | (orbits == 117) + orbit_mask = (orbits == 44) | (orbits == 95) | (orbits == 117) | (orbits == 168) + # orbit_mask = (orbits == 168) + # orbit_mask = (orbits == 44) | (orbits == 95) | (orbits == 117) + ovv, ovh, ovwc, ovwc_std, otheta, otime = vv[orbit_mask], vh[orbit_mask], vwc[orbit_mask], vwc_std[orbit_mask], theta[orbit_mask], time[orbit_mask] 
+ osm, osm_std, osb, osb_std = sm[orbit_mask], sm_std[orbit_mask], b[orbit_mask], b_std[orbit_mask] + + + prior_mean = np.concatenate([[0, ], [rms], osm, ovwc, osb]) + prior_unc = np.concatenate([[10., ], [rms_std], osm_std, ovwc_std, osb_std]) + + + x0 = np.concatenate([np.array([omega]), np.array([rms]), osm, ovwc, osb]) + + # bounds for b related to expected curve + xxx = [] + for jjj, jj in enumerate(osb): + if jj <= 0.2: + xxx.append([0.01,osb[jjj]+0.2]) + else: + xxx.append([osb[jjj]-0.2,osb[jjj]+0.2]) + + bounds = ( + [[0.027, 0.027]] # omega + + [[0.005, 0.03]] # s=rms + + [[0.01, 0.7]] * osb.shape[0] # mv + + [[0, 7.5]] * osb.shape[0] # vwc + + xxx #[[0.01, 0.6]] * osb.shape[0] # b + ) + + + data = osb + + gamma = [10, 10] + + retval = minimize(cost_function_vwc, + x0, + args=(ovh, ovv, otheta, gamma, prior_mean, prior_unc, unc, data), + jac=True, + bounds = bounds, + options={"disp": False}) + + posterious_rms = retval.x[1] + posterious_sm = retval.x[2 : 2+len(osb)] + posterious_vwc = retval.x[2+len(osb) : 2+2*len(osb)] + posterious_b = retval.x[2+2*len(osb) : 2+3*len(osb)] + + srms.append(posterious_rms) + sms.append(posterious_sm) + vwcs.append(posterious_vwc) + bs.append(posterious_b) + times.append(otime) + ps.append(retval.x[:1]) + + order = np.argsort(np.hstack(times)) + times = np.hstack(times )[order] + vwcs = np.hstack(vwcs )[order] + bs = np.hstack(bs )[order] + sms = np.hstack(sms )[order].real + + return times, vwcs, bs, sms, np.array(srms), np.array(ps), orbit_mask + + + + +def do_inversion(sar_inference_data, state_mask, year=None, version=None, passes=None): + + orbits = sar_inference_data.relorbit[sar_inference_data.time_mask] + uorbits = np.unique(orbits) + + out_shape = sar_inference_data.vwc[sar_inference_data.time_mask].shape + vwc_outputs = np.zeros(out_shape ) + sm_outputs = np.zeros(out_shape ) + b_outputs = np.zeros(out_shape ) + rms_outputs = np.zeros(out_shape ) + + g = gdal.Open(state_mask) + state_mask = 
g.ReadAsArray().astype(np.int) + state_mask = state_mask > 0 + + vv_all = sar_inference_data.vv.ReadAsArray()[sar_inference_data.time_mask] + vh_all = sar_inference_data.vh.ReadAsArray()[sar_inference_data.time_mask] + theta_all = sar_inference_data.ang.ReadAsArray()[sar_inference_data.time_mask] + time_all = np.array(sar_inference_data.time)[sar_inference_data.time_mask] + + vwc_all = sar_inference_data.vwc[sar_inference_data.time_mask] + ndwi_all = sar_inference_data.ndwi[sar_inference_data.time_mask] + + ### vwc needs to be changed!!!! NDWI1!!! + vwc_std = vwc_all[:,0,0] + vwc_std[:] = 0.1 + sm_all = sar_inference_data.api[sar_inference_data.time_mask] + sm_all = sm_all / 100. + sm_std = sm_all[:,0,0] + sm_std[:] = 0.2 + + b = sm_all[:,0,0] + b[:] = 0 + b_std = sm_all[:,0,0] + b_std[:] = 0.5 # not used anyway + rms = sm_all[:,0,0] + rms = 0.2 + rms_std = 0.1 # not used anyway + + unc = 1.9 + omega = 0.027 + + sm_retrieved = sm_all * np.nan + + if not os.path.exists('/media/tweiss/Work/Paper3_down/'+passes): + os.makedirs('/media/tweiss/Work/Paper3_down/'+passes) + + + np.save('/media/tweiss/Work/Paper3_down/'+passes+'/'+year+version+'_input_vv.npy', vv_all) + np.save('/media/tweiss/Work/Paper3_down/'+passes+'/'+year+version+'_input_vwc.npy', vwc_all) + np.save('/media/tweiss/Work/Paper3_down/'+passes+'/'+year+version+'_input_sm_api.npy', sm_all) + np.save('/media/tweiss/Work/Paper3_down/'+passes+'/'+year+version+'_input_ndwi.npy', ndwi_all) + + for z in range(len(state_mask)): + print(z) + for zz in range(len(state_mask[0])): + if state_mask[z,zz] == False: + pass + else: + vv = vv_all[:,z,zz] + vh = vh_all[:,z,zz] + theta = theta_all[:,z,zz] + vwc = vwc_all[:,z,zz] + vwc[vwc < 0.01] = 0.02 + + orbits95 = orbits==95 + orbits168 = orbits==168 + orbits44 = orbits==44 + orbits117 = orbits==117 + # orbits44_168 = (orbits == 44) | (orbits == 168) + # b[:] = 0.4 + b[orbits95] = 0.4 + b[orbits117] = 0.4 + b[orbits44] = 0.6 + b[orbits168] = 0.6 + + 
orbits95[0:np.argmax(vwc)] = False + orbits117[0:np.argmax(vwc)] = False + orbits44[0:np.argmax(vwc)] = False + orbits168[0:np.argmax(vwc)] = False + + # orbits95[0:np.where(time_all == min(time_all, key=lambda x: abs(x-datetime.datetime(2017,5,15))))[0][0]] = False + # orbits117[0:np.where(time_all == min(time_all, key=lambda x: abs(x-datetime.datetime(2017,5,15))))[0][0]] = False + # orbits44[0:np.where(time_all == min(time_all, key=lambda x: abs(x-datetime.datetime(2017,5,15))))[0][0]] = False + # orbits168[0:np.where(time_all == min(time_all, key=lambda x: abs(x-datetime.datetime(2017,5,15))))[0][0]] = False + + + b[orbits95] = 0.1 + b[orbits117] = 0.1 + b[orbits44] = 0.2 + b[orbits168] = 0.2 + + + sm = sm_all[:,z,zz] + + + times, svwc, sb, sms, srms, ps, orbit_mask = do_one_pixel_field(vv, vh, vwc, vwc_std, theta, time_all, sm, sm_std, b, b_std, omega, rms, rms_std, orbits,unc=unc) + + vwc_outputs[:,z,zz] = svwc + sm_outputs[:,z,zz] = sms + b_outputs[:,z,zz] = sb + rms_outputs[:,z,zz] = srms + + np.save('/media/tweiss/Work/Paper3_down/'+passes+'/'+year+version+'_sm'+'.npy', sm_outputs) + np.save('/media/tweiss/Work/Paper3_down/'+passes+'/'+year+version+'_vwc'+'.npy', vwc_outputs) + np.save('/media/tweiss/Work/Paper3_down/'+passes+'/'+year+version+'_b'+'.npy', b_outputs) + np.save('/media/tweiss/Work/Paper3_down/'+passes+'/'+year+version+'_rms'+'.npy', rms_outputs) + np.save('/media/tweiss/Work/Paper3_down/'+passes+'/'+year+version+'_times.npy',times) + + return 'done' + +def save_output(fname, Array, GeoT, projction, time): + if os.path.exists(fname): + os.remove(fname) + ds = gdal.GetDriverByName('GTiff').Create(fname, Array.shape[2], Array.shape[1], Array.shape[0], gdal.GDT_Float32) + ds.SetGeoTransform(GeoT) + ds.SetProjection(projction) + for i, image in enumerate(Array): + ds.GetRasterBand(i+1).SetMetadata({'date': time[i]}) + ds.GetRasterBand(i+1).WriteArray( image ) + ds.FlushCache() + return fname + +def save_ps_output(fname, Array, GeoT, projction, 
orbit): + if os.path.exists(fname): + os.remove(fname) + ds = gdal.GetDriverByName('GTiff').Create(fname, Array.shape[2], Array.shape[1], Array.shape[0], gdal.GDT_Float32) + ds.SetGeoTransform(GeoT) + ds.SetProjection(projction) + for i, image in enumerate(Array): + ds.GetRasterBand(i+1).SetMetadata({'orbit': str(int(orbit[i]))}) + ds.GetRasterBand(i+1).WriteArray( image ) + ds.FlushCache() + return fname + + + +class KaSKASAR(object): + """A class to process Sentinel 1 SAR data using S2 data as + an input""" + + def __init__(self, s1_ncfile, state_mask, s2_wvc, rad_api, year, vv_version, passes, orbit1=None,orbit2=None): + self.s1_ncfile = s1_ncfile + self.state_mask = state_mask + self.s2_wvc = s2_vwc + self.rad_api = rad_api + self.year = year + self.version = version + self.passes = passes + + self.orbit1 = None + self.orbit2 = None + if orbit1 != None: + self.orbit1 = orbit1 + if orbit2 != None: + self.orbit2 = orbit2 + + def sentinel1_inversion(self): + sar = get_sar(s1_ncfile, version) + s1_data = read_sar(sar, self.state_mask) + + vwc_data = read_vwc(s2_vwc, self.state_mask) + + api = get_api(rad_api,year) + api_data = read_api(api, self.state_mask) + + sar_inference_data = inference_preprocessing(s1_data, vwc_data, api_data, self.state_mask,self.orbit1,self.orbit2) + + + xxx = do_inversion(sar_inference_data, self.state_mask, year, version, passes) + + # gg = gdal.Open('NETCDF:"%s":sigma0_vv_multi'%self.s1_ncfile) + # geo = gg.GetGeoTransform() + + # projction = 'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]' + + # time = [i.strftime('%Y-%m-%d') for i in np.array(sar_inference_data.time)[sar_inference_data.time_mask]] + + # sm_name = self.s1_ncfile.replace('.nc', '_sar_sm.tif') + # sr_name = self.s1_ncfile.replace('.nc', '_sar_sr.tif') + # lai_name 
= self.s1_ncfile.replace('.nc', '_sar_lai.tif') + + # save_output(sm_name, sm_outputs, geo, projction, time) + # save_output(sr_name, sr_outputs, geo, projction, time) + # save_output(lai_name, lai_outputs, geo, projction, time) + + + + +if __name__ == '__main__': + + + years = ['2017','2018'] + # years = ['2018'] + versions = ['_multi', '_single'] + versions = ['_multi'] + for year in years: + for version in versions: + s1_ncfile = '/media/tweiss/Work/Paper3_down/data/MNI_'+year+'_new_final_paper3.nc' + state_mask = '/media/tweiss/Work/Paper3_down/GIS/clc_class2.tif' + state_mask = '/media/tweiss/Work/Paper3_down/GIS/'+year+'_ESU_Field_buffer_30.tif' + rad_api = '/media/tweiss/Work/Paper3_down/data/RADOLAN_API_v1.0.0.nc' + + s2_vwc = '/media/tweiss/Work/Paper3_down/data/'+year+'/tif1/' + + passes = 'hm' + + sarsar = KaSKASAR(s1_ncfile, state_mask, s2_vwc, rad_api, year, version, passes) + + sarsar.sentinel1_inversion() + diff --git a/kaska/kaska_ssrt_again.py b/kaska/kaska_ssrt_again.py new file mode 100644 index 0000000..4afdef6 --- /dev/null +++ b/kaska/kaska_ssrt_again.py @@ -0,0 +1,789 @@ + +import os +import pandas as pd +import matplotlib.pyplot as plt +import matplotlib.dates as mdates +from matplotlib.dates import MonthLocator +# import matplotlib.ticker +import numpy as np +# from sense.canopy import OneLayer +# from sense.soil import Soil +# from sense import model +import scipy.stats +from scipy.optimize import minimize +import pdb +from z_helper import * +# from z_optimization import * +import datetime +from matplotlib import gridspec +import datetime +from matplotlib.lines import Line2D +import copy +import os +import osr +import gdal +import datetime +import numpy as np +from netCDF4 import Dataset +from scipy.ndimage import label +from utils import reproject_data +from skimage.filters import sobel +from collections import namedtuple +from scipy.optimize import minimize +from scipy.interpolate import interp1d +# from watercloudmodel import 
cost_function +from watercloudmodel import cost_function2 +from scipy.ndimage.filters import gaussian_filter1d +import pdb +from z_helper import * +from watercloudmodel import ssrt_jac_ + + + + +def smooth(y, box_pts): + box = np.ones(box_pts)/box_pts + y_smooth = np.convolve(y, box, mode='same') + return y_smooth + + +def do_one_pixel_field(data_field, vv, vh, lai, theta, time, sm, sm_std, coef, coef_std, height, orbits, unc): + + lais = [] + coefs = [] + sms = [] + times = [] + + uorbits = np.unique(orbits) + uorbits = np.array([95]) + for orbit in uorbits: + # for jj in range(len(vv)): + # pdb.set_trace() + # orbit_mask = orbits == orbit + # orbit_mask = (orbits == 44) | (orbits == 168) + orbit_mask = (orbits == 44) | (orbits == 95) | (orbits == 117) | (orbits == 168) + # orbit_mask = (orbits == 168) + # orbit_mask = (orbits == 44) | (orbits == 95) | (orbits == 117) + ovv, ovh, olai, otheta, otime = vv[orbit_mask], vh[orbit_mask], lai[orbit_mask], theta[orbit_mask], time[orbit_mask] + osm, osm_std, oscoef, oscoef_std = sm[orbit_mask], sm_std[orbit_mask], coef[orbit_mask], coef_std[orbit_mask] + + oheight = height[orbit_mask] + + # ovv, ovh, olai, otheta, otime = np.array([vv[jj]]), np.array([vh[jj]]), np.array([lai[jj]]), np.array([theta[jj]]), np.array([time[jj]]) + # osm, osm_std, oscoef, oscoef_std = np.array([sm[jj]]), np.array([sm_std[jj]]), np.array([coef[jj]]), np.array([coef_std[jj]]) + + # oheight = np.array([height[jj]]) + + + + # pdb.set_trace() + olai_std = np.ones_like(olai)*0.05 + + alpha = _calc_eps(osm) + alpha = osm + alpha_std = np.ones_like(alpha)*10 + alpha_std = osm_std + # pdb.set_trace() + prior_mean = np.concatenate([alpha,oscoef]) + prior_unc = np.concatenate([alpha_std,oscoef_std]) + + x0 = np.concatenate([alpha,oscoef]) + data = np.concatenate([oheight,olai]) + bounds = ( + # [[2.5, 30]] * olai.shape[0] + [[0.01, 0.5]] * olai.shape[0] + + [[0.0000001, 3]] * olai.shape[0] + ) + + gamma = [500, 500] + + retval = minimize(cost_function2, 
+ x0, + args=(ovh, ovv, otheta, gamma, prior_mean, prior_unc, data, unc), + jac=True, + bounds = bounds, + options={"disp": True},) + + # posterious_lai = retval.x[2*len(olai) : 3*len(olai)] + posterious_coef = retval.x[len(olai) : +2*len(olai)] + posterious_mv = retval.x[ : +len(olai)] + # lais.append(posterious_lai) + coefs.append(posterious_coef) + # x = np.arange(0.01, 0.5, 0.001) + # xx = _calc_eps(x) + # sols=[] + # for i in posterious_mv: + # p, pp = find_nearest(xx,i) + # sols.append(x[pp]) + # sols = np.array(sols) + + sms.append(posterious_mv) + # sms.append(sols) + times.append(otime) + + order = np.argsort(np.hstack(times)) + times = np.hstack(times )[order] + # lais = np.hstack(lais )[order] + lais=0 + coefs = np.hstack(coefs )[order] + # coefs=0 + sms = np.hstack(sms )[order].real + # orbit_mask = (orbits == 44) | (orbits == 95) | (orbits == 117) | (orbits == 168) + return times, lais, coefs, sms, orbit_mask + + + +def _simple_ew(): + """ + eq. 4.69 + simplistic approach with T=23°C, bulk density = 1.7 g/cm3 + """ + f0 = 18.64 # relaxation frequency [GHz] + f = 5.405 + hlp = f/f0 + e1 = 4.9 + (74.1)/(1.+hlp**2.) + # e2 =(74.1*hlp)/(1.+hlp**2.) + 6.46 * self.sigma/self.f + # return e1 + 1.j * e2 + return e1 + +def _calc_eps(mv): + """ + calculate dielectric permittivity + Eq. 
4.66 (Ulaby et al., 2014) + """ + clay = 0.0738 + sand = 0.2408 + bulk = 1.45 + alpha = 0.65 + beta1 = 1.27-0.519*sand - 0.152*clay + beta2 = 2.06 - 0.928*sand -0.255*clay + sigma = -1.645 + 1.939*bulk - 2.256*sand + 1.594*clay + + + e1 = (1.+0.66*bulk+mv**beta1*_simple_ew()**alpha - mv)**(1./alpha) + # e2 = np.imag(self.ew)*self.mv**self.beta2 + # return e1 + 1.j*e2 + return e1 + +# def quad_approx_solver(alphas): +# x = np.arange(0.01, 0.5, 0.01) +# p = np.polyfit(x, _calc_eps(x), 2) +# # 2nd order polynomial +# #solve +# solutions = [np.roots([p[0], p[1], p[2]-aa]) for aa in alphas] +# return solutions + +def find_nearest(array, value): + array = np.asarray(array) + idx = (np.abs(array - value)).argmin() + return array[idx], idx + + + + +### Data preparation df_agro!!!! ### +#----------------------------------------------------------------- +# storage information +path = '/media/tweiss/Work/z_final_mni_data_2017' +file_name = 'in_situ_s1_buffer_50' # theta needs to be changed to for norm multi +extension = '.csv' + +path_agro = '/media/nas_data/2017_MNI_campaign/field_data/meteodata/agrarmeteorological_station' +path_agro = '/media/tweiss/Work/Paper/in_progress/RT_model_comparison/images' +file_name_agro = 'Daily_Freising' +extension_agro = '.csv' + +field = '508_high' +pol = 'vv' + +df, df_agro, field_data, field_data_orbit, theta_field, sm_field, height_field, lai_field, vwc_field, pol_field, vv_field, vh_field, relativeorbit, vwcpro_field = read_data(path, file_name, extension, field, path_agro, file_name_agro, extension_agro, pol) + +aggregation = ['','_buffer_30','_buffer_50','_buffer_100','_Field_buffer_30'] +pre_processing = ['multi', 'norm_multi'] +aggregation = ['_buffer_50','_Field_buffer_30'] +pre_processing = ['multi'] +aggregation = ['_buffer_100'] +# aggregation = ['_Field_buffer_30'] +surface_list = ['Oh92', 'Oh04', 'Dubois95', 'WaterCloud', 'I2EM'] +canopy_list = ['turbid_isotropic', 'water_cloud'] + +surface_list = ['Oh92', 'I2EM'] +canopy_list 
= ['turbid_isotropic'] + +surface_list = ['Oh04'] +# surface_list = ['Oh92'] +# canopy_list = ['water_cloud'] +field = ['508_high'] +# field = ['508_low'] +# field = ['508_med'] +# field = ['301_high'] +field = ['301_low'] +# field = ['301_med'] +# field = ['542_high'] +# field = ['542_low'] +# field = ['542_med'] + +### option for time invariant or variant calibration of parameter +#------------------------------- +opt_mod = ['time_variant'] +#--------------------------- + + +for p in pre_processing: + + for pp in aggregation: + + # versions = ['everything','','44_117','95_168','44_168','117_95','44_95','117_168','44_117_95','44_117_168','44_95_168','117_95_168'] + # ver = ['','','44','95','44','117','44','117','44','44','44','117'] + # ver2 = ['','','117','168','168','95','95','168','117','117','95','95'] + # ver3 = ['','','','','','','','','95','168','168','168'] + + versions = ['','everything'] + ver = ['',''] + ver2 = ['',''] + ver3 = ['',''] + + + # versions = ['everything'] + # ver = [''] + # ver2 = [''] + # ver3 = [''] + + # versions = ['44_168'] + # ver = ['44'] + # ver2 = ['168'] + # ver3 = [''] + + for i, ii in enumerate(versions): + + if ii == 'everything': + orbit_list = [None] + orbit1=None + orbit2=None + orbit3=None + orbit4=None + plot_output_path = '/media/tweiss/Work/paper2/z_dense_s1_time_series_n7'+p+pp+'_all'+'/' + csv_output_path = plot_output_path+'csv/None_' + elif ii == '': + orbit_list = [44,117,95,168] + orbit2=None + orbit3=None + orbit4=None + plot_output_path = '/media/tweiss/Work/paper2/z_dense_s1_time_series_n7'+p+pp+'/' + csv_output_path = plot_output_path+'csv/' + else: + plot_output_path = '/media/tweiss/Work/paper2/z_dense_s1_time_series_n7'+p+pp+'_'+ii+'/' + csv_output_path = plot_output_path+'csv/'+ver[i]+'_'+ver[i]+'_' + orbit_list = [int(ver[i])] + orbit2 = int(ver2[i]) + if ver3[i] == '': + orbit3 = None + else: + orbit3 = int(ver3[i]) + + + data = pd.read_csv(csv_output_path+'all_50.csv',header=[0,1,2,3,4,5],index_col=0) + 
+ + + + + + # fig, ax = plt.subplots(figsize=(17, 13)) + # gs = gridspec.GridSpec(5, 1, height_ratios=[14, 3, 3, 3, 3]) + # ax = plt.subplot(gs[0]) + + # plt.ylabel('Backscatter [dB]', fontsize=18) + # plt.xlabel('Date', fontsize=18) + # plt.tick_params(labelsize=17) + + # ax.set_ylim([-21.5,-8.5]) + + + # colormaps = ['Greens', 'Purples', 'Blues', 'Oranges', 'Reds', 'Greys', 'pink', 'bone', 'Blues', 'Blues', 'Blues'] + # r = 0 + + # colormap = plt.get_cmap(colormaps[r]) + # colors = [colormap(rr) for rr in np.linspace(0.35, 1., 3)] + + for kkk in opt_mod: + for kkkk in field: + for k in surface_list: + for kk in canopy_list: + + if k == 'Oh92': + hm = 'Oh92' + colors = 'b' + elif k == 'Oh04': + hm = 'Oh04' + colors = 'r' + elif k == 'Dubois95': + hm='Dubois95' + colors = 'y' + elif k == 'WaterCloud': + hm = 'WCM' + colors = 'm' + elif k == 'I2EM': + hm = 'IEM_B' + colors = 'g' + + data_field = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk) + data_field.index = pd.to_datetime(data_field.index) + date = data_field.index + + + + vv = data_field.filter(like='S1_vv').values.flatten() + vv = 10*np.log10(vv) + vh = data_field.filter(like='S1_vh').values.flatten() + vh = 10*np.log10(vh) + lai = data_field.filter(like='LAI_insitu').values.flatten() + lai = lai + theta = data_field.filter(like='theta').values.flatten() + theta = np.rad2deg(theta) + time = date + time2 = np.array(time) + for jj in range(len(time)): + time2[jj] = time[jj].replace(microsecond=0).replace(second=0).replace(minute=0) + time2 = pd.to_datetime(time2) + + + s2_data = pd.read_csv('/media/tweiss/Daten/data_AGU/S2_'+kkkk+pp+'.csv',header=[0],index_col=0) + s2_data.index = pd.to_datetime(s2_data.index).floor('Min').floor('H') + s2_lai = s2_data.loc[time2]['lai'].values.flatten() + s2_cab = s2_data.loc[time2]['cab'].values.flatten() + s2_cbrown = s2_data.loc[time2]['cbrown'].values.flatten() + lai = s2_lai + sm_insitu = data_field.filter(like='SM_insitu').values.flatten() + 
api_data = pd.read_csv('/media/tweiss/Daten/data_AGU/api_sm.csv',header=[0],index_col=0) + api_data.index = pd.to_datetime(api_data.index) + api_sm = api_data.loc[time2].values.flatten() + sm = data_field.filter(like='SM_insitu').values.flatten() + # sm = smooth(sm,2) + sm[:] = 0.25 + # sm = data_field.filter(like='SM_insitu').values.flatten() + sm = api_sm + sm_std = data_field.filter(like='SM_insitu').values.flatten() + ooo = np.abs(sm[1:]-sm[:-1])*20 + sm_std[0] = ooo[-1] + sm_std[1:] = ooo + sm_std[:] = 0.21 + coef = data_field.filter(like='coef').values.flatten() + coef_std = data_field.filter(like='SM_insitu').values.flatten() + coef_std[:] = 0.01 + height = data_field.filter(like='height').values.flatten() + orbits = data_field.filter(like='relativeorbit').values.flatten() + unc = 2.1 + + # unc_array = np.arange(0,2,0.1) + # coef_array = np.arange(0,2,0.1) + # sm_array = np.arange(0,2,0.1) + + # hm = {} + # for r in unc_array: + # for rr in coef_array: + # for rrr in sm_array: + # unc = r + # coef_std[:] = rr + # sm_std[:] = rrr + # times, lais, coefs, sms, orbit_mask = do_one_pixel_field(data_field, vv, vh, lai, theta, time, sm, sm_std, coef, coef_std, height, orbits,unc=unc) + # rmse_vv = rmse_prediction(sm_insitu,sms) + # bias_vv = bias_prediction(sm_insitu,sms) + # ubrmse_vv = ubrmse_prediction(rmse_vv,bias_vv) + # hm[(r,rr,rrr)] = ubrmse_vv + + # pdb.set_trace() + # min(hm, key=hm.get) + # hm[min(hm, key=hm.get)] + + vv = 10 ** (vv/10) + + # pdb.set_trace() + times, lais, coefs, sms, orbit_mask = do_one_pixel_field(data_field, vv, vh, lai, theta, time, sm, sm_std, coef, coef_std, height, orbits,unc=unc) + # pdb.set_trace() + plt.rcParams["figure.figsize"] = (10,7) + # plt.plot(time,sm_insitu, label='insitu') + plt.plot(times,sm_insitu[orbit_mask], label='insitu') + rmse_vv = rmse_prediction(sm_insitu,sm) + bias_vv = bias_prediction(sm_insitu,sm) + ubrmse_vv = ubrmse_prediction(rmse_vv,bias_vv) + # plt.plot(time,sm, label='prior 
RMSE:'+str(rmse_vv)[0:6]+' ubRMSE:'+str(ubrmse_vv)[0:6]) + plt.plot(times,sm[orbit_mask], label='prior RMSE:'+str(rmse_vv)[0:6]+' ubRMSE:'+str(ubrmse_vv)[0:6]) + # rmse_vv = rmse_prediction(sm_insitu,sms) + # bias_vv = bias_prediction(sm_insitu,sms) + rmse_vv = rmse_prediction(sm_insitu[orbit_mask],sms) + bias_vv = bias_prediction(sm_insitu[orbit_mask],sms) + ubrmse_vv = ubrmse_prediction(rmse_vv,bias_vv) + plt.plot(times,sms, label='model RMSE:'+str(rmse_vv)[0:6]+' ubRMSE:'+str(ubrmse_vv)[0:6]) + # plt.plot(times,coefs, label='coef') + # pdb.set_trace() + # #orbit_mask + # plt.plot(time[orbit_mask],sm_insitu[orbit_mask]) + # rmse_vv = rmse_prediction(sm_insitu[orbit_mask],sm[orbit_mask]) + # bias_vv = bias_prediction(sm_insitu[orbit_mask],sm[orbit_mask]) + # ubrmse_vv = ubrmse_prediction(rmse_vv,bias_vv) + # plt.plot(time[orbit_mask],sm[orbit_mask], label='prior RMSE:'+str(rmse_vv)[0:6]+' ubRMSE:'+str(ubrmse_vv)[0:6]) + # rmse_vv = rmse_prediction(sm_insitu[orbit_mask],sms) + # bias_vv = bias_prediction(sm_insitu[orbit_mask],sms) + # ubrmse_vv = ubrmse_prediction(rmse_vv,bias_vv) + # plt.plot(times,sms, label='model RMSE:'+str(rmse_vv)[0:6]+' ubRMSE:'+str(ubrmse_vv)[0:6]) + + + plt.legend() + plt.grid() + plt.ylabel('Soil Moisture') + plt.xlabel('Time') + # plt.savefig('/media/tweiss/Daten/data_AGU/test_kaska/oh04_unc10_lai_flat', bbox_inches = 'tight') + plt.show() + pdb.set_trace() + plt.plot(time[orbit_mask],coef) + plt.plot(times,coefs) + # plt.show() + pdb.set_trace() + pdb.set_trace() + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + if kkk == 'time invariant': + if kk == 'turbid_isotropic': + ax.plot(date, 10*np.log10(data_field.filter(like='vv_model')), color=colors, marker='s', linestyle='dashed', label = hm+ ' + ' + 'SSRT') + else: + ax.plot(date, 10*np.log10(data_field.filter(like='vv_model')), color=colors, marker='s', label = hm+ ' + ' + 'WCM') + else: + if kk == 'turbid_isotropic': + ax.plot(date, 
10*np.log10(data_field.filter(like='vv_model')), color=colors, marker='s', linestyle='dashed', label = hm+ ' + ' + 'SSRT') + else: + ax.plot(date, 10*np.log10(data_field.filter(like='vv_model')), color=colors, marker='s', label = hm+ ' + ' + 'WCM') + a = 0 + b = 0 + c = 0 + d = 0 + + relativeorbit = data_field.filter(like='relativeorbit') + for j in range(len(relativeorbit)): + relativeorbit.index[j] + x = relativeorbit.index[j] - datetime.timedelta(days=0.4) + xx = relativeorbit.index[j] + datetime.timedelta(days=0.4) + if relativeorbit.values.flatten()[j] == 95: + if a == 0: + ax.axvspan(x,xx, color='red', alpha=0.2, label = 'Incidence angle 43°, Descending track') + a += 1 + else: + ax.axvspan(x,xx, color='red', alpha=0.2) + elif relativeorbit.values.flatten()[j] == 117: + if b == 0: + ax.axvspan(x,xx, color='blue', alpha=0.2, label = 'Incidence angle 45°, Ascending track') + b += 1 + else: + ax.axvspan(x,xx, color='blue', alpha=0.2) + elif relativeorbit.values.flatten()[j] == 168: + if c == 0: + ax.axvspan(x,xx, color='orange', alpha=0.2, label = 'Incidence angle 35°, Descending track') + c += 1 + else: + ax.axvspan(x,xx, color='orange', alpha=0.2) + elif relativeorbit.values.flatten()[j] == 44: + if d == 0: + ax.axvspan(x,xx, color='green', alpha=0.2, label = 'Incidence angle 36°, Ascending track') + d += 1 + else: + ax.axvspan(x,xx, color='green', alpha=0.2) + else: + pass + + ax.plot(date,10*np.log10(data_field.filter(like='S1_vv')), '-', color='black', label='Sentinel-1', linewidth=3, marker='s') + + ax.set_xlim([datetime.date(2017, 3, 22), datetime.date(2017, 7, 18)]) + plt.legend(prop={'size': 14}, loc=3) + + plt.grid(linestyle='dotted') + + plt.setp(ax.get_xticklabels(), visible=False) + + ax0 = plt.subplot(gs[1]) + plt.tick_params(labelsize=17) + for kkkk in field: + for k in surface_list: + for kk in canopy_list: + + if k == 'Oh92': + hm = 'Oh92' + colors = 'b' + elif k == 'Oh04': + hm = 'Oh04' + colors = 'r' + elif k == 'Dubois95': + hm='Dubois95' + 
colors = 'y' + elif k == 'WaterCloud': + hm = 'WCM' + colors = 'm' + elif k == 'I2EM': + hm = 'IEM_B' + colors = 'g' + + + ground = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk).filter(like='part_g') + ground = ground[ground.columns[0]] + + lai = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk).filter(like='LAI_insitu').values + theta = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk).filter(like='theta').values + + if kk == 'turbid_isotropic': + coef = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk).filter(like='coef').values + d = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk).filter(like='height').values + T = np.exp(-coef*np.sqrt(lai)*d/np.cos(theta)) + T=T**2 + ax0.plot(date,T.flatten(), color=colors, marker='s', linestyle='dashed') + else: + B_vv = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk).filter(like='B_vv').values + T = np.exp(-2*B_vv*lai/np.cos(theta)) + ax0.plot(date,T.flatten(), color=colors, marker='s') + + a = 0 + b = 0 + c = 0 + d = 0 + + relativeorbit = data_field.filter(like='relativeorbit') + for j in range(len(relativeorbit)): + relativeorbit.index[j] + x = relativeorbit.index[j] - datetime.timedelta(days=0.4) + xx = relativeorbit.index[j] + datetime.timedelta(days=0.4) + if relativeorbit.values.flatten()[j] == 95: + if a == 0: + ax0.axvspan(x,xx, color='red', alpha=0.2, label = 'descending 43°') + a += 1 + else: + ax0.axvspan(x,xx, color='red', alpha=0.2) + elif relativeorbit.values.flatten()[j] == 117: + if b == 0: + ax0.axvspan(x,xx, color='blue', alpha=0.2, label = 'ascending 43°') + b += 1 + else: + ax0.axvspan(x,xx, color='blue', alpha=0.2) + elif relativeorbit.values.flatten()[j] == 168: + if c == 0: + ax0.axvspan(x,xx, color='orange', alpha=0.2, label = 'descending 35°') + c += 1 + else: + ax0.axvspan(x,xx, color='orange', alpha=0.2) + elif relativeorbit.values.flatten()[j] == 44: + if d == 0: + 
ax0.axvspan(x,xx, color='green', alpha=0.2, label = 'ascending 36°') + d += 1 + else: + ax0.axvspan(x,xx, color='green', alpha=0.2) + else: + pass + plt.ylabel('Transmissivity\nT', fontsize=18) + ax0.set_xlim([datetime.date(2017, 3, 22), datetime.date(2017, 7, 18)]) + ax0.set_ylim(-0.2,1.1) + plt.grid(linestyle='dotted') + plt.setp(ax0.get_xticklabels(), visible=False) + + + ax1 = plt.subplot(gs[2], sharex = ax) + plt.tick_params(labelsize=17) + # remove vertical gap between subplots + plt.subplots_adjust(hspace=.0) + plt.grid(linestyle='dotted') + plt.setp(ax1.get_xticklabels(), visible=False) + + lai_field = data_field.filter(like='LAI_insitu') + height_field = data_field.filter(like='height') + + + ax1.plot(date,lai_field,color='green',linewidth=2,label='LAI') + ax2 = ax1.twinx() + plt.tick_params(labelsize=17) + ax2.plot(date,height_field,color='black', linewidth=2, label='Height') + ax1.set_ylabel('LAI', fontsize=16) + ax2.set_ylabel('Height\n[m]', fontsize=16) + + + # add std for LAI and height for field 508 (data from field measurements) + lai_old = copy.deepcopy(lai_field) + height_old = copy.deepcopy(height_field) + if field == '508_high': + lai_field[lai_field.index>'2017-03-28'] = 0.2218 + lai_field[lai_field.index>'2017-04-05'] = 0.1367 + lai_field[lai_field.index>'2017-04-10'] = 0.4054 + lai_field[lai_field.index>'2017-04-21'] = 0.3247 + lai_field[lai_field.index>'2017-05-02'] = 0.5546 + lai_field[lai_field.index>'2017-05-10'] = 0.5852 + lai_field[lai_field.index>'2017-05-16'] = 0.3058 + lai_field[lai_field.index>'2017-05-26'] = 0.5373 + lai_field[lai_field.index>'2017-05-29'] = 0.332 + lai_field[lai_field.index>'2017-06-02'] = 0.2856 + lai_field[lai_field.index>'2017-06-13'] = 0.4717 + lai_field[lai_field.index>'2017-06-26'] = 0.2982 + lai_field[lai_field.index>'2017-07-06'] = 0.253 + + height_field[height_field.index>'2017-03-28'] = 0.005774 + height_field[height_field.index>'2017-04-05'] = 0.015275 + height_field[height_field.index>'2017-04-10'] = 
0.026458 + height_field[height_field.index>'2017-04-21'] = 0.049329 + height_field[height_field.index>'2017-05-02'] = 0.01 + height_field[height_field.index>'2017-05-10'] = 0.01 + height_field[height_field.index>'2017-05-26'] = 0.028868 + height_field[height_field.index>'2017-05-29'] = 0.028868 + height_field[height_field.index>'2017-06-02'] = 0.028868 + height_field[height_field.index>'2017-06-13'] = 0.020817 + height_field[height_field.index>'2017-06-26'] = 0.025166 + height_field[height_field.index>'2017-07-06'] = 0.015275 + + ax1.fill_between(lai_field.index,lai_old.values.flatten()-lai_field.values.flatten(), lai_old.values.flatten()+lai_field.values.flatten(), color='green', alpha=0.2, label='Standard Deviation') + ax2.fill_between(height_field.index,height_old.values.flatten()-height_field.values.flatten(), height_old.values.flatten()+height_field.values.flatten(), color='black', alpha=0.2, label='Standard Deviation') + + ax2.legend(bbox_to_anchor=(.965, 0.45), prop={'size': 14}) + ax1.legend(loc=2, prop={'size': 14}) + + # ax1.set_xticks([]) + ax1.set_ylim(0,6.7) + ax2.set_ylim(0,1) + start, end = ax1.get_ylim() + ax1.yaxis.set_ticks(np.arange(start, end, 2)) + + # soil moisture and rainfall + ax3 = plt.subplot(gs[3], sharex = ax) + plt.tick_params(labelsize=17) + # remove vertical gap between subplots + plt.subplots_adjust(hspace=.0) + plt.grid(linestyle='dotted') + ax3.plot(date,data_field.filter(like='SM_insitu'),color='blue', linewidth=2, label='Soil Moisture') + ax3.set_ylabel('Soil Moisture\n$[cm^3/cm^3]$', fontsize=16) + ax5 = ax3.twinx() + date_agro = pd.to_datetime(df_agro['date'], format='%d.%m.%Y') + agro_sum = df_agro['SUM_NN050'][87:192] + ax5.bar(agro_sum.index, agro_sum, width=0.8, label='Precipitation') + ax3.legend(loc=2, prop={'size': 14}) + ax5.legend(loc=1, prop={'size': 14}) + ax5.set_ylabel('Precipita-\ntion [mm]', fontsize=16) + ax5.set_ylim(0,39) + ax3.set_ylim(0.17,0.38) + plt.setp(ax3.get_xticklabels(), visible=False) + 
plt.tick_params(labelsize=17) + + ax4 = plt.subplot(gs[4], sharex = ax) + plt.tick_params(labelsize=17) + # remove vertical gap between subplots + plt.subplots_adjust(hspace=.0) + plt.grid(linestyle='dotted') + bbch = pd.read_csv('/media/tweiss/Work/z_final_mni_data_2017/bbch_2017.csv',header=[0,1]) + bbch = bbch.set_index(pd.to_datetime(bbch['None']['None'], format='%Y-%m-%d')) + bbch.index = pd.to_datetime(bbch.index) + + lai_field['bbch'] = 0 + + bbch_new = bbch.filter(like=kkkk[0:3]) + for t, tt in enumerate(bbch.index): + if t == 0: + start_date = '2017-03-29' + else: + start_date = bbch.index[t] + try: + end_date = bbch.index[t+1] + except IndexError: + start_date = bbch.index[t] + end_date = '2017-07-30' + mask = (lai_field.index > start_date) & (lai_field.index <= end_date) + + bbbb = lai_field['bbch'].where(~mask, other=2) + if bbch.index[t] < datetime.datetime.strptime('2017-03-29', '%Y-%m-%d'): + pass + else: + if bbch_new.values[t] < 30 and bbch_new.values[t] >= 20: + # lai_field['bbch'] = lai_field['bbch'].where(~mask, other=2) + n2 = max(lai_field['bbch'][mask].index) + elif bbch_new.values[t] < 40 and bbch_new.values[t] >= 30: + # lai_field['bbch'] = lai_field['bbch'].where(~mask, other=3) + n3 = max(lai_field['bbch'][mask].index) + elif bbch_new.values[t] < 50 and bbch_new.values[t] >= 40: + # lai_field['bbch'] = lai_field['bbch'].where(~mask, other=4) + n4 = max(lai_field['bbch'][mask].index) + elif bbch_new.values[t] < 60 and bbch_new.values[t] >= 50: + # lai_field['bbch'] = lai_field['bbch'].where(~mask, other=5) + n5 = max(lai_field['bbch'][mask].index) + elif bbch_new.values[t] < 70 and bbch_new.values[t] >= 60: + # lai_field['bbch'] = lai_field['bbch'].where(~mask, other=6) + n6 = max(lai_field['bbch'][mask].index) + elif bbch_new.values[t] < 80 and bbch_new.values[t] >= 70: + # lai_field['bbch'] = lai_field['bbch'].where(~mask, other=7) + n7 = max(lai_field['bbch'][mask].index) + elif bbch_new.values[t] < 90 and bbch_new.values[t] >= 80: + # 
lai_field['bbch'] = lai_field['bbch'].where(~mask, other=8) + n8 = max(lai_field['bbch'][mask].index) + elif bbch_new.values[t] < 100 and bbch_new.values[t] >= 90: + # lai_field['bbch'] = lai_field['bbch'].where(~mask, other=9) + n9 = max(lai_field['bbch'][mask].index) + # bbch_ = lai_field['bbch'].value_counts().sort_index().values + bbch_ = [n2-datetime.datetime.strptime('2017-03-22', '%Y-%m-%d'),n3-n2,n4-n3,n5-n4,n6-n5,n7-n6,n8-n7,n9-n8] + + #Plot BBCH + hm = lai_field.filter(like='bbch') + label = ['','BBCH',''] + width = 0.3 + legend_items = ['Tillering','Stem elongation','Booting','Heading','Flowering','Fruit development','Ripening', 'Senescence'] + + a_508 = 0 + + aa_508 = mdates.date2num(lai_field['bbch'].index[0]) + + for xxxx, kkkkk in enumerate(bbch_): + a_508 = a_508 + bbch_[xxxx].total_seconds() /60/60/24 + ax4.barh(label,[0,a_508,0],width, label=legend_items[xxxx], left=[0,aa_508,0]) + + aa_508 = mdates.date2num(lai_field['bbch'].index[0]) + a_508 + + + xmin, xmax = ax4.get_xlim() + + ax4.barh(label,[0,200,0],width, left=[0,xmax-1,0], color='white') + ax4.set_ylim(0,1.7) + plt.legend(bbox_to_anchor=(.935, 0.4),ncol=8) + + plt.text(0.98, 0.05, "(a)", transform=ax.transAxes, fontsize=20, horizontalalignment='center', verticalalignment='center') + plt.text(0.98, 0.2, "(b)", transform=ax0.transAxes, fontsize=20, horizontalalignment='center', verticalalignment='center') + plt.text(0.98, 0.2, "(c)", transform=ax2.transAxes, fontsize=20, horizontalalignment='center', verticalalignment='center') + plt.text(0.98, 0.2, "(d)", transform=ax3.transAxes, fontsize=20, horizontalalignment='center', verticalalignment='center') + plt.text(0.98, 0.2, "(e)", transform=ax4.transAxes, fontsize=20, horizontalalignment='center', verticalalignment='center') + + + plt.savefig(plot_output_path+pol+'_all_'+kkk+kkkk, bbox_inches = 'tight') + + plt.close() + diff --git a/kaska/kaska_ssrt_again_lookup.py b/kaska/kaska_ssrt_again_lookup.py new file mode 100644 index 0000000..2a9962f 
--- /dev/null +++ b/kaska/kaska_ssrt_again_lookup.py @@ -0,0 +1,797 @@ + +import os +import pandas as pd +import matplotlib.pyplot as plt +import matplotlib.dates as mdates +from matplotlib.dates import MonthLocator +# import matplotlib.ticker +import numpy as np +# from sense.canopy import OneLayer +# from sense.soil import Soil +# from sense import model +import scipy.stats +from scipy.optimize import minimize +import pdb +from z_helper import * +# from z_optimization import * +import datetime +from matplotlib import gridspec +import datetime +from matplotlib.lines import Line2D +import copy +import os +import osr +import gdal +import datetime +import numpy as np +from netCDF4 import Dataset +from scipy.ndimage import label +from utils import reproject_data +from skimage.filters import sobel +from collections import namedtuple +from scipy.optimize import minimize +from scipy.interpolate import interp1d +# from watercloudmodel import cost_function +from watercloudmodel import cost_function2 +from scipy.ndimage.filters import gaussian_filter1d +import pdb +from z_helper import * +from watercloudmodel import ssrt_jac_ + + + +def lookup(): + + mv = np.arange(0.01, 0.5, 0.01) + coef = np.arange(0.1,2,0.1) + lai = np.arange(0.1, 5, 0.1) + theta = np.arange(35,45,1) + h = np.arange(0,1,0.05) + hm = {} + for r in mv: + for rr in coef: + for rrr in lai: + for rrrr in h: + for rrrrr in theta: + x,y = ssrt_jac_(r, rr, rrr, rrrr, rrrrr) + hm[(r,rr,rrr,rrrr,rrrrr)] = x + return hm + +def lookup2(mv,coef,lai,theta,h,vv): + hm = {} + for r in mv: + x,y = ssrt_jac_(r, coef, lai, h, theta) + xx = 10*np.log10(x) + xxx = abs(xx) - abs(vv) + hm[r] = abs(xxx) + return hm + + + +def smooth(y, box_pts): + box = np.ones(box_pts)/box_pts + y_smooth = np.convolve(y, box, mode='same') + return y_smooth + + +def do_one_pixel_field(data_field, vv, vh, lai, theta, time, sm, sm_std, sr, sr_std, height, orbits, unc): + + lais = [] + coefs = [] + sms = [] + times = [] + + uorbits = 
np.unique(orbits) + # uorbits = np.array([44]) + for orbit in uorbits: + orbit_mask = orbits == orbit + # orbit_mask = (orbits == 44) | (orbits == 168) + # orbit_mask = (orbits == 44) | (orbits == 95) | (orbits == 117) | (orbits == 168) + # orbit_mask = (orbits == 95) + ovv, ovh, olai, otheta, otime = vv[orbit_mask], vh[orbit_mask], lai[orbit_mask], theta[orbit_mask], time[orbit_mask] + osm, osm_std, oscoef, oscoef_std = sm[orbit_mask], sm_std[orbit_mask], coef[orbit_mask], coef_std[orbit_mask] + + oheight = height[orbit_mask] + + olai_std = np.ones_like(olai)*0.05 + + alpha = _calc_eps(osm) + alpha = osm + alpha_std = np.ones_like(alpha)*10 + alpha_std = osm_std + # pdb.set_trace() + prior_mean = np.concatenate([alpha,oscoef]) + prior_unc = np.concatenate([alpha_std,oscoef_std]) + x0 = np.concatenate([alpha,oscoef]) + data = np.concatenate([oheight,olai]) + bounds = ( + # [[2.5, 30]] * olai.shape[0] + [[0.01, 0.5]] * olai.shape[0] + + [[0.01, 3]] * olai.shape[0] + ) + + gamma = [500, 500] + + retval = minimize(cost_function2, + x0, + args=(ovh, ovv, otheta, gamma, prior_mean, prior_unc, data, unc), + jac=True, + bounds = bounds, + options={"disp": True},) + + # posterious_lai = retval.x[2*len(olai) : 3*len(olai)] + posterious_coef = retval.x[len(olai) : +2*len(olai)] + posterious_mv = retval.x[ : +len(olai)] + # lais.append(posterious_lai) + coefs.append(posterious_coef) + # x = np.arange(0.01, 0.5, 0.001) + # xx = _calc_eps(x) + # sols=[] + # for i in posterious_mv: + # p, pp = find_nearest(xx,i) + # sols.append(x[pp]) + # sols = np.array(sols) + + sms.append(posterious_mv) + # sms.append(sols) + times.append(otime) + + order = np.argsort(np.hstack(times)) + times = np.hstack(times )[order] + # lais = np.hstack(lais )[order] + lais=0 + coefs = np.hstack(coefs )[order] + # coefs=0 + sms = np.hstack(sms )[order].real + + return times, lais, coefs, sms, orbit_mask + + + +def _simple_ew(): + """ + eq. 
4.69 + simplistic approach with T=23°C, bulk density = 1.7 g/cm3 + """ + f0 = 18.64 # relaxation frequency [GHz] + f = 5.405 + hlp = f/f0 + e1 = 4.9 + (74.1)/(1.+hlp**2.) + # e2 =(74.1*hlp)/(1.+hlp**2.) + 6.46 * self.sigma/self.f + # return e1 + 1.j * e2 + return e1 + +def _calc_eps(mv): + """ + calculate dielectric permittivity + Eq. 4.66 (Ulaby et al., 2014) + """ + clay = 0.0738 + sand = 0.2408 + bulk = 1.45 + alpha = 0.65 + beta1 = 1.27-0.519*sand - 0.152*clay + beta2 = 2.06 - 0.928*sand -0.255*clay + sigma = -1.645 + 1.939*bulk - 2.256*sand + 1.594*clay + + + e1 = (1.+0.66*bulk+mv**beta1*_simple_ew()**alpha - mv)**(1./alpha) + # e2 = np.imag(self.ew)*self.mv**self.beta2 + # return e1 + 1.j*e2 + return e1 + +# def quad_approx_solver(alphas): +# x = np.arange(0.01, 0.5, 0.01) +# p = np.polyfit(x, _calc_eps(x), 2) +# # 2nd order polynomial +# #solve +# solutions = [np.roots([p[0], p[1], p[2]-aa]) for aa in alphas] +# return solutions + +def find_nearest(array, value): + array = np.asarray(array) + idx = (np.abs(array - value)).argmin() + return array[idx], idx + + +# hm = lookup() + +# pdb.set_trace() + + + +### Data preparation df_agro!!!! 
### +#----------------------------------------------------------------- +# storage information +path = '/media/tweiss/Work/z_final_mni_data_2017' +file_name = 'in_situ_s1_buffer_50' # theta needs to be changed to for norm multi +extension = '.csv' + +path_agro = '/media/nas_data/2017_MNI_campaign/field_data/meteodata/agrarmeteorological_station' +path_agro = '/media/tweiss/Work/Paper/in_progress/RT_model_comparison/images' +file_name_agro = 'Daily_Freising' +extension_agro = '.csv' + +field = '508_high' +pol = 'vv' + +df, df_agro, field_data, field_data_orbit, theta_field, sm_field, height_field, lai_field, vwc_field, pol_field, vv_field, vh_field, relativeorbit, vwcpro_field = read_data(path, file_name, extension, field, path_agro, file_name_agro, extension_agro, pol) + +aggregation = ['','_buffer_30','_buffer_50','_buffer_100','_Field_buffer_30'] +pre_processing = ['multi', 'norm_multi'] +aggregation = ['_buffer_50','_Field_buffer_30'] +pre_processing = ['multi'] +# aggregation = ['_buffer_100'] +# aggregation = ['_Field_buffer_30'] +surface_list = ['Oh92', 'Oh04', 'Dubois95', 'WaterCloud', 'I2EM'] +canopy_list = ['turbid_isotropic', 'water_cloud'] + +surface_list = ['Oh92', 'I2EM'] +canopy_list = ['turbid_isotropic'] + +surface_list = ['Oh04'] +# surface_list = ['Oh92'] +# canopy_list = ['water_cloud'] +field = ['508_high'] +# field = ['508_low'] +# field = ['508_med'] +# field = ['301_high'] +# field = ['301_low'] +# field = ['301_med'] +# field = ['542_high'] +# field = ['542_low'] +# field = ['542_med'] + +### option for time invariant or variant calibration of parameter +#------------------------------- +opt_mod = ['time_variant'] +#--------------------------- + + +for p in pre_processing: + + for pp in aggregation: + + # versions = ['everything','','44_117','95_168','44_168','117_95','44_95','117_168','44_117_95','44_117_168','44_95_168','117_95_168'] + # ver = ['','','44','95','44','117','44','117','44','44','44','117'] + # ver2 = 
['','','117','168','168','95','95','168','117','117','95','95'] + # ver3 = ['','','','','','','','','95','168','168','168'] + + versions = ['','everything'] + ver = ['',''] + ver2 = ['',''] + ver3 = ['',''] + + # versions = ['44_168'] + # ver = ['44'] + # ver2 = ['168'] + # ver3 = [''] + + for i, ii in enumerate(versions): + + if ii == 'everything': + orbit_list = [None] + orbit1=None + orbit2=None + orbit3=None + orbit4=None + plot_output_path = '/media/tweiss/Work/paper2/z_dense_s1_time_series_n7'+p+pp+'_all'+'/' + csv_output_path = plot_output_path+'csv/None_' + elif ii == '': + orbit_list = [44,117,95,168] + orbit2=None + orbit3=None + orbit4=None + plot_output_path = '/media/tweiss/Work/paper2/z_dense_s1_time_series_n7'+p+pp+'/' + csv_output_path = plot_output_path+'csv/' + else: + plot_output_path = '/media/tweiss/Work/paper2/z_dense_s1_time_series_n7'+p+pp+'_'+ii+'/' + csv_output_path = plot_output_path+'csv/'+ver[i]+'_'+ver[i]+'_' + orbit_list = [int(ver[i])] + orbit2 = int(ver2[i]) + if ver3[i] == '': + orbit3 = None + else: + orbit3 = int(ver3[i]) + + + data = pd.read_csv(csv_output_path+'all_50.csv',header=[0,1,2,3,4,5],index_col=0) + + + + + + + # fig, ax = plt.subplots(figsize=(17, 13)) + # gs = gridspec.GridSpec(5, 1, height_ratios=[14, 3, 3, 3, 3]) + # ax = plt.subplot(gs[0]) + + # plt.ylabel('Backscatter [dB]', fontsize=18) + # plt.xlabel('Date', fontsize=18) + # plt.tick_params(labelsize=17) + + # ax.set_ylim([-21.5,-8.5]) + + + # colormaps = ['Greens', 'Purples', 'Blues', 'Oranges', 'Reds', 'Greys', 'pink', 'bone', 'Blues', 'Blues', 'Blues'] + # r = 0 + + # colormap = plt.get_cmap(colormaps[r]) + # colors = [colormap(rr) for rr in np.linspace(0.35, 1., 3)] + + for kkk in opt_mod: + for kkkk in field: + for k in surface_list: + for kk in canopy_list: + + if k == 'Oh92': + hm = 'Oh92' + colors = 'b' + elif k == 'Oh04': + hm = 'Oh04' + colors = 'r' + elif k == 'Dubois95': + hm='Dubois95' + colors = 'y' + elif k == 'WaterCloud': + hm = 'WCM' + colors 
= 'm' + elif k == 'I2EM': + hm = 'IEM_B' + colors = 'g' + + data_field = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk) + data_field.index = pd.to_datetime(data_field.index) + date = data_field.index + + + + vv = data_field.filter(like='S1_vv').values.flatten() + vv = 10*np.log10(vv) + vh = data_field.filter(like='S1_vh').values.flatten() + vh = 10*np.log10(vh) + lai = data_field.filter(like='LAI_insitu').values.flatten() + lai = lai + theta = data_field.filter(like='theta').values.flatten() + theta = np.rad2deg(theta) + time = date + time2 = np.array(time) + for jj in range(len(time)): + time2[jj] = time[jj].replace(microsecond=0).replace(second=0).replace(minute=0) + time2 = pd.to_datetime(time2) + + + s2_data = pd.read_csv('/media/tweiss/Daten/data_AGU/S2_'+kkkk+pp+'.csv',header=[0],index_col=0) + s2_data.index = pd.to_datetime(s2_data.index).floor('Min').floor('H') + s2_lai = s2_data.loc[time2]['lai'].values.flatten() + s2_cab = s2_data.loc[time2]['cab'].values.flatten() + s2_cbrown = s2_data.loc[time2]['cbrown'].values.flatten() + lai = s2_lai + sm_insitu = data_field.filter(like='SM_insitu').values.flatten() + api_data = pd.read_csv('/media/tweiss/Daten/data_AGU/api_sm.csv',header=[0],index_col=0) + api_data.index = pd.to_datetime(api_data.index) + api_sm = api_data.loc[time2].values.flatten() + sm = data_field.filter(like='SM_insitu').values.flatten() + # sm = smooth(sm,2) + sm[:] = 0.25 + # sm = api_sm + sm_std = data_field.filter(like='SM_insitu').values.flatten() + ooo = np.abs(sm[1:]-sm[:-1])*20 + sm_std[0] = ooo[-1] + sm_std[1:] = ooo + # sm_std[:] = 0.2 + coef = data_field.filter(like='coef').values.flatten() + coef_std = data_field.filter(like='SM_insitu').values.flatten() + coef_std[:] = 0.2 + height = data_field.filter(like='height').values.flatten() + orbits = data_field.filter(like='relativeorbit').values.flatten() + unc = 0.8 + + unc_array = np.arange(0,2,0.1) + coef_array = np.arange(0,2,0.1) + sm_array = 
np.arange(0,2,0.1) + final_sm = [None] * len(height) + for i in range(len(height)): + mv = np.arange(0.01,0.4,0.05) + hm = lookup2(mv,coef[i],lai[i],theta[i],height[i],vv[i]) + final_sm[i] = min(hm, key=hm.get) + pdb.set_trace() + + + + + # hm = {} + # for r in unc_array: + # for rr in coef_array: + # for rrr in sm_array: + # unc = r + # coef_std[:] = rr + # sm_std[:] = rrr + # times, lais, coefs, sms, orbit_mask = do_one_pixel_field(data_field, vv, vh, lai, theta, time, sm, sm_std, coef, coef_std, height, orbits,unc=unc) + # rmse_vv = rmse_prediction(sm_insitu,sms) + # bias_vv = bias_prediction(sm_insitu,sms) + # ubrmse_vv = ubrmse_prediction(rmse_vv,bias_vv) + # hm[(r,rr,rrr)] = ubrmse_vv + + pdb.set_trace() + min(hm, key=hm.get) + hm[min(hm, key=hm.get)] + + times, lais, coefs, sms, orbit_mask = do_one_pixel_field(data_field, vv, vh, lai, theta, time, sm, sm_std, coef, coef_std, height, orbits,unc=unc) + + + plt.plot(time,sm_insitu) + rmse_vv = rmse_prediction(sm_insitu,sm) + bias_vv = bias_prediction(sm_insitu,sm) + ubrmse_vv = ubrmse_prediction(rmse_vv,bias_vv) + plt.plot(time,sm, label='prior RMSE:'+str(rmse_vv)[0:6]+' ubRMSE:'+str(ubrmse_vv)[0:6]) + rmse_vv = rmse_prediction(sm_insitu,sms) + bias_vv = bias_prediction(sm_insitu,sms) + ubrmse_vv = ubrmse_prediction(rmse_vv,bias_vv) + plt.plot(times,sms, label='model RMSE:'+str(rmse_vv)[0:6]+' ubRMSE:'+str(ubrmse_vv)[0:6]) + + # #orbit_mask + # plt.plot(time[orbit_mask],sm_insitu[orbit_mask]) + # rmse_vv = rmse_prediction(sm_insitu[orbit_mask],sm[orbit_mask]) + # bias_vv = bias_prediction(sm_insitu[orbit_mask],sm[orbit_mask]) + # ubrmse_vv = ubrmse_prediction(rmse_vv,bias_vv) + # plt.plot(time[orbit_mask],sm[orbit_mask], label='prior RMSE:'+str(rmse_vv)[0:6]+' ubRMSE:'+str(ubrmse_vv)[0:6]) + # rmse_vv = rmse_prediction(sm_insitu[orbit_mask],sms) + # bias_vv = bias_prediction(sm_insitu[orbit_mask],sms) + # ubrmse_vv = ubrmse_prediction(rmse_vv,bias_vv) + # plt.plot(times,sms, label='model 
RMSE:'+str(rmse_vv)[0:6]+' ubRMSE:'+str(ubrmse_vv)[0:6]) + + + plt.legend() + pdb.set_trace() + plt.plot(time[orbit_mask],coef) + plt.plot(times,coefs) + # plt.show() + pdb.set_trace() + pdb.set_trace() + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + if kkk == 'time invariant': + if kk == 'turbid_isotropic': + ax.plot(date, 10*np.log10(data_field.filter(like='vv_model')), color=colors, marker='s', linestyle='dashed', label = hm+ ' + ' + 'SSRT') + else: + ax.plot(date, 10*np.log10(data_field.filter(like='vv_model')), color=colors, marker='s', label = hm+ ' + ' + 'WCM') + else: + if kk == 'turbid_isotropic': + ax.plot(date, 10*np.log10(data_field.filter(like='vv_model')), color=colors, marker='s', linestyle='dashed', label = hm+ ' + ' + 'SSRT') + else: + ax.plot(date, 10*np.log10(data_field.filter(like='vv_model')), color=colors, marker='s', label = hm+ ' + ' + 'WCM') + a = 0 + b = 0 + c = 0 + d = 0 + + relativeorbit = data_field.filter(like='relativeorbit') + for j in range(len(relativeorbit)): + relativeorbit.index[j] + x = relativeorbit.index[j] - datetime.timedelta(days=0.4) + xx = relativeorbit.index[j] + datetime.timedelta(days=0.4) + if relativeorbit.values.flatten()[j] == 95: + if a == 0: + ax.axvspan(x,xx, color='red', alpha=0.2, label = 'Incidence angle 43°, Descending track') + a += 1 + else: + ax.axvspan(x,xx, color='red', alpha=0.2) + elif relativeorbit.values.flatten()[j] == 117: + if b == 0: + ax.axvspan(x,xx, color='blue', alpha=0.2, label = 'Incidence angle 45°, Ascending track') + b += 1 + else: + ax.axvspan(x,xx, color='blue', alpha=0.2) + elif relativeorbit.values.flatten()[j] == 168: + if c == 0: + ax.axvspan(x,xx, color='orange', alpha=0.2, label = 'Incidence angle 35°, Descending track') + c += 1 + else: + ax.axvspan(x,xx, color='orange', alpha=0.2) + elif relativeorbit.values.flatten()[j] == 44: + if d == 0: + ax.axvspan(x,xx, color='green', alpha=0.2, label = 'Incidence angle 36°, Ascending track') + d += 1 + else: + 
ax.axvspan(x,xx, color='green', alpha=0.2) + else: + pass + + ax.plot(date,10*np.log10(data_field.filter(like='S1_vv')), '-', color='black', label='Sentinel-1', linewidth=3, marker='s') + + ax.set_xlim([datetime.date(2017, 3, 22), datetime.date(2017, 7, 18)]) + plt.legend(prop={'size': 14}, loc=3) + + plt.grid(linestyle='dotted') + + plt.setp(ax.get_xticklabels(), visible=False) + + ax0 = plt.subplot(gs[1]) + plt.tick_params(labelsize=17) + for kkkk in field: + for k in surface_list: + for kk in canopy_list: + + if k == 'Oh92': + hm = 'Oh92' + colors = 'b' + elif k == 'Oh04': + hm = 'Oh04' + colors = 'r' + elif k == 'Dubois95': + hm='Dubois95' + colors = 'y' + elif k == 'WaterCloud': + hm = 'WCM' + colors = 'm' + elif k == 'I2EM': + hm = 'IEM_B' + colors = 'g' + + + ground = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk).filter(like='part_g') + ground = ground[ground.columns[0]] + + lai = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk).filter(like='LAI_insitu').values + theta = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk).filter(like='theta').values + + if kk == 'turbid_isotropic': + coef = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk).filter(like='coef').values + d = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk).filter(like='height').values + T = np.exp(-coef*np.sqrt(lai)*d/np.cos(theta)) + T=T**2 + ax0.plot(date,T.flatten(), color=colors, marker='s', linestyle='dashed') + else: + B_vv = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk).filter(like='B_vv').values + T = np.exp(-2*B_vv*lai/np.cos(theta)) + ax0.plot(date,T.flatten(), color=colors, marker='s') + + a = 0 + b = 0 + c = 0 + d = 0 + + relativeorbit = data_field.filter(like='relativeorbit') + for j in range(len(relativeorbit)): + relativeorbit.index[j] + x = relativeorbit.index[j] - datetime.timedelta(days=0.4) + xx = relativeorbit.index[j] + 
datetime.timedelta(days=0.4) + if relativeorbit.values.flatten()[j] == 95: + if a == 0: + ax0.axvspan(x,xx, color='red', alpha=0.2, label = 'descending 43°') + a += 1 + else: + ax0.axvspan(x,xx, color='red', alpha=0.2) + elif relativeorbit.values.flatten()[j] == 117: + if b == 0: + ax0.axvspan(x,xx, color='blue', alpha=0.2, label = 'ascending 43°') + b += 1 + else: + ax0.axvspan(x,xx, color='blue', alpha=0.2) + elif relativeorbit.values.flatten()[j] == 168: + if c == 0: + ax0.axvspan(x,xx, color='orange', alpha=0.2, label = 'descending 35°') + c += 1 + else: + ax0.axvspan(x,xx, color='orange', alpha=0.2) + elif relativeorbit.values.flatten()[j] == 44: + if d == 0: + ax0.axvspan(x,xx, color='green', alpha=0.2, label = 'ascending 36°') + d += 1 + else: + ax0.axvspan(x,xx, color='green', alpha=0.2) + else: + pass + plt.ylabel('Transmissivity\nT', fontsize=18) + ax0.set_xlim([datetime.date(2017, 3, 22), datetime.date(2017, 7, 18)]) + ax0.set_ylim(-0.2,1.1) + plt.grid(linestyle='dotted') + plt.setp(ax0.get_xticklabels(), visible=False) + + + ax1 = plt.subplot(gs[2], sharex = ax) + plt.tick_params(labelsize=17) + # remove vertical gap between subplots + plt.subplots_adjust(hspace=.0) + plt.grid(linestyle='dotted') + plt.setp(ax1.get_xticklabels(), visible=False) + + lai_field = data_field.filter(like='LAI_insitu') + height_field = data_field.filter(like='height') + + + ax1.plot(date,lai_field,color='green',linewidth=2,label='LAI') + ax2 = ax1.twinx() + plt.tick_params(labelsize=17) + ax2.plot(date,height_field,color='black', linewidth=2, label='Height') + ax1.set_ylabel('LAI', fontsize=16) + ax2.set_ylabel('Height\n[m]', fontsize=16) + + + # add std for LAI and height for field 508 (data from field measurements) + lai_old = copy.deepcopy(lai_field) + height_old = copy.deepcopy(height_field) + if field == '508_high': + lai_field[lai_field.index>'2017-03-28'] = 0.2218 + lai_field[lai_field.index>'2017-04-05'] = 0.1367 + lai_field[lai_field.index>'2017-04-10'] = 0.4054 + 
lai_field[lai_field.index>'2017-04-21'] = 0.3247 + lai_field[lai_field.index>'2017-05-02'] = 0.5546 + lai_field[lai_field.index>'2017-05-10'] = 0.5852 + lai_field[lai_field.index>'2017-05-16'] = 0.3058 + lai_field[lai_field.index>'2017-05-26'] = 0.5373 + lai_field[lai_field.index>'2017-05-29'] = 0.332 + lai_field[lai_field.index>'2017-06-02'] = 0.2856 + lai_field[lai_field.index>'2017-06-13'] = 0.4717 + lai_field[lai_field.index>'2017-06-26'] = 0.2982 + lai_field[lai_field.index>'2017-07-06'] = 0.253 + + height_field[height_field.index>'2017-03-28'] = 0.005774 + height_field[height_field.index>'2017-04-05'] = 0.015275 + height_field[height_field.index>'2017-04-10'] = 0.026458 + height_field[height_field.index>'2017-04-21'] = 0.049329 + height_field[height_field.index>'2017-05-02'] = 0.01 + height_field[height_field.index>'2017-05-10'] = 0.01 + height_field[height_field.index>'2017-05-26'] = 0.028868 + height_field[height_field.index>'2017-05-29'] = 0.028868 + height_field[height_field.index>'2017-06-02'] = 0.028868 + height_field[height_field.index>'2017-06-13'] = 0.020817 + height_field[height_field.index>'2017-06-26'] = 0.025166 + height_field[height_field.index>'2017-07-06'] = 0.015275 + + ax1.fill_between(lai_field.index,lai_old.values.flatten()-lai_field.values.flatten(), lai_old.values.flatten()+lai_field.values.flatten(), color='green', alpha=0.2, label='Standard Deviation') + ax2.fill_between(height_field.index,height_old.values.flatten()-height_field.values.flatten(), height_old.values.flatten()+height_field.values.flatten(), color='black', alpha=0.2, label='Standard Deviation') + + ax2.legend(bbox_to_anchor=(.965, 0.45), prop={'size': 14}) + ax1.legend(loc=2, prop={'size': 14}) + + # ax1.set_xticks([]) + ax1.set_ylim(0,6.7) + ax2.set_ylim(0,1) + start, end = ax1.get_ylim() + ax1.yaxis.set_ticks(np.arange(start, end, 2)) + + # soil moisture and rainfall + ax3 = plt.subplot(gs[3], sharex = ax) + plt.tick_params(labelsize=17) + # remove vertical gap between 
subplots + plt.subplots_adjust(hspace=.0) + plt.grid(linestyle='dotted') + ax3.plot(date,data_field.filter(like='SM_insitu'),color='blue', linewidth=2, label='Soil Moisture') + ax3.set_ylabel('Soil Moisture\n$[cm^3/cm^3]$', fontsize=16) + ax5 = ax3.twinx() + date_agro = pd.to_datetime(df_agro['date'], format='%d.%m.%Y') + agro_sum = df_agro['SUM_NN050'][87:192] + ax5.bar(agro_sum.index, agro_sum, width=0.8, label='Precipitation') + ax3.legend(loc=2, prop={'size': 14}) + ax5.legend(loc=1, prop={'size': 14}) + ax5.set_ylabel('Precipita-\ntion [mm]', fontsize=16) + ax5.set_ylim(0,39) + ax3.set_ylim(0.17,0.38) + plt.setp(ax3.get_xticklabels(), visible=False) + plt.tick_params(labelsize=17) + + ax4 = plt.subplot(gs[4], sharex = ax) + plt.tick_params(labelsize=17) + # remove vertical gap between subplots + plt.subplots_adjust(hspace=.0) + plt.grid(linestyle='dotted') + bbch = pd.read_csv('/media/tweiss/Work/z_final_mni_data_2017/bbch_2017.csv',header=[0,1]) + bbch = bbch.set_index(pd.to_datetime(bbch['None']['None'], format='%Y-%m-%d')) + bbch.index = pd.to_datetime(bbch.index) + + lai_field['bbch'] = 0 + + bbch_new = bbch.filter(like=kkkk[0:3]) + for t, tt in enumerate(bbch.index): + if t == 0: + start_date = '2017-03-29' + else: + start_date = bbch.index[t] + try: + end_date = bbch.index[t+1] + except IndexError: + start_date = bbch.index[t] + end_date = '2017-07-30' + mask = (lai_field.index > start_date) & (lai_field.index <= end_date) + + bbbb = lai_field['bbch'].where(~mask, other=2) + if bbch.index[t] < datetime.datetime.strptime('2017-03-29', '%Y-%m-%d'): + pass + else: + if bbch_new.values[t] < 30 and bbch_new.values[t] >= 20: + # lai_field['bbch'] = lai_field['bbch'].where(~mask, other=2) + n2 = max(lai_field['bbch'][mask].index) + elif bbch_new.values[t] < 40 and bbch_new.values[t] >= 30: + # lai_field['bbch'] = lai_field['bbch'].where(~mask, other=3) + n3 = max(lai_field['bbch'][mask].index) + elif bbch_new.values[t] < 50 and bbch_new.values[t] >= 40: + # 
lai_field['bbch'] = lai_field['bbch'].where(~mask, other=4) + n4 = max(lai_field['bbch'][mask].index) + elif bbch_new.values[t] < 60 and bbch_new.values[t] >= 50: + # lai_field['bbch'] = lai_field['bbch'].where(~mask, other=5) + n5 = max(lai_field['bbch'][mask].index) + elif bbch_new.values[t] < 70 and bbch_new.values[t] >= 60: + # lai_field['bbch'] = lai_field['bbch'].where(~mask, other=6) + n6 = max(lai_field['bbch'][mask].index) + elif bbch_new.values[t] < 80 and bbch_new.values[t] >= 70: + # lai_field['bbch'] = lai_field['bbch'].where(~mask, other=7) + n7 = max(lai_field['bbch'][mask].index) + elif bbch_new.values[t] < 90 and bbch_new.values[t] >= 80: + # lai_field['bbch'] = lai_field['bbch'].where(~mask, other=8) + n8 = max(lai_field['bbch'][mask].index) + elif bbch_new.values[t] < 100 and bbch_new.values[t] >= 90: + # lai_field['bbch'] = lai_field['bbch'].where(~mask, other=9) + n9 = max(lai_field['bbch'][mask].index) + # bbch_ = lai_field['bbch'].value_counts().sort_index().values + bbch_ = [n2-datetime.datetime.strptime('2017-03-22', '%Y-%m-%d'),n3-n2,n4-n3,n5-n4,n6-n5,n7-n6,n8-n7,n9-n8] + + #Plot BBCH + hm = lai_field.filter(like='bbch') + label = ['','BBCH',''] + width = 0.3 + legend_items = ['Tillering','Stem elongation','Booting','Heading','Flowering','Fruit development','Ripening', 'Senescence'] + + a_508 = 0 + + aa_508 = mdates.date2num(lai_field['bbch'].index[0]) + + for xxxx, kkkkk in enumerate(bbch_): + a_508 = a_508 + bbch_[xxxx].total_seconds() /60/60/24 + ax4.barh(label,[0,a_508,0],width, label=legend_items[xxxx], left=[0,aa_508,0]) + + aa_508 = mdates.date2num(lai_field['bbch'].index[0]) + a_508 + + + xmin, xmax = ax4.get_xlim() + + ax4.barh(label,[0,200,0],width, left=[0,xmax-1,0], color='white') + ax4.set_ylim(0,1.7) + plt.legend(bbox_to_anchor=(.935, 0.4),ncol=8) + + plt.text(0.98, 0.05, "(a)", transform=ax.transAxes, fontsize=20, horizontalalignment='center', verticalalignment='center') + plt.text(0.98, 0.2, "(b)", transform=ax0.transAxes, 
import os
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
from matplotlib.dates import MonthLocator
import numpy as np
import scipy.stats
from scipy.optimize import minimize
import pdb
from z_helper import *
import datetime
from matplotlib import gridspec
from matplotlib.lines import Line2D
import copy
import osr
import gdal
from netCDF4 import Dataset
from scipy.ndimage import label
from utils import reproject_data
from skimage.filters import sobel
from collections import namedtuple
from scipy.interpolate import interp1d
from watercloudmodel import cost_function2
from scipy.ndimage.filters import gaussian_filter1d
from watercloudmodel import ssrt_jac_


def smooth(y, box_pts):
    """Moving-average (boxcar) smoothing of ``y`` over ``box_pts`` samples.

    Uses ``np.convolve(..., mode='same')``, so the output has the same length
    as the input; edge values are computed against an implicitly zero-padded
    signal.
    """
    box = np.ones(box_pts) / box_pts
    return np.convolve(y, box, mode='same')


def do_one_pixel_field(data_field, vv, vh, lai, theta, time, sm, sm_std,
                       sr, sr_std, height, orbits, unc):
    """Retrieve soil moisture and canopy extinction for one field pixel.

    For every Sentinel-1 acquisition ``jj`` a sliding window of roughly
    ``2*n`` neighbouring acquisitions is inverted at once with
    ``cost_function2`` (SSRT-based), and only the window-centre solution is
    kept.

    Parameters
    ----------
    data_field : pandas.DataFrame
        Per-field time series (currently unused inside this function; kept
        for interface compatibility with callers).
    vv, vh : 1-D arrays of backscatter in dB.
    lai : 1-D array of leaf area index per acquisition.
    theta : 1-D array of incidence angles in degrees.
    time : 1-D array-like of acquisition timestamps.
    sm, sm_std : prior soil moisture and its uncertainty per acquisition.
    sr, sr_std : prior extinction coefficient ("coef") and its uncertainty.
    height : 1-D array of canopy height per acquisition.
    orbits : 1-D array of Sentinel-1 relative orbit numbers.
    unc : scalar observation uncertainty passed to the cost function.

    Returns
    -------
    (times, lais, coefs, sms, orbit_mask) where ``lais`` is always 0 (LAI is
    not retrieved in this variant), ``coefs``/``sms`` are the retrieved
    extinction coefficients and soil moisture sorted by time, and
    ``orbit_mask`` selects the four relative orbits 44/95/117/168.
    """
    lais = []
    coefs = []
    sms = []
    times = []

    uorbits = np.unique(orbits)

    for jj in range(len(vv)):
        # Build a boolean window mask of +/- n acquisitions around jj.
        # The orbit comparison is only used to get a correctly shaped
        # boolean array; it is cleared immediately afterwards.
        orbit_mask = (orbits == 44) | (orbits == 95) | (orbits == 117) | (orbits == 168)
        orbit_mask[:] = False
        n = 9

        if jj <= 2 * n:
            orbit_mask[:2 * n + 1] = True
        elif jj > len(vv) - 2 * n:
            orbit_mask[jj - 2 * n:] = True
        else:
            orbit_mask[jj - n:jj + n] = True

        ovv, ovh, olai, otheta, otime = (vv[orbit_mask], vh[orbit_mask],
                                         lai[orbit_mask], theta[orbit_mask],
                                         time[orbit_mask])
        # BUG FIX: the original read the module-level globals ``coef`` and
        # ``coef_std`` here instead of the ``sr``/``sr_std`` parameters, so
        # the two parameters were silently ignored.  Callers pass the same
        # arrays, so behaviour for the existing script is unchanged, but the
        # function is now self-contained.
        osm, osm_std, oscoef, oscoef_std = (sm[orbit_mask], sm_std[orbit_mask],
                                            sr[orbit_mask], sr_std[orbit_mask])
        oheight = height[orbit_mask]

        olai_std = np.ones_like(olai) * 0.05

        # NOTE(review): the permittivity conversion is computed and then
        # immediately overwritten — the state vector is optimised directly in
        # volumetric soil moisture.  Kept as-is to preserve behaviour.
        alpha = _calc_eps(osm)
        alpha = osm
        alpha_std = np.ones_like(alpha) * 10
        alpha_std = osm_std

        # State vector: [soil moisture ..., extinction coefficient ...]
        prior_mean = np.concatenate([alpha, oscoef])
        prior_unc = np.concatenate([alpha_std, oscoef_std])
        x0 = np.concatenate([alpha, oscoef])
        data = np.concatenate([oheight, olai])
        bounds = (
            [[0.01, 0.5]] * olai.shape[0]        # soil moisture [cm3/cm3]
            + [[0.0000001, 3]] * olai.shape[0]   # extinction coefficient
        )

        gamma = [500, 500]  # temporal smoothness weights

        retval = minimize(cost_function2,
                          x0,
                          args=(ovh, ovv, otheta, gamma, prior_mean,
                                prior_unc, data, unc),
                          jac=True,
                          bounds=bounds,
                          options={"disp": True},)

        posterious_coef = retval.x[len(olai): 2 * len(olai)]
        posterious_mv = retval.x[: len(olai)]

        # Keep only the window-centre estimate.  The first two branches
        # handle the edges of the time series where the window is clipped.
        if jj <= int(n / 2):
            coefs.append(posterious_coef[jj])
            sms.append(posterious_mv[jj])
            times.append(otime[jj])
        elif jj > len(vv) - 2 * n:
            coefs.append(posterious_coef[n])
            sms.append(posterious_mv[n])
            times.append(otime[n])
        else:
            coefs.append(posterious_coef[n])
            sms.append(posterious_mv[n])
            times.append(otime[n])

    order = np.argsort(np.hstack(times))
    times = np.hstack(times)[order]
    lais = 0  # LAI is not retrieved in this variant
    coefs = np.hstack(coefs)[order]
    sms = np.hstack(sms)[order].real
    orbit_mask = (orbits == 44) | (orbits == 95) | (orbits == 117) | (orbits == 168)
    return times, lais, coefs, sms, orbit_mask


def _simple_ew():
    """Dielectric constant of free water, real part only.

    Eq. 4.69 (Ulaby et al., 2014); simplistic approach with T=23degC and
    bulk density 1.7 g/cm3, evaluated at the Sentinel-1 C-band frequency.
    """
    f0 = 18.64  # relaxation frequency [GHz]
    f = 5.405   # Sentinel-1 centre frequency [GHz]
    hlp = f / f0
    e1 = 4.9 + (74.1) / (1. + hlp ** 2.)
    # imaginary part intentionally omitted (see commented-out e2 in history)
    return e1


def _calc_eps(mv):
    """Real part of the soil dielectric permittivity for moisture ``mv``.

    Eq. 4.66 (Ulaby et al., 2014), parameterised with fixed clay/sand
    fractions and bulk density for the MNI test site.
    """
    clay = 0.0738
    sand = 0.2408
    bulk = 1.45
    alpha = 0.65
    beta1 = 1.27 - 0.519 * sand - 0.152 * clay
    # beta2 and sigma would be needed for the imaginary part; kept for
    # reference even though only the real part is returned.
    beta2 = 2.06 - 0.928 * sand - 0.255 * clay
    sigma = -1.645 + 1.939 * bulk - 2.256 * sand + 1.594 * clay

    e1 = (1. + 0.66 * bulk + mv ** beta1 * _simple_ew() ** alpha - mv) ** (1. / alpha)
    return e1


def find_nearest(array, value):
    """Return (nearest_value, index) of the element of ``array`` closest to ``value``."""
    array = np.asarray(array)
    idx = (np.abs(array - value)).argmin()
    return array[idx], idx


### Data preparation df_agro!!!! ###
# -----------------------------------------------------------------
# storage information
path = '/media/tweiss/Work/z_final_mni_data_2017'
file_name = 'in_situ_s1_buffer_50'  # theta needs to be changed to for norm multi
extension = '.csv'

path_agro = '/media/nas_data/2017_MNI_campaign/field_data/meteodata/agrarmeteorological_station'
path_agro = '/media/tweiss/Work/Paper/in_progress/RT_model_comparison/images'
file_name_agro = 'Daily_Freising'
extension_agro = '.csv'

field = '508_high'
pol = 'vv'

df, df_agro, field_data, field_data_orbit, theta_field, sm_field, height_field, lai_field, vwc_field, pol_field, vv_field, vh_field, relativeorbit, vwcpro_field = read_data(path, file_name, extension, field, path_agro, file_name_agro, extension_agro, pol)

# Successive re-assignments below narrow the experiment configuration; only
# the last assignment of each name is effective.
aggregation = ['', '_buffer_30', '_buffer_50', '_buffer_100', '_Field_buffer_30']
pre_processing = ['multi', 'norm_multi']
aggregation = ['_buffer_50', '_Field_buffer_30']
pre_processing = ['multi']
aggregation = ['_buffer_100']
# aggregation = ['_Field_buffer_30']
surface_list = ['Oh92', 'Oh04', 'Dubois95', 'WaterCloud', 'I2EM']
canopy_list = ['turbid_isotropic', 'water_cloud']

surface_list = ['Oh92', 'I2EM']
canopy_list = ['turbid_isotropic']
# Driver script: run the SSRT soil-moisture retrieval for the configured
# field/model combinations and produce the diagnostic plots.
surface_list = ['Oh04']
# surface_list = ['Oh92']
# canopy_list = ['water_cloud']
field = ['508_high']
field = ['301_high']  # last assignment wins; other fields kept in history

### option for time invariant or variant calibration of parameter
# -------------------------------
opt_mod = ['time_variant']
# ---------------------------


for p in pre_processing:

    for pp in aggregation:

        # Orbit-combination experiments; only the first two are active.
        versions = ['', 'everything']
        ver = ['', '']
        ver2 = ['', '']
        ver3 = ['', '']

        for i, ii in enumerate(versions):

            if ii == 'everything':
                orbit_list = [None]
                orbit1 = None
                orbit2 = None
                orbit3 = None
                orbit4 = None
                plot_output_path = '/media/tweiss/Work/paper2/z_dense_s1_time_series_n7' + p + pp + '_all' + '/'
                csv_output_path = plot_output_path + 'csv/None_'
            elif ii == '':
                orbit_list = [44, 117, 95, 168]
                orbit2 = None
                orbit3 = None
                orbit4 = None
                plot_output_path = '/media/tweiss/Work/paper2/z_dense_s1_time_series_n7' + p + pp + '/'
                csv_output_path = plot_output_path + 'csv/'
            else:
                plot_output_path = '/media/tweiss/Work/paper2/z_dense_s1_time_series_n7' + p + pp + '_' + ii + '/'
                # NOTE(review): 'ver[i]' appears twice here — presumably the
                # second one was meant to be ver2[i]; confirm against the CSV
                # naming scheme before changing.
                csv_output_path = plot_output_path + 'csv/' + ver[i] + '_' + ver[i] + '_'
                orbit_list = [int(ver[i])]
                orbit2 = int(ver2[i])
                if ver3[i] == '':
                    orbit3 = None
                else:
                    orbit3 = int(ver3[i])

            data = pd.read_csv(csv_output_path + 'all_50.csv', header=[0, 1, 2, 3, 4, 5], index_col=0)

            # NOTE(review): figure/axes creation is commented out below, but
            # later code references ax/gs.  As committed, everything after
            # the pdb.set_trace() calls would raise NameError unless this
            # block is re-enabled.
            # fig, ax = plt.subplots(figsize=(17, 13))
            # gs = gridspec.GridSpec(5, 1, height_ratios=[14, 3, 3, 3, 3])
            # ax = plt.subplot(gs[0])
            # plt.ylabel('Backscatter [dB]', fontsize=18)
            # plt.xlabel('Date', fontsize=18)
            # plt.tick_params(labelsize=17)
            # ax.set_ylim([-21.5,-8.5])
            # colormaps = ['Greens', 'Purples', 'Blues', 'Oranges', 'Reds', 'Greys', 'pink', 'bone', 'Blues', 'Blues', 'Blues']
            # r = 0
            # colormap = plt.get_cmap(colormaps[r])
            # colors = [colormap(rr) for rr in np.linspace(0.35, 1., 3)]

            for kkk in opt_mod:
                for kkkk in field:
                    for k in surface_list:
                        for kk in canopy_list:

                            # Map surface model name to label/colour.
                            if k == 'Oh92':
                                hm = 'Oh92'
                                colors = 'b'
                            elif k == 'Oh04':
                                hm = 'Oh04'
                                colors = 'r'
                            elif k == 'Dubois95':
                                hm = 'Dubois95'
                                colors = 'y'
                            elif k == 'WaterCloud':
                                hm = 'WCM'
                                colors = 'm'
                            elif k == 'I2EM':
                                hm = 'IEM_B'
                                colors = 'g'

                            data_field = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk)
                            data_field.index = pd.to_datetime(data_field.index)
                            date = data_field.index

                            vv = data_field.filter(like='S1_vv').values.flatten()
                            vv = 10 * np.log10(vv)
                            vh = data_field.filter(like='S1_vh').values.flatten()
                            vh = 10 * np.log10(vh)
                            lai = data_field.filter(like='LAI_insitu').values.flatten()
                            theta = data_field.filter(like='theta').values.flatten()
                            theta = np.rad2deg(theta)
                            time = date
                            # Round timestamps down to the full hour so they
                            # match the S2/API time series indices.
                            time2 = np.array(time)
                            for jj in range(len(time)):
                                time2[jj] = time[jj].replace(microsecond=0).replace(second=0).replace(minute=0)
                            time2 = pd.to_datetime(time2)

                            s2_data = pd.read_csv('/media/tweiss/Daten/data_AGU/S2_' + kkkk + pp + '.csv', header=[0], index_col=0)
                            s2_data.index = pd.to_datetime(s2_data.index).floor('Min').floor('H')
                            s2_lai = s2_data.loc[time2]['lai'].values.flatten()
                            s2_cab = s2_data.loc[time2]['cab'].values.flatten()
                            s2_cbrown = s2_data.loc[time2]['cbrown'].values.flatten()
                            # lai = s2_lai
                            sm_insitu = data_field.filter(like='SM_insitu').values.flatten()
                            api_data = pd.read_csv('/media/tweiss/Daten/data_AGU/api_sm.csv', header=[0], index_col=0)
                            api_data.index = pd.to_datetime(api_data.index)
                            api_sm = api_data.loc[time2].values.flatten()
                            # Prior soil moisture: API model output (the flat
                            # 0.25 assignment is overridden on purpose).
                            sm = data_field.filter(like='SM_insitu').values.flatten()
                            # sm = smooth(sm,2)
                            sm[:] = 0.25
                            sm = api_sm
                            sm_std = data_field.filter(like='SM_insitu').values.flatten()
                            ooo = np.abs(sm[1:] - sm[:-1]) * 20
                            sm_std[0] = ooo[-1]
                            sm_std[1:] = ooo
                            sm_std[:] = 10.71  # flat (weak) prior uncertainty wins
                            coef = data_field.filter(like='coef').values.flatten()
                            coef_std = data_field.filter(like='SM_insitu').values.flatten()
                            coef_std[:] = 0.01
                            height = data_field.filter(like='height').values.flatten()
                            orbits = data_field.filter(like='relativeorbit').values.flatten()
                            unc = 1.5

                            # (grid search over unc/coef_std/sm_std kept in
                            # version history; removed here for clarity)

                            times, lais, coefs, sms, orbit_mask = do_one_pixel_field(
                                data_field, vv, vh, lai, theta, time,
                                sm, sm_std, coef, coef_std, height, orbits, unc=unc)

                            plt.rcParams["figure.figsize"] = (10, 7)
                            plt.plot(times, sm_insitu[orbit_mask], label='insitu')
                            rmse_vv = rmse_prediction(sm_insitu, sm)
                            bias_vv = bias_prediction(sm_insitu, sm)
                            ubrmse_vv = ubrmse_prediction(rmse_vv, bias_vv)
                            plt.plot(times, sm[orbit_mask], label='prior RMSE:' + str(rmse_vv)[0:6] + ' ubRMSE:' + str(ubrmse_vv)[0:6])
                            rmse_vv = rmse_prediction(sm_insitu[orbit_mask], sms)
                            bias_vv = bias_prediction(sm_insitu[orbit_mask], sms)
                            ubrmse_vv = ubrmse_prediction(rmse_vv, bias_vv)
                            plt.plot(times, sms, label='model RMSE:' + str(rmse_vv)[0:6] + ' ubRMSE:' + str(ubrmse_vv)[0:6])

                            plt.legend()
                            plt.grid()
                            plt.ylabel('Soil Moisture')
                            plt.xlabel('Time')
                            plt.savefig('/media/tweiss/Daten/data_AGU/test_kaska/oh04_unc10_lai_flat', bbox_inches='tight')
                            plt.show()
                            pdb.set_trace()
                            plt.plot(time[orbit_mask], coef)
                            plt.plot(times, coefs)
                            # plt.show()
                            pdb.set_trace()
                            pdb.set_trace()

                            # Backscatter plot of the calibrated model run
                            # (requires the commented-out fig/ax block above).
                            if kkk == 'time invariant':
                                if kk == 'turbid_isotropic':
                                    ax.plot(date, 10 * np.log10(data_field.filter(like='vv_model')), color=colors, marker='s', linestyle='dashed', label=hm + ' + ' + 'SSRT')
                                else:
                                    ax.plot(date, 10 * np.log10(data_field.filter(like='vv_model')), color=colors, marker='s', label=hm + ' + ' + 'WCM')
                            else:
                                if kk == 'turbid_isotropic':
                                    ax.plot(date, 10 * np.log10(data_field.filter(like='vv_model')), color=colors, marker='s', linestyle='dashed', label=hm + ' + ' + 'SSRT')
                                else:
                                    ax.plot(date, 10 * np.log10(data_field.filter(like='vv_model')), color=colors, marker='s', label=hm + ' + ' + 'WCM')

                # Shade each acquisition by relative orbit; the a/b/c/d
                # counters make sure each orbit appears only once in the
                # legend.
                a = 0
                b = 0
                c = 0
                d = 0

                relativeorbit = data_field.filter(like='relativeorbit')
                for j in range(len(relativeorbit)):
                    x = relativeorbit.index[j] - datetime.timedelta(days=0.4)
                    xx = relativeorbit.index[j] + datetime.timedelta(days=0.4)
                    if relativeorbit.values.flatten()[j] == 95:
                        if a == 0:
                            ax.axvspan(x, xx, color='red', alpha=0.2, label='Incidence angle 43°, Descending track')
                            a += 1
                        else:
                            ax.axvspan(x, xx, color='red', alpha=0.2)
                    elif relativeorbit.values.flatten()[j] == 117:
                        if b == 0:
                            ax.axvspan(x, xx, color='blue', alpha=0.2, label='Incidence angle 45°, Ascending track')
                            b += 1
                        else:
                            ax.axvspan(x, xx, color='blue', alpha=0.2)
                    elif relativeorbit.values.flatten()[j] == 168:
                        if c == 0:
                            ax.axvspan(x, xx, color='orange', alpha=0.2, label='Incidence angle 35°, Descending track')
                            c += 1
                        else:
                            ax.axvspan(x, xx, color='orange', alpha=0.2)
                    elif relativeorbit.values.flatten()[j] == 44:
                        if d == 0:
                            ax.axvspan(x, xx, color='green', alpha=0.2, label='Incidence angle 36°, Ascending track')
                            d += 1
                        else:
                            ax.axvspan(x, xx, color='green', alpha=0.2)
                    else:
                        pass

                ax.plot(date, 10 * np.log10(data_field.filter(like='S1_vv')), '-', color='black', label='Sentinel-1', linewidth=3, marker='s')

                ax.set_xlim([datetime.date(2017, 3, 22), datetime.date(2017, 7, 18)])
                plt.legend(prop={'size': 14}, loc=3)
                plt.grid(linestyle='dotted')
                plt.setp(ax.get_xticklabels(), visible=False)

                # Transmissivity panel.
                ax0 = plt.subplot(gs[1])
                plt.tick_params(labelsize=17)
                for kkkk in field:
                    for k in surface_list:
                        for kk in canopy_list:

                            if k == 'Oh92':
                                hm = 'Oh92'
                                colors = 'b'
                            elif k == 'Oh04':
                                hm = 'Oh04'
                                colors = 'r'
                            elif k == 'Dubois95':
                                hm = 'Dubois95'
                                colors = 'y'
                            elif k == 'WaterCloud':
                                hm = 'WCM'
                                colors = 'm'
                            elif k == 'I2EM':
                                hm = 'IEM_B'
                                colors = 'g'

                            ground = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk).filter(like='part_g')
                            ground = ground[ground.columns[0]]

                            lai = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk).filter(like='LAI_insitu').values
                            theta = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk).filter(like='theta').values

                            # Two-way canopy transmissivity for SSRT vs WCM.
                            if kk == 'turbid_isotropic':
                                coef = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk).filter(like='coef').values
                                d = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk).filter(like='height').values
                                T = np.exp(-coef * np.sqrt(lai) * d / np.cos(theta))
                                T = T ** 2
                                ax0.plot(date, T.flatten(), color=colors, marker='s', linestyle='dashed')
                            else:
                                B_vv = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk).filter(like='B_vv').values
                                T = np.exp(-2 * B_vv * lai / np.cos(theta))
                                ax0.plot(date, T.flatten(), color=colors, marker='s')

                a = 0
                b = 0
                c = 0
                d = 0

                relativeorbit = data_field.filter(like='relativeorbit')
                for j in range(len(relativeorbit)):
                    x = relativeorbit.index[j] - datetime.timedelta(days=0.4)
                    xx = relativeorbit.index[j] + datetime.timedelta(days=0.4)
                    if relativeorbit.values.flatten()[j] == 95:
                        if a == 0:
                            ax0.axvspan(x, xx, color='red', alpha=0.2, label='descending 43°')
                            a += 1
                        else:
                            ax0.axvspan(x, xx, color='red', alpha=0.2)
                    elif relativeorbit.values.flatten()[j] == 117:
                        if b == 0:
                            ax0.axvspan(x, xx, color='blue', alpha=0.2, label='ascending 43°')
                            b += 1
                        else:
                            ax0.axvspan(x, xx, color='blue', alpha=0.2)
                    elif relativeorbit.values.flatten()[j] == 168:
                        if c == 0:
                            ax0.axvspan(x, xx, color='orange', alpha=0.2, label='descending 35°')
                            c += 1
                        else:
                            ax0.axvspan(x, xx, color='orange', alpha=0.2)
                    elif relativeorbit.values.flatten()[j] == 44:
                        if d == 0:
                            ax0.axvspan(x, xx, color='green', alpha=0.2, label='ascending 36°')
                            d += 1
                        else:
                            ax0.axvspan(x, xx, color='green', alpha=0.2)
                    else:
                        pass
                plt.ylabel('Transmissivity\nT', fontsize=18)
                ax0.set_xlim([datetime.date(2017, 3, 22), datetime.date(2017, 7, 18)])
                ax0.set_ylim(-0.2, 1.1)
                plt.grid(linestyle='dotted')
                plt.setp(ax0.get_xticklabels(), visible=False)

                # LAI / canopy-height panel.
                ax1 = plt.subplot(gs[2], sharex=ax)
                plt.tick_params(labelsize=17)
                # remove vertical gap between subplots
                plt.subplots_adjust(hspace=.0)
                plt.grid(linestyle='dotted')
                plt.setp(ax1.get_xticklabels(), visible=False)

                lai_field = data_field.filter(like='LAI_insitu')
                height_field = data_field.filter(like='height')

                ax1.plot(date, lai_field, color='green', linewidth=2, label='LAI')
                ax2 = ax1.twinx()
                plt.tick_params(labelsize=17)
                ax2.plot(date, height_field, color='black', linewidth=2, label='Height')
                ax1.set_ylabel('LAI', fontsize=16)
                ax2.set_ylabel('Height\n[m]', fontsize=16)

                # add std for LAI and height for field 508 (data from field
                # measurements)
                lai_old = copy.deepcopy(lai_field)
                height_old = copy.deepcopy(height_field)
                # BUG FIX: the original tested ``field == '508_high'`` where
                # ``field`` is a *list* (e.g. ['301_high']), so the branch
                # could never fire.  Compare the current field id instead.
                if kkkk == '508_high':
                    lai_field[lai_field.index > '2017-03-28'] = 0.2218
                    lai_field[lai_field.index > '2017-04-05'] = 0.1367
                    lai_field[lai_field.index > '2017-04-10'] = 0.4054
                    lai_field[lai_field.index > '2017-04-21'] = 0.3247
                    lai_field[lai_field.index > '2017-05-02'] = 0.5546
                    lai_field[lai_field.index > '2017-05-10'] = 0.5852
                    lai_field[lai_field.index > '2017-05-16'] = 0.3058
                    lai_field[lai_field.index > '2017-05-26'] = 0.5373
                    lai_field[lai_field.index > '2017-05-29'] = 0.332
                    lai_field[lai_field.index > '2017-06-02'] = 0.2856
                    lai_field[lai_field.index > '2017-06-13'] = 0.4717
                    lai_field[lai_field.index > '2017-06-26'] = 0.2982
                    lai_field[lai_field.index > '2017-07-06'] = 0.253

                    height_field[height_field.index > '2017-03-28'] = 0.005774
                    height_field[height_field.index > '2017-04-05'] = 0.015275
                    height_field[height_field.index > '2017-04-10'] = 0.026458
                    height_field[height_field.index > '2017-04-21'] = 0.049329
                    height_field[height_field.index > '2017-05-02'] = 0.01
                    height_field[height_field.index > '2017-05-10'] = 0.01
                    height_field[height_field.index > '2017-05-26'] = 0.028868
                    height_field[height_field.index > '2017-05-29'] = 0.028868
                    height_field[height_field.index > '2017-06-02'] = 0.028868
                    height_field[height_field.index > '2017-06-13'] = 0.020817
                    height_field[height_field.index > '2017-06-26'] = 0.025166
                    height_field[height_field.index > '2017-07-06'] = 0.015275

                ax1.fill_between(lai_field.index, lai_old.values.flatten() - lai_field.values.flatten(), lai_old.values.flatten() + lai_field.values.flatten(), color='green', alpha=0.2, label='Standard Deviation')
                ax2.fill_between(height_field.index, height_old.values.flatten() - height_field.values.flatten(), height_old.values.flatten() + height_field.values.flatten(), color='black', alpha=0.2, label='Standard Deviation')

                ax2.legend(bbox_to_anchor=(.965, 0.45), prop={'size': 14})
                ax1.legend(loc=2, prop={'size': 14})

                # ax1.set_xticks([])
                ax1.set_ylim(0, 6.7)
                ax2.set_ylim(0, 1)
                start, end = ax1.get_ylim()
                ax1.yaxis.set_ticks(np.arange(start, end, 2))

                # soil moisture and rainfall
                ax3 = plt.subplot(gs[3], sharex=ax)
                plt.tick_params(labelsize=17)
                # remove vertical gap between subplots
                plt.subplots_adjust(hspace=.0)
                plt.grid(linestyle='dotted')
                ax3.plot(date, data_field.filter(like='SM_insitu'), color='blue', linewidth=2, label='Soil Moisture')
                ax3.set_ylabel('Soil Moisture\n$[cm^3/cm^3]$', fontsize=16)
                ax5 = ax3.twinx()
                date_agro = pd.to_datetime(df_agro['date'], format='%d.%m.%Y')
                agro_sum = df_agro['SUM_NN050'][87:192]
                ax5.bar(agro_sum.index, agro_sum, width=0.8, label='Precipitation')
                ax3.legend(loc=2, prop={'size': 14})
                ax5.legend(loc=1, prop={'size': 14})
                ax5.set_ylabel('Precipita-\ntion [mm]', fontsize=16)
                ax5.set_ylim(0, 39)
                ax3.set_ylim(0.17, 0.38)
                plt.setp(ax3.get_xticklabels(), visible=False)
                plt.tick_params(labelsize=17)

                # BBCH phenology panel.
                ax4 = plt.subplot(gs[4], sharex=ax)
                plt.tick_params(labelsize=17)
                # remove vertical gap between subplots
                plt.subplots_adjust(hspace=.0)
                plt.grid(linestyle='dotted')
                bbch = pd.read_csv('/media/tweiss/Work/z_final_mni_data_2017/bbch_2017.csv', header=[0, 1])
                bbch = bbch.set_index(pd.to_datetime(bbch['None']['None'], format='%Y-%m-%d'))
                bbch.index = pd.to_datetime(bbch.index)

                lai_field['bbch'] = 0

                bbch_new = bbch.filter(like=kkkk[0:3])
                # Determine the last date of each BBCH decade (n2..n9).
                for t, tt in enumerate(bbch.index):
                    if t == 0:
                        start_date = '2017-03-29'
                    else:
                        start_date = bbch.index[t]
                    try:
                        end_date = bbch.index[t + 1]
                    except IndexError:
                        start_date = bbch.index[t]
                        end_date = '2017-07-30'
                    mask = (lai_field.index > start_date) & (lai_field.index <= end_date)

                    bbbb = lai_field['bbch'].where(~mask, other=2)
                    if bbch.index[t] < datetime.datetime.strptime('2017-03-29', '%Y-%m-%d'):
                        pass
                    else:
                        if bbch_new.values[t] < 30 and bbch_new.values[t] >= 20:
                            n2 = max(lai_field['bbch'][mask].index)
                        elif bbch_new.values[t] < 40 and bbch_new.values[t] >= 30:
                            n3 = max(lai_field['bbch'][mask].index)
                        elif bbch_new.values[t] < 50 and bbch_new.values[t] >= 40:
                            n4 = max(lai_field['bbch'][mask].index)
                        elif bbch_new.values[t] < 60 and bbch_new.values[t] >= 50:
                            n5 = max(lai_field['bbch'][mask].index)
                        elif bbch_new.values[t] < 70 and bbch_new.values[t] >= 60:
                            n6 = max(lai_field['bbch'][mask].index)
                        elif bbch_new.values[t] < 80 and bbch_new.values[t] >= 70:
                            n7 = max(lai_field['bbch'][mask].index)
                        elif bbch_new.values[t] < 90 and bbch_new.values[t] >= 80:
                            n8 = max(lai_field['bbch'][mask].index)
                        elif bbch_new.values[t] < 100 and bbch_new.values[t] >= 90:
                            n9 = max(lai_field['bbch'][mask].index)
                # Durations of the phenological phases as timedeltas.
                bbch_ = [n2 - datetime.datetime.strptime('2017-03-22', '%Y-%m-%d'), n3 - n2, n4 - n3, n5 - n4, n6 - n5, n7 - n6, n8 - n7, n9 - n8]

                # Plot BBCH
                hm = lai_field.filter(like='bbch')
                label = ['', 'BBCH', '']
                width = 0.3
                legend_items = ['Tillering', 'Stem elongation', 'Booting', 'Heading', 'Flowering', 'Fruit development', 'Ripening', 'Senescence']

                a_508 = 0

                aa_508 = mdates.date2num(lai_field['bbch'].index[0])

                for xxxx, kkkkk in enumerate(bbch_):
                    a_508 = a_508 + bbch_[xxxx].total_seconds() / 60 / 60 / 24
                    ax4.barh(label, [0, a_508, 0], width, label=legend_items[xxxx], left=[0, aa_508, 0])

                    aa_508 = mdates.date2num(lai_field['bbch'].index[0]) + a_508

                xmin, xmax = ax4.get_xlim()

                # White bar hides the overshoot of the stacked phase bars.
                ax4.barh(label, [0, 200, 0], width, left=[0, xmax - 1, 0], color='white')
                ax4.set_ylim(0, 1.7)
                plt.legend(bbox_to_anchor=(.935, 0.4), ncol=8)

                plt.text(0.98, 0.05, "(a)", transform=ax.transAxes, fontsize=20, horizontalalignment='center', verticalalignment='center')
                plt.text(0.98, 0.2, "(b)", transform=ax0.transAxes, fontsize=20, horizontalalignment='center', verticalalignment='center')
                plt.text(0.98, 0.2, "(c)", transform=ax2.transAxes, fontsize=20, horizontalalignment='center', verticalalignment='center')
                plt.text(0.98, 0.2, "(d)", transform=ax3.transAxes, fontsize=20, horizontalalignment='center', verticalalignment='center')
                plt.text(0.98, 0.2, "(e)", transform=ax4.transAxes, fontsize=20, horizontalalignment='center', verticalalignment='center')

                plt.savefig(plot_output_path + pol + '_all_' + kkk + kkkk, bbox_inches='tight')

                plt.close()
# kaska_ssrt_tau.py — SSRT/VWC variant of the soil-moisture retrieval:
# optimises surface roughness, soil moisture and vegetation water content
# against Sentinel-1 VV backscatter using cost_function_vwc.
import os
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
from matplotlib.dates import MonthLocator
# import matplotlib.ticker
import numpy as np
# from sense.canopy import OneLayer
# from sense.soil import Soil
# from sense import model
import scipy.stats
from scipy.optimize import minimize
import pdb
from z_helper import *
# from z_optimization import *
import datetime
from matplotlib import gridspec
import datetime
from matplotlib.lines import Line2D
import copy
import os
import osr
import gdal
import datetime
import numpy as np
from netCDF4 import Dataset
from scipy.ndimage import label
from utils import reproject_data
from skimage.filters import sobel
from collections import namedtuple
from scipy.optimize import minimize
from scipy.interpolate import interp1d
# from watercloudmodel import cost_function
# from watercloudmodel import cost_function2
from scipy.ndimage.filters import gaussian_filter1d
import pdb
from z_helper import *
# from watercloudmodel import ssrt_jac_
from watercloudmodel_vwc import cost_function_vwc, ssrt_jac_vwc, ssrt_vwc


def smooth(y, box_pts):
    # Moving-average (boxcar) smoothing over box_pts samples; output has the
    # same length as the input (np.convolve mode='same').
    box = np.ones(box_pts)/box_pts
    y_smooth = np.convolve(y, box, mode='same')
    return y_smooth


def do_one_pixel_field(data_field, vv, vh, vwc, theta, time, sm, sm_std, b, b_std, omega, rms, orbits, unc):
    """Invert the SSRT/VWC model for one field pixel.

    The state vector is [rms, omega, sm..., vwc...]; soil moisture and
    vegetation water content are retrieved per acquisition while the two
    leading scalars (surface rms height and single-scattering albedo) are
    shared across the whole time series.  ``b`` is kept fixed at its prior
    in this variant (see the commented-out parameterisation below).

    Returns (times, vwcs, bs, sms, ps, orbit_mask) sorted by time, where
    ``ps`` holds the retrieved [rms, omega] pair per orbit group.
    """
    ps = []
    vwcs = []
    bs = []
    sms = []
    times = []

    # NOTE(review): uorbits is immediately overridden to a single orbit and
    # orbit_mask is then overwritten to select all four orbits inside the
    # loop — leftovers of per-orbit experiments; effectively one pass over
    # all acquisitions.
    uorbits = np.unique(orbits)
    uorbits = np.array([95])
    for orbit in uorbits:
        # for jj in range(len(vv)):
        # pdb.set_trace()
        orbit_mask = orbits == orbit
        # orbit_mask = (orbits == 44) | (orbits == 168)
        # orbit_mask = (orbits == 95) | (orbits == 117)
        orbit_mask = (orbits == 44) | (orbits == 95) | (orbits == 117) | (orbits == 168)
        # orbit_mask = (orbits == 168)
        # orbit_mask = (orbits == 44) | (orbits == 95) | (orbits == 117)
        ovv, ovh, ovwc, otheta, otime = vv[orbit_mask], vh[orbit_mask], vwc[orbit_mask], theta[orbit_mask], time[orbit_mask]
        osm, osm_std, osb, osb_std = sm[orbit_mask], sm_std[orbit_mask], b[orbit_mask], b_std[orbit_mask]

        # Prior uncertainty on vegetation water content.
        ovwc_std = np.ones_like(osb)*0.5

        # alpha = _calc_eps(osm)
        # alpha = osm
        # alpha_std = np.ones_like(alpha)*10
        # alpha_std = osm_std
        # pdb.set_trace()

        # Alternative parameterisation that also retrieves b (kept for
        # reference):
        # prior_mean = np.concatenate([[0, ]*2, osm, ovwc, osb])
        # prior_unc = np.concatenate([[10., ]*2, osm_std, ovwc_std, osb_std])
        # xvv = np.array([rms, omega])
        # x0 = np.concatenate([xvv, osm, ovwc, osb])
        # bounds = (
        #     [[0.013, 0.013]] # s
        #     + [[0.0107, 0.0107]] # omega
        #     + [[0.01, 0.7]] * osb.shape[0] # mv
        #     + [[0, 7.5]] * osb.shape[0] # vwc
        #     + [[0.01, 0.6]] * osb.shape[0] # b
        # )

        # Weak prior ([0,0] mean, unc 10) on the two leading scalars.
        prior_mean = np.concatenate([[0, ]*2, osm, ovwc])
        prior_unc = np.concatenate([[10., ]*2, osm_std, ovwc_std])

        xvv = np.array([rms, omega])

        x0 = np.concatenate([xvv, osm, ovwc])

        bounds = (
            [[0.005, 0.02115]]  # s (surface rms height)
            + [[0.027, 0.027]]  # omega — pinned to a single value
            + [[0.01, 0.7]] * osb.shape[0]  # mv
            + [[0, 7.5]] * osb.shape[0]  # vwc
        )

        # The fixed b profile enters the cost function as auxiliary data.
        data = osb

        gamma = [5, 5]  # temporal smoothness weights

        retval = minimize(cost_function_vwc,
                          x0,
                          args=(ovh, ovv, otheta, gamma, prior_mean, prior_unc, unc, data),
                          jac=True,
                          bounds = bounds,
                          options={"disp": False},)

        posterious_sm = retval.x[2 : 2+len(osb)]
        posterious_vwc = retval.x[2+len(osb) : 2+2*len(osb)]
        # posterious_b = retval.x[2+2*len(osb) : 2+3*len(osb)]
        posterious_b = osb  # b is not retrieved; prior passed through

        sms.append(posterious_sm)
        vwcs.append(posterious_vwc)
        bs.append(posterious_b)
        times.append(otime)
        ps.append(retval.x[:2])

    # Sort all retrieved series chronologically.
    order = np.argsort(np.hstack(times))
    times = np.hstack(times )[order]
    vwcs = np.hstack(vwcs )[order]
    bs = np.hstack(bs )[order]
    sms = np.hstack(sms )[order].real
    return times, vwcs, bs, sms, np.array(ps), orbit_mask


def _simple_ew():
    """
    eq. 4.69
    simplistic approach with T=23°C, bulk density = 1.7 g/cm3
    (real part of the free-water dielectric constant at 5.405 GHz)
    """
    f0 = 18.64 # relaxation frequency [GHz]
    f = 5.405
    hlp = f/f0
    e1 = 4.9 + (74.1)/(1.+hlp**2.)
    # e2 =(74.1*hlp)/(1.+hlp**2.) + 6.46 * self.sigma/self.f
    # return e1 + 1.j * e2
    return e1

def _calc_eps(mv):
    """
    calculate dielectric permittivity (real part only)
    Eq. 4.66 (Ulaby et al., 2014)
    """
    clay = 0.0738
    sand = 0.2408
    bulk = 1.45
    alpha = 0.65
    beta1 = 1.27-0.519*sand - 0.152*clay
    # beta2 / sigma would be needed for the imaginary part; currently unused.
    beta2 = 2.06 - 0.928*sand -0.255*clay
    sigma = -1.645 + 1.939*bulk - 2.256*sand + 1.594*clay


    e1 = (1.+0.66*bulk+mv**beta1*_simple_ew()**alpha - mv)**(1./alpha)
    # e2 = np.imag(self.ew)*self.mv**self.beta2
    # return e1 + 1.j*e2
    return e1

# def quad_approx_solver(alphas):
#     x = np.arange(0.01, 0.5, 0.01)
#     p = np.polyfit(x, _calc_eps(x), 2)
#     # 2nd order polynomial
#     #solve
#     solutions = [np.roots([p[0], p[1], p[2]-aa]) for aa in alphas]
#     return solutions

def find_nearest(array, value):
    # Return (nearest_value, index) of the element of array closest to value.
    array = np.asarray(array)
    idx = (np.abs(array - value)).argmin()
    return array[idx], idx




### Data preparation df_agro!!!! ###
#-----------------------------------------------------------------
# storage information
path = '/media/tweiss/Work/z_final_mni_data_2017'
file_name = 'in_situ_s1_buffer_100' # theta needs to be changed to for norm multi
extension = '.csv'

path_agro = '/media/nas_data/2017_MNI_campaign/field_data/meteodata/agrarmeteorological_station'
path_agro = '/media/tweiss/Work/Paper/in_progress/RT_model_comparison/images'
file_name_agro = 'Daily_Freising'
extension_agro = '.csv'

field = '508_high'
pol = 'vv'

df, df_agro, field_data, field_data_orbit, theta_field, sm_field, height_field, lai_field, vwc_field, pol_field, vv_field, vh_field, relativeorbit, vwcpro_field = read_data(path, file_name, extension, field, path_agro, file_name_agro, extension_agro, pol)

# Successive re-assignments narrow the configuration; the last one wins.
aggregation = ['','_buffer_30','_buffer_50','_buffer_100','_Field_buffer_30']
pre_processing = ['multi', 'norm_multi']
aggregation = ['_buffer_50','_Field_buffer_30']
pre_processing = ['multi']
aggregation = ['_buffer_100']
# aggregation = ['_Field_buffer_30']
'Oh04', 'Dubois95', 'WaterCloud', 'I2EM'] +canopy_list = ['turbid_isotropic', 'water_cloud'] + +surface_list = ['Oh92', 'I2EM'] +canopy_list = ['turbid_isotropic'] + +surface_list = ['Oh04'] +# surface_list = ['Oh92'] +# canopy_list = ['water_cloud'] +field = ['508_high'] +field = ['508_low'] +field = ['508_med'] +field = ['301_high'] +field = ['301_low'] +field = ['301_med'] +field = ['542_high'] +field = ['542_low'] +field = ['542_med'] + +field = ['508_high','508_low','508_med','301_high','301_low','301_med','542_high','542_low','542_med'] + +### option for time invariant or variant calibration of parameter +#------------------------------- +opt_mod = ['time_variant'] +#--------------------------- + +years = ['_2017','_2018'] +numbers = [1,3,5,7,9] +numbers = [1] + +for zzz in numbers: + + for p in pre_processing: + + for pp in aggregation: + + for year in years: + if year == '_2017': + field_list = ['508_high','508_low','508_med','301_high','301_low','301_med','542_high','542_low','542_med'] + elif year == '_2018': + field_list = ['525_high','525_low','525_med','317_high','317_low','317_med'] + else: + pass + + # versions = ['everything','','44_117','95_168','44_168','117_95','44_95','117_168','44_117_95','44_117_168','44_95_168','117_95_168'] + # ver = ['','','44','95','44','117','44','117','44','44','44','117'] + # ver2 = ['','','117','168','168','95','95','168','117','117','95','95'] + # ver3 = ['','','','','','','','','95','168','168','168'] + + versions = ['','everything'] + ver = ['',''] + ver2 = ['',''] + ver3 = ['',''] + + + # versions = ['everything'] + # ver = [''] + # ver2 = [''] + # ver3 = [''] + + # versions = ['44_168'] + # ver = ['44'] + # ver2 = ['168'] + # ver3 = [''] + + for i, ii in enumerate(versions): + + if ii == 'everything': + orbit_list = [None] + orbit1=None + orbit2=None + orbit3=None + orbit4=None + plot_output_path = '/media/tweiss/Work/paper3/'+year[1:]+'/z_dense_s1_time_series_n'+str(zzz)+p+pp+'_all'+'/' + csv_output_path = 
plot_output_path+'csv/' + elif ii == '': + orbit_list = [44,117,95,168] + orbit2=None + orbit3=None + orbit4=None + plot_output_path = '/media/tweiss/Work/paper3/'+year[1:]+'/z_dense_s1_time_series_n'+str(zzz)+p+pp+'/' + csv_output_path = plot_output_path+'csv/' + else: + plot_output_path = '/media/tweiss/Work/paper3/'+year[1:]+'/z_dense_s1_time_series_n'+str(zzz)+p+pp+'_'+ii+'/' + csv_output_path = plot_output_path+'csv/'+ver[i]+'_' + orbit_list = [int(ver[i])] + orbit2 = int(ver2[i]) + if ver3[i] == '': + orbit3 = None + else: + orbit3 = int(ver3[i]) + + + data = pd.read_csv(csv_output_path+'all'+pp+'.csv',header=[0,1,2,3,4,5],index_col=0) + + + + + + + # fig, ax = plt.subplots(figsize=(17, 13)) + # gs = gridspec.GridSpec(5, 1, height_ratios=[14, 3, 3, 3, 3]) + # ax = plt.subplot(gs[0]) + + # plt.ylabel('Backscatter [dB]', fontsize=18) + # plt.xlabel('Date', fontsize=18) + # plt.tick_params(labelsize=17) + + # ax.set_ylim([-21.5,-8.5]) + + + # colormaps = ['Greens', 'Purples', 'Blues', 'Oranges', 'Reds', 'Greys', 'pink', 'bone', 'Blues', 'Blues', 'Blues'] + # r = 0 + + # colormap = plt.get_cmap(colormaps[r]) + # colors = [colormap(rr) for rr in np.linspace(0.35, 1., 3)] + + for kkk in opt_mod: + for kkkk in field: + for k in surface_list: + for kk in canopy_list: + + if k == 'Oh92': + hm = 'Oh92' + colors = 'b' + elif k == 'Oh04': + hm = 'Oh04' + colors = 'r' + elif k == 'Dubois95': + hm='Dubois95' + colors = 'y' + elif k == 'WaterCloud': + hm = 'WCM' + colors = 'm' + elif k == 'I2EM': + hm = 'IEM_B' + colors = 'g' + + data_field = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk) + data_field.index = pd.to_datetime(data_field.index) + date = data_field.index + + ### b mean + + data_b = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like='coef') + mean_b = data_b.mean(axis=1) + + + + + + vv = data_field.filter(like='S1_vv').values.flatten() + vv = 10*np.log10(vv) + vh = data_field.filter(like='S1_vh').values.flatten() + vh = 
10*np.log10(vh) + + time = date + time2 = np.array(time) + for jj in range(len(time)): + time2[jj] = time[jj].replace(microsecond=0).replace(second=0).replace(minute=0) + time2 = pd.to_datetime(time2) + time3 = time2.normalize() + theta = data_field.filter(like='theta').values.flatten() + theta = np.rad2deg(theta) + + + + + # lai = data_field.filter(like='LAI_insitu').values.flatten() + # lai = lai + + + + + # s2_data = pd.read_csv('/media/tweiss/Daten/data_AGU/S2_'+kkkk+pp+'.csv',header=[0],index_col=0) + # s2_data.index = pd.to_datetime(s2_data.index).floor('Min').floor('H') + # s2_lai = s2_data.loc[time2]['lai'].values.flatten() + # s2_cab = s2_data.loc[time2]['cab'].values.flatten() + # s2_cbrown = s2_data.loc[time2]['cbrown'].values.flatten() + + sm_insitu = data_field.filter(like='SM_insitu').values.flatten() + + + + + + api_data = pd.read_csv('/media/tweiss/Daten/data_AGU/api'+year+'_radolan.csv',header=[0],index_col=0) + api_data.index = pd.to_datetime(api_data.index) + api_field = api_data.filter(like=kkkk) + api_sm = api_field.loc[time2].values.flatten() + + vwc_data = pd.read_csv('/media/tweiss/Work/z_final_mni_data_2017/vwc_sentinel_2'+pp+'_'+year+'_paper3_gao.csv', header=[0,1],index_col=0) + + + vwc_data.index = pd.to_datetime(vwc_data.index) + vwc_data = vwc_data.resample('D').mean().interpolate() + vwc_data = vwc_data.loc[time2.normalize()] + + vwc_field = vwc_data.filter(like=kkkk).filter(like=').1') + vwc_sentinel_2 = vwc_field.filter(like='m_pos_ag_vwc') + + sm_insitu = data_field.filter(like='SM_insitu').values.flatten() + pdb.set_trace() + + + # sm = smooth(sm,2) + # sm[:] = 0.25 + # sm = data_field.filter(like='SM_insitu').values.flatten() + sm = api_sm + # sm[:] = 0.2 + sm_std = data_field.filter(like='SM_insitu').values.flatten() + # ooo = np.abs(sm[1:]-sm[:-1])*20 + # sm_std[0] = ooo[-1] + # sm_std[1:] = ooo + sm_std[:] = 0.25 + + b = data_field.filter(like='coef').values.flatten() + b_old = data_field.filter(like='coef').values.flatten() + 
b_std = data_field.filter(like='SM_insitu').values.flatten() + # b = data_field.filter(like='coef').rolling(4).mean().values.flatten() + # b[0] = b_old[0] + # b[1] = b_old[1] + # b[2] = b_old[2] + # b[3] = b_old[3] + # b = mean_b.values.flatten() + + + # b=b-0.1 + b_std[:] = 0.5 + # height = data_field.filter(like='height').values.flatten() + orbits = data_field.filter(like='relativeorbit').values.flatten() + orbits95 = orbits==95 + orbits168 = orbits==168 + orbits44 = orbits==44 + orbits117 = orbits==117 + orbits44_168 = (orbits == 44) | (orbits == 168) + + b[orbits95] = 0.4 + b[orbits117] = 0.4 + b[orbits44] = 0.6 + b[orbits168] = 0.6 + orbits95[0:30] = False + orbits117[0:30] = False + orbits44[0:30] = False + orbits168[0:30] = False + + b[orbits95] = 0.1 + b[orbits117] = 0.1 + b[orbits44] = 0.2 + b[orbits168] = 0.15 + + + # pdb.set_trace() + + omega = 0.0107 + unc = 1.0 + vwc = data_field.filter(like='VWC').values.flatten() + + rms = 0.0115 + rms = 0.027 + # rms = 0.018 + + # unc_array = np.arange(0,2,0.1) + # coef_array = np.arange(0,2,0.1) + # sm_array = np.arange(0,2,0.1) + + # hm = {} + # for r in unc_array: + # for rr in coef_array: + # for rrr in sm_array: + # unc = r + # coef_std[:] = rr + # sm_std[:] = rrr + # times, lais, coefs, sms, orbit_mask = do_one_pixel_field(data_field, vv, vh, lai, theta, time, sm, sm_std, coef, coef_std, height, orbits,unc=unc) + # rmse_vv = rmse_prediction(sm_insitu,sms) + # bias_vv = bias_prediction(sm_insitu,sms) + # ubrmse_vv = ubrmse_prediction(rmse_vv,bias_vv) + # hm[(r,rr,rrr)] = ubrmse_vv + + # pdb.set_trace() + # min(hm, key=hm.get) + # hm[min(hm, key=hm.get)] + + vv = 10 ** (vv/10) + + # pdb.set_trace() + times, vwcs, bs, sms, ps, orbit_mask = do_one_pixel_field(data_field, vv, vh, vwc, theta, time, sm, sm_std, b, b_std, omega, rms, orbits,unc=unc) + + fig, ax = plt.subplots(figsize=(17, 13)) + gs = gridspec.GridSpec(4, 1, height_ratios=[5, 5, 5, 5]) + ax = plt.subplot(gs[0]) + + + # sm_insitu = sm_insitu[orbit_mask] 
+ # api_sm = api_sm[orbit_mask] + # vwc = vwc[orbit_mask] + # b = b[orbit_mask] + # b_old = b_old[orbit_mask] + # vv = vv[orbit_mask] + # theta = theta[orbit_mask] + # sm = sm[orbit_mask] + + + + + ax.plot(times,sm_insitu, label='insitu') + + + + + rmse_vv = rmse_prediction(sm_insitu,api_sm) + bias_vv = bias_prediction(sm_insitu,api_sm) + ubrmse_vv = ubrmse_prediction(rmse_vv,bias_vv) + ax.plot(times,api_sm, label='prior RMSE:'+str(rmse_vv)[0:6]+' ubRMSE:'+str(ubrmse_vv)[0:6]) + + rmse_vv = rmse_prediction(sm_insitu,sms) + bias_vv = bias_prediction(sm_insitu,sms) + ubrmse_vv = ubrmse_prediction(rmse_vv,bias_vv) + ax.plot(times,sms, label='model RMSE:'+str(rmse_vv)[0:6]+' ubRMSE:'+str(ubrmse_vv)[0:6]) + plt.ylabel('Soil moisture', fontsize=18) + plt.ylim(0,0.6) + plt.grid(linestyle='dotted') + plt.legend() + plt.subplots_adjust(hspace=.0) + plt.setp(ax.get_xticklabels(), visible=False) + + ax1 = plt.subplot(gs[1]) + + ax1.plot(times,vwc,label='input vwc') + ax1.plot(times,vwcs,label='model vwc') + plt.ylabel('VWC', fontsize=18) + plt.ylim(0,6) + plt.grid(linestyle='dotted') + plt.legend() + plt.subplots_adjust(hspace=.0) + plt.setp(ax1.get_xticklabels(), visible=False) + + ax2 = plt.subplot(gs[2]) + + ax2.plot(times,b,label='input b') + ax2.plot(times,bs,label='model b') + ax2.plot(times,b_old,label='b calibrated') + + plt.ylabel('b', fontsize=18) + plt.ylim(0,1) + plt.grid(linestyle='dotted') + plt.legend() + plt.subplots_adjust(hspace=.0) + plt.setp(ax1.get_xticklabels(), visible=False) + + ax3 = plt.subplot(gs[3]) + + sigma_vv, vv_g, vv_c = ssrt_vwc(sms, vwc, rms, omega, bs, theta) + + ax3.plot(times,10*np.log10(vv),label='S1') + ax3.plot(times,10*np.log10(sigma_vv),label='model') + ax3.plot(times,10*np.log10(vv_g),label='ground') + ax3.plot(times,10*np.log10(vv_c),label='canopy') + plt.ylabel('VV [dB]', fontsize=18) + plt.ylim(-30,-8) + plt.grid(linestyle='dotted') + plt.legend() + + plt.xlabel('Date', fontsize=18) + # plt.tick_params(labelsize=17) + 
plt.subplots_adjust(hspace=.0) + rmse_vv = rmse_prediction(sm_insitu,sm) + bias_vv = bias_prediction(sm_insitu,sm) + ubrmse_vv = ubrmse_prediction(rmse_vv,bias_vv) + plt.title('rms: 0.013, rms model:'+str(ps[0,0])[:6]+' omega: 0.0107, omega model:'+str(ps[0,1])[:6]) + + + + # plt.show() + plt.savefig('/media/tweiss/Work/paper3/plot/'+year[1:]+'/vwc/'+kkkk, bbox_inches = 'tight') + + + # noprior/bmean_s/oh04_unc10_apism025_'+kkkk, bbox_inches = 'tight') + plt.close() +pdb.set_trace() + + diff --git a/kaska/kaska_ssrt_tau_rms.py b/kaska/kaska_ssrt_tau_rms.py new file mode 100644 index 0000000..ef27314 --- /dev/null +++ b/kaska/kaska_ssrt_tau_rms.py @@ -0,0 +1,661 @@ + +import os +import pandas as pd +import matplotlib.pyplot as plt +import matplotlib.dates as mdates +from matplotlib.dates import MonthLocator +# import matplotlib.ticker +import numpy as np +# from sense.canopy import OneLayer +# from sense.soil import Soil +# from sense import model +import scipy.stats +from scipy.optimize import minimize +import pdb +from z_helper import * +# from z_optimization import * +import datetime +from matplotlib import gridspec +import datetime +from matplotlib.lines import Line2D +import copy +import os +import osr +import gdal +import datetime +import numpy as np +from netCDF4 import Dataset +from scipy.ndimage import label +from utils import reproject_data +from skimage.filters import sobel +from collections import namedtuple +from scipy.optimize import minimize +from scipy.interpolate import interp1d +# from watercloudmodel import cost_function +# from watercloudmodel import cost_function2 +from scipy.ndimage.filters import gaussian_filter1d +import pdb +from z_helper import * +# from watercloudmodel import ssrt_jac_ +from watercloudmodel_vwc_rms import cost_function_vwc, ssrt_jac_vwc, ssrt_vwc + + + +def smooth(y, box_pts): + box = np.ones(box_pts)/box_pts + y_smooth = np.convolve(y, box, mode='same') + return y_smooth + + +def do_one_pixel_field(data_field, vv, vh, 
vwc, theta, time, sm, sm_std, b, b_std, omega, rms, rms_std, orbits, unc): + + ps = [] + vwcs = [] + bs = [] + sms = [] + srms = [] + times = [] + + uorbits = np.unique(orbits) + # uorbits = np.array([95]) + for orbit in uorbits: + # for jj in range(len(vv)): + # pdb.set_trace() + orbit_mask = orbits == orbit + # orbit_mask = (orbits == 44) | (orbits == 168) + # orbit_mask = (orbits == 95) | (orbits == 117) + # orbit_mask = (orbits == 44) | (orbits == 95) | (orbits == 117) | (orbits == 168) + # orbit_mask = (orbits == 168) + # orbit_mask = (orbits == 44) | (orbits == 95) | (orbits == 117) + ovv, ovh, ovwc, otheta, otime = vv[orbit_mask], vh[orbit_mask], vwc[orbit_mask], theta[orbit_mask], time[orbit_mask] + osm, osm_std, osb, osb_std = sm[orbit_mask], sm_std[orbit_mask], b[orbit_mask], b_std[orbit_mask] + + + ovwc_std = np.ones_like(osb)*0.05 + + # alpha = _calc_eps(osm) + # alpha = osm + # alpha_std = np.ones_like(alpha)*10 + # alpha_std = osm_std + # pdb.set_trace() + + # prior_mean = np.concatenate([[0, ]*2, osm, ovwc, osb]) + # prior_unc = np.concatenate([[10., ]*2, osm_std, ovwc_std, osb_std]) + + # xvv = np.array([rms, omega]) + + + # x0 = np.concatenate([xvv, osm, ovwc, osb]) + + # bounds = ( + # [[0.013, 0.013]] # s + # + [[0.0107, 0.0107]] # omega + # + [[0.01, 0.7]] * osb.shape[0] # mv + # + [[0, 7.5]] * osb.shape[0] # vwc + # + [[0.01, 0.6]] * osb.shape[0] # b + # ) + + # pdb.set_trace() + prior_mean = np.concatenate([[0, ], [rms], osm, ovwc, osb]) + prior_unc = np.concatenate([[10., ], [rms_std], osm_std, ovwc_std, osb_std]) + + xvv = np.array([omega]) + + + x0 = np.concatenate([xvv, np.array([rms]), osm, ovwc, osb]) + + bounds = ( + [[0.027, 0.027]] # omega + + [[0.005, 0.03]] # s=rms + + [[0.01, 0.7]] * osb.shape[0] # mv + + [[0, 7.5]] * osb.shape[0] # vwc + + [[0.01, 0.6]] * osb.shape[0] # b + ) + + + + data = osb + + gamma = [10, 10] + # pdb.set_trace() + retval = minimize(cost_function_vwc, + x0, + args=(ovh, ovv, otheta, gamma, prior_mean, 
prior_unc, unc, data), + jac=True, + bounds = bounds, + options={"disp": True}) + + posterious_rms = retval.x[1] + posterious_sm = retval.x[2 : 2+len(osb)] + posterious_vwc = retval.x[2+len(osb) : 2+2*len(osb)] + posterious_b = retval.x[2+2*len(osb) : 2+3*len(osb)] + + srms.append(posterious_rms) + sms.append(posterious_sm) + vwcs.append(posterious_vwc) + bs.append(posterious_b) + times.append(otime) + ps.append(retval.x[:1]) + + order = np.argsort(np.hstack(times)) + times = np.hstack(times )[order] + vwcs = np.hstack(vwcs )[order] + bs = np.hstack(bs )[order] + sms = np.hstack(sms )[order].real + # pdb.set_trace() + # srms = np.hstack(srms)[order] + return times, vwcs, bs, sms, np.array(srms), np.array(ps), orbit_mask + + +def _simple_ew(): + """ + eq. 4.69 + simplistic approach with T=23°C, bulk density = 1.7 g/cm3 + """ + f0 = 18.64 # relaxation frequency [GHz] + f = 5.405 + hlp = f/f0 + e1 = 4.9 + (74.1)/(1.+hlp**2.) + # e2 =(74.1*hlp)/(1.+hlp**2.) + 6.46 * self.sigma/self.f + # return e1 + 1.j * e2 + return e1 + +def _calc_eps(mv): + """ + calculate dielectric permittivity + Eq. 4.66 (Ulaby et al., 2014) + """ + clay = 0.0738 + sand = 0.2408 + bulk = 1.45 + alpha = 0.65 + beta1 = 1.27-0.519*sand - 0.152*clay + beta2 = 2.06 - 0.928*sand -0.255*clay + sigma = -1.645 + 1.939*bulk - 2.256*sand + 1.594*clay + + + e1 = (1.+0.66*bulk+mv**beta1*_simple_ew()**alpha - mv)**(1./alpha) + # e2 = np.imag(self.ew)*self.mv**self.beta2 + # return e1 + 1.j*e2 + return e1 + +# def quad_approx_solver(alphas): +# x = np.arange(0.01, 0.5, 0.01) +# p = np.polyfit(x, _calc_eps(x), 2) +# # 2nd order polynomial +# #solve +# solutions = [np.roots([p[0], p[1], p[2]-aa]) for aa in alphas] +# return solutions + +def find_nearest(array, value): + array = np.asarray(array) + idx = (np.abs(array - value)).argmin() + return array[idx], idx + + + + +### Data preparation df_agro!!!! 
### +#----------------------------------------------------------------- +# storage information +path = '/media/tweiss/Work/z_final_mni_data_2017' +file_name = 'in_situ_s1_buffer_100' # theta needs to be changed to for norm multi +extension = '.csv' + +path_agro = '/media/nas_data/2017_MNI_campaign/field_data/meteodata/agrarmeteorological_station' +path_agro = '/media/tweiss/Work/Paper/in_progress/RT_model_comparison/images' +file_name_agro = 'Daily_Freising' +extension_agro = '.csv' + +field = '508_high' +pol = 'vv' + +df, df_agro, field_data, field_data_orbit, theta_field, sm_field, height_field, lai_field, vwc_field, pol_field, vv_field, vh_field, relativeorbit, vwcpro_field = read_data(path, file_name, extension, field, path_agro, file_name_agro, extension_agro, pol) + +aggregation = ['','_buffer_30','_buffer_50','_buffer_100','_Field_buffer_30'] +pre_processing = ['multi', 'norm_multi'] +aggregation = ['_buffer_50','_Field_buffer_30'] +pre_processing = ['multi'] +aggregation = ['_buffer_100'] +# aggregation = ['_Field_buffer_30'] +surface_list = ['Oh92', 'Oh04', 'Dubois95', 'WaterCloud', 'I2EM'] +canopy_list = ['turbid_isotropic', 'water_cloud'] + +surface_list = ['Oh92', 'I2EM'] +canopy_list = ['turbid_isotropic'] + +surface_list = ['Oh04'] +# surface_list = ['Oh92'] +# canopy_list = ['water_cloud'] +field = ['508_high'] +field = ['508_low'] +field = ['508_med'] +field = ['301_high'] +field = ['301_low'] +field = ['301_med'] +field = ['542_high'] +field = ['542_low'] +field = ['542_med'] + +field = ['508_high','508_low','508_med','301_high','301_low','301_med','542_high','542_low','542_med'] + +### option for time invariant or variant calibration of parameter +#------------------------------- +opt_mod = ['time_variant'] +#--------------------------- + +years = ['_2017','_2018'] +numbers = [1,3,5,7,9] +numbers = [1] + +for zzz in numbers: + + for p in pre_processing: + + for pp in aggregation: + + for year in years: + if year == '_2017': + field_list = 
['508_high','508_low','508_med','301_high','301_low','301_med','542_high','542_low','542_med'] + elif year == '_2018': + field_list = ['525_high','525_low','525_med','317_high','317_low','317_med'] + else: + pass + + # versions = ['everything','','44_117','95_168','44_168','117_95','44_95','117_168','44_117_95','44_117_168','44_95_168','117_95_168'] + # ver = ['','','44','95','44','117','44','117','44','44','44','117'] + # ver2 = ['','','117','168','168','95','95','168','117','117','95','95'] + # ver3 = ['','','','','','','','','95','168','168','168'] + + # versions = ['','everything'] + # ver = ['',''] + # ver2 = ['',''] + # ver3 = ['',''] + + + versions = ['everything'] + ver = [''] + ver2 = [''] + ver3 = [''] + + # versions = ['44_168'] + # ver = ['44'] + # ver2 = ['168'] + # ver3 = [''] + + for i, ii in enumerate(versions): + + if ii == 'everything': + orbit_list = [None] + orbit1=None + orbit2=None + orbit3=None + orbit4=None + plot_output_path = '/media/tweiss/Work/paper3/'+year[1:]+'/z_dense_s1_time_series_n'+str(zzz)+p+pp+'_all'+'/' + csv_output_path = plot_output_path+'csv/None_' + elif ii == '': + orbit_list = [44,117,95,168] + orbit2=None + orbit3=None + orbit4=None + plot_output_path = '/media/tweiss/Work/paper3/'+year[1:]+'/z_dense_s1_time_series_n'+str(zzz)+p+pp+'/' + csv_output_path = plot_output_path+'csv/' + else: + plot_output_path = '/media/tweiss/Work/paper3/'+year[1:]+'/z_dense_s1_time_series_n'+str(zzz)+p+pp+'_'+ii+'/' + csv_output_path = plot_output_path+'csv/'+ver[i]+'_' + orbit_list = [int(ver[i])] + orbit2 = int(ver2[i]) + if ver3[i] == '': + orbit3 = None + else: + orbit3 = int(ver3[i]) + + + data = pd.read_csv(csv_output_path+'all'+pp+'.csv',header=[0,1,2,3,4,5],index_col=0) + + + + + + + # fig, ax = plt.subplots(figsize=(17, 13)) + # gs = gridspec.GridSpec(5, 1, height_ratios=[14, 3, 3, 3, 3]) + # ax = plt.subplot(gs[0]) + + # plt.ylabel('Backscatter [dB]', fontsize=18) + # plt.xlabel('Date', fontsize=18) + # 
plt.tick_params(labelsize=17) + + # ax.set_ylim([-21.5,-8.5]) + + + # colormaps = ['Greens', 'Purples', 'Blues', 'Oranges', 'Reds', 'Greys', 'pink', 'bone', 'Blues', 'Blues', 'Blues'] + # r = 0 + + # colormap = plt.get_cmap(colormaps[r]) + # colors = [colormap(rr) for rr in np.linspace(0.35, 1., 3)] + + for kkk in opt_mod: + for kkkk in field_list: + for k in surface_list: + for kk in canopy_list: + + if k == 'Oh92': + hm = 'Oh92' + colors = 'b' + elif k == 'Oh04': + hm = 'Oh04' + colors = 'r' + elif k == 'Dubois95': + hm='Dubois95' + colors = 'y' + elif k == 'WaterCloud': + hm = 'WCM' + colors = 'm' + elif k == 'I2EM': + hm = 'IEM_B' + colors = 'g' + + data_field = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk) + data_field.index = pd.to_datetime(data_field.index) + date = data_field.index + + ### b mean + + data_b = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like='coef') + mean_b = data_b.mean(axis=1) + + + + + + vv = data_field.filter(like='S1_vv').values.flatten() + vv = 10*np.log10(vv) + vh = data_field.filter(like='S1_vh').values.flatten() + vh = 10*np.log10(vh) + + time = date + time2 = np.array(time) + for jj in range(len(time)): + time2[jj] = time[jj].replace(microsecond=0).replace(second=0).replace(minute=0) + time2 = pd.to_datetime(time2) + time3 = time2.normalize() + theta = data_field.filter(like='theta').values.flatten() + theta = np.rad2deg(theta) + + + + + # lai = data_field.filter(like='LAI_insitu').values.flatten() + # lai = lai + + + + + # s2_data = pd.read_csv('/media/tweiss/Daten/data_AGU/S2_'+kkkk+pp+'.csv',header=[0],index_col=0) + # s2_data.index = pd.to_datetime(s2_data.index).floor('Min').floor('H') + # s2_lai = s2_data.loc[time2]['lai'].values.flatten() + # s2_cab = s2_data.loc[time2]['cab'].values.flatten() + # s2_cbrown = s2_data.loc[time2]['cbrown'].values.flatten() + + sm_insitu = data_field.filter(like='SM_insitu').values.flatten() + + + + + + api_data = 
pd.read_csv('/media/tweiss/Daten/data_AGU/api'+year+'_radolan.csv',header=[0],index_col=0) + api_data.index = pd.to_datetime(api_data.index) + print(kkkk+year) + api_field = api_data.filter(like=kkkk) + api_sm = api_field.loc[time2].values.flatten() + + vwc_data = pd.read_csv('/media/tweiss/Work/z_final_mni_data_2017/vwc_sentinel_2'+pp+year+'_paper3_gao.csv', header=[0,1],index_col=0) + + + vwc_data.index = pd.to_datetime(vwc_data.index) + vwc_data = vwc_data.resample('D').mean().interpolate() + vwc_data = vwc_data.loc[time2.normalize()] + + vwc_field = vwc_data.filter(like=kkkk) + vwc_sentinel_2 = vwc_field.filter(like='m_pos_ag_vwc') + + sm_insitu = data_field.filter(like='SM_insitu').values.flatten() + # pdb.set_trace() + + + # sm = smooth(sm,2) + # sm[:] = 0.25 + # sm = data_field.filter(like='SM_insitu').values.flatten() + sm = api_sm + # sm[:] = 0.2 + sm_std = data_field.filter(like='SM_insitu').values.flatten() + # ooo = np.abs(sm[1:]-sm[:-1])*20 + # sm_std[0] = ooo[-1] + # sm_std[1:] = ooo + sm_std[:] = 0.3 + # sm_std[:] = 0.5 + + b = data_field.filter(like='coef').values.flatten() + b_old = data_field.filter(like='coef').values.flatten() + b_std = data_field.filter(like='SM_insitu').values.flatten() + # b = data_field.filter(like='coef').rolling(4).mean().values.flatten() + # b[0] = b_old[0] + # b[1] = b_old[1] + # b[2] = b_old[2] + # b[3] = b_old[3] + # b = mean_b.values.flatten() + + + # # b=b-0.1 + # b_std[:] = 0.4 + # b[:] = 0.4 + # # height = data_field.filter(like='height').values.flatten() + orbits = data_field.filter(like='relativeorbit').values.flatten() + orbits95 = orbits==95 + orbits168 = orbits==168 + orbits44 = orbits==44 + orbits117 = orbits==117 + orbits44_168 = (orbits == 44) | (orbits == 168) + # b[:] = 0.4 + b[orbits95] = 0.4 + b[orbits117] = 0.4 + b[orbits44] = 0.6 + b[orbits168] = 0.6 + + + + # pdb.set_trace() + + omega = 0.027 + unc = 0.7 + + vwc_insitu = data_field.filter(like='VWC').values.flatten() + vwc = 
vwc_sentinel_2.values.flatten() + vwc[vwc < 0.01] = 0.02 + # vwc = vwc_insitu + # pdb.set_trace() + + orbits95[0:np.argmax(vwc)] = False + orbits117[0:np.argmax(vwc)] = False + orbits44[0:np.argmax(vwc)] = False + orbits168[0:np.argmax(vwc)] = False + + b[orbits95] = 0.1 + b[orbits117] = 0.1 + b[orbits44] = 0.2 + b[orbits168] = 0.2 + + + rms = 0.0115 + rms = 0.02 + + # rms = data_field.filter(like='SM_insitu').values.flatten() + # rms[:] = 0.027 + # rms_std = data_field.filter(like='SM_insitu').values.flatten() + rms_std = 0.01 + + # unc_array = np.arange(0,2,0.1) + # coef_array = np.arange(0,2,0.1) + # sm_array = np.arange(0,2,0.1) + + # hm = {} + # for r in unc_array: + # for rr in coef_array: + # for rrr in sm_array: + # unc = r + # coef_std[:] = rr + # sm_std[:] = rrr + # times, lais, coefs, sms, orbit_mask = do_one_pixel_field(data_field, vv, vh, lai, theta, time, sm, sm_std, coef, coef_std, height, orbits,unc=unc) + # rmse_vv = rmse_prediction(sm_insitu,sms) + # bias_vv = bias_prediction(sm_insitu,sms) + # ubrmse_vv = ubrmse_prediction(rmse_vv,bias_vv) + # hm[(r,rr,rrr)] = ubrmse_vv + + # pdb.set_trace() + # min(hm, key=hm.get) + # hm[min(hm, key=hm.get)] + uncs = np.arange(0.1,2,0.3) + uncs = np.array([0.9,1.9,2.5]) + b_stds = np.arange(0.1,1,0.4) + b_stds = np.array([0.5]) + sm_stds = np.arange(0.1,0.5,0.1) + sm_stds = np.array([0.1,0.2,0.3,0.7]) + # uncs = np.array([1.9]) + vv = 10 ** (vv/10) + # pdb.set_trace() + for unc in uncs: + for t in b_stds: + for tt in sm_stds: + + b_std[:] = t + sm_std[:] = tt + + # pdb.set_trace() + + # pdb.set_trace() + times, vwcs, bs, sms, srms, ps, orbit_mask = do_one_pixel_field(data_field, vv, vh, vwc, theta, time, sm, sm_std, b, b_std, omega, rms, rms_std, orbits,unc=unc) + + + uorbits = np.unique(orbits) + rms_2 = np.ones_like(orbits)*rms + srms_2 = np.ones_like(orbits) + for hh, hhh in enumerate(uorbits): + if len(srms) == 1: + srms_2[:] = srms[0] + else: + srms_2[orbits == hhh] = srms[hh] + + # pdb.set_trace() + fig, 
ax = plt.subplots(figsize=(17, 13)) + gs = gridspec.GridSpec(5, 1, height_ratios=[5, 5, 5, 5, 5]) + ax = plt.subplot(gs[0]) + + + # sm_insitu = sm_insitu[orbit_mask] + # api_sm = api_sm[orbit_mask] + # vwc = vwc[orbit_mask] + # b = b[orbit_mask] + # b_old = b_old[orbit_mask] + # vv = vv[orbit_mask] + # theta = theta[orbit_mask] + # sm = sm[orbit_mask] + + + + + ax.plot(times,sm_insitu, label='insitu') + + + + + rmse_vv = rmse_prediction(sm_insitu,api_sm) + bias_vv = bias_prediction(sm_insitu,api_sm) + ubrmse_vv = ubrmse_prediction(rmse_vv,bias_vv) + ax.plot(times,api_sm, label='prior RMSE:'+str(rmse_vv)[0:6]+' ubRMSE:'+str(ubrmse_vv)[0:6]) + + rmse_vv = rmse_prediction(sm_insitu,sms) + bias_vv = bias_prediction(sm_insitu,sms) + ubrmse_vv = ubrmse_prediction(rmse_vv,bias_vv) + ax.plot(times,sms, label='model RMSE:'+str(rmse_vv)[0:6]+' ubRMSE:'+str(ubrmse_vv)[0:6]) + plt.ylabel('Soil moisture', fontsize=18) + plt.ylim(0.05,0.45) + plt.grid(linestyle='dotted') + plt.legend() + plt.subplots_adjust(hspace=.0) + plt.setp(ax.get_xticklabels(), visible=False) + + ax1 = plt.subplot(gs[1]) + + ax1.plot(times,vwc_insitu,label='insitu') + ax1.plot(times,vwc,label='input vwc') + ax1.plot(times,vwcs,label='model vwc') + plt.ylabel('VWC', fontsize=18) + plt.ylim(0,6) + plt.grid(linestyle='dotted') + plt.legend() + plt.subplots_adjust(hspace=.0) + plt.setp(ax1.get_xticklabels(), visible=False) + + ax2 = plt.subplot(gs[2]) + + ax2.plot(times,b,label='input b') + ax2.plot(times,bs,label='model b') + ax2.plot(times,b_old,label='b calibrated') + + plt.ylabel('b', fontsize=18) + plt.ylim(0,1) + plt.grid(linestyle='dotted') + plt.legend() + plt.subplots_adjust(hspace=.0) + plt.setp(ax1.get_xticklabels(), visible=False) + + ax3 = plt.subplot(gs[4]) + + sigma_vv, vv_g, vv_c = ssrt_vwc(sms, vwc, rms, omega, bs, theta) + + ax3.plot(times,10*np.log10(vv),label='S1') + ax3.plot(times,10*np.log10(sigma_vv),label='model') + ax3.plot(times,10*np.log10(vv_g),label='ground') + 
ax3.plot(times,10*np.log10(vv_c),label='canopy') + plt.ylabel('VV [dB]', fontsize=18) + plt.ylim(-30,-5) + plt.grid(linestyle='dotted') + plt.legend() + # plt.setp(ax1.get_xticklabels(), visible=False) + + ax4 = plt.subplot(gs[3]) + + ax4.plot(times,rms_2,label='input rms') + ax4.plot(times,srms_2,label='model rms') + # ax4.plot(times,b_old,label='b calibrated') + + plt.ylabel('rms'+str(rms), fontsize=18) + plt.ylim(0.005,0.03) + plt.grid(linestyle='dotted') + plt.legend() + plt.subplots_adjust(hspace=.0) + plt.setp(ax4.get_xticklabels(), visible=False) + + ax3.set_xlabel('Date', fontsize=18) + # plt.tick_params(labelsize=17) + plt.subplots_adjust(hspace=.0) + rmse_vv = rmse_prediction(sm_insitu,sm) + bias_vv = bias_prediction(sm_insitu,sm) + ubrmse_vv = ubrmse_prediction(rmse_vv,bias_vv) + + + + ax.set_title('omega: 0.027, omega model:'+str(ps[0,0])) + + + # plt.show() + plt.savefig('/media/tweiss/Work/paper3/plot/v4/'+year[1:]+'/'+kkkk+ii+'unc:'+str(unc)+'_sm_std'+str(tt)[:3]+'.png', bbox_inches = 'tight') + # pdb.set_trace() + + + # noprior/bmean_s/oh04_unc10_apism025_'+kkkk, bbox_inches = 'tight') + plt.close() +pdb.set_trace() + + diff --git a/kaska/kaska_ssrt_tau_rms_maize.py b/kaska/kaska_ssrt_tau_rms_maize.py new file mode 100644 index 0000000..e34a6f2 --- /dev/null +++ b/kaska/kaska_ssrt_tau_rms_maize.py @@ -0,0 +1,663 @@ + +import os +import pandas as pd +import matplotlib.pyplot as plt +import matplotlib.dates as mdates +from matplotlib.dates import MonthLocator +# import matplotlib.ticker +import numpy as np +# from sense.canopy import OneLayer +# from sense.soil import Soil +# from sense import model +import scipy.stats +from scipy.optimize import minimize +import pdb +from z_helper import * +# from z_optimization import * +import datetime +from matplotlib import gridspec +import datetime +from matplotlib.lines import Line2D +import copy +import os +import osr +import gdal +import datetime +import numpy as np +from netCDF4 import Dataset +from 
scipy.ndimage import label +from utils import reproject_data +from skimage.filters import sobel +from collections import namedtuple +from scipy.optimize import minimize +from scipy.interpolate import interp1d +# from watercloudmodel import cost_function +# from watercloudmodel import cost_function2 +from scipy.ndimage.filters import gaussian_filter1d +import pdb +from z_helper import * +# from watercloudmodel import ssrt_jac_ +from watercloudmodel_vwc_rms import cost_function_vwc, ssrt_jac_vwc, ssrt_vwc + + + +def smooth(y, box_pts): + box = np.ones(box_pts)/box_pts + y_smooth = np.convolve(y, box, mode='same') + return y_smooth + + +def do_one_pixel_field(data_field, vv, vh, vwc, theta, time, sm, sm_std, b, b_std, omega, rms, rms_std, orbits, unc): + + ps = [] + vwcs = [] + bs = [] + sms = [] + srms = [] + times = [] + + uorbits = np.unique(orbits) + # uorbits = np.array([95]) + for orbit in uorbits: + # for jj in range(len(vv)): + # pdb.set_trace() + orbit_mask = orbits == orbit + # orbit_mask = (orbits == 44) | (orbits == 168) + # orbit_mask = (orbits == 95) | (orbits == 117) + # orbit_mask = (orbits == 44) | (orbits == 95) | (orbits == 117) | (orbits == 168) + # orbit_mask = (orbits == 168) + # orbit_mask = (orbits == 44) | (orbits == 95) | (orbits == 117) + ovv, ovh, ovwc, otheta, otime = vv[orbit_mask], vh[orbit_mask], vwc[orbit_mask], theta[orbit_mask], time[orbit_mask] + osm, osm_std, osb, osb_std = sm[orbit_mask], sm_std[orbit_mask], b[orbit_mask], b_std[orbit_mask] + + + ovwc_std = np.ones_like(osb)*0.05 + + # alpha = _calc_eps(osm) + # alpha = osm + # alpha_std = np.ones_like(alpha)*10 + # alpha_std = osm_std + # pdb.set_trace() + + # prior_mean = np.concatenate([[0, ]*2, osm, ovwc, osb]) + # prior_unc = np.concatenate([[10., ]*2, osm_std, ovwc_std, osb_std]) + + # xvv = np.array([rms, omega]) + + + # x0 = np.concatenate([xvv, osm, ovwc, osb]) + + # bounds = ( + # [[0.013, 0.013]] # s + # + [[0.0107, 0.0107]] # omega + # + [[0.01, 0.7]] * osb.shape[0] # 
mv + # + [[0, 7.5]] * osb.shape[0] # vwc + # + [[0.01, 0.6]] * osb.shape[0] # b + # ) + + # pdb.set_trace() + prior_mean = np.concatenate([[0, ], [rms], osm, ovwc, osb]) + prior_unc = np.concatenate([[10., ], [rms_std], osm_std, ovwc_std, osb_std]) + + xvv = np.array([omega]) + + + x0 = np.concatenate([xvv, np.array([rms]), osm, ovwc, osb]) + + bounds = ( + [[0.027, 0.027]] # omega + + [[0.005, 0.03]] # s=rms + + [[0.01, 0.7]] * osb.shape[0] # mv + + [[0, 7.5]] * osb.shape[0] # vwc + + [[0.01, 0.6]] * osb.shape[0] # b + ) + + + + data = osb + + gamma = [10, 10] + # pdb.set_trace() + retval = minimize(cost_function_vwc, + x0, + args=(ovh, ovv, otheta, gamma, prior_mean, prior_unc, unc, data), + jac=True, + bounds = bounds, + options={"disp": True}) + + posterious_rms = retval.x[1] + posterious_sm = retval.x[2 : 2+len(osb)] + posterious_vwc = retval.x[2+len(osb) : 2+2*len(osb)] + posterious_b = retval.x[2+2*len(osb) : 2+3*len(osb)] + + srms.append(posterious_rms) + sms.append(posterious_sm) + vwcs.append(posterious_vwc) + bs.append(posterious_b) + times.append(otime) + ps.append(retval.x[:1]) + + order = np.argsort(np.hstack(times)) + times = np.hstack(times )[order] + vwcs = np.hstack(vwcs )[order] + bs = np.hstack(bs )[order] + sms = np.hstack(sms )[order].real + # pdb.set_trace() + # srms = np.hstack(srms)[order] + return times, vwcs, bs, sms, np.array(srms), np.array(ps), orbit_mask + + +def _simple_ew(): + """ + eq. 4.69 + simplistic approach with T=23°C, bulk density = 1.7 g/cm3 + """ + f0 = 18.64 # relaxation frequency [GHz] + f = 5.405 + hlp = f/f0 + e1 = 4.9 + (74.1)/(1.+hlp**2.) + # e2 =(74.1*hlp)/(1.+hlp**2.) + 6.46 * self.sigma/self.f + # return e1 + 1.j * e2 + return e1 + +def _calc_eps(mv): + """ + calculate dielectric permittivity + Eq. 
4.66 (Ulaby et al., 2014) + """ + clay = 0.0738 + sand = 0.2408 + bulk = 1.45 + alpha = 0.65 + beta1 = 1.27-0.519*sand - 0.152*clay + beta2 = 2.06 - 0.928*sand -0.255*clay + sigma = -1.645 + 1.939*bulk - 2.256*sand + 1.594*clay + + + e1 = (1.+0.66*bulk+mv**beta1*_simple_ew()**alpha - mv)**(1./alpha) + # e2 = np.imag(self.ew)*self.mv**self.beta2 + # return e1 + 1.j*e2 + return e1 + +# def quad_approx_solver(alphas): +# x = np.arange(0.01, 0.5, 0.01) +# p = np.polyfit(x, _calc_eps(x), 2) +# # 2nd order polynomial +# #solve +# solutions = [np.roots([p[0], p[1], p[2]-aa]) for aa in alphas] +# return solutions + +def find_nearest(array, value): + array = np.asarray(array) + idx = (np.abs(array - value)).argmin() + return array[idx], idx + + + + +### Data preparation df_agro!!!! ### +#----------------------------------------------------------------- +# storage information +path = '/media/tweiss/Work/z_final_mni_data_2017' +file_name = 'in_situ_s1_buffer_100' # theta needs to be changed to for norm multi +extension = '.csv' + +path_agro = '/media/nas_data/2017_MNI_campaign/field_data/meteodata/agrarmeteorological_station' +path_agro = '/media/tweiss/Work/Paper/in_progress/RT_model_comparison/images' +file_name_agro = 'Daily_Freising' +extension_agro = '.csv' + +field = '508_high' +pol = 'vv' + +df, df_agro, field_data, field_data_orbit, theta_field, sm_field, height_field, lai_field, vwc_field, pol_field, vv_field, vh_field, relativeorbit, vwcpro_field = read_data(path, file_name, extension, field, path_agro, file_name_agro, extension_agro, pol) + +aggregation = ['','_buffer_30','_buffer_50','_buffer_100','_Field_buffer_30'] +pre_processing = ['multi', 'norm_multi'] +aggregation = ['_buffer_50','_Field_buffer_30'] +pre_processing = ['multi'] +aggregation = ['_buffer_100'] +# aggregation = ['_Field_buffer_30'] +surface_list = ['Oh92', 'Oh04', 'Dubois95', 'WaterCloud', 'I2EM'] +canopy_list = ['turbid_isotropic', 'water_cloud'] + +surface_list = ['Oh92', 'I2EM'] +canopy_list 
= ['turbid_isotropic'] + +surface_list = ['Oh04'] +# surface_list = ['Oh92'] +# canopy_list = ['water_cloud'] +field = ['508_high'] +field = ['508_low'] +field = ['508_med'] +field = ['301_high'] +field = ['301_low'] +field = ['301_med'] +field = ['542_high'] +field = ['542_low'] +field = ['542_med'] + +field = ['508_high','508_low','508_med','301_high','301_low','301_med','542_high','542_low','542_med'] + +### option for time invariant or variant calibration of parameter +#------------------------------- +opt_mod = ['time_variant'] +#--------------------------- + +years = ['_2017','_2018'] +years = ['_2017'] +numbers = [1,3,5,7,9] +numbers = [1] + +for zzz in numbers: + + for p in pre_processing: + + for pp in aggregation: + + for year in years: + if year == '_2017': + field_list = ['508_high','508_low','508_med','301_high','301_low','301_med','542_high','542_low','542_med'] + field_list = ['319_high','319_low','319_med','515_high','515_low','515_med'] + elif year == '_2018': + field_list = ['525_high','525_low','525_med','317_high','317_low','317_med'] + else: + pass + + # versions = ['everything','','44_117','95_168','44_168','117_95','44_95','117_168','44_117_95','44_117_168','44_95_168','117_95_168'] + # ver = ['','','44','95','44','117','44','117','44','44','44','117'] + # ver2 = ['','','117','168','168','95','95','168','117','117','95','95'] + # ver3 = ['','','','','','','','','95','168','168','168'] + + # versions = ['','everything'] + # ver = ['',''] + # ver2 = ['',''] + # ver3 = ['',''] + + + versions = ['everything'] + ver = [''] + ver2 = [''] + ver3 = [''] + + # versions = ['44_168'] + # ver = ['44'] + # ver2 = ['168'] + # ver3 = [''] + + for i, ii in enumerate(versions): + + if ii == 'everything': + orbit_list = [None] + orbit1=None + orbit2=None + orbit3=None + orbit4=None + plot_output_path = '/media/tweiss/Work/paper3/'+year[1:]+'/z_dense_s1_time_series_n'+str(zzz)+p+pp+'_all'+'/' + csv_output_path = plot_output_path+'csv/None_' + elif ii == '': + 
orbit_list = [44,117,95,168] + orbit2=None + orbit3=None + orbit4=None + plot_output_path = '/media/tweiss/Work/paper3/'+year[1:]+'/z_dense_s1_time_series_n'+str(zzz)+p+pp+'/' + csv_output_path = plot_output_path+'csv/' + else: + plot_output_path = '/media/tweiss/Work/paper3/'+year[1:]+'/z_dense_s1_time_series_n'+str(zzz)+p+pp+'_'+ii+'/' + csv_output_path = plot_output_path+'csv/'+ver[i]+'_' + orbit_list = [int(ver[i])] + orbit2 = int(ver2[i]) + if ver3[i] == '': + orbit3 = None + else: + orbit3 = int(ver3[i]) + + + data = pd.read_csv(csv_output_path+'all'+pp+'.csv',header=[0,1,2,3,4,5],index_col=0) + + + + + + + # fig, ax = plt.subplots(figsize=(17, 13)) + # gs = gridspec.GridSpec(5, 1, height_ratios=[14, 3, 3, 3, 3]) + # ax = plt.subplot(gs[0]) + + # plt.ylabel('Backscatter [dB]', fontsize=18) + # plt.xlabel('Date', fontsize=18) + # plt.tick_params(labelsize=17) + + # ax.set_ylim([-21.5,-8.5]) + + + # colormaps = ['Greens', 'Purples', 'Blues', 'Oranges', 'Reds', 'Greys', 'pink', 'bone', 'Blues', 'Blues', 'Blues'] + # r = 0 + + # colormap = plt.get_cmap(colormaps[r]) + # colors = [colormap(rr) for rr in np.linspace(0.35, 1., 3)] + + for kkk in opt_mod: + for kkkk in field_list: + for k in surface_list: + for kk in canopy_list: + + if k == 'Oh92': + hm = 'Oh92' + colors = 'b' + elif k == 'Oh04': + hm = 'Oh04' + colors = 'r' + elif k == 'Dubois95': + hm='Dubois95' + colors = 'y' + elif k == 'WaterCloud': + hm = 'WCM' + colors = 'm' + elif k == 'I2EM': + hm = 'IEM_B' + colors = 'g' + + data_field = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk) + data_field.index = pd.to_datetime(data_field.index) + date = data_field.index + + ### b mean + + data_b = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like='coef') + mean_b = data_b.mean(axis=1) + + + + + + vv = data_field.filter(like='S1_vv').values.flatten() + vv = 10*np.log10(vv) + vh = data_field.filter(like='S1_vh').values.flatten() + vh = 10*np.log10(vh) + + time = date + time2 = 
np.array(time) + for jj in range(len(time)): + time2[jj] = time[jj].replace(microsecond=0).replace(second=0).replace(minute=0) + time2 = pd.to_datetime(time2) + time3 = time2.normalize() + theta = data_field.filter(like='theta').values.flatten() + theta = np.rad2deg(theta) + + + + + # lai = data_field.filter(like='LAI_insitu').values.flatten() + # lai = lai + + + + + # s2_data = pd.read_csv('/media/tweiss/Daten/data_AGU/S2_'+kkkk+pp+'.csv',header=[0],index_col=0) + # s2_data.index = pd.to_datetime(s2_data.index).floor('Min').floor('H') + # s2_lai = s2_data.loc[time2]['lai'].values.flatten() + # s2_cab = s2_data.loc[time2]['cab'].values.flatten() + # s2_cbrown = s2_data.loc[time2]['cbrown'].values.flatten() + + sm_insitu = data_field.filter(like='SM_insitu').values.flatten() + + + + + + api_data = pd.read_csv('/media/tweiss/Daten/data_AGU/api'+year+'_radolan.csv',header=[0],index_col=0) + api_data.index = pd.to_datetime(api_data.index) + print(kkkk+year) + api_field = api_data.filter(like=kkkk) + api_sm = api_field.loc[time2].values.flatten() + + vwc_data = pd.read_csv('/media/tweiss/Work/z_final_mni_data_2017/vwc_sentinel_2'+pp+year+'_paper3_gao.csv', header=[0,1],index_col=0) + + + vwc_data.index = pd.to_datetime(vwc_data.index) + vwc_data = vwc_data.resample('D').mean().interpolate() + vwc_data = vwc_data.loc[time2.normalize()] + + vwc_field = vwc_data.filter(like=kkkk) + vwc_sentinel_2 = vwc_field.filter(like='m_pos_ag_vwc') + + sm_insitu = data_field.filter(like='SM_insitu').values.flatten() + # pdb.set_trace() + + + # sm = smooth(sm,2) + # sm[:] = 0.25 + # sm = data_field.filter(like='SM_insitu').values.flatten() + sm = api_sm + # sm[:] = 0.2 + sm_std = data_field.filter(like='SM_insitu').values.flatten() + # ooo = np.abs(sm[1:]-sm[:-1])*20 + # sm_std[0] = ooo[-1] + # sm_std[1:] = ooo + sm_std[:] = 0.3 + # sm_std[:] = 0.5 + + b = data_field.filter(like='coef').values.flatten() + b_old = data_field.filter(like='coef').values.flatten() + b_std = 
data_field.filter(like='SM_insitu').values.flatten() + # b = data_field.filter(like='coef').rolling(4).mean().values.flatten() + # b[0] = b_old[0] + # b[1] = b_old[1] + # b[2] = b_old[2] + # b[3] = b_old[3] + # b = mean_b.values.flatten() + + + # # b=b-0.1 + # b_std[:] = 0.4 + # b[:] = 0.4 + # # height = data_field.filter(like='height').values.flatten() + orbits = data_field.filter(like='relativeorbit').values.flatten() + orbits95 = orbits==95 + orbits168 = orbits==168 + orbits44 = orbits==44 + orbits117 = orbits==117 + orbits44_168 = (orbits == 44) | (orbits == 168) + # b[:] = 0.4 + b[orbits95] = 0.4 + b[orbits117] = 0.4 + b[orbits44] = 0.6 + b[orbits168] = 0.6 + + + + # pdb.set_trace() + + omega = 0.027 + unc = 0.7 + + vwc_insitu = data_field.filter(like='VWC').values.flatten() + vwc = vwc_sentinel_2.values.flatten() + vwc[vwc < 0.01] = 0.02 + # vwc = vwc_insitu + # pdb.set_trace() + + orbits95[0:np.argmax(vwc)] = False + orbits117[0:np.argmax(vwc)] = False + orbits44[0:np.argmax(vwc)] = False + orbits168[0:np.argmax(vwc)] = False + + b[orbits95] = 0.1 + b[orbits117] = 0.1 + b[orbits44] = 0.2 + b[orbits168] = 0.2 + + + rms = 0.0115 + rms = 0.02 + + # rms = data_field.filter(like='SM_insitu').values.flatten() + # rms[:] = 0.027 + # rms_std = data_field.filter(like='SM_insitu').values.flatten() + rms_std = 0.01 + + # unc_array = np.arange(0,2,0.1) + # coef_array = np.arange(0,2,0.1) + # sm_array = np.arange(0,2,0.1) + + # hm = {} + # for r in unc_array: + # for rr in coef_array: + # for rrr in sm_array: + # unc = r + # coef_std[:] = rr + # sm_std[:] = rrr + # times, lais, coefs, sms, orbit_mask = do_one_pixel_field(data_field, vv, vh, lai, theta, time, sm, sm_std, coef, coef_std, height, orbits,unc=unc) + # rmse_vv = rmse_prediction(sm_insitu,sms) + # bias_vv = bias_prediction(sm_insitu,sms) + # ubrmse_vv = ubrmse_prediction(rmse_vv,bias_vv) + # hm[(r,rr,rrr)] = ubrmse_vv + + # pdb.set_trace() + # min(hm, key=hm.get) + # hm[min(hm, key=hm.get)] + uncs = 
np.arange(0.1,2,0.3) + uncs = np.array([0.9,1.9,2.5]) + b_stds = np.arange(0.1,1,0.4) + b_stds = np.array([0.5]) + sm_stds = np.arange(0.1,0.5,0.1) + sm_stds = np.array([0.1,0.2,0.3,0.7]) + # uncs = np.array([1.9]) + vv = 10 ** (vv/10) + # pdb.set_trace() + for unc in uncs: + for t in b_stds: + for tt in sm_stds: + + b_std[:] = t + sm_std[:] = tt + + # pdb.set_trace() + + pdb.set_trace() + times, vwcs, bs, sms, srms, ps, orbit_mask = do_one_pixel_field(data_field, vv, vh, vwc, theta, time, sm, sm_std, b, b_std, omega, rms, rms_std, orbits,unc=unc) + + + uorbits = np.unique(orbits) + rms_2 = np.ones_like(orbits)*rms + srms_2 = np.ones_like(orbits) + for hh, hhh in enumerate(uorbits): + if len(srms) == 1: + srms_2[:] = srms[0] + else: + srms_2[orbits == hhh] = srms[hh] + + # pdb.set_trace() + fig, ax = plt.subplots(figsize=(17, 13)) + gs = gridspec.GridSpec(5, 1, height_ratios=[5, 5, 5, 5, 5]) + ax = plt.subplot(gs[0]) + + + # sm_insitu = sm_insitu[orbit_mask] + # api_sm = api_sm[orbit_mask] + # vwc = vwc[orbit_mask] + # b = b[orbit_mask] + # b_old = b_old[orbit_mask] + # vv = vv[orbit_mask] + # theta = theta[orbit_mask] + # sm = sm[orbit_mask] + + + + + ax.plot(times,sm_insitu, label='insitu') + + + + + rmse_vv = rmse_prediction(sm_insitu,api_sm) + bias_vv = bias_prediction(sm_insitu,api_sm) + ubrmse_vv = ubrmse_prediction(rmse_vv,bias_vv) + ax.plot(times,api_sm, label='prior RMSE:'+str(rmse_vv)[0:6]+' ubRMSE:'+str(ubrmse_vv)[0:6]) + + rmse_vv = rmse_prediction(sm_insitu,sms) + bias_vv = bias_prediction(sm_insitu,sms) + ubrmse_vv = ubrmse_prediction(rmse_vv,bias_vv) + ax.plot(times,sms, label='model RMSE:'+str(rmse_vv)[0:6]+' ubRMSE:'+str(ubrmse_vv)[0:6]) + plt.ylabel('Soil moisture', fontsize=18) + plt.ylim(0.05,0.45) + plt.grid(linestyle='dotted') + plt.legend() + plt.subplots_adjust(hspace=.0) + plt.setp(ax.get_xticklabels(), visible=False) + + ax1 = plt.subplot(gs[1]) + + ax1.plot(times,vwc_insitu,label='insitu') + ax1.plot(times,vwc,label='input vwc') + 
ax1.plot(times,vwcs,label='model vwc') + plt.ylabel('VWC', fontsize=18) + plt.ylim(0,6) + plt.grid(linestyle='dotted') + plt.legend() + plt.subplots_adjust(hspace=.0) + plt.setp(ax1.get_xticklabels(), visible=False) + + ax2 = plt.subplot(gs[2]) + + ax2.plot(times,b,label='input b') + ax2.plot(times,bs,label='model b') + ax2.plot(times,b_old,label='b calibrated') + + plt.ylabel('b', fontsize=18) + plt.ylim(0,1) + plt.grid(linestyle='dotted') + plt.legend() + plt.subplots_adjust(hspace=.0) + plt.setp(ax1.get_xticklabels(), visible=False) + + ax3 = plt.subplot(gs[4]) + + sigma_vv, vv_g, vv_c = ssrt_vwc(sms, vwc, rms, omega, bs, theta) + + ax3.plot(times,10*np.log10(vv),label='S1') + ax3.plot(times,10*np.log10(sigma_vv),label='model') + ax3.plot(times,10*np.log10(vv_g),label='ground') + ax3.plot(times,10*np.log10(vv_c),label='canopy') + plt.ylabel('VV [dB]', fontsize=18) + plt.ylim(-30,-5) + plt.grid(linestyle='dotted') + plt.legend() + # plt.setp(ax1.get_xticklabels(), visible=False) + + ax4 = plt.subplot(gs[3]) + + ax4.plot(times,rms_2,label='input rms') + ax4.plot(times,srms_2,label='model rms') + # ax4.plot(times,b_old,label='b calibrated') + + plt.ylabel('rms'+str(rms), fontsize=18) + plt.ylim(0.005,0.03) + plt.grid(linestyle='dotted') + plt.legend() + plt.subplots_adjust(hspace=.0) + plt.setp(ax4.get_xticklabels(), visible=False) + + ax3.set_xlabel('Date', fontsize=18) + # plt.tick_params(labelsize=17) + plt.subplots_adjust(hspace=.0) + rmse_vv = rmse_prediction(sm_insitu,sm) + bias_vv = bias_prediction(sm_insitu,sm) + ubrmse_vv = ubrmse_prediction(rmse_vv,bias_vv) + + + + ax.set_title('omega: 0.027, omega model:'+str(ps[0,0])) + + + # plt.show() + plt.savefig('/media/tweiss/Work/paper3/plot/maize/'+year[1:]+'/'+kkkk+ii+'unc:'+str(unc)+'_sm_std'+str(tt)[:3]+'.png', bbox_inches = 'tight') + # pdb.set_trace() + + + # noprior/bmean_s/oh04_unc10_apism025_'+kkkk, bbox_inches = 'tight') + plt.close() +pdb.set_trace() + + diff --git a/kaska/paper3_mask.py 
b/kaska/paper3_mask.py new file mode 100644 index 0000000..213286f --- /dev/null +++ b/kaska/paper3_mask.py @@ -0,0 +1,62 @@ + +import subprocess + +# Rasterize Stadtgüter Fruchtfelder 2017 +# 1 = Mais +# 2 = Winterweizen +# 3 = Triticale +# 4 = Rest +subprocess.call('gdal_rasterize -at -of GTiff -a field -te 694748 5345900 703600 5354600 -tr 10 10 -ot Byte -co \"COMPRESS=DEFLATE\" '+'/media/tweiss/Work/Paper3_down/GIS/sgm/sgm2017.shp'+' /media/tweiss/Work/Paper3_down/GIS/sgm2017_frucht.tif', shell=True) + +# Rasterize Corine Land Cover 5 ha version 2018 +# 211 = nicht bewässertes Ackerland +# 231 = Wiesen und Weiden +# 0 = Rest +subprocess.call('gdal_rasterize -at -of GTiff -a clc18 -te 694748 5345900 703600 5354600 -tr 10 10 -ot Byte -co \"COMPRESS=DEFLATE\" '+'/media/tweiss/Work/Paper3_down/GIS/clc5_2018.utm32s.shape/clc5/clc5_class2xx.shp'+' /media/tweiss/Work/Paper3_down/GIS/clc_class2.tif', shell=True) + +# Rasterize 2017 fields ESU_Field_buffer_30.shp +# Field 301 = 87 (triti) +# Field 319 = 67 (maize) +# Field 542 = 8 (triti) +# Field 508 = 27 (wheat) +# Field 515 = 4 (maize) +subprocess.call('gdal_rasterize -at -of GTiff -a ID -te 694748 5345900 703600 5354600 -tr 10 10 -ot Byte -co \"COMPRESS=DEFLATE\" '+'/media/tweiss/Work/z_final_mni_data_2017/ESU_Field_buffer_30.shp'+' /media/tweiss/Work/Paper3_down/GIS/2017_ESU_Field_buffer_30.tif', shell=True) + +# Rasterize 2018 fields ESU_2018_Field_buffer_30.shp +# Field 317 = 65 (triti) +# Field 410 = 113 (maize) +# Field 525 = 30 (wheat) +# Field 508 = 27 (maize) +subprocess.call('gdal_rasterize -at -of GTiff -a ID -te 694748 5345900 703600 5354600 -tr 10 10 -ot Byte -co \"COMPRESS=DEFLATE\" '+'/media/tweiss/Work/z_final_mni_data_2017/ESU_2018_Field_buffer_30.shp'+' /media/tweiss/Work/Paper3_down/GIS/2018_ESU_Field_buffer_30.tif', shell=True) + + +# Rasterize Stadtgüter Fruchtfelder 2017 
+#if(Frucht_lan='Wintertriticale',1,if(Frucht_lan='Winterweizen',2,if(Frucht_lan='Wintergerste',3,if(Frucht_lan='Wiesengras',4,if(Frucht_lan='Kleegras',5,if(Frucht_lan='Ackergras',6,if(Frucht_lan='Weidegras',7,if(Frucht_lan='Mais',8,if(frucht_lan='Sommerhafer',9,if(Frucht_lan='Luzerne',10,if(Frucht_lan='Feldgemuese',11,if(Frucht_lan='Ackerbohnen',12,13)))))))))))) +subprocess.call('gdal_rasterize -at -of GTiff -a field_id -te 694748 5345900 703600 5354600 -tr 20 20 -ot Byte -co \"COMPRESS=DEFLATE\" '+'/media/tweiss/Work/Paper3_down/GIS/2017_field_line.shp'+' /media/tweiss/Work/Paper3_down/GIS/sgm2017_line.tif', shell=True) + +subprocess.call('gdalwarp -of GTiff -te 694748 5345900 703600 5354600 -tr 10 10 -ot Byte -co \"COMPRESS=DEFLATE\" '+'/media/tweiss/Work/Paper3_down/GIS/sgm2017_line.tif'+' /media/tweiss/Work/Paper3_down/GIS/sgm2017_line2.tif', shell=True) + +# Rasterize Field boundaries line +subprocess.call('gdal_rasterize -at -of GTiff -a ID -te 694748 5345900 703600 5354600 -tr 20 20 -ot Byte -co \"COMPRESS=DEFLATE\" '+'/media/tweiss/Work/Paper3_down/GIS/2017_line.shp'+' /media/tweiss/Work/Paper3_down/GIS/2017_line.tif', shell=True) + +subprocess.call('gdalwarp -of GTiff -te 694748 5345900 703600 5354600 -tr 10 10 -ot Byte -co \"COMPRESS=DEFLATE\" '+'/media/tweiss/Work/Paper3_down/GIS/2017_line.tif'+' /media/tweiss/Work/Paper3_down/GIS/2017_line2.tif', shell=True) + +subprocess.call('gdal_rasterize -at -of GTiff -a ID -te 694748 5345900 703600 5354600 -tr 20 20 -ot Byte -co \"COMPRESS=DEFLATE\" '+'/media/tweiss/Work/Paper3_down/GIS/2018_line.shp'+' /media/tweiss/Work/Paper3_down/GIS/2018_line.tif', shell=True) + +subprocess.call('gdalwarp -of GTiff -te 694748 5345900 703600 5354600 -tr 10 10 -ot Byte -co \"COMPRESS=DEFLATE\" '+'/media/tweiss/Work/Paper3_down/GIS/2018_line.tif'+' /media/tweiss/Work/Paper3_down/GIS/2018_line2.tif', shell=True) + + +# Rasterize ESU buffer 100 m 2017 +subprocess.call('gdal_rasterize -at -of GTiff -a FID_ -te 694748 5345900 
703600 5354600 -tr 10 10 -ot Byte -co \"COMPRESS=DEFLATE\" '+'/media/tweiss/Work/z_final_mni_data_2017/ESU_buffer_100.shp'+' /media/tweiss/Work/Paper3_down/GIS/2017_ESU_buffer_100.tif', shell=True) + +subprocess.call('gdal_rasterize -at -of GTiff -a ID -te 694748 5345900 703600 5354600 -tr 10 10 -ot Byte -co \"COMPRESS=DEFLATE\" '+'/media/tweiss/Work/z_final_mni_data_2017/ESU_2018_buffer_100.shp'+' /media/tweiss/Work/Paper3_down/GIS/2018_ESU_buffer_100.tif', shell=True) + +# buffer 30 +subprocess.call('gdal_rasterize -at -of GTiff -a FID_ -te 694748 5345900 703600 5354600 -tr 10 10 -ot Byte -co \"COMPRESS=DEFLATE\" '+'/media/tweiss/Work/z_final_mni_data_2017/ESU_buffer_30.shp'+' /media/tweiss/Work/Paper3_down/GIS/2017_ESU_buffer_30.tif', shell=True) + +# esu 2017 +subprocess.call('gdal_rasterize -at -of GTiff -a FID_ -te 694748 5345900 703600 5354600 -tr 10 10 -ot Byte -co \"COMPRESS=DEFLATE\" '+'/media/tweiss/Work/z_final_mni_data_2017/ESU.shp'+' /media/tweiss/Work/Paper3_down/GIS/2017_ESU.tif', shell=True) + +# agvolution +subprocess.call('gdal_rasterize -at -of GTiff -a fid -te 758967 5937147 768056 5945451 -tr 10 10 -ot Byte -co \"COMPRESS=DEFLATE\" '+'/media/tweiss/data/Arbeit_einordnen/agvolution/sensor_data_agvolution/farm_one/test.shp'+' /media/AUF/userdata/agvolution/fields_1.tif', shell=True) + diff --git a/kaska/paper3_plot_esu.py b/kaska/paper3_plot_esu.py new file mode 100644 index 0000000..7af1d9d --- /dev/null +++ b/kaska/paper3_plot_esu.py @@ -0,0 +1,697 @@ +import numpy as np +import pdb +from osgeo import gdal +import matplotlib.pyplot as plt +from z_helper import * +import datetime +import seaborn as sns +from matplotlib.colors import ListedColormap +from watercloudmodel_vwc_rms import cost_function_vwc, ssrt_jac_vwc, ssrt_vwc +from matplotlib import gridspec +from pandas.plotting import register_matplotlib_converters +import matplotlib.dates as mdates + +class plot_esu(object): + """Plotting scatterplots""" + + def __init__(self, years, esus, 
passes, esu_size_tiff): + self.esus = esus + self.years = years + self.passes = passes + + if not os.path.exists('/media/tweiss/Work/Paper3_down/'+passes+'/esu'): + os.makedirs('/media/tweiss/Work/Paper3_down/'+passes+'/esu') + if not os.path.exists('/media/tweiss/Work/Paper3_down/'+passes+'/boxplot'): + os.makedirs('/media/tweiss/Work/Paper3_down/'+passes+'/boxplot') + + if not os.path.exists('/media/tweiss/Work/Paper3_down/'+passes+'/spatial'): + os.makedirs('/media/tweiss/Work/Paper3_down/'+passes+'/spatial') + + self.plot(years, esus, passes, esu_size_tiff) + + def extraction(self,var,state_mask,mask_time): + + xxx = np.copy(var) + xxx[:,~state_mask]=np.nan + xxx = xxx[mask_time,:] + mean = np.nanmean(xxx,axis=(1,2)) + return mean + + def extraction2(self,var,state_mask,mask_time): + + xxx = np.copy(var) + xxx[:,~state_mask]=np.nan + xxx = xxx[mask_time,:] + return xxx + + def plot(self, years, esus, passes, esu_size_tiff): + """ + years = ['2017', '2018'] + + esus = ['high', 'med', 'low'] + + esu_size_tiff = '_ESU_buffer_100.tif' + """ + + fig, ax = plt.subplots(figsize=(20, 15)) + + insitu_all_years = [] + mean_all_years = [] + mean_all_bias_years = [] + for year in years: + var_sm = np.load('/media/tweiss/Work/Paper3_down/'+passes+'/'+year+'_multi_'+'sm'+'.npy') + var_sm_api = np.load('/media/tweiss/Work/Paper3_down/'+passes+'/'+year+'_multi_'+'input_sm_api'+'.npy') + var_vwc_input = np.load('/media/tweiss/Work/Paper3_down/'+passes+'/'+year+'_multi_'+'input_vwc'+'.npy') + var_vwc_output = np.load('/media/tweiss/Work/Paper3_down/'+passes+'/'+year+'_multi_'+'vwc'+'.npy') + var_b = np.load('/media/tweiss/Work/Paper3_down/'+passes+'/'+year+'_multi_'+'b'+'.npy') + var_rms = np.load('/media/tweiss/Work/Paper3_down/'+passes+'/'+year+'_multi_'+'rms'+'.npy') + var_vv_input = np.load('/media/tweiss/Work/Paper3_down/'+passes+'/'+year+'_multi_'+'input_vv'+'.npy') + var_theta_input = 
np.load('/media/tweiss/Work/Paper3_down/'+passes+'/'+year+'_multi_'+'input_theta'+'.npy') + + time = np.load('/media/tweiss/Work/Paper3_down/'+passes+'/'+year+'_multi_times.npy',allow_pickle=True) + + file = '/media/tweiss/Work/z_final_mni_data_2017/new_in_situ_s1multi_buffer_100_'+year+'_paper3.csv' + + data = pd.read_csv(file,header=[0,1],index_col=1) + + if year == '2017': + fields = ['301','319','542','508','515'] + # fields = ['301','319','508'] + if year == '2018': + fields = ['317','410','525','508'] + + yy = 0.1 + insitu_all = [] + mean_all = [] + mean_all_bias = [] + rf = [] + bf = [] + uf = [] + + meteo = pd.read_csv('/media/tweiss/Work/Paper3_down/GIS/Eichenried_0101'+year+'_3112'+year+'_hourly.csv', sep=';', decimal=',') + if year == '2017': + meteo2 = meteo.stack().str.replace(',','.').unstack() + meteo2['SUM']= pd.to_numeric(meteo2['SUM_NN050'],errors='coerce') + meteo2['date'] = pd.to_datetime(meteo2['Tag']+' '+meteo2['Stunde']) + s = meteo2.resample('d', on='date')['SUM'].sum() + + elif year == '2018': + meteo['date'] = pd.to_datetime(meteo['Tag']+' '+meteo['Stunde']) + s = meteo.resample('d', on='date')['SUM_NN050'].sum() + else: + s = None + + fig, ax = plt.subplots(figsize=(20, 15)) + + self.plot_spatial(var_sm,year,passes,time,s=s,par='sm') + # self.plot_spatial2(var_sm,year,passes,time,s=s,par='sm') + self.plot_spatial(var_sm_api,year,passes,time,s=s,par='sm_api') + + self.boxplot_sm_area(var_sm,year,time,passes,meteo=s) + + for field in fields: + + insitu_field = [] + mean_field = [] + for esu in esus: + + g = gdal.Open('/media/tweiss/Work/Paper3_down/GIS/'+year+esu_size_tiff) + state_mask = g.ReadAsArray().astype(np.int) + + state_mask = self.state_mask(year,field,esu,state_mask) + data_field = data.filter(like=field).filter(like=esu) + if year == '2018': + if (field == '410') or (field == '508'): + data_field = data_field.filter([(field+'_'+esu,'SM')]).dropna() + else: + data_field = 
data_field.filter([(field+'_'+esu,'SM'),(field+'_'+esu,'VWC')]).dropna() + data_field.index = pd.to_datetime(data_field.index) + # data_field = data_field.dropna() + date = data_field.index + + time2 = pd.to_datetime(time) + time2 = time2.strftime('%Y-%m-%d') + date2 = date.strftime('%Y-%m-%d') + mask_time = np.isin(time2,date2) + times = pd.to_datetime(date2) + + sm = self.extraction(var_sm,state_mask,mask_time) + sm_api = self.extraction(var_sm_api,state_mask,mask_time) + vwc_input = self.extraction(var_vwc_input,state_mask,mask_time) + vwc_output = self.extraction(var_vwc_output,state_mask,mask_time) + b = self.extraction(var_b,state_mask,mask_time) + rms = self.extraction(var_rms,state_mask,mask_time) + vv = self.extraction(var_vv_input,state_mask,mask_time) + theta = self.extraction(var_theta_input,state_mask,mask_time) + + sm_insitu = data_field.filter(like='SM').values.flatten() + if year == '2018': + if (field == '410') or (field == '508'): + pass + else: + vwc_insitu = data_field.filter(like='VWC').values.flatten() + + + fig, ax = plt.subplots(figsize=(17, 13)) + gs = gridspec.GridSpec(5, 1, height_ratios=[5, 5, 5, 5, 5]) + ax = plt.subplot(gs[0]) + + ax.plot(times,sm_insitu, label='insitu') + + rmse_vv = rmse_prediction(sm_insitu,sm_api) + bias_vv = bias_prediction(sm_insitu,sm_api) + ubrmse_vv = ubrmse_prediction(rmse_vv,bias_vv) + + slope, intercept, r_value, p_value, std_err= linregress(sm_insitu,sm) + ax.plot(times,sm_api, label='prior RMSE:'+str(rmse_vv)[0:6]+' ubRMSE:'+str(ubrmse_vv)[0:6]) + + rmse_vv = rmse_prediction(sm_insitu,sm) + bias_vv = bias_prediction(sm_insitu,sm) + ubrmse_vv = ubrmse_prediction(rmse_vv,bias_vv) + ax.plot(times,sm, label='model RMSE:'+str(rmse_vv)[0:6]+' ubRMSE:'+str(ubrmse_vv)[0:6] + ' R2:'+str(r_value)[0:4]) + plt.ylabel('Soil moisture', fontsize=18) + plt.ylim(0.05,0.45) + plt.grid(linestyle='dotted') + plt.legend() + plt.subplots_adjust(hspace=.0) + plt.setp(ax.get_xticklabels(), visible=False) + + ax1 = 
plt.subplot(gs[1]) + + if year == '2018': + if (field == '410') or (field == '508'): + pass + else: + ax1.plot(times,vwc_insitu,label='insitu') + ax1.plot(times,vwc_input,label='input vwc') + ax1.plot(times,vwc_output,label='model vwc') + plt.ylabel('VWC', fontsize=18) + plt.ylim(0,6) + plt.grid(linestyle='dotted') + plt.legend() + plt.subplots_adjust(hspace=.0) + plt.setp(ax1.get_xticklabels(), visible=False) + + ax2 = plt.subplot(gs[2]) + + ax2.plot(times,b,label='model b') + + plt.ylabel('b', fontsize=18) + plt.ylim(0,1) + plt.grid(linestyle='dotted') + plt.legend() + plt.subplots_adjust(hspace=.0) + plt.setp(ax1.get_xticklabels(), visible=False) + + ax3 = plt.subplot(gs[4]) + omega = 0.027 + sigma_vv, vv_g, vv_c = ssrt_vwc(sm, vwc_output, rms, omega, b, theta) + + ax3.plot(times,10*np.log10(vv),label='S1') + ax3.plot(times,10*np.log10(sigma_vv),label='model') + ax3.plot(times,10*np.log10(vv_g),label='ground') + ax3.plot(times,10*np.log10(vv_c),label='canopy') + plt.ylabel('VV [dB]', fontsize=18) + plt.ylim(-30,-5) + plt.grid(linestyle='dotted') + plt.legend() + # plt.setp(ax1.get_xticklabels(), visible=False) + + ax4 = plt.subplot(gs[3]) + + ax4.plot(times,rms,label='model rms') + # ax4.plot(times,b_old,label='b calibrated') + + plt.ylabel('rms', fontsize=18) + plt.ylim(0.005,0.03) + plt.grid(linestyle='dotted') + plt.legend() + plt.subplots_adjust(hspace=.0) + plt.setp(ax4.get_xticklabels(), visible=False) + + ax3.set_xlabel('Date', fontsize=18) + # plt.tick_params(labelsize=17) + plt.subplots_adjust(hspace=.0) + rmse_vv = rmse_prediction(sm_insitu,sm) + bias_vv = bias_prediction(sm_insitu,sm) + ubrmse_vv = ubrmse_prediction(rmse_vv,bias_vv) + + slope, intercept, r_value, p_value, std_err= linregress(sm_insitu,sm) + + plt.savefig('/media/tweiss/Work/Paper3_down/'+passes+'/esu/'+year+'_'+field+'_'+esu+'.png', bbox_inches = 'tight') + + plt.close() + + + vwc_insitu = data_field.filter(like='VWC').values.flatten() + + if year == '2018': + if (field == '410') or 
(field == '508'): + pass + else: + self.plot_vwc_b(times, vwc_output, vwc_insitu, b, year, field, esu, passes) + else: + self.plot_vwc_b(times, vwc_output, vwc_insitu, b, year, field, esu, passes) + + box_sm = self.extraction2(var_sm,state_mask,mask_time) + box_api = self.extraction2(var_sm_api,state_mask,mask_time) + + self.boxplot3(box_sm,'sm',field,esu,year,times,passes,box_api,sm_insitu,s) + + + def plot_vwc_b(self, times, vwc_output,vwc_insitu, b, year, field, esu, passes): + + fig, ax = plt.subplots(figsize=(20, 7)) + gs = gridspec.GridSpec(2, 1, height_ratios=[5, 5]) + ax = plt.subplot(gs[0]) + + mask_x = np.isnan(vwc_insitu) + + ax.plot(times[~mask_x],vwc_output[~mask_x],label='VWC - Sentinel-2') + ax.plot(times[~mask_x],vwc_insitu[~mask_x],label='VWC - in-situ measurements') + plt.ylabel('VWC [kg/m²]', fontsize=18) + plt.ylim(0,7) + plt.grid(linestyle='dotted') + plt.legend() + plt.subplots_adjust(hspace=.0) + plt.setp(ax.get_xticklabels(), visible=False) + + ax1 = plt.subplot(gs[1]) + + ax1.plot(times[~mask_x],b[~mask_x], label="b'", color='green') + + plt.ylabel("b'", fontsize=18) + plt.ylim(0,0.95) + plt.grid(linestyle='dotted') + plt.legend() + + slope, intercept, r_value, p_value, std_err = linregress(vwc_output[~mask_x],vwc_insitu[~mask_x]) + + plt.savefig('/media/tweiss/Work/Paper3_down/'+passes+'/esu/vwcb_'+year+'_'+field+'_'+esu+str(r_value)[0:4]+'.png', bbox_inches = 'tight') + + plt.close() + + + + def state_mask(self,year,field,esu,state_mask): + + if year == '2017': + if field == '515' and esu == 'high': + mask_value = 1 + state_mask = state_mask==mask_value + elif field == '515' and esu == 'med': + mask_value = 2 + state_mask = state_mask==mask_value + elif field == '515' and esu == 'low': + mask_value = 3 + state_mask = state_mask==mask_value + elif field == '508' and esu == 'high': + mask_value = 4 + state_mask = state_mask==mask_value + elif field == '508' and esu == 'med': + mask_value = 5 + state_mask = state_mask==mask_value + elif 
field == '508' and esu == 'low': + mask_value = 6 + state_mask = state_mask==mask_value + elif field == '542' and esu == 'high': + mask_value = 7 + state_mask = state_mask==mask_value + elif field == '542' and esu == 'med': + mask_value = 8 + state_mask = state_mask==mask_value + elif field == '542' and esu == 'low': + mask_value = 9 + state_mask = state_mask==mask_value + elif field == '319' and esu == 'high': + mask_value = 10 + state_mask = state_mask==mask_value + elif field == '319' and esu == 'med': + mask_value = 11 + state_mask = state_mask==mask_value + elif field == '319' and esu == 'low': + mask_value = 12 + state_mask = state_mask==mask_value + elif field == '301' and esu == 'high': + mask_value = 13 + state_mask = state_mask==mask_value + elif field == '301' and esu == 'med': + mask_value = 14 + state_mask = state_mask==mask_value + elif field == '301' and esu == 'low': + mask_value = 15 + state_mask = state_mask==mask_value + else: + state_mask = 0 + elif year == '2018': + if field == '317' and esu == 'high': + mask_value = 4 + state_mask = state_mask==mask_value + elif field == '317' and esu == 'med': + mask_value = 6 + state_mask = state_mask==mask_value + elif field == '317' and esu == 'low': + mask_value = 5 + state_mask = state_mask==mask_value + elif field == '410' and esu == 'high': + mask_value = 7 + state_mask = state_mask==mask_value + elif field == '410' and esu == 'med': + mask_value = 9 + state_mask = state_mask==mask_value + elif field == '410' and esu == 'low': + mask_value = 8 + state_mask = state_mask==mask_value + elif field == '508' and esu == 'high': + mask_value = 10 + state_mask = state_mask==mask_value + elif field == '508' and esu == 'med': + mask_value = 12 + state_mask = state_mask==mask_value + elif field == '508' and esu == 'low': + mask_value = 11 + state_mask = state_mask==mask_value + elif field == '525' and esu == 'high': + mask_value = 13 + state_mask = state_mask==mask_value + elif field == '525' and esu == 'med': + 
mask_value = 15 + state_mask = state_mask==mask_value + elif field == '525' and esu == 'low': + mask_value = 14 + state_mask = state_mask==mask_value + else: + state_mask = 0 + else: + state_mask = 0 + + return state_mask + + + def boxplot3(self,var_multi,par,field,esu,year,time,passes,sm_api,sm_insitu,meteo=None): + f, ax = plt.subplots(figsize=(20, 15)) + + xx = var_multi.reshape(var_multi.shape[0], (var_multi.shape[1]*var_multi.shape[2])) + sns.boxplot(np.repeat(np.arange(len(xx)), len(xx[0])), xx.flatten(), color='skyblue') + + sm_api2 = np.nanmean(sm_api,axis=(1,2)) + ax.plot(sm_api2,'r-o',linewidth=4, label='SM Api') + ax.plot(sm_insitu,'b-o',linewidth=4, label = 'SM insitu') + ind = list(range(1,len(time)+1)) + time2 = [i.strftime('%d-%m') for i in time] + plt.xticks(ind,time2, rotation=45) + ax.set_ylabel('SM') + plt.legend() + if (year == '2017') or (year == '2018'): + ax2 = ax.twinx() + mask_time2 = np.isin(meteo.index,time) + + ax2.bar(np.arange(len(meteo[mask_time2])),meteo[mask_time2]) + ax2.set_ylim(0,150) + ax2.set_xticks([]) + ax2.set_ylabel('Precipitation') + plt.xticks(ind,time2, rotation=45) + plt.savefig('/media/tweiss/Work/Paper3_down/'+passes+'/boxplot/'+par+str(field)+'_'+esu+'_'+str(year), bbox_inches='tight') + plt.close() + + + + def plot_spatial(self, sm, year, passes, id, s=None, par='xx'): + + id2 = pd.to_datetime(id) + id2 = id2.strftime('%Y-%m-%d') + id2 = pd.to_datetime(id2) + + f, ax = plt.subplots(figsize=(20, 15)) + time2 = [i.strftime('%d-%m') for i in id2] + + + sm[sm == 0] = 'nan' + + sm_mean = np.mean(sm,axis=(1,2)) + xxx = np.arange(len(sm_mean)) + sm_std = np.std(sm,axis=(1,2)) + ax.errorbar(xxx,sm_mean,sm_std,fmt='-o') + ax.set_xticks([]) + ax2 = ax.twinx() + mask_time2 = np.isin(s.index,id2) + + ax2.bar(np.arange(len(s[mask_time2])),s[mask_time2]) + ax2.set_ylim(0,150) + ax2.set_xticks([]) + ax2.set_ylabel('Precipitation') + ind = list(range(1,len(time2)+1)) + plt.xticks(ind,time2, rotation=45) + 
plt.savefig('/media/tweiss/Work/Paper3_down/'+passes+'/spatial/'+par+'_'+year, bbox_inches='tight') + plt.close() + + for iii in np.arange(len(sm)): + + f, ax = plt.subplots(figsize=(20, 15)) + cmap = plt.cm.viridis_r + # label = 'Soil Moisture [m$^3$/m$^3$]' + label = 'Bodenfeuchte [m$^3$/m$^3$]' + cmap.set_bad(color='white') + plt.rcParams['axes.labelsize'] = 20 + + im1 = ax.imshow(sm[iii,:,:], vmin=0.1, vmax=0.4, cmap=cmap, aspect='auto') + # ax.set_title('sm_'+year, fontsize=20) + f.subplots_adjust(right=0.85) + cbar_ax = f.add_axes([0.88, 0.15, 0.04, 0.7]) + ticklabs = cbar_ax.get_yticklabels() + cbar_ax.set_yticklabels(ticklabs, fontsize=20) + f.colorbar(im1, cax=cbar_ax, label=label) + + ax.xaxis.set_tick_params(labelsize=20) + ax.yaxis.set_tick_params(labelsize=20) + + plt.setp(ax, xticks=[22*6.1, 2*22*6.1, 3*22*6.1, 4*22*6.1, 5*22*6.1, 6*22*6.1], xticklabels=['11.64°E', '11.66°E', '11.68°E', '11.70°E', '11.72°E', '11.74°E'], yticks=[22*6.12, (22+39)*6.12, (22+39*2)*6.12, (22+39*3)*6.12], yticklabels=['48.30°N', '48.28°N', '48.26°N', '48.24°N']) + ax.set_ylim(len(sm[0]),0) + mean = np.nanmean(sm[iii,:,:]) + + plt.savefig('/media/tweiss/Work/Paper3_down/'+passes+'/spatial/'+par+'_'+year+'_'+str(id[iii])[:10]+'_'+str(mean)[0:5]+'.png', bbox_inches='tight') + + # plt.close() + + def plot_spatial2(self, sm, year, passes, id, s=None, par='xx'): + + id2 = pd.to_datetime(id) + id2 = id2.strftime('%Y-%m-%d') + id2 = pd.to_datetime(id2) + + # f, ax = plt.subplots(figsize=(20, 15)) + # time2 = [i.strftime('%d-%m') for i in id2] + + + sm[sm == 0] = 'nan' + + # sm_mean = np.mean(sm,axis=(1,2)) + # xxx = np.arange(len(sm_mean)) + # sm_std = np.std(sm,axis=(1,2)) + # ax.errorbar(xxx,sm_mean,sm_std,fmt='-o') + # ax.set_xticks([]) + # ax2 = ax.twinx() + # mask_time2 = np.isin(s.index,id2) + + # ax2.bar(np.arange(len(s[mask_time2])),s[mask_time2]) + # ax2.set_ylim(0,150) + # ax2.set_xticks([]) + # ax2.set_ylabel('Precipitation') + # ind = list(range(1,len(time2)+1)) + # 
plt.xticks(ind,time2, rotation=45) + # plt.savefig('/media/tweiss/Work/Paper3_down/'+passes+'/spatial/'+par+'_'+year, bbox_inches='tight') + # plt.close() + + for iii in np.arange(len(sm)): + + f, ax = plt.subplots(figsize=(20, 15)) + cmap = plt.cm.viridis_r + # label = 'Soil Moisture [m$^3$/m$^3$]' + cmap.set_bad(color='white') + # plt.rcParams['axes.labelsize'] = 20 + + im1 = ax.imshow(sm[iii,:,:], vmin=0.1, vmax=0.4, cmap=cmap, aspect='auto') + plt.axis('off') + + plt.savefig('/media/tweiss/Work/Paper3_down/'+passes+'/spatial2/'+par+'_'+year+'_'+str(id[iii])[:10]+'_'+'.png', bbox_inches='tight') + + # plt.close() + + + def boxplot_sm_area(self,var_multi,year,time,passes,meteo=None): + + f, ax = plt.subplots(figsize=(20, 15)) + + if year == '2017': + time_final = pd.date_range(start='2017-03-21',end='2017-09-30') + elif year == '2018': + time_final = pd.date_range(start='2018-03-21',end='2018-09-30') + else: + pass + + new_array = np.zeros((len(time_final),len(var_multi[0,:,0])*len(var_multi[0,0,:]))) + new_array[:] = np.nan + + time3 = pd.to_datetime(time).strftime('%Y-%m-%d') + time3 = pd.to_datetime(time3) + mask_time_final = np.isin(time_final,time3) + + xx = var_multi.reshape(var_multi.shape[0], (var_multi.shape[1]*var_multi.shape[2])) + new_array[mask_time_final] = xx + + new_array2 = new_array.flatten() + id = np.repeat(np.arange(0,len(new_array)),len(new_array[0])) + df = pd.DataFrame({'id':id,'value':new_array2},columns=['id','value']) + df2 = df.head(new_array[0].shape[0]*25) + + sns.boxplot(data=df,x='id',y='value', color='skyblue', showfliers = False) + # sns.violinplot(data=df, y='value', x='id', color='skyblue') + + ind = list(range(1,len(time)+1)) + time2 = [i.strftime('%d-%m') for i in time] + # plt.xticks(ind,time2, rotation=45) + ax.set_ylabel('SM') + ax.set_ylim(0.1,0.45) + freq = int(10) + ax.set_xticklabels(time_final[::freq].strftime('%d-%m')) + xtix = ax.get_xticks() + ax.set_xticks(xtix[::freq]) + # f.autofmt_xdate() + # plt.legend() + + + 
+ if (year == '2017') or (year == '2018'): + ax2 = ax.twinx() + mask_time2 = np.isin(meteo.index,time_final) + + ax2.bar(np.arange(len(meteo[mask_time2])),meteo[mask_time2]) + ax2.set_ylim(0,150) + ax2.set_xticks([]) + ax2.set_ylabel('Precipitation [mm]') + + ax.set_xticks(xtix[::freq]) + plt.savefig('/media/tweiss/Work/Paper3_down/'+passes+'/boxplot/'+'sm_area_'+str(year), bbox_inches='tight') + plt.close() + + if year == '2017': + # time_invest = pd.date_range(start="2017-05-29",end="2017-06-06") + time_invest = pd.date_range(start="2017-03-28",end="2017-07-31") + time_mask_invest = np.isin(time_final,time_invest) + data_invest = new_array[time_mask_invest] + + id_invest = np.repeat(np.arange(0,len(data_invest)),len(data_invest[0])) + df_invest = pd.DataFrame({'id':id_invest,'value':data_invest.flatten()},columns=['id','value']) + + + f, ax = plt.subplots(figsize=(30, 10)) + + gs = gridspec.GridSpec(2, 1, height_ratios=[5,5]) + ax = plt.subplot(gs[0]) + plt.tick_params(labelsize=16) + + sns.boxplot(data=df_invest,x='id',y='value', color='skyblue', showfliers = False) + ax.set_ylabel('SM',fontsize=16) + ax.set_ylim(0.1,0.4) + ax.set_xlabel('') + ax.set_xticklabels(time_invest.strftime('%Y-%m-%d')) + xtix = ax.get_xticks() + ax.set_xticks(xtix) + plt.setp(ax.get_xticklabels(), visible=False) + plt.subplots_adjust(hspace=.0) + ax1 = plt.subplot(gs[1]) + plt.tick_params(labelsize=16) + plt.rcParams.update({'font.size': 16}) + meteo2 = self.import_meteo() + + + mask_time3 = np.isin(meteo2.index,time_invest) + hm = meteo2[mask_time3] + # hm = meteo2.iloc[145:160] + ax0 = hm.plot.bar(ax=ax1,rot=0, fontsize=16) + xxx = [pd_datetime.strftime("%Y-%m-%d") for pd_datetime in hm.index] + # ax0.set_xticklabels([pd_datetime.strftime("%Y-%m-%d") for pd_datetime in hm.index]) + ax0.set_xticklabels(xxx) + ax0.set_ylabel('Precipitation [mm]',fontsize=16) + ax0.set_xlabel('Date') + plt.xticks([4, 34, 65, 95]) + ax0.set_xticklabels([xxx[4],xxx[34],xxx[65],xxx[95]]) + 
plt.savefig('/media/tweiss/Work/Paper3_down/'+passes+'/boxplot/'+'precip_2017', bbox_inches='tight') + plt.close() + + if year == '2018': + # time_invest = pd.date_range(start="2018-07-16",end="2018-07-26") + time_invest = pd.date_range(start="2018-03-28",end="2018-07-31") + time_mask_invest = np.isin(time_final,time_invest) + data_invest = new_array[time_mask_invest] + + id_invest = np.repeat(np.arange(0,len(data_invest)),len(data_invest[0])) + df_invest = pd.DataFrame({'id':id_invest,'value':data_invest.flatten()},columns=['id','value']) + + + f, ax = plt.subplots(figsize=(30, 10)) + + gs = gridspec.GridSpec(2, 1, height_ratios=[5,5]) + ax = plt.subplot(gs[0]) + plt.tick_params(labelsize=16) + + sns.boxplot(data=df_invest,x='id',y='value', color='skyblue', showfliers = False) + ax.set_ylabel('SM',fontsize=16) + ax.set_ylim(0.1,0.4) + ax.set_xlabel('') + ax.set_xticklabels(time_invest.strftime('%Y-%m-%d')) + xtix = ax.get_xticks() + ax.set_xticks(xtix) + plt.setp(ax.get_xticklabels(), visible=False) + plt.subplots_adjust(hspace=.0) + ax1 = plt.subplot(gs[1]) + plt.tick_params(labelsize=16) + plt.rcParams.update({'font.size': 16}) + meteo2 = self.import_meteo() + + + mask_time3 = np.isin(meteo2.index,time_invest) + hm = meteo2[mask_time3] + # hm = meteo2.iloc[145:160] + ax0 = hm.plot.bar(ax=ax1,rot=0, fontsize=16) + xxx = [pd_datetime.strftime("%Y-%m-%d") for pd_datetime in hm.index] + # ax0.set_xticklabels([pd_datetime.strftime("%Y-%m-%d") for pd_datetime in hm.index]) + ax0.set_ylabel('Precipitation [mm]',fontsize=16) + ax0.set_xlabel('Date') + plt.xticks([4, 34, 65, 95]) + ax0.set_xticklabels([xxx[4],xxx[34],xxx[65],xxx[95]]) + plt.savefig('/media/tweiss/Work/Paper3_down/'+passes+'/boxplot/'+'precip_2018', bbox_inches='tight') + plt.close() + + + def import_meteo(self): + eichenried = pd.read_csv('/media/tweiss/Work/Paper3_down/Tag_Eichenried.csv', sep=';', decimal=',') + # freising = pd.read_csv('/media/tweiss/Work/Paper3_down/Tag_Freising.csv', sep=';', 
decimal=',') + grub = pd.read_csv('/media/tweiss/Work/Paper3_down/Tag_Grub.csv', sep=';', decimal=',') + + meteo = pd.merge(eichenried,grub,on='Tag',how='inner') + + meteo.columns = ['date','Eichenried','Grub'] + meteo['date'] = pd.to_datetime(meteo['date'],format='%d.%m.%Y') + meteo.index=meteo['date'] + meteo = meteo.drop(columns=['date']) + + return meteo + + + +if __name__ == '__main__': + + years = ['2017','2018'] + years = ['2017'] + versions = ['_multi', '_single'] + versions = ['_multi'] + + esus = ['high', 'med', 'low'] + + esu_size_tiff = '_ESU_buffer_100.tif' # buffer around ESU 100, 50, 30 etc + passes = 'hm' + + + plot_esu(years, esus, passes, esu_size_tiff) + diff --git a/kaska/paper3_plot_old.py b/kaska/paper3_plot_old.py new file mode 100644 index 0000000..6d1a5d2 --- /dev/null +++ b/kaska/paper3_plot_old.py @@ -0,0 +1,347 @@ +import os +import osr +from osgeo import gdal +import datetime +import numpy as np +from netCDF4 import Dataset +from scipy.ndimage import label +from utils import reproject_data +from skimage.filters import sobel +from collections import namedtuple +from scipy.optimize import minimize +from scipy.interpolate import interp1d +# from watercloudmodel import cost_function +from watercloudmodel_vwc_rms import cost_function_vwc, ssrt_jac_vwc, ssrt_vwc +from scipy.ndimage.filters import gaussian_filter1d +import pdb +from z_helper import * +import matplotlib.pyplot as plt +from netCDF4 import date2num +import matplotlib +import subprocess + +def reproject_data2(source_img, + target_img=None, + dstSRS=None, + srcSRS=None, + srcNodata=np.nan, + dstNodata=np.nan, + outputType=None, + output_format="MEM", + verbose=False, + xmin=None, + xmax=None, + ymin=None, + ymax=None, + xRes=None, + yRes=None, + xSize=None, + ySize=None, + resample=0, + ): + + """ + A method that uses a source and a target images to + reproject & clip the source image to match the extent, + projection and resolution of the target image. 
+ + """ + + outputType = ( + gdal.GDT_Unknown if outputType is None else outputType + ) + if srcNodata is None: + try: + srcNodata = " ".join( + [ + i.split("=")[1] + for i in gdal.Info(source_img).split("\n") + if " NoData" in i + ] + ) + except RuntimeError: + srcNodata = None + # If the output type is intenger and destination nodata is nan + # set it to 0 to avoid warnings + if outputType <= 5 and np.isnan(dstNodata): + dstNodata = 0 + + if srcSRS is not None: + _srcSRS = osr.SpatialReference() + try: + _srcSRS.ImportFromEPSG(int(srcSRS.split(":")[1])) + except: + _srcSRS.ImportFromWkt(srcSRS) + else: + _srcSRS = None + + + if (target_img is None) & (dstSRS is None): + raise IOError( + "Projection should be specified ether from " + + "a file or a projection code." + ) + elif target_img is not None: + try: + g = gdal.Open(target_img) + except RuntimeError: + g = target_img + geo_t = g.GetGeoTransform() + x_size, y_size = g.RasterXSize, g.RasterYSize + + if xRes is None: + xRes = abs(geo_t[1]) + if yRes is None: + yRes = abs(geo_t[5]) + + if xSize is not None: + x_size = 1.0 * xSize * xRes / abs(geo_t[1]) + if ySize is not None: + y_size = 1.0 * ySize * yRes / abs(geo_t[5]) + + xmin, xmax = ( + min(geo_t[0], geo_t[0] + x_size * geo_t[1]), + max(geo_t[0], geo_t[0] + x_size * geo_t[1]), + ) + ymin, ymax = ( + min(geo_t[3], geo_t[3] + y_size * geo_t[5]), + max(geo_t[3], geo_t[3] + y_size * geo_t[5]), + ) + dstSRS = osr.SpatialReference() + raster_wkt = g.GetProjection() + dstSRS.ImportFromWkt(raster_wkt) + gg = gdal.Warp( + "", + source_img, + format=output_format, + outputBounds=[xmin, ymin, xmax, ymax], + dstNodata=dstNodata, + warpOptions=["NUM_THREADS=ALL_CPUS"], + xRes=xRes, + yRes=yRes, + dstSRS=dstSRS, + outputType=outputType, + srcNodata=srcNodata, + resampleAlg=resample, + srcSRS=_srcSRS + ) + + else: + gg = gdal.Warp( + "", + source_img, + format=output_format, + outputBounds=[xmin, ymin, xmax, ymax], + xRes=xRes, + yRes=yRes, + dstSRS=dstSRS, + 
warpOptions=["NUM_THREADS=ALL_CPUS"], + copyMetadata=True, + outputType=outputType, + dstNodata=dstNodata, + srcNodata=srcNodata, + resampleAlg=resample, + srcSRS=_srcSRS + ) + if verbose: + LOG.debug("There are %d bands in this file, use " + + "g.GetRasterBand() to avoid reading the whole file." + % gg.RasterCount + ) + return gg + + + +def plot(input,min,max,name,path,times,mask=None): + + for i in range(len(input)): + fig, ax = plt.subplots(figsize=(20, 15)) + input = np.ma.masked_where(input == 0.,input) + current_cmap = matplotlib.cm.get_cmap() + current_cmap.set_bad(color='white') + + try: + hm = input[i] + hm[mask] = np.nan + quadmesh = ax.imshow(hm) + plt.colorbar(quadmesh) + quadmesh.set_clim(vmin=min, vmax=max) + plt.savefig(os.path.join(path,name+'_mask',name+'_'+times[i].strftime("%Y%m%d")), bbox_inches = 'tight') + except TypeError: + quadmesh = ax.imshow(input[i]) + plt.colorbar(quadmesh) + quadmesh.set_clim(vmin=min, vmax=max) + plt.savefig(os.path.join(path,name,name+'_'+times[i].strftime("%Y%m%d")), bbox_inches = 'tight') + plt.close() + +def scatterplot(input1,input2,fields,esus): + for field in fields: + aaa = input1.filter(like=field) + bbb = input2.filter(like=field) + + fig, ax = plt.subplots(figsize=(20, 15)) + colors=['blue','green','red'] + for u, esu in enumerate(esus): + ccc = aaa.filter(like=esu).values.flatten() + ddd = bbb.filter(like=esu).values.flatten() + ax.plot(ccc,ddd,marker='o',color=colors[u], linestyle='') + ax.set_xlim(0.05,0.4) + ax.set_ylim(0.05,0.4) + x = [0, 1] + y = [0, 1] + ax.plot(x,y) + plt.ylabel('SM model') + plt.xlabel('SM insitu') + plt.title(field) + plt.savefig('/media/tweiss/Work/Paper3_down/2017/plot/scatterplot/scatterplot_'+field,bbox_inches='tight') + plt.close() + +def scatterplot_bias(input1,input2,fields,esus): + for field in fields: + aaa = input1.filter(like=field) + bbb = input2.filter(like=field) + + fig, ax = plt.subplots(figsize=(20, 15)) + colors=['blue','green','red'] + for u, esu in 
enumerate(esus): + ccc = aaa.filter(like=esu).values.flatten() + ddd = bbb.filter(like=esu).values.flatten() + bias = np.nanmean(ccc - ddd) + ax.plot(ccc,ddd+bias,marker='o',color=colors[u], linestyle='') + + ax.set_xlim(0.05,0.4) + ax.set_ylim(0.05,0.4) + x = [0, 1] + y = [0, 1] + ax.plot(x,y) + plt.ylabel('SM model') + plt.xlabel('SM insitu') + plt.title(field+' bias corrected') + plt.savefig('/media/tweiss/Work/Paper3_down/2017/plot/scatterplot/scatterplot_bias_'+field,bbox_inches='tight') + plt.close() + +# subprocess.call('gdal_rasterize -at -of GTiff -a field -te 694748 5345900 703600 5354600 -tr 10 10 -ot Byte -co \"COMPRESS=DEFLATE\" '+'/media/tweiss/Work/Paper3_down/GIS/sgm2017.shp'+' /media/tweiss/Work/Paper3_down/GIS/sgm2017_frucht.tif', shell=True) + +# state_mask = '/media/tweiss/Work/Paper3_down/clc5_class2xx_2018.tif' + +# mask_frucht = reproject_data('/media/tweiss/Work/Paper3_down/GIS/sgm2017_frucht.tif', output_format="MEM", target_img=state_mask) +# mask_frucht = mask_frucht.ReadAsArray().astype(np.int) + + +# mask = (mask_frucht != 2) & (mask_frucht != 3) + +path = '/media/tweiss/Work/Paper3_down/2017/plot' +times = np.load('/media/tweiss/Work/Paper3_down/2017/times.npy',allow_pickle=True) + +# plot(np.load('/media/tweiss/Work/Paper3_down/2017/input_sm_api.npy'),0.1,0.4,'input_sm',path,times,mask) +# plot(10*np.log10(np.load('/media/tweiss/Work/Paper3_down/2017/input_vv.npy')),-25,-5,'input_vv',path,times,mask) +# plot(np.load('/media/tweiss/Work/Paper3_down/2017/input_vwc.npy'),0.0,5,'input_vwc',path,times,mask) + +# plot(np.load('/media/tweiss/Work/Paper3_down/2017/output_sm.npy'),0.1,0.4,'output_sm',path,times,mask) +# plot(np.load('/media/tweiss/Work/Paper3_down/2017/output_rms.npy'),0.005,0.03,'output_rms',path,times,mask) +# plot(np.load('/media/tweiss/Work/Paper3_down/2017/output_vwc.npy'),0.0,5,'output_vwc',path,times,mask) +# plot(np.load('/media/tweiss/Work/Paper3_down/2017/output_b.npy'),0.0,0.6,'output_b',path,times,mask) + + 
+mask_default = '/media/tweiss/Work/Paper3_down/clc5_class2xx_2018.tif' + +pixel = ['_buffer_50'] +pixel = ['_buffer_100'] + +processed_sentinel = ['multi'] + +fields = ['301', '508', '542', '319', '515'] +fields = ['508','301','542'] +# ESU names +esus = ['high', 'low', 'med'] + +df_model = pd.DataFrame() +df_insitu = pd.DataFrame() + + +for processed_sentinel_data in processed_sentinel: + + for pixels in pixel: + print(pixels) + path_ESU = '/media/tweiss/Work/z_final_mni_data_2017/' + name_shp = 'ESU'+pixels+'.shp' + name_ESU = 'ESU'+pixels+'.tif' + mask_esu = reproject_data2(path_ESU+name_ESU, output_format="MEM", target_img=mask_default) + + + + for field in fields: + for esu in esus: + state_mask = mask_esu.ReadAsArray().astype(np.float) + + if field == '515' and esu == 'high': + mask_value = 1 + state_mask = state_mask==mask_value + elif field == '515' and esu == 'med': + mask_value = 2 + state_mask = state_mask==mask_value + elif field == '515' and esu == 'low': + mask_value = 3 + state_mask = state_mask==mask_value + elif field == '508' and esu == 'high': + mask_value = 4 + state_mask = state_mask==mask_value + elif field == '508' and esu == 'med': + mask_value = 5 + state_mask = state_mask==mask_value + elif field == '508' and esu == 'low': + mask_value = 6 + state_mask = state_mask==mask_value + elif field == '542' and esu == 'high': + mask_value = 7 + state_mask = state_mask==mask_value + elif field == '542' and esu == 'med': + mask_value = 8 + state_mask = state_mask==mask_value + elif field == '542' and esu == 'low': + mask_value = 9 + state_mask = state_mask==mask_value + elif field == '319' and esu == 'high': + mask_value = 10 + state_mask = state_mask==mask_value + elif field == '319' and esu == 'med': + mask_value = 11 + state_mask = state_mask==mask_value + elif field == '319' and esu == 'low': + mask_value = 12 + state_mask = state_mask==mask_value + elif field == '301' and esu == 'high': + mask_value = 13 + state_mask = state_mask==mask_value + 
elif field == '301' and esu == 'med': + mask_value = 14 + state_mask = state_mask==mask_value + elif field == '301' and esu == 'low': + mask_value = 15 + state_mask = state_mask==mask_value + else: + state_mask = 0 + + + + sm = np.load('/media/tweiss/Work/Paper3_down/2017/output_sm.npy') + sm = sm[:,state_mask] + + # sm = np.mean(sm,axis=1) + sm = sm[:,0] + + file = '/media/tweiss/Work/z_final_mni_data_2017/new_in_situ_s1multi_buffer_100_2017_paper3.csv' + data = pd.read_csv(file,header=[0,1],index_col=1) + data_field = data.filter(like=field).filter(like=esu).filter(like='SM') + data_field.index = pd.to_datetime(data_field.index) + date = data_field.index + sm_insitu= data_field[times[0]:times[-1]].values.flatten() + + df_insitu[field+'_'+esu+'_sminsitu'] = sm_insitu + df_model[field+'_'+esu+'_smmodel'] = sm[1:-1] + + +scatterplot(df_insitu,df_model,fields,esus) +scatterplot_bias(df_insitu,df_model,fields,esus) + + diff --git a/kaska/paper3_plot_scatter.py b/kaska/paper3_plot_scatter.py new file mode 100644 index 0000000..86d4df2 --- /dev/null +++ b/kaska/paper3_plot_scatter.py @@ -0,0 +1,749 @@ +import numpy as np +import pdb +from osgeo import gdal +import matplotlib.pyplot as plt +from z_helper import * +import datetime +import seaborn as sns +from matplotlib.colors import ListedColormap +import skill_metrics as sm +from matplotlib.lines import Line2D + +class plot_scatter(object): + """Plotting scatterplots""" + + def __init__(self, years, esus, passes, esu_size_tiff): + self.esus = esus + self.years = years + self.passes = passes + + if not os.path.exists('/media/tweiss/Work/Paper3_down/'+passes+'/taylor'): + os.makedirs('/media/tweiss/Work/Paper3_down/'+passes+'/taylor') + + self.plot(years, esus, passes, esu_size_tiff) + self.plot(years, esus, passes, esu_size_tiff, 'wheat') + self.plot(years, esus, passes, esu_size_tiff, 'maize') + + if '2017' in years: + self.plot(['2017'], esus, passes, esu_size_tiff, '301') + self.plot(['2017'], esus, passes, 
esu_size_tiff, '542') + self.plot(['2017'], esus, passes, esu_size_tiff, '508') + self.plot(['2017'], esus, passes, esu_size_tiff, '319') + self.plot(['2017'], esus, passes, esu_size_tiff, '515') + if '2018' in years: + self.plot(['2018'], esus, passes, esu_size_tiff, '508') + self.plot(['2018'], esus, passes, esu_size_tiff, '317') + self.plot(['2018'], esus, passes, esu_size_tiff, '410') + self.plot(['2018'], esus, passes, esu_size_tiff, '525') + self.plot2(years, esus, passes, esu_size_tiff) + + def plot(self, years, esus, passes, esu_size_tiff,crop=None): + """ + years = ['2017', '2018'] + + esus = ['high', 'med', 'low'] + + esu_size_tiff = '_ESU_buffer_100.tif' + """ + + fig, ax = plt.subplots(figsize=(20, 15)) + + insitu_all_years = [] + mean_all_years = [] + mean_all_bias_years = [] + + ccoef_field = [] + crmsd_field = [] + sdev_field = [] + labels_field = [] + pppp = 0 + + for year in years: + var_multi = np.load('/media/tweiss/Work/Paper3_down/'+passes+'/'+year+'_multi_'+'sm'+'.npy') + + time = np.load('/media/tweiss/Work/Paper3_down/'+passes+'/'+year+'_multi_times.npy',allow_pickle=True) + + file = '/media/tweiss/Work/z_final_mni_data_2017/new_in_situ_s1multi_buffer_100_'+year+'_paper3.csv' + + data = pd.read_csv(file,header=[0,1],index_col=1) + + if year == '2017': + if crop == 'wheat': + fields = ['301','542','508'] + elif crop == 'maize': + fields = ['319','515'] + elif crop == '301': + fields = ['301'] + elif crop == '542': + fields = ['542'] + elif crop == '508': + fields = ['508'] + elif crop == '319': + fields = ['319'] + elif crop == '515': + fields = ['515'] + else: + fields = ['301','319','542','508','515'] + crop='' + if year == '2018': + if crop == 'wheat': + fields = ['317','525'] + elif crop == 'maize': + fields = ['410','508'] + elif crop == '508': + fields = ['508'] + elif crop == '317': + fields = ['317'] + elif crop == '410': + fields = ['410'] + elif crop == '525': + fields = ['525'] + else: + fields = ['317','410','525','508'] + crop='' 
+ + yy = 0.1 + insitu_all = [] + mean_all = [] + mean_all_bias = [] + bias_collection = [] + rf = [] + bf = [] + uf = [] + ccoef = [] + crmsd = [] + sdev = [] + labels = [] + + ppp = 0 + + + fig, ax = plt.subplots(figsize=(20, 15)) + for field in fields: + + insitu_field = [] + mean_field = [] + for esu in esus: + g = gdal.Open('/media/tweiss/Work/Paper3_down/GIS/'+year+esu_size_tiff) + state_mask = g.ReadAsArray().astype(np.int) + + state_mask = self.state_mask(year,field,esu,state_mask) + + data_field = data.filter(like=field).filter(like=esu).filter(like='SM') + data_field.index = pd.to_datetime(data_field.index) + data_field = data_field.dropna() + date = data_field.index + xxx = np.copy(var_multi) + xxx[:,~state_mask]=np.nan + time2 = pd.to_datetime(time) + time2 = time2.strftime('%Y-%m-%d') + date2 = date.strftime('%Y-%m-%d') + mask_time = np.isin(time2,date2) + yyy = xxx[mask_time,:] + mean_rt = np.nanmean(yyy,axis=(1,2)) + + # sm_insitu = np.repeat(data_field.values.flatten(),len(yyy[0][~np.isnan(yyy[0])])) + # yyy = yyy[~np.isnan(yyy)] + + if field == '301': + color = 'green' + elif field == '319': + color = 'red' + elif field == '508': + color = 'blue' + elif field == '515': + color = 'orange' + elif field == '542': + color = 'black' + elif field == '317': + color = 'grey' + elif field == '410': + color = 'brown' + elif field == '525': + color = 'yellow' + else: + pass + + insitu = data_field.values.flatten() + bias = np.nanmean(insitu - mean_rt) + + ax.plot(insitu,mean_rt+bias,marker='o',color=color, linestyle='') + + insitu_all = np.append(insitu_all,insitu) + mean_all = np.append(mean_all,mean_rt) + insitu_field = np.append(insitu_field,insitu) + mean_field = np.append(mean_field,mean_rt) + mean_all_bias = np.append(mean_all_bias,mean_rt+bias) + bias_collection = np.append(bias_collection,bias) + insitu_all_years = np.append(insitu_all_years,insitu) + mean_all_years = np.append(mean_all_years,mean_rt) + mean_all_bias_years = 
np.append(mean_all_bias_years,mean_rt+bias) + + + stats = sm.taylor_statistics(mean_rt+bias,insitu,'data') + + if ppp == 0: + ccoef = np.append(ccoef,stats['ccoef'][0]) + crmsd = np.append(crmsd,stats['crmsd'][0]) + sdev = np.append(sdev,stats['sdev'][0]) + labels = np.append(labels,'initial') + + ccoef = np.append(ccoef,stats['ccoef'][1]) + crmsd = np.append(crmsd,stats['crmsd'][1]) + sdev = np.append(sdev,stats['sdev'][1]) + labels = np.append(labels,field+esu) + + ppp = ppp+1 + + rmse_field = rmse_prediction(insitu_field,mean_field) + bias_field = bias_prediction(insitu_field,mean_field) + ubrmse_field = ubrmse_prediction(rmse_field,bias_field) + + stats = sm.taylor_statistics(mean_field+bias_field,insitu_field,'data') + r_value = stats['ccoef'][0] + + if pppp == 0: + ccoef_field = np.append(ccoef_field,stats['ccoef'][0]) + crmsd_field = np.append(crmsd_field,stats['crmsd'][0]) + sdev_field = np.append(sdev_field,stats['sdev'][0]) + labels_field = np.append(labels_field,'initial') + + pppp = pppp+1 + + + ccoef_field = np.append(ccoef_field,stats['ccoef'][1]) + crmsd_field = np.append(crmsd_field,stats['crmsd'][1]) + sdev_field = np.append(sdev_field,stats['sdev'][1]) + labels_field = np.append(labels_field,year + '-' + field) + + + yy = yy + 0.02 + + plt.text(0.4,yy,field+' rmse:'+str(rmse_field)[0:5]+' ubrmse:'+str(ubrmse_field)[0:5], color=color) + + + + rf.append(rmse_field) + bf.append(bias_field) + uf.append(ubrmse_field) + + + ax.set_xlim(0,0.5) + ax.set_ylim(0,0.5) + + + x = [0, 1] + y = [0, 1] + ax.plot(x,y) + plt.ylabel('SM model', fontsize=20) + plt.xlabel('SM insitu', fontsize=20) + + ax.xaxis.set_tick_params(labelsize=20) + ax.yaxis.set_tick_params(labelsize=20) + + rmse = rmse_prediction(insitu_all,mean_all) + bias = bias_prediction(insitu_all,mean_all) + ubrmse = ubrmse_prediction(rmse,bias) + + yy = yy +0.02 + + plt.text(0.4,yy,'rmse all:'+str(rmse_field)[0:5]+' ubrmse all:'+str(ubrmse_field)[0:5], color='black') + + if not 
os.path.exists('/media/tweiss/Work/Paper3_down/'+passes+'/scatterplot'): + os.makedirs('/media/tweiss/Work/Paper3_down/'+passes+'/scatterplot') + + plt.savefig('/media/tweiss/Work/Paper3_down/'+passes+'/scatterplot/'+year+crop,bbox_inches='tight') + plt.close() + + self.plot_scat(insitu_all, mean_all_bias, year, passes, mean_all, bias_collection,crop) + + # self.plot_taylor(ccoef, crmsd, sdev, labels, passes, year) + # pdb.set_trace() + # if crop == '': + # self.plot_taylor(ccoef_field, crmsd_field, sdev_field, labels_field, passes, year) + + # self.plot_scat(insitu_all_years, mean_all_bias_years, '2017-2018', passes, mean_all, bin_a=40,bin_b=30) + + def plot2(self, years, esus, passes, esu_size_tiff): + """ + years = ['2017', '2018'] + + esus = ['high', 'med', 'low'] + + esu_size_tiff = '_ESU_buffer_100.tif' + """ + crop='' + fig, ax = plt.subplots(figsize=(20, 15)) + + insitu_all_years = [] + mean_all_years = [] + mean_all_bias_years = [] + + ccoef_field = [] + crmsd_field = [] + sdev_field = [] + labels_field = [] + pppp = 0 + + for year in years: + var_multi = np.load('/media/tweiss/Work/Paper3_down/'+passes+'/'+year+'_multi_'+'sm'+'.npy') + + time = np.load('/media/tweiss/Work/Paper3_down/'+passes+'/'+year+'_multi_times.npy',allow_pickle=True) + + file = '/media/tweiss/Work/z_final_mni_data_2017/new_in_situ_s1multi_buffer_100_'+year+'_paper3.csv' + + data = pd.read_csv(file,header=[0,1],index_col=1) + + if year == '2017': + fields = ['301','319','542','508','515'] + # fields = ['301','319','508'] + if year == '2018': + fields = ['317','410','525','508'] + + yy = 0.1 + insitu_all = [] + mean_all = [] + mean_all_bias = [] + rf = [] + bf = [] + uf = [] + ccoef = [] + crmsd = [] + sdev = [] + labels = [] + + ppp = 0 + + + fig, ax = plt.subplots(figsize=(20, 15)) + for field in fields: + + insitu_field = [] + mean_field = [] + + g = gdal.Open('/media/tweiss/Work/Paper3_down/GIS/'+year+'_ESU_Field_buffer_30.tif') + state_mask = g.ReadAsArray().astype(np.int) + + 
state_mask = self.state_mask2(year,field,state_mask) + + data_field = data.filter(like=field).filter(like='SM') + data_field.index = pd.to_datetime(data_field.index) + data_field = data_field.dropna() + date = data_field.index + xxx = np.copy(var_multi) + xxx[:,~state_mask]=np.nan + time2 = pd.to_datetime(time) + time2 = time2.strftime('%Y-%m-%d') + date2 = date.strftime('%Y-%m-%d') + mask_time = np.isin(time2,date2) + yyy = xxx[mask_time,:] + mean_rt = np.nanmean(yyy,axis=(1,2)) + + # sm_insitu = np.repeat(data_field.values.flatten(),len(yyy[0][~np.isnan(yyy[0])])) + # yyy = yyy[~np.isnan(yyy)] + + if field == '301': + color = 'green' + elif field == '319': + color = 'red' + elif field == '508': + color = 'blue' + elif field == '515': + color = 'orange' + elif field == '542': + color = 'black' + elif field == '317': + color = 'grey' + elif field == '410': + color = 'brown' + elif field == '525': + color = 'yellow' + else: + pass + + + + if year == '2017': + bbch = pd.read_csv('/media/tweiss/Work/z_final_mni_data_2017/bbch_2017.csv',header=[0,1]) + elif year == '2018': + bbch = pd.read_csv('/media/tweiss/Work/z_final_mni_data_2017/bbch_2018.csv',header=[0,1]) + else: + pass + + bbch_value = 37 + print(year) + print(field) + bbch.index = pd.to_datetime(bbch['None','None']) + bbch_field = bbch.filter(like=field) + lower37 = bbch_field.loc[bbch_field[field,'BBCH median']>bbch_value] + + + + pos37 = data_field.index.get_loc(lower37.index[-1],method='nearest') + + if field == '515': + data_field = data_field.drop(data_field.columns[0],1) + + mean_rt = mean_rt[0:pos37] + insitu = data_field.mean(axis=1).values.flatten()[0:pos37] + # insitu = data_field.mean(axis=1).values.flatten() + + + bias = np.nanmean(insitu - mean_rt) + + ax.plot(insitu,mean_rt+bias,marker='o',color=color, linestyle='') + + insitu_all = np.append(insitu_all,insitu) + mean_all = np.append(mean_all,mean_rt) + insitu_field = np.append(insitu_field,insitu) + mean_field = np.append(mean_field,mean_rt) + 
mean_all_bias = np.append(mean_all_bias,mean_rt+bias) + insitu_all_years = np.append(insitu_all_years,insitu) + mean_all_years = np.append(mean_all_years,mean_rt) + mean_all_bias_years = np.append(mean_all_bias_years,mean_rt+bias) + + + stats = sm.taylor_statistics(mean_rt+bias,insitu,'data') + + rmse_field = rmse_prediction(insitu,mean_rt) + bias_field = bias_prediction(insitu,mean_rt) + ubrmse_field = ubrmse_prediction(rmse_field,bias_field) + + stats = sm.taylor_statistics(mean_rt+bias_field,insitu,'data') + + + if pppp == 0: + ccoef_field = np.append(ccoef_field,stats['ccoef'][0]) + crmsd_field = np.append(crmsd_field,stats['crmsd'][0]) + sdev_field = np.append(sdev_field,stats['sdev'][0]) + labels_field = np.append(labels_field,'initial') + + pppp = pppp+1 + + + ccoef_field = np.append(ccoef_field,stats['ccoef'][1]) + crmsd_field = np.append(crmsd_field,stats['crmsd'][1]) + sdev_field = np.append(sdev_field,stats['sdev'][1]) + labels_field = np.append(labels_field,year + '-' + field) + + + yy = yy + 0.02 + + plt.text(0.4,yy,field+' rmse:'+str(rmse_field)[0:4]+' ubrmse:'+str(ubrmse_field)[0:4], color=color) + + + rf.append(rmse_field) + bf.append(bias_field) + uf.append(ubrmse_field) + + + ax.set_xlim(0,0.5) + ax.set_ylim(0,0.5) + + + x = [0, 1] + y = [0, 1] + ax.plot(x,y) + plt.ylabel('SM model', fontsize=20) + plt.xlabel('SM insitu', fontsize=20) + + ax.xaxis.set_tick_params(labelsize=20) + ax.yaxis.set_tick_params(labelsize=20) + + rmse = rmse_prediction(insitu_all,mean_all) + bias = bias_prediction(insitu_all,mean_all) + ubrmse = ubrmse_prediction(rmse,bias) + + yy = yy +0.02 + + plt.text(0.4,yy,'rmse all:'+str(rmse_field)[0:5]+' ubrmse all:'+str(ubrmse_field)[0:5], color='black') + + if not os.path.exists('/media/tweiss/Work/Paper3_down/'+passes+'/scatterplot'): + os.makedirs('/media/tweiss/Work/Paper3_down/'+passes+'/scatterplot') + + plt.savefig('/media/tweiss/Work/Paper3_down/'+passes+'/scatterplot/'+year+'_v2',bbox_inches='tight') + plt.close() + + # 
self.plot_taylor(ccoef, crmsd, sdev, labels, passes, year) + # self.plot_taylor(ccoef_field, crmsd_field, sdev_field, labels_field, passes, year, name_ex='_v4_lower'+str(bbch_value)) + # self.plot_taylor(ccoef_field, crmsd_field, sdev_field, labels_field, passes, year, name_ex='_v4_higher'+str(bbch_value)) + # pdb.set_trace() + # self.plot_taylor(ccoef_field, crmsd_field, sdev_field, labels_field, passes, year, name_ex='_v4') + + + def state_mask(self,year,field,esu,state_mask): + + if year == '2017': + if field == '515' and esu == 'high': + mask_value = 1 + state_mask = state_mask==mask_value + elif field == '515' and esu == 'med': + mask_value = 2 + state_mask = state_mask==mask_value + elif field == '515' and esu == 'low': + mask_value = 3 + state_mask = state_mask==mask_value + elif field == '508' and esu == 'high': + mask_value = 4 + state_mask = state_mask==mask_value + elif field == '508' and esu == 'med': + mask_value = 5 + state_mask = state_mask==mask_value + elif field == '508' and esu == 'low': + mask_value = 6 + state_mask = state_mask==mask_value + elif field == '542' and esu == 'high': + mask_value = 7 + state_mask = state_mask==mask_value + elif field == '542' and esu == 'med': + mask_value = 8 + state_mask = state_mask==mask_value + elif field == '542' and esu == 'low': + mask_value = 9 + state_mask = state_mask==mask_value + elif field == '319' and esu == 'high': + mask_value = 10 + state_mask = state_mask==mask_value + elif field == '319' and esu == 'med': + mask_value = 11 + state_mask = state_mask==mask_value + elif field == '319' and esu == 'low': + mask_value = 12 + state_mask = state_mask==mask_value + elif field == '301' and esu == 'high': + mask_value = 13 + state_mask = state_mask==mask_value + elif field == '301' and esu == 'med': + mask_value = 14 + state_mask = state_mask==mask_value + elif field == '301' and esu == 'low': + mask_value = 15 + state_mask = state_mask==mask_value + else: + state_mask = 0 + elif year == '2018': + if field 
== '317' and esu == 'high': + mask_value = 4 + state_mask = state_mask==mask_value + elif field == '317' and esu == 'med': + mask_value = 6 + state_mask = state_mask==mask_value + elif field == '317' and esu == 'low': + mask_value = 5 + state_mask = state_mask==mask_value + elif field == '410' and esu == 'high': + mask_value = 7 + state_mask = state_mask==mask_value + elif field == '410' and esu == 'med': + mask_value = 9 + state_mask = state_mask==mask_value + elif field == '410' and esu == 'low': + mask_value = 8 + state_mask = state_mask==mask_value + elif field == '508' and esu == 'high': + mask_value = 10 + state_mask = state_mask==mask_value + elif field == '508' and esu == 'med': + mask_value = 12 + state_mask = state_mask==mask_value + elif field == '508' and esu == 'low': + mask_value = 11 + state_mask = state_mask==mask_value + elif field == '525' and esu == 'high': + mask_value = 13 + state_mask = state_mask==mask_value + elif field == '525' and esu == 'med': + mask_value = 15 + state_mask = state_mask==mask_value + elif field == '525' and esu == 'low': + mask_value = 14 + state_mask = state_mask==mask_value + else: + state_mask = 0 + else: + state_mask = 0 + + return state_mask + + def state_mask2(self,year,field,state_mask): + + if year == '2017': + if field == '515': + mask_value = 4 + state_mask = state_mask==mask_value + elif field == '508': + mask_value = 27 + state_mask = state_mask==mask_value + elif field == '542': + mask_value = 8 + state_mask = state_mask==mask_value + elif field == '319': + mask_value = 67 + state_mask = state_mask==mask_value + elif field == '301': + mask_value = 87 + state_mask = state_mask==mask_value + else: + state_mask = 0 + elif year == '2018': + if field == '317': + mask_value = 65 + state_mask = state_mask==mask_value + elif field == '410': + mask_value = 113 + state_mask = state_mask==mask_value + elif field == '508': + mask_value = 27 + state_mask = state_mask==mask_value + elif field == '525': + mask_value = 30 + 
state_mask = state_mask==mask_value + else: + state_mask = 0 + else: + state_mask = 0 + + return state_mask + + + def plot_scat(self,a,b,year,passes,c,d,crop,bin_a=40,bin_b=25): + """ """ + + if year == '2018': + bin_a=42 + bin_b=32 + fig, ax = plt.subplots(figsize=(20, 15)) + hhh = ax.hist2d(a, b, bins=(bin_a, bin_b), cmap=plt.cm.jet, cmin=1, vmax=8) + ax.set_xlim(0.0,0.5) + ax.set_ylim(0.0,0.5) + x = [0, 1] + y = [0, 1] + ax.plot(x,y) + plt.ylabel('SM model [m³/m³]',fontsize=20) + plt.xlabel('SM insitu [m³/m³]',fontsize=20) + ax.xaxis.set_tick_params(labelsize=20) + ax.yaxis.set_tick_params(labelsize=20) + # plt.title('bias corrected') + plt.rcParams.update({'font.size': 20}) + plt.colorbar(hhh[3],ax=ax).set_label(label='Density Distribution',size=20) + + rmse_field = rmse_prediction(a,c) + # bias_field = bias_prediction(a,c) + ubrmse_field = rmse_prediction(a,b) + slope, intercept, r_value, p_value, std_err = linregress (a,c) + + plt.text(0.02,0.48,'RMSE: '+str(rmse_field)[0:5]+' m³/m³', fontsize=20) + plt.text(0.02,0.46,'ubRMSE: '+str(ubrmse_field)[0:5]+' m³/m³', fontsize=20) + plt.text(0.02,0.44,'Min bias: '+str(np.min(d))[0:5]+' m³/m³'+'; Max bias: '+str(np.max(d))[0:4]+' m³/m³', fontsize=20) + plt.text(0.02,0.42,'Min model: '+str(min(b))[0:4]+' m³/m³'+'; Max model: '+str(max(b))[0:4]+' m³/m³', fontsize=20) + plt.text(0.02,0.4,'Min insitu: '+str(min(a))[0:4]+' m³/m³'+'; Max insitu: '+str(max(a))[0:4]+' m³/m³', fontsize=20) + plt.text(0.02,0.38,'R²: '+str(r_value)[0:5], fontsize=20) + + plt.savefig('/media/tweiss/Work/Paper3_down/'+passes+'/scatterplot/scatterplot_bias_'+year+crop,bbox_inches='tight') + plt.close() + + def plot_taylor(self, ccoef, crmsd, sdev, labels, passes, year, name_ex=''): + + ### Taylor plot + #------------------ + # Info: Made some changes within skill_metrics package (rename of RMSD to ubRMSE!) 
+ + # field_short = ['508_high','508_low','508_med','301_high','301_low','301_med','542_high','542_low','542_med'] + marker = ['P','o','X','s','d','^','v','p','h'] + colors = ['b', 'r', 'y', 'm', 'g', 'y'] + fig, ax = plt.subplots(figsize=(14, 10)) + + # sm.taylor_diagram(sdev,crmsd,ccoef, styleOBS = '-', colOBS = 'r', markerobs = 'o', titleOBS = 'observation') + # y = 0 + # yy = 0 + # for k, kk in enumerate(labels): + + # sm.taylor_diagram(np.array(sdev), np.array(crmsd), np.array(ccoef), alpha = 1.0, markercolor=colors[yy], markerSize=8, markerLabel = labels, markerLabelColor = 'b', markerLegend = 'on', colCOR = 'k', colRMS='k', styleOBS = '-', colOBS = 'r', markerobs = 'o', titleOBS = 'Ref') + # plt.scatter(crmsd[k],ccoef[k],s=80,c=colors[yy],marker=marker[y]) + sdev[0] = 0.042 + sm.taylor_diagram(sdev[0:6],crmsd[0:6],ccoef[0:6], markerLabel = labels[0:6].tolist(),markerLabelColor = 'r', markerColor = 'r', tickRMS = range(0,60,10), colRMS = 'm', styleRMS = ':', widthRMS = 2.0, titleRMS = 'on', titleRMSDangle = 40.0, colSTD = 'b', styleSTD = '-.', widthSTD = 1.0, titleSTD = 'on', colCOR = 'k', styleCOR = '--', widthCOR = 1.0, titleCOR = 'on', markerSize = 12, markerLegend = 'on') + + sm.taylor_diagram(np.append(sdev[0],sdev[6:]),np.append(crmsd[0],crmsd[6:]),np.append(ccoef[0],ccoef[6:]), overlay = 'on', markerLabel = labels.tolist(), markerColor = 'b', markerLegend = 'on', markerSize = 12) + + # pdb.set_trace() + + # if y == 2: + # y = 0 + # yy = yy+1 + # else: + # y = y+1 + + + + # for kk in canopy_list: + # for kkk in opt_mod: + # fig, ax = plt.subplots(figsize=(8, 6)) + + # s1_vv = df_taylor.filter(like=kk).filter(like=kkk).filter(like='S1_vv').values.flatten() + # model_vv = df_taylor.filter(like=kk).filter(like=kkk).filter(like='biasedmodel_').values.flatten() + # model_vv_ub = df_taylor.filter(like=kk).filter(like=kkk).filter(like='unbiasedmodeldb').values.flatten() + + # s1_vv = 10*np.log10(s1_vv) + # model_vv_ub = model_vv_ub + + # predictions = 
model_vv_ub[~np.isnan(model_vv_ub)] + # targets = s1_vv[~np.isnan(model_vv_ub)] + # predictions = predictions[~np.isnan(targets)] + # targets = targets[~np.isnan(targets)] + + # stats = sm.taylor_statistics(predictions,targets,'data') + + # ccoef = stats['ccoef'][0] + # crmsd = stats['crmsd'][0] + # sdev = stats['sdev'][0] + # label = [''] + # y=0 + # for k in surface_list: + # yy=0 + # for kkkk in field_short: + # s1_vv = df_taylor.filter(like=k).filter(like=kk).filter(like=kkk).filter(like='S1_vv').filter(like=kkkk).values.flatten() + # model_vv = df_taylor.filter(like=k).filter(like=kk).filter(like=kkk).filter(like='biasedmodel_').filter(like=kkkk).values.flatten() + # model_vv_ub = df_taylor.filter(like=k).filter(like=kk).filter(like=kkk).filter(like='unbiasedmodeldb').filter(like=kkkk).values.flatten() + + # s1_vv = 10*np.log10(s1_vv) + # model_vv_ub = model_vv_ub + + # predictions = model_vv_ub[~np.isnan(model_vv_ub)] + # targets = s1_vv[~np.isnan(model_vv_ub)] + # predictions = predictions[~np.isnan(targets)] + # targets = targets[~np.isnan(targets)] + + # stats = sm.taylor_statistics(predictions,targets,'data') + # plt.scatter(stats['crmsd'][1],stats['ccoef'][1],s=80,c=colors[y],marker=marker[yy]) + # ccoef = np.append(ccoef,stats['ccoef'][1]) + # crmsd = np.append(crmsd,stats['crmsd'][1]) + # sdev = np.append(sdev,stats['sdev'][1]) + # if kkkk == 'I2EM': + # label.append('IEM_B') + # elif kkkk == 'WaterCloud': + # label.append('WCM') + # else: + # label.append(kkkk) + + # yy=yy+1 + + # y=y+1 + + pdb.set_trace() + legend_elements = [Line2D([0], [0], color='w', lw=4, label=labels[1]+' wheat', marker='P',markerfacecolor='r', markerSize=12), Line2D([0], [0], color='w', lw=4, label=labels[2]+' maize', marker='o',markerfacecolor='r', markerSize=12), Line2D([0], [0], color='w', lw=4, label=labels[3]+' wheat', marker='X',markerfacecolor='r', markerSize=12), Line2D([0], [0], color='w', lw=4, label=labels[4]+' wheat', marker='s',markerfacecolor='r', markerSize=12), 
Line2D([0], [0], color='w', lw=4, label=labels[5]+' maize', marker='d',markerfacecolor='r', markerSize=12), Line2D([0], [0], color='w', lw=4, label=labels[6]+' wheat', marker='P',markerfacecolor='b', markerSize=12), Line2D([0], [0], color='w', lw=4, label=labels[7]+' maize', marker='o',markerfacecolor='b', markerSize=12), Line2D([0], [0], color='w', lw=4, label=labels[8]+' wheat', marker='X',markerfacecolor='b', markerSize=12), Line2D([0], [0], color='w', lw=4, label=labels[9]+' maize', marker='s',markerfacecolor='b', markerSize=12)] + + # legend_elements2 = [mpatches.Patch(color=colors[0], label=surface_list[0]),mpatches.Patch(color=colors[1], label=surface_list[1]),mpatches.Patch(color=colors[2], label=surface_list[2]),mpatches.Patch(color=colors[3], label=surface_list[3]),mpatches.Patch(color=colors[4], label='IEM_B')] + + leg = ax.legend(handles=legend_elements, prop={'size': 20}, bbox_to_anchor=(0.78, 0.37, 0.6, 0.8)) + # leg1 = ax.legend(handles=legend_elements2, prop={'size': 14},loc='lower left') + # ax.add_artist(leg) + # plt.grid(linestyle='dotted') + # plt.xlabel('ubRMSE',fontsize=16) + # plt.ylabel('$R^2$', fontsize=16) + # plt.xlim(1.35,2.75) + # plt.ylim(0.31,0.85) + # plt.tick_params(labelsize=17) + plt.savefig('/media/tweiss/Work/Paper3_down/'+passes+'/taylor/taylor_'+year+name_ex+'.png') + plt.close() + + diff --git a/kaska/paper3_plot_single_vs_multi_speckle_filter.py b/kaska/paper3_plot_single_vs_multi_speckle_filter.py new file mode 100644 index 0000000..72cba9a --- /dev/null +++ b/kaska/paper3_plot_single_vs_multi_speckle_filter.py @@ -0,0 +1,96 @@ +import numpy as np +import pdb +from osgeo import gdal +import matplotlib.pyplot as plt +from z_helper import * +import datetime +import seaborn as sns +from matplotlib.colors import ListedColormap + + +""" Inspection of processed data for Paper 3 """ + + +years = ['2017','2018'] +# versions = ['_multi', '_single'] + + +def mask_fields(data,field,state_mask): + if field == 301: + mask_value = 87 + 
elif field == 319: + mask_value = 67 + elif field == 542: + mask_value = 8 + elif field == 508: + mask_value = 27 + elif field == 515: + mask_value = 4 + elif field == 317: + mask_value = 65 + elif field == 410: + mask_value = 113 + elif field == 525: + mask_value = 30 + else: + print("field not found") + + mask = state_mask == mask_value + xxx = np.copy(data) + xxx[:,~mask]=np.nan + + pos = np.argwhere(np.isfinite(xxx[0])) + x1 = np.min(pos[:,0]) + x2 = np.max(pos[:,0]) + y1 = np.min(pos[:,1]) + y2 = np.max(pos[:,1]) + + field_data = xxx[:,x1:x2,y1:y2] + return field_data + + +# multi temporal speckle vs spatial speckle filter + +param = ['sm', 'vwc', 'b', 'rms'] +ymin = [0.1, 0, 0, 0.005] +ymax = [0.4, 5, 0.9, 0.03] + +for i, par in enumerate(param): + + for year in years: + g = gdal.Open('/media/tweiss/Work/Paper3_down/GIS/'+year+'_esu_field_buffer_30.tif') + state_mask = g.ReadAsArray().astype(np.int) + + sm_multi = np.load('/media/tweiss/Work/Paper3_plot/all'+year+'_multi_'+par+'.npy') + sm_single = np.load('/media/tweiss/Work/Paper3_plot/all'+year+'_single_'+par+'.npy') + time = np.load('/media/tweiss/Work/Paper3_down/2017/'+year+'_multitimes.npy',allow_pickle=True) + + if year == '2017': + fields = [301,319,542,508,515] + if year == '2018': + fields = [317,410,525,508] + + for field in fields: + + multi = mask_fields(sm_multi,field,state_mask) + single = mask_fields(sm_single,field,state_mask) + + plt.plot(time,np.nanmean(multi, axis=(1,2)),label='multi') + plt.plot(time,np.nanmean(single, axis=(1,2)),label='single') + plt.title(par+' mean Field'+str(field)+' '+str(year)) + plt.legend() + plt.savefig('/media/tweiss/Work/Paper3_down/analysis_b/speckle_temp_vs_spatial/'+par+'/mean_field_'+str(field)+'_'+str(year), bbox_inches='tight') + plt.close() + + for t in np.arange(len(time)): + + f, ax = plt.subplots(1,2) + im1 = ax[0].imshow(multi[t],vmin=ymin[i], vmax=ymax[i], cmap='viridis_r', aspect='auto') + ax[0].set_title('multi temporal') + im2 = 
ax[1].imshow(single[t],vmin=ymin[i], vmax=ymax[i], cmap='viridis_r', aspect='auto') + ax[1].set_title('spatial') + f.subplots_adjust(right=0.85) + cbar_ax = f.add_axes([0.88, 0.15, 0.04, 0.7]) + f.colorbar(im2, cax=cbar_ax) + plt.savefig('/media/tweiss/Work/Paper3_down/analysis_b/speckle_temp_vs_spatial/'+par+'/field_'+str(field)+'_'+str(time[t])[:10], bbox_inches='tight') + plt.close() diff --git a/kaska/paper3_plotting.py b/kaska/paper3_plotting.py new file mode 100644 index 0000000..bee822a --- /dev/null +++ b/kaska/paper3_plotting.py @@ -0,0 +1,395 @@ +import numpy as np +import pdb +from osgeo import gdal +import matplotlib.pyplot as plt +from z_helper import * +import datetime +import seaborn as sns +from matplotlib.colors import ListedColormap +from pandas.plotting import register_matplotlib_converters +from osgeo.osr import SpatialReference, CoordinateTransformation +import pyproj + +class plot_paper_3(object): + + def __init__(self, years, esus, passes,time_contrainst): + + """ + time_contrainst = ['no'] + """ + + if not os.path.exists('/media/tweiss/Work/Paper3_down/'+passes+'/rgb'): + os.makedirs('/media/tweiss/Work/Paper3_down/'+passes+'/rgb') + + if not os.path.exists('/media/tweiss/Work/Paper3_down/'+passes+'/field'): + os.makedirs('/media/tweiss/Work/Paper3_down/'+passes+'/field') + + if not os.path.exists('/media/tweiss/Work/Paper3_down/'+passes+'/spatial_calculations'): + os.makedirs('/media/tweiss/Work/Paper3_down/'+passes+'/spatial_calculations') + if not os.path.exists('/media/tweiss/Work/Paper3_down/'+passes+'/boxplot'): + os.makedirs('/media/tweiss/Work/Paper3_down/'+passes+'/boxplot') + + plt.rcParams["figure.figsize"] = (20,15) + + self.plot_model_param(years,esus,passes,time_contrainst) + + def plot_model_param(self,years,esus,passes,time_contrainst): + """ + plot model output sm, vwc, b, rms + plot model input vv, sm_api, vwc + + """ + + param = ['sm', 'vwc', 'b', 'rms', 'input_vv', 'input_sm_api', 'input_vwc'] + ymin_mean = [0.2, 0, 0, 
0.005, -5, 0.23, 0] + ymax_mean = [0.3, 5, 0.6, 0.03, -16, 0.27, 5] + ymin_std = [0.0, 0, 0.1, 0.0, None, 0.0, 0] + ymax_std = [0.25, 3, 0.25, 1e-16, None, 0.25, 3] + ymin_var = [0, None, None, None, None, 0, None] + ymax_var = [0.4, None, None, None, None, 0.8, None] + + for i, par in enumerate(param): + + for year in years: + + if year == '2017': + fields = [0,301,319,542,508,515] + if year == '2018': + fields = [0,317,410,525,508] + + for time_con in time_contrainst: + + for field in fields: + + + + g = gdal.Open('/media/tweiss/Work/Paper3_down/GIS/'+year+'_esu_field_buffer_30.tif') + state_mask = g.ReadAsArray().astype(np.int) + g = gdal.Open('/media/tweiss/Work/Paper3_down/GIS/clc_class2.tif') + state_mask_2 = g.ReadAsArray().astype(np.int) + + var_multi = np.load('/media/tweiss/Work/Paper3_down/'+passes+'/'+year+'_multi_'+par+'.npy') + + time = np.load('/media/tweiss/Work/Paper3_down/'+passes+'/'+year+'_multi_times.npy',allow_pickle=True) + + if time_con == 'yes': + m = time < datetime.datetime(int(year),7,15) + var_multi = var_multi[m] + time = time[m] + name_ex = year+'0715' + else: + name_ex = '' + pass + + if field > 0.: + var_multi = self.mask_fields(var_multi,field,state_mask) + # for t, tt in enumerate(time): + # if par == 'input_vv': + # self.plot(10*np.log10(var_multi[t]), vmin=ymin_mean[i], vmax=ymax_mean[i], name='field/'+par+'_'+str(field)+'_'+str(tt)[:10], mask=state_mask_2,par=par, passes=passes) + # else: + # self.plot(var_multi[t], vmin=ymin_mean[i], vmax=ymax_mean[i], name='field/'+par+'_'+str(field)+'_'+str(tt)[:10], mask=state_mask_2,par=par, passes=passes) + + if par == 'sm': + file = '/media/tweiss/Work/z_final_mni_data_2017/new_in_situ_s1multi_buffer_100_'+year+'_paper3.csv' + + data = pd.read_csv(file,header=[0,1],index_col=1) + + data_field = data.filter(like=str(field)).filter(like='SM') + data_field.index = pd.to_datetime(data_field.index) + sm_insitu = data_field.mean(axis=1).values.flatten() + + date = data_field.index + + time2 = 
pd.to_datetime(time) + time2 = time2.strftime('%Y-%m-%d') + date2 = date.strftime('%Y-%m-%d') + mask_time = np.isin(time2,date2) + times = pd.to_datetime(date2) + + var_api = np.load('/media/tweiss/Work/Paper3_down/'+passes+'/'+year+'_multi_'+'input_sm_api'+'.npy') + var_api = self.mask_fields(var_api,field,state_mask) + + sm = self.extraction_xxx(var_multi,state_mask,mask_time) + sm_api = self.extraction_xxx(var_api,state_mask,mask_time) + + if year == '2017': + meteo = pd.read_csv('/media/tweiss/Work/Paper3_down/GIS/Eichenried_01012017_31122017_hourly.csv', sep=';', decimal=',') + meteo2 = meteo.stack().str.replace(',','.').unstack() + meteo2['date'] = pd.to_datetime(meteo2['Tag']+' '+meteo2['Stunde']) + meteo2['SUM']= pd.to_numeric(meteo2['SUM_NN050'],errors='coerce') + s = meteo2.resample('d', on='date')['SUM'].sum() + else: + s = None + + + + self.boxplot2(sm,par,field,year,times,passes,sm_api,sm_insitu,s) + else: + self.boxplot(var_multi,par,field,year,time,passes) + else: + pass + + value_mean, value_std, value_var = calc_pix(var_multi) + + if par == 'input_vv': + value_mean = 10*np.log10(value_mean) + value_std = 10*np.log10(value_std) + value_var = 10*np.log10(value_var) + + self.plot_rgb(var_multi[1],var_multi[20],var_multi[40],mask=state_mask_2,name='rgb/rgb_'+year+'_'+str(field),passes=passes) + + self.plot_rgb(var_multi[0],var_multi[int(len(var_multi)/2.)],var_multi[-1],mask=state_mask_2,name='rgb/rgb_bme'+year+'_'+str(field),passes=passes) + + self.plot_rgb(var_multi[1],var_multi[45],var_multi[85],mask=state_mask_2,name='rgb/rgb_0323_0530_0729'+year+'_'+str(field),passes=passes) + self.plot_rgb(var_multi[1],var_multi[45],var_multi[75],mask=state_mask_2,name='rgb/rgb_0323_0530_0715'+year+'_'+str(field),passes=passes) + self.plot_rgb(var_multi[45],var_multi[75],var_multi[85],mask=state_mask_2,name='rgb/rgb_0530_0715_0729'+year+'_'+str(field),passes=passes) + 
self.plot_rgb(var_multi[55],var_multi[92],var_multi[-1],mask=state_mask_2,name='rgb/rgb_0615_0809_0928'+year+'_'+str(field),passes=passes) + + + self.plot(value_mean, vmin=ymin_mean[i], vmax=ymax_mean[i], name='spatial_calculations/'+par+year+'value_mean'+name_ex+'_'+str(field), mask=state_mask_2, par=par, passes=passes, year=year) + self.plot(value_std, vmin=ymin_std[i], vmax=ymax_std[i], name='spatial_calculations/'+par+year+'value_std'+name_ex+'_'+str(field), mask=state_mask_2, par=par, passes=passes, year=year) + self.plot(value_var, vmin=ymin_var[i], vmax=ymax_var[i], name='spatial_calculations/'+par+year+'value_var'+name_ex+'_'+str(field), mask=state_mask_2, par=par, passes=passes, year=year) + self.plot(value_var, name='spatial_calculations/'+par+year+'value_var2'+name_ex+'_'+str(field), par=par, passes=passes, year=year) + + def extraction_xxx(self,var,state_mask,mask_time): + + xxx = np.copy(var) + xxx = xxx[mask_time,:] + return xxx + + + def plot(self,input,vmin=None,vmax=None,name=None,mask=None,par=None,passes=None,year=None): + + f, ax = plt.subplots(1,1) + + try: + input = np.ma.masked_where(mask == 0.,input) + except IndexError: + pass + + if par == 'input_vv': + cmap = plt.cm.Greys_r + label = 'VV [dB]' + elif par == 'vwc': + cmap = plt.cm.YlGn + label = 'VWC [kg/m$^2$]' + elif par == 'input_vwc': + cmap = plt.cm.RdYlGn + label = 'kg/m$^2$' + else: + cmap = plt.cm.viridis_r + label = 'Soil Moisture [m$^3$/m$^3$]' + cmap.set_bad(color='white') + plt.rcParams['axes.labelsize'] = 20 + + im1 = ax.imshow(input,vmin=vmin, vmax=vmax, cmap=cmap, aspect='auto') + # ax.set_title(name, fontsize=20) + f.subplots_adjust(right=0.85) + cbar_ax = f.add_axes([0.8, 0.15, 0.04, 0.7]) + ticklabs = cbar_ax.get_yticklabels() + cbar_ax.set_yticklabels(ticklabs, fontsize=20) + f.colorbar(im1, cax=cbar_ax, label=label) + + + + if len(input) > 200: + if year == '2017': + + g = gdal.Open('/media/tweiss/Work/Paper3_down/GIS/2017_line2.tif') + elif year == '2018': + g = 
gdal.Open('/media/tweiss/Work/Paper3_down/GIS/2018_line2.tif') + + # # Define the wgs84 system (EPSG 4326) + # epsg4326 = SpatialReference() + # epsg4326.ImportFromEPSG(4326) + + # # Define ... + # epsg32632 = SpatialReference() + # epsg32632.ImportFromEPSG(32632) + + + # rd2latlon = CoordinateTransformation(epsg32632, epsg4326) + # ulx, xres, xskew, uly, yskew, yres = g.GetGeoTransform() + # lrx = ulx + (g.RasterXSize * xres) + # lry = uly + (g.RasterYSize * yres) + # lonlatmin = rd2latlon.TransformPoint(ulx, lry) + # lonlatmax = rd2latlon.TransformPoint(lrx, uly) + # p = pyproj.Proj(proj='utm', zone=32, ellps='WGS84') + # ulx1, lry1 = p(ulx, lry,inverse=True) + # lrx1, uly1 = p(lrx, uly,inverse=True) + + # BBox = ((uly, lry, lrx, ulx)) + + # BBox2 = ((lonlatmax[1], lonlatmin[1], lonlatmax[0], lonlatmin[0])) + + im1 = ax.imshow(input,vmin=vmin, vmax=vmax, cmap=cmap, aspect='auto') + # ax.set_title(name, fontsize=20) + f.subplots_adjust(right=0.85) + cbar_ax = f.add_axes([0.8, 0.15, 0.04, 0.7]) + ticklabs = cbar_ax.get_yticklabels() + cbar_ax.set_yticklabels(ticklabs, fontsize=20) + cbar_ax.set_label(label) + f.colorbar(im1, cax=cbar_ax, label=label) + + + state_mask_3 = g.ReadAsArray().astype(np.int) + state_mask_3=state_mask_3.astype('float') + state_mask_3[state_mask_3==0]=np.nan + state_mask_3[state_mask_3>0]=1 + + + # Let's also design our color mapping: 1s should be plotted in blue, 2s in red, etc... 
+ if year == '2017': + col_dict={1:"magenta"} + elif year == '2018': + col_dict={1:"orange"} + else: + col_dict={1:"black"} + + + # We create a colormar from our list of colors + cm = ListedColormap([col_dict[x] for x in col_dict.keys()]) + + + # cbar_ax = f.add_axes([0.8, 0.15, 0.04, 0.7]) + # cbar = f.colorbar(im2, cax=cbar_ax, ticks=[1,2,3,4,5,6,7,8,9,10,11,12]) + # cbar.ax.set_yticks([0.2, 0.4, 0.6, 0.8]) + # cbar.ax.set_yticklabels(['WTriticale', 'WWeizen', 'WGerste', 'Wiese', 'Wiese', 'Wiese', 'Wiese', 'Mais', 'SHafer', 'Luzerne', 'Gemuese', 'Bohne']) + + + + im2 = ax.imshow(state_mask_3,cmap=cm) + else: + im1 = ax.imshow(input,vmin=vmin, vmax=vmax, cmap=cmap, aspect='auto') + # ax.set_title(name, fontsize=20) + f.subplots_adjust(right=0.85) + cbar_ax = f.add_axes([0.8, 0.15, 0.04, 0.7]) + ticklabs = cbar_ax.get_yticklabels() + cbar_ax.set_yticklabels(ticklabs, fontsize=20) + f.colorbar(im1, cax=cbar_ax, label=label) + + ax.xaxis.set_tick_params(labelsize=20) + ax.yaxis.set_tick_params(labelsize=20) + + plt.setp(ax, xticks=[22*6.1, 2*22*6.1, 3*22*6.1, 4*22*6.1, 5*22*6.1, 6*22*6.1], xticklabels=['11.64°E', '11.66°E', '11.68°E', '11.70°E', '11.72°E', '11.74°E'], yticks=[22*6.12, (22+39)*6.12, (22+39*2)*6.12, (22+39*3)*6.12], yticklabels=['48.30°N', '48.28°N', '48.26°N', '48.24°N']) + ax.set_ylim(len(input),0) + + plt.savefig('/media/tweiss/Work/Paper3_down/'+passes+'/'+name+str(input.mean())[0:5]+'.png', bbox_inches='tight') + # xtick = [] + # ytick = [] + # for t in ax.get_xticks(): + # xx, yy = p(t, ax.get_yticks()[0],inverse=True) + # xtick.append(xx) + # for t in ax.get_yticks(): + # xx, yy = p(ax.get_xticks()[0], t,inverse=True) + # ytick.append(yy) + + # pdb.set_trace() + plt.close() + + + def plot_rgb(self,rrr,ggg,bbb,mask=None,name=None,passes=None): + + + rrr = 10*np.log10(rrr) + ggg = 10*np.log10(ggg) + bbb = 10*np.log10(bbb) + + try: + rrr = np.ma.masked_where(mask == 0.,rrr) + ggg = np.ma.masked_where(mask == 0.,ggg) + bbb = np.ma.masked_where(mask 
== 0.,bbb) + except IndexError: + pass + + OldMin = -20 + OldMax = -5 + NewMin = 0 + NewMax = 255 + + OldRange = (OldMax - OldMin) + NewRange = (NewMax - NewMin) + rrr2 = ((((rrr - OldMin) * NewRange) / OldRange) + NewMin).astype(int) + ggg2 = ((((ggg - OldMin) * NewRange) / OldRange) + NewMin).astype(int) + bbb2 = ((((bbb - OldMin) * NewRange) / OldRange) + NewMin).astype(int) + rgb = np.dstack((rrr2,ggg2,bbb2)) + plt.imshow(rgb) + plt.savefig('/media/tweiss/Work/Paper3_down/'+passes+'/'+name, bbox_inches='tight') + plt.close() + + def boxplot(self,var_multi,par,field,year,time,passes): + xx = var_multi.reshape(var_multi.shape[0], (var_multi.shape[1]*var_multi.shape[2])) + if par == 'input_vv': + sns.boxplot(np.repeat(np.arange(len(time)), len(xx[0])), 10*np.log10(xx.flatten())) + else: + sns.boxplot(np.repeat(np.arange(len(time)), len(xx[0])), xx.flatten()) + ind = list(range(1,len(time)+1)) + time2 = [i.strftime('%d-%m') for i in time] + plt.xticks(ind,time2, rotation=45) + plt.savefig('/media/tweiss/Work/Paper3_down/'+passes+'/boxplot/'+par+str(field)+'_'+str(year), bbox_inches='tight') + plt.close() + + def boxplot2(self,var_multi,par,field,year,time,passes,sm_api,sm_insitu,meteo=None): + f, ax = plt.subplots(1,1) + + xx = var_multi.reshape(var_multi.shape[0], (var_multi.shape[1]*var_multi.shape[2])) + + sns.boxplot(np.repeat(np.arange(len(time)), len(xx[0])), xx.flatten(), color='skyblue') + + sm_api2 = np.nanmean(sm_api,axis=(1,2)) + ax.plot(sm_api2,'r-o',linewidth=4, label='SM Api') + ax.plot(sm_insitu,'b-o',linewidth=4, label = 'SM insitu') + ind = list(range(1,len(time)+1)) + time2 = [i.strftime('%d-%m') for i in time] + plt.xticks(ind,time2, rotation=45) + ax.set_ylabel('SM') + plt.legend() + if year == '2017': + ax2 = ax.twinx() + mask_time2 = np.isin(meteo.index,time) + + ax2.bar(np.arange(len(meteo[mask_time2])),meteo[mask_time2]) + ax2.set_ylim(0,150) + ax2.set_xticks([]) + ax2.set_ylabel('Precipitation') + plt.xticks(ind,time2, rotation=45) + 
plt.savefig('/media/tweiss/Work/Paper3_down/'+passes+'/boxplot/'+par+str(field)+'_'+str(year), bbox_inches='tight') + plt.close() + + + def mask_fields(self,data,field,state_mask): + if field == 301: + mask_value = 87 + elif field == 319: + mask_value = 67 + elif field == 542: + mask_value = 8 + elif field == 508: + mask_value = 27 + elif field == 515: + mask_value = 4 + elif field == 317: + mask_value = 65 + elif field == 410: + mask_value = 113 + elif field == 525: + mask_value = 30 + else: + print("field not found") + + mask = state_mask == mask_value + xxx = np.copy(data) + xxx[:,~mask]=np.nan + + pos = np.argwhere(np.isfinite(xxx[0])) + x1 = np.min(pos[:,0]) + x2 = np.max(pos[:,0]) + y1 = np.min(pos[:,1]) + y2 = np.max(pos[:,1]) + + field_data = xxx[:,x1:x2,y1:y2] + return field_data + + + + diff --git a/kaska/paper3_reproject.py b/kaska/paper3_reproject.py new file mode 100644 index 0000000..a58e4d0 --- /dev/null +++ b/kaska/paper3_reproject.py @@ -0,0 +1,158 @@ +import os +import osr +import gdal +import datetime +import numpy as np +from netCDF4 import Dataset +from scipy.ndimage import label +from utils import reproject_data +from skimage.filters import sobel +from collections import namedtuple +from scipy.optimize import minimize +from scipy.interpolate import interp1d +# from watercloudmodel import cost_function +from watercloudmodel_vwc_rms import cost_function_vwc, ssrt_jac_vwc, ssrt_vwc +from scipy.ndimage.filters import gaussian_filter1d +import pdb +from z_helper import * +import matplotlib.pyplot as plt +from netCDF4 import date2num +import matplotlib +import subprocess + +def reproject_data2(source_img, + target_img=None, + dstSRS=None, + srcSRS=None, + srcNodata=np.nan, + dstNodata=np.nan, + outputType=None, + output_format="MEM", + verbose=False, + xmin=None, + xmax=None, + ymin=None, + ymax=None, + xRes=None, + yRes=None, + xSize=None, + ySize=None, + resample=0, + ): + + """ + A method that uses a source and a target images to + reproject & 
clip the source image to match the extent, + projection and resolution of the target image. + + """ + + outputType = ( + gdal.GDT_Unknown if outputType is None else outputType + ) + if srcNodata is None: + try: + srcNodata = " ".join( + [ + i.split("=")[1] + for i in gdal.Info(source_img).split("\n") + if " NoData" in i + ] + ) + except RuntimeError: + srcNodata = None + # If the output type is intenger and destination nodata is nan + # set it to 0 to avoid warnings + if outputType <= 5 and np.isnan(dstNodata): + dstNodata = 0 + + if srcSRS is not None: + _srcSRS = osr.SpatialReference() + try: + _srcSRS.ImportFromEPSG(int(srcSRS.split(":")[1])) + except: + _srcSRS.ImportFromWkt(srcSRS) + else: + _srcSRS = None + + + if (target_img is None) & (dstSRS is None): + raise IOError( + "Projection should be specified ether from " + + "a file or a projection code." + ) + elif target_img is not None: + try: + g = gdal.Open(target_img) + except RuntimeError: + g = target_img + geo_t = g.GetGeoTransform() + x_size, y_size = g.RasterXSize, g.RasterYSize + + if xRes is None: + xRes = abs(geo_t[1]) + if yRes is None: + yRes = abs(geo_t[5]) + + if xSize is not None: + x_size = 1.0 * xSize * xRes / abs(geo_t[1]) + if ySize is not None: + y_size = 1.0 * ySize * yRes / abs(geo_t[5]) + + xmin, xmax = ( + min(geo_t[0], geo_t[0] + x_size * geo_t[1]), + max(geo_t[0], geo_t[0] + x_size * geo_t[1]), + ) + ymin, ymax = ( + min(geo_t[3], geo_t[3] + y_size * geo_t[5]), + max(geo_t[3], geo_t[3] + y_size * geo_t[5]), + ) + dstSRS = osr.SpatialReference() + raster_wkt = g.GetProjection() + dstSRS.ImportFromWkt(raster_wkt) + gg = gdal.Warp( + "", + source_img, + format=output_format, + outputBounds=[xmin, ymin, xmax, ymax], + dstNodata=dstNodata, + warpOptions=["NUM_THREADS=ALL_CPUS"], + xRes=xRes, + yRes=yRes, + dstSRS=dstSRS, + outputType=outputType, + srcNodata=srcNodata, + resampleAlg=resample, + srcSRS=_srcSRS + ) + + else: + gg = gdal.Warp( + "", + source_img, + format=output_format, + 
outputBounds=[xmin, ymin, xmax, ymax], + xRes=xRes, + yRes=yRes, + dstSRS=dstSRS, + warpOptions=["NUM_THREADS=ALL_CPUS"], + copyMetadata=True, + outputType=outputType, + dstNodata=dstNodata, + srcNodata=srcNodata, + resampleAlg=resample, + srcSRS=_srcSRS + ) + if verbose: + LOG.debug("There are %d bands in this file, use " + + "g.GetRasterBand() to avoid reading the whole file." + % gg.RasterCount + ) + return gg + + + + +mask_default = '/media/tweiss/Work/Paper3_down/clc5_class2xx_2018.tif' +hm = '/media/tweiss/Daten/Subset_COPERNICUS_S2_20180124T101309_20180124T101352_T32UPU.B11_NDWI1.tif' +mask_esu = reproject_data2(hm, output_format="MEM", target_img=mask_default) diff --git a/kaska/paper3_run_area_calculations.py b/kaska/paper3_run_area_calculations.py new file mode 100644 index 0000000..3b9829f --- /dev/null +++ b/kaska/paper3_run_area_calculations.py @@ -0,0 +1,607 @@ +#!/usr/bin/env python + +import os +import osr +import gdal +import datetime +import numpy as np +from netCDF4 import Dataset +from scipy.ndimage import label +from utils import reproject_data +from skimage.filters import sobel +from collections import namedtuple +from scipy.optimize import minimize +from scipy.interpolate import interp1d +# from watercloudmodel import cost_function +from watercloudmodel_vwc_rms import cost_function_vwc, ssrt_jac_vwc, ssrt_vwc +from scipy.ndimage.filters import gaussian_filter1d +import pdb +from z_helper import * +import matplotlib.pyplot as plt +from netCDF4 import date2num +import glob +from paper3_plotting import * +from paper3_plot_scatter import * +from paper3_plot_esu import * +from pandas.plotting import register_matplotlib_converters + + +def ndwi1_mag(ndwi1): + vwc = 13.2*ndwi1**2+1.62*ndwi1 + return vwc + +def ndwi1_cos_maize(ndwi1): + vwc = 9.39*ndwi1+1.26 + return vwc + +def save_to_tif(fname, Array, GeoT): + if os.path.exists(fname): + os.remove(fname) + ds = gdal.GetDriverByName('GTiff').Create(fname, Array.shape[2], Array.shape[1], 
Array.shape[0], gdal.GDT_Float32) + ds.SetGeoTransform(GeoT) + wkt = 'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]' + ds.SetProjection(wkt) + for i, image in enumerate(Array): + # ds.GetRasterBand(i+1).SetMetadata({'date': prior_time[i]}) + ds.GetRasterBand(i+1).WriteArray( image ) + ds.FlushCache() + return fname + +def get_sar(s1_nc_file, version): + s1_data = namedtuple('s1_data', 'time lat lon satellite relorbit orbitdirection ang_name vv_name, vh_name') + data = Dataset(s1_nc_file) + relorbit = data['relorbit'][:] + localIncidenceAngle = data['theta'][:] + satellite = data['satellite'][:] + orbitdirection = data['orbitdirection'][:] + time = data['time'][:] + lat = data['lat'][:] + lon = data['lon'][:] + + vv_name = s1_nc_file.replace('.nc', '_vv'+version+'.tif') + vh_name = s1_nc_file.replace('.nc', '_vh'+version+'.tif') + ang_name = s1_nc_file.replace('.nc', '_ang'+version+'.tif') + + if not os.path.exists(vv_name): + gg = gdal.Open('NETCDF:"%s":sigma0_vv"%s"'%(s1_nc_file,version)) + geo = gg.GetGeoTransform() + sigma0_vv = data['sigma0_vv'+version][:] + save_to_tif(vv_name, sigma0_vv, geo) + + if not os.path.exists(vh_name): + gg = gdal.Open('NETCDF:"%s":sigma0_vh"%s"'%(s1_nc_file,version)) + geo = gg.GetGeoTransform() + sigma0_vh = data['sigma0_vh'+version][:] + save_to_tif(vh_name, sigma0_vh, geo) + + if not os.path.exists(ang_name): + gg = gdal.Open('NETCDF:"%s":theta'%s1_nc_file) + geo = gg.GetGeoTransform() + localIncidenceAngle = data['theta'][:] + save_to_tif(ang_name, localIncidenceAngle, geo) + + return s1_data(time, lat, lon, satellite, relorbit, orbitdirection, ang_name, vv_name, vh_name) + +def get_api(api_nc_file,year): + api_data = namedtuple('api_data', 'time lat lon api') + data = Dataset(api_nc_file) + + xxx = 
date2num(datetime.datetime.strptime(year+'0201', '%Y%m%d'), units ='hours since 2000-01-01 00:00:00', calendar='gregorian') + yyy = date2num(datetime.datetime.strptime(year+'1001', '%Y%m%d'), units ='hours since 2000-01-01 00:00:00', calendar='gregorian') + + time = data['time'][np.where(data['time'][:]==xxx)[0][0]:np.where(data['time'][:]==yyy)[0][0]] + lat = data['lat'][:] + lon = data['lon'][:] + + api_name = api_nc_file.replace('.nc', '_api'+year+'.tif') + + if not os.path.exists(api_name): + gg = gdal.Open('NETCDF:"%s":api'%api_nc_file) + geo = gg.GetGeoTransform() + save_to_tif(api_name, data['api'][np.where(data['time'][:]==xxx)[0][0]:np.where(data['time'][:]==yyy)[0][0],:,:], geo) + + return api_data(time, lat, lon, api_name) + +def read_sar(sar_data, state_mask): + s1_data = namedtuple('s1_data', 'time lat lon satellite relorbit orbitdirection ang vv vh') + ang = reproject_data(sar_data.ang_name, output_format="MEM", target_img=state_mask) + vv = reproject_data(sar_data.vv_name, output_format="MEM", target_img=state_mask) + vh = reproject_data(sar_data.vh_name, output_format="MEM", target_img=state_mask) + + time = [datetime.datetime(1970,1,1) + datetime.timedelta(days=float(i)) for i in sar_data.time] + + return s1_data(time, sar_data.lat, sar_data.lon, sar_data.satellite, sar_data.relorbit, sar_data.orbitdirection, ang, vv, vh) + +def read_vwc(vwc_data, state_mask): + s2_data = namedtuple('s2_vwc', 'time vwc ndwi') + filelist = glob.glob(vwc_data+'*.tif') + filelist.sort() + time = [] + vwc = [] + ndwi = [] + for file in filelist: + g = gdal.Open(file) + ndwi_array = reproject_data(file, output_format="MEM", target_img=state_mask) + ndwi_array = ndwi_array.ReadAsArray() + vwc_array = ndwi1_mag(ndwi_array) + time.append(datetime.datetime.strptime(file.split('/')[-1][14:22], '%Y%m%d')) + vwc.append(vwc_array) + ndwi.append(ndwi_array) + + return s2_data(time, vwc, ndwi) + +def read_api(api_data, state_mask): + s1_data = namedtuple('api_data', 'time lat lon 
api') + + api = reproject_data(api_data.api, output_format="MEM", target_img=state_mask) + time = [datetime.datetime(2000,1,1) + datetime.timedelta(hours=float(i)) for i in api_data.time] + + return s1_data(time, api_data.lat, api_data.lon, api) + + +def inference_preprocessing(s1_data, vwc_data, api_data, state_mask, orbit1=None, orbit2=None): + """Resample S2 smoothed output to match S1 observations + times""" + # Move everything to DoY to simplify interpolation + + sar_inference_data = namedtuple('sar_inference_data', 'time lat lon satellite relorbit orbitdirection ang vv vh vwc api time_mask ndwi') + + + vwc_doys = np.array([ int(i.strftime('%j')) for i in vwc_data.time]) + s1_doys = np.array([ int(i.strftime('%j')) for i in s1_data.time]) + + + time = np.array(s1_data.time) + for jj in range(len(s1_data.time)): + time[jj] = s1_data.time[jj].replace(microsecond=0).replace(second=0).replace(minute=0) + + index=[] + xxx = np.array(api_data.time) + for jj in range(len(time)): + oje = np.where(xxx==time[jj]) + try: + ojet = oje[0][0] + index.append(ojet) + except IndexError: + pass + api_doys = np.array([ int(i.strftime('%j')) for i in np.array(api_data.time)[index]]) + + f = interp1d(vwc_doys, np.array(vwc_data.vwc), axis=0, bounds_error=False) + vwc_s1 = f(s1_doys) + + f = interp1d(vwc_doys, np.array(vwc_data.ndwi), axis=0, bounds_error=False) + ndwi_s1 = f(s1_doys) + + api_s1 = api_data.api.ReadAsArray()[index] + f = interp1d(api_doys, api_s1, axis=0, bounds_error=False) + api_s1 = f(s1_doys) + + if s1_data.time[0].year == 2017: + time_mask = (s1_doys >= 80) & (s1_doys <= 273) + elif s1_data.time[0].year == 2018: + time_mask = (s1_doys >= 80) & (s1_doys <= 273) + else: + print('no time mask') + + if orbit1 != None: + rel_orbit1 = s1_data.relorbit==orbit1 + if orbit2 != None: + rel_orbit2 = s1_data.relorbit==orbit2 + xxx = np.logical_and(rel_orbit1,time_mask) + yyy = np.logical_and(rel_orbit2,time_mask) + time_mask = np.logical_or(xxx,yyy) + + sar_inference_data 
= sar_inference_data(s1_data.time, s1_data.lat, s1_data.lon, + s1_data.satellite, s1_data.relorbit, + s1_data.orbitdirection, s1_data.ang, + s1_data.vv, s1_data.vh, vwc_s1, api_s1, time_mask, ndwi_s1) + + return sar_inference_data + + +def do_one_pixel_field(vv, vh, vwc, vwc_std, theta, time, sm, sm_std, b, b_std, omega, rms, rms_std, orbits, unc): + + ps = [] + vwcs = [] + bs = [] + sms = [] + srms = [] + times = [] + + uorbits = np.unique(orbits) + uorbits = np.array([95]) + for orbit in uorbits: + # for jj in range(len(vv)): + # orbit_mask = orbits == orbit + # orbit_mask = (orbits == 44) | (orbits == 168) + # orbit_mask = (orbits == 95) | (orbits == 117) + orbit_mask = (orbits == 44) | (orbits == 95) | (orbits == 117) | (orbits == 168) + # orbit_mask = (orbits == 168) + # orbit_mask = (orbits == 44) | (orbits == 95) | (orbits == 117) + ovv, ovh, ovwc, ovwc_std, otheta, otime = vv[orbit_mask], vh[orbit_mask], vwc[orbit_mask], vwc_std[orbit_mask], theta[orbit_mask], time[orbit_mask] + osm, osm_std, osb, osb_std = sm[orbit_mask], sm_std[orbit_mask], b[orbit_mask], b_std[orbit_mask] + + + prior_mean = np.concatenate([[0, ], [rms], osm, ovwc, osb]) + prior_unc = np.concatenate([[10., ], [rms_std], osm_std, ovwc_std, osb_std]) + + + x0 = np.concatenate([np.array([omega]), np.array([rms]), osm, ovwc, osb]) + + # bounds for b related to expected curve + xxx = [] + for jjj, jj in enumerate(osb): + if jj <= 0.2: + xxx.append([0.01,osb[jjj]+0.2]) + else: + xxx.append([osb[jjj]-0.2,osb[jjj]+0.2]) + + bounds = ( + [[0.027, 0.027]] # omega + + [[0.005, 0.03]] # s=rms + + [[0.01, 0.7]] * osb.shape[0] # mv + + [[0, 7.5]] * osb.shape[0] # vwc + + xxx #[[0.01, 0.6]] * osb.shape[0] # b + ) + + + data = osb + + gamma = [10, 10] + + retval = minimize(cost_function_vwc, + x0, + args=(ovh, ovv, otheta, gamma, prior_mean, prior_unc, unc, data), + jac=True, + bounds = bounds, + options={"disp": False}) + + posterious_rms = retval.x[1] + posterious_sm = retval.x[2 : 2+len(osb)] + 
posterious_vwc = retval.x[2+len(osb) : 2+2*len(osb)] + posterious_b = retval.x[2+2*len(osb) : 2+3*len(osb)] + + srms.append(posterious_rms) + sms.append(posterious_sm) + vwcs.append(posterious_vwc) + bs.append(posterious_b) + times.append(otime) + ps.append(retval.x[:1]) + + order = np.argsort(np.hstack(times)) + times = np.hstack(times )[order] + vwcs = np.hstack(vwcs )[order] + bs = np.hstack(bs )[order] + sms = np.hstack(sms )[order].real + + return times, vwcs, bs, sms, np.array(srms), np.array(ps), orbit_mask + + + + +def do_inversion(sar_inference_data, state_mask, year=None, version=None, passes=None): + + orbits = sar_inference_data.relorbit[sar_inference_data.time_mask] + uorbits = np.unique(orbits) + + out_shape = sar_inference_data.vwc[sar_inference_data.time_mask].shape + vwc_outputs = np.zeros(out_shape ) + sm_outputs = np.zeros(out_shape ) + b_outputs = np.zeros(out_shape ) + rms_outputs = np.zeros(out_shape ) + + g = gdal.Open(state_mask) + state_mask = g.ReadAsArray().astype(np.int) + # state_mask = state_mask > 0 + state_mask = state_mask >= 0 + + vv_all = sar_inference_data.vv.ReadAsArray()[sar_inference_data.time_mask] + vh_all = sar_inference_data.vh.ReadAsArray()[sar_inference_data.time_mask] + theta_all = sar_inference_data.ang.ReadAsArray()[sar_inference_data.time_mask] + time_all = np.array(sar_inference_data.time)[sar_inference_data.time_mask] + + vwc_all = sar_inference_data.vwc[sar_inference_data.time_mask] + ndwi_all = sar_inference_data.ndwi[sar_inference_data.time_mask] + + ### vwc needs to be changed!!!! NDWI1!!! + vwc_std = vwc_all[:,0,0] + vwc_std[:] = 0.1 + sm_all = sar_inference_data.api[sar_inference_data.time_mask] + sm_all = sm_all / 100. 
+ sm_std = np.copy(sm_all[:,0,0]) + sm_std[:] = 0.2 + + b = np.copy(sm_all[:,0,0]) + b[:] = 0 + b_std = np.copy(sm_all[:,0,0]) + b_std[:] = 0.5 # not used anyway + rms = sm_all[:,0,0] + rms = 0.2 + rms_std = 0.1 # not used anyway + + unc = 1.9 + omega = 0.027 + + sm_retrieved = sm_all * np.nan + + if not os.path.exists('/media/tweiss/Work/Paper3_down/'+passes): + os.makedirs('/media/tweiss/Work/Paper3_down/'+passes) + + + np.save('/media/tweiss/Work/Paper3_down/'+passes+'/'+year+version+'_input_vv.npy', vv_all) + np.save('/media/tweiss/Work/Paper3_down/'+passes+'/'+year+version+'_input_vwc.npy', vwc_all) + np.save('/media/tweiss/Work/Paper3_down/'+passes+'/'+year+version+'_input_sm_api.npy', sm_all) + np.save('/media/tweiss/Work/Paper3_down/'+passes+'/'+year+version+'_input_ndwi.npy', ndwi_all) + np.save('/media/tweiss/Work/Paper3_down/'+passes+'/'+year+version+'_input_theta.npy', theta_all) + + for z in range(len(state_mask)): + print(z) + for zz in range(len(state_mask[0])): + if state_mask[z,zz] == False: + pass + else: + vv = vv_all[:,z,zz] + vh = vh_all[:,z,zz] + theta = theta_all[:,z,zz] + vwc = vwc_all[:,z,zz] + vwc[vwc < 0.01] = 0.02 + + orbits95 = orbits==95 + orbits168 = orbits==168 + orbits44 = orbits==44 + orbits117 = orbits==117 + # orbits44_168 = (orbits == 44) | (orbits == 168) + # b[:] = 0.4 + b[orbits95] = 0.4 + b[orbits117] = 0.4 + b[orbits44] = 0.6 + b[orbits168] = 0.6 + + if passes == 'b_0515': + orbits95[0:np.where(time_all == min(time_all, key=lambda x: abs(x-datetime.datetime(2017,5,15))))[0][0]] = False + orbits117[0:np.where(time_all == min(time_all, key=lambda x: abs(x-datetime.datetime(2017,5,15))))[0][0]] = False + orbits44[0:np.where(time_all == min(time_all, key=lambda x: abs(x-datetime.datetime(2017,5,15))))[0][0]] = False + orbits168[0:np.where(time_all == min(time_all, key=lambda x: abs(x-datetime.datetime(2017,5,15))))[0][0]] = False + else: + orbits95[0:np.argmax(vwc)] = False + orbits117[0:np.argmax(vwc)] = False + 
orbits44[0:np.argmax(vwc)] = False + orbits168[0:np.argmax(vwc)] = False + + if passes == 'b_veg': + norm = (vwc- np.nanmin(vwc)) / (np.nanmax(vwc) - np.nanmin(vwc)) + norm_ref = np.abs(norm-1) + b = b * norm_ref + elif passes == 'normal': + b[orbits95] = 0.1 + b[orbits117] = 0.1 + b[orbits44] = 0.2 + b[orbits168] = 0.2 + else: + norm = (vwc- np.nanmin(vwc)) / (np.nanmax(vwc) - np.nanmin(vwc)) + norm_ref = np.abs(norm-1) + b = b * norm_ref + + if passes == 'unc_15': + unc = 1.5 + elif passes == 'unc_13': + unc = 1.3 + elif passes == 'unc_10': + unc = 1.0 + elif passes == 'unc_05': + unc = 0.5 + elif passes == 'unc_21': + unc = 2.1 + elif passes == 'unc_19': + unc = 1.9 + elif passes == 'unc_25': + unc = 2.5 + else: + unc = 1.0 + + + if passes == 'sm_std_001': + sm_std[:] = 0.01 + elif passes == 'sm_std_003': + sm_std[:] = 0.03 + elif passes == 'sm_std_005': + sm_std[:] = 0.05 + elif passes == 'sm_std_007': + sm_std[:] = 0.07 + elif passes == 'sm_std_010': + sm_std[:] = 0.1 + elif passes == 'sm_std_013': + sm_std[:] = 0.13 + elif passes == 'sm_std_015': + sm_std[:] = 0.15 + elif passes == 'sm_std_017': + sm_std[:] = 0.17 + elif passes == 'sm_std_020': + sm_std[:] = 0.20 + else: + sm_std[:] = 0.2 + + if passes == 'sm_std_001_1': + sm_std[:] = 0.01 + unc = 0.4 + elif passes == 'sm_std_003_1': + sm_std[:] = 0.03 + unc = 0.4 + elif passes == 'sm_std_005_1': + sm_std[:] = 0.05 + unc = 0.4 + elif passes == 'sm_std_007_1': + sm_std[:] = 0.07 + unc = 0.4 + elif passes == 'sm_std_010_1': + sm_std[:] = 0.1 + unc = 0.4 + elif passes == 'sm_std_013_1': + sm_std[:] = 0.13 + unc = 0.4 + elif passes == 'sm_std_015_1': + sm_std[:] = 0.15 + unc = 0.4 + elif passes == 'sm_std_017_1': + sm_std[:] = 0.17 + unc = 0.4 + elif passes == 'sm_std_020_1': + sm_std[:] = 0.20 + unc = 0.4 + else: + pass + + sm = sm_all[:,z,zz] + print(unc) + print(sm_std[0]) + # pdb.set_trace() + times, svwc, sb, sms, srms, ps, orbit_mask = do_one_pixel_field(vv, vh, vwc, vwc_std, theta, time_all, sm, sm_std, b, 
b_std, omega, rms, rms_std, orbits,unc=unc) + + vwc_outputs[:,z,zz] = svwc + sm_outputs[:,z,zz] = sms + b_outputs[:,z,zz] = sb + rms_outputs[:,z,zz] = srms + + np.save('/media/tweiss/Work/Paper3_down/'+passes+'/'+year+version+'_sm'+'.npy', sm_outputs) + np.save('/media/tweiss/Work/Paper3_down/'+passes+'/'+year+version+'_vwc'+'.npy', vwc_outputs) + np.save('/media/tweiss/Work/Paper3_down/'+passes+'/'+year+version+'_b'+'.npy', b_outputs) + np.save('/media/tweiss/Work/Paper3_down/'+passes+'/'+year+version+'_rms'+'.npy', rms_outputs) + np.save('/media/tweiss/Work/Paper3_down/'+passes+'/'+year+version+'_times.npy',times) + + return 'done' + +def save_output(fname, Array, GeoT, projction, time): + if os.path.exists(fname): + os.remove(fname) + ds = gdal.GetDriverByName('GTiff').Create(fname, Array.shape[2], Array.shape[1], Array.shape[0], gdal.GDT_Float32) + ds.SetGeoTransform(GeoT) + ds.SetProjection(projction) + for i, image in enumerate(Array): + ds.GetRasterBand(i+1).SetMetadata({'date': time[i]}) + ds.GetRasterBand(i+1).WriteArray( image ) + ds.FlushCache() + return fname + +def save_ps_output(fname, Array, GeoT, projction, orbit): + if os.path.exists(fname): + os.remove(fname) + ds = gdal.GetDriverByName('GTiff').Create(fname, Array.shape[2], Array.shape[1], Array.shape[0], gdal.GDT_Float32) + ds.SetGeoTransform(GeoT) + ds.SetProjection(projction) + for i, image in enumerate(Array): + ds.GetRasterBand(i+1).SetMetadata({'orbit': str(int(orbit[i]))}) + ds.GetRasterBand(i+1).WriteArray( image ) + ds.FlushCache() + return fname + + + +class KaSKASAR(object): + """A class to process Sentinel 1 SAR data using S2 data as + an input""" + + def __init__(self, s1_ncfile, state_mask, s2_wvc, rad_api, year, vv_version, passes, orbit1=None,orbit2=None): + self.s1_ncfile = s1_ncfile + self.state_mask = state_mask + self.s2_wvc = s2_vwc + self.rad_api = rad_api + self.year = year + self.version = version + self.passes = passes + + self.orbit1 = None + self.orbit2 = None + if 
orbit1 != None: + self.orbit1 = orbit1 + if orbit2 != None: + self.orbit2 = orbit2 + + def sentinel1_inversion(self): + sar = get_sar(s1_ncfile, version) + s1_data = read_sar(sar, self.state_mask) + + vwc_data = read_vwc(s2_vwc, self.state_mask) + + api = get_api(rad_api,year) + api_data = read_api(api, self.state_mask) + + sar_inference_data = inference_preprocessing(s1_data, vwc_data, api_data, self.state_mask,self.orbit1,self.orbit2) + + + xxx = do_inversion(sar_inference_data, self.state_mask, year, version, passes) + + # gg = gdal.Open('NETCDF:"%s":sigma0_vv_multi'%self.s1_ncfile) + # geo = gg.GetGeoTransform() + + # projction = 'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]' + + # time = [i.strftime('%Y-%m-%d') for i in np.array(sar_inference_data.time)[sar_inference_data.time_mask]] + + # sm_name = self.s1_ncfile.replace('.nc', '_sar_sm.tif') + # sr_name = self.s1_ncfile.replace('.nc', '_sar_sr.tif') + # lai_name = self.s1_ncfile.replace('.nc', '_sar_lai.tif') + + # save_output(sm_name, sm_outputs, geo, projction, time) + # save_output(sr_name, sr_outputs, geo, projction, time) + # save_output(lai_name, lai_outputs, geo, projction, time) + + + + +if __name__ == '__main__': + + + years = ['2017','2018'] + # years = ['2017'] + versions = ['_multi', '_single'] + versions = ['_multi'] + + esus = ['high', 'med', 'low'] + + esu_size_tiff = '_ESU_buffer_100.tif' # buffer around ESU 100, 50, 30 etc + + time_contrainst = ['no'] # if yes time period march to july will be investigated + + pas = ['b_veg'] + # pas = ['analysis'] + # pas = ['b_0515'] + pas = ['unc_15','unc_13','unc_10'] + pas = ['analysis','b_veg','unc_15','unc_13','unc_10','unc_25', 'unc_05', 'unc_19'] + pas = 
['sm_std_001','sm_std_003','sm_std_007','sm_std_010','sm_std_013','sm_std_015','sm_std_017','sm_std_020',] + pas = ['sm_std_010'] + pas = ['b_veg'] + + pas = ['sm_std_007', 'sm_std_007_1','sm_std_010', 'sm_std_010_1','sm_std_013','sm_std_013_1','sm_std_001','sm_std_003','sm_std_015','sm_std_017','sm_std_020'] + + pas = ['sm_std_013'] + + start = datetime.datetime.now() + for passes in pas: + + for year in years: + for version in versions: + s1_ncfile = '/media/tweiss/Work/Paper3_down/data/MNI_'+year+'_new_final_paper3.nc' + state_mask = '/media/tweiss/Work/Paper3_down/GIS/clc_class2.tif' + # state_mask = '/media/tweiss/Work/Paper3_down/GIS/'+year+'_ESU_Field_buffer_30.tif' + rad_api = '/media/tweiss/Work/Paper3_down/data/RADOLAN_API_v1.0.0.nc' + + s2_vwc = '/media/tweiss/Work/Paper3_down/data/'+year+'/tif1/' + + # sarsar = KaSKASAR(s1_ncfile, state_mask, s2_vwc, rad_api, year, version, passes) + + # sarsar.sentinel1_inversion() + plot1 = datetime.datetime.now() + # plot_scatter(years, esus, passes, esu_size_tiff) + plot2 = datetime.datetime.now() + # plot_paper_3(years, esus, passes,time_contrainst) + plot3 = datetime.datetime.now() + plot_esu(years, esus, passes, esu_size_tiff) + + end = datetime.datetime.now() + print('start:'+str(start)) + print('start plot 1:'+str(plot1)) + print('start plot 2:'+str(plot2)) + print('start plot 3:'+str(plot3)) + print('end:'+str(end)) + +pdb.set_trace() diff --git a/kaska/paper3_run_area_calculations_agvolution.py b/kaska/paper3_run_area_calculations_agvolution.py new file mode 100644 index 0000000..0f448b3 --- /dev/null +++ b/kaska/paper3_run_area_calculations_agvolution.py @@ -0,0 +1,495 @@ +#!/usr/bin/env python + +import os +# import osr +from osgeo import gdal +import datetime +import numpy as np +from netCDF4 import Dataset +from scipy.ndimage import label +from utils import reproject_data +from skimage.filters import sobel +from collections import namedtuple +from scipy.optimize import minimize +from scipy.interpolate 
import interp1d +# from watercloudmodel import cost_function +from watercloudmodel_vwc_rms import cost_function_vwc, ssrt_jac_vwc, ssrt_vwc +from scipy.ndimage.filters import gaussian_filter1d +import pdb +from z_helper import * +import matplotlib.pyplot as plt +from netCDF4 import date2num +from netCDF4 import num2date +import glob +from paper3_plotting import * +from paper3_plot_scatter import * +from paper3_plot_esu import * +from pandas.plotting import register_matplotlib_converters +from agv_plot_input_output import * +from sm_helper_data_preparation import get_sm_input, inference_preprocessing +from sm_run_SenSARP import run_SenSARP + +def get_api_folder(api_folder): + + filelist = glob.glob(api_folder+'**/**/*.nc', recursive = True) + filelist.sort() + + for file in filelist: + data = Dataset(file) + api_name = file.replace('.nc', '.tif') + + if not os.path.exists(api_name): + gg = gdal.Open('NETCDF:"%s":ssm'%file) + geo = gg.GetGeoTransform() + save_to_tif(api_name, data['ssm'], geo) +def read_api_ssm(api_folder, state_mask): + api_ssm = namedtuple('api_data', 'time api') + filelist = glob.glob(api_folder+'**/*.tif', recursive = True) + filelist.sort() + + time = [] + api = [] + + for file in filelist: + g = gdal.Open(file) + ssm_array = reproject_data(file, output_format="MEM", target_img=state_mask) + ssm_array = ssm_array.ReadAsArray() + porosity = 0.45 + ssm_array_absolute = ssm_array*porosity + # ssm_array_absolute = ssm_array/100 * porosity + + time.append(datetime.datetime.strptime(file.split('/')[-1][13:21], '%Y%m%d')) + api.append(ssm_array_absolute) + + return api_ssm(time, api) + + +def do_one_pixel_field(vv, vh, vwc, vwc_std, theta, time, sm, sm_std, b, b_std, omega, rms, rms_std, orbits, unc): + + ps = [] + vwcs = [] + bs = [] + sms = [] + srms = [] + times = [] + + uorbits = np.unique(orbits) + uorbits = np.array([95]) + for orbit in uorbits: + # for jj in range(len(vv)): + # orbit_mask = orbits == orbit + # orbit_mask = (orbits == 44) | 
(orbits == 168) + # orbit_mask = (orbits == 95) | (orbits == 117) + orbit_mask = (orbits == 44) | (orbits == 95) | (orbits == 117) | (orbits == 168) + orbit_mask = (orbits == 146) | (orbits == 168) | (orbits == 44) | (orbits == 95) + # orbit_mask = (orbits == 168) + # orbit_mask = (orbits == 44) | (orbits == 95) | (orbits == 117) + ovv, ovh, ovwc, ovwc_std, otheta, otime = vv[orbit_mask], vh[orbit_mask], vwc[orbit_mask], vwc_std[orbit_mask], theta[orbit_mask], time[orbit_mask] + osm, osm_std, osb, osb_std = sm[orbit_mask], sm_std[orbit_mask], b[orbit_mask], b_std[orbit_mask] + + + prior_mean = np.concatenate([[0, ], [rms], osm, ovwc, osb]) + prior_unc = np.concatenate([[10., ], [rms_std], osm_std, ovwc_std, osb_std]) + + + x0 = np.concatenate([np.array([omega]), np.array([rms]), osm, ovwc, osb]) + + # bounds for b related to expected curve + xxx = [] + for jjj, jj in enumerate(osb): + if jj <= 0.2: + xxx.append([0.01,osb[jjj]+0.2]) + else: + xxx.append([osb[jjj]-0.2,osb[jjj]+0.2]) + + bounds = ( + [[0.027, 0.027]] # omega + + [[0.005, 0.03]] # s=rms + + [[0.01, 0.7]] * osb.shape[0] # mv + + [[0, 7.5]] * osb.shape[0] # vwc + + xxx #[[0.01, 0.6]] * osb.shape[0] # b + ) + + + data = osb + + gamma = [10, 10] + + retval = minimize(cost_function_vwc, + x0, + args=(ovh, ovv, otheta, gamma, prior_mean, prior_unc, unc, data), + jac=True, + bounds = bounds, + options={"disp": False}) + + posterious_rms = retval.x[1] + posterious_sm = retval.x[2 : 2+len(osb)] + posterious_vwc = retval.x[2+len(osb) : 2+2*len(osb)] + posterious_b = retval.x[2+2*len(osb) : 2+3*len(osb)] + + srms.append(posterious_rms) + sms.append(posterious_sm) + vwcs.append(posterious_vwc) + bs.append(posterious_b) + times.append(otime) + ps.append(retval.x[:1]) + + order = np.argsort(np.hstack(times)) + times = np.hstack(times )[order] + vwcs = np.hstack(vwcs )[order] + bs = np.hstack(bs )[order] + sms = np.hstack(sms )[order].real + + return times, vwcs, bs, sms, np.array(srms), np.array(ps), orbit_mask + + 
def do_inversion(sar_inference_data, state_mask, year=None, version=None, passes=None):
    """Pixel-wise soil-moisture retrieval over the whole masked scene.

    For every pixel selected by ``state_mask`` (raster values >= 1) the
    time series of VV/VH backscatter, incidence angle, VWC and API soil
    moisture is inverted with :func:`do_one_pixel_field`.  The raw inputs
    and all retrieved quantities are persisted as ``.npy`` stacks under a
    hard-coded output directory.

    Parameters
    ----------
    sar_inference_data : namedtuple
        Pre-processed stack (see ``inference_preprocessing``) with fields
        ``vv``/``vh``/``ang`` (GDAL datasets), ``vwc``/``ndwi``/``api``
        (arrays), ``relorbit`` and ``time_mask``.
    state_mask : str
        Path to a GDAL-readable raster defining which pixels to invert.
    year, version, passes : str, optional
        Tags used to build the output file names; ``passes`` additionally
        selects the prior/uncertainty configuration (see tables below).

    Returns
    -------
    str
        The literal string ``'done'``; results live on disk.
    """
    out_dir = '/media/AUF/userdata/agvolution/inversion/' + passes
    prefix = out_dir + '/' + year + version

    orbits = sar_inference_data.relorbit[sar_inference_data.time_mask]

    out_shape = sar_inference_data.vwc[sar_inference_data.time_mask].shape
    vwc_outputs = np.zeros(out_shape)
    sm_outputs = np.zeros(out_shape)
    b_outputs = np.zeros(out_shape)
    rms_outputs = np.zeros(out_shape)

    g = gdal.Open(state_mask)
    state_mask = g.ReadAsArray()
    # pixels with class value >= 1 take part in the inversion
    state_mask = state_mask >= 1

    vv_all = sar_inference_data.vv.ReadAsArray()[sar_inference_data.time_mask]
    vh_all = sar_inference_data.vh.ReadAsArray()[sar_inference_data.time_mask]
    theta_all = sar_inference_data.ang.ReadAsArray()[sar_inference_data.time_mask]
    time_all = np.array(sar_inference_data.time)[sar_inference_data.time_mask]

    vwc_all = sar_inference_data.vwc[sar_inference_data.time_mask]
    ndwi_all = sar_inference_data.ndwi[sar_inference_data.time_mask]

    ### vwc needs to be changed!!!! NDWI1!!!
    # BUGFIX: the original took a *view* (vwc_all[:, 0, 0]) and wrote 0.1
    # into it, silently corrupting the VWC time series of pixel (0, 0).
    # Use an explicit copy, matching how sm_std / b / b_std are built below.
    vwc_std = np.copy(vwc_all[:, 0, 0])
    vwc_std[:] = 0.1

    sm_all = sar_inference_data.api[sar_inference_data.time_mask]
    sm_all = sm_all / 100.
    sm_std = np.copy(sm_all[:, 0, 0])
    sm_std[:] = 0.2

    b = np.copy(sm_all[:, 0, 0])
    b[:] = 0
    b_std = np.copy(sm_all[:, 0, 0])
    b_std[:] = 0.5  # not used anyway
    rms = 0.2       # scalar roughness prior (old array alias was dead code)
    rms_std = 0.1   # not used anyway

    omega = 0.027

    if not os.path.exists(out_dir):
        os.makedirs(out_dir)

    # keep the raw inversion inputs next to the results for later analysis
    np.save(prefix + '_input_vv.npy', vv_all)
    np.save(prefix + '_input_vwc.npy', vwc_all)
    np.save(prefix + '_input_sm_api.npy', sm_all)
    np.save(prefix + '_input_ndwi.npy', ndwi_all)
    np.save(prefix + '_input_theta.npy', theta_all)

    # Loop-invariant configuration tables (previously long if/elif chains
    # re-evaluated for every single pixel).  Defaults: unc 1.0, sm_std 0.2.
    unc_table = {'unc_15': 1.5, 'unc_13': 1.3, 'unc_10': 1.0, 'unc_05': 0.5,
                 'unc_21': 2.1, 'unc_19': 1.9, 'unc_25': 2.5}
    sm_std_table = {'sm_std_001': 0.01, 'sm_std_003': 0.03, 'sm_std_005': 0.05,
                    'sm_std_007': 0.07, 'sm_std_010': 0.1, 'sm_std_013': 0.13,
                    'sm_std_015': 0.15, 'sm_std_017': 0.17, 'sm_std_020': 0.20}
    # the "_1" variants additionally tighten the observation uncertainty
    sm_std_unc_table = {key + '_1': val for key, val in sm_std_table.items()}

    for z in range(len(state_mask)):
        print(z)
        for zz in range(len(state_mask[0])):
            if not state_mask[z, zz]:
                continue

            vv = vv_all[:, z, zz]
            vh = vh_all[:, z, zz]
            theta = theta_all[:, z, zz]
            vwc = vwc_all[:, z, zz]
            vwc[vwc < 0.01] = 0.02  # guard against (near-)zero VWC

            # per-orbit masks and orbit-dependent b prior
            orbits95 = orbits == 95
            orbits168 = orbits == 168
            orbits44 = orbits == 44
            orbits117 = orbits == 117
            b[orbits95] = 0.4
            b[orbits117] = 0.4
            b[orbits44] = 0.6
            b[orbits168] = 0.6

            if passes == 'b_0515':
                # blank out everything before the acquisition closest to
                # 15 May 2017 (computed once instead of four times)
                cut = np.where(time_all == min(
                    time_all,
                    key=lambda x: abs(x - datetime.datetime(2017, 5, 15))))[0][0]
            else:
                # blank out everything before the VWC maximum
                cut = np.argmax(vwc)
            orbits95[0:cut] = False
            orbits117[0:cut] = False
            orbits44[0:cut] = False
            orbits168[0:cut] = False

            if passes == 'normal':
                b[orbits95] = 0.1
                b[orbits117] = 0.1
                b[orbits44] = 0.2
                b[orbits168] = 0.2
            else:
                # 'b_veg' and every other pass: scale b with the inverted,
                # normalised VWC curve (the two original branches were
                # identical)
                norm = (vwc - np.nanmin(vwc)) / (np.nanmax(vwc) - np.nanmin(vwc))
                norm_ref = np.abs(norm - 1)
                b = b * norm_ref
            # NOTE: leftover debugging breakpoint (pdb.set_trace()) removed

            unc = unc_table.get(passes, 1.0)
            sm_std[:] = sm_std_table.get(passes, 0.2)
            if passes in sm_std_unc_table:
                sm_std[:] = sm_std_unc_table[passes]
                unc = 0.4

            sm = sm_all[:, z, zz]
            print(unc)
            print(sm_std[0])

            times, svwc, sb, sms, srms, ps, orbit_mask = do_one_pixel_field(
                vv, vh, vwc, vwc_std, theta, time_all, sm, sm_std, b, b_std,
                omega, rms, rms_std, orbits, unc=unc)

            vwc_outputs[:, z, zz] = svwc
            sm_outputs[:, z, zz] = sms
            b_outputs[:, z, zz] = sb
            rms_outputs[:, z, zz] = srms

    np.save(prefix + '_sm.npy', sm_outputs)
    np.save(prefix + '_vwc.npy', vwc_outputs)
    np.save(prefix + '_b.npy', b_outputs)
    np.save(prefix + '_rms.npy', rms_outputs)
    # 'times' stems from the last inverted pixel (identical for every pixel
    # of one orbit selection)
    np.save(prefix + '_times.npy', times)

    return 'done'


def save_output(fname, Array, GeoT, projction, time):
    """Write a (bands, rows, cols) array to a GeoTIFF, tagging each band
    with its acquisition date.

    ``projction`` (sic) is a WKT projection string; the misspelled
    parameter name is kept for backward compatibility with callers.
    Returns the written file name.
    """
    if os.path.exists(fname):
        os.remove(fname)
    ds = gdal.GetDriverByName('GTiff').Create(
        fname, Array.shape[2], Array.shape[1], Array.shape[0], gdal.GDT_Float32)
    ds.SetGeoTransform(GeoT)
    ds.SetProjection(projction)
    for i, image in enumerate(Array):
        band = ds.GetRasterBand(i + 1)
        band.SetMetadata({'date': time[i]})
        band.WriteArray(image)
    ds.FlushCache()
    return fname


def save_ps_output(fname, Array, GeoT, projction, orbit):
    """Write a (bands, rows, cols) array to a GeoTIFF, tagging each band
    with its relative orbit number.

    Same contract as :func:`save_output` but with per-band 'orbit'
    metadata instead of 'date'.  Returns the written file name.
    """
    if os.path.exists(fname):
        os.remove(fname)
    ds = gdal.GetDriverByName('GTiff').Create(
        fname, Array.shape[2], Array.shape[1], Array.shape[0], gdal.GDT_Float32)
    ds.SetGeoTransform(GeoT)
    ds.SetProjection(projction)
    for i, image in enumerate(Array):
        band = ds.GetRasterBand(i + 1)
        band.SetMetadata({'orbit': str(int(orbit[i]))})
        band.WriteArray(image)
    ds.FlushCache()
    return fname


class KaSKASAR(object):
    """A class to process Sentinel 1 SAR data using S2 data as
    an input"""

    def __init__(self, s1_ncfile, state_mask, s2_wvc, rad_api, year, vv_version, passes, orbit1=None, orbit2=None):
        """Store the processing configuration.

        Parameters
        ----------
        s1_ncfile : str
            Path to the pre-processed Sentinel-1 NetCDF stack.
        state_mask : str
            Path to the raster selecting the pixels to invert.
        s2_wvc : str
            Directory containing the Sentinel-2 NDWI/VWC GeoTIFFs.
        rad_api : str
            Path to the soil-moisture prior (API) NetCDF file.
        year, vv_version, passes : str
            Tags selecting the data subset and inversion configuration.
        orbit1, orbit2 : int, optional
            Optional relative-orbit filters.
        """
        self.s1_ncfile = s1_ncfile
        self.state_mask = state_mask
        # BUGFIX: the original read the module-level global ``s2_vwc``
        # instead of the constructor argument.
        self.s2_wvc = s2_wvc
        self.rad_api = rad_api
        self.year = year
        # BUGFIX: the original read the module-level global ``version``
        # instead of the ``vv_version`` argument.
        self.version = vv_version
        self.passes = passes

        self.orbit1 = orbit1
        self.orbit2 = orbit2

    def sentinel1_inversion(self):
        """Run SenSARP pre-processing, assemble the inversion inputs and
        invert the scene with :func:`do_inversion`."""
        # TODO: SenSARP configuration is hard coded; should become
        # constructor parameters.
        path_s1data = '/media/AUF/GG/Geodatenverzeichnis-Uni/Fernerkundung/Satellitenbilder/Sentinel1/'
        output_folder = '/media/AUF/userdata/agvolution_new'
        sample_config_file = os.path.expanduser('~/sar-pre-processing/docs/notebooks/sample_config_file')  # todo: change this location for automation
        name_tag = 'agvolution_new'
        year = '2023'
        lr_lat = 53.49579
        lr_lon = 13.11798
        ul_lat = 53.62974
        ul_lon = 12.87880
        multi_speck = '5'

        run_SenSARP(path_s1data, output_folder, sample_config_file, name_tag,
                    year=year, lr_lat=lr_lat, lr_lon=lr_lon,
                    ul_lat=ul_lat, ul_lon=ul_lon, multi_speck=multi_speck)

        # BUGFIX: use the values stored on the instance instead of relying
        # on same-named module globals being in scope (the method previously
        # only worked when run from this exact script).
        s1_data, vwc_data, api_data = get_sm_input(
            self.s1_ncfile, self.version, self.s2_wvc, self.rad_api,
            self.state_mask, self.year)

        # api = get_api_folder(rad_api)
        # api_data = read_api_ssm(rad_api, self.state_mask)
        output_folder = '/media/tweiss/data/test_data/output'
        # leftover pdb.set_trace() breakpoints removed
        sar_inference_data = inference_preprocessing(s1_data, vwc_data,
                                                     api_data, output_folder)

        xxx = do_inversion(sar_inference_data, self.state_mask, self.year,
                           self.version, self.passes)
        # (the optional GeoTIFF export of sm/sr/lai outputs was fully
        # commented out in the original and is intentionally not reproduced)


if __name__ == '__main__':

    # years = ['2017', '2018']
    year = '2023'
    versions = ['_multi']  # alternative: ['_multi', '_single']

    esus = ['high', 'med', 'low']

    esu_size_tiff = '_ESU_buffer_100.tif'  # buffer around ESU 100, 50, 30 etc

    time_contrainst = ['no']  # if yes time period march to july will be investigated

    # Previously explored configurations (kept for reference):
    #   ['b_veg'], ['analysis'], ['b_0515'],
    #   ['unc_15', 'unc_13', 'unc_10', 'unc_25', 'unc_05', 'unc_19'],
    #   ['sm_std_001' ... 'sm_std_020'] and their '_1' variants.
    pas = ['sm_std_013']

    start = datetime.datetime.now()
    for passes in pas:

        for version in versions:
            s1_ncfile = '/media/tweiss/data/test_data/agvolution.nc'
            # rad_api = '/media/AUF/GG/Geodatenverzeichnis-Uni/Fernerkundung/Satellitenbilder/SSM/'

            state_mask = '/media/tweiss/data/test_data/fields_1.tif'

            rad_api = '/media/tweiss/data/test_data/API_RADOLAN_19768.01021216689_-0.05_6.996020755659563_5.0cm_2022-202306_2023-11-08T07:12:30.nc'

            s2_vwc = '/media/tweiss/data/test_data/tif/'

            sarsar = KaSKASAR(s1_ncfile, state_mask, s2_vwc, rad_api, year, version, passes)
            sarsar.sentinel1_inversion()
            # todo: path right now hard coded in different classes, this need to be changed!!!!
+ path = '/media/AUF/userdata/agvolution/inversion/' + # year = '2017' + # path = '/media/tweiss/data/Arbeit_einordnen/mni/' + + plot_input_output(path, passes,year) + + # plot1 = datetime.datetime.now() + # plot_scatter(years, esus, passes, esu_size_tiff) + # plot2 = datetime.datetime.now() + # plot_paper_3(years, esus, passes,time_contrainst) + # plot3 = datetime.datetime.now() + # plot_esu(years, esus, passes, esu_size_tiff) + + end = datetime.datetime.now() + print('start:'+str(start)) + # print('start plot 1:'+str(plot1)) + # print('start plot 2:'+str(plot2)) + # print('start plot 3:'+str(plot3)) + print('end:'+str(end)) + +pdb.set_trace() diff --git a/kaska/s1_observations.py b/kaska/s1_observations.py index 36b060a..978c8f9 100644 --- a/kaska/s1_observations.py +++ b/kaska/s1_observations.py @@ -24,15 +24,15 @@ layers = [ - "sigma0_vv_norm_multi_db", - "sigma0_vh_norm_multi_db", + "sigma0_vv_norm_multi", + "sigma0_vh_norm_multi", "localIncidenceAngle" ] def get_s1_dates(s1_file): """Gets the dates from a LMU processed netCDF Sentinel 1 file""" - times = [float(s1_file.GetRasterBand(b+1).GetMetadata()['NETCDF_DIM_time']) + times = [float(s1_file.GetRasterBand(b+1).GetMetadata()['NETCDF_DIM_time']) for b in range(s1_file.RasterCount)] times = [dt.datetime(1970,1,1) + dt.timedelta(days=x) for x in times ] LOG.info(f"Sentinel 1 First obs: {times[0].strftime('%Y-%m-%d'):s}") @@ -47,8 +47,8 @@ def __init__( state_mask, chunk=None, time_grid=None, - nc_layers = {"VV": "sigma0_vv_norm_multi_db", - "VH": "sigma0_vh_norm_multi_db", + nc_layers = {"VV": "sigma0_vv_norm_multi", + "VH": "sigma0_vh_norm_multi", "theta": "localIncidenceAngle"} ): self.time_grid = time_grid @@ -64,10 +64,10 @@ def apply_roi(self, ulx, uly, lrx, lry): self.lry = lry width = lrx - ulx height = uly - lry - + self.state_mask = gdal.Translate( "", - self.original_mask, + self.state_mask, srcWin=[ulx, uly, width, abs(height)], format="MEM", ) @@ -77,9 +77,9 @@ def apply_roi(self, ulx, uly, lrx, 
lry): def define_output(self): """Define the output array shapes to be consistent with the state - mask. You get the projection and geotransform, that should be + mask. You get the projection and geotransform, that should be enough to define an ouput dataset that conforms to the state mask. - + Returns ------- tuple @@ -103,32 +103,32 @@ def define_output(self): # new_geoT[0] = new_geoT[0] + self.ulx*new_geoT[1] # new_geoT[3] = new_geoT[3] + self.uly*new_geoT[5] return proj, geoT.tolist(), nx, ny # new_geoT.tolist() - + def _match_to_mask(self): """Matches the observations to the state mask. """ self.s1_data_ptr = {} for layer, layer_name in self.nc_layers.items(): fname = f'NETCDF:"{self.nc_file.as_posix():s}":{layer_name:s}' - self.s1_data_ptr[layer] = reproject_data(fname, output_format="VRT", + self.s1_data_ptr[layer] = reproject_data(fname, output_format="VRT", srcSRS="EPSG:4326", target_img=self.state_mask) s1_dates = get_s1_dates(self.s1_data_ptr[layer]) - self.dates = {x:(i+1) - for i, x in enumerate(s1_dates) - if ( (x >= self.time_grid[0]) and + self.dates = {x:(i+1) + for i, x in enumerate(s1_dates) + if ( (x >= self.time_grid[0]) and (x <= self.time_grid[-1]))} def read_time_series(self, time_grid): """Reads a time series of observations. Uses the time grid to provide a min/max times. 
- + Parameters ---------- time_grid : list of datetimes List of datetimes - + Returns ------- S1data @@ -137,7 +137,7 @@ def read_time_series(self, time_grid): """ early = time_grid[0] late = time_grid[-1] - + sel_dates = [k for k,v in self.dates.items() if (early <= k <= late)] sel_bands = [v for k,v in self.dates.items() @@ -149,7 +149,7 @@ def read_time_series(self, time_grid): for i in sel_bands]) the_obs = S1data(sel_dates, obs['VV'], obs['VH'], obs['theta'], 0.5, 0.5) return the_obs - + if __name__ == "__main__": start_date = dt.datetime(2017, 3, 1) end_date = dt.datetime(2017, 9, 1) diff --git a/kaska/sm_helper_data_preparation.py b/kaska/sm_helper_data_preparation.py new file mode 100644 index 0000000..f0d5fc8 --- /dev/null +++ b/kaska/sm_helper_data_preparation.py @@ -0,0 +1,331 @@ + +import os +import numpy as np +import datetime +from osgeo import gdal +from netCDF4 import Dataset +from netCDF4 import date2num +from collections import namedtuple +from utils import reproject_data +import glob +from scipy.interpolate import interp1d + +def save_to_tif(fname, Array, GeoT): + """ + save array as tif file + + :param fname: str + name of output (tif) file + :param Array: array + contains geographic information + :param GeoT: ??? 
+ GeoTransform information + :return: str + name of output (tif) file + """ + + if os.path.exists(fname): + os.remove(fname) + ds = gdal.GetDriverByName('GTiff').Create(fname, Array.shape[2], Array.shape[1], Array.shape[0], gdal.GDT_Float32) + ds.SetGeoTransform(GeoT) + wkt = 'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]' + ds.SetProjection(wkt) + for i, image in enumerate(Array): + # ds.GetRasterBand(i+1).SetMetadata({'date': prior_time[i]}) + ds.GetRasterBand(i+1).WriteArray( image ) + ds.FlushCache() + return fname + +def get_sar(s1_nc_file, version): + """ + convert self processed netcdf4 file stack of S1 images (SenSARP) to single tif images + + :param s1_nc_file: str + name of netcdf4 file + :param version: str + layer extension based on naming during SenSARP processing + :return: tuple + information about time, lat, lon, satellite, relorbit, orbitdirection, ang_name, vv_name, vh_name + + + Problem: Getting GeoTranformation from netcdf file!!!!!!!!!!!!!!!!!!!!! problem related to version of gdal??? 
+ """ + + s1_data = namedtuple('s1_data', 'time lat lon satellite relorbit orbitdirection ang_name vv_name, vh_name') + data = Dataset(s1_nc_file) + relorbit = data['relorbit'][:] + localIncidenceAngle = data['theta'][:] + satellite = data['satellite'][:] + orbitdirection = data['orbitdirection'][:] + time = data['time'][:] + lat = data['lat'][:] + lon = data['lon'][:] + + vv_name = s1_nc_file.replace('.nc', '_vv'+version+'.tif') + vh_name = s1_nc_file.replace('.nc', '_vh'+version+'.tif') + ang_name = s1_nc_file.replace('.nc', '_ang'+version+'.tif') + + if not os.path.exists(vv_name): + gg = gdal.Open('NETCDF:"%s":sigma0_vv"%s"'%(s1_nc_file,version)) + geo = gg.GetGeoTransform() + geo = (12.878782781795174,0.000124866097153945,0.0,53.62977739815832,0.0,-0.0001248646760142416) + sigma0_vv = data['sigma0_vv'+version][:] + save_to_tif(vv_name, sigma0_vv, geo) + + + if not os.path.exists(vh_name): + gg = gdal.Open('NETCDF:"%s":sigma0_vh"%s"'%(s1_nc_file,version)) + geo = gg.GetGeoTransform() + geo = (12.878782781795174, 0.000124866097153945, 0.0, 53.62977739815832, 0.0, -0.0001248646760142416) + sigma0_vh = data['sigma0_vh'+version][:] + save_to_tif(vh_name, sigma0_vh, geo) + + if not os.path.exists(ang_name): + gg = gdal.Open('NETCDF:"%s":theta'%s1_nc_file) + geo = gg.GetGeoTransform() + geo = (12.878782781795174, 0.000124866097153945, 0.0, 53.62977739815832, 0.0, -0.0001248646760142416) + localIncidenceAngle = data['theta'][:] + save_to_tif(ang_name, localIncidenceAngle, geo) + + return s1_data(time, lat, lon, satellite, relorbit, orbitdirection, ang_name, vv_name, vh_name) + +def read_sar(sar_data, state_mask): + """ + get/reproject sar data and mask on same extent (grid) + + :param sar_data: tuple + information from function get_sar (time, lat, lon, satellite, relorbit, orbitdirection, ang_name, vv_name, vh_name) + :param state_mask: array ???? 
+ mask + :return: tuple + information about time, lat, long, satellite, relorbit, orbitdirection, ang, vv_name, vh_name + """ + + s1_data = namedtuple('s1_data', 'time lat lon satellite relorbit orbitdirection ang vv vh') + ang = reproject_data(sar_data.ang_name, output_format="MEM", target_img=state_mask) + vv = reproject_data(sar_data.vv_name, output_format="MEM", target_img=state_mask) + vh = reproject_data(sar_data.vh_name, output_format="MEM", target_img=state_mask) + time = [datetime.datetime(1970,1,1) + datetime.timedelta(days=float(i)) for i in sar_data.time] + return s1_data(time, sar_data.lat, sar_data.lon, sar_data.satellite, sar_data.relorbit, sar_data.orbitdirection, ang, vv, vh) + +def get_api(api_nc_file,year): + """ + extract one year and convert RADOLAN netcdf4 file information to tif + + :param api_nc_file: str + name of netcdf4 file with RADOLAN sm prior information + :param year: str + year of interest + :return: tuple + information about time, lat, lon, api_name (tif file) + """ + + api_data = namedtuple('api_data', 'time lat lon api') + data = Dataset(api_nc_file) + + xxx = date2num(datetime.datetime.strptime(year+'0201', '%Y%m%d'), units ='hours since 2000-01-01 00:00:00', calendar='gregorian') + # yyy = date2num(datetime.datetime.strptime(year+'1001', '%Y%m%d'), units ='hours since 2000-01-01 00:00:00', calendar='gregorian') + yyy = date2num(datetime.datetime.strptime(year+'0628', '%Y%m%d'), units ='hours since 2000-01-01 00:00:00', calendar='gregorian') + + time = data['time'][np.where(data['time'][:]==xxx)[0][0]:np.where(data['time'][:]==yyy)[0][0]] + lat = data['lat'][:] + lon = data['lon'][:] + + api_name = api_nc_file.replace('.nc', '_api'+year+'.tif') + + if not os.path.exists(api_name): + gg = gdal.Open('NETCDF:"%s":api'%api_nc_file) + geo = gg.GetGeoTransform() + save_to_tif(api_name, data['api'][np.where(data['time'][:]==xxx)[0][0]:np.where(data['time'][:]==yyy)[0][0],:,:], geo) + + return api_data(time, lat, lon, api_name) + +def 
read_api(api_data, state_mask): + """ + get/reproject api data and mask on same extent (grid) + + :param api_data: tuple + information from funtion get_api (time, lat, lon, api_name (tif file)) + :param state_mask: array ??? + mask + :return: tuple + information about time, lat, lon, api_name (tif file) + """ + s1_data = namedtuple('api_data', 'time lat lon api') + + api = reproject_data(api_data.api, output_format="MEM", target_img=state_mask) + time = [datetime.datetime(2000,1,1) + datetime.timedelta(hours=float(i)) for i in api_data.time] + + return s1_data(time, api_data.lat, api_data.lon, api) + +def read_vwc(vwc_data, state_mask): + """ + get/reproject ndwi1 data and mask on same extent (grid) + calculate vwc from ndwi1 data (current usage of empirical function Maggioni et al. 2006) + + :param vwc_data: str + path to stored vwc tif files + :param state_mask: array ??? + mask + :return: tuple + information about time, vwc content, ndwi content + """ + s2_data = namedtuple('s2_vwc', 'time vwc ndwi') + filelist = glob.glob(vwc_data+'*.tif') + filelist.sort() + time = [] + vwc = [] + ndwi = [] + for file in filelist: + g = gdal.Open(file) + ndwi_array = reproject_data(file, output_format="MEM", target_img=state_mask) + ndwi_array = ndwi_array.ReadAsArray() + vwc_array = ndwi1_mag(ndwi_array) + time.append(datetime.datetime.strptime(file.split('/')[-1][14:22], '%Y%m%d')) + vwc.append(vwc_array) + ndwi.append(ndwi_array) + + return s2_data(time, vwc, ndwi) + +def ndwi1_mag(ndwi1): + """ + formula to calculate vwc from ndwi1 after Maggioni et al. 2006 + + :param ndwi1: array/float + ndwi1 information + :return: array/float + vwc information + """ + vwc = 13.2*ndwi1**2+1.62*ndwi1 + return vwc + +def ndwi1_cos_maize(ndwi1): + """ + formula to calculate vwc from ndwi1 after Cosh et al. 
2006 + + :param ndwi1: array/float + ndwi1 information + :return: array/float + vwc information + """ + vwc = 9.39*ndwi1+1.26 + return vwc + +def get_sm_input(s1_ncfile, s1_version, s2_vwc, rad_api, state_mask, year): + """ + load all input data for sm retrieval + + :param s1_ncfile: str + name of netcdf4 file with stack of S1 images (pre processed with SenSARP package) + :param s2_vwc: str + name of folder with S2 tif files (pre processed with Google Earth Engine script) + :param rad_api: str + name of RADOLAN file with sm prior information + :param state_mask: array + mask information + :param year: str + year of interest (important for RADOLAN extraction) + :return: arrays + input data for retrieval (s1, vwc, api) + """ + sar = get_sar(s1_ncfile, s1_version) + print('get_sar:'+str(datetime.datetime.now())) + s1_data = read_sar(sar, state_mask) + print('read_sar:' + str(datetime.datetime.now())) + vwc_data = read_vwc(s2_vwc, state_mask) + print('read_vwc:' + str(datetime.datetime.now())) + api = get_api(rad_api, year) + print('get_api:' + str(datetime.datetime.now())) + api_data = read_api(api, state_mask) + print('read_api:' + str(datetime.datetime.now())) + + return s1_data, vwc_data, api_data + +def inference_preprocessing(s1_data, vwc_data, api_data, output_folder, orbit1=None, orbit2=None): + """ + Resample S2 smoothed output to match S1 observations times + + :param s1_data: tuple ???? + .... + :param vwc_data: tuple ???? + .... + :param api_data: tuple ???? + .... + :param orbit1: str + orbit number + :param orbit2: str + orbit number + :return: tuple + ..... 
+ """ + # Move everything to DoY to simplify interpolation + sar_inference_data = namedtuple('sar_inference_data', 'time lat lon satellite relorbit orbitdirection ang vv vh vwc api time_mask ndwi') + + vwc_doys = np.array([ int(i.strftime('%j')) for i in vwc_data.time]) + s1_doys = np.array([ int(i.strftime('%j')) for i in s1_data.time]) + + + time = np.array(s1_data.time) + for jj in range(len(s1_data.time)): + time[jj] = s1_data.time[jj].replace(microsecond=0).replace(second=0).replace(minute=0) + time[jj] = time[jj].replace(hour=0) + + index=[] + xxx = np.array(api_data.time) + for jj in range(len(time)): + oje = np.where(xxx==time[jj]) + try: + ojet = oje[0][0] + index.append(ojet) + except IndexError: + pass + api_doys = np.array([ int(i.strftime('%j')) for i in np.array(api_data.time)[index]]) + + f = interp1d(vwc_doys, np.array(vwc_data.vwc), axis=0, bounds_error=False) + vwc_s1 = f(s1_doys) + + f = interp1d(vwc_doys, np.array(vwc_data.ndwi), axis=0, bounds_error=False) + ndwi_s1 = f(s1_doys) + + api_s1 = api_data.api.ReadAsArray()[index] + f = interp1d(api_doys, api_s1, axis=0, bounds_error=False) + api_s1 = f(s1_doys) + + if s1_data.time[0].year == 2017: + time_mask = (s1_doys >= 80) & (s1_doys <= 273) + elif s1_data.time[0].year == 2018: + time_mask = (s1_doys >= 80) & (s1_doys <= 273) + else: + time_mask = (s1_doys >= 0) & (s1_doys <= 365) + + if orbit1 != None: + rel_orbit1 = s1_data.relorbit==orbit1 + if orbit2 != None: + rel_orbit2 = s1_data.relorbit==orbit2 + xxx = np.logical_and(rel_orbit1,time_mask) + yyy = np.logical_and(rel_orbit2,time_mask) + time_mask = np.logical_or(xxx,yyy) + + sar_inference_data = sar_inference_data(s1_data.time, s1_data.lat, s1_data.lon, + s1_data.satellite, s1_data.relorbit, + s1_data.orbitdirection, s1_data.ang, + s1_data.vv, s1_data.vh, vwc_s1, api_s1, time_mask, ndwi_s1) + + print('inference_processing:' + str(datetime.datetime.now())) + + if output_folder != None: + np.save(os.path.join(output_folder, 
'time.npy'),s1_data.time) + s1_data.lat.dump(os.path.join(output_folder, 'lat.npy')) + s1_data.lon.dump(os.path.join(output_folder, 'lon.npy')) + s1_data.satellite.dump(os.path.join(output_folder, 'satellite.npy')) + s1_data.relorbit.dump(os.path.join(output_folder, 'relorbit.npy')) + s1_data.orbitdirection.dump(os.path.join(output_folder, 'orbitdirection.npy')) + #s1_data.ang.dump(os.path.join(output_folder, 'ang.npy')) + #s1_data.vv.dump(os.path.join(output_folder, 'vv.npy')) + #s1_data.vh.dump(os.path.join(output_folder, 'vh.npy')) + vwc_s1.dump(os.path.join(output_folder, 'vwc.npy')) + api_s1.dump(os.path.join(output_folder, 'api.npy')) + time_mask.dump(os.path.join(output_folder, 'time_mask.npy')) + ndwi_s1.dump(os.path.join(output_folder, 'ndwi.npy')) + + return sar_inference_data diff --git a/kaska/sm_run_SenSARP.py b/kaska/sm_run_SenSARP.py new file mode 100644 index 0000000..2d93808 --- /dev/null +++ b/kaska/sm_run_SenSARP.py @@ -0,0 +1,88 @@ +import os +import yaml +from sar_pre_processing.sar_pre_processor import * +import warnings +warnings.filterwarnings("ignore") +class run_SenSARP(object): + """ + Class to run SenSARP default mode + """ + def __init__(self, path_s1data, output_folder, sample_config_file, name_tag, gpt_location='~/snap/bin/gpt', year=None, lr_lat=None,lr_lon=None,ul_lat=None,ul_lon=None,multi_speck=None,norm_angle=None): + self.input_folder = path_s1data + self.output_folder = output_folder + if not os.path.exists(self.output_folder): + os.makedirs(self.output_folder) + self.gpt_location = os.path.expanduser(gpt_location) + self.sample_config_file = sample_config_file + self.name_tag = name_tag + self.config_file = self.name_tag + '_config_file.yaml' + self.year = year + self.lr_lat = lr_lat #todo: add test if lr_lon, ul_lat, ul_lon are specified as well!!!! 
+ self.lr_lon = lr_lon + self.ul_lat = ul_lat + self.ul_lon = ul_lon + self.multi_speck = multi_speck + self.norm_angle = norm_angle + + self.open_yaml() + self.add_options() + self.run() + + def open_yaml(self): + + with open(self.sample_config_file) as stream: + data = yaml.safe_load(stream) + + data['input_folder'] = self.input_folder + data['output_folder'] = self.output_folder + data['gpt'] = self.gpt_location + + with open(self.config_file, 'wb') as stream: + yaml.safe_dump(data, stream, default_flow_style=False, + explicit_start=True, allow_unicode=True, encoding='utf-8') + + def add_options(self): + + with open(self.config_file) as stream: + data = yaml.safe_load(stream) + + # Filter option + ## Filter via year of interest + if self.year != None: + data['year'] = self.year + + if self.lr_lat != None: + ## Define region of interest + data['region']['lr']['lat'] = self.lr_lat # lower right latitude + data['region']['lr']['lon'] = self.lr_lon # lower right longitude + data['region']['ul']['lat'] = self.ul_lat # upper left latitude + data['region']['ul']['lon'] = self.ul_lon # upper left longitude + data['region']['subset'] = 'yes' + + if self.multi_speck != None: + ## Define multi-temporal filtering properties + data['speckle_filter']['multi_temporal']['apply'] = 'yes' + data['speckle_filter']['multi_temporal']['files'] = self.multi_speck # Number of files used for multi temporal filtering + + if self.norm_angle != None: + ## Define incidence angle for normalization + data['normalization_angle'] = self.norm_angle + + with open('test_config_file.yaml', 'wb') as stream: + yaml.safe_dump(data, stream, default_flow_style=False, + explicit_start=True, allow_unicode=True, encoding='utf-8') + + def run(self): + + processing = SARPreProcessor(config=self.config_file) + processing.create_processing_file_list() + print('start step 1') + processing.pre_process_step1() + print('start step 2') + processing.pre_process_step2() + print('start step 3') + 
processing.pre_process_step3() + print('start add netcdf information') + processing.add_netcdf_information() + print('start create netcdf stack') + processing.create_netcdf_stack() diff --git a/kaska/tau.py b/kaska/tau.py new file mode 100644 index 0000000..a427c2a --- /dev/null +++ b/kaska/tau.py @@ -0,0 +1,1027 @@ + +import os +import pandas as pd +import matplotlib.pyplot as plt +import matplotlib.dates as mdates +from matplotlib.dates import MonthLocator +# import matplotlib.ticker +import numpy as np +# from sense.canopy import OneLayer +# from sense.soil import Soil +# from sense import model +import scipy.stats +from scipy.optimize import minimize +import pdb +from z_helper import * +# from z_optimization import * +import datetime +from matplotlib import gridspec +import datetime +from matplotlib.lines import Line2D +import copy +import os +import osr +import gdal +import datetime +import numpy as np +from netCDF4 import Dataset +from scipy.ndimage import label +from utils import reproject_data +from skimage.filters import sobel +from collections import namedtuple +from scipy.optimize import minimize +from scipy.interpolate import interp1d +from watercloudmodel import cost_function +from watercloudmodel import cost_function2 +from scipy.ndimage.filters import gaussian_filter1d +import pdb +from z_helper import * + +def smooth(y, box_pts): + box = np.ones(box_pts)/box_pts + y_smooth = np.convolve(y, box, mode='same') + return y_smooth + +def fresnel(eps, theta): + theta = np.deg2rad(theta) + num = (eps-1)*(np.sin(theta)**2 - eps*(1+np.sin(theta)**2)) + den = eps*np.cos(theta) + np.sqrt(eps - np.sin(theta)**2) + den = den**2 + return np.abs(num/den) + +def mv2eps(a, b, c, mv): + eps = a + b * mv + c * mv**2 + return eps + +def quad_approx_solver(a, b, c, theta, alphas): + x = np.arange(0.01, 0.5, 0.01) + p = np.polyfit(x, fresnel(mv2eps(a, b, c, x),theta.mean()), 2) + # 2nd order polynomial + #solve + solutions = [np.roots([p[0], p[1], p[2]-aa]) for aa in 
alphas] + return solutions + + +def do_one_pixel_field(sar_inference_data, vv, vh, lai, theta, time, sm, sm_std, sr, sr_std, orbits, unc=1.): + + + lais = [] + srs = [] + alphas = [] + sms = [] + ps = [] + times = [] + uorbits = np.unique(orbits) + for orbit in uorbits: + orbit_mask = orbits == orbit + ovv, ovh, olai, otheta, otime = vv[orbit_mask], vh[orbit_mask], lai[orbit_mask], theta[orbit_mask], time[orbit_mask] + osm, osm_std, osro, osro_std = sm[orbit_mask], sm_std[orbit_mask], sr[orbit_mask], sr_std[orbit_mask] + + olai_std = np.ones_like(olai)*0.05 + + alpha = fresnel(mv2eps(1.99, 38.9, 11.5, osm), otheta) + alpha_std = np.ones_like(alpha)*0.2 + + soil_sigma_mask = olai < 1 + sigma_soil_vv_mu = np.mean(ovv[soil_sigma_mask]) + sigma_soil_vh_mu = np.mean(ovh[soil_sigma_mask]) + + xvv = np.array([1, 0.5, sigma_soil_vv_mu]) + xvh = np.array([1, 0.5, sigma_soil_vh_mu]) + + prior_mean = np.concatenate([[0, ]*6, alpha, osro, olai]) + prior_unc = np.concatenate([[10., ]*6, alpha_std, osro_std, olai_std]) + + x0 = np.concatenate([xvv, xvh, alpha, osro, olai]) + + bounds = ( + [[None, None]] * 6 + + [[0.1, 3.3]] * olai.shape[0] + + [[0, .03]] * olai.shape[0] + + [[0, 8]] * olai.shape[0] + ) + + gamma = [1000, 1000] + retval = minimize(cost_function, + x0, + args=(ovh, ovv, otheta, gamma, prior_mean, prior_unc, unc), + jac=True, + bounds = bounds, + options={"disp": False},) + + posterious_lai = retval.x[6+2*len(olai) : ] + posterious_sr = retval.x[6+len(olai) : 6+2*len(olai)] + posterious_alpha = retval.x[6 : 6+len(olai)] + sols = np.array(quad_approx_solver(1.99, 38.9, 11.5, otheta, posterious_alpha)).min(axis=1) + lais.append(posterious_lai) + srs.append(posterious_sr) + sms.append(sols) + times.append(otime) + ps.append(retval.x[:6]) + + order = np.argsort(np.hstack(times)) + times = np.hstack(times )[order] + lais = np.hstack(lais )[order] + srs = np.hstack(srs )[order] + sms = np.hstack(sms )[order].real + return times, lais, srs, sms, np.array(ps) + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + +# def do_one_pixel_field(data_field, vv, vh, lai, theta, time, sm, sm_std, sr, sr_std, height, orbits, unc): + +# lais = [] +# coefs = [] +# sms = [] +# times = [] + +# uorbits = np.unique(orbits) +# # uorbits = np.array([44]) +# for orbit in uorbits: +# orbit_mask = orbits == orbit +# # orbit_mask = (orbits == 44) | (orbits == 168) +# # orbit_mask = (orbits == 44) | (orbits == 95) | (orbits == 117) | (orbits == 168) +# # orbit_mask = (orbits == 95) +# ovv, ovh, olai, otheta, otime = vv[orbit_mask], vh[orbit_mask], lai[orbit_mask], theta[orbit_mask], time[orbit_mask] +# osm, osm_std, oscoef, oscoef_std = sm[orbit_mask], sm_std[orbit_mask], coef[orbit_mask], coef_std[orbit_mask] + +# oheight = height[orbit_mask] + +# olai_std = np.ones_like(olai)*0.05 + +# alpha = _calc_eps(osm) +# alpha = osm +# alpha_std = np.ones_like(alpha)*10 +# alpha_std = osm_std +# # pdb.set_trace() +# prior_mean = np.concatenate([alpha,oscoef]) +# prior_unc = np.concatenate([alpha_std,oscoef_std]) +# x0 = np.concatenate([alpha,oscoef]) +# data = np.concatenate([oheight,olai]) +# bounds = ( +# # [[2.5, 30]] * olai.shape[0] +# [[0.01, 0.5]] * olai.shape[0] +# + [[0.01, 3]] * olai.shape[0] +# ) + +# gamma = [500, 500] + +# retval = minimize(cost_function2, +# x0, +# args=(ovh, ovv, otheta, gamma, prior_mean, prior_unc, data, unc), +# jac=True, +# bounds = bounds, +# options={"disp": True},) + +# # posterious_lai = retval.x[2*len(olai) : 3*len(olai)] +# posterious_coef = retval.x[len(olai) : +2*len(olai)] +# posterious_mv = retval.x[ : +len(olai)] +# # lais.append(posterious_lai) +# coefs.append(posterious_coef) +# # x = np.arange(0.01, 0.5, 0.001) +# # xx = _calc_eps(x) +# # sols=[] +# # for i in posterious_mv: +# # p, pp = find_nearest(xx,i) +# # sols.append(x[pp]) +# # sols = np.array(sols) + +# sms.append(posterious_mv) +# # sms.append(sols) +# times.append(otime) + +# order = np.argsort(np.hstack(times)) +# times = np.hstack(times 
)[order] +# # lais = np.hstack(lais )[order] +# lais=0 +# coefs = np.hstack(coefs )[order] +# # coefs=0 +# sms = np.hstack(sms )[order].real + +# return times, lais, coefs, sms, orbit_mask + + + +# def _simple_ew(): +# """ +# eq. 4.69 +# simplistic approach with T=23°C, bulk density = 1.7 g/cm3 +# """ +# f0 = 18.64 # relaxation frequency [GHz] +# f = 5.405 +# hlp = f/f0 +# e1 = 4.9 + (74.1)/(1.+hlp**2.) +# # e2 =(74.1*hlp)/(1.+hlp**2.) + 6.46 * self.sigma/self.f +# # return e1 + 1.j * e2 +# return e1 + +# def _calc_eps(mv): +# """ +# calculate dielectric permittivity +# Eq. 4.66 (Ulaby et al., 2014) +# """ +# clay = 0.0738 +# sand = 0.2408 +# bulk = 1.45 +# alpha = 0.65 +# beta1 = 1.27-0.519*sand - 0.152*clay +# beta2 = 2.06 - 0.928*sand -0.255*clay +# sigma = -1.645 + 1.939*bulk - 2.256*sand + 1.594*clay + + +# e1 = (1.+0.66*bulk+mv**beta1*_simple_ew()**alpha - mv)**(1./alpha) +# # e2 = np.imag(self.ew)*self.mv**self.beta2 +# # return e1 + 1.j*e2 +# return e1 + +# def quad_approx_solver(alphas): +# x = np.arange(0.01, 0.5, 0.01) +# p = np.polyfit(x, _calc_eps(x), 2) +# # 2nd order polynomial +# #solve +# solutions = [np.roots([p[0], p[1], p[2]-aa]) for aa in alphas] +# return solutions + +def find_nearest(array, value): + array = np.asarray(array) + idx = (np.abs(array - value)).argmin() + return array[idx], idx + + + + + + + + +### Data preparation df_agro!!!! 
### +#----------------------------------------------------------------- +# storage information +path = '/media/tweiss/Work/z_final_mni_data_2017' +file_name = 'in_situ_s1_buffer_50' # theta needs to be changed to for norm multi +extension = '.csv' + +path_agro = '/media/nas_data/2017_MNI_campaign/field_data/meteodata/agrarmeteorological_station' +path_agro = '/media/tweiss/Work/Paper/in_progress/RT_model_comparison/images' +file_name_agro = 'Daily_Freising' +extension_agro = '.csv' + +field = '508_high' +pol = 'vv' + +df, df_agro, field_data, field_data_orbit, theta_field, sm_field, height_field, lai_field, vwc_field, pol_field, vv_field, vh_field, relativeorbit, vwcpro_field = read_data(path, file_name, extension, field, path_agro, file_name_agro, extension_agro, pol) + +aggregation = ['','_buffer_30','_buffer_50','_buffer_100','_Field_buffer_30'] +pre_processing = ['multi', 'norm_multi'] +aggregation = ['_buffer_100','_Field_buffer_30'] +pre_processing = ['multi'] +# aggregation = ['_buffer_100'] +# aggregation = ['_Field_buffer_30'] +surface_list = ['Oh92', 'Oh04', 'Dubois95', 'WaterCloud', 'I2EM'] +canopy_list = ['turbid_isotropic', 'water_cloud'] + +surface_list = ['Oh92', 'I2EM'] +canopy_list = ['turbid_isotropic'] +# canopy_list = ['turbid_rayleigh'] +surface_list = ['Oh04'] +# surface_list = ['Oh92'] +# surface_list = ['I2EM'] +# canopy_list = ['water_cloud'] +field = ['508_high'] +# field = ['508_low'] +# field = ['508_med'] +# field = ['301_high'] +# field = ['301_low'] +# field = ['301_med'] +# field = ['542_high'] +# field = ['542_low'] +# field = ['542_med'] + +### option for time invariant or variant calibration of parameter +#------------------------------- +opt_mod = ['time_variant'] +#--------------------------- + + +for p in pre_processing: + + for pp in aggregation: + + versions = ['everything','','44_117','95_168','44_168','117_95','44_95','117_168','44_117_95','44_117_168','44_95_168','117_95_168'] + ver = 
['','','44','95','44','117','44','117','44','44','44','117'] + ver2 = ['','','117','168','168','95','95','168','117','117','95','95'] + ver3 = ['','','','','','','','','95','168','168','168'] + + versions = ['','everything'] + ver = ['',''] + ver2 = ['',''] + ver3 = ['',''] + + # versions = ['44_168'] + # ver = ['44'] + # ver2 = ['168'] + # ver3 = [''] + + for i, ii in enumerate(versions): + + if ii == 'everything': + orbit_list = [None] + orbit1=None + orbit2=None + orbit3=None + orbit4=None + plot_output_path = '/media/tweiss/Work/paper3/z_dense_s1_time_series_n7'+p+pp+'_all'+'/' + csv_output_path = plot_output_path+'csv/None_' + elif ii == '': + orbit_list = [44,117,95,168] + orbit2=None + orbit3=None + orbit4=None + plot_output_path = '/media/tweiss/Work/paper3/z_dense_s1_time_series_n7'+p+pp+'/' + csv_output_path = plot_output_path+'csv/' + else: + plot_output_path = '/media/tweiss/Work/paper3/z_dense_s1_time_series_n7'+p+pp+'_'+ii+'/' + csv_output_path = plot_output_path+'csv/'+ver[i]+'_'+ver[i]+'_' + orbit_list = [int(ver[i])] + orbit2 = int(ver2[i]) + if ver3[i] == '': + orbit3 = None + else: + orbit3 = int(ver3[i]) + + + data = pd.read_csv(csv_output_path+'all_50.csv',header=[0,1,2,3,4,5],index_col=0) + + + + + + + # fig, ax = plt.subplots(figsize=(17, 13)) + # gs = gridspec.GridSpec(5, 1, height_ratios=[14, 3, 3, 3, 3]) + # ax = plt.subplot(gs[0]) + + # plt.ylabel('Backscatter [dB]', fontsize=18) + # plt.xlabel('Date', fontsize=18) + # plt.tick_params(labelsize=17) + + # ax.set_ylim([-21.5,-8.5]) + + + # colormaps = ['Greens', 'Purples', 'Blues', 'Oranges', 'Reds', 'Greys', 'pink', 'bone', 'Blues', 'Blues', 'Blues'] + # r = 0 + + # colormap = plt.get_cmap(colormaps[r]) + # colors = [colormap(rr) for rr in np.linspace(0.35, 1., 3)] + + for kkk in opt_mod: + for kkkk in field: + for k in surface_list: + for kk in canopy_list: + + if k == 'Oh92': + hm = 'Oh92' + colors = 'b' + elif k == 'Oh04': + hm = 'Oh04' + colors = 'r' + elif k == 'Dubois95': + 
hm='Dubois95' + colors = 'y' + elif k == 'WaterCloud': + hm = 'WCM' + colors = 'm' + elif k == 'I2EM': + hm = 'IEM_B' + colors = 'g' + + data_field = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk) + data_field.index = pd.to_datetime(data_field.index) + date = data_field.index + + vv = data_field.filter(like='S1_vv').values.flatten() + vv = 10*np.log10(vv) + vv_model = data_field.filter(like='vv_model').values.flatten() + vv_model = 10*np.log10(vv_model) + vh = data_field.filter(like='S1_vh').values.flatten() + vh = 10*np.log10(vh) + lai = data_field.filter(like='LAI_insitu').values.flatten() + lai = lai + theta = data_field.filter(like='theta').values.flatten() + theta = np.rad2deg(theta) + time = date + time2 = np.array(time) + for jj in range(len(time)): + time2[jj] = time[jj].replace(microsecond=0).replace(second=0).replace(minute=0) + time2 = pd.to_datetime(time2) + + + s2_data = pd.read_csv('/media/tweiss/Daten/data_AGU/S2_'+kkkk+pp+'.csv',header=[0],index_col=0) + s2_data.index = pd.to_datetime(s2_data.index).floor('Min').floor('H') + s2_lai = s2_data.loc[time2]['lai'].values.flatten() + s2_cab = s2_data.loc[time2]['cab'].values.flatten() + s2_cbrown = s2_data.loc[time2]['cbrown'].values.flatten() + lai = s2_lai + sm_insitu = data_field.filter(like='SM_insitu').values.flatten() + api_data = pd.read_csv('/media/tweiss/Daten/data_AGU/api_sm.csv',header=[0],index_col=0) + api_data.index = pd.to_datetime(api_data.index) + api_sm = api_data.loc[time2].values.flatten() + sm = data_field.filter(like='SM_insitu').values.flatten() + # sm = smooth(sm,2) + sm[:] = 0.25 + # sm = api_sm + sm_std = data_field.filter(like='SM_insitu').values.flatten() + # ooo = np.abs(sm[1:]-sm[:-1])*20 + # sm_std[0] = ooo[-1] + # sm_std[1:] = ooo + sm_std[:] = 1.0 + + # out_shape = date + # lai_outputs = np.zeros(out_shape ) + # sm_outputs = np.zeros(out_shape ) + # sr_outputs = np.zeros(out_shape ) + + # ps_shape = (len(uorbits),) + lai + + # Avv_outputs = 
np.zeros(ps_shape) + # Bvv_outputs = np.zeros(ps_shape) + # Cvv_outputs = np.zeros(ps_shape) + + # Avh_outputs = np.zeros(ps_shape) + # Bvh_outputs = np.zeros(ps_shape) + # Cvh_outputs = np.zeros(ps_shape) + + + # sr = lai*1. + # sr[:] = 0.3 + # sr_std = lai*1. + # sr_std[:] = 2 + + # vv = np.maximum(vv, 0.0001) + # vv = 10 * np.log10(vv) + # vh = np.maximum(vh, 0.0001) + # vh = 10 * np.log10(vh) + + unc = 1.8 + orbits = data_field.filter(like='relativeorbit').values.flatten() + mask95 = orbits == 95 + mask168 = orbits == 168 + mask44 = orbits == 44 + mask117 = orbits == 117 + + + + tau = data_field.filter(like='coef').values.flatten() * np.sqrt(data_field.filter(like='LAI').values.flatten()) * data_field.filter(like='height').values.flatten() + height = data_field.filter(like='height').values.flatten() + + + data_biomass = pd.read_csv('/media/tweiss/Work/z_final_mni_data_2017/new_in_situ_s1multi_buffer_50neu_biomass.csv',header=[0,1],index_col=1) + + data_biomass = data_biomass[:len(height)] + data_biomass2 = data_biomass.filter(like=kkkk) + wetbiomass = data_biomass2.filter(like='wet') + drybiomass = data_biomass2.filter(like='dry') + + + xxx = tau[mask168]-tau[mask117] + + # plt.rcParams["figure.figsize"] = (10,7) + # plt.plot(time.values, tau) + # plt.grid() + # plt.ylabel('tau') + # plt.xlabel('Time') + # plt.xlim([datetime.date(2017, 3, 22), datetime.date(2017, 7, 18)]) + # # plt.savefig('/media/tweiss/Daten/data_AGU/test_kaska/tau_x', bbox_inches = 'tight') + + # # plt.show() + + + # plt.close() + + vwc = data_field.filter(like='VWC').values.flatten() + b = tau / vwc + coef = data_field.filter(like='coef').values.flatten() + # plt.plot(time.values, b) + # plt.grid() + # plt.ylabel('b = tau/vwc') + # plt.xlabel('Time') + # plt.xlim([datetime.date(2017, 3, 22), datetime.date(2017, 7, 18)]) + # plt.savefig('/media/tweiss/Daten/data_AGU/test_kaska/b_301_n1', bbox_inches = 'tight') + # pdb.set_trace() + + + fig, ax = plt.subplots(figsize=(17, 13)) + gs = 
gridspec.GridSpec(7, 1, height_ratios=[5, 5, 5, 5, 5,5,5]) + ax = plt.subplot(gs[0]) + + plt.ylabel('Backscatter [dB]', fontsize=14) + # plt.xlabel('Date', fontsize=18) + # plt.tick_params(labelsize=12) + + ax.plot(time.values, vv, label='VV S1') + ax.plot(time.values, vv_model, label='VV model') + plt.grid(linestyle='dotted') + ax.set_xlim([datetime.date(2017, 3, 22), datetime.date(2017, 7, 18)]) + plt.legend() + plt.subplots_adjust(hspace=.0) + + + ax1 = plt.subplot(gs[1]) + ax1.plot(time.values, vwc, label='VWC') + plt.ylabel('VWC [kg/m2]', fontsize=14) + plt.grid(linestyle='dotted') + ax.set_xlim([datetime.date(2017, 3, 22), datetime.date(2017, 7, 18)]) + plt.subplots_adjust(hspace=.0) + + ax2 = plt.subplot(gs[2]) + ax2.plot(time.values, tau, label='tau') + plt.ylabel('tau', fontsize=18) + plt.grid(linestyle='dotted') + ax.set_xlim([datetime.date(2017, 3, 22), datetime.date(2017, 7, 18)]) + plt.subplots_adjust(hspace=.0) + + ax3 = plt.subplot(gs[3]) + ax3.plot(time.values, b, label='b=tau/vwc') + plt.ylabel('b', fontsize=14) + plt.grid(linestyle='dotted') + ax.set_xlim([datetime.date(2017, 3, 22), datetime.date(2017, 7, 18)]) + plt.subplots_adjust(hspace=.0) + + ax4 = plt.subplot(gs[4]) + ax4.plot(time.values, coef, label='coef') + plt.ylabel('coef', fontsize=14) + plt.grid(linestyle='dotted') + ax.set_xlim([datetime.date(2017, 3, 22), datetime.date(2017, 7, 18)]) + plt.subplots_adjust(hspace=.0) + + ax5 = plt.subplot(gs[5]) + ax5.plot(time.values, height, label='height') + plt.ylabel('height [m]', fontsize=14) + plt.grid(linestyle='dotted') + ax.set_xlim([datetime.date(2017, 3, 22), datetime.date(2017, 7, 18)]) + plt.subplots_adjust(hspace=.0) + + ax6 = plt.subplot(gs[6]) + ax6.plot(time.values, wetbiomass, label='wet biomass') + ax6.plot(time.values, drybiomass, label='dry biomass') + plt.ylabel('Biomass [kg/m2]', fontsize=14) + plt.grid(linestyle='dotted') + plt.legend() + ax.set_xlim([datetime.date(2017, 3, 22), datetime.date(2017, 7, 18)]) + + 
plt.setp(ax.get_xticklabels(), visible=False) + plt.setp(ax1.get_xticklabels(), visible=False) + plt.setp(ax2.get_xticklabels(), visible=False) + plt.setp(ax3.get_xticklabels(), visible=False) + plt.setp(ax4.get_xticklabels(), visible=False) + plt.setp(ax5.get_xticklabels(), visible=False) + + + plt.savefig('/media/tweiss/Daten/data_AGU/test_kaska2/tau'+kkkk+'_'+k, bbox_inches = 'tight') + + pdb.set_trace() + + + times, lais, srs, sms, ps = do_one_pixel_field(data_field, vv, vh, lai, theta, time, sm, sm_std, sr, sr_std, orbits, unc=unc) + + # plt.plot(sm_insitu) + # plt.plot(sm) + # plt.plot(sms) + + # pdb.set_trace() + + # unc_array = np.arange(0,2,0.1) + # coef_array = np.arange(0,2,0.1) + # sm_array = np.arange(0,2,0.1) + + # hm = {} + # for r in unc_array: + # for rr in coef_array: + # for rrr in sm_array: + # unc = r + # sr_std[:] = rr + # sm_std[:] = rrr + # times, lais, srs, sms, ps = do_one_pixel_field(data_field, vv, vh, lai, theta, time, sm, sm_std, sr, sr_std, orbits, unc=unc) + # rmse_vv = rmse_prediction(sm_insitu,sms) + # bias_vv = bias_prediction(sm_insitu,sms) + # ubrmse_vv = ubrmse_prediction(rmse_vv,bias_vv) + # hm[(r,rr,rrr)] = ubrmse_vv + + # pdb.set_trace() + # min(hm, key=hm.get) + # hm[min(hm, key=hm.get)] + + # times, lais, coefs, sms, orbit_mask = do_one_pixel_field(data_field, vv, vh, lai, theta, time, sm, sm_std, coef, coef_std, height, orbits,unc=unc) + + plt.rcParams["figure.figsize"] = (10,7) + plt.plot(time,sm_insitu) + rmse_vv = rmse_prediction(sm_insitu,sm) + bias_vv = bias_prediction(sm_insitu,sm) + ubrmse_vv = ubrmse_prediction(rmse_vv,bias_vv) + plt.plot(time,sm, label='prior RMSE:'+str(rmse_vv)[0:6]+' ubRMSE:'+str(ubrmse_vv)[0:6]) + rmse_vv = rmse_prediction(sm_insitu,sms) + bias_vv = bias_prediction(sm_insitu,sms) + ubrmse_vv = ubrmse_prediction(rmse_vv,bias_vv) + plt.plot(times,sms, label='model RMSE:'+str(rmse_vv)[0:6]+' ubRMSE:'+str(ubrmse_vv)[0:6]) + + # #orbit_mask + # plt.plot(time[orbit_mask],sm_insitu[orbit_mask]) + # 
rmse_vv = rmse_prediction(sm_insitu[orbit_mask],sm[orbit_mask]) + # bias_vv = bias_prediction(sm_insitu[orbit_mask],sm[orbit_mask]) + # ubrmse_vv = ubrmse_prediction(rmse_vv,bias_vv) + # plt.plot(time[orbit_mask],sm[orbit_mask], label='prior RMSE:'+str(rmse_vv)[0:6]+' ubRMSE:'+str(ubrmse_vv)[0:6]) + # rmse_vv = rmse_prediction(sm_insitu[orbit_mask],sms) + # bias_vv = bias_prediction(sm_insitu[orbit_mask],sms) + # ubrmse_vv = ubrmse_prediction(rmse_vv,bias_vv) + # plt.plot(times,sms, label='model RMSE:'+str(rmse_vv)[0:6]+' ubRMSE:'+str(ubrmse_vv)[0:6]) + + + plt.legend() + plt.grid() + plt.ylabel('Soil Moisture') + plt.xlabel('Time') + plt.savefig('/media/tweiss/Daten/data_AGU/test_kaska/jose_unc08', bbox_inches = 'tight') + plt.close() + pdb.set_trace() + # plt.plot(time[orbit_mask],coef) + # plt.plot(times,coefs) + # plt.show() + pdb.set_trace() + pdb.set_trace() + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + if kkk == 'time invariant': + if kk == 'turbid_isotropic': + ax.plot(date, 10*np.log10(data_field.filter(like='vv_model')), color=colors, marker='s', linestyle='dashed', label = hm+ ' + ' + 'SSRT') + else: + ax.plot(date, 10*np.log10(data_field.filter(like='vv_model')), color=colors, marker='s', label = hm+ ' + ' + 'WCM') + else: + if kk == 'turbid_isotropic': + ax.plot(date, 10*np.log10(data_field.filter(like='vv_model')), color=colors, marker='s', linestyle='dashed', label = hm+ ' + ' + 'SSRT') + else: + ax.plot(date, 10*np.log10(data_field.filter(like='vv_model')), color=colors, marker='s', label = hm+ ' + ' + 'WCM') + a = 0 + b = 0 + c = 0 + d = 0 + + relativeorbit = data_field.filter(like='relativeorbit') + for j in range(len(relativeorbit)): + relativeorbit.index[j] + x = relativeorbit.index[j] - datetime.timedelta(days=0.4) + xx = relativeorbit.index[j] + datetime.timedelta(days=0.4) + if relativeorbit.values.flatten()[j] == 95: + if a == 0: + ax.axvspan(x,xx, color='red', alpha=0.2, label = 'Incidence angle 43°, Descending 
track') + a += 1 + else: + ax.axvspan(x,xx, color='red', alpha=0.2) + elif relativeorbit.values.flatten()[j] == 117: + if b == 0: + ax.axvspan(x,xx, color='blue', alpha=0.2, label = 'Incidence angle 45°, Ascending track') + b += 1 + else: + ax.axvspan(x,xx, color='blue', alpha=0.2) + elif relativeorbit.values.flatten()[j] == 168: + if c == 0: + ax.axvspan(x,xx, color='orange', alpha=0.2, label = 'Incidence angle 35°, Descending track') + c += 1 + else: + ax.axvspan(x,xx, color='orange', alpha=0.2) + elif relativeorbit.values.flatten()[j] == 44: + if d == 0: + ax.axvspan(x,xx, color='green', alpha=0.2, label = 'Incidence angle 36°, Ascending track') + d += 1 + else: + ax.axvspan(x,xx, color='green', alpha=0.2) + else: + pass + + ax.plot(date,10*np.log10(data_field.filter(like='S1_vv')), '-', color='black', label='Sentinel-1', linewidth=3, marker='s') + + ax.set_xlim([datetime.date(2017, 3, 22), datetime.date(2017, 7, 18)]) + plt.legend(prop={'size': 14}, loc=3) + + plt.grid(linestyle='dotted') + + plt.setp(ax.get_xticklabels(), visible=False) + + ax0 = plt.subplot(gs[1]) + plt.tick_params(labelsize=17) + for kkkk in field: + for k in surface_list: + for kk in canopy_list: + + if k == 'Oh92': + hm = 'Oh92' + colors = 'b' + elif k == 'Oh04': + hm = 'Oh04' + colors = 'r' + elif k == 'Dubois95': + hm='Dubois95' + colors = 'y' + elif k == 'WaterCloud': + hm = 'WCM' + colors = 'm' + elif k == 'I2EM': + hm = 'IEM_B' + colors = 'g' + + + ground = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk).filter(like='part_g') + ground = ground[ground.columns[0]] + + lai = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk).filter(like='LAI_insitu').values + theta = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk).filter(like='theta').values + + if kk == 'turbid_isotropic': + coef = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk).filter(like='coef').values + d = 
data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk).filter(like='height').values + T = np.exp(-coef*np.sqrt(lai)*d/np.cos(theta)) + T=T**2 + ax0.plot(date,T.flatten(), color=colors, marker='s', linestyle='dashed') + else: + B_vv = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk).filter(like='B_vv').values + T = np.exp(-2*B_vv*lai/np.cos(theta)) + ax0.plot(date,T.flatten(), color=colors, marker='s') + + a = 0 + b = 0 + c = 0 + d = 0 + + relativeorbit = data_field.filter(like='relativeorbit') + for j in range(len(relativeorbit)): + relativeorbit.index[j] + x = relativeorbit.index[j] - datetime.timedelta(days=0.4) + xx = relativeorbit.index[j] + datetime.timedelta(days=0.4) + if relativeorbit.values.flatten()[j] == 95: + if a == 0: + ax0.axvspan(x,xx, color='red', alpha=0.2, label = 'descending 43°') + a += 1 + else: + ax0.axvspan(x,xx, color='red', alpha=0.2) + elif relativeorbit.values.flatten()[j] == 117: + if b == 0: + ax0.axvspan(x,xx, color='blue', alpha=0.2, label = 'ascending 43°') + b += 1 + else: + ax0.axvspan(x,xx, color='blue', alpha=0.2) + elif relativeorbit.values.flatten()[j] == 168: + if c == 0: + ax0.axvspan(x,xx, color='orange', alpha=0.2, label = 'descending 35°') + c += 1 + else: + ax0.axvspan(x,xx, color='orange', alpha=0.2) + elif relativeorbit.values.flatten()[j] == 44: + if d == 0: + ax0.axvspan(x,xx, color='green', alpha=0.2, label = 'ascending 36°') + d += 1 + else: + ax0.axvspan(x,xx, color='green', alpha=0.2) + else: + pass + plt.ylabel('Transmissivity\nT', fontsize=18) + ax0.set_xlim([datetime.date(2017, 3, 22), datetime.date(2017, 7, 18)]) + ax0.set_ylim(-0.2,1.1) + plt.grid(linestyle='dotted') + plt.setp(ax0.get_xticklabels(), visible=False) + + + ax1 = plt.subplot(gs[2], sharex = ax) + plt.tick_params(labelsize=17) + # remove vertical gap between subplots + plt.subplots_adjust(hspace=.0) + plt.grid(linestyle='dotted') + plt.setp(ax1.get_xticklabels(), visible=False) + + lai_field = 
data_field.filter(like='LAI_insitu') + height_field = data_field.filter(like='height') + + + ax1.plot(date,lai_field,color='green',linewidth=2,label='LAI') + ax2 = ax1.twinx() + plt.tick_params(labelsize=17) + ax2.plot(date,height_field,color='black', linewidth=2, label='Height') + ax1.set_ylabel('LAI', fontsize=16) + ax2.set_ylabel('Height\n[m]', fontsize=16) + + + # add std for LAI and height for field 508 (data from field measurements) + lai_old = copy.deepcopy(lai_field) + height_old = copy.deepcopy(height_field) + if field == '508_high': + lai_field[lai_field.index>'2017-03-28'] = 0.2218 + lai_field[lai_field.index>'2017-04-05'] = 0.1367 + lai_field[lai_field.index>'2017-04-10'] = 0.4054 + lai_field[lai_field.index>'2017-04-21'] = 0.3247 + lai_field[lai_field.index>'2017-05-02'] = 0.5546 + lai_field[lai_field.index>'2017-05-10'] = 0.5852 + lai_field[lai_field.index>'2017-05-16'] = 0.3058 + lai_field[lai_field.index>'2017-05-26'] = 0.5373 + lai_field[lai_field.index>'2017-05-29'] = 0.332 + lai_field[lai_field.index>'2017-06-02'] = 0.2856 + lai_field[lai_field.index>'2017-06-13'] = 0.4717 + lai_field[lai_field.index>'2017-06-26'] = 0.2982 + lai_field[lai_field.index>'2017-07-06'] = 0.253 + + height_field[height_field.index>'2017-03-28'] = 0.005774 + height_field[height_field.index>'2017-04-05'] = 0.015275 + height_field[height_field.index>'2017-04-10'] = 0.026458 + height_field[height_field.index>'2017-04-21'] = 0.049329 + height_field[height_field.index>'2017-05-02'] = 0.01 + height_field[height_field.index>'2017-05-10'] = 0.01 + height_field[height_field.index>'2017-05-26'] = 0.028868 + height_field[height_field.index>'2017-05-29'] = 0.028868 + height_field[height_field.index>'2017-06-02'] = 0.028868 + height_field[height_field.index>'2017-06-13'] = 0.020817 + height_field[height_field.index>'2017-06-26'] = 0.025166 + height_field[height_field.index>'2017-07-06'] = 0.015275 + + 
ax1.fill_between(lai_field.index,lai_old.values.flatten()-lai_field.values.flatten(), lai_old.values.flatten()+lai_field.values.flatten(), color='green', alpha=0.2, label='Standard Deviation') + ax2.fill_between(height_field.index,height_old.values.flatten()-height_field.values.flatten(), height_old.values.flatten()+height_field.values.flatten(), color='black', alpha=0.2, label='Standard Deviation') + + ax2.legend(bbox_to_anchor=(.965, 0.45), prop={'size': 14}) + ax1.legend(loc=2, prop={'size': 14}) + + # ax1.set_xticks([]) + ax1.set_ylim(0,6.7) + ax2.set_ylim(0,1) + start, end = ax1.get_ylim() + ax1.yaxis.set_ticks(np.arange(start, end, 2)) + + # soil moisture and rainfall + ax3 = plt.subplot(gs[3], sharex = ax) + plt.tick_params(labelsize=17) + # remove vertical gap between subplots + plt.subplots_adjust(hspace=.0) + plt.grid(linestyle='dotted') + ax3.plot(date,data_field.filter(like='SM_insitu'),color='blue', linewidth=2, label='Soil Moisture') + ax3.set_ylabel('Soil Moisture\n$[cm^3/cm^3]$', fontsize=16) + ax5 = ax3.twinx() + date_agro = pd.to_datetime(df_agro['date'], format='%d.%m.%Y') + agro_sum = df_agro['SUM_NN050'][87:192] + ax5.bar(agro_sum.index, agro_sum, width=0.8, label='Precipitation') + ax3.legend(loc=2, prop={'size': 14}) + ax5.legend(loc=1, prop={'size': 14}) + ax5.set_ylabel('Precipita-\ntion [mm]', fontsize=16) + ax5.set_ylim(0,39) + ax3.set_ylim(0.17,0.38) + plt.setp(ax3.get_xticklabels(), visible=False) + plt.tick_params(labelsize=17) + + ax4 = plt.subplot(gs[4], sharex = ax) + plt.tick_params(labelsize=17) + # remove vertical gap between subplots + plt.subplots_adjust(hspace=.0) + plt.grid(linestyle='dotted') + bbch = pd.read_csv('/media/tweiss/Work/z_final_mni_data_2017/bbch_2017.csv',header=[0,1]) + bbch = bbch.set_index(pd.to_datetime(bbch['None']['None'], format='%Y-%m-%d')) + bbch.index = pd.to_datetime(bbch.index) + + lai_field['bbch'] = 0 + + bbch_new = bbch.filter(like=kkkk[0:3]) + for t, tt in enumerate(bbch.index): + if t == 0: + 
start_date = '2017-03-29' + else: + start_date = bbch.index[t] + try: + end_date = bbch.index[t+1] + except IndexError: + start_date = bbch.index[t] + end_date = '2017-07-30' + mask = (lai_field.index > start_date) & (lai_field.index <= end_date) + + bbbb = lai_field['bbch'].where(~mask, other=2) + if bbch.index[t] < datetime.datetime.strptime('2017-03-29', '%Y-%m-%d'): + pass + else: + if bbch_new.values[t] < 30 and bbch_new.values[t] >= 20: + # lai_field['bbch'] = lai_field['bbch'].where(~mask, other=2) + n2 = max(lai_field['bbch'][mask].index) + elif bbch_new.values[t] < 40 and bbch_new.values[t] >= 30: + # lai_field['bbch'] = lai_field['bbch'].where(~mask, other=3) + n3 = max(lai_field['bbch'][mask].index) + elif bbch_new.values[t] < 50 and bbch_new.values[t] >= 40: + # lai_field['bbch'] = lai_field['bbch'].where(~mask, other=4) + n4 = max(lai_field['bbch'][mask].index) + elif bbch_new.values[t] < 60 and bbch_new.values[t] >= 50: + # lai_field['bbch'] = lai_field['bbch'].where(~mask, other=5) + n5 = max(lai_field['bbch'][mask].index) + elif bbch_new.values[t] < 70 and bbch_new.values[t] >= 60: + # lai_field['bbch'] = lai_field['bbch'].where(~mask, other=6) + n6 = max(lai_field['bbch'][mask].index) + elif bbch_new.values[t] < 80 and bbch_new.values[t] >= 70: + # lai_field['bbch'] = lai_field['bbch'].where(~mask, other=7) + n7 = max(lai_field['bbch'][mask].index) + elif bbch_new.values[t] < 90 and bbch_new.values[t] >= 80: + # lai_field['bbch'] = lai_field['bbch'].where(~mask, other=8) + n8 = max(lai_field['bbch'][mask].index) + elif bbch_new.values[t] < 100 and bbch_new.values[t] >= 90: + # lai_field['bbch'] = lai_field['bbch'].where(~mask, other=9) + n9 = max(lai_field['bbch'][mask].index) + # bbch_ = lai_field['bbch'].value_counts().sort_index().values + bbch_ = [n2-datetime.datetime.strptime('2017-03-22', '%Y-%m-%d'),n3-n2,n4-n3,n5-n4,n6-n5,n7-n6,n8-n7,n9-n8] + + #Plot BBCH + hm = lai_field.filter(like='bbch') + label = ['','BBCH',''] + width = 0.3 + 
legend_items = ['Tillering','Stem elongation','Booting','Heading','Flowering','Fruit development','Ripening', 'Senescence'] + + a_508 = 0 + + aa_508 = mdates.date2num(lai_field['bbch'].index[0]) + + for xxxx, kkkkk in enumerate(bbch_): + a_508 = a_508 + bbch_[xxxx].total_seconds() /60/60/24 + ax4.barh(label,[0,a_508,0],width, label=legend_items[xxxx], left=[0,aa_508,0]) + + aa_508 = mdates.date2num(lai_field['bbch'].index[0]) + a_508 + + + xmin, xmax = ax4.get_xlim() + + ax4.barh(label,[0,200,0],width, left=[0,xmax-1,0], color='white') + ax4.set_ylim(0,1.7) + plt.legend(bbox_to_anchor=(.935, 0.4),ncol=8) + + plt.text(0.98, 0.05, "(a)", transform=ax.transAxes, fontsize=20, horizontalalignment='center', verticalalignment='center') + plt.text(0.98, 0.2, "(b)", transform=ax0.transAxes, fontsize=20, horizontalalignment='center', verticalalignment='center') + plt.text(0.98, 0.2, "(c)", transform=ax2.transAxes, fontsize=20, horizontalalignment='center', verticalalignment='center') + plt.text(0.98, 0.2, "(d)", transform=ax3.transAxes, fontsize=20, horizontalalignment='center', verticalalignment='center') + plt.text(0.98, 0.2, "(e)", transform=ax4.transAxes, fontsize=20, horizontalalignment='center', verticalalignment='center') + + + plt.savefig(plot_output_path+pol+'_all_'+kkk+kkkk, bbox_inches = 'tight') + + plt.close() + diff --git a/kaska/vwc_tau.py b/kaska/vwc_tau.py new file mode 100644 index 0000000..6554a8e --- /dev/null +++ b/kaska/vwc_tau.py @@ -0,0 +1,1028 @@ + +import os +import pandas as pd +import matplotlib.pyplot as plt +import matplotlib.dates as mdates +from matplotlib.dates import MonthLocator +# import matplotlib.ticker +import numpy as np +# from sense.canopy import OneLayer +# from sense.soil import Soil +# from sense import model +import scipy.stats +from scipy.optimize import minimize +import pdb +from z_helper import * +# from z_optimization import * +import datetime +from matplotlib import gridspec +import datetime +from matplotlib.lines import 
Line2D +import copy +import os +import osr +import gdal +import datetime +import numpy as np +from netCDF4 import Dataset +from scipy.ndimage import label +from utils import reproject_data +from skimage.filters import sobel +from collections import namedtuple +from scipy.optimize import minimize +from scipy.interpolate import interp1d +from watercloudmodel import cost_function +from watercloudmodel import cost_function2 +from scipy.ndimage.filters import gaussian_filter1d +import pdb +from z_helper import * + +def smooth(y, box_pts): + box = np.ones(box_pts)/box_pts + y_smooth = np.convolve(y, box, mode='same') + return y_smooth + +def fresnel(eps, theta): + theta = np.deg2rad(theta) + num = (eps-1)*(np.sin(theta)**2 - eps*(1+np.sin(theta)**2)) + den = eps*np.cos(theta) + np.sqrt(eps - np.sin(theta)**2) + den = den**2 + return np.abs(num/den) + +def mv2eps(a, b, c, mv): + eps = a + b * mv + c * mv**2 + return eps + +def quad_approx_solver(a, b, c, theta, alphas): + x = np.arange(0.01, 0.5, 0.01) + p = np.polyfit(x, fresnel(mv2eps(a, b, c, x),theta.mean()), 2) + # 2nd order polynomial + #solve + solutions = [np.roots([p[0], p[1], p[2]-aa]) for aa in alphas] + return solutions + + +def do_one_pixel_field(sar_inference_data, vv, vh, lai, theta, time, sm, sm_std, sr, sr_std, orbits, unc=1.): + + + lais = [] + srs = [] + alphas = [] + sms = [] + ps = [] + times = [] + uorbits = np.unique(orbits) + for orbit in uorbits: + orbit_mask = orbits == orbit + ovv, ovh, olai, otheta, otime = vv[orbit_mask], vh[orbit_mask], lai[orbit_mask], theta[orbit_mask], time[orbit_mask] + osm, osm_std, osro, osro_std = sm[orbit_mask], sm_std[orbit_mask], sr[orbit_mask], sr_std[orbit_mask] + + olai_std = np.ones_like(olai)*0.05 + + alpha = fresnel(mv2eps(1.99, 38.9, 11.5, osm), otheta) + alpha_std = np.ones_like(alpha)*0.2 + + soil_sigma_mask = olai < 1 + sigma_soil_vv_mu = np.mean(ovv[soil_sigma_mask]) + sigma_soil_vh_mu = np.mean(ovh[soil_sigma_mask]) + + xvv = np.array([1, 0.5, 
sigma_soil_vv_mu]) + xvh = np.array([1, 0.5, sigma_soil_vh_mu]) + + prior_mean = np.concatenate([[0, ]*6, alpha, osro, olai]) + prior_unc = np.concatenate([[10., ]*6, alpha_std, osro_std, olai_std]) + + x0 = np.concatenate([xvv, xvh, alpha, osro, olai]) + + bounds = ( + [[None, None]] * 6 + + [[0.1, 3.3]] * olai.shape[0] + + [[0, .03]] * olai.shape[0] + + [[0, 8]] * olai.shape[0] + ) + + gamma = [1000, 1000] + retval = minimize(cost_function, + x0, + args=(ovh, ovv, otheta, gamma, prior_mean, prior_unc, unc), + jac=True, + bounds = bounds, + options={"disp": False},) + + posterious_lai = retval.x[6+2*len(olai) : ] + posterious_sr = retval.x[6+len(olai) : 6+2*len(olai)] + posterious_alpha = retval.x[6 : 6+len(olai)] + sols = np.array(quad_approx_solver(1.99, 38.9, 11.5, otheta, posterious_alpha)).min(axis=1) + lais.append(posterious_lai) + srs.append(posterious_sr) + sms.append(sols) + times.append(otime) + ps.append(retval.x[:6]) + + order = np.argsort(np.hstack(times)) + times = np.hstack(times )[order] + lais = np.hstack(lais )[order] + srs = np.hstack(srs )[order] + sms = np.hstack(sms )[order].real + return times, lais, srs, sms, np.array(ps) + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +# def do_one_pixel_field(data_field, vv, vh, lai, theta, time, sm, sm_std, sr, sr_std, height, orbits, unc): + +# lais = [] +# coefs = [] +# sms = [] +# times = [] + +# uorbits = np.unique(orbits) +# # uorbits = np.array([44]) +# for orbit in uorbits: +# orbit_mask = orbits == orbit +# # orbit_mask = (orbits == 44) | (orbits == 168) +# # orbit_mask = (orbits == 44) | (orbits == 95) | (orbits == 117) | (orbits == 168) +# # orbit_mask = (orbits == 95) +# ovv, ovh, olai, otheta, otime = vv[orbit_mask], vh[orbit_mask], lai[orbit_mask], theta[orbit_mask], time[orbit_mask] +# osm, osm_std, oscoef, oscoef_std = sm[orbit_mask], sm_std[orbit_mask], coef[orbit_mask], coef_std[orbit_mask] + +# oheight = height[orbit_mask] + +# olai_std = np.ones_like(olai)*0.05 + +# 
alpha = _calc_eps(osm) +# alpha = osm +# alpha_std = np.ones_like(alpha)*10 +# alpha_std = osm_std +# # pdb.set_trace() +# prior_mean = np.concatenate([alpha,oscoef]) +# prior_unc = np.concatenate([alpha_std,oscoef_std]) +# x0 = np.concatenate([alpha,oscoef]) +# data = np.concatenate([oheight,olai]) +# bounds = ( +# # [[2.5, 30]] * olai.shape[0] +# [[0.01, 0.5]] * olai.shape[0] +# + [[0.01, 3]] * olai.shape[0] +# ) + +# gamma = [500, 500] + +# retval = minimize(cost_function2, +# x0, +# args=(ovh, ovv, otheta, gamma, prior_mean, prior_unc, data, unc), +# jac=True, +# bounds = bounds, +# options={"disp": True},) + +# # posterious_lai = retval.x[2*len(olai) : 3*len(olai)] +# posterious_coef = retval.x[len(olai) : +2*len(olai)] +# posterious_mv = retval.x[ : +len(olai)] +# # lais.append(posterious_lai) +# coefs.append(posterious_coef) +# # x = np.arange(0.01, 0.5, 0.001) +# # xx = _calc_eps(x) +# # sols=[] +# # for i in posterious_mv: +# # p, pp = find_nearest(xx,i) +# # sols.append(x[pp]) +# # sols = np.array(sols) + +# sms.append(posterious_mv) +# # sms.append(sols) +# times.append(otime) + +# order = np.argsort(np.hstack(times)) +# times = np.hstack(times )[order] +# # lais = np.hstack(lais )[order] +# lais=0 +# coefs = np.hstack(coefs )[order] +# # coefs=0 +# sms = np.hstack(sms )[order].real + +# return times, lais, coefs, sms, orbit_mask + + + +# def _simple_ew(): +# """ +# eq. 4.69 +# simplistic approach with T=23°C, bulk density = 1.7 g/cm3 +# """ +# f0 = 18.64 # relaxation frequency [GHz] +# f = 5.405 +# hlp = f/f0 +# e1 = 4.9 + (74.1)/(1.+hlp**2.) +# # e2 =(74.1*hlp)/(1.+hlp**2.) + 6.46 * self.sigma/self.f +# # return e1 + 1.j * e2 +# return e1 + +# def _calc_eps(mv): +# """ +# calculate dielectric permittivity +# Eq. 
4.66 (Ulaby et al., 2014) +# """ +# clay = 0.0738 +# sand = 0.2408 +# bulk = 1.45 +# alpha = 0.65 +# beta1 = 1.27-0.519*sand - 0.152*clay +# beta2 = 2.06 - 0.928*sand -0.255*clay +# sigma = -1.645 + 1.939*bulk - 2.256*sand + 1.594*clay + + +# e1 = (1.+0.66*bulk+mv**beta1*_simple_ew()**alpha - mv)**(1./alpha) +# # e2 = np.imag(self.ew)*self.mv**self.beta2 +# # return e1 + 1.j*e2 +# return e1 + +# def quad_approx_solver(alphas): +# x = np.arange(0.01, 0.5, 0.01) +# p = np.polyfit(x, _calc_eps(x), 2) +# # 2nd order polynomial +# #solve +# solutions = [np.roots([p[0], p[1], p[2]-aa]) for aa in alphas] +# return solutions + +def find_nearest(array, value): + array = np.asarray(array) + idx = (np.abs(array - value)).argmin() + return array[idx], idx + + + + + + + + +### Data preparation df_agro!!!! ### +#----------------------------------------------------------------- +# storage information +path = '/media/tweiss/Work/z_final_mni_data_2017' +file_name = 'in_situ_s1_buffer_50' # theta needs to be changed to for norm multi +extension = '.csv' + +path_agro = '/media/nas_data/2017_MNI_campaign/field_data/meteodata/agrarmeteorological_station' +path_agro = '/media/tweiss/Work/Paper/in_progress/RT_model_comparison/images' +file_name_agro = 'Daily_Freising' +extension_agro = '.csv' + +field = '508_high' +pol = 'vv' + +df, df_agro, field_data, field_data_orbit, theta_field, sm_field, height_field, lai_field, vwc_field, pol_field, vv_field, vh_field, relativeorbit, vwcpro_field = read_data(path, file_name, extension, field, path_agro, file_name_agro, extension_agro, pol) + +aggregation = ['','_buffer_30','_buffer_50','_buffer_100','_Field_buffer_30'] +pre_processing = ['multi', 'norm_multi'] +aggregation = ['_buffer_100','_Field_buffer_30'] +aggregation = ['_buffer_100'] +pre_processing = ['multi'] +# aggregation = ['_buffer_100'] +# aggregation = ['_Field_buffer_30'] +surface_list = ['Oh92', 'Oh04', 'Dubois95', 'WaterCloud', 'I2EM'] +canopy_list = ['turbid_isotropic', 
'water_cloud'] + +surface_list = ['Oh92', 'I2EM'] +canopy_list = ['turbid_isotropic'] +# canopy_list = ['turbid_rayleigh'] +surface_list = ['Oh04'] +# surface_list = ['Oh92'] +# surface_list = ['I2EM'] +# canopy_list = ['water_cloud'] +field = ['508_high'] +# field = ['508_low'] +# field = ['508_med'] +# field = ['301_high'] +# field = ['301_low'] +# field = ['301_med'] +# field = ['542_high'] +# field = ['542_low'] +# field = ['542_med'] + +field = ['508_high', '508_low', '508_med', '301_high', '301_low', '301_med', '542_high', '542_low', '542_med'] + + +### option for time invariant or variant calibration of parameter +#------------------------------- +opt_mod = ['time_variant'] +#--------------------------- + + +for p in pre_processing: + + for pp in aggregation: + + versions = ['everything','','44_117','95_168','44_168','117_95','44_95','117_168','44_117_95','44_117_168','44_95_168','117_95_168'] + ver = ['','','44','95','44','117','44','117','44','44','44','117'] + ver2 = ['','','117','168','168','95','95','168','117','117','95','95'] + ver3 = ['','','','','','','','','95','168','168','168'] + + versions = ['','everything'] + ver = ['',''] + ver2 = ['',''] + ver3 = ['',''] + + # versions = ['44_168'] + # ver = ['44'] + # ver2 = ['168'] + # ver3 = [''] + + for i, ii in enumerate(versions): + + if ii == 'everything': + orbit_list = [None] + orbit1=None + orbit2=None + orbit3=None + orbit4=None + plot_output_path = '/media/tweiss/Work/paper3/z_dense_s1_time_series_n7'+p+pp+'_all'+'/' + csv_output_path = plot_output_path+'csv/None_' + elif ii == '': + orbit_list = [44,117,95,168] + orbit2=None + orbit3=None + orbit4=None + plot_output_path = '/media/tweiss/Work/paper3/z_dense_s1_time_series_n7'+p+pp+'/' + csv_output_path = plot_output_path+'csv/' + else: + plot_output_path = '/media/tweiss/Work/paper3/z_dense_s1_time_series_n7'+p+pp+'_'+ii+'/' + csv_output_path = plot_output_path+'csv/'+ver[i]+'_'+ver[i]+'_' + orbit_list = [int(ver[i])] + orbit2 = int(ver2[i]) + 
if ver3[i] == '': + orbit3 = None + else: + orbit3 = int(ver3[i]) + + + data = pd.read_csv(csv_output_path+'all_50.csv',header=[0,1,2,3,4,5],index_col=0) + + + + + + + # fig, ax = plt.subplots(figsize=(17, 13)) + # gs = gridspec.GridSpec(5, 1, height_ratios=[14, 3, 3, 3, 3]) + # ax = plt.subplot(gs[0]) + + # plt.ylabel('Backscatter [dB]', fontsize=18) + # plt.xlabel('Date', fontsize=18) + # plt.tick_params(labelsize=17) + + # ax.set_ylim([-21.5,-8.5]) + + + # colormaps = ['Greens', 'Purples', 'Blues', 'Oranges', 'Reds', 'Greys', 'pink', 'bone', 'Blues', 'Blues', 'Blues'] + # r = 0 + + # colormap = plt.get_cmap(colormaps[r]) + # colors = [colormap(rr) for rr in np.linspace(0.35, 1., 3)] + + for kkk in opt_mod: + for kkkk in field: + for k in surface_list: + for kk in canopy_list: + + if k == 'Oh92': + hm = 'Oh92' + colors = 'b' + elif k == 'Oh04': + hm = 'Oh04' + colors = 'r' + elif k == 'Dubois95': + hm='Dubois95' + colors = 'y' + elif k == 'WaterCloud': + hm = 'WCM' + colors = 'm' + elif k == 'I2EM': + hm = 'IEM_B' + colors = 'g' + + data_field = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk) + data_field.index = pd.to_datetime(data_field.index) + date = data_field.index + + vv = data_field.filter(like='S1_vv').values.flatten() + vv = 10*np.log10(vv) + vv_model = data_field.filter(like='vv_model').values.flatten() + vv_model = 10*np.log10(vv_model) + vh = data_field.filter(like='S1_vh').values.flatten() + vh = 10*np.log10(vh) + lai = data_field.filter(like='LAI_insitu').values.flatten() + lai = lai + theta = data_field.filter(like='theta').values.flatten() + theta = np.rad2deg(theta) + time = date + time2 = np.array(time) + for jj in range(len(time)): + time2[jj] = time[jj].replace(microsecond=0).replace(second=0).replace(minute=0) + time2 = pd.to_datetime(time2) + + + s2_data = pd.read_csv('/media/tweiss/Daten/data_AGU/S2_'+kkkk+pp+'.csv',header=[0],index_col=0) + s2_data.index = pd.to_datetime(s2_data.index).floor('Min').floor('H') + 
s2_lai = s2_data.loc[time2]['lai'].values.flatten() + s2_cab = s2_data.loc[time2]['cab'].values.flatten() + s2_cbrown = s2_data.loc[time2]['cbrown'].values.flatten() + lai = s2_lai + sm_insitu = data_field.filter(like='SM_insitu').values.flatten() + api_data = pd.read_csv('/media/tweiss/Daten/data_AGU/api_sm.csv',header=[0],index_col=0) + api_data.index = pd.to_datetime(api_data.index) + api_sm = api_data.loc[time2].values.flatten() + sm = data_field.filter(like='SM_insitu').values.flatten() + # sm = smooth(sm,2) + sm[:] = 0.25 + # sm = api_sm + sm_std = data_field.filter(like='SM_insitu').values.flatten() + # ooo = np.abs(sm[1:]-sm[:-1])*20 + # sm_std[0] = ooo[-1] + # sm_std[1:] = ooo + sm_std[:] = 1.0 + + # out_shape = date + # lai_outputs = np.zeros(out_shape ) + # sm_outputs = np.zeros(out_shape ) + # sr_outputs = np.zeros(out_shape ) + + # ps_shape = (len(uorbits),) + lai + + # Avv_outputs = np.zeros(ps_shape) + # Bvv_outputs = np.zeros(ps_shape) + # Cvv_outputs = np.zeros(ps_shape) + + # Avh_outputs = np.zeros(ps_shape) + # Bvh_outputs = np.zeros(ps_shape) + # Cvh_outputs = np.zeros(ps_shape) + + + # sr = lai*1. + # sr[:] = 0.3 + # sr_std = lai*1. 
+ # sr_std[:] = 2 + + # vv = np.maximum(vv, 0.0001) + # vv = 10 * np.log10(vv) + # vh = np.maximum(vh, 0.0001) + # vh = 10 * np.log10(vh) + + unc = 1.8 + orbits = data_field.filter(like='relativeorbit').values.flatten() + mask95 = orbits == 95 + mask168 = orbits == 168 + mask44 = orbits == 44 + mask117 = orbits == 117 + + + + tau = data_field.filter(like='coef').values.flatten() * data_field.filter(like='VWC').values.flatten() + height = data_field.filter(like='height').values.flatten() + + + data_biomass = pd.read_csv('/media/tweiss/Work/z_final_mni_data_2017/new_in_situ_s1multi_buffer_50neu_biomass.csv',header=[0,1],index_col=1) + + data_biomass = data_biomass[:len(height)] + data_biomass2 = data_biomass.filter(like=kkkk) + wetbiomass = data_biomass2.filter(like='wet') + drybiomass = data_biomass2.filter(like='dry') + + + xxx = tau[mask168]-tau[mask117] + + # plt.rcParams["figure.figsize"] = (10,7) + # plt.plot(time.values, tau) + # plt.grid() + # plt.ylabel('tau') + # plt.xlabel('Time') + # plt.xlim([datetime.date(2017, 3, 22), datetime.date(2017, 7, 18)]) + # # plt.savefig('/media/tweiss/Daten/data_AGU/test_kaska/tau_x', bbox_inches = 'tight') + + # # plt.show() + + + # plt.close() + + vwc = data_field.filter(like='VWC').values.flatten() + b = data_field.filter(like='coef').values.flatten() + #coef = data_field.filter(like='coef').values.flatten() + # plt.plot(time.values, b) + # plt.grid() + # plt.ylabel('b = tau/vwc') + # plt.xlabel('Time') + # plt.xlim([datetime.date(2017, 3, 22), datetime.date(2017, 7, 18)]) + # plt.savefig('/media/tweiss/Daten/data_AGU/test_kaska/b_301_n1', bbox_inches = 'tight') + # pdb.set_trace() + + + fig, ax = plt.subplots(figsize=(17, 13)) + gs = gridspec.GridSpec(6, 1, height_ratios=[5, 5, 5, 5, 5,5]) + ax = plt.subplot(gs[0]) + + plt.ylabel('Backscatter [dB]', fontsize=14) + # plt.xlabel('Date', fontsize=18) + # plt.tick_params(labelsize=12) + + ax.plot(time.values, vv, label='VV S1') + ax.plot(time.values, vv_model, label='VV 
model') + plt.grid(linestyle='dotted') + ax.set_xlim([datetime.date(2017, 3, 22), datetime.date(2017, 7, 18)]) + plt.legend() + plt.subplots_adjust(hspace=.0) + + + ax1 = plt.subplot(gs[1]) + ax1.plot(time.values, vwc, label='VWC') + plt.ylabel('VWC [kg/m2]', fontsize=14) + plt.grid(linestyle='dotted') + ax.set_xlim([datetime.date(2017, 3, 22), datetime.date(2017, 7, 18)]) + plt.subplots_adjust(hspace=.0) + + ax2 = plt.subplot(gs[2]) + ax2.plot(time.values, tau, label='tau') + plt.ylabel('tau', fontsize=18) + plt.grid(linestyle='dotted') + ax.set_xlim([datetime.date(2017, 3, 22), datetime.date(2017, 7, 18)]) + ax2.set_ylim(0,1) + plt.subplots_adjust(hspace=.0) + + ax3 = plt.subplot(gs[3]) + ax3.plot(time.values, b, label='b') + plt.ylabel('b', fontsize=14) + plt.grid(linestyle='dotted') + ax3.set_ylim(0,1) + ax.set_xlim([datetime.date(2017, 3, 22), datetime.date(2017, 7, 18)]) + plt.subplots_adjust(hspace=.0) + + + ax5 = plt.subplot(gs[4]) + ax5.plot(time.values, height, label='height') + plt.ylabel('height [m]', fontsize=14) + plt.grid(linestyle='dotted') + ax.set_xlim([datetime.date(2017, 3, 22), datetime.date(2017, 7, 18)]) + plt.subplots_adjust(hspace=.0) + + ax6 = plt.subplot(gs[5]) + ax6.plot(time.values, wetbiomass, label='wet biomass') + ax6.plot(time.values, drybiomass, label='dry biomass') + plt.ylabel('Biomass [kg/m2]', fontsize=14) + plt.grid(linestyle='dotted') + plt.legend() + ax.set_xlim([datetime.date(2017, 3, 22), datetime.date(2017, 7, 18)]) + + plt.setp(ax.get_xticklabels(), visible=False) + plt.setp(ax1.get_xticklabels(), visible=False) + plt.setp(ax2.get_xticklabels(), visible=False) + plt.setp(ax3.get_xticklabels(), visible=False) + # plt.setp(ax4.get_xticklabels(), visible=False) + plt.setp(ax5.get_xticklabels(), visible=False) + + + plt.savefig('/media/tweiss/Daten/data_AGU/test_kaska2/tau'+kkkk+'_'+k+'_'+ii+'_n1_s_o', bbox_inches = 'tight') + plt.close() + +pdb.set_trace() + + + # times, lais, srs, sms, ps = do_one_pixel_field(data_field, 
vv, vh, lai, theta, time, sm, sm_std, sr, sr_std, orbits, unc=unc) + + # # plt.plot(sm_insitu) + # # plt.plot(sm) + # # plt.plot(sms) + + # # pdb.set_trace() + + # # unc_array = np.arange(0,2,0.1) + # # coef_array = np.arange(0,2,0.1) + # # sm_array = np.arange(0,2,0.1) + + # # hm = {} + # # for r in unc_array: + # # for rr in coef_array: + # # for rrr in sm_array: + # # unc = r + # # sr_std[:] = rr + # # sm_std[:] = rrr + # # times, lais, srs, sms, ps = do_one_pixel_field(data_field, vv, vh, lai, theta, time, sm, sm_std, sr, sr_std, orbits, unc=unc) + # # rmse_vv = rmse_prediction(sm_insitu,sms) + # # bias_vv = bias_prediction(sm_insitu,sms) + # # ubrmse_vv = ubrmse_prediction(rmse_vv,bias_vv) + # # hm[(r,rr,rrr)] = ubrmse_vv + + # # pdb.set_trace() + # # min(hm, key=hm.get) + # # hm[min(hm, key=hm.get)] + + # # times, lais, coefs, sms, orbit_mask = do_one_pixel_field(data_field, vv, vh, lai, theta, time, sm, sm_std, coef, coef_std, height, orbits,unc=unc) + + # plt.rcParams["figure.figsize"] = (10,7) + # plt.plot(time,sm_insitu) + # rmse_vv = rmse_prediction(sm_insitu,sm) + # bias_vv = bias_prediction(sm_insitu,sm) + # ubrmse_vv = ubrmse_prediction(rmse_vv,bias_vv) + # plt.plot(time,sm, label='prior RMSE:'+str(rmse_vv)[0:6]+' ubRMSE:'+str(ubrmse_vv)[0:6]) + # rmse_vv = rmse_prediction(sm_insitu,sms) + # bias_vv = bias_prediction(sm_insitu,sms) + # ubrmse_vv = ubrmse_prediction(rmse_vv,bias_vv) + # plt.plot(times,sms, label='model RMSE:'+str(rmse_vv)[0:6]+' ubRMSE:'+str(ubrmse_vv)[0:6]) + + # # #orbit_mask + # # plt.plot(time[orbit_mask],sm_insitu[orbit_mask]) + # # rmse_vv = rmse_prediction(sm_insitu[orbit_mask],sm[orbit_mask]) + # # bias_vv = bias_prediction(sm_insitu[orbit_mask],sm[orbit_mask]) + # # ubrmse_vv = ubrmse_prediction(rmse_vv,bias_vv) + # # plt.plot(time[orbit_mask],sm[orbit_mask], label='prior RMSE:'+str(rmse_vv)[0:6]+' ubRMSE:'+str(ubrmse_vv)[0:6]) + # # rmse_vv = rmse_prediction(sm_insitu[orbit_mask],sms) + # # bias_vv = 
bias_prediction(sm_insitu[orbit_mask],sms) + # # ubrmse_vv = ubrmse_prediction(rmse_vv,bias_vv) + # # plt.plot(times,sms, label='model RMSE:'+str(rmse_vv)[0:6]+' ubRMSE:'+str(ubrmse_vv)[0:6]) + + + # plt.legend() + # plt.grid() + # plt.ylabel('Soil Moisture') + # plt.xlabel('Time') + # plt.savefig('/media/tweiss/Daten/data_AGU/test_kaska/jose_unc08', bbox_inches = 'tight') + # plt.close() + # pdb.set_trace() + # # plt.plot(time[orbit_mask],coef) + # # plt.plot(times,coefs) + # # plt.show() + # pdb.set_trace() + # pdb.set_trace() + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + # if kkk == 'time invariant': + # if kk == 'turbid_isotropic': + # ax.plot(date, 10*np.log10(data_field.filter(like='vv_model')), color=colors, marker='s', linestyle='dashed', label = hm+ ' + ' + 'SSRT') + # else: + # ax.plot(date, 10*np.log10(data_field.filter(like='vv_model')), color=colors, marker='s', label = hm+ ' + ' + 'WCM') + # else: + # if kk == 'turbid_isotropic': + # ax.plot(date, 10*np.log10(data_field.filter(like='vv_model')), color=colors, marker='s', linestyle='dashed', label = hm+ ' + ' + 'SSRT') + # else: + # ax.plot(date, 10*np.log10(data_field.filter(like='vv_model')), color=colors, marker='s', label = hm+ ' + ' + 'WCM') + # a = 0 + # b = 0 + # c = 0 + # d = 0 + + # relativeorbit = data_field.filter(like='relativeorbit') + # for j in range(len(relativeorbit)): + # relativeorbit.index[j] + # x = relativeorbit.index[j] - datetime.timedelta(days=0.4) + # xx = relativeorbit.index[j] + datetime.timedelta(days=0.4) + # if relativeorbit.values.flatten()[j] == 95: + # if a == 0: + # ax.axvspan(x,xx, color='red', alpha=0.2, label = 'Incidence angle 43°, Descending track') + # a += 1 + # else: + # ax.axvspan(x,xx, color='red', alpha=0.2) + # elif relativeorbit.values.flatten()[j] == 117: + # if b == 0: + # ax.axvspan(x,xx, color='blue', alpha=0.2, label = 'Incidence angle 45°, Ascending track') + # b += 1 + # else: + # ax.axvspan(x,xx, color='blue', 
alpha=0.2) + # elif relativeorbit.values.flatten()[j] == 168: + # if c == 0: + # ax.axvspan(x,xx, color='orange', alpha=0.2, label = 'Incidence angle 35°, Descending track') + # c += 1 + # else: + # ax.axvspan(x,xx, color='orange', alpha=0.2) + # elif relativeorbit.values.flatten()[j] == 44: + # if d == 0: + # ax.axvspan(x,xx, color='green', alpha=0.2, label = 'Incidence angle 36°, Ascending track') + # d += 1 + # else: + # ax.axvspan(x,xx, color='green', alpha=0.2) + # else: + # pass + + # ax.plot(date,10*np.log10(data_field.filter(like='S1_vv')), '-', color='black', label='Sentinel-1', linewidth=3, marker='s') + + # ax.set_xlim([datetime.date(2017, 3, 22), datetime.date(2017, 7, 18)]) + # plt.legend(prop={'size': 14}, loc=3) + + # plt.grid(linestyle='dotted') + + # plt.setp(ax.get_xticklabels(), visible=False) + + # ax0 = plt.subplot(gs[1]) + # plt.tick_params(labelsize=17) + # for kkkk in field: + # for k in surface_list: + # for kk in canopy_list: + + # if k == 'Oh92': + # hm = 'Oh92' + # colors = 'b' + # elif k == 'Oh04': + # hm = 'Oh04' + # colors = 'r' + # elif k == 'Dubois95': + # hm='Dubois95' + # colors = 'y' + # elif k == 'WaterCloud': + # hm = 'WCM' + # colors = 'm' + # elif k == 'I2EM': + # hm = 'IEM_B' + # colors = 'g' + + + # ground = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk).filter(like='part_g') + # ground = ground[ground.columns[0]] + + # lai = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk).filter(like='LAI_insitu').values + # theta = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk).filter(like='theta').values + + # if kk == 'turbid_isotropic': + # coef = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk).filter(like='coef').values + # d = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk).filter(like='height').values + # T = np.exp(-coef*np.sqrt(lai)*d/np.cos(theta)) + # T=T**2 + # ax0.plot(date,T.flatten(), color=colors, marker='s', 
linestyle='dashed') + # else: + # B_vv = data.filter(like=k).filter(like=kk).filter(like=kkk).filter(like=kkkk).filter(like='B_vv').values + # T = np.exp(-2*B_vv*lai/np.cos(theta)) + # ax0.plot(date,T.flatten(), color=colors, marker='s') + + # a = 0 + # b = 0 + # c = 0 + # d = 0 + + # relativeorbit = data_field.filter(like='relativeorbit') + # for j in range(len(relativeorbit)): + # relativeorbit.index[j] + # x = relativeorbit.index[j] - datetime.timedelta(days=0.4) + # xx = relativeorbit.index[j] + datetime.timedelta(days=0.4) + # if relativeorbit.values.flatten()[j] == 95: + # if a == 0: + # ax0.axvspan(x,xx, color='red', alpha=0.2, label = 'descending 43°') + # a += 1 + # else: + # ax0.axvspan(x,xx, color='red', alpha=0.2) + # elif relativeorbit.values.flatten()[j] == 117: + # if b == 0: + # ax0.axvspan(x,xx, color='blue', alpha=0.2, label = 'ascending 43°') + # b += 1 + # else: + # ax0.axvspan(x,xx, color='blue', alpha=0.2) + # elif relativeorbit.values.flatten()[j] == 168: + # if c == 0: + # ax0.axvspan(x,xx, color='orange', alpha=0.2, label = 'descending 35°') + # c += 1 + # else: + # ax0.axvspan(x,xx, color='orange', alpha=0.2) + # elif relativeorbit.values.flatten()[j] == 44: + # if d == 0: + # ax0.axvspan(x,xx, color='green', alpha=0.2, label = 'ascending 36°') + # d += 1 + # else: + # ax0.axvspan(x,xx, color='green', alpha=0.2) + # else: + # pass + # plt.ylabel('Transmissivity\nT', fontsize=18) + # ax0.set_xlim([datetime.date(2017, 3, 22), datetime.date(2017, 7, 18)]) + # ax0.set_ylim(-0.2,1.1) + # plt.grid(linestyle='dotted') + # plt.setp(ax0.get_xticklabels(), visible=False) + + + # ax1 = plt.subplot(gs[2], sharex = ax) + # plt.tick_params(labelsize=17) + # # remove vertical gap between subplots + # plt.subplots_adjust(hspace=.0) + # plt.grid(linestyle='dotted') + # plt.setp(ax1.get_xticklabels(), visible=False) + + # lai_field = data_field.filter(like='LAI_insitu') + # height_field = data_field.filter(like='height') + + + # 
ax1.plot(date,lai_field,color='green',linewidth=2,label='LAI') + # ax2 = ax1.twinx() + # plt.tick_params(labelsize=17) + # ax2.plot(date,height_field,color='black', linewidth=2, label='Height') + # ax1.set_ylabel('LAI', fontsize=16) + # ax2.set_ylabel('Height\n[m]', fontsize=16) + + + # # add std for LAI and height for field 508 (data from field measurements) + # lai_old = copy.deepcopy(lai_field) + # height_old = copy.deepcopy(height_field) + # if field == '508_high': + # lai_field[lai_field.index>'2017-03-28'] = 0.2218 + # lai_field[lai_field.index>'2017-04-05'] = 0.1367 + # lai_field[lai_field.index>'2017-04-10'] = 0.4054 + # lai_field[lai_field.index>'2017-04-21'] = 0.3247 + # lai_field[lai_field.index>'2017-05-02'] = 0.5546 + # lai_field[lai_field.index>'2017-05-10'] = 0.5852 + # lai_field[lai_field.index>'2017-05-16'] = 0.3058 + # lai_field[lai_field.index>'2017-05-26'] = 0.5373 + # lai_field[lai_field.index>'2017-05-29'] = 0.332 + # lai_field[lai_field.index>'2017-06-02'] = 0.2856 + # lai_field[lai_field.index>'2017-06-13'] = 0.4717 + # lai_field[lai_field.index>'2017-06-26'] = 0.2982 + # lai_field[lai_field.index>'2017-07-06'] = 0.253 + + # height_field[height_field.index>'2017-03-28'] = 0.005774 + # height_field[height_field.index>'2017-04-05'] = 0.015275 + # height_field[height_field.index>'2017-04-10'] = 0.026458 + # height_field[height_field.index>'2017-04-21'] = 0.049329 + # height_field[height_field.index>'2017-05-02'] = 0.01 + # height_field[height_field.index>'2017-05-10'] = 0.01 + # height_field[height_field.index>'2017-05-26'] = 0.028868 + # height_field[height_field.index>'2017-05-29'] = 0.028868 + # height_field[height_field.index>'2017-06-02'] = 0.028868 + # height_field[height_field.index>'2017-06-13'] = 0.020817 + # height_field[height_field.index>'2017-06-26'] = 0.025166 + # height_field[height_field.index>'2017-07-06'] = 0.015275 + + # ax1.fill_between(lai_field.index,lai_old.values.flatten()-lai_field.values.flatten(), 
lai_old.values.flatten()+lai_field.values.flatten(), color='green', alpha=0.2, label='Standard Deviation') + # ax2.fill_between(height_field.index,height_old.values.flatten()-height_field.values.flatten(), height_old.values.flatten()+height_field.values.flatten(), color='black', alpha=0.2, label='Standard Deviation') + + # ax2.legend(bbox_to_anchor=(.965, 0.45), prop={'size': 14}) + # ax1.legend(loc=2, prop={'size': 14}) + + # # ax1.set_xticks([]) + # ax1.set_ylim(0,6.7) + # ax2.set_ylim(0,1) + # start, end = ax1.get_ylim() + # ax1.yaxis.set_ticks(np.arange(start, end, 2)) + + # # soil moisture and rainfall + # ax3 = plt.subplot(gs[3], sharex = ax) + # plt.tick_params(labelsize=17) + # # remove vertical gap between subplots + # plt.subplots_adjust(hspace=.0) + # plt.grid(linestyle='dotted') + # ax3.plot(date,data_field.filter(like='SM_insitu'),color='blue', linewidth=2, label='Soil Moisture') + # ax3.set_ylabel('Soil Moisture\n$[cm^3/cm^3]$', fontsize=16) + # ax5 = ax3.twinx() + # date_agro = pd.to_datetime(df_agro['date'], format='%d.%m.%Y') + # agro_sum = df_agro['SUM_NN050'][87:192] + # ax5.bar(agro_sum.index, agro_sum, width=0.8, label='Precipitation') + # ax3.legend(loc=2, prop={'size': 14}) + # ax5.legend(loc=1, prop={'size': 14}) + # ax5.set_ylabel('Precipita-\ntion [mm]', fontsize=16) + # ax5.set_ylim(0,39) + # ax3.set_ylim(0.17,0.38) + # plt.setp(ax3.get_xticklabels(), visible=False) + # plt.tick_params(labelsize=17) + + # ax4 = plt.subplot(gs[4], sharex = ax) + # plt.tick_params(labelsize=17) + # # remove vertical gap between subplots + # plt.subplots_adjust(hspace=.0) + # plt.grid(linestyle='dotted') + # bbch = pd.read_csv('/media/tweiss/Work/z_final_mni_data_2017/bbch_2017.csv',header=[0,1]) + # bbch = bbch.set_index(pd.to_datetime(bbch['None']['None'], format='%Y-%m-%d')) + # bbch.index = pd.to_datetime(bbch.index) + + # lai_field['bbch'] = 0 + + # bbch_new = bbch.filter(like=kkkk[0:3]) + # for t, tt in enumerate(bbch.index): + # if t == 0: + # 
start_date = '2017-03-29' + # else: + # start_date = bbch.index[t] + # try: + # end_date = bbch.index[t+1] + # except IndexError: + # start_date = bbch.index[t] + # end_date = '2017-07-30' + # mask = (lai_field.index > start_date) & (lai_field.index <= end_date) + + # bbbb = lai_field['bbch'].where(~mask, other=2) + # if bbch.index[t] < datetime.datetime.strptime('2017-03-29', '%Y-%m-%d'): + # pass + # else: + # if bbch_new.values[t] < 30 and bbch_new.values[t] >= 20: + # # lai_field['bbch'] = lai_field['bbch'].where(~mask, other=2) + # n2 = max(lai_field['bbch'][mask].index) + # elif bbch_new.values[t] < 40 and bbch_new.values[t] >= 30: + # # lai_field['bbch'] = lai_field['bbch'].where(~mask, other=3) + # n3 = max(lai_field['bbch'][mask].index) + # elif bbch_new.values[t] < 50 and bbch_new.values[t] >= 40: + # # lai_field['bbch'] = lai_field['bbch'].where(~mask, other=4) + # n4 = max(lai_field['bbch'][mask].index) + # elif bbch_new.values[t] < 60 and bbch_new.values[t] >= 50: + # # lai_field['bbch'] = lai_field['bbch'].where(~mask, other=5) + # n5 = max(lai_field['bbch'][mask].index) + # elif bbch_new.values[t] < 70 and bbch_new.values[t] >= 60: + # # lai_field['bbch'] = lai_field['bbch'].where(~mask, other=6) + # n6 = max(lai_field['bbch'][mask].index) + # elif bbch_new.values[t] < 80 and bbch_new.values[t] >= 70: + # # lai_field['bbch'] = lai_field['bbch'].where(~mask, other=7) + # n7 = max(lai_field['bbch'][mask].index) + # elif bbch_new.values[t] < 90 and bbch_new.values[t] >= 80: + # # lai_field['bbch'] = lai_field['bbch'].where(~mask, other=8) + # n8 = max(lai_field['bbch'][mask].index) + # elif bbch_new.values[t] < 100 and bbch_new.values[t] >= 90: + # # lai_field['bbch'] = lai_field['bbch'].where(~mask, other=9) + # n9 = max(lai_field['bbch'][mask].index) + # # bbch_ = lai_field['bbch'].value_counts().sort_index().values + # bbch_ = [n2-datetime.datetime.strptime('2017-03-22', '%Y-%m-%d'),n3-n2,n4-n3,n5-n4,n6-n5,n7-n6,n8-n7,n9-n8] + + # #Plot BBCH + # hm = 
lai_field.filter(like='bbch') + # label = ['','BBCH',''] + # width = 0.3 + # legend_items = ['Tillering','Stem elongation','Booting','Heading','Flowering','Fruit development','Ripening', 'Senescence'] + + # a_508 = 0 + + # aa_508 = mdates.date2num(lai_field['bbch'].index[0]) + + # for xxxx, kkkkk in enumerate(bbch_): + # a_508 = a_508 + bbch_[xxxx].total_seconds() /60/60/24 + # ax4.barh(label,[0,a_508,0],width, label=legend_items[xxxx], left=[0,aa_508,0]) + + # aa_508 = mdates.date2num(lai_field['bbch'].index[0]) + a_508 + + + # xmin, xmax = ax4.get_xlim() + + # ax4.barh(label,[0,200,0],width, left=[0,xmax-1,0], color='white') + # ax4.set_ylim(0,1.7) + # plt.legend(bbox_to_anchor=(.935, 0.4),ncol=8) + + # plt.text(0.98, 0.05, "(a)", transform=ax.transAxes, fontsize=20, horizontalalignment='center', verticalalignment='center') + # plt.text(0.98, 0.2, "(b)", transform=ax0.transAxes, fontsize=20, horizontalalignment='center', verticalalignment='center') + # plt.text(0.98, 0.2, "(c)", transform=ax2.transAxes, fontsize=20, horizontalalignment='center', verticalalignment='center') + # plt.text(0.98, 0.2, "(d)", transform=ax3.transAxes, fontsize=20, horizontalalignment='center', verticalalignment='center') + # plt.text(0.98, 0.2, "(e)", transform=ax4.transAxes, fontsize=20, horizontalalignment='center', verticalalignment='center') + + + # plt.savefig(plot_output_path+pol+'_all_'+kkk+kkkk, bbox_inches = 'tight') + + # plt.close() + diff --git a/kaska/watercloudmodel.py b/kaska/watercloudmodel.py index 8349efe..4d53b53 100644 --- a/kaska/watercloudmodel.py +++ b/kaska/watercloudmodel.py @@ -9,16 +9,20 @@ \sigma_{pp}^{0} = A\cdot V_{1}\left[1 - \exp\left(-\frac{-2B\cdot V_{2}}{\cos\theta}\right)\right] + \exp\left(-\frac{-2B\cdot V_{2}}{\cos\theta}\right)\cdot\left(C + D\cdot M_{v}\right). $$ -`A*V_1` is basically the backscattering coefficient, whereas +`A*V_1` is basically the backscattering coefficient, whereas `B*V_2` is the extinction coefficient. 
`C` relates `VSM` (volumetric -soil moisture in [%]) to backscatter. In general, all the "constants" +soil moisture in [%]) to backscatter. In general, all the "constants" (`A`, `B`, `C`, `D`) are polarisation dependent. `V1` and `V2` have to do with the scatterers within the turbid medium, and are usually related to LAI. """ import numpy as np import scipy.stats as SS_vh +import pdb +from sense.canopy import OneLayer +from sense.soil import Soil +from sense import model def wcm_jac_(A, V1, B, V2, R, alpha, C, theta=23): """WCM model and jacobian calculations. The main @@ -55,16 +59,173 @@ def wcm_jac_(A, V1, B, V2, R, alpha, C, theta=23): [der_dA, der_dB, der_dC, der_dR, der_dalpha, der_dV1, der_dV2] ) -def fwd_model_(x, svh, svv, theta): - """Running the model forward to predict backscatter""" - n_obs = len(svv) - A_vv, B_vv, C_vv, A_vh, B_vh, C_vh = x[:6] - alpha = x[6 : (6 + n_obs)] - R = x[(6 + n_obs):(6 + 2*n_obs)] - lai = x[(6 + 2*n_obs) :] - sigma_vv, dvv = wcm_jac_(A_vv, lai, B_vv, lai, C_vv, R, alpha, theta=theta) - sigma_vh, dvh = wcm_jac_(A_vh, lai, B_vh, lai, C_vh, R, alpha, theta=theta) - return sigma_vv, sigma_vh + +def fresnel(e): + return np.abs( (1.-np.sqrt(e))/(1.+np.sqrt(e)) )**2. + +def refelctivity(eps, theta): + """ + table 2.5 Ulaby (2014) + assumes specular surface + Parameters + ---------- + eps : complex + relative dielectric permitivity + theta : float, ndarray + incidence angle [rad] + can be specified + """ + co = np.cos(theta) + si2 = np.sin(theta)**2. + rho_v = (eps*co-np.sqrt(eps-si2))/(eps*co+np.sqrt(eps-si2)) + rho_h = (co-np.sqrt(eps-si2))/(co+np.sqrt(eps-si2)) + + v = np.abs(rho_v)**2. + h = np.abs(rho_h)**2. + + return v, h + +def ssrt_jac_oh92_(eps, coef, LAI, H, theta): + theta = np.deg2rad(theta) + mu = np.cos(theta) + omega = 0.027 + freq = 5.405 + k = 2.*np.pi / f2lam(freq) + s = 0.0115 + + ks = k * s + + c = 1./(3.*fresnel(eps)) + p = (1. - (2.*theta/np.pi)**c * np.exp(-ks))**2. 
+ + v, h = refelctivity(eps,theta) + a = 0.7*(1.-np.exp(-0.65*ks**1.8)) * np.cos(theta)**3. + b = (v+h) / np.sqrt(p) + + sigma_soil = a*b + + tau = np.exp(-coef * np.sqrt(LAI) * H / mu)**2 + + soil = tau * sigma_soil + veg = omega * mu / 2 * (1 - tau) + + co = np.cos(theta) + si = np.sin(theta) + si2 = np.sin(theta)**2. + hoch = (np.sqrt(eps)+1)**2./(3*(1-np.sqrt(eps))**2.) + d = np.exp(-ks) + f = 2.*theta/np.pi + + k = sigma_soil + m = np.sqrt(LAI) * H / mu + l = omega * mu / 2 + + + part_one = (2*a*(co-1/2*np.sqrt(eps-si2))*(eps*co-np.sqrt(eps-si2))) / (p * (eps*co+np.sqrt(eps-si2))**2) + part_two = (2*a*(co+1/2*np.sqrt(eps-si2))*(eps*co-np.sqrt(eps-si2))**2) / (p * (eps*co+np.sqrt(eps-si2))**3) + part_three = (2*a*d*( (np.sqrt(eps)+1)**2)/(3*(1-np.sqrt(eps))**3*np.sqrt(eps)) + (np.sqrt(eps)+1)/(3*(1-np.sqrt(eps))**2*np.sqrt(eps)) * f**c * np.log(f) * (eps*co-np.sqrt(eps-si2))**2) / ((1-d*f**c)**3*(np.sqrt(eps-si2)+eps*co)**2) + p1 = part_one-part_two+part_three + + part_four = (-a*co-np.sqrt(eps-si2)) / (2*(1-d*f**c)**2 * np.sqrt(eps-si2) * (np.sqrt(eps-si2)+co)**2) + part_five = (2*d*((np.sqrt(eps)+1)**2 / (3*(1-np.sqrt(eps))**3*np.sqrt(eps)) + (np.sqrt(eps)+1)/(3*(1-np.sqrt(eps))**2*np.sqrt(eps))) *f**c * np.log(f) * (a*co-np.sqrt(eps-si2))) / ((1-d*f**c)**3 * (np.sqrt(eps-si2)+co)) + part_six = 1/ (2*(1-d*f**c)**2 * np.sqrt(eps-si2) * (np.sqrt(eps*si2)+co)) + p2 = part_four + part_five - part_six + + der_mv = p1 + p2 + # pdb.set_trace() + # part_one = (a*( ((co-1/2*np.sqrt(eps-si2))/(eps*co+np.sqrt(eps-si2))) - (1/(2*np.sqrt(eps-si2)*(co+np.sqrt(eps-si2)))) - ((1/2*np.sqrt(eps-si2)+co)*(eps*co-np.sqrt(eps-si2))/(eps*co+np.sqrt(eps-si2))**2) )) / p + # part_two = (2*a*d*((((np.sqrt(eps)+1)**2)/(3*(1-np.sqrt(eps))**3 * np.sqrt(eps))) + (np.sqrt(eps)+1)/(3*(1-np.sqrt(eps))**2 * np.sqrt(eps))) * f**c * np.log(f) * (v+h)) / (1-d*f**c)**3 + + # der_mv = part_one + part_two + der_coef = -2*l*(k-m)*np.exp(-2*l-coef) + + return ( + veg + soil, + [der_mv, der_coef] + ) 
+ + + +def f2lam(f): + """ + given the frequency in GHz, + return the wavelength [m] + """ + c0=299792458. # speed of light [m/s] + + return c0/(f*1.E9) + + +def ssrt_jac_(mv, coef, LAI, H, theta): + """""" + + omega = 0.027 + freq = 5.405 + k = 2.*np.pi / f2lam(freq) + s = 0.0115 + + clay = 0.0738 + sand = 0.2408 + bulk = 1.45 + + ks = k * s + + mu = np.cos(np.deg2rad(theta)) + sin = np.sin(1.5*np.deg2rad(theta)) + a = 0.11 * mv**0.7 * mu**2.2 + b = 1 - np.exp(-0.32 * ks**1.8) + q = 0.095 * (0.13 + sin)**1.4 * (1-np.exp(-1.3 * ks**0.9)) + sigma_soil = a * b / q + + tau = np.exp(-coef * LAI * H / mu)**2 + + soil2 = tau * sigma_soil + veg = omega * mu / 2 * (1 - tau) + # pdb.set_trace() + # Sense + models = {'surface': 'Oh04', 'canopy': 'turbid_isotropic'} + can = 'turbid_isotropic' + ke = coef * LAI + theta = np.deg2rad(theta) + # soil + soil = Soil(mv=mv, s=s, f=freq, clay=clay, sand=sand, bulk=bulk) + + # canopy + can = OneLayer(canopy=can, ke_h=ke, ke_v=ke, d=H, ks_h = omega * ke, ks_v = omega*ke) + + S = model.RTModel(surface=soil, canopy=can, models=models, theta=theta, freq=freq) + S.sigma0() + S.__dict__['stot']['vv'[::-1]], S.__dict__['stot']['vh'[::-1]] + + s0g = S.__dict__['s0g']['vv'] + s0c = S.__dict__['s0c']['vv'] + s0cgt = S.__dict__['s0cgt']['vv'] + s0gcg = S.__dict__['s0gcg']['vv'] + stot = S.__dict__['stot']['vv'] + + + der_coef = (-2 * np.sqrt(LAI) * H / mu) * tau + omega * mu / 2 * -1 * (-2 * np.sqrt(LAI) * H / mu) * tau + der_mv = tau * 0.077 * mv**(-0.3) * mu**2.2 * b / q + der_lai = (-2 * coef * H / mu / 2 / np.sqrt(LAI)) * tau + omega * mu / 2 * -1 * (-2 * coef * H / mu / 2 / np.sqrt(LAI)) * tau + der_height = (-2 * coef * H / mu / 2 / np.sqrt(LAI)) * tau + omega * mu / 2 * -1 * (-2 * coef * H / mu / 2 / np.sqrt(LAI)) * tau + + # pdb.set_trace() + # Also returns der_dV1 and der_dV2 + return ( + stot , + [der_mv, der_coef, der_lai, der_height] + ) + +# def fwd_model_(x, svh, svv, theta): +# """Running the model forward to predict 
backscatter""" +# n_obs = len(svv) +# A_vv, B_vv, C_vv, A_vh, B_vh, C_vh = x[:6] +# alpha = x[6 : (6 + n_obs)] +# R = x[(6 + n_obs):(6 + 2*n_obs)] +# lai = x[(6 + 2*n_obs) :] +# sigma_vv, dvv = wcm_jac_(A_vv, lai, B_vv, lai, C_vv, R, alpha, theta=theta) +# sigma_vh, dvh = wcm_jac_(A_vh, lai, B_vh, lai, C_vh, R, alpha, theta=theta) +# return sigma_vv, sigma_vh def cost_obs_(x, svh, svv, theta, unc=0.5): """Cost function. Order of parameters is @@ -106,9 +267,73 @@ def cost_obs_(x, svh, svv, theta, unc=0.5): (dvv[5] + dvv[6]) * diff_vv + (dvh[5] + dvh[6]) * diff_vh, # LAI ] ) + + return np.nansum(cost), -jac / (unc ** 2) + +def cost_obs_ssrt(x, svh, svv, theta, data, unc=0.3): + """ + """ + n_obs = svh.shape[0] + mv = x[:n_obs] + coef = x[n_obs:2*n_obs] + lai = data[1:(2*n_obs)] + h = data[0] + + sigma_vv, dvv = ssrt_jac_(mv, coef, lai, h, theta=theta) + + diff_vv = 10*np.log10(svv) - 10*np.log10(sigma_vv) + # diff_vv = svv - sigma_vv + # diff_vv = 10 ** (svv/10) - sigma_vv + # pdb.set_trace() + cost = 0.5 * (diff_vv ** 2) / (unc ** 2) + + jac = np.concatenate( + ##[der_dA, der_dB, der_dC, der_dR, der_dalpha, der_dV1, der_dV2] + np.array( + [ + (dvv[0] * diff_vv), # mv + (dvv[1] * diff_vv), # coef + # (dvv[2] * diff_vv), # lai + # (dvv[3] * diff_vv), # height + ] + ) + + ) + # pdb.set_trace() + return np.nansum(cost), -jac / (unc ** 2) + +def cost_obs_ssrt_oh92_(x, svh, svv, theta, data, unc=0.3): + """ + """ + n_obs = svh.shape[0] + mv = x[:n_obs] + coef = x[n_obs:2*n_obs] + lai = data[n_obs:(2*n_obs)] + h = data[:n_obs] + + sigma_vv, dvv = ssrt_jac_oh92_(mv, coef, lai, h, theta=theta) + + diff_vv = svv - 10*np.log10(sigma_vv) + + cost = 0.5 * (diff_vv ** 2) / (unc ** 2) + + jac = np.concatenate( + ##[der_dA, der_dB, der_dC, der_dR, der_dalpha, der_dV1, der_dV2] + np.array( + [ + (dvv[0] * diff_vv), # mv + (dvv[1] * diff_vv), # coef + # (dvv[2] * diff_vv), # lai + # (dvv[3] * diff_vv), # height + ] + ) + + ) + # pdb.set_trace() return np.nansum(cost), -jac / (unc ** 
2) + def cost_prior_(x, svh, svv, theta, prior_mean, prior_unc): """A Gaussian cost function prior. We assume no correlations between parameters, only mean and standard deviation. @@ -129,6 +354,29 @@ def cost_prior_(x, svh, svv, theta, prior_mean, prior_unc): cost1 = prior_cost[(6+2*n_obs):].sum() # LAI cost return cost0 + cost1, dprior_cost +def cost_prior_ssrt(x, svh, svv, theta, prior_mean, prior_unc): + """A Gaussian cost function prior. We assume no correlations + between parameters, only mean and standard deviation. + Cost function. Order of parameters is + A_vv, B_vv, C_vv, A_vh, B_vh, C_vh, + alpha_0, ..., alpha_N, + ruff_0, ..., ruff_N, + LAI_0, ..., LAI_N + We assume that len(svh) == N + """ + # pdb.set_trace() + n_obs = len(svh) + prior_cost = 0.5 * (prior_mean - x) ** 2 / prior_unc ** 2 + dprior_cost = -(prior_mean - x) / prior_unc ** 2 + # coef->No prior! + # dprior_cost[(n_obs):(2*n_obs)] = 0. + # dprior_cost[:n_obs] = 0. + cost0 = prior_cost[:(n_obs)].sum() # mv cost + cost1 = prior_cost[n_obs:2*n_obs].sum() # coef cost + # cost0=0 + + return cost0 + cost1, dprior_cost + def cost_smooth_(x, gamma): """A smoother for one parameter (e.g. LAI or whatever). 
@@ -164,4 +412,32 @@ def cost_function(x, svh, svv, theta, gamma, prior_mean, prior_unc, unc=0.8): R = x[(6 + n_obs):(6 + 2*n_obs)] cost4, dcost4 = cost_smooth_(R, gamma[0]) tmp[(7 + n_obs) : (5 + 2*n_obs)] = dcost4 + # pdb.set_trace() return cost1 + cost2 + cost3 + cost4, dcost1 + dcost2 + tmp + +def cost_function2(x, svh, svv, theta, gamma, prior_mean, prior_unc, data, unc=0.3): + """A combined cost function that calls the prior, fit to the observations + """ + # Fit to the observations + cost1, dcost1 = cost_obs_ssrt(x, svh, svv, theta, data, unc=unc) + # cost1, dcost1 = cost_obs_ssrt_oh92_(x, svh, svv, theta, data, unc=unc) + # pdb.set_trace() + # Fit to the prior + cost2, dcost2 = cost_prior_ssrt(x, svh, svv, theta, prior_mean, prior_unc) + # pdb.set_trace() + # Smooth evolution of LAI + # n_obs = len(svv) + # lai = x[2*n_obs:3*n_obs] + # cost3, dcost3 = cost_smooth_(lai, gamma[1]) + # tmp = np.zeros_like(dcost1) + # tmp[2*n_obs+1:-1] = dcost3 + tmp=0 + cost3=0 + + # # Smooth evolution of ruffness + # R = x[(6 + n_obs):(6 + 2*n_obs)] + # cost4, dcost4 = cost_smooth_(R, gamma[0]) + # tmp[(7 + n_obs) : (5 + 2*n_obs)] = dcost4 + # return cost1 + cost2 + cost3 + cost4, dcost1 + dcost2 + tmp + # pdb.set_trace() + return cost1 + cost2 + cost3, dcost1 + dcost2 + tmp diff --git a/kaska/watercloudmodel_vwc.py b/kaska/watercloudmodel_vwc.py new file mode 100644 index 0000000..0722099 --- /dev/null +++ b/kaska/watercloudmodel_vwc.py @@ -0,0 +1,660 @@ +#!/usr/bin/env python +"""Some useful functions for the Water Cloud Model (WCM) +used to retrieve parameters from Sentinel 1 data. The model +is first presented in Attema & Ulaby (1978) + +The WCM predicts backscatter in a polarisation `pp` as a function +of some parameters: +$$ +\sigma_{pp}^{0} = A\cdot V_{1}\left[1 - \exp\left(-\frac{-2B\cdot V_{2}}{\cos\theta}\right)\right] + \exp\left(-\frac{-2B\cdot V_{2}}{\cos\theta}\right)\cdot\left(C + D\cdot M_{v}\right). 
+$$ + +`A*V_1` is basically the backscattering coefficient, whereas +`B*V_2` is the extinction coefficient. `C` relates `VSM` (volumetric +soil moisture in [%]) to backscatter. In general, all the "constants" +(`A`, `B`, `C`, `D`) are polarisation dependent. `V1` and `V2` have to do with +the scatterers within the turbid medium, and are usually related to LAI. +""" + +import numpy as np +import scipy.stats as SS_vh +import pdb + +from sense.canopy import OneLayer +from sense.soil import Soil +from sense import model +from sense.dielectric import Dobson85 +from sense.core import Reflectivity +from sense.util import f2lam + +# def wcm_jac_(A, V1, B, V2, R, alpha, C, theta=23): +# """WCM model and jacobian calculations. The main +# assumption here is that we only consider first +# order effects. The vegetation backscatter contribution +# is given by `A*V1`, which is often related to scatterer +# (e.g. leaves, stems, ...) properties. The attenuation +# due to the canopy is controlled by `B*V2`, which is +# often related to canopy moisture content (this is polarisation +# and frequency dependent). The soil backscatter is modelled as +# an additive model (in dB units, multiplicative in linear), with +# a roughness term and a moisture-controlled term. 
The soil moisture +# term can be interpreted in weird and wonderful manners once retrieved +# (eg inverting the dielectric constant) +# This function returns the gradient for all parameters (A, B, +# V1, V2 and C).""" +# mu = np.cos(np.deg2rad(theta)) +# tau = np.exp(-2 * B * V2 / mu) +# veg = A * V1 * mu * (1 - tau) +# sigma_soil = R+alpha +# soil = tau * sigma_soil + C + +# der_dA = V1 * mu - V1 * mu * tau +# der_dV1 = A * mu - A * mu * tau +# der_dB = (-2 * V2 / mu) * tau * (-A * V1 * mu + sigma_soil) +# der_dV2 = (-2 * B / mu) * tau * (-A * V1 * mu + sigma_soil) +# der_dC = 1 +# der_dR = tau +# der_dalpha = tau + +# # Also returns der_dV1 and der_dV2 +# return ( +# veg + soil, +# [der_dA, der_dB, der_dC, der_dR, der_dalpha, der_dV1, der_dV2] +# ) + + +# def fresnel(e): +# return np.abs( (1.-np.sqrt(e))/(1.+np.sqrt(e)) )**2. + +# def refelctivity(eps, theta): +# """ +# table 2.5 Ulaby (2014) +# assumes specular surface +# Parameters +# ---------- +# eps : complex +# relative dielectric permitivity +# theta : float, ndarray +# incidence angle [rad] +# can be specified +# """ +# co = np.cos(theta) +# si2 = np.sin(theta)**2. +# rho_v = (eps*co-np.sqrt(eps-si2))/(eps*co+np.sqrt(eps-si2)) +# rho_h = (co-np.sqrt(eps-si2))/(co+np.sqrt(eps-si2)) + +# v = np.abs(rho_v)**2. +# h = np.abs(rho_h)**2. + +# return v, h + +# def ssrt_jac_oh92_(eps, coef, LAI, H, theta): +# theta = np.deg2rad(theta) +# mu = np.cos(theta) +# omega = 0.027 +# freq = 5.405 +# k = 2.*np.pi / f2lam(freq) +# s = 0.0115 + +# ks = k * s + +# c = 1./(3.*fresnel(eps)) +# p = (1. - (2.*theta/np.pi)**c * np.exp(-ks))**2. + +# v, h = refelctivity(eps,theta) +# a = 0.7*(1.-np.exp(-0.65*ks**1.8)) * np.cos(theta)**3. +# b = (v+h) / np.sqrt(p) + +# sigma_soil = a*b + +# tau = np.exp(-coef * np.sqrt(LAI) * H / mu)**2 + +# soil = tau * sigma_soil +# veg = omega * mu / 2 * (1 - tau) + +# co = np.cos(theta) +# si = np.sin(theta) +# si2 = np.sin(theta)**2. +# hoch = (np.sqrt(eps)+1)**2./(3*(1-np.sqrt(eps))**2.) 
+# d = np.exp(-ks) +# f = 2.*theta/np.pi + +# k = sigma_soil +# m = np.sqrt(LAI) * H / mu +# l = omega * mu / 2 + + +# part_one = (2*a*(co-1/2*np.sqrt(eps-si2))*(eps*co-np.sqrt(eps-si2))) / (p * (eps*co+np.sqrt(eps-si2))**2) +# part_two = (2*a*(co+1/2*np.sqrt(eps-si2))*(eps*co-np.sqrt(eps-si2))**2) / (p * (eps*co+np.sqrt(eps-si2))**3) +# part_three = (2*a*d*( (np.sqrt(eps)+1)**2)/(3*(1-np.sqrt(eps))**3*np.sqrt(eps)) + (np.sqrt(eps)+1)/(3*(1-np.sqrt(eps))**2*np.sqrt(eps)) * f**c * np.log(f) * (eps*co-np.sqrt(eps-si2))**2) / ((1-d*f**c)**3*(np.sqrt(eps-si2)+eps*co)**2) +# p1 = part_one-part_two+part_three + +# part_four = (-a*co-np.sqrt(eps-si2)) / (2*(1-d*f**c)**2 * np.sqrt(eps-si2) * (np.sqrt(eps-si2)+co)**2) +# part_five = (2*d*((np.sqrt(eps)+1)**2 / (3*(1-np.sqrt(eps))**3*np.sqrt(eps)) + (np.sqrt(eps)+1)/(3*(1-np.sqrt(eps))**2*np.sqrt(eps))) *f**c * np.log(f) * (a*co-np.sqrt(eps-si2))) / ((1-d*f**c)**3 * (np.sqrt(eps-si2)+co)) +# part_six = 1/ (2*(1-d*f**c)**2 * np.sqrt(eps-si2) * (np.sqrt(eps*si2)+co)) +# p2 = part_four + part_five - part_six + +# der_mv = p1 + p2 +# # pdb.set_trace() +# # part_one = (a*( ((co-1/2*np.sqrt(eps-si2))/(eps*co+np.sqrt(eps-si2))) - (1/(2*np.sqrt(eps-si2)*(co+np.sqrt(eps-si2)))) - ((1/2*np.sqrt(eps-si2)+co)*(eps*co-np.sqrt(eps-si2))/(eps*co+np.sqrt(eps-si2))**2) )) / p +# # part_two = (2*a*d*((((np.sqrt(eps)+1)**2)/(3*(1-np.sqrt(eps))**3 * np.sqrt(eps))) + (np.sqrt(eps)+1)/(3*(1-np.sqrt(eps))**2 * np.sqrt(eps))) * f**c * np.log(f) * (v+h)) / (1-d*f**c)**3 + +# # der_mv = part_one + part_two +# der_coef = -2*l*(k-m)*np.exp(-2*l-coef) + +# return ( +# veg + soil, +# [der_mv, der_coef] +# ) + + +def ssrt_jac_vwc(mv, vwc, s, omega, b, theta): + """""" + + # omega = 0.027 + freq = 5.405 + k = 2.*np.pi / f2lam(freq) + # s = 0.0115 + + clay = 0.0738 + sand = 0.2408 + bulk = 1.45 + + ks = k * s + + mu = np.cos(np.deg2rad(theta)) + sin = np.sin(1.5*np.deg2rad(theta)) + a = 0.11 * mv**0.7 * mu**2.2 + bb = 1 - np.exp(-0.32 * ks**1.8) + q = 0.095 
* (0.13 + sin)**1.4 * (1-np.exp(-1.3 * ks**0.9)) + sigma_soil = a * bb / q + + tau = np.exp(-b * vwc / mu)**2 + + # soil2 = tau * sigma_soil + # veg = omega * mu / 2 * (1 - tau) + # pdb.set_trace() + # Sense + models = {'surface': 'Oh04', 'canopy': 'turbid_isotropic'} + can = 'turbid_isotropic' + ke = b * vwc + if np.nanmean(theta) > 5.: + theta = np.deg2rad(theta) + + # soil + soil = Soil(mv=mv, s=s, f=freq, clay=clay, sand=sand, bulk=bulk) + + # canopy + can = OneLayer(canopy=can, ke_h=ke, ke_v=ke, d=1., ks_h = omega * ke, ks_v = omega*ke) + + S = model.RTModel(surface=soil, canopy=can, models=models, theta=theta, freq=freq) + S.sigma0() + S.__dict__['stot']['vv'[::-1]], S.__dict__['stot']['vh'[::-1]] + + s0g = S.__dict__['s0g']['vv'] + s0c = S.__dict__['s0c']['vv'] + s0cgt = S.__dict__['s0cgt']['vv'] + s0gcg = S.__dict__['s0gcg']['vv'] + stot = S.__dict__['stot']['vv'] + + eps = Dobson85(clay=clay, sand=sand, bulk=bulk, mv=mv, freq=freq).eps + v = Reflectivity(eps,theta).v + + sec = 1/np.cos(theta) + + der_omega1 = (1/2 * np.cos(theta) * (1-tau)) + der_omega2 = (4 * b * vwc * tau * v) + der_omega3 = 1/2 * np.cos(theta) * v * (np.sqrt(tau)-tau) + der_b1 = -2 * s0g * vwc * sec * tau + der_vwc1 = -2 * s0g * b * sec * tau + der_b2 = omega * vwc * tau + der_vwc2 = omega * b * tau + der_b3 = -4 * omega * vwc * v * tau * (2 * vwc * b * sec - 1) + der_vwc3 = -4 * omega * b * v * tau * (2 * vwc * b * sec - 1) + der_b4 = - omega * vwc * v * tau**2 * (tau -2 ) + der_vwc4 = - omega * b * v * tau**2 * (tau -2 ) + + der_omega = der_omega1+der_omega2+der_omega3 + der_b = der_b1+der_b2+der_b3+der_b4 + der_vwc = der_vwc1+der_vwc2+der_vwc3+der_vwc4 + + der_s = S.G.rt_s.der_s_vv + der_mv = S.G.rt_s.der_mv_vv + + # return ( + # stot , + # [der_s, der_omega, der_mv, der_vwc, der_b] + # ) + + return ( + stot , + [der_s, der_omega, der_mv, der_vwc] + ) + +def ssrt_vwc(mv, vwc, s, omega, b, theta): + """""" + + # omega = 0.027 + freq = 5.405 + k = 2.*np.pi / f2lam(freq) + # s = 0.0115 
+ + clay = 0.0738 + sand = 0.2408 + bulk = 1.45 + + ks = k * s + + mu = np.cos(np.deg2rad(theta)) + sin = np.sin(1.5*np.deg2rad(theta)) + a = 0.11 * mv**0.7 * mu**2.2 + bb = 1 - np.exp(-0.32 * ks**1.8) + q = 0.095 * (0.13 + sin)**1.4 * (1-np.exp(-1.3 * ks**0.9)) + sigma_soil = a * bb / q + + tau = np.exp(-b * vwc / mu)**2 + + # soil2 = tau * sigma_soil + # veg = omega * mu / 2 * (1 - tau) + # pdb.set_trace() + # Sense + models = {'surface': 'Oh04', 'canopy': 'turbid_isotropic'} + can = 'turbid_isotropic' + ke = b * vwc + if np.nanmean(theta) > 5.: + theta = np.deg2rad(theta) + + # soil + soil = Soil(mv=mv, s=s, f=freq, clay=clay, sand=sand, bulk=bulk) + + # canopy + can = OneLayer(canopy=can, ke_h=ke, ke_v=ke, d=1., ks_h = omega * ke, ks_v = omega*ke) + + S = model.RTModel(surface=soil, canopy=can, models=models, theta=theta, freq=freq) + S.sigma0() + S.__dict__['stot']['vv'[::-1]], S.__dict__['stot']['vh'[::-1]] + + s0g = S.__dict__['s0g']['vv'] + s0c = S.__dict__['s0c']['vv'] + s0cgt = S.__dict__['s0cgt']['vv'] + s0gcg = S.__dict__['s0gcg']['vv'] + stot = S.__dict__['stot']['vv'] + + eps = Dobson85(clay=clay, sand=sand, bulk=bulk, mv=mv, freq=freq).eps + v = Reflectivity(eps,theta).v + + sec = 1/np.cos(theta) + + der_omega1 = (1/2 * np.cos(theta) * (1-tau)) + der_omega2 = (4 * b * vwc * tau * v) + der_omega3 = 1/2 * np.cos(theta) * v * (np.sqrt(tau)-tau) + der_b1 = -2 * s0g * vwc * sec * tau + der_vwc1 = -2 * s0g * b * sec * tau + der_b2 = omega * vwc * tau + der_vwc2 = omega * b * tau + der_b3 = -4 * omega * vwc * v * tau * (2 * vwc * b * sec - 1) + der_vwc3 = -4 * omega * b * v * tau * (2 * vwc * b * sec - 1) + der_b4 = - omega * vwc * v * tau**2 * (tau -2 ) + der_vwc4 = - omega * b * v * tau**2 * (tau -2 ) + + der_omega = der_omega1+der_omega2+der_omega3 + der_b = der_b1+der_b2+der_b3+der_b4 + der_vwc = der_vwc1+der_vwc2+der_vwc3+der_vwc4 + + der_s = S.G.rt_s.der_s_vv + der_mv = S.G.rt_s.der_mv_vv + + # return ( + # stot , + # [der_s, der_omega, der_mv, 
der_vwc, der_b] + # ) + + return ( + stot ,s0g, s0c + ) + + + +# def fwd_model_(x, svh, svv, theta): +# """Running the model forward to predict backscatter""" +# n_obs = len(svv) +# A_vv, B_vv, C_vv, A_vh, B_vh, C_vh = x[:6] +# alpha = x[6 : (6 + n_obs)] +# R = x[(6 + n_obs):(6 + 2*n_obs)] +# lai = x[(6 + 2*n_obs) :] +# sigma_vv, dvv = wcm_jac_(A_vv, lai, B_vv, lai, C_vv, R, alpha, theta=theta) +# sigma_vh, dvh = wcm_jac_(A_vh, lai, B_vh, lai, C_vh, R, alpha, theta=theta) +# return sigma_vv, sigma_vh + +# def cost_obs_(x, svh, svv, theta, unc=0.5): +# """Cost function. Order of parameters is +# A_vv, B_vv, C_vv, A_vh, B_vh, C_vh, +# vsm_0, ..., vsm_N, +# LAI_0, ..., LAI_N +# We assume that len(svh) == N +# Uncertainty is the uncertainty in backscatter, and +# assume that there are two polarisations (VV and VH), +# although these are just labels! +# """ +# n_obs = svh.shape[0] +# A_vv, B_vv, C_vv, A_vh, B_vh, C_vh = x[:6] +# alpha = x[6 : (6 + n_obs)] +# R = x[(6 + n_obs):(6 + 2*n_obs)] +# lai = x[(6 + 2*n_obs) :] +# sigma_vv, dvv = wcm_jac_(A_vv, lai, B_vv, lai, C_vv, R, alpha, theta=theta) +# sigma_vh, dvh = wcm_jac_(A_vh, lai, B_vh, lai, C_vh, R, alpha, theta=theta) +# diff_vv = svv - sigma_vv +# diff_vh = svh - sigma_vh +# #NOTE!!!!! Only fits the VV channel!!!! +# # Soil misture in VH is complicated +# diff_vh = 0. 
+# cost = 0.5 * (diff_vv ** 2 + diff_vh ** 2) / (unc ** 2) +# jac = np.concatenate( +# [##[der_dA, der_dB, der_dC, der_dR, der_dalpha, der_dV1, der_dV2] +# np.array( +# [ +# np.sum(dvv[0] * diff_vv), # A_vv +# np.sum(dvv[1] * diff_vv), # B_vv +# np.sum(dvv[2] * diff_vv), # C_vv +# np.sum(dvh[0] * diff_vh), # A_vh +# np.sum(dvh[1] * diff_vh), # B_vh +# np.sum(dvh[2] * diff_vh), +# ] +# ), # C_vh +# dvv[3] * diff_vv + dvh[3] * diff_vh, # R +# dvv[4] * diff_vv + dvh[4] * diff_vh, # alpha +# (dvv[5] + dvv[6]) * diff_vv + (dvh[5] + dvh[6]) * diff_vh, # LAI +# ] +# ) + +# return np.nansum(cost), -jac / (unc ** 2) + + +def cost_obs_vwc(x, svh, svv, theta, unc=0.5, data=0): + """Cost function. Order of parameters is + s, omega, + b_0, ..., b_N, + vwc_0, ..., vwc_N + mv_0, ..., mv_N + We assume that len(svh) == N + Uncertainty is the uncertainty in backscatter, and + assume that there are two polarisations (VV and VH), + although these are just labels! + """ + n_obs = svh.shape[0] + s, omega = x[:2] + mv = x[2 : (2 + n_obs)] + vwc = x[(2 + n_obs) : (2 + 2*n_obs)] + # b = x[(2 + 2*n_obs) : (2 + 3*n_obs)] + b = data + + sigma_vv, dvv = ssrt_jac_vwc(mv, vwc, s, omega, b, theta=theta) + # sigma_vh, dvh = ssrt_jac_vwc(A_vh, lai, B_vh, lai, C_vh, R, alpha, theta=theta) + diff_vv = svv - sigma_vv + ### in dB ??? + diff_vv = 10*np.log10(svv) - 10*np.log10(sigma_vv) + # diff_vh = svh - sigma_vh + #NOTE!!!!! Only fits the VV channel!!!! + # Soil misture in VH is complicated + diff_vh = 0. 
+ cost = 0.5 * (diff_vv ** 2 + diff_vh ** 2) / (unc ** 2) + jac = np.concatenate( + [##[der_s, der_omega, der_mv, der_vwc, der_b] + np.array( + [ + np.sum(dvv[0] * diff_vv), # s + np.sum(dvv[1] * diff_vv), # omega + ]), + dvv[2] * diff_vv, # mv + dvv[3] * diff_vv, # vwc + # dvv[4] * diff_vv, # b + + ] + ) + + return np.nansum(cost), -jac / (unc ** 2) + + + +# def cost_obs_ssrt(x, svh, svv, theta, data, unc=0.3): +# """ +# """ +# n_obs = svh.shape[0] +# mv = x[:n_obs] +# coef = x[n_obs:2*n_obs] +# lai = data[1:(2*n_obs)] +# h = data[0] + +# sigma_vv, dvv = ssrt_jac_(mv, coef, lai, h, theta=theta) + +# diff_vv = 10*np.log10(svv) - 10*np.log10(sigma_vv) +# # diff_vv = svv - sigma_vv +# # diff_vv = 10 ** (svv/10) - sigma_vv +# # pdb.set_trace() +# cost = 0.5 * (diff_vv ** 2) / (unc ** 2) + +# jac = np.concatenate( +# ##[der_dA, der_dB, der_dC, der_dR, der_dalpha, der_dV1, der_dV2] +# np.array( +# [ +# (dvv[0] * diff_vv), # mv +# (dvv[1] * diff_vv), # coef +# # (dvv[2] * diff_vv), # lai +# # (dvv[3] * diff_vv), # height +# ] +# ) + +# ) +# # pdb.set_trace() +# return np.nansum(cost), -jac / (unc ** 2) + +# def cost_obs_ssrt_oh92_(x, svh, svv, theta, data, unc=0.3): +# """ +# """ +# n_obs = svh.shape[0] +# mv = x[:n_obs] +# coef = x[n_obs:2*n_obs] +# lai = data[n_obs:(2*n_obs)] +# h = data[:n_obs] + +# sigma_vv, dvv = ssrt_jac_oh92_(mv, coef, lai, h, theta=theta) + +# diff_vv = svv - 10*np.log10(sigma_vv) + +# cost = 0.5 * (diff_vv ** 2) / (unc ** 2) + +# jac = np.concatenate( +# ##[der_dA, der_dB, der_dC, der_dR, der_dalpha, der_dV1, der_dV2] +# np.array( +# [ +# (dvv[0] * diff_vv), # mv +# (dvv[1] * diff_vv), # coef +# # (dvv[2] * diff_vv), # lai +# # (dvv[3] * diff_vv), # height +# ] +# ) + +# ) +# # pdb.set_trace() +# return np.nansum(cost), -jac / (unc ** 2) + + + +# def cost_prior_(x, svh, svv, theta, prior_mean, prior_unc): +# """A Gaussian cost function prior. We assume no correlations +# between parameters, only mean and standard deviation. +# Cost function. 
Order of parameters is +# A_vv, B_vv, C_vv, A_vh, B_vh, C_vh, +# alpha_0, ..., alpha_N, +# ruff_0, ..., ruff_N, +# LAI_0, ..., LAI_N +# We assume that len(svh) == N +# """ +# n_obs = len(svh) +# prior_cost = 0.5 * (prior_mean - x) ** 2 / prior_unc ** 2 +# dprior_cost = -(prior_mean - x) / prior_unc ** 2 +# dprior_cost[:6] = 0.0 +# # Ruff->No prior! +# dprior_cost[(6 + n_obs):(6 + 2*n_obs)] = 0. +# cost0 = prior_cost[6:(6+n_obs)].sum() # alpha cost +# cost1 = prior_cost[(6+2*n_obs):].sum() # LAI cost +# return cost0 + cost1, dprior_cost + + +def cost_prior_vwc(x, svh, svv, theta, prior_mean, prior_unc): + """A Gaussian cost function prior. We assume no correlations + between parameters, only mean and standard deviation. + Cost function. Order of parameters is + s, omega, + mv_0, ..., b_N, + vwc_0, ..., vwc_N + b_0, ..., mv_N + We assume that len(svh) == N + """ + n_obs = len(svh) + + # mean_prior = np.nanmean(prior_mean[(2) : (2 + n_obs)]) + # mean_x = np.nanmean(x[(2) : (2 + n_obs)]) + + # ppp = prior_mean *1. + # pppp = prior_mean *1. + + # ppp[(2) : (2 + n_obs)] = prior_mean[(2) : (2 + n_obs)] - mean_prior + # pppp[(2) : (2 + n_obs)] = x[(2) : (2 + n_obs)] - mean_x + + prior_cost = 0.5 * (prior_mean - x) ** 2 / prior_unc ** 2 + # prior_cost = 0.5 * (ppp - pppp) ** 2 / prior_unc ** 2 + + + + dprior_cost = -(prior_mean - x) / prior_unc ** 2 + + # dprior_cost = -(prior_mean/mean_prior*mean_x - x) / prior_unc ** 2 + + + dprior_cost[:2] = 0.0 + # Ruff->No prior! + # dprior_cost[(2) : (2 + n_obs)] = 0. # mv + dprior_cost[(2 + n_obs) : (2 + 2*n_obs)] = 0. # vwc + # dprior_cost[(2 + 2*n_obs) : (2 + 3*n_obs)] = 0. # b + cost0 = prior_cost[(2) : (2 + n_obs)].sum() # mv cost + # pdb.set_trace() + # print(cost0) + return cost0 , dprior_cost + +# def cost_prior_ssrt(x, svh, svv, theta, prior_mean, prior_unc): +# """A Gaussian cost function prior. We assume no correlations +# between parameters, only mean and standard deviation. +# Cost function. 
Order of parameters is +# A_vv, B_vv, C_vv, A_vh, B_vh, C_vh, +# alpha_0, ..., alpha_N, +# ruff_0, ..., ruff_N, +# LAI_0, ..., LAI_N +# We assume that len(svh) == N +# """ +# # pdb.set_trace() +# n_obs = len(svh) +# prior_cost = 0.5 * (prior_mean - x) ** 2 / prior_unc ** 2 +# dprior_cost = -(prior_mean - x) / prior_unc ** 2 +# # coef->No prior! +# # dprior_cost[(n_obs):(2*n_obs)] = 0. +# # dprior_cost[:n_obs] = 0. +# cost0 = prior_cost[:(n_obs)].sum() # mv cost +# cost1 = prior_cost[n_obs:2*n_obs].sum() # coef cost +# # cost0=0 + +# return cost0 + cost1, dprior_cost + + +def cost_smooth_(x, gamma): + """A smoother for one parameter (e.g. LAI or whatever). + `gamma` controls the magnitude of the smoothing (higher + `gamma`, more smoothing) + """ + # Calculate differences + p_diff1 = x[1:-1] - x[2:] + p_diff2 = x[1:-1] - x[:-2] + # Cost function + xcost_model = 0.5 * gamma * np.sum(p_diff1 ** 2 + p_diff2 ** 2) + # Jacobian + xdcost_model = 1 * gamma * (p_diff1 + p_diff2) + # Note that we miss the first and last elements of the Jacobian + # They're zero! 
+ return xcost_model, xdcost_model + + +# def cost_function(x, svh, svv, theta, gamma, prior_mean, prior_unc, unc=0.8): +# """A combined cost function that calls the prior, fit to the observations +# """ +# # Fit to the observations +# cost1, dcost1 = cost_obs_(x, svh, svv, theta, unc=unc) +# # Fit to the prior +# cost2, dcost2 = cost_prior_(x, svh, svv, theta, prior_mean, prior_unc) +# # Smooth evolution of LAI +# n_obs = len(svv) +# lai = x[(6 + 2*n_obs) :] +# cost3, dcost3 = cost_smooth_(lai, gamma[1]) +# tmp = np.zeros_like(dcost1) +# tmp[(7 + 2*n_obs) : -1] = dcost3 +# # Smooth evolution of ruffness +# R = x[(6 + n_obs):(6 + 2*n_obs)] +# cost4, dcost4 = cost_smooth_(R, gamma[0]) +# tmp[(7 + n_obs) : (5 + 2*n_obs)] = dcost4 +# # pdb.set_trace() +# return cost1 + cost2 + cost3 + cost4, dcost1 + dcost2 + tmp + +def cost_function_vwc(x, svh, svv, theta, gamma, prior_mean, prior_unc, unc=0.8, data=0): + """A combined cost function that calls the prior, fit to the observations + """ + # Fit to the observations + cost1, dcost1 = cost_obs_vwc(x, svh, svv, theta, unc=unc, data=data) + # Fit to the prior + # cost2, dcost2 = cost_prior_vwc(x, svh, svv, theta, prior_mean, prior_unc) + cost2 = 0 + dcost2=0 + cost3 = 0 + cost4 = 0 + tmp = 0 + # Smooth evolution of sm + # n_obs = len(svv) + # lai = x[2 : (2 + n_obs)] + # cost3, dcost3 = cost_smooth_(lai, gamma[1]) + # # pdb.set_trace() + # tmp = np.zeros_like(dcost1) + # tmp[3 : (2 + n_obs)-1] = dcost3 + # # Smooth evolution of ruffness + # R = x[(6 + n_obs):(6 + 2*n_obs)] + # cost4, dcost4 = cost_smooth_(R, gamma[0]) + # tmp[(7 + n_obs) : (5 + 2*n_obs)] = dcost4 + # pdb.set_trace() + return cost1 + cost2 + cost3 + cost4, dcost1 + dcost2 + tmp + + + + +# def cost_function2(x, svh, svv, theta, gamma, prior_mean, prior_unc, data, unc=0.3): +# """A combined cost function that calls the prior, fit to the observations +# """ +# # Fit to the observations +# cost1, dcost1 = cost_obs_ssrt(x, svh, svv, theta, data, unc=unc) +# # 
cost1, dcost1 = cost_obs_ssrt_oh92_(x, svh, svv, theta, data, unc=unc) +# # pdb.set_trace() +# # Fit to the prior +# cost2, dcost2 = cost_prior_ssrt(x, svh, svv, theta, prior_mean, prior_unc) +# # pdb.set_trace() +# # Smooth evolution of LAI +# # n_obs = len(svv) +# # lai = x[2*n_obs:3*n_obs] +# # cost3, dcost3 = cost_smooth_(lai, gamma[1]) +# # tmp = np.zeros_like(dcost1) +# # tmp[2*n_obs+1:-1] = dcost3 +# tmp=0 +# cost3=0 + +# # # Smooth evolution of ruffness +# # R = x[(6 + n_obs):(6 + 2*n_obs)] +# # cost4, dcost4 = cost_smooth_(R, gamma[0]) +# # tmp[(7 + n_obs) : (5 + 2*n_obs)] = dcost4 +# # return cost1 + cost2 + cost3 + cost4, dcost1 + dcost2 + tmp +# # pdb.set_trace() +# return cost1 + cost2 + cost3, dcost1 + dcost2 + tmp diff --git a/kaska/watercloudmodel_vwc_rms.py b/kaska/watercloudmodel_vwc_rms.py new file mode 100644 index 0000000..102c488 --- /dev/null +++ b/kaska/watercloudmodel_vwc_rms.py @@ -0,0 +1,679 @@ +#!/usr/bin/env python +"""Some useful functions for the Water Cloud Model (WCM) +used to retrieve parameters from Sentinel 1 data. The model +is first presented in Attema & Ulaby (1978) + +The WCM predicts backscatter in a polarisation `pp` as a function +of some parameters: +$$ +\sigma_{pp}^{0} = A\cdot V_{1}\left[1 - \exp\left(-\frac{-2B\cdot V_{2}}{\cos\theta}\right)\right] + \exp\left(-\frac{-2B\cdot V_{2}}{\cos\theta}\right)\cdot\left(C + D\cdot M_{v}\right). +$$ + +`A*V_1` is basically the backscattering coefficient, whereas +`B*V_2` is the extinction coefficient. `C` relates `VSM` (volumetric +soil moisture in [%]) to backscatter. In general, all the "constants" +(`A`, `B`, `C`, `D`) are polarisation dependent. `V1` and `V2` have to do with +the scatterers within the turbid medium, and are usually related to LAI. 
+""" + +import numpy as np +import scipy.stats as SS_vh +import pdb + +from sense.canopy import OneLayer +from sense.soil import Soil +from sense import model +from sense.dielectric import Dobson85 +from sense.core import Reflectivity +from sense.util import f2lam + +# def wcm_jac_(A, V1, B, V2, R, alpha, C, theta=23): +# """WCM model and jacobian calculations. The main +# assumption here is that we only consider first +# order effects. The vegetation backscatter contribution +# is given by `A*V1`, which is often related to scatterer +# (e.g. leaves, stems, ...) properties. The attenuation +# due to the canopy is controlled by `B*V2`, which is +# often related to canopy moisture content (this is polarisation +# and frequency dependent). The soil backscatter is modelled as +# an additive model (in dB units, multiplicative in linear), with +# a roughness term and a moisture-controlled term. The soil moisture +# term can be interpreted in weird and wonderful manners once retrieved +# (eg inverting the dielectric constant) +# This function returns the gradient for all parameters (A, B, +# V1, V2 and C).""" +# mu = np.cos(np.deg2rad(theta)) +# tau = np.exp(-2 * B * V2 / mu) +# veg = A * V1 * mu * (1 - tau) +# sigma_soil = R+alpha +# soil = tau * sigma_soil + C + +# der_dA = V1 * mu - V1 * mu * tau +# der_dV1 = A * mu - A * mu * tau +# der_dB = (-2 * V2 / mu) * tau * (-A * V1 * mu + sigma_soil) +# der_dV2 = (-2 * B / mu) * tau * (-A * V1 * mu + sigma_soil) +# der_dC = 1 +# der_dR = tau +# der_dalpha = tau + +# # Also returns der_dV1 and der_dV2 +# return ( +# veg + soil, +# [der_dA, der_dB, der_dC, der_dR, der_dalpha, der_dV1, der_dV2] +# ) + + +# def fresnel(e): +# return np.abs( (1.-np.sqrt(e))/(1.+np.sqrt(e)) )**2. 
+ +# def refelctivity(eps, theta): +# """ +# table 2.5 Ulaby (2014) +# assumes specular surface +# Parameters +# ---------- +# eps : complex +# relative dielectric permitivity +# theta : float, ndarray +# incidence angle [rad] +# can be specified +# """ +# co = np.cos(theta) +# si2 = np.sin(theta)**2. +# rho_v = (eps*co-np.sqrt(eps-si2))/(eps*co+np.sqrt(eps-si2)) +# rho_h = (co-np.sqrt(eps-si2))/(co+np.sqrt(eps-si2)) + +# v = np.abs(rho_v)**2. +# h = np.abs(rho_h)**2. + +# return v, h + +# def ssrt_jac_oh92_(eps, coef, LAI, H, theta): +# theta = np.deg2rad(theta) +# mu = np.cos(theta) +# omega = 0.027 +# freq = 5.405 +# k = 2.*np.pi / f2lam(freq) +# s = 0.0115 + +# ks = k * s + +# c = 1./(3.*fresnel(eps)) +# p = (1. - (2.*theta/np.pi)**c * np.exp(-ks))**2. + +# v, h = refelctivity(eps,theta) +# a = 0.7*(1.-np.exp(-0.65*ks**1.8)) * np.cos(theta)**3. +# b = (v+h) / np.sqrt(p) + +# sigma_soil = a*b + +# tau = np.exp(-coef * np.sqrt(LAI) * H / mu)**2 + +# soil = tau * sigma_soil +# veg = omega * mu / 2 * (1 - tau) + +# co = np.cos(theta) +# si = np.sin(theta) +# si2 = np.sin(theta)**2. +# hoch = (np.sqrt(eps)+1)**2./(3*(1-np.sqrt(eps))**2.) 
+# d = np.exp(-ks) +# f = 2.*theta/np.pi + +# k = sigma_soil +# m = np.sqrt(LAI) * H / mu +# l = omega * mu / 2 + + +# part_one = (2*a*(co-1/2*np.sqrt(eps-si2))*(eps*co-np.sqrt(eps-si2))) / (p * (eps*co+np.sqrt(eps-si2))**2) +# part_two = (2*a*(co+1/2*np.sqrt(eps-si2))*(eps*co-np.sqrt(eps-si2))**2) / (p * (eps*co+np.sqrt(eps-si2))**3) +# part_three = (2*a*d*( (np.sqrt(eps)+1)**2)/(3*(1-np.sqrt(eps))**3*np.sqrt(eps)) + (np.sqrt(eps)+1)/(3*(1-np.sqrt(eps))**2*np.sqrt(eps)) * f**c * np.log(f) * (eps*co-np.sqrt(eps-si2))**2) / ((1-d*f**c)**3*(np.sqrt(eps-si2)+eps*co)**2) +# p1 = part_one-part_two+part_three + +# part_four = (-a*co-np.sqrt(eps-si2)) / (2*(1-d*f**c)**2 * np.sqrt(eps-si2) * (np.sqrt(eps-si2)+co)**2) +# part_five = (2*d*((np.sqrt(eps)+1)**2 / (3*(1-np.sqrt(eps))**3*np.sqrt(eps)) + (np.sqrt(eps)+1)/(3*(1-np.sqrt(eps))**2*np.sqrt(eps))) *f**c * np.log(f) * (a*co-np.sqrt(eps-si2))) / ((1-d*f**c)**3 * (np.sqrt(eps-si2)+co)) +# part_six = 1/ (2*(1-d*f**c)**2 * np.sqrt(eps-si2) * (np.sqrt(eps*si2)+co)) +# p2 = part_four + part_five - part_six + +# der_mv = p1 + p2 +# # pdb.set_trace() +# # part_one = (a*( ((co-1/2*np.sqrt(eps-si2))/(eps*co+np.sqrt(eps-si2))) - (1/(2*np.sqrt(eps-si2)*(co+np.sqrt(eps-si2)))) - ((1/2*np.sqrt(eps-si2)+co)*(eps*co-np.sqrt(eps-si2))/(eps*co+np.sqrt(eps-si2))**2) )) / p +# # part_two = (2*a*d*((((np.sqrt(eps)+1)**2)/(3*(1-np.sqrt(eps))**3 * np.sqrt(eps))) + (np.sqrt(eps)+1)/(3*(1-np.sqrt(eps))**2 * np.sqrt(eps))) * f**c * np.log(f) * (v+h)) / (1-d*f**c)**3 + +# # der_mv = part_one + part_two +# der_coef = -2*l*(k-m)*np.exp(-2*l-coef) + +# return ( +# veg + soil, +# [der_mv, der_coef] +# ) + + +def ssrt_jac_vwc(mv, vwc, s, omega, b, theta): + """""" + + # omega = 0.027 + freq = 5.405 + k = 2.*np.pi / f2lam(freq) + # s = 0.0115 + + clay = 0.0738 + sand = 0.2408 + bulk = 1.45 + + ks = k * s + + mu = np.cos(np.deg2rad(theta)) + sin = np.sin(1.5*np.deg2rad(theta)) + a = 0.11 * mv**0.7 * mu**2.2 + bb = 1 - np.exp(-0.32 * ks**1.8) + q = 0.095 
* (0.13 + sin)**1.4 * (1-np.exp(-1.3 * ks**0.9)) + sigma_soil = a * bb / q + + tau = np.exp(-b * vwc / mu)**2 + + # soil2 = tau * sigma_soil + # veg = omega * mu / 2 * (1 - tau) + # pdb.set_trace() + # Sense + models = {'surface': 'Oh04', 'canopy': 'turbid_isotropic'} + can = 'turbid_isotropic' + ke = b * vwc + if np.nanmean(theta) > 5.: + theta = np.deg2rad(theta) + + # soil + soil = Soil(mv=mv, s=s, f=freq, clay=clay, sand=sand, bulk=bulk) + + # canopy + can = OneLayer(canopy=can, ke_h=ke, ke_v=ke, d=1., ks_h = omega * ke, ks_v = omega*ke) + + S = model.RTModel(surface=soil, canopy=can, models=models, theta=theta, freq=freq) + S.sigma0() + S.__dict__['stot']['vv'[::-1]], S.__dict__['stot']['vh'[::-1]] + + s0g = S.__dict__['s0g']['vv'] + s0c = S.__dict__['s0c']['vv'] + s0cgt = S.__dict__['s0cgt']['vv'] + s0gcg = S.__dict__['s0gcg']['vv'] + stot = S.__dict__['stot']['vv'] + + eps = Dobson85(clay=clay, sand=sand, bulk=bulk, mv=mv, freq=freq).eps + v = Reflectivity(eps,theta).v + + sec = 1/np.cos(theta) + + der_omega1 = (1/2 * np.cos(theta) * (1-tau)) + der_omega2 = (4 * b * vwc * tau * v) + der_omega3 = 1/2 * np.cos(theta) * v * (np.sqrt(tau)-tau) + der_b1 = -2 * s0g * vwc * sec * tau + der_vwc1 = -2 * s0g * b * sec * tau + der_b2 = omega * vwc * tau + der_vwc2 = omega * b * tau + der_b3 = -4 * omega * vwc * v * tau * (2 * vwc * b * sec - 1) + der_vwc3 = -4 * omega * b * v * tau * (2 * vwc * b * sec - 1) + der_b4 = - omega * vwc * v * tau**2 * (tau -2 ) + der_vwc4 = - omega * b * v * tau**2 * (tau -2 ) + + der_omega = der_omega1+der_omega2+der_omega3 + der_b = der_b1+der_b2+der_b3+der_b4 + der_vwc = der_vwc1+der_vwc2+der_vwc3+der_vwc4 + + der_s = S.G.rt_s.der_s_vv + der_mv = S.G.rt_s.der_mv_vv + + return ( + stot , + [der_omega, der_s, der_mv, der_vwc, der_b] + ) + + # return ( + # stot , + # [der_omega, der_s, der_mv, der_vwc] + # ) + +def ssrt_vwc(mv, vwc, s, omega, b, theta): + """""" + + # omega = 0.027 + freq = 5.405 + k = 2.*np.pi / f2lam(freq) + # s = 0.0115 
+ + clay = 0.0738 + sand = 0.2408 + bulk = 1.45 + + ks = k * s + + mu = np.cos(np.deg2rad(theta)) + sin = np.sin(1.5*np.deg2rad(theta)) + a = 0.11 * mv**0.7 * mu**2.2 + bb = 1 - np.exp(-0.32 * ks**1.8) + q = 0.095 * (0.13 + sin)**1.4 * (1-np.exp(-1.3 * ks**0.9)) + sigma_soil = a * bb / q + + tau = np.exp(-b * vwc / mu)**2 + + # soil2 = tau * sigma_soil + # veg = omega * mu / 2 * (1 - tau) + # pdb.set_trace() + # Sense + models = {'surface': 'Oh04', 'canopy': 'turbid_isotropic'} + can = 'turbid_isotropic' + ke = b * vwc + if np.nanmean(theta) > 5.: + theta = np.deg2rad(theta) + + # soil + soil = Soil(mv=mv, s=s, f=freq, clay=clay, sand=sand, bulk=bulk) + + # canopy + can = OneLayer(canopy=can, ke_h=ke, ke_v=ke, d=1., ks_h = omega * ke, ks_v = omega*ke) + + S = model.RTModel(surface=soil, canopy=can, models=models, theta=theta, freq=freq) + S.sigma0() + S.__dict__['stot']['vv'[::-1]], S.__dict__['stot']['vh'[::-1]] + + s0g = S.__dict__['s0g']['vv'] + s0c = S.__dict__['s0c']['vv'] + s0cgt = S.__dict__['s0cgt']['vv'] + s0gcg = S.__dict__['s0gcg']['vv'] + stot = S.__dict__['stot']['vv'] + + eps = Dobson85(clay=clay, sand=sand, bulk=bulk, mv=mv, freq=freq).eps + v = Reflectivity(eps,theta).v + + sec = 1/np.cos(theta) + + der_omega1 = (1/2 * np.cos(theta) * (1-tau)) + der_omega2 = (4 * b * vwc * tau * v) + der_omega3 = 1/2 * np.cos(theta) * v * (np.sqrt(tau)-tau) + der_b1 = -2 * s0g * vwc * sec * tau + der_vwc1 = -2 * s0g * b * sec * tau + der_b2 = omega * vwc * tau + der_vwc2 = omega * b * tau + der_b3 = -4 * omega * vwc * v * tau * (2 * vwc * b * sec - 1) + der_vwc3 = -4 * omega * b * v * tau * (2 * vwc * b * sec - 1) + der_b4 = - omega * vwc * v * tau**2 * (tau -2 ) + der_vwc4 = - omega * b * v * tau**2 * (tau -2 ) + + der_omega = der_omega1+der_omega2+der_omega3 + der_b = der_b1+der_b2+der_b3+der_b4 + der_vwc = der_vwc1+der_vwc2+der_vwc3+der_vwc4 + + der_s = S.G.rt_s.der_s_vv + der_mv = S.G.rt_s.der_mv_vv + + # return ( + # stot , + # [der_s, der_omega, der_mv, 
der_vwc, der_b] + # ) + + return ( + stot ,s0g, s0c + ) + + + +# def fwd_model_(x, svh, svv, theta): +# """Running the model forward to predict backscatter""" +# n_obs = len(svv) +# A_vv, B_vv, C_vv, A_vh, B_vh, C_vh = x[:6] +# alpha = x[6 : (6 + n_obs)] +# R = x[(6 + n_obs):(6 + 2*n_obs)] +# lai = x[(6 + 2*n_obs) :] +# sigma_vv, dvv = wcm_jac_(A_vv, lai, B_vv, lai, C_vv, R, alpha, theta=theta) +# sigma_vh, dvh = wcm_jac_(A_vh, lai, B_vh, lai, C_vh, R, alpha, theta=theta) +# return sigma_vv, sigma_vh + +# def cost_obs_(x, svh, svv, theta, unc=0.5): +# """Cost function. Order of parameters is +# A_vv, B_vv, C_vv, A_vh, B_vh, C_vh, +# vsm_0, ..., vsm_N, +# LAI_0, ..., LAI_N +# We assume that len(svh) == N +# Uncertainty is the uncertainty in backscatter, and +# assume that there are two polarisations (VV and VH), +# although these are just labels! +# """ +# n_obs = svh.shape[0] +# A_vv, B_vv, C_vv, A_vh, B_vh, C_vh = x[:6] +# alpha = x[6 : (6 + n_obs)] +# R = x[(6 + n_obs):(6 + 2*n_obs)] +# lai = x[(6 + 2*n_obs) :] +# sigma_vv, dvv = wcm_jac_(A_vv, lai, B_vv, lai, C_vv, R, alpha, theta=theta) +# sigma_vh, dvh = wcm_jac_(A_vh, lai, B_vh, lai, C_vh, R, alpha, theta=theta) +# diff_vv = svv - sigma_vv +# diff_vh = svh - sigma_vh +# #NOTE!!!!! Only fits the VV channel!!!! +# # Soil misture in VH is complicated +# diff_vh = 0. 
+# cost = 0.5 * (diff_vv ** 2 + diff_vh ** 2) / (unc ** 2) +# jac = np.concatenate( +# [##[der_dA, der_dB, der_dC, der_dR, der_dalpha, der_dV1, der_dV2] +# np.array( +# [ +# np.sum(dvv[0] * diff_vv), # A_vv +# np.sum(dvv[1] * diff_vv), # B_vv +# np.sum(dvv[2] * diff_vv), # C_vv +# np.sum(dvh[0] * diff_vh), # A_vh +# np.sum(dvh[1] * diff_vh), # B_vh +# np.sum(dvh[2] * diff_vh), +# ] +# ), # C_vh +# dvv[3] * diff_vv + dvh[3] * diff_vh, # R +# dvv[4] * diff_vv + dvh[4] * diff_vh, # alpha +# (dvv[5] + dvv[6]) * diff_vv + (dvh[5] + dvh[6]) * diff_vh, # LAI +# ] +# ) + +# return np.nansum(cost), -jac / (unc ** 2) + + +def cost_obs_vwc(x, svh, svv, theta, unc=0.5, data=0): + """Cost function. Order of parameters is + s, omega, + b_0, ..., b_N, + vwc_0, ..., vwc_N + mv_0, ..., mv_N + We assume that len(svh) == N + Uncertainty is the uncertainty in backscatter, and + assume that there are two polarisations (VV and VH), + although these are just labels! + """ + n_obs = svh.shape[0] + omega = x[0] + s = x[1] + mv = x[(2) : (2 + n_obs)] + vwc = x[(2 + n_obs) : (2 + 2*n_obs)] + b = x[(2 + 2*n_obs) : (2 + 3*n_obs)] + + sigma_vv, dvv = ssrt_jac_vwc(mv, vwc, s, omega, b, theta=theta) + # sigma_vh, dvh = ssrt_jac_vwc(A_vh, lai, B_vh, lai, C_vh, R, alpha, theta=theta) + diff_vv = svv - sigma_vv + ### in dB ??? + diff_vv = 10*np.log10(svv) - 10*np.log10(sigma_vv) + # diff_vh = svh - sigma_vh + #NOTE!!!!! Only fits the VV channel!!!! + # Soil misture in VH is complicated + diff_vh = 0. 
+ # cost = 0.5 * (diff_vv ** 2 + diff_vh ** 2) / (unc ** 2) + cost = (diff_vv ** 2 + diff_vh ** 2) / (unc ** 2) + + jac = np.concatenate( + [##[der_omega, der_s, der_mv, der_vwc, der_b] + np.array([np.sum(dvv[0] * diff_vv)]), # omega + np.array([np.sum(dvv[1] * diff_vv)]), # s + dvv[2] * diff_vv, # mv + dvv[3] * diff_vv, # vwc + dvv[4] * diff_vv, # b + + ] + ) + + return np.nansum(cost), -jac / (unc ** 2) + + + +# def cost_obs_ssrt(x, svh, svv, theta, data, unc=0.3): +# """ +# """ +# n_obs = svh.shape[0] +# mv = x[:n_obs] +# coef = x[n_obs:2*n_obs] +# lai = data[1:(2*n_obs)] +# h = data[0] + +# sigma_vv, dvv = ssrt_jac_(mv, coef, lai, h, theta=theta) + +# diff_vv = 10*np.log10(svv) - 10*np.log10(sigma_vv) +# # diff_vv = svv - sigma_vv +# # diff_vv = 10 ** (svv/10) - sigma_vv +# # pdb.set_trace() +# cost = 0.5 * (diff_vv ** 2) / (unc ** 2) + +# jac = np.concatenate( +# ##[der_dA, der_dB, der_dC, der_dR, der_dalpha, der_dV1, der_dV2] +# np.array( +# [ +# (dvv[0] * diff_vv), # mv +# (dvv[1] * diff_vv), # coef +# # (dvv[2] * diff_vv), # lai +# # (dvv[3] * diff_vv), # height +# ] +# ) + +# ) +# # pdb.set_trace() +# return np.nansum(cost), -jac / (unc ** 2) + +# def cost_obs_ssrt_oh92_(x, svh, svv, theta, data, unc=0.3): +# """ +# """ +# n_obs = svh.shape[0] +# mv = x[:n_obs] +# coef = x[n_obs:2*n_obs] +# lai = data[n_obs:(2*n_obs)] +# h = data[:n_obs] + +# sigma_vv, dvv = ssrt_jac_oh92_(mv, coef, lai, h, theta=theta) + +# diff_vv = svv - 10*np.log10(sigma_vv) + +# cost = 0.5 * (diff_vv ** 2) / (unc ** 2) + +# jac = np.concatenate( +# ##[der_dA, der_dB, der_dC, der_dR, der_dalpha, der_dV1, der_dV2] +# np.array( +# [ +# (dvv[0] * diff_vv), # mv +# (dvv[1] * diff_vv), # coef +# # (dvv[2] * diff_vv), # lai +# # (dvv[3] * diff_vv), # height +# ] +# ) + +# ) +# # pdb.set_trace() +# return np.nansum(cost), -jac / (unc ** 2) + + + +# def cost_prior_(x, svh, svv, theta, prior_mean, prior_unc): +# """A Gaussian cost function prior. 
We assume no correlations +# between parameters, only mean and standard deviation. +# Cost function. Order of parameters is +# A_vv, B_vv, C_vv, A_vh, B_vh, C_vh, +# alpha_0, ..., alpha_N, +# ruff_0, ..., ruff_N, +# LAI_0, ..., LAI_N +# We assume that len(svh) == N +# """ +# n_obs = len(svh) +# prior_cost = 0.5 * (prior_mean - x) ** 2 / prior_unc ** 2 +# dprior_cost = -(prior_mean - x) / prior_unc ** 2 +# dprior_cost[:6] = 0.0 +# # Ruff->No prior! +# dprior_cost[(6 + n_obs):(6 + 2*n_obs)] = 0. +# cost0 = prior_cost[6:(6+n_obs)].sum() # alpha cost +# cost1 = prior_cost[(6+2*n_obs):].sum() # LAI cost +# return cost0 + cost1, dprior_cost + + +def cost_prior_vwc(x, svh, svv, theta, prior_mean, prior_unc): + """A Gaussian cost function prior. We assume no correlations + between parameters, only mean and standard deviation. + Cost function. Order of parameters is + s, omega, + mv_0, ..., b_N, + vwc_0, ..., vwc_N + b_0, ..., mv_N + We assume that len(svh) == N + """ + n_obs = len(svh) + + # mean_prior = np.nanmean(prior_mean[(2) : (2 + n_obs)]) + # mean_x = np.nanmean(x[(2) : (2 + n_obs)]) + + # ppp = prior_mean *1. + # pppp = prior_mean *1. + + # ppp[(2) : (2 + n_obs)] = prior_mean[(2) : (2 + n_obs)] - mean_prior + # pppp[(2) : (2 + n_obs)] = x[(2) : (2 + n_obs)] - mean_x + + prior_cost = 0.5 * (prior_mean - x) ** 2 / prior_unc ** 2 + prior_cost = (prior_mean - x) ** 2 / prior_unc ** 2 + + # prior_cost = 0.5 * (ppp - pppp) ** 2 / prior_unc ** 2 + + + + dprior_cost = -(prior_mean - x) / prior_unc ** 2 + + # dprior_cost = -(prior_mean/mean_prior*mean_x - x) / prior_unc ** 2 + + + y = x[1:] - x[:-1] + yy = prior_mean[1:] - prior_mean[:-1] + + prior_cost1 = (y-yy)**2 / prior_unc[:-1]**2 + cost1 = prior_cost1[(2 ) : (2 + n_obs-1)].sum() + + dprior_cost1 = -(y - yy) / prior_unc[:-1] ** 2 + dprior_cost1 = np.append(dprior_cost1,0) + dprior_cost1[0:3] = 0 + dprior_cost1[(2 + n_obs) : (2 + 2*n_obs)] = 0. # vwc + dprior_cost1[(2 + 2*n_obs) : (2 + 3*n_obs)] = 0. 
# b + + dprior_cost[0] = 0.0 + # Ruff->No prior! + dprior_cost[1] = 0. # rms + # dprior_cost[(2 ) : (2 + n_obs)] = 0. # mv + dprior_cost[(2 + n_obs) : (2 + 2*n_obs)] = 0. # vwc + dprior_cost[(2 + 2*n_obs) : (2 + 3*n_obs)] = 0. # b + cost0 = prior_cost[(2 ) : (2 + n_obs)].sum() # mv cost + # cost1 = prior_cost[(2 + n_obs) : (2 + 2*n_obs)].sum() #vwc cost + + # print(cost0) + return cost0+cost1 , dprior_cost + dprior_cost1 + +# def cost_prior_ssrt(x, svh, svv, theta, prior_mean, prior_unc): +# """A Gaussian cost function prior. We assume no correlations +# between parameters, only mean and standard deviation. +# Cost function. Order of parameters is +# A_vv, B_vv, C_vv, A_vh, B_vh, C_vh, +# alpha_0, ..., alpha_N, +# ruff_0, ..., ruff_N, +# LAI_0, ..., LAI_N +# We assume that len(svh) == N +# """ +# # pdb.set_trace() +# n_obs = len(svh) +# prior_cost = 0.5 * (prior_mean - x) ** 2 / prior_unc ** 2 +# dprior_cost = -(prior_mean - x) / prior_unc ** 2 +# # coef->No prior! +# # dprior_cost[(n_obs):(2*n_obs)] = 0. +# # dprior_cost[:n_obs] = 0. +# cost0 = prior_cost[:(n_obs)].sum() # mv cost +# cost1 = prior_cost[n_obs:2*n_obs].sum() # coef cost +# # cost0=0 + +# return cost0 + cost1, dprior_cost + + +def cost_smooth_(x, gamma): + """A smoother for one parameter (e.g. LAI or whatever). + `gamma` controls the magnitude of the smoothing (higher + `gamma`, more smoothing) + """ + # Calculate differences + p_diff1 = x[1:-1] - x[2:] + p_diff2 = x[1:-1] - x[:-2] + # Cost function + xcost_model = 0.5 * gamma * np.sum(p_diff1 ** 2 + p_diff2 ** 2) + # Jacobian + xdcost_model = 1 * gamma * (p_diff1 + p_diff2) + # Note that we miss the first and last elements of the Jacobian + # They're zero! 
+ return xcost_model, xdcost_model + + +# def cost_function(x, svh, svv, theta, gamma, prior_mean, prior_unc, unc=0.8): +# """A combined cost function that calls the prior, fit to the observations +# """ +# # Fit to the observations +# cost1, dcost1 = cost_obs_(x, svh, svv, theta, unc=unc) +# # Fit to the prior +# cost2, dcost2 = cost_prior_(x, svh, svv, theta, prior_mean, prior_unc) +# # Smooth evolution of LAI +# n_obs = len(svv) +# lai = x[(6 + 2*n_obs) :] +# cost3, dcost3 = cost_smooth_(lai, gamma[1]) +# tmp = np.zeros_like(dcost1) +# tmp[(7 + 2*n_obs) : -1] = dcost3 +# # Smooth evolution of ruffness +# R = x[(6 + n_obs):(6 + 2*n_obs)] +# cost4, dcost4 = cost_smooth_(R, gamma[0]) +# tmp[(7 + n_obs) : (5 + 2*n_obs)] = dcost4 +# # pdb.set_trace() +# return cost1 + cost2 + cost3 + cost4, dcost1 + dcost2 + tmp + +def cost_function_vwc(x, svh, svv, theta, gamma, prior_mean, prior_unc, unc=0.8, data=0): + """A combined cost function that calls the prior, fit to the observations + """ + + # Fit to the observations + cost1, dcost1 = cost_obs_vwc(x, svh, svv, theta, unc=unc, data=data) + # Fit to the prior + cost2, dcost2 = cost_prior_vwc(x, svh, svv, theta, prior_mean, prior_unc) + # print(cost1) + # print(cost2) + # print(x) + # cost2 = 0 + # dcost2=0 + cost3 = 0 + cost4 = 0 + tmp = 0 + ###Smooth evolution of sm + # n_obs = len(svv) + # lai = x[2 : (2 + n_obs)] + # cost3, dcost3 = cost_smooth_(lai, gamma[1]) + # # pdb.set_trace() + # tmp = np.zeros_like(dcost1) + # tmp[3 : (2 + n_obs)-1] = dcost3 + # # Smooth evolution of ruffness + # R = x[(6 + n_obs):(6 + 2*n_obs)] + # cost4, dcost4 = cost_smooth_(R, gamma[0]) + # tmp[(7 + n_obs) : (5 + 2*n_obs)] = dcost4 + # pdb.set_trace() + return cost1 + cost2 + cost3 + cost4, dcost1 + dcost2 + tmp + + + + +# def cost_function2(x, svh, svv, theta, gamma, prior_mean, prior_unc, data, unc=0.3): +# """A combined cost function that calls the prior, fit to the observations +# """ +# # Fit to the observations +# cost1, dcost1 = 
cost_obs_ssrt(x, svh, svv, theta, data, unc=unc) +# # cost1, dcost1 = cost_obs_ssrt_oh92_(x, svh, svv, theta, data, unc=unc) +# # pdb.set_trace() +# # Fit to the prior +# cost2, dcost2 = cost_prior_ssrt(x, svh, svv, theta, prior_mean, prior_unc) +# # pdb.set_trace() +# # Smooth evolution of LAI +# # n_obs = len(svv) +# # lai = x[2*n_obs:3*n_obs] +# # cost3, dcost3 = cost_smooth_(lai, gamma[1]) +# # tmp = np.zeros_like(dcost1) +# # tmp[2*n_obs+1:-1] = dcost3 +# tmp=0 +# cost3=0 + +# # # Smooth evolution of ruffness +# # R = x[(6 + n_obs):(6 + 2*n_obs)] +# # cost4, dcost4 = cost_smooth_(R, gamma[0]) +# # tmp[(7 + n_obs) : (5 + 2*n_obs)] = dcost4 +# # return cost1 + cost2 + cost3 + cost4, dcost1 + dcost2 + tmp +# # pdb.set_trace() +# return cost1 + cost2 + cost3, dcost1 + dcost2 + tmp diff --git a/kaska/write_tiff_files.py b/kaska/write_tiff_files.py new file mode 100644 index 0000000..7708726 --- /dev/null +++ b/kaska/write_tiff_files.py @@ -0,0 +1,39 @@ +import os +import osr +import gdal +import datetime +import numpy as np +from netCDF4 import Dataset +from scipy.ndimage import label +from utils import reproject_data +from skimage.filters import sobel +from collections import namedtuple +from scipy.optimize import minimize +from scipy.interpolate import interp1d +# from watercloudmodel import cost_function +from watercloudmodel import cost_function2 +from scipy.ndimage.filters import gaussian_filter1d +import pdb + + +lai = '/media/tweiss/Daten/data_AGU/lai.tif' + +g = gdal.Open(lai) +for i in range(g.RasterCount): + gg = g.GetRasterBand(i+1) + meta = gg.GetMetadata() + + pdb.set_trace() + +# def read_s2_lai(s2_lai, s2_cab, s2_cbrown, state_mask): +# s2_data = namedtuple('s2_lai', 'time lai cab cbrown') +# g = gdal.Open(s2_lai) +# time = [] +# for i in range(g.RasterCount): +# gg = g.GetRasterBand(i+1) +# meta = gg.GetMetadata() +# time.append(datetime.datetime.strptime(meta['DoY'], '%Y%j')) +# lai = reproject_data(s2_lai, output_format="MEM", 
target_img=state_mask) +# cab = reproject_data(s2_cab, output_format="MEM", target_img=state_mask) +# cbrown = reproject_data(s2_cbrown, output_format="MEM", target_img=state_mask) +# return s2_data(time, lai, cab, cbrown) diff --git a/kaska/z_helper.py b/kaska/z_helper.py new file mode 100644 index 0000000..8cd47bf --- /dev/null +++ b/kaska/z_helper.py @@ -0,0 +1,147 @@ + +import pandas as pd +import numpy as np +import scipy.stats +import os +import pdb +### Helper functions for plots### +#------------------------------- + + +# Helper functions for statistical parameters +#-------------------------------------------- +def rmse_prediction(predictions, targets): + """ calculation of RMSE """ + return np.sqrt(np.nanmean((predictions - targets) ** 2)) + +def bias_prediction(predictions, targets): + """ calculation of bias """ + return np.nanmean(predictions - targets) + +def bias_advanced(predictions, targets): + xxx = predictions.values - targets.values + xxx[xxx>1.5]=np.nan + xxx[xxx<(-1.5)]=np.nan + length = int(len(xxx)/2) + + return np.nanmean(xxx[:length]), np.nanmean(xxx[length:]) + +def ubrmse_prediction(rmse,bias): + """ calculation of unbiased RMSE """ + return np.sqrt(rmse ** 2 - bias ** 2) + +def linregress(predictions, targets): + """ Calculate a linear least-squares regression for two sets of measurements """ + + # get rid of NaN values + predictions_new, targets_new = nan_values(predictions, targets) + + # linregress calculation + slope, intercept, r_value, p_value, std_err = scipy.stats.linregress(predictions_new, targets_new) + return slope, intercept, r_value, p_value, std_err + +def nan_values(predictions, targets): + """ get rid of nan values""" + predictions2 = predictions[~np.isnan(predictions)] + targets2 = targets[~np.isnan(predictions)] + predictions3 = predictions2[~np.isnan(targets2)] + targets3 = targets2[~np.isnan(targets2)] + return predictions3, targets3 + +# provide in-situ data +#------------------------------- +def 
read_mni_data(path, file_name, extension, field, sep=','): + """ read MNI campaign data """ + df = pd.io.parsers.read_csv(os.path.join(path, file_name + extension), header=[0, 1], sep=sep) + df = df.set_index(pd.to_datetime(df[field]['date'])) + df = df.drop(df.filter(like='date'), axis=1) + return df + +def read_agrometeo(path, file_name, extension, sep=';', decimal=','): + """ read agro-meteorological station (hourly data) """ + df = pd.read_csv(os.path.join(path, file_name + extension), sep=sep, decimal=decimal) + + # df['SUM_NN050'] = df['SUM_NN050'].str.replace(',','.') + # df['SUM_NN050'] = df['SUM_NN050'].str.replace('-','0').astype(float) + + # df['date'] = df['Tag'] + ' ' + df['Stunde'] + df['date'] = df['Tag'] + # df = df.set_index(pd.to_datetime(df['date'], format='%d.%m.%Y %H:%S')) + df = df.set_index(pd.to_datetime(df['date'], format='%d.%m.%Y')) + return df + +def filter_relativorbit(data, field, orbit1, orbit2=None, orbit3=None, orbit4=None): + """ data filter for relativ orbits """ + output = data[[(check == orbit1 or check == orbit2 or check == orbit3 or check == orbit4) for check in data[(field,'relativeorbit')]]] + return output + +def read_data(path, file_name, extension, field, path_agro=None, file_name_agro=None, extension_agro=None, pol=None, orbit1=None, orbit2=None, orbit3=None, orbit4=None): + """ return all in-situ data """ + + # Read MNI data + df = read_mni_data(path, file_name, extension, field) + + # Read agro-meteorological station + try: + df_agro = read_agrometeo(path_agro, file_name_agro, extension_agro) + except FileNotFoundError: + df_agro = 0 + + # filter for field + field_data = df.filter(like=field) + + # filter for relativorbit + if orbit1 != None: + field_data_orbit = filter_relativorbit(field_data, field, orbit1, orbit2, orbit3, orbit4) + field_data = field_data_orbit + else: + field_data_orbit = None + + # get rid of NaN values + parameter_nan = 'LAI' + field_data = 
field_data[~np.isnan(field_data.filter(like=parameter_nan).values)] + + # available auxiliary data + theta_field = np.deg2rad(field_data.filter(like='theta')) + # theta_field[:] = 45 + sm_field = field_data.filter(like='SM') + height_field = field_data.filter(like='Height')/100 + lai_field = field_data.filter(like='LAI') + vwc_field = field_data.filter(like='VWC') + pol_field = field_data.filter(like='sigma_sentinel_'+pol) + vv_field = field_data.filter(like='sigma_sentinel_vv') + vh_field = field_data.filter(like='sigma_sentinel_vh') + relativeorbit = field_data.filter(like='relativeorbit') + vwcpro_field = field_data.filter(like='watercontentpro') + return df, df_agro, field_data, field_data_orbit, theta_field, sm_field, height_field, lai_field, vwc_field, pol_field, vv_field, vh_field, relativeorbit, vwcpro_field + +# Hanning smoother +#--------------------------------------------------------- +def smooth(x,window_len=11,window='hanning'): + if x.ndim != 1: + raise ValueError #, "smooth only accepts 1 dimension arrays." + if x.size < window_len: + raise ValueError #, "Input vector needs to be bigger than window size." 
+ if window_len<3: + return x + if not window in ['flat', 'hanning', 'hamming', 'bartlett', 'blackman']: + raise ValueError #, "Window is on of 'flat', 'hanning', 'hamming', 'bartlett', 'blackman'" + s=np.r_[2*x[0]-x[window_len-1::-1],x,2*x[-1]-x[-1:-window_len:-1]] + if window == 'flat': #moving average + w=np.ones(window_len,'d') + else: + w=eval('np.'+window+'(window_len)') + y=np.convolve(w/w.sum(),s,mode='same') + return y[window_len:-window_len+1] + + +# Paper 3 +# calculation per pixel + +def calc_pix(x, axis=0): + value_mean = np.nanmean(x,axis=axis) + value_std = np.nanstd(x,axis=axis) + value_var = scipy.stats.variation(x,axis=axis) + + return value_mean, value_std, value_var + diff --git a/setup.py b/setup.py index e986caf..b31d8e8 100644 --- a/setup.py +++ b/setup.py @@ -10,7 +10,7 @@ author_email = "j.gomez-dans@ucl.ac.uk", license = "GNU General Public License v3", # packages = find_packages(include=['kaska']), - packages = ['kaska'], # , 'kaska.TwoNN', 'kaska.NNParameterInversion'], + packages = ['kaska' , 'kaska.TwoNN', 'kaska.NNParameterInversion', 'kaska.inverters'], scripts= ['scripts/run_kaska'], zip_safe=False ) diff --git a/test_output.py b/test_output.py new file mode 100644 index 0000000..9bc5616 --- /dev/null +++ b/test_output.py @@ -0,0 +1,1287 @@ +from osgeo import gdal +import os +import pdb +import numpy as np +import matplotlib.pyplot as plt +import pandas as pd +import datetime +import scipy.stats +import matplotlib.dates as dates + +def rmse_prediction(predictions, targets): + """ calculation of RMSE """ + return np.sqrt(np.nanmean((predictions - targets) ** 2)) + +def linregress(predictions, targets): + """ Calculate a linear least-squares regression for two sets of measurements """ + slope, intercept, r_value, p_value, std_err = scipy.stats.linregress(predictions, targets) + return slope, intercept, r_value, p_value, std_err + +def get_dataset(dataset,mask_301,mask_319,mask_508,mask_515,mask_542): + stack_date = [] + stack_data = 
[] + stack_301 = [] + stack_319 = [] + stack_508 = [] + stack_515 = [] + stack_542 = [] + for x in range(1, dataset.RasterCount + 1): + band = dataset.GetRasterBand(x) + array = band.ReadAsArray() + try: + stack_date.append(datetime.datetime.strptime(band.GetMetadata()['date'], '%Y-%m-%d')) + except: + pass + stack_301.append(np.nanmean(array[mask_301>0])) + stack_319.append(np.nanmean(array[mask_319>0])) + stack_508.append(np.nanmean(array[mask_508>0])) + stack_515.append(np.nanmean(array[mask_515>0])) + stack_542.append(np.nanmean(array[mask_542>0])) + return stack_date, stack_301, stack_319, stack_508, stack_515, stack_542 + +def read_data(path, file_name, extension, field, path_agro, file_name_agro, extension_agro): + # Read MNI data + df = read_mni_data(path, file_name, extension, field) + + # Read agro-meteorological station + # df_agro = read_agrometeo(path_agro, file_name_agro, extension_agro) + df_agro = 0 + + # filter for field + field_data = df.filter(like=field) + + # filter for relativorbit + field_data_orbit = filter_relativorbit(field_data, field, 95, 168) + # field_data = field_data_orbit + + # get rid of NaN values + parameter_nan = 'LAI' + field_data = field_data[~np.isnan(field_data.filter(like=parameter_nan).values)] + + # available auxiliary data + theta_field = np.deg2rad(field_data.filter(like='theta')) + # theta_field[:] = 45 + sm_field = field_data.filter(like='SM') + height_field = field_data.filter(like='Height')/100 + lai_field = field_data.filter(like='LAI') + vwc_field = field_data.filter(like='VWC') + pol_field = field_data.filter(like='sigma_sentinel_'+pol) + return df, df_agro, field_data, field_data_orbit, theta_field, sm_field, height_field, lai_field, vwc_field, pol_field + +def filter_relativorbit(data, field, orbit1, orbit2=None, orbit3=None, orbit4=None): + """ data filter for relativ orbits """ + output = data[[(check == orbit1 or check == orbit2 or check == orbit3 or check == orbit4) for check in 
data[(field,'relativeorbit')]]] + return output + +def read_mni_data(path, file_name, extention, field, sep=';'): + """ read MNI campaign data """ + df = pd.io.parsers.read_csv(os.path.join(path, file_name + extension), header=[0, 1], sep=sep) + df = df.set_index(pd.to_datetime(df[field]['date'])) + df = df.drop(df.filter(like='date'), axis=1) + return df + +### mask for fields + +# field names +fields = ['301', '508', '542', '319', '515'] +# fields = ['508'] +# ESU names +esus = ['high', 'low', 'med', 'mean'] +esus = ['high', 'low', 'med', 'mean'] +esus = ['high'] + +# Save output path +save_path = '/media/tweiss/Work/z_final_mni_data_2017' + +#------------------------------------------------------------------------------ +pixel = ['_Field_buffer_30','','_buffer_30','_buffer_50','_buffer_100'] +# pixel = ['_Field_buffer_30'] +# pixel = ['_buffer_30'] +pixel = ['_Field_buffer_30','_buffer_50','_buffer_100'] + +# processed_sentinel = ['multi','norm_multi'] +# processed_sentinel = ['mulit'] + + +path = '/media/tweiss/Daten/new_data' +file_name = 'multi10' # theta needs to be changed to for norm multi +extension = '.csv' + +path_agro = '/media/nas_data/2017_MNI_campaign/field_data/meteodata/agrarmeteorological_station' +file_name_agro = 'Eichenried_01012017_31122017_hourly' +extension_agro = '.csv' + +# df, df_agro, field_data, field_data_orbit, theta_field, sm_field, height_field, lai_field, vwc_field, pol_field = read_data(path, file_name, extension, field, path_agro, file_name_agro, extension_agro) + + +plt.rcParams["figure.figsize"] = (12,10) + + + +file_sm = 'MNI_2017_sar_sm.tif' +file_vv = 'MNI_2017_vv.tif' +file_vh = 'MNI_2017_vh.tif' +file_lai = 'MNI_2017_sar_lai.tif' +file_sr = 'MNI_2017_sar_sr.tif' +file_sm_prior = 'sm_prior.tif' +file_sm_std = 'sm_std.tif' +file_lai_prior = 'lai.tif' + + + + +pol = 'vv' + +for pixels in pixel: + print(pixels) + path_ESU = '/media/tweiss/Work/z_final_mni_data_2017/' + name_shp = 'ESU'+pixels+'.shp' + name_ESU = 
'ESU'+pixels+'.tif' + + path = '/media/tweiss/Daten/data_AGU/'+pixels + datapath = '/media/tweiss/Daten/data_AGU' + dataset_sm = gdal.Open(os.path.join(path,file_sm)) + + dataset_sm_prior = gdal.Open(os.path.join(datapath,file_sm_prior)) + band1 = dataset_sm_prior.GetRasterBand(1) + mask = band1.ReadAsArray() + + df_output = pd.DataFrame(columns=pd.MultiIndex(levels=[[],[]], codes=[[],[]])) + + + for esu in esus: + for field in fields: + g = gdal.Open(os.path.join(path_ESU, name_ESU)) + state_mask = g.ReadAsArray().astype(np.int) + + if pixels == '_Field_buffer_30': + if field == '515': + mask_value = 4 + state_mask = state_mask==mask_value + mask_515 = state_mask + elif field == '508': + mask_value = 27 + state_mask = state_mask==mask_value + mask_508 = state_mask + elif field == '542': + mask_value = 8 + state_mask = state_mask==mask_value + mask_542 = state_mask + elif field == '319': + mask_value = 67 + state_mask = state_mask==mask_value + mask_319 = state_mask + elif field == '301': + mask_value = 87 + state_mask = state_mask==mask_value + mask_301 = state_mask + else: + if field == '515' and esu == 'high': + mask_value = 1 + state_mask = state_mask==mask_value + mask_515 = state_mask + elif field == '515' and esu == 'med': + mask_value = 2 + state_mask = state_mask==mask_value + mask_515 = state_mask + elif field == '515' and esu == 'low': + mask_value = 3 + state_mask = state_mask==mask_value + mask_515 = state_mask + elif field == '508' and esu == 'high': + mask_value = 4 + state_mask = state_mask==mask_value + mask_508 = state_mask + elif field == '508' and esu == 'med': + mask_value = 5 + state_mask = state_mask==mask_value + mask_508 = state_mask + elif field == '508' and esu == 'low': + mask_value = 6 + state_mask = state_mask==mask_value + mask_508 = state_mask + elif field == '542' and esu == 'high': + mask_value = 7 + state_mask = state_mask==mask_value + mask_542 = state_mask + elif field == '542' and esu == 'med': + mask_value = 8 + state_mask = 
state_mask==mask_value + mask_542 = state_mask + elif field == '542' and esu == 'low': + mask_value = 9 + state_mask = state_mask==mask_value + mask_542 = state_mask + elif field == '319' and esu == 'high': + mask_value = 10 + state_mask = state_mask==mask_value + mask_319 = state_mask + elif field == '319' and esu == 'med': + mask_value = 11 + state_mask = state_mask==mask_value + mask_319 = state_mask + elif field == '319' and esu == 'low': + mask_value = 12 + state_mask = state_mask==mask_value + mask_319 = state_mask + elif field == '301' and esu == 'high': + mask_value = 13 + state_mask = state_mask==mask_value + mask_301 = state_mask + elif field == '301' and esu == 'med': + mask_value = 14 + # state_mask = state_mask==mask_value + mask_301 = state_mask + elif field == '301' and esu == 'low': + mask_value = 15 + state_mask = state_mask==mask_value + mask_301 = state_mask + elif field == '515' and esu == 'mean': + m = np.ma.array(state_mask,mask=((state_mask==1) | (state_mask==2) | (state_mask==3))) + state_mask = m.mask + mask_515 = state_mask + elif field == '508' and esu == 'mean': + m = np.ma.array(state_mask,mask=((state_mask==4) | (state_mask==5) | (state_mask==6))) + state_mask = m.mask + mask_508 = state_mask + elif field == '542' and esu == 'mean': + m = np.ma.array(state_mask,mask=((state_mask==7) | (state_mask==8) | (state_mask==9))) + state_mask = m.mask + mask_542 = state_mask + elif field == '319' and esu == 'mean': + m = np.ma.array(state_mask,mask=((state_mask==10) | (state_mask==11) | (state_mask==12))) + state_mask = m.mask + mask_319 = state_mask + elif field == '301' and esu == 'mean': + m = np.ma.array(state_mask,mask=((state_mask==13) | (state_mask==14) | (state_mask==15))) + state_mask = m.mask + mask_301 = state_mask + + sm_date, sm_301, sm_319, sm_508, sm_515, sm_542 = get_dataset(dataset_sm,mask_301,mask_319,mask_508,mask_515,mask_542) + + dataset_lai = gdal.Open(os.path.join(path,file_lai)) + lai_date, lai_301, lai_319, lai_508, 
lai_515, lai_542 = get_dataset(dataset_lai,mask_301,mask_319,mask_508,mask_515,mask_542) + + dataset_sr = gdal.Open(os.path.join(path,file_sr)) + sr_date, sr_301, sr_319, sr_508, sr_515, sr_542 = get_dataset(dataset_sr,mask_301,mask_319,mask_508,mask_515,mask_542) + + dataset_lai_prior = gdal.Open(os.path.join(datapath,file_lai_prior)) + lai_prior_date, lai_prior_301, lai_prior_319, lai_prior_508, lai_prior_515, lai_prior_542 = get_dataset(dataset_lai_prior,mask_301,mask_319,mask_508,mask_515,mask_542) + + # dataset_vv = gdal.Open(os.path.join(path,file_vv)) + # vv_date, vv_301, vv_319, vv_508, vv_515, vv_542 = get_dataset(dataset_vv,mask_301,mask_319,mask_508,mask_515,mask_542) + + # dataset_vh = gdal.Open(os.path.join(path,file_vh)) + # vh_date, vh_301, vh_319, vh_508, vh_515, vh_542 = get_dataset(dataset_vh,mask_301,mask_319,mask_508,mask_515,mask_542) + + sm_prior_date, sm_prior_301, sm_prior_319, sm_prior_508, sm_prior_515, sm_prior_542 = get_dataset(dataset_sm_prior,mask,mask,mask,mask,mask) + + dataset_sm_std = gdal.Open(os.path.join(datapath,file_sm_std)) + sm_std_date, sm_std_301, sm_std_319, sm_std_508, sm_std_515, sm_std_542 = get_dataset(dataset_sm_std,mask,mask,mask,mask,mask) + + pathx = '/media/tweiss/Daten/new_data' + field = '508_high' + + df, df_agro, field_data, field_data_orbit, theta_field, sm_field, height_field, lai_field, vwc_field, pol_field = read_data(pathx, file_name, extension, field, path_agro, file_name_agro, extension_agro) + + start = datetime.datetime.combine(sm_field.index.date[0], datetime.datetime.min.time()) + end = datetime.datetime.combine(sm_field.index.date[len(sm_field.index)-1], datetime.datetime.min.time()) + + start2 = np.argwhere(np.array(sm_date)==start)[0][0] + end2 = np.argwhere(np.array(sm_date)==end)[0][0] + + rmse = rmse_prediction(sm_508[start2:end2],sm_field['508_high']['SM']) + rmse_prior = rmse_prediction(sm_prior_508[start2:end2],sm_field['508_high']['SM']) + slope, intercept, r_value, p_value, std_err = 
linregress(sm_508[start2:end2],sm_field['508_high']['SM']) + slope_p, intercept_p, r_value_p, p_value_p, std_err_p = linregress(sm_prior_508[start2:end2],sm_field['508_high']['SM']) + + # plt.title('SM 508, RMSE [Vol/%]:'+str(rmse*100)[0:4]+' R2:'+str(r_value)[0:4]+' RMSE Prior,insitu:'+str(rmse_prior*100)[0:4]) + + + if esu == 'high' or esu == 'mean': + plt.plot(sm_date, sm_508, label='SM retrieved point; RMSE [Vol%]: '+str(rmse*100)[:4]+'; $R^2$: '+str(r_value)[:4], linewidth=2, color='Red') + +plt.plot(sm_field,label='SM field measurement',color='Black', linewidth=3) +plt.plot(sm_prior_date, sm_prior_508, label='SM prior; RMSE [Vol%]: '+str(rmse_prior*100)[:4]+'; $R^2$: '+str(r_value_p)[:4]) + +dstart = datetime.datetime(2017,3,25) +dend = datetime.datetime(2017,7,16) +plt.tick_params(labelsize=12) +plt.xlim(dstart, dend) +plt.legend(prop={'size': 14}) +plt.ylim(0.0,0.7) +plt.grid() +plt.ylabel('Soil Moisture [$m^3/m^3$]', fontsize=15) +plt.xlabel('Date', fontsize=15) +plt.savefig(datapath+'/sm_5082_p2.png') +plt.close() +pdb.set_trace() + + + +# path = '/media/tweiss/Daten/new_data' +# file_name = 'multi10' # theta needs to be changed to for norm multi +# extension = '.csv' + +# path_agro = '/media/nas_data/2017_MNI_campaign/field_data/meteodata/agrarmeteorological_station' +# file_name_agro = 'Eichenried_01012017_31122017_hourly' +# extension_agro = '.csv' + +# field = '508_high' +# field_plot = ['508_high', '508_low', '508_med'] +# pol = 'vv' +# pol = 'vh' + +# # output path +# plot_output_path = '/media/tweiss/Daten/plots/paper/' + +# df, df_agro, field_data, field_data_orbit, theta_field, sm_field, height_field, lai_field, vwc_field, pol_field = read_data(path, file_name, extension, field, path_agro, file_name_agro, extension_agro) + + +# plt.rcParams["figure.figsize"] = (12,10) + +# path = '/media/tweiss/Daten/data_AGU/ucl' + +# datapath = '/media/tweiss/Daten/data_AGU' + +# file_sm = 'MNI_2017_sar_sm.tif' +# file_vv = 'MNI_2017_vv.tif' +# file_vh = 
'MNI_2017_vh.tif' +# file_lai = 'MNI_2017_sar_lai.tif' +# file_sr = 'MNI_2017_sar_sr.tif' +# file_sm_prior = 'sm_prior.tif' +# file_sm_std = 'sm_std.tif' +# file_lai_prior = 'lai.tif' + +# dataset_sm = gdal.Open(os.path.join(path,file_sm)) +# band1 = dataset_sm.GetRasterBand(1) + + +# sm_date, sm_301, sm_319, sm_508, sm_515, sm_542 = get_dataset(dataset_sm,mask_301,mask_319,mask_508,mask_515,mask_542) + +# dataset_lai = gdal.Open(os.path.join(path,file_lai)) +# lai_date, lai_301, lai_319, lai_508, lai_515, lai_542 = get_dataset(dataset_lai,mask_301,mask_319,mask_508,mask_515,mask_542) + +# dataset_sr = gdal.Open(os.path.join(path,file_sr)) +# sr_date, sr_301, sr_319, sr_508, sr_515, sr_542 = get_dataset(dataset_sr,mask_301,mask_319,mask_508,mask_515,mask_542) + +# dataset_lai_prior = gdal.Open(os.path.join(datapath,file_lai_prior)) +# lai_prior_date, lai_prior_301, lai_prior_319, lai_prior_508, lai_prior_515, lai_prior_542 = get_dataset(dataset_lai_prior,mask_301,mask_319,mask_508,mask_515,mask_542) + +# # dataset_vv = gdal.Open(os.path.join(path,file_vv)) +# # vv_date, vv_301, vv_319, vv_508, vv_515, vv_542 = get_dataset(dataset_vv,mask_301,mask_319,mask_508,mask_515,mask_542) + +# # dataset_vh = gdal.Open(os.path.join(path,file_vh)) +# # vh_date, vh_301, vh_319, vh_508, vh_515, vh_542 = get_dataset(dataset_vh,mask_301,mask_319,mask_508,mask_515,mask_542) + + + +# dataset_sm_prior = gdal.Open(os.path.join(datapath,file_sm_prior)) +# band1 = dataset_sm_prior.GetRasterBand(1) +# mask = band1.ReadAsArray() + + +# sm_prior_date, sm_prior_301, sm_prior_319, sm_prior_508, sm_prior_515, sm_prior_542 = get_dataset(dataset_sm_prior,mask,mask,mask,mask,mask) + +# dataset_sm_std = gdal.Open(os.path.join(datapath,file_sm_std)) +# sm_std_date, sm_std_301, sm_std_319, sm_std_508, sm_std_515, sm_std_542 = get_dataset(dataset_sm_std,mask,mask,mask,mask,mask) + +# path = '/media/tweiss/Daten/new_data' +# field = '508_high' + +# df, df_agro, field_data, field_data_orbit, 
theta_field, sm_field, height_field, lai_field, vwc_field, pol_field = read_data(path, file_name, extension, field, path_agro, file_name_agro, extension_agro) + +# start = datetime.datetime.combine(sm_field.index.date[0], datetime.datetime.min.time()) +# end = datetime.datetime.combine(sm_field.index.date[len(sm_field.index)-1], datetime.datetime.min.time()) + +# start2 = np.argwhere(np.array(sm_date)==start)[0][0] +# end2 = np.argwhere(np.array(sm_date)==end)[0][0] + +# rmse = rmse_prediction(sm_508[start2:end2],sm_field['508_high']['SM']) +# rmse_prior = rmse_prediction(sm_prior_508[start2:end2],sm_field['508_high']['SM']) +# slope, intercept, r_value, p_value, std_err = linregress(sm_508[start2:end2],sm_field['508_high']['SM']) +# slope_p, intercept_p, r_value_p, p_value_p, std_err_p = linregress(sm_prior_508[start2:end2],sm_field['508_high']['SM']) + +# # plt.title('SM 508, RMSE [Vol/%]:'+str(rmse*100)[0:4]+' R2:'+str(r_value)[0:4]+' RMSE Prior,insitu:'+str(rmse_prior*100)[0:4]) +# plt.plot(sm_field,label='SM field measurement',color='Black', linewidth=3) +# plt.plot(sm_prior_date, sm_prior_508, label='SM prior; RMSE [Vol%]: '+str(rmse_prior*100)[:4]+'; $R^2$: '+str(r_value_p)[:4]) +# plt.plot(sm_date, sm_508, label='SM retrieved point; RMSE [Vol%]: '+str(rmse*100)[:4]+'; $R^2$: '+str(r_value)[:4], linewidth=2, color='Red') + + + + + +# path = '/media/tweiss/Daten/data_AGU/ucl' + +# datapath = '/media/tweiss/Daten/data_AGU' +# file_sm = 'MNI_2017_sar_sm.tif' + +# dataset_sm = gdal.Open(os.path.join(path,file_sm)) +# band1 = dataset_sm.GetRasterBand(1) +# mask = band1.ReadAsArray() + + +# #field +# field_301 = 0.21249178 +# field_319 = 0.20654242 +# field_508 = 0.23555766 +# field_515 = 0.21090584 +# field_542 = 0.21022798 + + +# # 30m +# field_301 = 0.21076469 +# field_319 = 0.2052274 +# field_508 = 0.20654558 +# field_515 = 0.21090584 +# field_542 = 0.21518409 + + +# mask_301 = band1.ReadAsArray() +# mask_301[mask_301!=field_301] = 0 + +# mask_319 = 
band1.ReadAsArray() +# mask_319[mask_319!=field_319] = 0 +# mask_508 = band1.ReadAsArray() +# mask_508[mask_508!=field_508] = 0 +# mask_515 = band1.ReadAsArray() +# mask_515[mask_515!=field_515] = 0 +# mask_542 = band1.ReadAsArray() +# mask_542[mask_542!=field_542] = 0 + + +sm_date, sm_301, sm_319, sm_508, sm_515, sm_542 = get_dataset(dataset_sm,mask_301,mask_319,mask_508,mask_515,mask_542) + +dataset_lai = gdal.Open(os.path.join(path,file_lai)) +lai_date, lai_301, lai_319, lai_508, lai_515, lai_542 = get_dataset(dataset_lai,mask_301,mask_319,mask_508,mask_515,mask_542) + +dataset_sr = gdal.Open(os.path.join(path,file_sr)) +sr_date, sr_301, sr_319, sr_508, sr_515, sr_542 = get_dataset(dataset_sr,mask_301,mask_319,mask_508,mask_515,mask_542) + +dataset_lai_prior = gdal.Open(os.path.join(datapath,file_lai_prior)) +lai_prior_date, lai_prior_301, lai_prior_319, lai_prior_508, lai_prior_515, lai_prior_542 = get_dataset(dataset_lai_prior,mask_301,mask_319,mask_508,mask_515,mask_542) + +rmse = rmse_prediction(sm_508[start2:end2],sm_field['508_high']['SM']) +rmse_prior = rmse_prediction(sm_prior_508[start2:end2],sm_field['508_high']['SM']) +slope, intercept, r_value, p_value, std_err = linregress(sm_508[start2:end2],sm_field['508_high']['SM']) + + +plt.plot(sm_date, sm_508, label='SM retrieved 30m; RMSE [Vol%]: '+str(rmse*100)[:4]+'; $R^2$: '+str(r_value)[:4], linewidth=2, color='Orange') + + + +# path = '/media/tweiss/Daten/data_AGU/ucl' + +# datapath = '/media/tweiss/Daten/data_AGU' +# file_sm = 'MNI_2017_sar_sm.tif' + +# dataset_sm = gdal.Open(os.path.join(path,file_sm)) +# band1 = dataset_sm.GetRasterBand(1) +# # mask = band1.ReadAsArray() + + +# #field +# field_301 = 0.21249178 +# field_319 = 0.20654242 +# field_508 = 0.23555766 +# field_515 = 0.21090584 +# field_542 = 0.21022798 + + + +# mask_301 = band1.ReadAsArray() +# mask_301[mask_301!=field_301] = 0 + +# mask_319 = band1.ReadAsArray() +# mask_319[mask_319!=field_319] = 0 +# mask_508 = band1.ReadAsArray() +# 
mask_508[mask_508!=field_508] = 0 +# mask_515 = band1.ReadAsArray() +# mask_515[mask_515!=field_515] = 0 +# mask_542 = band1.ReadAsArray() +# mask_542[mask_542!=field_542] = 0 + + +sm_date, sm_301, sm_319, sm_508, sm_515, sm_542 = get_dataset(dataset_sm,mask_301,mask_319,mask_508,mask_515,mask_542) + +dataset_lai = gdal.Open(os.path.join(path,file_lai)) +lai_date, lai_301, lai_319, lai_508, lai_515, lai_542 = get_dataset(dataset_lai,mask_301,mask_319,mask_508,mask_515,mask_542) + +dataset_sr = gdal.Open(os.path.join(path,file_sr)) +sr_date, sr_301, sr_319, sr_508, sr_515, sr_542 = get_dataset(dataset_sr,mask_301,mask_319,mask_508,mask_515,mask_542) + +dataset_lai_prior = gdal.Open(os.path.join(datapath,file_lai_prior)) +lai_prior_date, lai_prior_301, lai_prior_319, lai_prior_508, lai_prior_515, lai_prior_542 = get_dataset(dataset_lai_prior,mask_301,mask_319,mask_508,mask_515,mask_542) + +rmse = rmse_prediction(sm_508[start2:end2],sm_field['508_high']['SM']) +rmse_prior = rmse_prediction(sm_prior_508[start2:end2],sm_field['508_high']['SM']) +slope, intercept, r_value, p_value, std_err = linregress(sm_508[start2:end2],sm_field['508_high']['SM']) + +plt.plot(sm_date, sm_508, label='SM retrieved field; RMSE [Vol%]: '+str(rmse*100)[:4]+'; $R^2$: '+str(r_value)[:4], linewidth=2, color='Green') + + + + + + + + + + + + +dstart = datetime.datetime(2017,3,25) +dend = datetime.datetime(2017,7,16) +plt.tick_params(labelsize=12) +plt.xlim(dstart, dend) +plt.legend(prop={'size': 14}) +plt.ylim(0.1,0.4) +plt.grid() +plt.ylabel('Soil Moisture [$m^3/m^3$]', fontsize=15) +plt.xlabel('Date', fontsize=15) +plt.savefig(datapath+'/sm_5082_p2.png') +plt.close() +pdb.set_trace() + + + + + + + + + + + + +### soil moisture time + + +path = '/media/tweiss/Work/Jose/new_backscatter/field' + +file_sm = 'MNI_2017_sar_sm.tif' + +dataset_sm = gdal.Open(os.path.join(path,file_sm)) + +for x in range(1, dataset_sm.RasterCount + 1): + band = dataset_sm.GetRasterBand(x) + array = band.ReadAsArray() + 
datum = datetime.datetime.strptime(band.GetMetadata()['date'], '%Y-%m-%d') + plt.imshow(array, cmap='Blues', vmin=0.0, vmax=0.4) + #legend + cbar = plt.colorbar() + cbar.set_label('Soil Moisture [$m^3/m^3$]', rotation=270, labelpad=20) + plt.text(450,40,datum.date()) + plt.savefig(path+'/gif/sm_'+str(datum.date())+'.png') + plt.close() + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +path = '/media/tweiss/Daten/new_data' +file_name = 'multi10' # theta needs to be changed to for norm multi +extension = '.csv' + +path_agro = '/media/nas_data/2017_MNI_campaign/field_data/meteodata/agrarmeteorological_station' +file_name_agro = 'Eichenried_01012017_31122017_hourly' +extension_agro = '.csv' + +field = '508_high' +field_plot = ['508_high', '508_low', '508_med'] +pol = 'vv' +pol = 'vh' + +# output path +plot_output_path = '/media/tweiss/Daten/plots/paper/' + +df, df_agro, field_data, field_data_orbit, theta_field, sm_field, height_field, lai_field, vwc_field, pol_field = read_data(path, file_name, extension, field, path_agro, file_name_agro, extension_agro) + + +plt.rcParams["figure.figsize"] = (12,10) + +path = '/home/tweiss/Desktop/LRZ Sync+Share/Jose/new_backscatter/point' + +datapath = '/media/tweiss/Daten/test_kaska/data' +file_sm = 'MNI_2017_sar_sm.tif' + +dataset_sm = gdal.Open(os.path.join(path,file_sm)) +band1 = dataset_sm.GetRasterBand(1) +mask = band1.ReadAsArray() + + +#field +field_301 = 0.21249178 +field_319 = 0.20654242 +field_508 = 0.23555766 +field_515 = 0.21090584 +field_542 = 0.21022798 + + +# 30m +field_301 = 0.21076469 +field_319 = 0.2052274 +field_508 = 0.20654558 +field_515 = 0.21090584 +field_542 = 0.21518409 + +# 1m +field_301 = 0.21167806 +field_319 = 0.20519826 +field_508 = 0.22907102 +field_515 = 0.2043577 +field_542 = 0.21640626 + + +mask_301 = band1.ReadAsArray() +mask_301[mask_301!=field_301] = 0 +mask_319 = band1.ReadAsArray() +mask_319[mask_319!=field_319] = 0 +mask_508 = 
band1.ReadAsArray() +mask_508[mask_508!=field_508] = 0 +mask_515 = band1.ReadAsArray() +mask_515[mask_515!=field_515] = 0 +mask_542 = band1.ReadAsArray() +mask_542[mask_542!=field_542] = 0 + + +file_sm = 'MNI_2017_sar_sm.tif' +file_vv = 'MNI_2017_vv.tif' +file_vh = 'MNI_2017_vh.tif' +file_lai = 'MNI_2017_sar_lai.tif' +file_sr = 'MNI_2017_sar_sr.tif' +file_sm_prior = 'sm_prior.tif' +file_sm_std = 'sm_std.tif' +file_lai_prior = 'lai.tif' + + +sm_date, sm_301, sm_319, sm_508, sm_515, sm_542 = get_dataset(dataset_sm,mask_301,mask_319,mask_508,mask_515,mask_542) + +dataset_lai = gdal.Open(os.path.join(path,file_lai)) +lai_date, lai_301, lai_319, lai_508, lai_515, lai_542 = get_dataset(dataset_lai,mask_301,mask_319,mask_508,mask_515,mask_542) + +dataset_sr = gdal.Open(os.path.join(path,file_sr)) +sr_date, sr_301, sr_319, sr_508, sr_515, sr_542 = get_dataset(dataset_sr,mask_301,mask_319,mask_508,mask_515,mask_542) + +dataset_lai_prior = gdal.Open(os.path.join(datapath,file_lai_prior)) +lai_prior_date, lai_prior_301, lai_prior_319, lai_prior_508, lai_prior_515, lai_prior_542 = get_dataset(dataset_lai_prior,mask_301,mask_319,mask_508,mask_515,mask_542) + +# dataset_vv = gdal.Open(os.path.join(path,file_vv)) +# vv_date, vv_301, vv_319, vv_508, vv_515, vv_542 = get_dataset(dataset_vv,mask_301,mask_319,mask_508,mask_515,mask_542) + +# dataset_vh = gdal.Open(os.path.join(path,file_vh)) +# vh_date, vh_301, vh_319, vh_508, vh_515, vh_542 = get_dataset(dataset_vh,mask_301,mask_319,mask_508,mask_515,mask_542) + + + +dataset_sm_prior = gdal.Open(os.path.join(datapath,file_sm_prior)) +band1 = dataset_sm_prior.GetRasterBand(1) +mask = band1.ReadAsArray() + + +sm_prior_date, sm_prior_301, sm_prior_319, sm_prior_508, sm_prior_515, sm_prior_542 = get_dataset(dataset_sm_prior,mask,mask,mask,mask,mask) + +dataset_sm_std = gdal.Open(os.path.join(datapath,file_sm_std)) +sm_std_date, sm_std_301, sm_std_319, sm_std_508, sm_std_515, sm_std_542 = 
get_dataset(dataset_sm_std,mask,mask,mask,mask,mask) + +path = '/media/tweiss/Daten/new_data' +field = '508_high' + +df, df_agro, field_data, field_data_orbit, theta_field, sm_field, height_field, lai_field, vwc_field, pol_field = read_data(path, file_name, extension, field, path_agro, file_name_agro, extension_agro) + +start = datetime.datetime.combine(sm_field.index.date[0], datetime.datetime.min.time()) +end = datetime.datetime.combine(sm_field.index.date[len(sm_field.index)-1], datetime.datetime.min.time()) + +start2 = np.argwhere(np.array(sm_date)==start)[0][0] +end2 = np.argwhere(np.array(sm_date)==end)[0][0] + +rmse = rmse_prediction(sm_508[start2:end2],sm_field['508_high']['SM']) +rmse_prior = rmse_prediction(sm_prior_508[start2:end2],sm_field['508_high']['SM']) +slope, intercept, r_value, p_value, std_err = linregress(sm_508[start2:end2],sm_field['508_high']['SM']) +slope_p, intercept_p, r_value_p, p_value_p, std_err_p = linregress(sm_prior_508[start2:end2],sm_field['508_high']['SM']) + +# plt.title('SM 508, RMSE [Vol/%]:'+str(rmse*100)[0:4]+' R2:'+str(r_value)[0:4]+' RMSE Prior,insitu:'+str(rmse_prior*100)[0:4]) +plt.plot(sm_field,label='SM field measurement',color='Black', linewidth=3) +plt.plot(sm_prior_date, sm_prior_508, label='SM prior; RMSE [Vol%]: '+str(rmse_prior*100)[:4]+'; $R^2$: '+str(r_value_p)[:4]) +# plt.plot(sm_date, sm_508, label='SM retrieved point; RMSE [Vol%]: '+str(rmse*100)[:4]+'; $R^2$: '+str(r_value)[:4], linewidth=2, color='Red') + + + + + +path = '/home/tweiss/Desktop/LRZ Sync+Share/Jose/new_backscatter/30m' + +datapath = '/media/tweiss/Daten/test_kaska/data' +file_sm = 'MNI_2017_sar_sm.tif' + +dataset_sm = gdal.Open(os.path.join(path,file_sm)) +band1 = dataset_sm.GetRasterBand(1) +mask = band1.ReadAsArray() + + +#field +field_301 = 0.21249178 +field_319 = 0.20654242 +field_508 = 0.23555766 +field_515 = 0.21090584 +field_542 = 0.21022798 + + +# 30m +field_301 = 0.21076469 +field_319 = 0.2052274 +field_508 = 0.20654558 +field_515 = 
0.21090584 +field_542 = 0.21518409 + + +mask_301 = band1.ReadAsArray() +mask_301[mask_301!=field_301] = 0 + +mask_319 = band1.ReadAsArray() +mask_319[mask_319!=field_319] = 0 +mask_508 = band1.ReadAsArray() +mask_508[mask_508!=field_508] = 0 +mask_515 = band1.ReadAsArray() +mask_515[mask_515!=field_515] = 0 +mask_542 = band1.ReadAsArray() +mask_542[mask_542!=field_542] = 0 + + +sm_date, sm_301, sm_319, sm_508, sm_515, sm_542 = get_dataset(dataset_sm,mask_301,mask_319,mask_508,mask_515,mask_542) + +dataset_lai = gdal.Open(os.path.join(path,file_lai)) +lai_date, lai_301, lai_319, lai_508, lai_515, lai_542 = get_dataset(dataset_lai,mask_301,mask_319,mask_508,mask_515,mask_542) + +dataset_sr = gdal.Open(os.path.join(path,file_sr)) +sr_date, sr_301, sr_319, sr_508, sr_515, sr_542 = get_dataset(dataset_sr,mask_301,mask_319,mask_508,mask_515,mask_542) + +dataset_lai_prior = gdal.Open(os.path.join(datapath,file_lai_prior)) +lai_prior_date, lai_prior_301, lai_prior_319, lai_prior_508, lai_prior_515, lai_prior_542 = get_dataset(dataset_lai_prior,mask_301,mask_319,mask_508,mask_515,mask_542) + +rmse = rmse_prediction(sm_508[start2:end2],sm_field['508_high']['SM']) +rmse_prior = rmse_prediction(sm_prior_508[start2:end2],sm_field['508_high']['SM']) +slope, intercept, r_value, p_value, std_err = linregress(sm_508[start2:end2],sm_field['508_high']['SM']) + + +plt.plot(sm_date, sm_508, label='SM retrieved 30m; RMSE [Vol%]: '+str(rmse*100)[:4]+'; $R^2$: '+str(r_value)[:4], linewidth=2, color='Orange') + + + +path = '/home/tweiss/Desktop/LRZ Sync+Share/Jose/new_backscatter/field' + +datapath = '/media/tweiss/Daten/test_kaska/data' +file_sm = 'MNI_2017_sar_sm.tif' + +dataset_sm = gdal.Open(os.path.join(path,file_sm)) +band1 = dataset_sm.GetRasterBand(1) +mask = band1.ReadAsArray() + + +#field +field_301 = 0.21249178 +field_319 = 0.20654242 +field_508 = 0.23555766 +field_515 = 0.21090584 +field_542 = 0.21022798 + + + +mask_301 = band1.ReadAsArray() +mask_301[mask_301!=field_301] = 0 + 
+mask_319 = band1.ReadAsArray() +mask_319[mask_319!=field_319] = 0 +mask_508 = band1.ReadAsArray() +mask_508[mask_508!=field_508] = 0 +mask_515 = band1.ReadAsArray() +mask_515[mask_515!=field_515] = 0 +mask_542 = band1.ReadAsArray() +mask_542[mask_542!=field_542] = 0 + + +sm_date, sm_301, sm_319, sm_508, sm_515, sm_542 = get_dataset(dataset_sm,mask_301,mask_319,mask_508,mask_515,mask_542) + +dataset_lai = gdal.Open(os.path.join(path,file_lai)) +lai_date, lai_301, lai_319, lai_508, lai_515, lai_542 = get_dataset(dataset_lai,mask_301,mask_319,mask_508,mask_515,mask_542) + +dataset_sr = gdal.Open(os.path.join(path,file_sr)) +sr_date, sr_301, sr_319, sr_508, sr_515, sr_542 = get_dataset(dataset_sr,mask_301,mask_319,mask_508,mask_515,mask_542) + +dataset_lai_prior = gdal.Open(os.path.join(datapath,file_lai_prior)) +lai_prior_date, lai_prior_301, lai_prior_319, lai_prior_508, lai_prior_515, lai_prior_542 = get_dataset(dataset_lai_prior,mask_301,mask_319,mask_508,mask_515,mask_542) + +rmse = rmse_prediction(sm_508[start2:end2],sm_field['508_high']['SM']) +rmse_prior = rmse_prediction(sm_prior_508[start2:end2],sm_field['508_high']['SM']) +slope, intercept, r_value, p_value, std_err = linregress(sm_508[start2:end2],sm_field['508_high']['SM']) + +plt.plot(sm_date, sm_508, label='SM retrieved field; RMSE [Vol%]: '+str(rmse*100)[:4]+'; $R^2$: '+str(r_value)[:4], linewidth=2, color='Green') + + + + + + + + + + + + +dstart = datetime.datetime(2017,3,25) +dend = datetime.datetime(2017,7,16) +plt.tick_params(labelsize=12) +plt.xlim(dstart, dend) +plt.legend(prop={'size': 14}) +plt.ylim(0.1,0.4) +plt.grid() +plt.ylabel('Soil Moisture [$m^3/m^3$]', fontsize=15) +plt.xlabel('Date', fontsize=15) +plt.savefig(datapath+'/sm_5082_p2.png') +plt.close() + + +path = '/home/tweiss/Desktop/LRZ Sync+Share/Jose/new_backscatter/point' + +datapath = '/media/tweiss/Daten/test_kaska/data' +file_sm = 'MNI_2017_sar_sm.tif' + +dataset_sm = gdal.Open(os.path.join(path,file_sm)) +band1 = 
dataset_sm.GetRasterBand(1) +mask = band1.ReadAsArray() + + +#field +field_301 = 0.21249178 +field_319 = 0.20654242 +field_508 = 0.23555766 +field_515 = 0.21090584 +field_542 = 0.21022798 + + +# 30m +field_301 = 0.21076469 +field_319 = 0.2052274 +field_508 = 0.20654558 +field_515 = 0.21090584 +field_542 = 0.21518409 + +# 1m +field_301 = 0.21167806 +field_319 = 0.20519826 +field_508 = 0.22907102 +field_515 = 0.2043577 +field_542 = 0.21640626 + + +mask_301 = band1.ReadAsArray() +mask_301[mask_301!=field_301] = 0 +mask_319 = band1.ReadAsArray() +mask_319[mask_319!=field_319] = 0 +mask_508 = band1.ReadAsArray() +mask_508[mask_508!=field_508] = 0 +mask_515 = band1.ReadAsArray() +mask_515[mask_515!=field_515] = 0 +mask_542 = band1.ReadAsArray() +mask_542[mask_542!=field_542] = 0 + + +file_sm = 'MNI_2017_sar_sm.tif' +file_vv = 'MNI_2017_vv.tif' +file_vh = 'MNI_2017_vh.tif' +file_lai = 'MNI_2017_sar_lai.tif' +file_sr = 'MNI_2017_sar_sr.tif' +file_sm_prior = 'sm_prior.tif' +file_sm_std = 'sm_std.tif' +file_lai_prior = 'lai.tif' + + +sm_date, sm_301, sm_319, sm_508, sm_515, sm_542 = get_dataset(dataset_sm,mask_301,mask_319,mask_508,mask_515,mask_542) + +dataset_lai = gdal.Open(os.path.join(path,file_lai)) +lai_date, lai_301, lai_319, lai_508, lai_515, lai_542 = get_dataset(dataset_lai,mask_301,mask_319,mask_508,mask_515,mask_542) + +dataset_sr = gdal.Open(os.path.join(path,file_sr)) +sr_date, sr_301, sr_319, sr_508, sr_515, sr_542 = get_dataset(dataset_sr,mask_301,mask_319,mask_508,mask_515,mask_542) + +dataset_lai_prior = gdal.Open(os.path.join(datapath,file_lai_prior)) +lai_prior_date, lai_prior_301, lai_prior_319, lai_prior_508, lai_prior_515, lai_prior_542 = get_dataset(dataset_lai_prior,mask_301,mask_319,mask_508,mask_515,mask_542) + +# dataset_vv = gdal.Open(os.path.join(path,file_vv)) +# vv_date, vv_301, vv_319, vv_508, vv_515, vv_542 = get_dataset(dataset_vv,mask_301,mask_319,mask_508,mask_515,mask_542) + +# dataset_vh = gdal.Open(os.path.join(path,file_vh)) +# 
vh_date, vh_301, vh_319, vh_508, vh_515, vh_542 = get_dataset(dataset_vh,mask_301,mask_319,mask_508,mask_515,mask_542) + + + +dataset_sm_prior = gdal.Open(os.path.join(datapath,file_sm_prior)) +band1 = dataset_sm_prior.GetRasterBand(1) +mask = band1.ReadAsArray() + + +sm_prior_date, sm_prior_301, sm_prior_319, sm_prior_508, sm_prior_515, sm_prior_542 = get_dataset(dataset_sm_prior,mask,mask,mask,mask,mask) + +dataset_sm_std = gdal.Open(os.path.join(datapath,file_sm_std)) +sm_std_date, sm_std_301, sm_std_319, sm_std_508, sm_std_515, sm_std_542 = get_dataset(dataset_sm_std,mask,mask,mask,mask,mask) + + + + + +path = '/media/tweiss/Daten/new_data' +field = '301_high' + +df, df_agro, field_data, field_data_orbit, theta_field, sm_field, height_field, lai_field, vwc_field, pol_field = read_data(path, file_name, extension, field, path_agro, file_name_agro, extension_agro) + +start = datetime.datetime.combine(sm_field.index.date[0], datetime.datetime.min.time()) +end = datetime.datetime.combine(sm_field.index.date[len(sm_field.index)-1], datetime.datetime.min.time()) + +start2 = np.argwhere(np.array(sm_date)==start)[0][0] +end2 = np.argwhere(np.array(sm_date)==end)[0][0] + +sm_301[start2:end2] + +rmse = rmse_prediction(sm_301[start2:end2],sm_field['301_high']['SM']) +rmse_prior = rmse_prediction(sm_prior_301[start2:end2],sm_field['301_high']['SM']) +slope, intercept, r_value, p_value, std_err = linregress(sm_301[start2:end2],sm_field['301_high']['SM']) +slope_p, intercept_p, r_value_p, p_value_p, std_err_p = linregress(sm_prior_301[start2:end2],sm_field['301_high']['SM']) + +# plt.title('SM 301, RMSE [Vol/%]: '+str(rmse*100)[0:4]+' R2: '+str(r_value)[0:4]+' RMSE Prior,insitu:'+str(rmse_prior*100)[0:4]) +plt.plot(sm_field,label='SM field measurement', color='Black', linewidth=3) +plt.plot(sm_prior_date, sm_prior_301, label='SM prior; RMSE [Vol%]: '+str(rmse_prior*100)[:4]+'; $R^2$: '+str(r_value_p)[:4]) +# plt.plot(sm_date, sm_301, label='SM retrieved point; RMSE [Vol%]: 
'+str(rmse*100)[:4]+'; $R^2$: '+str(r_value)[:4], linewidth=2, color='Red') + + + + +path = '/home/tweiss/Desktop/LRZ Sync+Share/Jose/new_backscatter/30m' + +datapath = '/media/tweiss/Daten/test_kaska/data' +file_sm = 'MNI_2017_sar_sm.tif' + +dataset_sm = gdal.Open(os.path.join(path,file_sm)) +band1 = dataset_sm.GetRasterBand(1) +mask = band1.ReadAsArray() + + +#field +field_301 = 0.21249178 +field_319 = 0.20654242 +field_508 = 0.23555766 +field_515 = 0.21090584 +field_542 = 0.21022798 + + +# 30m +field_301 = 0.21076469 +field_319 = 0.2052274 +field_508 = 0.20654558 +field_515 = 0.21090584 +field_542 = 0.21518409 + + +mask_301 = band1.ReadAsArray() +mask_301[mask_301!=field_301] = 0 + +mask_319 = band1.ReadAsArray() +mask_319[mask_319!=field_319] = 0 +mask_508 = band1.ReadAsArray() +mask_508[mask_508!=field_508] = 0 +mask_515 = band1.ReadAsArray() +mask_515[mask_515!=field_515] = 0 +mask_542 = band1.ReadAsArray() +mask_542[mask_542!=field_542] = 0 + + +sm_date, sm_301, sm_319, sm_508, sm_515, sm_542 = get_dataset(dataset_sm,mask_301,mask_319,mask_508,mask_515,mask_542) + +dataset_lai = gdal.Open(os.path.join(path,file_lai)) +lai_date, lai_301, lai_319, lai_508, lai_515, lai_542 = get_dataset(dataset_lai,mask_301,mask_319,mask_508,mask_515,mask_542) + +dataset_sr = gdal.Open(os.path.join(path,file_sr)) +sr_date, sr_301, sr_319, sr_508, sr_515, sr_542 = get_dataset(dataset_sr,mask_301,mask_319,mask_508,mask_515,mask_542) + +dataset_lai_prior = gdal.Open(os.path.join(datapath,file_lai_prior)) +lai_prior_date, lai_prior_301, lai_prior_319, lai_prior_508, lai_prior_515, lai_prior_542 = get_dataset(dataset_lai_prior,mask_301,mask_319,mask_508,mask_515,mask_542) + + +rmse = rmse_prediction(sm_301[start2:end2],sm_field['301_high']['SM']) +rmse_prior = rmse_prediction(sm_prior_301[start2:end2],sm_field['301_high']['SM']) +slope, intercept, r_value, p_value, std_err = linregress(sm_301[start2:end2],sm_field['301_high']['SM']) + +plt.plot(sm_date, sm_301, label='SM retrieved 
30m; RMSE [Vol%]: '+str(rmse*100)[:4]+'; $R^2$: '+str(r_value)[:4], linewidth=2, color='Orange') + + +path = '/home/tweiss/Desktop/LRZ Sync+Share/Jose/new_backscatter/field' + +datapath = '/media/tweiss/Daten/test_kaska/data' +file_sm = 'MNI_2017_sar_sm.tif' + +dataset_sm = gdal.Open(os.path.join(path,file_sm)) +band1 = dataset_sm.GetRasterBand(1) +mask = band1.ReadAsArray() + + +#field +field_301 = 0.21249178 +field_319 = 0.20654242 +field_508 = 0.23555766 +field_515 = 0.21090584 +field_542 = 0.21022798 + + + +mask_301 = band1.ReadAsArray() +mask_301[mask_301!=field_301] = 0 + +mask_319 = band1.ReadAsArray() +mask_319[mask_319!=field_319] = 0 +mask_508 = band1.ReadAsArray() +mask_508[mask_508!=field_508] = 0 +mask_515 = band1.ReadAsArray() +mask_515[mask_515!=field_515] = 0 +mask_542 = band1.ReadAsArray() +mask_542[mask_542!=field_542] = 0 + + +sm_date, sm_301, sm_319, sm_508, sm_515, sm_542 = get_dataset(dataset_sm,mask_301,mask_319,mask_508,mask_515,mask_542) + +dataset_lai = gdal.Open(os.path.join(path,file_lai)) +lai_date, lai_301, lai_319, lai_508, lai_515, lai_542 = get_dataset(dataset_lai,mask_301,mask_319,mask_508,mask_515,mask_542) + +dataset_sr = gdal.Open(os.path.join(path,file_sr)) +sr_date, sr_301, sr_319, sr_508, sr_515, sr_542 = get_dataset(dataset_sr,mask_301,mask_319,mask_508,mask_515,mask_542) + +dataset_lai_prior = gdal.Open(os.path.join(datapath,file_lai_prior)) +lai_prior_date, lai_prior_301, lai_prior_319, lai_prior_508, lai_prior_515, lai_prior_542 = get_dataset(dataset_lai_prior,mask_301,mask_319,mask_508,mask_515,mask_542) + +rmse = rmse_prediction(sm_301[start2:end2],sm_field['301_high']['SM']) +rmse_prior = rmse_prediction(sm_prior_301[start2:end2],sm_field['301_high']['SM']) +slope, intercept, r_value, p_value, std_err = linregress(sm_301[start2:end2],sm_field['301_high']['SM']) + + +plt.plot(sm_date, sm_301, label='SM retrieved field; RMSE [Vol%]: '+str(rmse*100)[:4]+'; $R^2$: '+str(r_value)[:4], linewidth=2, color='Green') + +dstart = 
datetime.datetime(2017,3,25) +dend = datetime.datetime(2017,7,16) +plt.tick_params(labelsize=12) +plt.xlim(dstart, dend) +plt.legend(prop={'size': 14}) +plt.ylim(0.1,0.3) +plt.grid() +plt.ylabel('Soil Moisture [$m^3/m^3$]', fontsize=15) +plt.xlabel('Date', fontsize=15) +plt.savefig(datapath+'/sm_301_field2.png') +plt.close() + + + + + + + + + + + + +path = '/media/tweiss/Daten/new_data' +field = '515_med' + +df, df_agro, field_data, field_data_orbit, theta_field, sm_field, height_field, lai_field, vwc_field, pol_field = read_data(path, file_name, extension, field, path_agro, file_name_agro, extension_agro) + +start1 = datetime.datetime.combine(sm_prior_date[0], datetime.datetime.min.time()) +end1 = datetime.datetime.combine(sm_prior_date[len(sm_prior_date)-1], datetime.datetime.min.time()) +start = datetime.datetime.combine(sm_field.index.date[0], datetime.datetime.min.time()) +end = datetime.datetime.combine(sm_field.index.date[len(sm_field.index)-1], datetime.datetime.min.time()) + +start2 = np.argwhere(np.array(sm_date)==start)[0][0] +end2 = np.argwhere(np.array(sm_date)==end)[0][0] + +sm_515[start2:end2] + +rmse = rmse_prediction(sm_515[start2:end2],sm_field['515_med']['SM'][:-1]) +rmse_prior = rmse_prediction(sm_prior_515[start2:end2],sm_field['515_med']['SM'][:-1]) +slope, intercept, r_value, p_value, std_err = linregress(sm_515[start2:end2],sm_field['515_med']['SM'][:-1]) +slope_p, intercept_p, r_value_p, p_value_p, std_err_p = linregress(sm_prior_515[start2:end2],sm_field['515_med']['SM'][:-1]) + +# plt.title('SM 515, RMSE [Vol/%]:'+str(rmse*100)[0:4]+' R2:'+str(r_value)[0:4]+' RMSE Prior,insitu:'+str(rmse_prior*100)[0:4]) +plt.plot(sm_field,label='SM field measurement') +plt.plot(sm_prior_date, sm_prior_515, label='SM prior; RMSE [Vol%]: '+str(rmse_prior*100)[:4]+'; $R^2$: '+str(r_value_p)[:4]) +plt.plot(sm_date, sm_515, label='SM retrieved field; RMSE [Vol%]: '+str(rmse*100)[:4]+'; $R^2$: '+str(r_value)[:4], linewidth=2, color='Green') 
+plt.legend(prop={'size': 14}) +plt.grid() + + +plt.xlim(start1, end1) +plt.ylim(0.1, 0.35) +plt.ylabel('Soil Moisture [$m^3/m^3$]', fontsize=15) +plt.xlabel('Date', fontsize=15) +plt.tick_params(labelsize=15) +# plt.gca().xaxis.set_major_locator(dates.DayLocator()) +plt.gca().xaxis.set_major_formatter(dates.DateFormatter('%Y-%m-%d')) +plt.tight_layout() +plt.savefig(datapath+'/sm_515_field.png',dpi=600) +plt.close() + + +path = '/media/tweiss/Daten/new_data' +field = '508_high' + +df, df_agro, field_data, field_data_orbit, theta_field, sm_field, height_field, lai_field, vwc_field, pol_field = read_data(path, file_name, extension, field, path_agro, file_name_agro, extension_agro) + +start1 = datetime.datetime.combine(sm_prior_date[0], datetime.datetime.min.time()) +end1 = datetime.datetime.combine(sm_prior_date[len(sm_prior_date)-1], datetime.datetime.min.time()) +start = datetime.datetime.combine(sm_field.index.date[0], datetime.datetime.min.time()) +end = datetime.datetime.combine(sm_field.index.date[len(sm_field.index)-1], datetime.datetime.min.time()) + +start2 = np.argwhere(np.array(sm_date)==start)[0][0] +end2 = np.argwhere(np.array(sm_date)==end)[0][0] + +sm_508[start2:end2] + +rmse = rmse_prediction(sm_508[start2:end2],sm_field['508_high']['SM'][:]) +rmse_prior = rmse_prediction(sm_prior_508[start2:end2],sm_field['508_high']['SM'][:]) +slope, intercept, r_value, p_value, std_err = linregress(sm_508[start2:end2],sm_field['508_high']['SM'][:]) +slope_p, intercept_p, r_value_p, p_value_p, std_err_p = linregress(sm_prior_508[start2:end2],sm_field['508_high']['SM'][:]) + +# plt.title('SM 508, RMSE [Vol/%]:'+str(rmse*100)[0:4]+' R2:'+str(r_value)[0:4]+' RMSE Prior,insitu:'+str(rmse_prior*100)[0:4]) +plt.plot(sm_field,label='SM field measurement') +plt.plot(sm_prior_date, sm_prior_508, label='SM prior; RMSE [Vol%]: '+str(rmse_prior*100)[:4]+'; $R^2$: '+str(r_value_p)[:4]) +plt.plot(sm_date, sm_508, label='SM retrieved field; RMSE [Vol%]: '+str(rmse*100)[:4]+'; 
$R^2$: '+str(r_value)[:4], linewidth=2, color='Green') +plt.legend(prop={'size': 14}) +plt.grid() + +plt.xlim(start1, end1) +plt.ylim(0.1, 0.35) +plt.ylabel('Soil Moisture [$m^3/m^3$]', fontsize=15) +plt.xlabel('Date', fontsize=15) +plt.tick_params(labelsize=15) +plt.gca().xaxis.set_major_formatter(dates.DateFormatter('%Y-%m-%d')) +plt.tight_layout() +plt.savefig(datapath+'/sm_508_field.png',dpi=600) +plt.close() + + +path = '/media/tweiss/Daten/new_data' +field = '542_high' + +df, df_agro, field_data, field_data_orbit, theta_field, sm_field, height_field, lai_field, vwc_field, pol_field = read_data(path, file_name, extension, field, path_agro, file_name_agro, extension_agro) + +start1 = datetime.datetime.combine(sm_prior_date[0], datetime.datetime.min.time()) +end1 = datetime.datetime.combine(sm_prior_date[len(sm_prior_date)-1], datetime.datetime.min.time()) +start = datetime.datetime.combine(sm_field.index.date[0], datetime.datetime.min.time()) +end = datetime.datetime.combine(sm_field.index.date[len(sm_field.index)-1], datetime.datetime.min.time()) +start2 = np.argwhere(np.array(sm_date)==start)[0][0] +end2 = np.argwhere(np.array(sm_date)==end)[0][0] + +sm_542[start2:end2] + +rmse = rmse_prediction(sm_542[start2:end2],sm_field['542_high']['SM'][:]) +rmse_prior = rmse_prediction(sm_prior_542[start2:end2],sm_field['542_high']['SM'][:]) +slope, intercept, r_value, p_value, std_err = linregress(sm_542[start2:end2],sm_field['542_high']['SM'][:]) +slope_p, intercept_p, r_value_p, p_value_p, std_err_p = linregress(sm_prior_542[start2:end2],sm_field['542_high']['SM'][:]) + +# plt.title('SM 542, RMSE [Vol/%]:'+str(rmse*100)[0:4]+' R2:'+str(r_value)[0:4]+' RMSE Prior,insitu:'+str(rmse_prior*100)[0:4]) +plt.plot(sm_field,label='SM field measurement') +plt.plot(sm_prior_date, sm_prior_542, label='SM prior; RMSE [Vol%]: '+str(rmse_prior*100)[:4]+'; $R^2$: '+str(r_value_p)[:4]) +plt.plot(sm_date, sm_542, label='SM retrieved field; RMSE [Vol%]: '+str(rmse*100)[:4]+'; $R^2$: 
'+str(r_value)[:4], linewidth=2, color='Green') +plt.legend(prop={'size': 14}) +plt.grid() + +plt.xlim(start1, end1) +plt.ylim(0.1, 0.35) +plt.ylabel('Soil Moisture [$m^3/m^3$]', fontsize=15) +plt.xlabel('Date', fontsize=15) +plt.tick_params(labelsize=15) +plt.gca().xaxis.set_major_formatter(dates.DateFormatter('%Y-%m-%d')) +plt.tight_layout() +plt.savefig(datapath+'/sm_542_field.png',dpi=600) +plt.close() + +path = '/media/tweiss/Daten/new_data' +field = '301_high' + +df, df_agro, field_data, field_data_orbit, theta_field, sm_field, height_field, lai_field, vwc_field, pol_field = read_data(path, file_name, extension, field, path_agro, file_name_agro, extension_agro) + +start1 = datetime.datetime.combine(sm_prior_date[0], datetime.datetime.min.time()) +end1 = datetime.datetime.combine(sm_prior_date[len(sm_prior_date)-1], datetime.datetime.min.time()) +start = datetime.datetime.combine(sm_field.index.date[0], datetime.datetime.min.time()) +end = datetime.datetime.combine(sm_field.index.date[len(sm_field.index)-1], datetime.datetime.min.time()) +start2 = np.argwhere(np.array(sm_date)==start)[0][0] +end2 = np.argwhere(np.array(sm_date)==end)[0][0] + +sm_301[start2:end2] + +rmse = rmse_prediction(sm_301[start2:end2],sm_field['301_high']['SM'][:]) +rmse_prior = rmse_prediction(sm_prior_301[start2:end2],sm_field['301_high']['SM'][:]) +slope, intercept, r_value, p_value, std_err = linregress(sm_301[start2:end2],sm_field['301_high']['SM'][:]) +slope_p, intercept_p, r_value_p, p_value_p, std_err_p = linregress(sm_prior_301[start2:end2],sm_field['301_high']['SM'][:]) + +# plt.title('SM 301, RMSE [Vol/%]:'+str(rmse*100)[0:4]+' R2:'+str(r_value)[0:4]+' RMSE Prior,insitu:'+str(rmse_prior*100)[0:4]) +plt.plot(sm_field,label='SM field measurement') +plt.plot(sm_prior_date, sm_prior_301, label='SM prior; RMSE [Vol%]: '+str(rmse_prior*100)[:4]+'; $R^2$: '+str(r_value_p)[:4]) +plt.plot(sm_date, sm_301, label='SM retrieved field; RMSE [Vol%]: '+str(rmse*100)[:4]+'; $R^2$: 
'+str(r_value)[:4], linewidth=2, color='Green') +plt.legend(prop={'size': 14}) +plt.grid() + +plt.xlim(start1, end1) +plt.ylim(0.1, 0.35) +plt.ylabel('Soil Moisture [$m^3/m^3$]', fontsize=15) +plt.xlabel('Date', fontsize=15) +plt.tick_params(labelsize=15) +plt.gca().xaxis.set_major_formatter(dates.DateFormatter('%Y-%m-%d')) +plt.tight_layout() +plt.savefig(datapath+'/sm_301_field.png',dpi=600) +plt.close()